#   Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys
import time
import subprocess
import unittest
import numpy

import paddle
import paddle.fluid as fluid

import paddle.distributed.fleet.base.role_maker as role_maker
import paddle.distributed.fleet as fleet

from paddle.distributed.utils.launch_utils import find_free_ports

paddle.enable_static()


class TestCommunicatorGeoEnd2End(unittest.TestCase):
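    """End-to-end test for the fleet communicator in GEO mode.

    The parameter-server half runs in a separate Python subprocess while the
    trainer half runs in the current process; both build the same small
    network and synchronize through the a_sync/k_steps (GEO) strategy.
    """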

    def net(self):
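        # Dense features plus a sparse id feeding an embedding table; a single
        # fc output trained with squared error.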
        x = fluid.layers.data(name='x', shape=[13], dtype='float32')
        x1 = fluid.layers.data(name='x1', shape=[1], dtype='int64', lod_level=1)

        emb = fluid.layers.embedding(
            input=x1,
            size=[10000, 10],
            param_attr=fluid.ParamAttr(
                name="embedding",
                initializer=fluid.initializer.Constant(value=0.01)),
            is_sparse=True)

        pool = fluid.layers.sequence_pool(input=emb, pool_type="sum")
        z = fluid.layers.concat(input=[x, pool], axis=1)
        y_predict = fluid.layers.fc(input=z, size=1, act=None)
        y = fluid.layers.data(name='y', shape=[1], dtype='float32')

        cost = fluid.layers.square_error_cost(input=y_predict, label=y)
        avg_cost = paddle.mean(cost)
        return avg_cost, x, x1, y

    def fake_reader(self):

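        # Generator yielding random dense features, a random embedding id,
        # and a random label for each sample.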
        def reader():
            for i in range(10000):
                x = numpy.random.random((1, 13)).astype('float32')
                z = numpy.random.randint(0, 9999, (1, 1)).astype('int64')
                y = numpy.random.randint(0, 2, (1, 1)).astype('int64')
                yield x, z, y

        return reader

    def run_pserver(self, role, strategy):
        fleet.init(role)
        avg_cost, x, z, y = self.net()
        optimizer = fluid.optimizer.SGD(0.01)
        optimizer = fleet.distributed_optimizer(optimizer, strategy)
        optimizer.minimize(avg_cost)

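        # run_server() blocks and serves trainer requests until the test
        # driver kills this process.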
        fleet.init_server()
        fleet.run_server()

    def run_trainer(self, role, strategy):
        place = fluid.core.CPUPlace()
        exe = fluid.Executor(place)

        fleet.init(role)
        avg_cost, x, z, y = self.net()
        optimizer = fluid.optimizer.SGD(0.01)
        optimizer = fleet.distributed_optimizer(optimizer, strategy)
        optimizer.minimize(avg_cost)

        exe.run(fluid.default_startup_program())
        fleet.init_worker()

        train_reader = paddle.batch(self.fake_reader(), batch_size=24)
        feeder = fluid.DataFeeder(place=place, feed_list=[x, z, y])

        for batch_id, data in enumerate(train_reader()):
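            # Nothing is fetched or asserted; the test only verifies that
            # GEO-mode training steps run end to end without error.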
            exe.run(fluid.default_main_program(),
                    feed=feeder.feed(data),
                    fetch_list=[])

        fleet.stop_worker()

    def run_ut(self):
        training_role = os.getenv("TRAINING_ROLE", "TRAINER")

        # Environment expected by PaddleCloudRoleMaker: a single parameter
        # server and a single trainer, with this process as trainer 0.
        os.environ["PADDLE_PSERVER_NUMS"] = "1"
        os.environ["PADDLE_TRAINERS_NUM"] = "1"
        os.environ["PADDLE_TRAINER_ID"] = "0"
        os.environ["POD_IP"] = "127.0.0.1"

        role = role_maker.PaddleCloudRoleMaker()

        strategy = paddle.distributed.fleet.DistributedStrategy()
        strategy.a_sync = True
        # k_steps > 0 selects the GEO communicator; launch_barrier is disabled
        # so this single-machine test does not block waiting for a barrier.
        strategy.a_sync_configs = {"k_steps": 100, "launch_barrier": False}

        if training_role == "TRAINER":
            self.run_trainer(role, strategy)
        else:
            self.run_pserver(role, strategy)

    def test_communicator(self):
        run_server_cmd = """

import sys
import os

import time
import threading
import subprocess
import unittest
import numpy

import paddle
import paddle.fluid as fluid

from paddle.fluid.communicator import Communicator
import paddle.fluid.incubate.fleet.base.role_maker as role_maker
from paddle.fluid.incubate.fleet.parameter_server.mode import DistributedMode
import paddle.distributed.fleet as fleet

from test_communicator_geo import TestCommunicatorGeoEnd2End

paddle.enable_static()

class RunServer(TestCommunicatorGeoEnd2End):
    def runTest(self):
        pass

os.environ["TRAINING_ROLE"] = "PSERVER"

half_run_server = RunServer()
half_run_server.run_ut()
"""

        server_file = "run_server_for_communicator_geo.py"
        with open(server_file, "w") as wb:
            wb.write(run_server_cmd)

        port = find_free_ports(1).pop()

        os.environ["TRAINING_ROLE"] = "PSERVER"
        os.environ["PADDLE_PORT"] = str(port)
        os.environ["PADDLE_PSERVERS_IP_PORT_LIST"] = "127.0.0.1:{}".format(port)

        _python = sys.executable

        ps_cmd = "{} {}".format(_python, server_file)

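        # Launch the parameter-server script in a separate Python process so
        # the trainer below has something to connect to.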
        ps_proc = subprocess.Popen(ps_cmd.strip().split(" "),
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)

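        # Give the parameter server a few seconds to start before the trainer
        # tries to connect.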
        time.sleep(5)

        os.environ["TRAINING_ROLE"] = "TRAINER"

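        # Run the trainer half in this process, then tear down the pserver
        # subprocess and its generated script.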
        self.run_ut()
        ps_proc.kill()
        ps_proc.wait()
        outs, errs = ps_proc.communicate()

        if os.path.exists(server_file):
            os.remove(server_file)


if __name__ == '__main__':
    unittest.main()