communicator.py
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from .executor import global_scope

"""
Communicator is used for async distribute training in distribute_transpiler mode.
It's a wrapper of a cpp class Communicator and should be used inside fleet API.
"""
from . import core
from paddle.fluid.incubate.fleet.parameter_server.mode import DistributedMode

__all__ = ['Communicator', 'FLCommunicator', 'LargeScaleKV']


class Communicator(object):
    def __init__(self, mode, kwargs=None, envs=None):
        """
        Communicator is used for async distribute training in distribute_transpiler mode.
        It's a wrapper of a cpp class Communicator and should be used inside fleet API.

        Args:
            program(Program): the trainers program after transpile of distribute_transpiler.
            It's used by communicator to extract the information to do communication.

        Returns:
            None

        Examples:
            .. code-block:: python

                import paddle.fluid as fluid

                prog = fluid.Program()
                comm = fluid.communicator.Communicator(prog)
                comm.start()
                comm.stop()
        """
        # set all recv op to not_run mode
        # build the environment map handed to the C++ communicator
        if envs is None:
            envs = {}

        if kwargs is not None:
            if mode == DistributedMode.SYNC:
                envs["pserver_endpoints"] = ','.join(
                    kwargs["pserver_endpoints"]
                )

            envs["trainers"] = str(kwargs["trainers"])
            envs["trainer_id"] = str(kwargs["trainer_id"])
            envs["need_global_step"] = str(kwargs["need_global_step"])
            envs["barrier_table_id"] = str(kwargs["barrier_table_id"])

        mode_str = None

        if mode == DistributedMode.SYNC:
            mode_str = "SYNC"
        elif mode == DistributedMode.ASYNC:
            mode_str = "ASYNC"
        elif mode == DistributedMode.HALF_ASYNC:
            mode_str = "HALF_ASYNC"
        elif mode == DistributedMode.GEO:
            mode_str = "GEO"

        self.mode = mode_str
        self.envs = envs
        self.communicator_ = None
        self.send_ctx_ = None
        self.recv_ctx_ = None

    def init_with_ctx(
        self, send_ctx, recv_ctx, proto_txt, unit64_hosts, scope=None
    ):
        if scope is None:
            scope = global_scope()
        self.communicator_ = core.DistCommunicator(
            self.mode,
            proto_txt,
            unit64_hosts,
            send_ctx,
            recv_ctx,
            scope,
            self.envs,
        )
        self.send_ctx_ = send_ctx
        self.recv_ctx_ = recv_ctx
    def create_client_to_client_connection(
        self,
        pserver_timeout_ms=500000,
        pserver_connect_timeout_ms=10000,
        max_retry=3,
    ):
        self.communicator_.create_client_to_client_connection(
            pserver_timeout_ms, pserver_connect_timeout_ms, max_retry
        )

    def get_client_info(self):
        return self.communicator_.get_client_info()

    def set_clients(self, host_list):
        self.communicator_.set_clients(host_list)

    def start(self):
        """
        Start the communicator. Should be called before the training process.

        Returns:
            None

        Examples:
            .. code-block:: python

                import paddle.fluid as fluid
                from paddle.fluid.incubate.fleet.parameter_server.mode import DistributedMode

                comm = fluid.communicator.Communicator(DistributedMode.ASYNC)
                comm.start()
                comm.stop()
        """
        if self.communicator_ is None:
            print('you must call init_with_ctx first to init comm before start')
            return
        self.communicator_.start()

    def stop(self):
        """
        Stop the communicator. Should be called after the training process.

        Returns:
            None

        Examples:
            .. code-block:: python

                import paddle.fluid as fluid
                from paddle.fluid.incubate.fleet.parameter_server.mode import DistributedMode

                comm = fluid.communicator.Communicator(DistributedMode.ASYNC)
                comm.start()
                comm.stop()
        """
        if self.communicator_ is None:
            print('you must call init_with_ctx first to init comm before stop')
            return
        self.communicator_.stop()

    def is_running(self):
        """
        Check whether the communicator is running or stopped.

        Returns:
            bool

        Examples:
            .. code-block:: python

                import paddle.fluid as fluid
                from paddle.fluid.incubate.fleet.parameter_server.mode import DistributedMode

                comm = fluid.communicator.Communicator(DistributedMode.ASYNC)
                comm.is_running()
        """
        if self.communicator_ is None:
            print('you must call init_with_ctx first to init comm before is_running')
            return
        return self.communicator_.is_running()

    def recv(self):
        self.communicator_.recv()

    def init_params(self, context):
        self.communicator_.init_params(context)

    def pull_dense(self, context):
        self.communicator_.pull_dense(context)

    def push_sparse_param(self, var_name, table_id=-1, scope=None):
        if scope is None:
            scope = global_scope()
        if not self.is_running():
            raise ValueError(
                "Communicator should init first. Using fleet.init_worker() before push_sparse_param()"
            )
        assert isinstance(var_name, str)
        assert isinstance(table_id, int)
        if table_id == -1:
            table_id = self.send_ctx_[var_name].table_id()
        self.communicator_.push_sparse_param(var_name, table_id, scope)
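
# A minimal end-to-end sketch (illustrative only): the send/recv contexts,
# proto text and pserver host list are produced by the fleet runtime, so the
# names below are placeholders rather than working values.
#
#     comm = Communicator(DistributedMode.ASYNC)
#     comm.init_with_ctx(send_ctx, recv_ctx, proto_txt, unit64_hosts)
#     comm.start()
#     # ... run the training loop ...
#     if comm.is_running():
#         comm.stop()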


class FLCommunicator(Communicator):  ## only for coordinator
    def __init__(self, ps_hosts, kwargs=None):
        mode = None
        super(FLCommunicator, self).__init__(mode, kwargs)
        send_ctx = {}
        dense_map = {}
        prototxt = ""
        self.mode = "WITH_COORDINATOR"
        self.init_with_ctx(send_ctx, dense_map, prototxt, ps_hosts)

    def start_coordinator(self, self_endpoint, trainer_endpoints):
        if self.communicator_ is not None:
            self.communicator_.start_coordinator(
                self_endpoint, trainer_endpoints
            )
        return

    def save_fl_strategy(self, mp):
        if self.communicator_ is not None:
            self.communicator_.save_fl_strategy(mp)
        else:
            raise ValueError("self.communicator_ is null")
        return

    def query_fl_clients_info(self):
        info_mp = {}
        if self.communicator_ is not None:
            info_mp = self.communicator_.query_fl_clients_info()
        return info_mp
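
# Coordinator-side sketch (illustrative only): the endpoint values below are
# placeholders, and their exact format is assumed to follow whatever the
# underlying core communicator expects.
#
#     fl_comm = FLCommunicator(ps_hosts)
#     fl_comm.start_coordinator(self_endpoint, trainer_endpoints)
#     clients_info = fl_comm.query_fl_clients_info()
#     fl_comm.save_fl_strategy(fl_strategy_map)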


class LargeScaleKV(object):
    def __init__(self):
        self.scale_kv = core.LargeScaleKV()

    def save(self, varname, dirname):
        self.scale_kv.save(varname, dirname)

    def load(self, varname, dirname):
        self.scale_kv.load(varname, dirname)

    def size(self, varname):
        return self.scale_kv.size(varname)
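
# Example (illustrative only; "embedding_0" and "./kv_ckpt" are placeholder
# variable and directory names):
#
#     kv = LargeScaleKV()
#     kv.save("embedding_0", "./kv_ckpt")
#     kv_size = kv.size("embedding_0")
#     kv.load("embedding_0", "./kv_ckpt")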


class HeterClient(object):
    def __init__(self, endpoint, previous_endpoint, trainer_id):
        self.heter_client_ = core.HeterClient(
            endpoint, previous_endpoint, trainer_id
        )

    def stop(self):
        self.heter_client_.stop()
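
# Example (illustrative only; the endpoints and trainer id are placeholders):
#
#     client = HeterClient("127.0.0.1:8500", "127.0.0.1:8400", 0)
#     # ... run heterogeneous training ...
#     client.stop()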