#   Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Communicator is used for async distributed training in distribute_transpiler mode.
It's a wrapper of the C++ class Communicator and should be used inside the fleet API.
"""

from . import core
from .executor import global_scope
from .framework import Program
from .transpiler.distribute_transpiler import DistributedMode

__all__ = ['Communicator']


class Communicator(object):
    def __init__(self, program, mode, kwargs=None, envs=None):
        """
        Communicator is used for async distribute training in distribute_transpiler mode.
        It's a wrapper of a cpp class Communicator and should be used inside fleet API.

        Args:
            program(Program): the trainers program after transpile of distribute_transpiler.
            It's used by communicator to extract the information to do communication.

        Returns:
            None

        Examples:
            .. code-block:: python

                import paddle.fluid as fluid

                prog = fluid.Program()
                comm = fluid.communicator.Communicator(prog)
                comm.start()
                comm.stop()
        """
        # set all recv op to not_run mode
        assert isinstance(program, Program)
        for op in program.block(0).ops:
            if op.type == "recv":
                op._set_attr('do_not_run', True)

        if mode == DistributedMode.GEO:
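            # A minimal, hypothetical kwargs layout for GEO mode (variable
            # names and values below are illustrative only):
            #   kwargs = {
            #       "push_vars": {
            #           "w_0": {"var_names": ["w_0"],
            #                   "sections": [1000],
            #                   "epmap": ["127.0.0.1:6174"],
            #                   "is_sparse": False},
            #       },
            #       "trainers": 2,
            #       "push_nums": 10,
            #   }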
            push_vars = kwargs["push_vars"]
            push_var_names = []

            for k, vs in push_vars.items():
                varnames = "&".join(vs["var_names"])
                sections = "&".join([str(v) for v in vs["sections"]])
                endpoints = "&".join(vs["epmap"])
                is_sparse = "1" if vs["is_sparse"] else "0"

                push_var_names.append(k)
                envs[k] = "#".join([varnames, sections, endpoints, is_sparse])

            envs["geo_trainer_nums"] = str(kwargs["trainers"])
            envs["geo_need_push_nums"] = str(kwargs["push_nums"])
            envs["geo_send_varnames"] = '#'.join(push_var_names)

        # Map the DistributedMode enum value to the string name expected by
        # the underlying C++ communicator.
        mode_str = None

        if mode == DistributedMode.SYNC:
            mode_str = "SYNC"
        elif mode == DistributedMode.ASYNC:
            mode_str = "ASYNC"
        elif mode == DistributedMode.HALF_ASYNC:
            mode_str = "HALF_ASYNC"
        elif mode == DistributedMode.GEO:
            mode_str = "GEO"

        # Construct the underlying C++ communicator with the chosen mode, the
        # program description, the global scope and the environment settings.
        self.communicator_ = core.DistCommunicator(mode_str, program.desc,
                                                   global_scope(), envs)

    def start(self):
        """
        Start the communicator. It should be called before the training process starts.

        Returns:
            None

        Examples:
            .. code-block:: python

                import paddle.fluid as fluid
                from paddle.fluid.transpiler.distribute_transpiler import DistributedMode

                prog = fluid.Program()
                comm = fluid.communicator.Communicator(prog, DistributedMode.ASYNC)
                comm.start()
                comm.stop()
        """
        self.communicator_.start()

    def stop(self):
        """
        Stop the communicator. It should be called after the training process finishes.

        Returns:
            None

        Examples:
            .. code-block:: python

                import paddle.fluid as fluid
                from paddle.fluid.transpiler.distribute_transpiler import DistributedMode

                prog = fluid.Program()
                comm = fluid.communicator.Communicator(prog, DistributedMode.ASYNC)
                comm.start()
                comm.stop()
        """
        self.communicator_.stop()

    def is_running(self):
        """
        Check whether the communicator is running.

        Returns:
            bool: True if the communicator is running, False otherwise.

        Examples:
            .. code-block:: python

                import paddle.fluid as fluid
                from paddle.fluid.transpiler.distribute_transpiler import DistributedMode

                prog = fluid.Program()
                comm = fluid.communicator.Communicator(prog, DistributedMode.ASYNC)
                comm.is_running()
        """
        return self.communicator_.is_running()