#   Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__all__ = ['DeviceWorker', 'Hogwild', 'DownpourSGD', 'Section']


class DeviceWorker(object):
    """
    DeviceWorker is an abstract class that generates a worker desc.
    It is an internal class: concrete subclasses implement the actual
    computation logic, for example, the execution of a program or a graph.
    """

    def __init__(self):
        """
        Init.
        """
        self._program = None
        self._infer = None

    def _set_infer(self, infer=False):
        """
        Set the inference flag for the current device worker.

        Args:
            infer(bool): whether to do inference
        """
        self._infer = infer

    def _set_fleet_desc(self, fleet_desc):
        """
        Set fleet desc.

        Args:
            fleet_desc(PSParameter): pslib.PSParameter object
        """
        self._fleet_desc = fleet_desc

    def _set_program(self, program):
        """
        Set program.

        Args:
            program(Program): a Program object
        """
        self._program = program

    def _gen_worker_desc(self, trainer_desc):
        """
        Generate worker desc.

        Args:
            trainer_desc(TrainerDesc): a TrainerDesc object
        """
        raise NotImplementedError(
            "DeviceWorker does not implement _gen_worker_desc, "
            "please use a concrete subclass such as Hogwild or DownpourSGD.")


class Hogwild(DeviceWorker):
    """
    Hogwild is a kind of SGD algorithm: multiple threads update the
    shared parameters asynchronously and without locking.
    """

    def __init__(self):
        """
        Init.
        """
        super(Hogwild, self).__init__()

    def _gen_worker_desc(self, trainer_desc):
        """
        Generate worker desc; the device worker is HogwildWorker.

        Args:
            trainer_desc(TrainerDesc): a TrainerDesc object
        """
        trainer_desc.device_worker_name = "HogwildWorker"
        if self._infer:
            # skip the feed op when running an inference model
            trainer_desc.hogwild_param.skip_ops.extend(["feed"])
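
    # A hedged usage sketch (illustrative only; trainer_desc is assumed to be
    # a TrainerDesc object created by the trainer framework):
    #
    #     worker = Hogwild()
    #     worker._set_infer(True)                # feed ops will be skipped
    #     worker._gen_worker_desc(trainer_desc)  # selects "HogwildWorker"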


class DownpourSGD(DeviceWorker):
    """
    DownpourSGD is a kind of distributed SGD algorithm: trainers compute
    gradients asynchronously and exchange parameters with a parameter server.
    """

    def __init__(self):
        """
        Initialize the DownpourSGD device worker.
        """
        super(DownpourSGD, self).__init__()

    def _gen_worker_desc(self, trainer_desc):
        """
        Generate worker desc; the device worker is DownpourWorker.

        Args:
            trainer_desc(TrainerDesc): a TrainerDesc object
        """
        dense_table_set = set()
        if self._program is None:
            raise ValueError(
                "program of current device worker is not configured")
        program_id = str(id(self._program))
        opt_info = self._program._fleet_opt
        program_configs = opt_info["program_configs"]
        downpour = trainer_desc.downpour_param

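        # Expected shape of opt_info["program_configs"] (an assumption
        # inferred from the reads below, not a documented spec):
        #
        #     {program_id: {"push_sparse": [table_id, ...],
        #                   "push_dense":  [table_id, ...],
        #                   "pull_sparse": [table_id, ...],
        #                   "pull_dense":  [table_id, ...]}}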
        if program_id in program_configs:
            pc = downpour.program_config.add()
            pc.program_id = program_id
            for i in program_configs[program_id]["push_sparse"]:
                pc.push_sparse_table_id.extend([i])
            for i in program_configs[program_id]["push_dense"]:
                pc.push_dense_table_id.extend([i])
                dense_table_set.add(i)
            for i in program_configs[program_id]["pull_sparse"]:
                pc.pull_sparse_table_id.extend([i])
            for i in program_configs[program_id]["pull_dense"]:
                pc.pull_dense_table_id.extend([i])
                dense_table_set.add(i)

        trainer_desc.device_worker_name = "DownpourWorker"
        pull_thread = trainer_desc.pull_dense_param
        pull_thread.device_num = trainer_desc.thread_num
        for i in self._fleet_desc.trainer_param.dense_table:
            if i.table_id in dense_table_set:
                dense_table = pull_thread.dense_table.add()
                dense_table.dense_value_name.extend(i.dense_variable_name)
                dense_table.table_id = i.table_id
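
        # Dimension note for the loop below (hedged; inferred from the
        # use_cvm branch): with use_cvm the embedding keeps its full width
        # (fea_dim == emb_dim); without it, two feature columns (presumably
        # the show/click statistics consumed by CVM) are excluded from the
        # embedding, so emb_dim == fea_dim - 2.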
        for i, st in enumerate(self._fleet_desc.trainer_param.sparse_table):
            sparse_table = downpour.sparse_table.add()
            sparse_table.table_id = st.table_id
            sparse_table.sparse_key_name.extend(st.slot_key)
            sparse_table.sparse_value_name.extend(st.slot_value)
            sparse_table.sparse_grad_name.extend(st.slot_gradient)
            server_table = self._fleet_desc.server_param \
                .downpour_server_param.downpour_table_param[i]
            if opt_info["use_cvm"]:
                sparse_table.emb_dim = server_table.accessor.fea_dim
                sparse_table.fea_dim = sparse_table.emb_dim
            else:
                sparse_table.emb_dim = server_table.accessor.fea_dim - 2
                sparse_table.fea_dim = sparse_table.emb_dim + 2
            # TODO(guru4elephant): hard code here, need to improve
            sparse_table.label_var_name = "click"

        for i in self._fleet_desc.trainer_param.dense_table:
            if i.table_id in dense_table_set:
                dense_table = downpour.dense_table.add()
                dense_table.table_id = i.table_id
                dense_table.dense_value_name.extend(i.dense_variable_name)
                dense_table.dense_grad_name.extend(
                    i.dense_gradient_variable_name)
        downpour.skip_ops.extend(self._fleet_desc.trainer_param.skip_op)
        if self._infer:
            downpour.push_dense = False
            downpour.push_sparse = False
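
    # A hedged end-to-end sketch (names are illustrative): the program must
    # carry a _fleet_opt dict, and the fleet desc is a pslib.PSParameter.
    #
    #     worker = DownpourSGD()
    #     worker._set_program(main_program)
    #     worker._set_fleet_desc(fleet_desc)
    #     worker._gen_worker_desc(trainer_desc)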


class Section(DeviceWorker):
    """
    Section is the device worker for pipeline training; it generates
    the desc for SectionWorker.
    """

    def __init__(self):
        """
        Init.
        """
        super(Section, self).__init__()

    def _gen_worker_desc(self, trainer_desc):
        """
        Generate worker desc; the device worker is SectionWorker.

        Args:
            trainer_desc(TrainerDesc): a TrainerDesc object
        """
        from google.protobuf import text_format
        from . import core
        trainer_desc.device_worker_name = "SectionWorker"
        pipeline_opt = self._program._pipeline_opt
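        # Expected layout of _pipeline_opt (an assumption inferred from the
        # reads below, not a documented spec):
        #
        #     {"queue_size": int,
        #      "sync_steps": int,
        #      "start_cpu_core_id": int,
        #      "param_need_sync": [var_name, ...],
        #      "section_program_list": [{"program": Program,
        #                                "input_set": [...],
        #                                "output_set": [...]}, ...],
        #      "place_list": [CPUPlace/CUDAPlace/CUDAPinnedPlace, ...],
        #      "concurrency_list": [int, ...]}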
        section_param = trainer_desc.section_param
        section_param.queue_size = pipeline_opt["queue_size"]
        section_param.sync_steps = pipeline_opt["sync_steps"]
        section_param.start_cpu_core_id = pipeline_opt["start_cpu_core_id"]
        for e in pipeline_opt["param_need_sync"]:
            section_param.param_need_sync.append(e)
        for i, program in enumerate(pipeline_opt["section_program_list"]):
            cfg = section_param.section_config.add()
            cfg.program_desc.ParseFromString(program["program"]._get_desc()
                                             .serialize_to_string())
            # TODO: figure out why CopyFrom does not work here
            # cfg.program_desc.CopyFrom(program["program"]._get_desc())
            place = pipeline_opt["place_list"][i]
            if isinstance(place, core.CPUPlace):
                cfg.place = cfg.CPUPlace
            elif isinstance(place, core.CUDAPlace):
                cfg.place = cfg.CUDAPlace
            elif isinstance(place, core.CUDAPinnedPlace):
                cfg.place = cfg.CUDAPinnedPlace
            else:
                raise NotImplementedError(
                    "SectionWorker only supports CPUPlace, CUDAPlace and CUDAPinnedPlace now."
                )

            cfg.concurrency = pipeline_opt["concurrency_list"][i]
            for var in program["input_set"]:
                cfg.section_in_var_names.append(var)
            for var in program["output_set"]:
                cfg.section_out_var_names.append(var)


class DeviceWorkerFactory(object):
    def _create_device_worker(self, worker_type):
        classname = worker_type.capitalize()
        return globals()[classname]()
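

# A small usage sketch (hypothetical): the factory maps a worker type name to
# a class in this module via str.capitalize(), so "hogwild" -> Hogwild and
# "section" -> Section. Note that capitalize() lowercases the remaining
# characters, so "DownpourSGD" cannot be reached through this factory as-is.
#
#     worker = DeviceWorkerFactory()._create_device_worker("hogwild")
#     assert isinstance(worker, Hogwild)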