#   Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definition of device workers."""

from __future__ import print_function

__all__ = [
    'DeviceWorker', 'Hogwild', 'DownpourSGD', 'Section', 'DownpourSGDOPT',
    'HeterSection'
]


class DeviceWorker(object):
    """
    DeviceWorker is an abstract class, which generates worker desc.
    This is an internal class in which the computation logic of a worker is
    implemented, for example the execution of a program or a graph.
    """

    def __init__(self):
        """Init."""
        self._program = None
        self._infer = None

    def _set_infer(self, infer=False):
        """
        Set the inference flag for the current device worker.

        Args:
            infer(bool): whether to do inference
        """
        self._infer = infer

    def _set_fleet_desc(self, fleet_desc):
        """
        Set fleet desc.

        Args:
            fleet_desc(PSParameter): pslib.PSParameter object
        """
        self._fleet_desc = fleet_desc

    def _set_program(self, program):
        """
        Set program.

        Args:
            program(Program): a Program object
        """
        self._program = program

    def _gen_worker_desc(self, trainer_desc):
        """
        Generate worker desc.

        Args:
            trainer_desc(TrainerDesc): a TrainerDesc object
        """
        raise NotImplementedError(
            "DeviceWorker does not implement gen_worker_desc, "
            "please use Hogwild or DownpourSGD, etc.")


class Hogwild(DeviceWorker):
    """
    Hogwild is a kind of SGD algorithm.

    """

    def __init__(self):
        """Init."""
        super(Hogwild, self).__init__()

    def _gen_worker_desc(self, trainer_desc):
        """
        Generate worker desc, where the device worker is HogwildWorker.

        Args:
            trainer_desc(TrainerDesc): a TrainerDesc object
        """
        trainer_desc.device_worker_name = "HogwildWorker"
        if self._infer:
            # just ignore feed op for inference model
            trainer_desc.hogwild_param.skip_ops.extend([
                "feed", "push_sparse", "push_sparse_v2", "push_dense",
                "distributed_push_sparse", "send"
            ])

        dense_table_set = set()
        program_id = str(id(self._program))
        if self._program is None:
            print("program of current device worker is not configured")
            exit(-1)
        opt_info = self._program._fleet_opt
        # when opt_info is None or empty dict, it should return
        if not opt_info:
            return
        downpour = trainer_desc.downpour_param
        hogwild = trainer_desc.hogwild_param
        if opt_info["stat_var_names"]:
            for i in opt_info["stat_var_names"]:
                hogwild.stat_var_names.extend([i])
                downpour.stat_var_names.extend([i])

        from paddle.fluid.incubate.fleet.parameter_server import version

        if version.is_transpiler() and "fleet_desc" not in opt_info:
            return

        program_configs = opt_info["program_configs"]

        for pid in program_configs:
            if pid == program_id:
                pc = downpour.program_config.add()
                pc.program_id = program_id
                for i in program_configs[program_id]["push_sparse"]:
                    pc.push_sparse_table_id.extend([i])
                for i in program_configs[program_id]["push_dense"]:
                    pc.push_dense_table_id.extend([i])
                    dense_table_set.add(i)
                for i in program_configs[program_id]["pull_sparse"]:
                    pc.pull_sparse_table_id.extend([i])
                for i in program_configs[program_id]["pull_dense"]:
                    pc.pull_dense_table_id.extend([i])
                    dense_table_set.add(i)
                break

        trainer_desc.device_worker_name = "HogwildWorker"
        pull_thread = trainer_desc.pull_dense_param
        pull_thread.device_num = trainer_desc.thread_num
        if opt_info.get("program_id_to_worker") is None:
            raise ValueError("opt_info must have program_id_to_worker")
        prog_id_to_worker = opt_info["program_id_to_worker"]
        if prog_id_to_worker.get(program_id) is None:
            raise ValueError("%s not found in program_id_to_worker" %
                             program_id)
        worker = opt_info["program_id_to_worker"][program_id]
        for i in worker.get_desc().dense_table:
            if i.table_id in dense_table_set:
                dense_table = pull_thread.dense_table.add()
                dense_table.dense_value_name.extend(i.dense_variable_name)
                dense_table.table_id = \
                    i.table_id
        sparse_len = len(worker.get_desc().sparse_table)
        for i in range(sparse_len):
            sparse_table = downpour.sparse_table.add()
            sparse_table.table_id = worker.get_desc().sparse_table[i].table_id
            sparse_table.sparse_key_name.extend(worker.get_desc().sparse_table[
                i].slot_key)
            sparse_table.sparse_value_name.extend(worker.get_desc()
                                                  .sparse_table[i].slot_value)
            sparse_table.sparse_grad_name.extend(worker.get_desc().sparse_table[
                i].slot_gradient)
            sparse_table.fea_dim = \
                self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
                    i].accessor.fea_dim
            # emb_dim is not used here
            sparse_table.emb_dim = -1
            # the hard-coded click label is not used here
            sparse_table.label_var_name = ""

        for i in worker.get_desc().dense_table:
            if i.table_id in dense_table_set:
                dense_table = downpour.dense_table.add()
                dense_table.table_id = i.table_id
                dense_table.dense_value_name.extend(i.dense_variable_name)
                dense_table.dense_grad_name.extend(
                    i.dense_gradient_variable_name)
        hogwild.skip_ops.extend(worker.get_desc().skip_op)
        if self._infer:
            hogwild.skip_ops.extend(
                ["push_sparse", "push_sparse_v2", "push_dense"])


class DownpourSGD(DeviceWorker):
    """
    DownpourSGD is a kind of distributed SGD algorithm.
    """

    def __init__(self):
        """
        Init.
        initialize downpourSGD device worker
        """
        super(DownpourSGD, self).__init__()

    def _gen_worker_desc(self, trainer_desc):
        """
        Generate worker desc, where the device worker is DownpourWorker.

        Args:
            trainer_desc(TrainerDesc): a TrainerDesc object
        """
        dense_table_set = set()
        program_id = str(id(self._program))
        if self._program is None:
            print("program of current device worker is not configured")
            exit(-1)
        opt_info = self._program._fleet_opt
        program_configs = opt_info["program_configs"]
        downpour = trainer_desc.downpour_param

        for pid in program_configs:
            if pid == program_id:
                pc = downpour.program_config.add()
                pc.program_id = program_id
                for i in program_configs[program_id]["push_sparse"]:
                    pc.push_sparse_table_id.extend([i])
                for i in program_configs[program_id]["push_dense"]:
                    pc.push_dense_table_id.extend([i])
                    dense_table_set.add(i)
                for i in program_configs[program_id]["pull_sparse"]:
                    pc.pull_sparse_table_id.extend([i])
                for i in program_configs[program_id]["pull_dense"]:
                    pc.pull_dense_table_id.extend([i])
                    dense_table_set.add(i)
                # support partially pushing dense tables, e.g. for multitask training
                if "cond2denseid" in program_configs[program_id]:
                    cond2denseid = program_configs[program_id]["cond2denseid"]
                    for key, value in cond2denseid.items():
                        mc_map = pc.partial_pushdense_condtable_map.add()
                        mc_map.key = key
                        mc_map.value = value
                break

        trainer_desc.device_worker_name = opt_info.get("worker_class",
                                                       "DownpourWorker")
        pull_thread = trainer_desc.pull_dense_param
        pull_thread.device_num = trainer_desc.thread_num
        if opt_info.get("program_id_to_worker") is None:
            raise ValueError("opt_info must have program_id_to_worker")
        prog_id_to_worker = opt_info["program_id_to_worker"]
        if prog_id_to_worker.get(program_id) is None:
            raise ValueError("%s not found in program_id_to_worker" %
                             program_id)
        worker = opt_info["program_id_to_worker"][program_id]
        for i in worker.get_desc().dense_table:
            if i.table_id in dense_table_set:
                dense_table = pull_thread.dense_table.add()
                dense_table.dense_value_name.extend(i.dense_variable_name)
                dense_table.table_id = \
                    i.table_id
        sparse_len = len(worker.get_desc().sparse_table)
        for i in range(sparse_len):
            sparse_table = downpour.sparse_table.add()
            sparse_table.table_id = worker.get_desc().sparse_table[i].table_id
            sparse_table.sparse_key_name.extend(worker.get_desc().sparse_table[
                i].slot_key)
            sparse_table.sparse_value_name.extend(worker.get_desc()
                                                  .sparse_table[i].slot_value)
            sparse_table.sparse_grad_name.extend(worker.get_desc().sparse_table[
                i].slot_gradient)
            if opt_info["use_cvm"] or "no_cvm" in opt_info and opt_info[
                    "no_cvm"] == True:
                sparse_table.emb_dim = \
                    self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
                        i].accessor.fea_dim
                sparse_table.fea_dim = sparse_table.emb_dim
            else:
                sparse_table.emb_dim = \
                    self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
                        i].accessor.fea_dim - 2
                sparse_table.fea_dim = sparse_table.emb_dim + 2
            # TODO(guru4elephant): hard code here, need to improve
            sparse_table.label_var_name = "click"
        if opt_info["stat_var_names"]:
            for i in opt_info["stat_var_names"]:
                downpour.stat_var_names.extend([i])

        for i in worker.get_desc().dense_table:
            if i.table_id in dense_table_set:
                dense_table = downpour.dense_table.add()
                dense_table.table_id = i.table_id
                dense_table.dense_value_name.extend(i.dense_variable_name)
                dense_table.dense_grad_name.extend(
                    i.dense_gradient_variable_name)
        downpour.skip_ops.extend(worker.get_desc().skip_op)
        if self._infer:
            downpour.push_dense = False
            downpour.push_sparse = False
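
# Reading aid for the emb_dim/fea_dim arithmetic above (no extra behavior is
# implied): the accessor's fea_dim counts the embedding columns plus two CVM
# statistics columns (show/click). When use_cvm is on (or no_cvm is explicitly
# set), the CVM columns are kept and emb_dim == fea_dim; otherwise
# emb_dim = fea_dim - 2 and fea_dim = emb_dim + 2, e.g. fea_dim = 11 -> emb_dim = 9.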


class DownpourSGDOPT(DeviceWorker):
    """
    DownpourSGDOPT is a kind of distributed SGD algorithm.
    """

    def __init__(self):
        """
        Init.
        initialize downpourSGDOPT device worker
        """
        super(DownpourSGDOPT, self).__init__()

    def _gen_worker_desc(self, trainer_desc):
        """
        Generate worker desc, where the device worker is DownpourWorkerOpt.

        Args:
            trainer_desc(TrainerDesc): a TrainerDesc object
        """
        dense_table_set = set()
        program_id = str(id(self._program))
        if self._program is None:
            print("program of current device worker is not configured")
            exit(-1)
        opt_info = self._program._fleet_opt
        program_configs = opt_info["program_configs"]
        downpour = trainer_desc.downpour_param

        for pid in program_configs:
            if pid == program_id:
                pc = downpour.program_config.add()
                pc.program_id = program_id
                for i in program_configs[program_id]["push_sparse"]:
                    pc.push_sparse_table_id.extend([i])
                for i in program_configs[program_id]["push_dense"]:
                    pc.push_dense_table_id.extend([i])
                    dense_table_set.add(i)
                for i in program_configs[program_id]["pull_sparse"]:
                    pc.pull_sparse_table_id.extend([i])
                for i in program_configs[program_id]["pull_dense"]:
                    pc.pull_dense_table_id.extend([i])
                    dense_table_set.add(i)
                break

        trainer_desc.device_worker_name = "DownpourWorkerOpt"
        pull_thread = trainer_desc.pull_dense_param
        pull_thread.device_num = trainer_desc.thread_num
        if opt_info.get("program_id_to_worker") is None:
            raise ValueError("opt_info must have program_id_to_worker")
        prog_id_to_worker = opt_info["program_id_to_worker"]
        if prog_id_to_worker.get(program_id) is None:
            raise ValueError("%s not found in program_id_to_worker" %
                             program_id)
        worker = opt_info["program_id_to_worker"][program_id]
        for i in worker.get_desc().dense_table:
            if i.table_id in dense_table_set:
                dense_table = pull_thread.dense_table.add()
                dense_table.dense_value_name.extend(i.dense_variable_name)
                dense_table.table_id = \
                    i.table_id
        sparse_len = len(worker.get_desc().sparse_table)
        for i in range(sparse_len):
            sparse_table = downpour.sparse_table.add()
            sparse_table.table_id = worker.get_desc().sparse_table[i].table_id
            sparse_table.sparse_key_name.extend(worker.get_desc().sparse_table[
                i].slot_key)
            sparse_table.sparse_value_name.extend(worker.get_desc()
                                                  .sparse_table[i].slot_value)
            sparse_table.sparse_grad_name.extend(worker.get_desc().sparse_table[
                i].slot_gradient)
            if opt_info["use_cvm"] or "no_cvm" in opt_info and opt_info[
                    "no_cvm"] == True:
                sparse_table.emb_dim = \
                    self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
                        i].accessor.fea_dim
                sparse_table.fea_dim = sparse_table.emb_dim
            else:
                sparse_table.emb_dim = \
                    self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
                        i].accessor.fea_dim - 2
                sparse_table.fea_dim = sparse_table.emb_dim + 2
            # TODO(guru4elephant): hard code here, need to improve
            sparse_table.label_var_name = "click"
        if "local_tables" in opt_info and sparse_table.table_id in opt_info[
                "local_tables"]:
            sparse_table.is_local = True
        if "async_tables" in opt_info and sparse_table.table_id in opt_info[
                "async_tables"]:
            sparse_table.is_async = True
        if opt_info["stat_var_names"]:
            for i in opt_info["stat_var_names"]:
                downpour.stat_var_names.extend([i])

        for i in worker.get_desc().dense_table:
            if i.table_id in dense_table_set:
                dense_table = downpour.dense_table.add()
                dense_table.table_id = i.table_id
                dense_table.dense_value_name.extend(i.dense_variable_name)
                dense_table.dense_grad_name.extend(
                    i.dense_gradient_variable_name)
        downpour.skip_ops.extend(worker.get_desc().skip_op)
        if self._infer:
            downpour.push_dense = False
            downpour.push_sparse = False


class Section(DeviceWorker):
    """SectionWorker."""

    def __init__(self):
        """Init."""
        super(Section, self).__init__()

    def _gen_worker_desc(self, trainer_desc):
        """
        Generate worker desc, where the device worker is SectionWorker.
        Args:
            trainer_desc(TrainerDesc): a TrainerDesc object
        """
        from google.protobuf import text_format
        from . import core
        trainer_desc.device_worker_name = "SectionWorker"
        pipeline_opt = self._program._pipeline_opt
        section_param = trainer_desc.section_param
        section_param.num_microbatches = pipeline_opt["num_microbatches"]
        section_param.start_cpu_core_id = pipeline_opt["start_cpu_core_id"]
        section_param.pipeline_stage = pipeline_opt["pipeline_stage"]
        section_param.num_pipeline_stages = pipeline_opt["num_pipeline_stages"]
        schedule_mode_str = pipeline_opt["schedule_mode"]
        # F-then-B scheduler which runs Forward phase for all microbatches,
        # then runs Backward phase for all microbatches.
        # 1F1B scheduler, which runs forward phase and backward phase alternately
        # after startup phase.
        assert schedule_mode_str in ["F-then-B", "1F1B"], (
            "The schedule mode "
            "for pipeline must be one of F-then-B or 1F1B")
        schedule_mode = 0 if schedule_mode_str == "F-then-B" else 1
        section_param.schedule_mode = schedule_mode
        cfg = section_param.section_config
        program = pipeline_opt["section_program"]
        cfg.program_desc.ParseFromString(program._get_desc()
                                         .serialize_to_string())
        # TODO: why does CopyFrom not work
        # cfg.program_desc.CopyFrom(program.program._get_desc())
        place = pipeline_opt["place"]
        place_id = pipeline_opt["place_id"]
        if core.is_compiled_with_cuda():
            assert isinstance(place, core.CUDAPlace)
        elif core.is_compiled_with_npu():
            assert isinstance(place, core.NPUPlace)
        cfg.place = cfg.CUDAPlace
        cfg.place_id = place_id
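
# For reference, the keys this method reads from `program._pipeline_opt`; the
# concrete values below are hypothetical:
#
#     pipeline_opt = {
#         "num_microbatches": 4,
#         "start_cpu_core_id": 0,
#         "pipeline_stage": 0,
#         "num_pipeline_stages": 2,
#         "schedule_mode": "1F1B",          # or "F-then-B"
#         "section_program": section_prog,  # Program for this pipeline stage
#         "place": core.CUDAPlace(0),
#         "place_id": 0,
#     }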


class HeterSection(DeviceWorker):
    """HeterSectionWorker."""

    def __init__(self):
        """Init."""
        super(HeterSection, self).__init__()

    def _gen_worker_desc(self, trainer_desc):
        """
        Generate worker desc, where the device worker is HeterSectionWorker.
        Args:
            trainer_desc(TrainerDesc): a TrainerDesc object
        """
        from google.protobuf import text_format
        from . import core
        trainer_desc.device_worker_name = "HeterSectionWorker"
        heter_pipeline_opt = self._program._heter_pipeline_opt
        heter_section_param = trainer_desc.heter_section_param
        heter_section_param.num_microbatches = heter_pipeline_opt[
            "num_microbatches"]
        heter_section_param.pipeline_stage = heter_pipeline_opt[
            "pipeline_stage"]
        heter_section_param.num_pipeline_stages = heter_pipeline_opt[
            "num_pipeline_stages"]
        cfg = heter_section_param.section_config
        program = heter_pipeline_opt["section_program"]
        cfg.program_desc.ParseFromString(program._get_desc()
                                         .serialize_to_string())


class DeviceWorkerFactory(object):
    def _create_device_worker(self, worker_type):
        classname = worker_type.capitalize()
        return globals()[classname]()
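
# An illustrative end-to-end sketch (hypothetical caller code, not part of the
# public API): the trainer asks the factory for a device worker by name and
# lets it fill in the worker part of a TrainerDesc.
#
#     factory = DeviceWorkerFactory()
#     device_worker = factory._create_device_worker("hogwild")  # -> Hogwild()
#     device_worker._set_fleet_desc(fleet_desc)
#     device_worker._set_program(main_program)
#     device_worker._set_infer(False)
#     device_worker._gen_worker_desc(trainer_desc)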