#   Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defination of device workers."""

__all__ = [
    'DeviceWorker', 'Hogwild', 'DownpourSGD', 'Section', 'DownpourSGDOPT',
    'HeterSection', 'DownpourLite'
]


class DeviceWorker(object):
    """
    DeviceWorker is an abstract class, which generates worker desc.
    This is an internal class that encapsulates the computation logic of a
    worker, for example, the execution of a program or a graph.
    """

    def __init__(self):
        """Init."""
        self._program = None
        self._infer = None

    def _set_infer(self, infer=False):
        """
        Set the inference flag for the current device worker.

        Args:
            infer(bool): whether to do inference
        """
        self._infer = infer

    def _set_fleet_desc(self, fleet_desc):
        """
        Set fleet desc.

        Args:
            fleet_desc(PSParameter): pslib.PSParameter object
        """
        self._fleet_desc = fleet_desc

    def _set_program(self, program):
        """
        Set program.

        Args:
            program(Program): a Program object
        """
        self._program = program

    def _gen_worker_desc(self, trainer_desc):
        """
        Generate worker desc.

        Args:
            trainer_desc(TrainerDesc): a TrainerDesc object
        """
        raise NotImplementedError(
            "DeviceWorker does not implement gen_worker_desc, "
            "please use Hogwild or DownpourSGD, etc.")


class Hogwild(DeviceWorker):
    """
    Hogwild is a lock-free parallel SGD algorithm.

    """

    def __init__(self):
        """Init."""
        super(Hogwild, self).__init__()

    def _gen_worker_desc(self, trainer_desc):
        """
        Generate worker desc for HogwildWorker.

        Args:
            trainer_desc(TrainerDesc): a TrainerDesc object
        """
        trainer_desc.device_worker_name = "HogwildWorker"
        if self._infer:
            # for inference, skip the feed op and all parameter-push ops
            trainer_desc.hogwild_param.skip_ops.extend([
                "feed", "push_sparse", "push_sparse_v2", "push_dense",
                "distributed_push_sparse", "send"
            ])

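        # The Program's Python object id is used as the key that links this
        # device worker to its entry in opt_info["program_configs"] below.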
        dense_table_set = set()
        program_id = str(id(self._program))
        print("device worker program id:", program_id)
        if self._program is None:
            print("program of current device worker is not configured")
            exit(-1)
        opt_info = self._program._fleet_opt
        # when opt_info is None or empty dict, it should return
        if not opt_info:
            return
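        # Mirror the stat var names (typically batch norm statistics that the
        # servers aggregate) into both the hogwild and downpour params.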
        downpour = trainer_desc.downpour_param
        hogwild = trainer_desc.hogwild_param
        if opt_info["stat_var_names"]:
            for i in opt_info["stat_var_names"]:
                hogwild.stat_var_names.extend([i])
                downpour.stat_var_names.extend([i])

        from paddle.fluid.incubate.fleet.parameter_server import version

        if version.is_transpiler(
        ) and "fleet_desc" not in opt_info and "program_configs" not in opt_info:
            return

        program_configs = opt_info["program_configs"]
        print("device worker program_configs:", program_configs)

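        # Locate the program_config whose id matches this worker's Program and
        # record its pull/push table ids.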
        for pid in program_configs:
            print("device worker", pid, program_id)
            if pid == program_id:
                pc = downpour.program_config.add()
                pc.program_id = program_id
                print("device worker pull dense:",
                      program_configs[program_id]["pull_dense"])
                for i in program_configs[program_id]["push_sparse"]:
                    pc.push_sparse_table_id.extend([i])
                for i in program_configs[program_id]["push_dense"]:
                    pc.push_dense_table_id.extend([i])
                    dense_table_set.add(i)
                for i in program_configs[program_id]["pull_sparse"]:
                    pc.pull_sparse_table_id.extend([i])
                for i in program_configs[program_id]["pull_dense"]:
                    pc.pull_dense_table_id.extend([i])
                    dense_table_set.add(i)
                break

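        # Configure the background thread that pulls dense tables; table
        # descriptors come from either program_id_to_worker or the plain
        # dense_table_config mapping.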
        trainer_desc.device_worker_name = "HogwildWorker"
        pull_thread = trainer_desc.pull_dense_param
        pull_thread.device_num = trainer_desc.thread_num
        if opt_info.get("program_id_to_worker") is None and opt_info.get(
                "dense_table_config") is None:
            raise ValueError(
                "opt_info must have program_id_to_worker or dense_table_config")
        if opt_info.get("program_id_to_worker") is not None:
            prog_id_to_worker = opt_info["program_id_to_worker"]
            if prog_id_to_worker.get(program_id) is None:
                raise ValueError("%s not found in program_id_to_worker" %
                                 program_id)
            worker = opt_info["program_id_to_worker"][program_id]
            for i in worker.get_desc().dense_table:
                if i.table_id in dense_table_set:
                    dense_table = pull_thread.dense_table.add()
                    dense_table.dense_value_name.extend(i.dense_variable_name)
                    dense_table.table_id = \
                        i.table_id
            sparse_len = len(worker.get_desc().sparse_table)
            for i in range(sparse_len):
                sparse_table = downpour.sparse_table.add()
                sparse_table.table_id = worker.get_desc(
                ).sparse_table[i].table_id
                sparse_table.sparse_key_name.extend(
                    worker.get_desc().sparse_table[i].slot_key)
                sparse_table.sparse_value_name.extend(
                    worker.get_desc().sparse_table[i].slot_value)
                sparse_table.sparse_grad_name.extend(
                    worker.get_desc().sparse_table[i].slot_gradient)
                sparse_table.fea_dim = \
                    self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
                        i].accessor.fea_dim
                # not use emb_dim
                sparse_table.emb_dim = -1
                # not use hard code click
                sparse_table.label_var_name = ""

            for i in worker.get_desc().dense_table:
                if i.table_id in dense_table_set:
                    dense_table = downpour.dense_table.add()
                    dense_table.table_id = i.table_id
                    dense_table.dense_value_name.extend(i.dense_variable_name)
                    dense_table.dense_grad_name.extend(
                        i.dense_gradient_variable_name)
            hogwild.skip_ops.extend(worker.get_desc().skip_op)
        else:
            dense_table_config = opt_info.get("dense_table_config")
            print("device worker dense_table_config:", dense_table_config)
            for table_id, varnames in dense_table_config.items():
                dense_table = pull_thread.dense_table.add()
                dense_table.dense_value_name.extend(varnames)
                dense_table.table_id = table_id

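        # Inference is read-only with respect to parameters, so the
        # parameter-push ops are skipped.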
        if self._infer:
            hogwild.skip_ops.extend(
                ["push_sparse", "push_sparse_v2", "push_dense"])


class DownpourLite(DeviceWorker):
    """
    DownpourLite is a kind of SGD algorithm.

    """

    def __init__(self):
        """Init."""
        super(DownpourLite, self).__init__()

    def _gen_worker_desc(self, trainer_desc):
        """
        Generate worker desc for DownpourLiteWorker.

        Args:
            trainer_desc(TrainerDesc): a TrainerDesc object
        """
        print("create DownpourLiteWorker")
        trainer_desc.device_worker_name = "DownpourLiteWorker"
        if self._infer:
            # for inference, skip the feed op and all parameter-push ops
            trainer_desc.downpour_param.skip_ops.extend([
                "feed", "push_sparse", "push_sparse_v2", "push_dense",
                "distributed_push_sparse", "send"
            ])

        dense_table_set = set()
        program_id = str(id(self._program))
        print("device worker program id:", program_id)
        if self._program is None:
            print("program of current device worker is not configured")
            exit(-1)
        opt_info = self._program._fleet_opt
        # when opt_info is None or empty dict, it should return
        if not opt_info:
            return
        downpour = trainer_desc.downpour_param
        if opt_info["stat_var_names"]:
            for i in opt_info["stat_var_names"]:
                downpour.stat_var_names.extend([i])

        from paddle.fluid.incubate.fleet.parameter_server import version

        if version.is_transpiler(
        ) and "fleet_desc" not in opt_info and "program_configs" not in opt_info:
            return

        program_configs = opt_info["program_configs"]
        print("device worker program_configs:", program_configs)

        for pid in program_configs:
            print("device worker", pid, program_id)
            if pid == program_id:
                pc = downpour.program_config.add()
                pc.program_id = program_id
                print("device worker pull dense:",
                      program_configs[program_id]["pull_dense"])
                for i in program_configs[program_id]["push_sparse"]:
                    pc.push_sparse_table_id.extend([i])
                for i in program_configs[program_id]["push_dense"]:
                    pc.push_dense_table_id.extend([i])
                    dense_table_set.add(i)
                for i in program_configs[program_id]["pull_sparse"]:
                    pc.pull_sparse_table_id.extend([i])
                for i in program_configs[program_id]["pull_dense"]:
                    pc.pull_dense_table_id.extend([i])
                    dense_table_set.add(i)
                break

        pull_thread = trainer_desc.pull_dense_param
        pull_thread.device_num = trainer_desc.thread_num
        if opt_info.get("program_id_to_worker") is None and opt_info.get(
                "dense_table_config") is None:
            raise ValueError(
                "opt_info must have program_id_to_worker or dense_table_config")
        if opt_info.get("program_id_to_worker") is not None:
            prog_id_to_worker = opt_info["program_id_to_worker"]
            if prog_id_to_worker.get(program_id) is None:
                raise ValueError("%s not found in program_id_to_worker" %
                                 program_id)
            worker = opt_info["program_id_to_worker"][program_id]
            for i in worker.get_desc().dense_table:
                if i.table_id in dense_table_set:
                    dense_table = pull_thread.dense_table.add()
                    dense_table.dense_value_name.extend(i.dense_variable_name)
                    dense_table.table_id = \
                        i.table_id
            sparse_len = len(worker.get_desc().sparse_table)
            for i in range(sparse_len):
                sparse_table = downpour.sparse_table.add()
                sparse_table.table_id = worker.get_desc(
                ).sparse_table[i].table_id
                sparse_table.sparse_key_name.extend(
                    worker.get_desc().sparse_table[i].slot_key)
                sparse_table.sparse_value_name.extend(
                    worker.get_desc().sparse_table[i].slot_value)
                sparse_table.sparse_grad_name.extend(
                    worker.get_desc().sparse_table[i].slot_gradient)
                sparse_table.fea_dim = \
                    self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
                        i].accessor.fea_dim
                # not use emb_dim
                sparse_table.emb_dim = -1
                # not use hard code click
                sparse_table.label_var_name = ""

            for i in worker.get_desc().dense_table:
                if i.table_id in dense_table_set:
                    dense_table = downpour.dense_table.add()
                    dense_table.table_id = i.table_id
                    dense_table.dense_value_name.extend(i.dense_variable_name)
                    dense_table.dense_grad_name.extend(
                        i.dense_gradient_variable_name)
            downpour.skip_ops.extend(worker.get_desc().skip_op)
        else:
            dense_table_config = opt_info.get("dense_table_config")
            print("device worker dense_table_config:", dense_table_config)
            for table_id, varnames in dense_table_config.items():
                dense_table = pull_thread.dense_table.add()
                dense_table.dense_value_name.extend(varnames)
                dense_table.table_id = table_id

        if self._infer:
            downpour.skip_ops.extend(
                ["push_sparse", "push_sparse_v2", "push_dense"])


class DownpourSGD(DeviceWorker):
    """
    DownpourSGD is a kind of distributed SGD algorithm.
    """

    def __init__(self):
        """
        Initialize the DownpourSGD device worker.
        """
        super(DownpourSGD, self).__init__()

    def _gen_worker_desc(self, trainer_desc):
        """
        Generate worker desc for DownpourWorker.

        Args:
            trainer_desc(TrainerDesc): a TrainerDesc object
        """
        dense_table_set = set()
        program_id = str(id(self._program))
        if self._program is None:
            print("program of current device worker is not configured")
            exit(-1)
        opt_info = self._program._fleet_opt
        program_configs = opt_info["program_configs"]
        downpour = trainer_desc.downpour_param

        for pid in program_configs:
            if pid == program_id:
                pc = downpour.program_config.add()
                pc.program_id = program_id
                for i in program_configs[program_id]["push_sparse"]:
                    pc.push_sparse_table_id.extend([i])
                for i in program_configs[program_id]["push_dense"]:
                    pc.push_dense_table_id.extend([i])
                    dense_table_set.add(i)
                for i in program_configs[program_id]["pull_sparse"]:
                    pc.pull_sparse_table_id.extend([i])
                for i in program_configs[program_id]["pull_dense"]:
                    pc.pull_dense_table_id.extend([i])
                    dense_table_set.add(i)
                # code for partial push dense table such as multitask
                if "cond2denseid" in program_configs[program_id]:
                    cond2denseid = program_configs[program_id]["cond2denseid"]
                    for key, value in cond2denseid.items():
                        mc_map = pc.partial_pushdense_condtable_map.add()
                        mc_map.key = key
                        mc_map.value = value
                break

        trainer_desc.device_worker_name = opt_info.get("worker_class",
                                                       "DownpourWorker")
        pull_thread = trainer_desc.pull_dense_param
        pull_thread.device_num = trainer_desc.thread_num
        if opt_info.get("program_id_to_worker") is None:
            raise ValueError("opt_info must have program_id_to_worker")
        prog_id_to_worker = opt_info["program_id_to_worker"]
        if prog_id_to_worker.get(program_id) is None:
            raise ValueError("%s not found in program_id_to_worker" %
                             program_id)
        worker = opt_info["program_id_to_worker"][program_id]
        for i in worker.get_desc().dense_table:
            if i.table_id in dense_table_set:
                dense_table = pull_thread.dense_table.add()
                dense_table.dense_value_name.extend(i.dense_variable_name)
                dense_table.table_id = \
                    i.table_id
        sparse_len = len(worker.get_desc().sparse_table)
        for i in range(sparse_len):
            sparse_table = downpour.sparse_table.add()
            sparse_table.table_id = worker.get_desc().sparse_table[i].table_id
            sparse_table.sparse_key_name.extend(
                worker.get_desc().sparse_table[i].slot_key)
            sparse_table.sparse_value_name.extend(
                worker.get_desc().sparse_table[i].slot_value)
            sparse_table.sparse_grad_name.extend(
                worker.get_desc().sparse_table[i].slot_gradient)
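            # With CVM enabled (or with no_cvm set), the embedding width equals
            # the accessor's fea_dim; otherwise two extra slots, conventionally
            # the show/click statistics, precede the embedding values.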
            if opt_info["use_cvm"] or ("no_cvm" in opt_info
                                       and opt_info["no_cvm"] == True):
                sparse_table.emb_dim = \
                    self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
                        i].accessor.fea_dim
                sparse_table.fea_dim = sparse_table.emb_dim
            else:
                sparse_table.emb_dim = \
                    self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
                        i].accessor.fea_dim - 2
                sparse_table.fea_dim = sparse_table.emb_dim + 2
            # TODO(guru4elephant): hard code here, need to improve
            sparse_table.label_var_name = "click"
        if opt_info["stat_var_names"]:
            for i in opt_info["stat_var_names"]:
                downpour.stat_var_names.extend([i])

        for i in worker.get_desc().dense_table:
            if i.table_id in dense_table_set:
                dense_table = downpour.dense_table.add()
                dense_table.table_id = i.table_id
                dense_table.dense_value_name.extend(i.dense_variable_name)
                dense_table.dense_grad_name.extend(
                    i.dense_gradient_variable_name)
        downpour.skip_ops.extend(worker.get_desc().skip_op)
        if self._infer:
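            # Inference never pushes gradients back to the parameter server.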
            downpour.push_dense = False
            downpour.push_sparse = False


class DownpourSGDOPT(DeviceWorker):
    """
    DownpourSGDOPT is a kind of distributed SGD algorithm.
    """

    def __init__(self):
        """
        Initialize the DownpourSGDOPT device worker.
        """
        super(DownpourSGDOPT, self).__init__()

    def _gen_worker_desc(self, trainer_desc):
        """
        Generate worker desc for DownpourWorkerOpt.

        Args:
            trainer_desc(TrainerDesc): a TrainerDesc object
        """
        dense_table_set = set()
        program_id = str(id(self._program))
        if self._program is None:
            print("program of current device worker is not configured")
            exit(-1)
        opt_info = self._program._fleet_opt
        program_configs = opt_info["program_configs"]
        downpour = trainer_desc.downpour_param

        for pid in program_configs:
            if pid == program_id:
                pc = downpour.program_config.add()
                pc.program_id = program_id
                for i in program_configs[program_id]["push_sparse"]:
                    pc.push_sparse_table_id.extend([i])
                for i in program_configs[program_id]["push_dense"]:
                    pc.push_dense_table_id.extend([i])
                    dense_table_set.add(i)
                for i in program_configs[program_id]["pull_sparse"]:
                    pc.pull_sparse_table_id.extend([i])
                for i in program_configs[program_id]["pull_dense"]:
                    pc.pull_dense_table_id.extend([i])
                    dense_table_set.add(i)
                break

        trainer_desc.device_worker_name = "DownpourWorkerOpt"
        pull_thread = trainer_desc.pull_dense_param
        pull_thread.device_num = trainer_desc.thread_num
        if opt_info.get("program_id_to_worker") is None:
            raise ValueError("opt_info must have program_id_to_worker")
        prog_id_to_worker = opt_info["program_id_to_worker"]
        if prog_id_to_worker.get(program_id) is None:
            raise ValueError("%s not found in program_id_to_worker" %
                             program_id)
        worker = opt_info["program_id_to_worker"][program_id]
        for i in worker.get_desc().dense_table:
            if i.table_id in dense_table_set:
                dense_table = pull_thread.dense_table.add()
                dense_table.dense_value_name.extend(i.dense_variable_name)
                dense_table.table_id = \
                    i.table_id
        sparse_len = len(worker.get_desc().sparse_table)
        for i in range(sparse_len):
            sparse_table = downpour.sparse_table.add()
            sparse_table.table_id = worker.get_desc().sparse_table[i].table_id
            sparse_table.sparse_key_name.extend(
                worker.get_desc().sparse_table[i].slot_key)
            sparse_table.sparse_value_name.extend(
                worker.get_desc().sparse_table[i].slot_value)
            sparse_table.sparse_grad_name.extend(
                worker.get_desc().sparse_table[i].slot_gradient)
            if opt_info["use_cvm"] or ("no_cvm" in opt_info
                                       and opt_info["no_cvm"] == True):
                sparse_table.emb_dim = \
                    self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
                        i].accessor.fea_dim
                sparse_table.fea_dim = sparse_table.emb_dim
            else:
                sparse_table.emb_dim = \
                    self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
                        i].accessor.fea_dim - 2
                sparse_table.fea_dim = sparse_table.emb_dim + 2
            # TODO(guru4elephant): hard code here, need to improve
            sparse_table.label_var_name = "click"
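        # Flag tables listed in opt_info as local or async so that
        # DownpourWorkerOpt can handle their communication accordingly.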
        if "local_tables" in opt_info and sparse_table.table_id in opt_info[
                "local_tables"]:
            sparse_table.is_local = True
        if "async_tables" in opt_info and sparse_table.table_id in opt_info[
                "async_tables"]:
            sparse_table.is_async = True
        if opt_info["stat_var_names"]:
            for i in opt_info["stat_var_names"]:
                downpour.stat_var_names.extend([i])

        for i in worker.get_desc().dense_table:
            if i.table_id in dense_table_set:
                dense_table = downpour.dense_table.add()
                dense_table.table_id = i.table_id
                dense_table.dense_value_name.extend(i.dense_variable_name)
                dense_table.dense_grad_name.extend(
                    i.dense_gradient_variable_name)
        downpour.skip_ops.extend(worker.get_desc().skip_op)
        if self._infer:
            downpour.push_dense = False
            downpour.push_sparse = False


class Section(DeviceWorker):
    """SectionWorker."""

    def __init__(self):
        """Init."""
        super(Section, self).__init__()

    def _gen_worker_desc(self, trainer_desc):
        """
        Generate worker desc for SectionWorker.

        Args:
            trainer_desc(TrainerDesc): a TrainerDesc object
        """
        from google.protobuf import text_format
        from . import core
        trainer_desc.device_worker_name = "SectionWorker"
        pipeline_opt = self._program._pipeline_opt
        section_param = trainer_desc.section_param
        section_param.num_microbatches = pipeline_opt["num_microbatches"]
        section_param.start_cpu_core_id = pipeline_opt["start_cpu_core_id"]
        section_param.pipeline_stage = pipeline_opt["pipeline_stage"]
        section_param.num_pipeline_stages = pipeline_opt["num_pipeline_stages"]
        schedule_mode_str = pipeline_opt["schedule_mode"]
        # The F-then-B scheduler runs the forward phase for all microbatches,
        # then the backward phase for all microbatches.
        # The 1F1B scheduler alternates one forward and one backward phase
        # after the startup phase.
        assert schedule_mode_str in [
            "F-then-B", "1F1B"
        ], ("The schedule mode "
            "for pipeline must be one of F-then-B or 1F1B")
        schedule_mode = 0 if schedule_mode_str == "F-then-B" else 1
        section_param.schedule_mode = schedule_mode
        cfg = section_param.section_config
        program = pipeline_opt["section_program"]
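        # Copy the section program's desc into the worker config by
        # round-tripping it through its serialized proto form.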
        cfg.program_desc.ParseFromString(
            program._get_desc().serialize_to_string())
        # TODO: figure out why CopyFrom below does not work
        # cfg.program_desc.CopyFrom(program.program._get_desc())
        place = pipeline_opt["place"]
        place_id = pipeline_opt["place_id"]
        if core.is_compiled_with_cuda():
            assert isinstance(place, core.CUDAPlace)
        elif core.is_compiled_with_npu():
            assert isinstance(place, core.NPUPlace)
        cfg.place = cfg.CUDAPlace
        cfg.place_id = place_id


class HeterSection(DeviceWorker):
    """HeterSectionWorker."""

    def __init__(self):
        """Init."""
        super(HeterSection, self).__init__()

    def _gen_worker_desc(self, trainer_desc):
        """
        Generate worker desc for HeterSectionWorker.

        Args:
            trainer_desc(TrainerDesc): a TrainerDesc object
        """
        from google.protobuf import text_format
        from . import core
        trainer_desc.device_worker_name = "HeterSectionWorker"
        heter_pipeline_opt = self._program._heter_pipeline_opt
        heter_section_param = trainer_desc.heter_section_param
        heter_section_param.num_microbatches = heter_pipeline_opt[
            "num_microbatches"]
        heter_section_param.pipeline_stage = heter_pipeline_opt[
            "pipeline_stage"]
        heter_section_param.num_pipeline_stages = heter_pipeline_opt[
            "num_pipeline_stages"]
        cfg = heter_section_param.section_config
        program = heter_pipeline_opt["section_program"]
        cfg.program_desc.ParseFromString(
            program._get_desc().serialize_to_string())
620 621


class DeviceWorkerFactory(object):
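    """Create a DeviceWorker subclass instance from its type string.

    For example, _create_device_worker("hogwild") capitalizes the string to
    "Hogwild" and instantiates that class from this module's globals.
    """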

    def _create_device_worker(self, worker_type):
        classname = worker_type.capitalize()
        return globals()[classname]()