trainer_desc.py 15.2 KB
Newer Older
1
#   Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
2 3 4 5 6 7 8 9 10 11 12 13
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
X
xujiaqi01 已提交
14
"""Defination of trainers."""
15

H
hutuxian 已提交
16
import sys
17
import os
18

T
Thunderbrook 已提交
19
__all__ = [
20 21 22 23 24 25
    'TrainerDesc',
    'MultiTrainer',
    'DistMultiTrainer',
    'PipelineTrainer',
    'HeterXpuTrainer',
    'HeterPipelineTrainer',
T
Thunderbrook 已提交
26
]
27 28


29
class TrainerDesc:
    '''
    Python-side builder for the C++ ``TrainerDesc`` protobuf.

    Wraps ``trainer_desc_pb2.TrainerDesc`` and exposes ``_set_*`` helpers
    that fill individual proto fields before the description is serialized
    and handed over to the C++ trainer.
    '''

    def __init__(self):
        '''Create a TrainerDesc proto with default thread count and no program.'''
        # Workaround for relative import in protobuf under python3
        # TODO: should be fixed
        here = os.path.dirname(__file__)
        for extra in (here, here + "/proto"):
            if extra not in sys.path:
                sys.path.append(extra)

        from proto import trainer_desc_pb2

        self.proto_desc = trainer_desc_pb2.TrainerDesc()
        import multiprocessing as mp

        # Default to one trainer thread per CPU core.
        self.proto_desc.thread_num = mp.cpu_count()
        self._fleet_desc = None
        self._device_worker = None
        self._program = None
        self._infer = False

    def _set_heter_info(self, ret):
        '''Fill xpu op-index range and send/recv lists from ``ret``.

        ``ret`` is expected to be the 4-tuple produced by splitting a
        program by device (start idx, end idx, send list, recv list) —
        see ``fu.split_program_by_device``.  ``None`` is a no-op.
        '''
        if ret is None:
            return
        self.proto_desc.xpu_start_idx = ret[0]
        self.proto_desc.xpu_end_idx = ret[1]
        self.proto_desc.xpu_send_list.extend(ret[2])
        self.proto_desc.xpu_recv_list.extend(ret[3])

    def _set_fetch_var_and_info(self, fetch_vars, fetch_info, print_period):
        '''Register variables to fetch (with display format) every ``print_period`` batches.'''
        fetch_cfg = self.proto_desc.fetch_config
        # fetch_info may be any iterable; make it indexable.
        fetch_info = list(fetch_info)
        for idx, var in enumerate(fetch_vars):
            fetch_cfg.fetch_var_names.append(var.name)
            fetch_cfg.fetch_var_str_format.append(fetch_info[idx])
        fetch_cfg.print_period = print_period

    def _set_debug(self, debug):
        self.proto_desc.debug = debug

    def _set_thread(self, thread_num):
        self.proto_desc.thread_num = thread_num

    def _set_device_worker(self, device_worker):
        self._device_worker = device_worker

    def _set_infer(self, infer):
        self._infer = infer

    def _set_fleet_desc(self, fleet_desc):
        '''Keep a reference to ``fleet_desc`` and store its text form in the proto.'''
        self._fleet_desc = fleet_desc
        # Serialize fleet_desc into the proto as human-readable text.
        from google.protobuf import text_format

        self.proto_desc.fleet_desc = text_format.MessageToString(fleet_desc)

    def _gen_trainer_desc(self):
        # Subclasses set class_name and delegate to the device worker.
        pass

    def _set_program(self, program):
        self._program = program

    def _set_trainer_id(self, trainer_id):
        self.proto_desc.trainer_id = trainer_id

    def _set_trainers(self, trainers):
        self.proto_desc.trainers.extend(trainers)

    def _set_use_cvm(self, use_cvm=False):
        self.proto_desc.use_cvm = use_cvm

    def _set_no_cvm(self, no_cvm=False):
        self.proto_desc.no_cvm = no_cvm

    def _set_scale_sparse_grad_with_batch_size(
        self, scale_sparse_gradient_with_batch_size=True
    ):
        self.proto_desc.scale_sparse_gradient_with_batch_size = (
            scale_sparse_gradient_with_batch_size
        )

    def _set_scale_datanorm(self, scale_datanorm=-1):
        self.proto_desc.scale_datanorm = scale_datanorm

    def _set_dump_slot(self, dump_slot):
        self.proto_desc.dump_slot = dump_slot

    def _set_mpi_rank(self, mpi_rank):
        self.proto_desc.mpi_rank = mpi_rank

    def _set_mpi_size(self, mpi_size):
        self.proto_desc.mpi_size = mpi_size

    def _set_dump_fields(self, dump_fields):
        self.proto_desc.dump_fields.extend(dump_fields)

    def _set_is_dump_in_simple_mode(self, is_dump_in_simple_mode):
        self.proto_desc.is_dump_in_simple_mode = is_dump_in_simple_mode

    def _set_dump_fields_path(self, path):
        self.proto_desc.dump_fields_path = path

    def _set_dump_file_num(self, dump_file_num):
        self.proto_desc.dump_file_num = dump_file_num

    def _set_user_define_dump_filename(self, user_define_dump_filename):
        self.proto_desc.user_define_dump_filename = user_define_dump_filename

    def _set_dump_converter(self, converter):
        self.proto_desc.dump_converter = converter

    def _set_enable_random_dump(self, enable_random_dump):
        self.proto_desc.enable_random_dump = enable_random_dump

    def _set_dump_interval(self, dump_interval):
        self.proto_desc.dump_interval = dump_interval

    def _set_random_with_lineid(self, random_with_lineid):
        self.proto_desc.random_with_lineid = random_with_lineid

    def _set_dump_param(self, dump_param):
        self.proto_desc.dump_param.extend(dump_param)

    def _set_worker_places(self, worker_places):
        self.proto_desc.worker_places.extend(worker_places)

    def _set_use_ps_gpu(self, use_ps_gpu=False):
        self.proto_desc.use_ps_gpu = use_ps_gpu

    def _set_thread_barrier(self, thread_barrier):
        self.proto_desc.thread_barrier = thread_barrier

    def _set_check_nan_var_names(self, check_nan_var_names):
        self.proto_desc.check_nan_var_names.extend(check_nan_var_names)

    def _set_loss_names(self, loss_names):
        self.proto_desc.loss_names.extend(loss_names)

    def _set_adjust_ins_weight(self, config_dict):
        '''Copy instance-weight-adjustment options from ``config_dict`` (missing keys get defaults).'''
        cfg = self.proto_desc.adjust_ins_weight_config
        cfg.need_adjust = config_dict.get("need_adjust", False)
        cfg.nid_slot = config_dict.get("nid_slot", "")
        cfg.nid_adjw_threshold = config_dict.get("nid_adjw_threshold", 0.0)
        cfg.nid_adjw_ratio = config_dict.get("nid_adjw_ratio", 0.0)
        cfg.ins_weight_slot = config_dict.get("ins_weight_slot", "")

    def _set_copy_table_config(self, config_dict):
        '''Fill ``copy_table_config`` from ``config_dict``.

        Sparse/dense table ids and variable names are normalized to lists;
        source and destination lists must be pairwise aligned, otherwise a
        ``ValueError`` is raised.
        '''

        def as_list(value):
            # Scalars are accepted and wrapped into a single-element list.
            return value if isinstance(value, list) else [value]

        config = self.proto_desc.copy_table_config
        config.need_copy = config_dict.get("need_copy", False)
        config.batch_num = config_dict.get("batch_num", 100)

        src_sparse_tables = as_list(config_dict.get("src_sparse_tables", []))
        dest_sparse_tables = as_list(config_dict.get("dest_sparse_tables", []))
        if len(src_sparse_tables) != len(dest_sparse_tables):
            raise ValueError(
                "len(src_sparse_tables) != len(dest_sparse_tables),"
                " %s vs %s" % (len(src_sparse_tables), len(dest_sparse_tables))
            )
        config.src_sparse_tables.extend(src_sparse_tables)
        config.dest_sparse_tables.extend(dest_sparse_tables)

        src_dense_tables = as_list(config_dict.get("src_dense_tables", []))
        dest_dense_tables = as_list(config_dict.get("dest_dense_tables", []))
        if len(src_dense_tables) != len(dest_dense_tables):
            raise ValueError(
                "len(src_dense_tables) != len(dest_dense_tables),"
                " %s vs %s" % (len(src_dense_tables), len(dest_dense_tables))
            )
        config.src_dense_tables.extend(src_dense_tables)
        config.dest_dense_tables.extend(dest_dense_tables)

        # user can also specify dense variables to copy,
        # instead of copy dense table
        src_var_list = as_list(config_dict.get("src_var_list", []))
        dest_var_list = as_list(config_dict.get("dest_var_list", []))
        if len(src_var_list) != len(dest_var_list):
            raise ValueError(
                "len(src_var_list) != len(dest_var_list), %s vs"
                " %s" % (len(src_var_list), len(dest_var_list))
            )
        config.src_var_list.extend(src_var_list)
        config.dest_var_list.extend(dest_var_list)

        # Each dependency entry must map a key to exactly one value.
        for key, values in config_dict.get("dependency_map", {}).items():
            entry = config.table_denpendency_map.add()
            entry.key = key
            values = as_list(values)
            if len(values) != 1:
                raise ValueError("dependency len %s != 1" % len(values))
            entry.values.extend(values)

        config.dense_pull_after_copy = config_dict.get(
            "dense_pull_after_copy", True
        )
        config.enable_dependency = config_dict.get("enable_dependency", False)
        config.sparse_copy_by_feasign = config_dict.get(
            "sparse_copy_by_feasign", True
        )

    def _desc(self):
        '''Return the binary-serialized proto (what the C++ side consumes).'''
        return self.proto_desc.SerializeToString()

    def __str__(self):
        '''Return the human-readable text form of the proto.'''
        from google.protobuf import text_format

        return text_format.MessageToString(self.proto_desc)
H
hutuxian 已提交
316

317 318

class MultiTrainer(TrainerDesc):
    '''
    Implement of MultiTrainer.
    Can be init from TrainerDesc.
    '''

    def __init__(self):
        super().__init__()

    def _set_program(self, program):
        """Remember the program this trainer will run."""
        super()._set_program(program)
        self._program = program

    def _gen_trainer_desc(self):
        """Fill the proto with the MultiTrainer class name and the worker desc.

        Raises:
            RuntimeError: if no program has been set.
        """
        super()._gen_trainer_desc()
        self.proto_desc.class_name = "MultiTrainer"
        # Fail fast on a missing program, consistent with the other trainers
        # (DistMultiTrainer, PipelineTrainer, ...) in this module.
        if self._program is None:
            raise RuntimeError("None Program")
        self._device_worker._set_infer(self._infer)
        self._device_worker._set_program(self._program)
        self._device_worker._gen_worker_desc(self.proto_desc)
338

339 340

class DistMultiTrainer(TrainerDesc):
    """
    Implement of DistMultiTrainer.
    It's for Distributed training.
    """

    def __init__(self):
        super().__init__()

    def _set_program(self, program):
        """Record the program to be executed by this trainer."""
        super()._set_program(program)
        self._program = program

    def _gen_trainer_desc(self):
        """Populate the proto for a DistMultiTrainer run; raise if no program was set."""
        super()._gen_trainer_desc()
        self.proto_desc.class_name = "DistMultiTrainer"
        if self._program is None:
            raise RuntimeError("None Program")
        worker = self._device_worker
        worker._set_infer(self._infer)
        worker._set_program(self._program)
        worker._gen_worker_desc(self.proto_desc)
H
hutuxian 已提交
362 363


T
Thunderbrook 已提交
364 365 366 367 368 369 370
class HeterXpuTrainer(TrainerDesc):
    """
    Implement of HeterXpuTrainer.
    It's for Distributed training.
    """

    def __init__(self):
        super().__init__()

    def _set_program(self, program):
        """Record the program to be executed by this trainer."""
        super()._set_program(program)
        self._program = program

    def _gen_trainer_desc(self):
        """Populate the proto for a HeterXpuTrainer run; raise if no program was set."""
        super()._gen_trainer_desc()
        self.proto_desc.class_name = "HeterXpuTrainer"
        if self._program is None:
            raise RuntimeError("None Program")
        worker = self._device_worker
        worker._set_infer(self._infer)
        worker._set_program(self._program)
        worker._gen_worker_desc(self.proto_desc)


T
Thunderbrook 已提交
388 389 390 391 392 393 394
class PSGPUTrainer(TrainerDesc):
    """
    Implement of PSGPUTrainer.
    It's for Distributed training.
    """

    def __init__(self):
        super().__init__()

    def _set_program(self, program):
        """Record the program to be executed by this trainer."""
        super()._set_program(program)
        self._program = program

    def _gen_trainer_desc(self):
        """Populate the proto for a PSGPUTrainer run; raise if no program was set."""
        super()._gen_trainer_desc()
        self.proto_desc.class_name = "PSGPUTrainer"
        if self._program is None:
            raise RuntimeError("None Program")
        worker = self._device_worker
        worker._set_infer(self._infer)
        worker._set_program(self._program)
        worker._gen_worker_desc(self.proto_desc)


412 413 414 415 416 417 418
class HeterPipelineTrainer(TrainerDesc):
    """
    Implement of HeterPipelineTrainer.
    It's for HeterPS Pipeline training.
    """

    def __init__(self):
        super().__init__()

    def _set_program(self, program):
        """Record the program to be executed by this trainer."""
        super()._set_program(program)
        self._program = program

    def _gen_trainer_desc(self):
        """Populate the proto for a HeterPipelineTrainer run; raise if no program was set."""
        super()._gen_trainer_desc()
        self.proto_desc.class_name = "HeterPipelineTrainer"
        if self._program is None:
            raise RuntimeError("None Program")
        worker = self._device_worker
        worker._set_infer(self._infer)
        worker._set_program(self._program)
        worker._gen_worker_desc(self.proto_desc)


H
hutuxian 已提交
436
class PipelineTrainer(TrainerDesc):
    """
    Implement of PipelineTrainer.
    It's for Pipeline.
    """

    def __init__(self):
        super().__init__()

    def _set_program(self, program):
        """Record the program to be executed by this trainer."""
        super()._set_program(program)
        self._program = program

    def _gen_trainer_desc(self):
        """Populate the proto for a PipelineTrainer run; raise if no program was set."""
        super()._gen_trainer_desc()
        self.proto_desc.class_name = "PipelineTrainer"
        if self._program is None:
            raise RuntimeError("None Program")
        worker = self._device_worker
        worker._set_infer(self._infer)
        worker._set_program(self._program)
        worker._gen_worker_desc(self.proto_desc)