#   Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definition of trainers."""

import sys
import os

__all__ = [
    'TrainerDesc', 'MultiTrainer', 'DistMultiTrainer', 'PipelineTrainer',
    'HeterXpuTrainer', 'PSGPUTrainer'
]


class TrainerDesc(object):
    '''
    Set the trainer proto from Python and pass it to C++.
    Can be initialized from train_desc.
    '''

    def __init__(self):
        # NOTE: an earlier version parsed the description from a proto text
        # file:
        #     self.proto_desc = data_feed_pb2.DataFeedDesc()
        #     with open(proto_file, 'r') as f:
        #         text_format.Parse(f.read(), self.proto_desc)
        # Workaround for relative import in protobuf under python3
        # TODO: should be fixed
        cur_path = os.path.dirname(__file__)
        if cur_path not in sys.path:
            sys.path.append(cur_path)
        proto_path = os.path.join(cur_path, "proto")
        if proto_path not in sys.path:
            sys.path.append(proto_path)

        from proto import trainer_desc_pb2
        self.proto_desc = trainer_desc_pb2.TrainerDesc()
        import multiprocessing as mp
        # Default the thread num to the CPU core count.
        self.proto_desc.thread_num = mp.cpu_count()
        self._fleet_desc = None
        self._device_worker = None
        self._program = None
        self._infer = False

    def _set_heter_info(self, ret):
        # `ret` is expected to be
        # (xpu_start_idx, xpu_end_idx, xpu_send_list, xpu_recv_list), i.e.
        # sections start_list[1] / end_list[1] / send_list[1] / recv_list[1]
        # of fu.split_program_by_device(program). An earlier (commented-out)
        # version also populated the op_run_* and op_run_end_* proto fields
        # from sections 0 and 2 of that split result.
        if ret is None:
            return
        self.proto_desc.xpu_start_idx = ret[0]
        self.proto_desc.xpu_end_idx = ret[1]
        for i in ret[2]:  # send_list[1]
            self.proto_desc.xpu_send_list.append(i)
        for i in ret[3]:  # recv_list[1]
            self.proto_desc.xpu_recv_list.append(i)

    def _set_fetch_var_and_info(self, fetch_vars, fetch_info, print_period):
        # convert fetch_info to list
        fetch_info = list(fetch_info)
        assert len(fetch_vars) == len(fetch_info), \
            "fetch_vars and fetch_info must have the same length"
        for i, v in enumerate(fetch_vars):
            self.proto_desc.fetch_config.fetch_var_names.append(v.name)
            self.proto_desc.fetch_config.fetch_var_str_format.append(
                fetch_info[i])
        self.proto_desc.fetch_config.print_period = print_period
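    # Example (a sketch): _set_fetch_var_and_info([loss], ["loss"], 100)
    # configures the trainer to report the value of `loss` every 100 batches.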

    def _set_debug(self, debug):
        self.proto_desc.debug = debug

    def _set_thread(self, thread_num):
        self.proto_desc.thread_num = thread_num

    def _set_device_worker(self, device_worker):
        self._device_worker = device_worker

    def _set_infer(self, infer):
        self._infer = infer

    def _set_fleet_desc(self, fleet_desc):
        self._fleet_desc = fleet_desc

    def _gen_trainer_desc(self):
        pass

    def _set_program(self, program):
        self._program = program

    def _set_use_cvm(self, use_cvm=False):
        self.proto_desc.use_cvm = use_cvm

    def _set_no_cvm(self, no_cvm=False):
        self.proto_desc.no_cvm = no_cvm

    def _set_scale_sparse_grad_with_batch_size(
            self, scale_sparse_gradient_with_batch_size=True):
        self.proto_desc.scale_sparse_gradient_with_batch_size = scale_sparse_gradient_with_batch_size

    def _set_scale_datanorm(self, scale_datanorm=-1):
        self.proto_desc.scale_datanorm = scale_datanorm

    def _set_dump_slot(self, dump_slot):
        self.proto_desc.dump_slot = dump_slot

    def _set_mpi_rank(self, mpi_rank):
        self.proto_desc.mpi_rank = mpi_rank

    def _set_mpi_size(self, mpi_size):
        self.proto_desc.mpi_size = mpi_size

    def _set_dump_fields(self, dump_fields):
        for field in dump_fields:
            self.proto_desc.dump_fields.append(field)

    def _set_dump_fields_path(self, path):
        self.proto_desc.dump_fields_path = path

    def _set_dump_file_num(self, dump_file_num):
        self.proto_desc.dump_file_num = dump_file_num

    def _set_user_define_dump_filename(self, user_define_dump_filename):
        self.proto_desc.user_define_dump_filename = user_define_dump_filename

    def _set_dump_converter(self, converter):
        self.proto_desc.dump_converter = converter

    def _set_enable_random_dump(self, enable_random_dump):
        self.proto_desc.enable_random_dump = enable_random_dump

    def _set_dump_interval(self, dump_interval):
        self.proto_desc.dump_interval = dump_interval

    def _set_random_with_lineid(self, random_with_lineid):
        self.proto_desc.random_with_lineid = random_with_lineid

    def _set_dump_param(self, dump_param):
        for param in dump_param:
            self.proto_desc.dump_param.append(param)

    def _set_worker_places(self, worker_places):
        for place in worker_places:
            self.proto_desc.worker_places.append(place)

    def _set_use_ps_gpu(self, use_ps_gpu=False):
        self.proto_desc.use_ps_gpu = use_ps_gpu

    def _set_thread_barrier(self, thread_barrier):
        self.proto_desc.thread_barrier = thread_barrier

    def _set_check_nan_var_names(self, check_nan_var_names):
        for var in check_nan_var_names:
            self.proto_desc.check_nan_var_names.append(var)

    def _set_loss_names(self, loss_names):
        for loss in loss_names:
            self.proto_desc.loss_names.append(loss)

    def _set_adjust_ins_weight(self, config_dict):
        self.proto_desc.adjust_ins_weight_config.need_adjust = \
                config_dict.get("need_adjust", False)
        self.proto_desc.adjust_ins_weight_config.nid_slot = \
                config_dict.get("nid_slot", "")
        self.proto_desc.adjust_ins_weight_config.nid_adjw_threshold = \
                config_dict.get("nid_adjw_threshold", 0.0)
        self.proto_desc.adjust_ins_weight_config.nid_adjw_ratio = \
                config_dict.get("nid_adjw_ratio", 0.0)
        self.proto_desc.adjust_ins_weight_config.ins_weight_slot = \
                config_dict.get("ins_weight_slot", "")

    def _set_copy_table_config(self, config_dict):
        config = self.proto_desc.copy_table_config
        config.need_copy = config_dict.get("need_copy", False)
        config.batch_num = config_dict.get("batch_num", 100)

        src_sparse_tables = config_dict.get("src_sparse_tables", [])
        if not isinstance(src_sparse_tables, list):
            src_sparse_tables = [src_sparse_tables]
        dest_sparse_tables = config_dict.get("dest_sparse_tables", [])
        if not isinstance(dest_sparse_tables, list):
            dest_sparse_tables = [dest_sparse_tables]
        if len(src_sparse_tables) != len(dest_sparse_tables):
            raise ValueError(
                "len(src_sparse_tables) != len(dest_sparse_tables)," \
                " %s vs %s" % (len(src_sparse_tables), \
                len(dest_sparse_tables)))
        for i in src_sparse_tables:
            config.src_sparse_tables.append(i)
        for i in dest_sparse_tables:
            config.dest_sparse_tables.append(i)

        src_dense_tables = config_dict.get("src_dense_tables", [])
        if not isinstance(src_dense_tables, list):
            src_dense_tables = [src_dense_tables]
        dest_dense_tables = config_dict.get("dest_dense_tables", [])
        if not isinstance(dest_dense_tables, list):
            dest_dense_tables = [dest_dense_tables]
        if len(src_dense_tables) != len(dest_dense_tables):
            raise ValueError(
                "len(src_dense_tables) != len(dest_dense_tables)," \
                " %s vs %s" % (len(src_dense_tables), \
                len(dest_dense_tables)))
        for i in src_dense_tables:
            config.src_dense_tables.append(i)
        for i in dest_dense_tables:
            config.dest_dense_tables.append(i)

        # user can also specify dense variables to copy,
        # instead of copy dense table
        src_var_list = config_dict.get("src_var_list", [])
        if not isinstance(src_var_list, list):
            src_var_list = [src_var_list]
        dest_var_list = config_dict.get("dest_var_list", [])
        if not isinstance(dest_var_list, list):
            dest_var_list = [dest_var_list]
        if len(src_var_list) != len(dest_var_list):
            raise ValueError(
                "len(src_var_list) != len(dest_var_list), %s vs" \
                " %s" % (len(src_var_list), len(dest_var_list)))
        for i in src_var_list:
            config.src_var_list.append(i)
        for i in dest_var_list:
            config.dest_var_list.append(i)

        dependency_map = config_dict.get("dependency_map", {})
        for key in dependency_map:
            m = config.table_denpendency_map.add()
            m.key = key
            values = dependency_map[key]
            if not isinstance(values, list):
                values = [values]
            if len(values) != 1:
                raise ValueError("dependency len %s != 1" % len(values))
            for value in values:
                m.values.append(value)
        config.dense_pull_after_copy = \
            config_dict.get("dense_pull_after_copy", True)
        config.enable_dependency = \
            config_dict.get("enable_dependency", False)
        config.sparse_copy_by_feasign = \
            config_dict.get("sparse_copy_by_feasign", True)

    def _desc(self):
        return self.proto_desc.SerializeToString()

    def __str__(self):
        from google.protobuf import text_format
        return text_format.MessageToString(self.proto_desc)
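
# A minimal usage sketch (hypothetical values, for illustration): configure a
# TrainerDesc from Python, then serialize it for the C++ side.
#
#     desc = TrainerDesc()
#     desc._set_thread(8)
#     desc._set_debug(True)
#     trainer_desc_str = desc._desc()  # protobuf bytes consumed by C++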


class MultiTrainer(TrainerDesc):
    '''
    Implementation of MultiTrainer.
    Can be initialized from a TrainerDesc.
    '''

    def __init__(self):
        super(MultiTrainer, self).__init__()

    def _set_program(self, program):
        super(MultiTrainer, self)._set_program(program)
        self._program = program

    def _gen_trainer_desc(self):
        super(MultiTrainer, self)._gen_trainer_desc()
        self.proto_desc.class_name = "MultiTrainer"
        self._device_worker._set_infer(self._infer)
        self._device_worker._set_program(self._program)
        self._device_worker._gen_worker_desc(self.proto_desc)


class DistMultiTrainer(TrainerDesc):
    """
    Implementation of DistMultiTrainer.
    It is for distributed training.
    """

    def __init__(self):
        super(DistMultiTrainer, self).__init__()

    def _set_program(self, program):
        super(DistMultiTrainer, self)._set_program(program)
        self._program = program

    def _gen_trainer_desc(self):
        super(DistMultiTrainer, self)._gen_trainer_desc()
        self.proto_desc.class_name = "DistMultiTrainer"
        if self._program is None:
            raise RuntimeError("None Program")
        self._device_worker._set_infer(self._infer)
        self._device_worker._set_program(self._program)
        self._device_worker._gen_worker_desc(self.proto_desc)


class HeterXpuTrainer(TrainerDesc):
    """
    Implementation of HeterXpuTrainer.
    It is for distributed training.
    """

    def __init__(self):
        super(HeterXpuTrainer, self).__init__()

    def _set_program(self, program):
        super(HeterXpuTrainer, self)._set_program(program)
        self._program = program

    def _gen_trainer_desc(self):
        super(HeterXpuTrainer, self)._gen_trainer_desc()
        self.proto_desc.class_name = "HeterXpuTrainer"
        if self._program is None:
            raise RuntimeError("None Program")
        self._device_worker._set_infer(self._infer)
        self._device_worker._set_program(self._program)
        self._device_worker._gen_worker_desc(self.proto_desc)


class PSGPUTrainer(TrainerDesc):
    """
    Implementation of PSGPUTrainer.
    It is for distributed training.
    """

    def __init__(self):
        super(PSGPUTrainer, self).__init__()

    def _set_program(self, program):
        super(PSGPUTrainer, self)._set_program(program)
        self._program = program

    def _gen_trainer_desc(self):
        super(PSGPUTrainer, self)._gen_trainer_desc()
        self.proto_desc.class_name = "PSGPUTrainer"
        if self._program is None:
            raise RuntimeError("None Program")
        self._device_worker._set_infer(self._infer)
        self._device_worker._set_program(self._program)
        self._device_worker._gen_worker_desc(self.proto_desc)


class PipelineTrainer(TrainerDesc):
    """
    Implementation of PipelineTrainer.
    It is for pipeline training.
    """

    def __init__(self):
        super(PipelineTrainer, self).__init__()

    def _set_program(self, program):
        super(PipelineTrainer, self)._set_program(program)
        self._program = program

    def _gen_trainer_desc(self):
        super(PipelineTrainer, self)._gen_trainer_desc()
        self.proto_desc.class_name = "PipelineTrainer"
        if self._program is None:
            raise RuntimeError("None Program")
        self._device_worker._set_infer(self._infer)
        self._device_worker._set_program(self._program)
        self._device_worker._gen_worker_desc(self.proto_desc)
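

# Typical end-to-end flow (a sketch; `worker` is assumed to be a device
# worker such as those defined in device_worker.py, and `program` a Program
# whose ops the worker will run):
#
#     trainer = MultiTrainer()
#     trainer._set_device_worker(worker)
#     trainer._set_program(program)
#     trainer._set_infer(False)
#     trainer._gen_trainer_desc()
#     trainer_desc_str = trainer._desc()  # handed to the C++ trainer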