trainer_desc.py 6.3 KB
Newer Older
1
#   Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
2 3 4 5 6 7 8 9 10 11 12 13 14
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

H
hutuxian 已提交
15 16
import sys
from os import path
H
hutuxian 已提交
17
__all__ = ['TrainerDesc', 'MultiTrainer', 'DistMultiTrainer', 'PipelineTrainer']
18 19 20


class TrainerDesc(object):
    '''
    Python-side wrapper around the TrainerDesc protobuf message.

    Collects trainer configuration (thread count, fetch/print settings,
    dump options, instance-weight adjustment, ...) through `_set_*`
    methods and serializes it so it can be handed to the C++ trainer.
    Subclasses (MultiTrainer, DistMultiTrainer, PipelineTrainer) fill in
    the concrete `class_name` and device-worker description.
    '''

    def __init__(self):
        '''
        Build an empty TrainerDesc proto with default settings
        (thread_num defaults to the CPU count).
        '''
        # Workaround for relative import in protobuf under python3
        # TODO: should be fixed
        cur_path = path.dirname(__file__)
        sys.path.append(cur_path)
        sys.path.append(cur_path + "/proto")
        from proto import trainer_desc_pb2
        self.proto_desc = trainer_desc_pb2.TrainerDesc()
        import multiprocessing as mp
        # set default thread num == cpu count
        self.proto_desc.thread_num = mp.cpu_count()
        # Filled in later by the corresponding _set_* methods.
        self._fleet_desc = None
        self._device_worker = None
        self._program = None
        self._infer = False

    def _set_fetch_var_and_info(self, fetch_vars, fetch_info, print_period):
        # fetch_vars and fetch_info are parallel lists: fetch_info[i] is
        # the display string used when printing fetch_vars[i].
        for i, v in enumerate(fetch_vars):
            self.proto_desc.fetch_config.fetch_var_names.extend([v.name])
            self.proto_desc.fetch_config.fetch_var_str_format.extend(
                [fetch_info[i]])
        self.proto_desc.fetch_config.print_period = print_period

    def _set_debug(self, debug):
        self.proto_desc.debug = debug

    def _set_thread(self, thread_num):
        self.proto_desc.thread_num = thread_num

    def _set_device_worker(self, device_worker):
        # Kept on the Python side; subclasses forward it into proto_desc
        # in _gen_trainer_desc().
        self._device_worker = device_worker

    def _set_infer(self, infer):
        self._infer = infer

    def _set_fleet_desc(self, fleet_desc):
        self._fleet_desc = fleet_desc

    def _gen_trainer_desc(self):
        # Overridden by subclasses to fill trainer-specific proto fields.
        pass

    def _set_program(self, program):
        self._program = program

    def _set_use_cvm(self, use_cvm=False):
        self.proto_desc.use_cvm = use_cvm

    def _set_scale_datanorm(self, scale_datanorm=-1):
        self.proto_desc.scale_datanorm = scale_datanorm

    def _set_dump_slot(self, dump_slot):
        self.proto_desc.dump_slot = dump_slot

    def _set_mpi_rank(self, mpi_rank):
        self.proto_desc.mpi_rank = mpi_rank

    def _set_mpi_size(self, mpi_size):
        self.proto_desc.mpi_size = mpi_size

    def _set_dump_fields(self, dump_fields):
        for field in dump_fields:
            self.proto_desc.dump_fields.append(field)

    # NOTE(review): the parameter name shadows the module-level
    # `from os import path` import inside this method; kept as-is so
    # keyword-argument callers are unaffected.
    def _set_dump_fields_path(self, path):
        self.proto_desc.dump_fields_path = path

    def _set_dump_file_num(self, dump_file_num):
        self.proto_desc.dump_file_num = dump_file_num

    def _set_dump_converter(self, converter):
        self.proto_desc.dump_converter = converter

    def _set_dump_param(self, dump_param):
        for param in dump_param:
            self.proto_desc.dump_param.append(param)

    def _set_check_nan_var_names(self, check_nan_var_names):
        for var in check_nan_var_names:
            self.proto_desc.check_nan_var_names.append(var)

    def _set_adjust_ins_weight(self, config_dict):
        # Missing keys fall back to "disabled" defaults.
        self.proto_desc.adjust_ins_weight_config.need_adjust = \
                config_dict.get("need_adjust", False)
        self.proto_desc.adjust_ins_weight_config.nid_slot = \
                config_dict.get("nid_slot", "")
        self.proto_desc.adjust_ins_weight_config.nid_adjw_threshold = \
                config_dict.get("nid_adjw_threshold", 0.0)
        self.proto_desc.adjust_ins_weight_config.nid_adjw_ratio = \
                config_dict.get("nid_adjw_ratio", 0.0)
        self.proto_desc.adjust_ins_weight_config.ins_weight_slot = \
                config_dict.get("ins_weight_slot", "")

    def _desc(self):
        # Serialized proto bytes handed to the C++ trainer.
        # (Removed an unused `from google.protobuf import text_format`
        # that previously sat here.)
        return self.proto_desc.SerializeToString()

    def __str__(self):
        from google.protobuf import text_format
        return text_format.MessageToString(self.proto_desc)
H
hutuxian 已提交
130

131 132

class MultiTrainer(TrainerDesc):
    '''
    Implement of MultiTrainer.
    Can be init from TrainerDesc.
    '''

    def __init__(self):
        # Removed a dead `pass` that followed the super call.
        super(MultiTrainer, self).__init__()

    def _set_program(self, program):
        # The base class already stores `program`; the previous duplicate
        # `self._program = program` assignment was redundant and removed.
        super(MultiTrainer, self)._set_program(program)

    def _gen_trainer_desc(self):
        '''
        Fill in the MultiTrainer class name and delegate the worker
        description to the configured device worker.
        '''
        super(MultiTrainer, self)._gen_trainer_desc()
        self.proto_desc.class_name = "MultiTrainer"
        self._device_worker._set_infer(self._infer)
        self._device_worker._gen_worker_desc(self.proto_desc)
151

152 153

class DistMultiTrainer(TrainerDesc):
    '''
    Distributed multi-thread trainer descriptor.
    Requires a program to be set before generating the trainer desc.
    '''

    def __init__(self):
        # Removed a dead `pass` that followed the super call.
        super(DistMultiTrainer, self).__init__()

    def _set_program(self, program):
        # The base class already stores `program`; the previous duplicate
        # `self._program = program` assignment was redundant and removed.
        super(DistMultiTrainer, self)._set_program(program)

    def _gen_trainer_desc(self):
        '''
        Fill in the DistMultiTrainer class name and delegate the worker
        description to the configured device worker.

        Raises:
            RuntimeError: if no program has been set via _set_program.
        '''
        super(DistMultiTrainer, self)._gen_trainer_desc()
        self.proto_desc.class_name = "DistMultiTrainer"
        # PEP 8: compare to None with `is`, not `==`.
        if self._program is None:
            raise RuntimeError("None Program")
        self._device_worker._set_infer(self._infer)
        self._device_worker._set_program(self._program)
        self._device_worker._gen_worker_desc(self.proto_desc)
H
hutuxian 已提交
170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188


class PipelineTrainer(TrainerDesc):
    '''
    Pipeline-parallel trainer descriptor.
    Requires a program to be set before generating the trainer desc.
    '''

    def __init__(self):
        # Removed a dead `pass` that followed the super call.
        super(PipelineTrainer, self).__init__()

    def _set_program(self, program):
        # The base class already stores `program`; the previous duplicate
        # `self._program = program` assignment was redundant and removed.
        super(PipelineTrainer, self)._set_program(program)

    def _gen_trainer_desc(self):
        '''
        Fill in the PipelineTrainer class name and delegate the worker
        description to the configured device worker.

        Raises:
            RuntimeError: if no program has been set via _set_program.
        '''
        super(PipelineTrainer, self)._gen_trainer_desc()
        self.proto_desc.class_name = "PipelineTrainer"
        # PEP 8: compare to None with `is`, not `==`.
        if self._program is None:
            raise RuntimeError("None Program")
        self._device_worker._set_infer(self._infer)
        self._device_worker._set_program(self._program)
        self._device_worker._gen_worker_desc(self.proto_desc)