"""
Model Net: analyse layer config, and parse to Paddle Program
"""
import abc
import copy
import yaml
import paddle.fluid as fluid
from ..utils import table as table
from paddle.fluid.incubate.fleet.parameter_server.pslib import fleet


class Layer(object):
    """Abstract base for net layers parsed from a YAML layer config.

    Concrete layers implement ``generate_fluid`` to emit paddle.fluid ops;
    ``generate_tensorflow`` is an optional hook for a TF backend.
    """
    __metaclass__ = abc.ABCMeta  # py2-style ABC declaration

    def __init__(self, config):
        """Concrete layers parse ``config`` themselves; base keeps no state."""
        pass

    def generate(self, mode, param):
        """Dispatch code generation to the backend selected by ``mode``.

        Args:
            mode(str): 'fluid' or 'tensorflow'
            param: build parameters shared across layers
        Return:
            backend generator result, or (None, None) for an unknown mode
        """
        backends = {
            'fluid': self.generate_fluid,
            'tensorflow': self.generate_tensorflow,
        }
        generator = backends.get(mode)
        if generator is not None:
            return generator(param)
        print('unsupport this mode: ' + mode)
        return None, None

    @abc.abstractmethod
    def generate_fluid(self, param):
        """Emit fluid ops for this layer; must be overridden."""
        pass

    def generate_tensorflow(self, param):
        """Not implement currently"""
        pass


def create(config):
    """
    Create a model instance by config
    Args:
        config(dict) : desc model type and net
    Return:
        Model Instance (None when the mode is not supported)
    """
    # only the fluid backend is supported; any other mode yields None
    if config['mode'] != 'fluid':
        return None
    instance = YamlModel(config)
    instance.build_model()
    return instance


class Model(object):
    """Abstract model: parses a YAML layer file and holds the state that is
    accumulated while a concrete subclass builds a Paddle program from it.
    """
    __metaclass__ = abc.ABCMeta  # py2-style ABC declaration, kept for compat

    def __init__(self, config):
        """
        Args:
            config(dict): must contain 'name' and 'layer_file'
                (path to the YAML net description).
        """
        self._config = config
        self._name = config['name']
        # fix: use a context manager so the layer file is always closed
        # (the previous bare open() leaked the file handle)
        with open(config['layer_file'], 'r') as f:
            self._build_nodes = yaml.safe_load(f.read())
        # phases are consumed in this fixed order by build_model()
        self._build_phase = ['input', 'param', 'summary', 'layer']
        self._build_param = {'layer': {}, 'inner_layer': {}, 'layer_extend': {}, 'model': {}}
        self._inference_meta = {'dependency': {}, 'params': {}}
        self._cost = None       # summed loss variable, filled during build
        self._metrics = {}      # metric_label -> metric_dict
        self._data_var = []     # input data variables, filled during build

    def get_cost_op(self):
        """Return the accumulated cost variable (None before build_model)."""
        return self._cost

    def get_metrics(self):
        """Return the metric dict: metric_label -> metric_dict."""
        return self._metrics

    @abc.abstractmethod
    def shrink(self, params):
        """Decay/shrink model tables; implemented by subclasses."""
        pass

    @abc.abstractmethod
    def build_model(self):
        """Build the program(s) from the layer config; implemented by subclasses."""
        pass

    @abc.abstractmethod
    def dump_model_program(self, path):
        """Dump train/startup programs under ``path``; implemented by subclasses."""
        pass

    @abc.abstractmethod
    def dump_inference_param(self, params):
        """Dump parameters needed at inference time; implemented by subclasses."""
        pass

    @abc.abstractmethod
    def dump_inference_program(self, inference_layer, path):
        """Dump an inference program for ``inference_layer``; implemented by subclasses."""
        pass

    def inference_params(self, inference_layer):
        """
        get params name for inference_layer
        Args:
            inference_layer(str): layer for inference
        Return:
            params(list): params name list that for inference layer
        """
        layer = inference_layer
        # results are memoized per layer
        if layer in self._inference_meta['params']:
            return self._inference_meta['params'][layer]

        self._inference_meta['params'][layer] = []
        self._inference_meta['dependency'][layer] = self.get_dependency(
            self._build_param['inner_layer'], layer)
        for node in self._build_nodes['layer']:
            # only nodes inside the dependency sub-graph contribute params
            if node['name'] not in self._inference_meta['dependency'][layer]:
                continue
            if 'inference_param' in self._build_param['layer_extend'][node['name']]:
                self._inference_meta['params'][layer] += \
                    self._build_param['layer_extend'][node['name']]['inference_param']['params']
        return self._inference_meta['params'][layer]

    def get_dependency(self, layer_graph, dest_layer):
        """
        get model of dest_layer depends on
        Args:
            layer_graph(dict) : all model in graph
        Return:
            depend_layers(list) : sub-graph model for calculate dest_layer
        """
        dependency_list = []
        if dest_layer in layer_graph:
            dependencys = copy.deepcopy(layer_graph[dest_layer]['input'])
            dependency_list = copy.deepcopy(dependencys)
            # recursively collect transitive input dependencies
            for dependency in dependencys:
                dependency_list = dependency_list + self.get_dependency(layer_graph, dependency)
        # de-duplicate; result order is not guaranteed
        return list(set(dependency_list))

class YamlModel(Model):
    """A Model built from a YAML layer-config file, targeting paddle.fluid.

    NOTE(review): this class relies on Python-2-only constructs
    (``print >> fout`` and ``exec`` rebinding function locals); it will not
    run unchanged on Python 3 — confirm the intended interpreter.
    """
    def __init__(self, config):
        """Delegate all state initialisation to Model.__init__."""
        Model.__init__(self, config)
        pass

    def build_model(self):
        """R
        build a fluid model with config
        Return:
            modle_instance(dict)
                train_program
                startup_program
                inference_param : all params name list
                table: table-meta to ps-server
        """
        # index every layer node by name so dependencies can be resolved later
        for layer in self._build_nodes['layer']:
            self._build_param['inner_layer'][layer['name']] = layer

        self._build_param['table'] = {}
        self._build_param['model']['train_program'] = fluid.Program()
        self._build_param['model']['startup_program'] = fluid.Program()
        with fluid.program_guard(self._build_param['model']['train_program'], \
                                 self._build_param['model']['startup_program']):
            with fluid.unique_name.guard():
                # phases run in the fixed order declared in Model.__init__:
                # input, param, summary, layer
                for phase in self._build_phase:
                    if self._build_nodes[phase] is None:
                        continue
                    for node in self._build_nodes[phase]:
                        # HACK: builds source text from the YAML 'class' field
                        # and exec()s it — the config file must be trusted.
                        # NOTE(review): the right-hand ``layer.{}`` appears to
                        # resolve against whatever ``layer`` names at exec
                        # time; verify against this module's imports.  The
                        # local rebinding only works under py2 exec semantics.
                        exec("""layer=layer.{}(node)""".format(node['class']))
                        layer_output, extend_output = layer.generate(self._config['mode'], self._build_param)
                        self._build_param['layer'][node['name']] = layer_output
                        self._build_param['layer_extend'][node['name']] = extend_output
                        if extend_output is None:
                            continue
                        # sum every layer-emitted loss into one cost variable
                        if 'loss' in extend_output:
                            if self._cost is None:
                                self._cost = extend_output['loss']
                            else:
                                self._cost += extend_output['loss']
                        if 'data_var' in extend_output:
                            self._data_var += extend_output['data_var']
                        if 'metric_label' in extend_output and extend_output['metric_label'] is not None:
                            self._metrics[extend_output['metric_label']] = extend_output['metric_dict']

                        # register the layer's inference params into a ps-server
                        # table, allocating the table meta on first sight
                        if 'inference_param' in extend_output:
                            inference_param = extend_output['inference_param']
                            param_name = inference_param['name']
                            if param_name not in self._build_param['table']:
                                self._build_param['table'][param_name] = {'params' :[]}
                                table_meta = table.TableMeta.alloc_new_table(inference_param['table_id'])
                                self._build_param['table'][param_name]['_meta'] = table_meta
                            self._build_param['table'][param_name]['params'] += inference_param['params']
        pass

    @classmethod
    def build_optimizer(self, params):
        """Build a fleet-distributed optimizer from ``params['optimizer_conf']``.

        NOTE(review): first arg of this @classmethod is conventionally ``cls``.
        """
        optimizer_conf = params['optimizer_conf']
        strategy = None
        if 'strategy' in optimizer_conf:
            strategy = optimizer_conf['strategy']
            # collect all metric variable names for the distributed strategy
            stat_var_names = []
            metrics = params['metrics']
            for name in metrics:
                model_metrics = metrics[name]
                stat_var_names += [model_metrics[metric]['var'].name for metric in model_metrics]
            strategy['stat_var_names'] = list(set(stat_var_names))
        # HACK: instantiates fluid.optimizer.<class>(learning_rate=...) via
        # exec on config-derived source; config must be trusted, and the
        # local binding of ``optimizer`` requires py2 exec semantics.
        optimizer_generator = 'optimizer = fluid.optimizer.' + optimizer_conf['class'] + \
                              '(learning_rate=' + str(optimizer_conf['learning_rate']) + ')'
        exec(optimizer_generator)
        optimizer = fleet.distributed_optimizer(optimizer, strategy=strategy)
        return optimizer

    def dump_model_program(self, path):
        """Write train/startup program protobuf text under ``path``.

        NOTE(review): ``print >> fout`` is Python-2-only syntax.
        """
        with open(path + '/' + self._name + '_main_program.pbtxt', "w") as fout:
            print >> fout, self._build_param['model']['train_program']
        with open(path + '/' + self._name + '_startup_program.pbtxt', "w") as fout:
            print >> fout, self._build_param['model']['startup_program']
        pass

    def shrink(self, params):
        """Apply ``params['decay']`` to every registered dense table via fleet."""
        scope = params['scope']
        decay = params['decay']
        for param_table in self._build_param['table']:
            table_id = self._build_param['table'][param_table]['_meta']._table_id
            fleet.shrink_dense_table(decay, scope=scope, table_id=table_id)

    def dump_inference_program(self, inference_layer, path):
        """Not implemented yet."""
        pass

    def dump_inference_param(self, params):
        """Pull dense tables into ``params['scope']`` and save each
        inference layer's variables with fluid.io.save_vars.

        Args (via params dict): scope, executor, inference_list,
        save_combine flag.
        """
        scope = params['scope']
        executor = params['executor']
        program = self._build_param['model']['train_program']
        # NOTE(review): loop variable ``table`` shadows the ``table`` module
        # imported at the top of this file (only within this loop body)
        for table_name, table in self._build_param['table'].items():
            fleet._fleet_ptr.pull_dense(scope, table['_meta']._table_id, table['params'])
        for infernce_item in params['inference_list']:
            params_name_list = self.inference_params(infernce_item['layer_name'])
            params_var_list = [program.global_block().var(i) for i in params_name_list]
            params_file_name = infernce_item['save_file_name']
            with fluid.scope_guard(scope):
                if params['save_combine']:
                    # combined save: all vars into one file named
                    # params_file_name under "./"
                    fluid.io.save_vars(executor, "./", \
                                       program, vars=params_var_list, filename=params_file_name)
                else:
                    fluid.io.save_vars(executor, params_file_name, program, vars=params_var_list)