"""
DnnLayer: analyse layer config, and parse to Paddle Operator, build net
"""
import abc
import paddle.fluid as fluid

class Layer(object):
    """Abstract base for net-building layers.

    A concrete layer parses its piece of the net config in __init__ and
    emits framework-specific operators when generate() is called.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, config):
        """Accept the raw layer config dict; concrete layers read their fields."""
        pass

    def generate(self, mode, param):
        """Build this layer for the backend named by `mode`.

        Dispatches to generate_fluid / generate_tensorflow; for an unknown
        mode it prints a notice and returns (None, None).
        """
        builders = {
            'fluid': self.generate_fluid,
            'tensorflow': self.generate_tensorflow,
        }
        builder = builders.get(mode)
        if builder is not None:
            return builder(param)
        print ('unsupport this mode: ' + mode)
        return None, None

    @abc.abstractmethod
    def generate_fluid(self, param):
        """Build the layer with paddle.fluid operators (must be overridden)."""
        pass

    def generate_tensorflow(self, param):
        """ Not implement currently
        """
        pass
class EmbeddingInputLayer(Layer):
    """Sparse-slot input layer.

    Embeds each slot's ids, sum-pools over the sequence, applies the
    continuous-value model (show/click), and concatenates every slot's
    result into a single output tensor.
    """
    def __init__(self, config):
        """Read cvm / name / slots / mf_dim / backward from the layer config."""
        self._cvm = config['cvm']
        self._name = config['name']
        self._slots = [str(slot) for slot in config['slots']]
        self._mf_dim = config['mf_dim']
        self._backward = config['backward']
        self._emb_dim = self._mf_dim + 3  # append show ctr lr
        self._emb_layers = []

    def generate_fluid(self, param):
        """Emit fluid ops for every slot.

        Returns (concatenated embedding tensor, {'data_var': feed vars}).
        """
        # show/click stats feed the CVM op; they carry no gradient.
        show_clk = fluid.layers.concat(
            [param['layer']['show'], param['layer']['click']], axis=1)
        show_clk.stop_gradient = True
        data_var = []
        for slot_name in self._slots:
            slot_input = fluid.layers.data(
                name=slot_name, shape=[1], dtype="int64", lod_level=1)
            data_var.append(slot_input)
            # Distributed sparse table; first size dim is a placeholder.
            emb = fluid.layers.embedding(
                input=slot_input, size=[10, self._emb_dim],
                is_sparse=True, is_distributed=True,
                param_attr=fluid.ParamAttr(name="embedding"))
            emb = fluid.layers.sequence_pool(input=emb, pool_type='sum')
            emb = fluid.layers.continuous_value_model(emb, show_clk, self._cvm)
            self._emb_layers.append(emb)
        output = fluid.layers.concat(
            input=self._emb_layers, axis=1, name=self._name)
        return output, {'data_var' : data_var}
class LabelInputLayer(Layer):
    """Label input layer: declares the label feed var and casts it to float32."""
    def __init__(self, config):
        """Read name / dim / data_type / label_idx from the layer config."""
        self._name = config['name']
        self._dim = config.get('dim', 1)
        self._data_type = config.get('data_type', "int64")
        self._label_idx = config['label_idx']

    def generate_fluid(self, param):
        """Emit the label data var.

        Returns (float32 label with gradients stopped, {'data_var': [raw label]}).
        """
        raw_label = fluid.layers.data(
            name=self._name, shape=[-1, self._dim], dtype=self._data_type,
            lod_level=0, append_batch_size=False)
        float_label = fluid.layers.cast(raw_label, dtype='float32')
        float_label.stop_gradient = True
        return float_label, {'data_var': [raw_label]}
class TagInputLayer(Layer):
    """Auxiliary tag input layer: a plain feed var that carries no gradient."""
    def __init__(self, config):
        """Read name / tag / dim / data_type from the layer config."""
        self._name = config['name']
        self._tag = config['tag']
        self._dim = config.get('dim', 1)
        self._data_type = config['data_type']

    def generate_fluid(self, param):
        """Emit the tag data var; returns (var, {'data_var': [var]})."""
        tag_var = fluid.layers.data(
            name=self._name, shape=[-1, self._dim], dtype=self._data_type,
            lod_level=0, append_batch_size=False, stop_gradient=True)
        return tag_var, {'data_var': [tag_var]}
class ParamLayer(Layer):
    """Parameter-table config holder.

    Emits no ops itself: generate_fluid just hands the raw config (used by
    NeuralLayer for width / init range) plus an empty param inference entry.
    """
    def __init__(self, config):
        """Read name / coln / table_id / init_range / data_type; keep the config."""
        self._name = config['name']
        self._coln = config['coln']
        self._table_id = config.get('table_id', -1)
        self._init_range = config.get('init_range', 1)
        self._data_type = config.get('data_type', 'float32')
        self._config = config

    def generate_fluid(self, param):
        """Return (raw config, inference_param bound to this param table)."""
        inference = {'name': 'param', 'params': [], 'table_id': self._table_id}
        return self._config, {'inference_param': inference}
class SummaryLayer(Layer):
    """Summary-table config holder.

    Emits no ops: generate_fluid hands back the raw config (consumed by
    NormalizetionLayer) plus an empty summary inference entry.
    """
    def __init__(self, config):
        """Read name / table_id / data_type; keep the config."""
        self._name = config['name']
        self._table_id = config.get('table_id', -1)
        self._data_type = config.get('data_type', 'float32')
        self._config = config

    def generate_fluid(self, param):
        """Return (raw config, inference_param bound to this summary table)."""
        inference = {'name': 'summary', 'params': [], 'table_id': self._table_id}
        return self._config, {'inference_param': inference}
class NormalizetionLayer(Layer):
    """Data-normalization layer over one or more concatenated inputs.

    NOTE(review): the class name keeps the original spelling
    ("Normalizetion") because layer configs reference it by name.
    """
    def __init__(self, config):
        """Read name / input list / summary ref / table_id from the layer config."""
        self._name = config['name']
        self._input = config['input']
        self._summary = config['summary']
        self._table_id = config.get('table_id', -1)

    def generate_fluid(self, param):
        """Emit a fluid data_norm over the input(s).

        Returns (normalized tensor, inference_param naming the data_norm
        statistic vars, bound to the summary layer's table).
        """
        input_layer = param['layer'][self._input[0]]
        # summary_layer is the raw config dict echoed by SummaryLayer.
        summary_layer = param['layer'][self._summary]
        # Fix: concat only when there are several inputs. The original
        # `len(self._input) > 0` was always true (self._input[0] above
        # would already have raised on an empty list), which made the
        # first assignment dead and wrapped single inputs in a 1-way concat.
        if len(self._input) > 1:
            input_list = [param['layer'][i] for i in self._input]
            input_layer = fluid.layers.concat(input=input_list, axis=1)
        bn = fluid.layers.data_norm(
            input=input_layer, name=self._name, epsilon=1e-4,
            param_attr={
                "batch_size": 1e4,
                "batch_sum_default": 0.0,
                "batch_square": 1e4})
        inference_param = [
            self._name + '.batch_size',
            self._name + '.batch_sum',
            self._name + '.batch_square_sum']
        return bn, {'inference_param' : {'name':'summary',
            'params': inference_param,
            'table_id': summary_layer.get('table_id', -1)}}
class NeuralLayer(Layer):
    """Fully-connected layer.

    Width and weight-init range come from a referenced ParamLayer config;
    inputs are concatenated when more than one is listed.
    """
    def __init__(self, config):
        """Read name / param ref / input list / bias / act_func from the config."""
        self._name = config['name']
        self._param = config['param']
        self._input = config['input']
        self._bias = config.get('bias', True)
        self._act_func = config.get('act_func', None)

    def generate_fluid(self, param):
        """Emit the fc op.

        Returns (fc output, inference_param naming the fc weight/bias vars,
        bound to the param layer's table).
        """
        # param_layer is the raw config dict echoed by ParamLayer.
        param_layer = param['layer'][self._param]
        input_layer = param['layer'][self._input[0]]
        # Fix: concat only when there are several inputs. The original
        # `len(self._input) > 0` was always true (self._input[0] above
        # would already have raised on an empty list), which made the
        # first assignment dead and wrapped single inputs in a 1-way concat.
        if len(self._input) > 1:
            input_list = [param['layer'][i] for i in self._input]
            input_layer = fluid.layers.concat(input=input_list, axis=1)
        input_coln = input_layer.shape[1]
        # Scale weight init by 1/sqrt(fan-in).
        scale = param_layer['init_range'] / (input_coln ** 0.5)
        bias = None
        if self._bias:
            bias = fluid.ParamAttr(learning_rate=1.0,
                initializer=fluid.initializer.NormalInitializer(loc=0.0, scale=scale))
        fc = fluid.layers.fc(
            name = self._name,
            input = input_layer,
            size = param_layer['coln'],
            act = self._act_func,
            param_attr = \
                fluid.ParamAttr(learning_rate=1.0, \
                initializer=fluid.initializer.NormalInitializer(loc=0.0, scale=scale)),
            bias_attr = bias)
        inference_param = [self._name + '.w_0', self._name + '.b_0']
        return fc, {'inference_param' : {'name':'param',
            'params': inference_param,
            'table_id': param_layer.get('table_id', -1)}}
class SigmoidLossLayer(Layer):
    """Sigmoid + log-loss head with AUC / CTR metric wiring.

    Clips the input logits, applies sigmoid, computes a (optionally
    weighted) mean log loss, and registers AUC and CTR metric variables
    in self._extend_output['metric_dict'].
    """
    def __init__(self, config):
        """Read name / label / input / weight / metric_label / bound from config
        and pre-build the metric-dict skeleton (vars filled in generate_fluid).
        """
        self._name = config['name']
        self._label = config['label']
        self._input = config['input']
        self._weight = config.get('weight', None)
        self._metric_label = config.get('metric_label', None)
        # Logit clipping range before sigmoid.
        self._bound = config.get('bound', [-15.0, 15.0])
        self._extend_output = {
            'metric_label': self._metric_label,
            'metric_dict': {
                'auc': {'var': None},
                'batch_auc': {'var': None},
                'stat_pos': {'var': None, 'data_type': 'int64'},
                'stat_neg': {'var': None, 'data_type': 'int64'},
                'batch_stat_pos': {'var': None, 'data_type': 'int64'},
                'batch_stat_neg': {'var': None, 'data_type': 'int64'},
                'pos_ins_num': {'var': None},
                'abserr': {'var': None},
                'sqrerr': {'var': None},
                'prob': {'var': None},
                'total_ins_num': {'var': None},
                'q': {'var': None}
            }
        }
        

    def generate_fluid(self, param): 
        """Emit loss + metric ops.

        Returns (sigmoid output, extend_output dict carrying 'loss' and the
        filled-in 'metric_dict').
        """
        input_layer = param['layer'][self._input[0]]
        label_layer = param['layer'][self._label]
        # NOTE(review): clip and sigmoid both pass name=self._name — confirm
        # fluid tolerates the duplicate layer name here.
        output = fluid.layers.clip(input_layer, self._bound[0], self._bound[1], name=self._name)
        norm = fluid.layers.sigmoid(output, name=self._name)
        output = fluid.layers.log_loss(norm, fluid.layers.cast(x=label_layer, dtype='float32'))
        if self._weight:
            # Optional per-instance loss weight, looked up by layer name.
            weight_layer = param['layer'][self._weight]
            output = fluid.layers.elementwise_mul(output, weight_layer)
        output = fluid.layers.mean(x=output)
        self._extend_output['loss'] = output
        
        #For AUC Metric
        metric = self._extend_output['metric_dict']
        # Two-column [P(neg), P(pos)] prediction: ceil(norm) - norm == 1 - norm
        # for norm in (0, 1).
        binary_predict = fluid.layers.concat(
            input=[fluid.layers.elementwise_sub(fluid.layers.ceil(norm), norm), norm], axis=1)
        # fluid.layers.auc returns (auc, batch_auc, [batch_stat_pos,
        # batch_stat_neg, stat_pos, stat_neg]); unpack into the metric dict.
        metric['auc']['var'], metric['batch_auc']['var'], [metric['batch_stat_pos']['var'], \
        metric['batch_stat_neg']['var'], metric['stat_pos']['var'], metric['stat_neg']['var']] = \
            fluid.layers.auc(input=binary_predict, label=fluid.layers.cast(x=label_layer, dtype='int64'), \
            curve='ROC', num_thresholds=32)

        # CTR bundle: squared error, abs error, mean prediction, q, positive
        # instance count, total instance count.
        metric['sqrerr']['var'], metric['abserr']['var'], metric['prob']['var'], metric['q']['var'], \
        metric['pos_ins_num']['var'], metric['total_ins_num']['var'] = \
            fluid.contrib.layers.ctr_metric_bundle(norm, fluid.layers.cast(x=label_layer, dtype='float32'))

        return norm, self._extend_output