#   Copyright (c) 2019  PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import numbers
import numpy as np
from x2paddle.decoder.caffe_decoder import CaffeGraph
from x2paddle.core.op_mapper import OpMapper
from x2paddle.core.util import *
from x2paddle.op_mapper import caffe_shape
from x2paddle.op_mapper.caffe_custom_layer import *


class CaffeOpMapper(OpMapper):
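    """Map a parsed Caffe graph to PaddlePaddle fluid code.

    Nodes are visited in topological order and dispatched in one of three
    ways: a dedicated mapper method named after the layer type, a registered
    custom layer, or a one-to-one entry in directly_map_ops.

    A minimal usage sketch (the decoder is assumed to come from x2paddle's
    CaffeDecoder; its construction is not shown here):

        mapper = CaffeOpMapper(decoder)
        # mapper.weights now holds the numpy parameters keyed by layer name,
        # and each node's fluid_code carries the generated code.
    """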
    directly_map_ops = {
        'ReLU': 'relu',
        'AbsVal': 'abs',
        'Sigmoid': 'sigmoid',
        'TanH': 'tanh',
    }

    def __init__(self, decoder):
        super(CaffeOpMapper, self).__init__()
        self.graph = decoder.caffe_graph
        self.weights = dict()
        resolver = decoder.resolver
        self.used_custom_layers = {}

        print("Total nodes: {}".format(len(self.graph.topo_sort)))
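        # Dispatch each node in topological order: a dedicated mapper method
        # wins, then a registered custom layer, then a direct op mapping;
        # anything else is unsupported.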
        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            if node.layer_type == 'DepthwiseConvolution':
                node.layer_type = 'ConvolutionDepthwise'
            op = node.layer_type
            if hasattr(self, op):
                self.set_node_shape(node)
                func = getattr(self, op)
                func(node)
            elif op in custom_layers:
                self.set_node_shape(node, is_fluid_op=False)
                self.deal_custom_layer(node)
            elif op in self.directly_map_ops:
                self.set_node_shape(node)
                self.directly_map(node)
            else:
                raise Exception(
                    "The op {} in model is not supported yet.".format(op))

    def op_checker(self):
        unsupported_ops = set()
        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            op = node.layer_type
            if not hasattr(self, op) and op not in custom_layers:
                unsupported_ops.add(op)
        if len(unsupported_ops) == 0:
            return True
        else:
            print("There are {} ops not supported yet, listed below:".format(
                len(unsupported_ops)))
            for op in unsupported_ops:
                print(op)
            return False

    def set_node_shape(self, node, is_fluid_op=True):
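        """Collect input shapes from the bottom nodes, then infer the output
        shape via the shape_* helpers in caffe_shape for fluid ops, or via
        compute_output_shape for custom layers."""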
        inputs = node.inputs
        input_shape = []
        for i, nm in enumerate(inputs):
            last_node = self.graph.get_node(nm)
            tmp = node.layer.bottom[i]
            idx = list(last_node.layer.top).index(tmp)
            input_shape.append(last_node.output_shape[idx])

        node.input_shape = input_shape

        func_name = 'shape_' + node.layer_type.lower()
        if is_fluid_op:
            node.output_shape = getattr(caffe_shape, func_name)(node.layer,
                                                                input_shape)
        else:
            node.output_shape = compute_output_shape(node)

    def adjust_parameters(self, node):
        data = node.data
        # When using the protobuf-backend, each parameter initially has four dimensions.
        # In certain cases (like FC layers), we want to eliminate the singleton dimensions.
        # This implementation takes care of the common cases. However, it does leave the
        # potential for future issues.
        # The Caffe-backend does not suffer from this problem.
        data = list(data)

        squeeze_indices = [1]  # Squeeze biases.
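        # For example, an InnerProduct weight stored as (1, 1, c_o, c_i)
        # squeezes to (c_o, c_i), and a bias stored as (1, 1, 1, c) squeezes
        # to (c,).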
        if node.layer_type == 'InnerProduct':
            squeeze_indices.append(0)  # Squeeze FC.

        for idx in squeeze_indices:
            if idx >= len(data):
                continue

            d = data[idx]
            assert len(
                d.shape
            ) == 4, 'invalid shape[%s] from caffe when adjust_parameters' % (
                str(d.shape))

            shape_old = d.shape
            sq_axis = None
            if idx == 0:
                sq_axis = (0, 1)
            elif idx == 1:
                sq_axis = (0, 1, 2)
            else:
                continue

            data[idx] = np.squeeze(d, axis=sq_axis)
            shape_new = data[idx].shape
        return data

    def get_kernel_parameters(self, kind, params):
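        """Resolve output channels, kernel, stride, pad, dilation and group
        from a Caffe layer's params. Caffe allows kernel/stride/pad either as
        a scalar, as a repeated field, or as explicit *_h/*_w values; the
        explicit values win when they are positive."""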
        assert kind in ['Convolution', 'Pooling', 'Deconvolution']
        [k_h, k_w] = [1, 1]
        if isinstance(params.kernel_size, numbers.Number):
            [k_h, k_w] = [params.kernel_size] * 2
        elif len(params.kernel_size) > 0:
            k_h = params.kernel_h if params.kernel_h > 0 else params.kernel_size[
                0]
            k_w = params.kernel_w if params.kernel_w > 0 else params.kernel_size[
                len(params.kernel_size) - 1]
        elif params.kernel_h > 0 or params.kernel_w > 0:
            k_h = params.kernel_h
            k_w = params.kernel_w
        [s_h, s_w] = [1, 1]
        if isinstance(params.stride, numbers.Number):
            [s_h, s_w] = [params.stride] * 2
        elif len(params.stride) > 0:
            s_h = params.stride_h if params.stride_h > 0 else params.stride[0]
            s_w = params.stride_w if params.stride_w > 0 else params.stride[
                len(params.stride) - 1]
        elif params.stride_h > 0 or params.stride_w > 0:
            s_h = params.stride_h
            s_w = params.stride_w
        [p_h, p_w] = [0, 0]
        if isinstance(params.pad, numbers.Number):
            [p_h, p_w] = [params.pad] * 2
        elif len(params.pad) > 0:
            p_h = params.pad_h if params.pad_h > 0 else params.pad[0]
            p_w = params.pad_w if params.pad_w > 0 else params.pad[
                len(params.pad) - 1]
        elif params.pad_h > 0 or params.pad_w > 0:
            p_h = params.pad_h
            p_w = params.pad_w
        dila_h = dila_w = 1
        group = 1
        c_o = 1
        if kind in ['Convolution', 'Deconvolution']:
            c_o = params.num_output
            group = params.group
            dila_len = len(params.dilation)
            if dila_len == 2:
                dila_h = params.dilation[0]
                dila_w = params.dilation[1]
            elif dila_len == 1:
                dila_h = dila_w = params.dilation[0]
            else:
                assert dila_len == 0, "invalid length[%s] of dilation in convolution" % (
                    dila_len)
        kernel = [k_h, k_w]
        stride = [s_h, s_w]
        pad = [p_h, p_w]
        dilation = [dila_h, dila_w]
        return c_o, kernel, stride, pad, dilation, group

    def get_input_name(self, node):
        if hasattr(node, "index"):
            return node.layer_name + "[{}]".format(node.index)
        else:
            return node.layer_name

    def Input(self, node):
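        # Drop the batch dimension; fluid's data layer prepends it implicitly.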
        shape = list(node.layer.input_param.shape[0].dim)[1:]
        dtype = 'float32'
        attr = {
            'dtype': string(dtype),
            'shape': shape,
            'name': string(node.layer_name)
        }
        node.fluid_code.add_layer(
            "data", inputs=None, output=node, param_attr=attr)

    def MemoryData(self, node):
        # TODO(syf): PaddlePaddle can't fully support MemoryData; emulate it
        # with a data layer wrapped in a list.
        shape = node.output_shape[0][1:]
        dtype = 'float32'
        attr = {
            'dtype': string(dtype),
            'shape': shape,
            'name': string(node.layer_name)
        }
        node.fluid_code.add_layer(
            "data", inputs=None, output=node.layer_name + '0', param_attr=attr)
        node.fluid_code.add_note('{} = [{}]'.format(node.layer_name,
                                                    node.layer_name + '0'))

    def Convolution(self, node):
        data = node.data
        params = node.layer.convolution_param
        channel, kernel, stride, pad, dilation, group = self.get_kernel_parameters(
            node.layer_type, params)
        if data is None:
            data = []
            print(
                'The parameters of {} (type: {}) are not set; initializing them to 0.'
                .format(node.layer_name, node.layer_type))
            input_c = node.input_shape[0][1]
            output_c = channel
            data.append(
                np.zeros([output_c, input_c, kernel[0],
                          kernel[1]]).astype('float32'))
            data.append(np.zeros([
                output_c,
            ]).astype('float32'))
        else:
            data = self.adjust_parameters(node)
        self.weights[node.layer_name + '_weights'] = data[0]
        if len(data) == 2:
            self.weights[node.layer_name + '_bias'] = data[1]
        assert len(node.inputs
                   ) == 1, 'The count of Convolution node\'s input is not 1.'
        input = self.graph.get_bottom_node(node, idx=0, copy=True)

        attr = {
            'filter_size': kernel,
            'num_filters': channel,
            'stride': stride,
            'padding': pad,
            'dilation': dilation,
            'groups': group,
            'name': string(node.layer_name),
            'param_attr': string(node.layer_name + '_weights'),
            'bias_attr':
            False if len(data) == 1 else string(node.layer_name + '_bias'),
        }
        node.fluid_code.add_layer(
            "conv2d", inputs=input, output=node, param_attr=attr)

    def Deconvolution(self, node):
        data = node.data
        params = node.layer.convolution_param
        channel, kernel, stride, pad, dilation, group = self.get_kernel_parameters(
            node.layer_type, params)
        if data is None:
            data = []
            print(
                'The parameters of {} (type: {}) are not set; initializing them to 0.'
                .format(node.layer_name, node.layer_type))
            input_c = node.input_shape[0][1]
            output_c = channel
            data.append(
                np.zeros([output_c, input_c, kernel[0],
                          kernel[1]]).astype('float32'))
            data.append(np.zeros([
                output_c,
            ]).astype('float32'))
        else:
            data = self.adjust_parameters(node)
        self.weights[node.layer_name + '_weights'] = data[0]
        if len(data) == 2:
            self.weights[node.layer_name + '_bias'] = data[1]
        assert len(node.inputs
                   ) == 1, 'The count of Deconvolution node\'s input is not 1.'
        input = self.graph.get_bottom_node(node, idx=0, copy=True)
        attr = {
            'output_size': None,
            'filter_size': kernel,
            'num_filters': channel,
            'stride': stride,
            'padding': pad,
            'dilation': dilation,
            'groups': group,
            'name': string(node.layer_name),
            'param_attr': string(node.layer_name + '_weights'),
            'bias_attr':
            False if len(data) == 1 else string(node.layer_name + '_bias')
        }
        node.fluid_code.add_layer(
            "conv2d_transpose", inputs=input, output=node, param_attr=attr)

    def Pooling(self, node):
        params = node.layer.pooling_param
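        # Caffe computes pooling output sizes with ceil() by default, unlike
        # Paddle's floor, so ceil_mode defaults to True here.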
        ceil_mode = getattr(params, 'ceil_mode', True)
        global_pool = getattr(params, 'global_pooling', False)
        channel, kernel, stride, pad, dilation, group = self.get_kernel_parameters(
            node.layer_type, params)
        if params.pool == 0:
            pool_type = 'max'
        else:
            pool_type = 'avg'
        assert len(
            node.inputs) == 1, 'The count of Pooling node\'s input is not 1.'
        input = self.graph.get_bottom_node(node, idx=0, copy=True)
        attr = {
            'pool_size': kernel,
            'pool_stride': stride,
            'pool_padding': pad,
            'ceil_mode': ceil_mode,
            'pool_type': string(pool_type),
            'exclusive': False,
            'global_pooling': global_pool,
            'name': string(node.layer_name)
        }
        node.fluid_code.add_layer(
            "pool2d", inputs=input, output=node, param_attr=attr)

    def LRN(self, node):
        assert len(node.inputs) == 1, 'The count of LRN node\'s input is not 1.'
        params = node.layer.lrn_param
        # The window size must be an odd value. For a window
        # size of (2*n+1), Paddle defines depth_radius = n.
        assert params.local_size % 2 == 1
        # Caffe scales by (alpha/(2*n+1)), whereas Paddle
        # just scales by alpha (as does Krizhevsky's paper).
        # We'll account for that here.
        alpha = params.alpha / float(params.local_size)
        input = self.graph.get_bottom_node(node, idx=0, copy=True)
        attr = {
            'n': params.local_size,
            'k': params.k,
            'alpha': alpha,
            'beta': params.beta,
            'name': string(node.layer_name)
        }
        node.fluid_code.add_layer(
            "lrn", inputs=input, output=node, param_attr=attr)

    def InnerProduct(self, node):
        data = node.data
        params = node.layer.inner_product_param
        if data is None:
            print(
                'The parameters of {} (type: {}) are not set; initializing them to 0.'
                .format(node.layer_name, node.layer_type))
            input_c = node.input_shape[0][1]
            output_c = params.num_output
            data = []
            data.append(
                np.zeros([input_c, output_c]).astype('float32'))
            data.append(np.zeros([output_c]).astype('float32'))
        else:
            data = self.adjust_parameters(node)
            # Reshape the parameters to Paddle's ordering
            transpose_order = (1, 0)
            w = data[0]
            fc_shape = w.shape
            output_channels = fc_shape[0]
            w = w.reshape((output_channels, -1))
            w = w.transpose(transpose_order)
            data[0] = w

        self.weights[node.layer_name + '_weights'] = data[0]
        if len(data) == 2:
            self.weights[node.layer_name + '_bias'] = data[1]
        assert len(node.inputs
                   ) == 1, 'The count of InnerProduct node\'s input is not 1.'
        assert params.axis == 1
        assert params.bias_term == True
        input = self.graph.get_bottom_node(node, idx=0, copy=True)
        attr = {
            'size': params.num_output,
            'name': string(node.layer_name),
            'act': None,
            'param_attr': string(node.layer_name + '_weights'),
            'bias_attr':
            False if len(data) == 1 else string(node.layer_name + '_bias')
        }
        node.fluid_code.add_layer(
            "fc", inputs=input, output=node, param_attr=attr)

    def Softmax(self, node):
        assert len(
            node.inputs) == 1, 'The count of Softmax node\'s input is not 1.'
        input = self.graph.get_bottom_node(node, idx=0, copy=True)
        params = node.layer.softmax_param
        axis = params.axis
        shape = node.input_shape[0]
        dims = len(shape)
        axis = axis + dims if axis < 0 else axis
        attr = {'axis': axis, 'name': string(node.layer_name + '_softmax')}
        node.fluid_code.add_layer(
            "softmax", inputs=input, output=node, param_attr=attr)

    def Slice(self, node):
        assert len(
            node.inputs) == 1, 'The count of Slice node\'s input is not 1.'
        input = self.graph.get_bottom_node(node, idx=0, copy=True)
        top_len = len(node.layer.top)
        params = node.layer.slice_param
        axis = params.axis
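        # slice_dim is the deprecated predecessor of axis in Caffe; honor it
        # when it is set and axis is left at its default of 1.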
        slice_dim = params.slice_dim
        if slice_dim != 1 and axis == 1:
            axis = slice_dim
        output_shape = node.output_shape
        sections_list = []
        for s in output_shape:
            sections_list.append(s[axis])
        attr = {
            'num_or_sections': sections_list,
            'dim': axis,
            'name': string(node.layer_name)
        }
        node.fluid_code.add_layer(
            "split", inputs=input, output=node.layer_name, param_attr=attr)

    def Concat(self, node):
        assert len(
            node.inputs) >= 1, 'The count of Concat node\'s input is less than 1.'
        inputs = []
        for i in range(len(node.inputs)):
            input = self.graph.get_bottom_node(node, idx=i, copy=True)
            inputs.append(input)
        params = node.layer.concat_param
        axis = params.axis
        attr = {'axis': axis, 'name': string(node.layer_name)}
        node.fluid_code.add_layer(
            "concat", inputs=inputs, output=node, param_attr=attr)

    def PReLU(self, node):
        assert len(
            node.inputs) == 1, 'The count of PReLU node\'s input is not 1.'
        input = self.graph.get_bottom_node(node, idx=0, copy=True)
        params = node.layer.prelu_param
        mode_bool = params.channel_shared
        if mode_bool:
            mode = 'all'
        else:
            mode = 'channel'
        data = node.data
        assert data is not None, 'The parameters of {} (type: {}) are not set. Use the caffe python package to load the default values.'.format(
            node.layer_name, node.layer_type)
        self.weights[node.layer_name + '_weights'] = data[0]
        attr = {
            'mode': string(mode),
            'param_attr': string(node.layer_name + '_weights'),
            'name': string(node.layer_name)
        }
        node.fluid_code.add_layer(
            "prelu", inputs=input, output=node, param_attr=attr)

    def Accuracy(self, node):
        assert len(
            node.inputs) == 2, 'The count of Accuracy node\'s input is not 2.'
        inputs = [None, None]
        i = 0
        for shape in node.input_shape:
            if shape[1] == 1:
                input = self.graph.get_bottom_node(node, idx=i, copy=True)
                inputs[1] = input
            else:
                input = self.graph.get_bottom_node(node, idx=i, copy=True)
                inputs[0] = input
            i += 1
        params = node.layer.accuracy_param
        top_k = params.top_k
        axis = params.axis
        ignore_label = params.ignore_label
        assert axis == 1, 'PaddlePaddle does not support Accuracy with axis != 1.'
        assert not ignore_label >= 0, 'PaddlePaddle does not support Accuracy with ignore_label set.'
        attr = {'k': top_k}
        node.fluid_code.add_layer(
            "accuracy", inputs=inputs, output=node, param_attr=attr)

    def Eltwise(self, node):
        assert len(
            node.inputs) == 2, 'The count of Eltwise node\'s input is not 2.'
        params = node.layer.eltwise_param
        mode = params.operation
        inputs = []
        input0 = self.graph.get_bottom_node(node, idx=0, copy=True)
        inputs.append(input0)
        input1 = self.graph.get_bottom_node(node, idx=1, copy=True)
        inputs.append(input1)
        if mode == 0:
            inputs_dict = {}
            inputs_dict['x'] = inputs[0]
            inputs_dict['y'] = inputs[1]
            attr = {'act': None, 'name': string(node.layer_name)}
            node.fluid_code.add_layer(
                "elementwise_mul",
                inputs=inputs_dict,
                output=node,
                param_attr=attr)
        elif mode == 1:
            if hasattr(params, 'coeff') and len(params.coeff) == 2:
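                # Expand coeff[0] * x + coeff[1] * y: materialize each
                # coefficient with fill_constant, multiply it into its input,
                # then add the two products.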
                coeff = params.coeff
                input1_name = self.get_input_name(inputs[0])
                attr = {
                    'shape': [1],
                    'value': coeff[0],
                    'dtype': '{}.dtype'.format(input1_name)
                }
                node.fluid_code.add_layer(
                    "fill_constant",
                    inputs=None,
                    output=node.layer_name + '_const1',
                    param_attr=attr)
                attr = {'act': None, 'name': string(node.layer_name + '_mul1')}
                node.fluid_code.add_layer(
                    "elementwise_mul",
                    inputs=input1_name + ', ' + node.layer_name + '_const1',
                    output=node.layer_name + '_mul1',
                    param_attr=attr)
                input2_name = self.get_input_name(inputs[1])
                attr = {
                    'shape': [1],
                    'value': coeff[1],
                    'dtype': '{}.dtype'.format(input2_name)
                }
                node.fluid_code.add_layer(
                    "fill_constant",
                    inputs=None,
                    output=node.layer_name + '_const2',
                    param_attr=attr)
                attr = {'act': None, 'name': string(node.layer_name + '_mul2')}
                node.fluid_code.add_layer(
                    "elementwise_mul",
                    inputs=input2_name + ', ' + node.layer_name + '_const2',
                    output=node.layer_name + '_mul2',
                    param_attr=attr)

                attr = {'act': None, 'name': string(node.layer_name)}
                node.fluid_code.add_layer(
                    "elementwise_add",
                    inputs='{}_mul1, {}_mul2'.format(node.layer_name,
                                                     node.layer_name),
                    output=node,
                    param_attr=attr)
            else:
                inputs_dict = {}
                inputs_dict['x'] = inputs[0]
                inputs_dict['y'] = inputs[1]
                attr = {'act': None, 'name': string(node.layer_name)}
                node.fluid_code.add_layer(
                    "elementwise_add",
                    inputs=inputs_dict,
                    output=node,
                    param_attr=attr)
        else:
            inputs_dict = {}
            inputs_dict['x'] = inputs[0]
            inputs_dict['y'] = inputs[1]
            attr = {'act': None, 'name': string(node.layer_name)}
            node.fluid_code.add_layer(
                "elementwise_max",
                inputs=inputs_dict,
                output=node,
                param_attr=attr)

    def BatchNorm(self, node):
        assert len(
            node.inputs) == 1, 'The count of BatchNorm node\'s input is not 1.'
        input = self.graph.get_bottom_node(node, idx=0, copy=True)
        params = node.layer.batch_norm_param
        if hasattr(params, 'eps'):
            eps = params.eps
        else:
            eps = 1e-5
        if node.data is None or len(node.data) != 3:
            print(
                'The parameters of {} (type: {}) are not set; initializing them to 0.'
                .format(node.layer_name, node.layer_type))
            input_c = node.input_shape[0][1]
            mean = np.zeros([
                input_c,
            ]).astype('float32')
            variance = np.zeros([
                input_c,
            ]).astype('float32')
            scale = 0
        else:
            node.data = [np.squeeze(i).astype('float32') for i in node.data]
            mean, variance, scale = node.data
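        # Caffe's BatchNorm stores a third blob with the moving-average
        # accumulation factor; dividing it out recovers the true statistics.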
        # Prescale the stats
        scaling_factor = 1.0 / scale if scale != 0 else 0
        mean *= scaling_factor
        variance *= scaling_factor
        self.weights[node.layer_name + '_mean'] = mean
        self.weights[node.layer_name + '_variance'] = variance
        attr = {
            'is_test': True,
            'param_attr': None,
            'bias_attr': None,
            'moving_mean_name': string(node.layer_name + '_mean'),
            'moving_variance_name': string(node.layer_name + '_variance'),
            'epsilon': eps,
            'name': string(node.layer_name)
        }
        node.fluid_code.add_layer(
            "batch_norm", inputs=input, output=node, param_attr=attr)

    def Scale(self, node):
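        """Map Caffe's Scale layer to elementwise_mul followed by
        elementwise_add. With two bottoms, the multiplier comes from the
        second input; otherwise it is a parameter created from the layer's
        learned weights."""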
        if node.data is None:
            print(
                'The parameters of {} (type: {}) are not set; initializing them to 0.'
                .format(node.layer_name, node.layer_type))
            input_c = node.input_shape[0][1]
            self.weights[node.layer_name + '_scale'] = np.zeros([
                input_c,
            ]).astype('float32')
            self.weights[node.layer_name + '_offset'] = np.zeros([
                input_c,
            ]).astype('float32')
        else:
            self.weights[node.layer_name + '_scale'] = np.squeeze(
                node.data[0]).astype('float32')
            self.weights[node.layer_name + '_offset'] = np.squeeze(
                node.data[1]).astype('float32')
        params = node.layer.scale_param
        axis = params.axis
        num_axes = params.num_axes
        inputs = []
        if len(node.inputs) == 2:
            # For two input tensors, reset axis to 1. This may be buggy for
            # unknown cases.
            axis = 1
            bias_shape = node.input_shape[0][axis:axis + num_axes]
            input0 = self.graph.get_bottom_node(node, idx=0, copy=True)
            input1 = self.graph.get_bottom_node(node, idx=1, copy=True)
            inputs_dict = {}
            inputs_dict['x'] = input0
            inputs_dict['y'] = input1
            attr = {'axis': axis, 'name': string(node.layer_name + '_mul')}
            node.fluid_code.add_layer(
                "elementwise_mul",
                inputs=inputs_dict,
                output=node.layer_name + '_mul',
                param_attr=attr)
        else:
            bias_shape = node.input_shape[0][axis:axis + num_axes]
            input0 = self.graph.get_bottom_node(node, idx=0, copy=True)
            input0_name = self.get_input_name(input0)
            attr = {
                'dtype': '{}.dtype'.format(input0_name),
                'shape': bias_shape,
                'name': string(node.layer_name + '_cparam1'),
                'attr': string(node.layer_name + '_scale'),
                'is_bias': True,
                'default_initializer': 'Constant(value=1.0)'
            }
            node.fluid_code.add_layer(
                "create_parameter", inputs=None, output=node, param_attr=attr)
            inputs_dict = {}
            inputs_dict['x'] = input0
            inputs_dict['y'] = node
            attr = {'axis': axis, 'name': string(node.layer_name + '_mul')}
            node.fluid_code.add_layer(
                "elementwise_mul",
                inputs=inputs_dict,
                output=node.layer_name + '_mul',
                param_attr=attr)
        scale_shape = bias_shape
        input0_name = self.get_input_name(input0)
        attr = {
            'dtype': '{}.dtype'.format(input0_name),
            'shape': scale_shape,
            'name': string(node.layer_name + '_cparam2'),
            'attr': string(node.layer_name + '_offset'),
            'is_bias': True,
            'default_initializer': 'Constant(value=1.0)'
        }
        node.fluid_code.add_layer(
            "create_parameter",
            inputs=None,
            output=node.layer_name + '_offset_param',
            param_attr=attr)
        attr = {'axis': axis, 'name': string(node.layer_name + '_add')}
        node.fluid_code.add_layer(
            "elementwise_add",
            inputs='{}_mul, {}_offset_param'.format(node.layer_name,
                                                    node.layer_name),
            output=node,
            param_attr=attr)

    def Reshape(self, node):
        input = self.graph.get_bottom_node(node, idx=0, copy=True)
        top_count = len(input.layer.top)
        is_inplace = top_count != 1
        output_shape = node.output_shape[0]
        attr = {
            'shape': output_shape,
            'inplace': is_inplace,
729
            'act': None,
S
        }
J
jiangjiajun 已提交
732 733
        node.fluid_code.add_layer(
            "reshape", inputs=input, output=node, param_attr=attr)
S
    def ArgMax(self, node):
        assert len(node.inputs) == 1 and len(
            node.outputs
        ) == 1, 'The count of ArgMax node\'s input and output is not 1.'
        input = self.graph.get_bottom_node(node, idx=0, copy=True)
        input_shape = node.input_shape[0]
        params = node.layer.argmax_param
        out_max_val = params.out_max_val if hasattr(
            params, 'out_max_val') else False
        top_k = params.top_k if hasattr(params, 'top_k') else 1
        axis = params.axis if hasattr(params, 'axis') else -1
        if axis < 0:
            axis += len(input_shape)
        if out_max_val is True:
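            # With out_max_val, Caffe returns both the max values and their
            # indices: run topk, cast the integer indices to the value dtype,
            # and concat the two along axis.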
            attr = {'k': top_k, 'name': string(node.layer_name + '_topk')}
            node.fluid_code.add_layer(
                "topk",
                inputs=input,
                output='{}_topk_var, {}_index_var'.format(
                    node.layer_name, node.layer_name),
                param_attr=attr)
            attr = {'dtype': '{}_topk_var.dtype'.format(node.layer_name)}
            node.fluid_code.add_layer(
                "cast",
                inputs='{}_index_var'.format(node.layer_name),
                output='{}_index_var'.format(node.layer_name),
                param_attr=attr)
            attr = {'axis': axis, 'name': string(node.layer_name)}
            node.fluid_code.add_layer(
                "concat",
                inputs='{}_topk_var, {}_index_var'.format(
                    node.layer_name, node.layer_name),
                output=node,
                param_attr=attr)
        else:
            attr = {'k': top_k, 'name': string(node.layer_name)}
            node.fluid_code.add_layer(
                "topk",
                inputs=input,
                output='_, {}'.format(node.layer_name),
                param_attr=attr)

    def Crop(self, node):
        assert len(
            node.inputs) == 2, 'The count of Crop node\'s input is not 2.'
        input = self.graph.get_bottom_node(node, idx=0, copy=True)
        example = self.graph.get_bottom_node(node, idx=1, copy=True)
        params = node.layer.crop_param
        axis = params.axis
        input_shape = node.input_shape[0]
        if axis < 0:
            axis += len(input_shape)
        offset_real = [0] * len(input_shape)
        if hasattr(params, "offset") and len(params.offset) > 0:
            offset = list(params.offset)
            assert (len(input_shape) - axis) == len(
                offset), "invalid offset[%s] in crop layer" % (str(offset))
            offset_real = [0] * axis + offset
        attr = {'offsets': list(offset_real), 'name': string(node.layer_name)}
        node.fluid_code.add_layer(
            "crop",
            inputs={
                'x': input,
                'shape': node.input_shape[1]
            },
            output=node,
            param_attr=attr)

    def Flatten(self, node):
        assert len(
            node.inputs) == 1, 'The count of Flatten node\'s input is not 1.'
        input = self.graph.get_bottom_node(node, idx=0, copy=True)
        shape = node.output_shape[0]
        attr = {'shape': shape, 'name': string(node.layer_name)}
        node.fluid_code.add_layer(
            "reshape", inputs=input, output=node, param_attr=attr)

    def Power(self, node):
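        """Caffe's Power layer computes (shift + scale * x) ** power, mapped
        here as a fluid scale op followed by pow."""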
        assert len(
            node.inputs) == 1, 'The count of Power node\'s input is not 1.'
        input = self.graph.get_bottom_node(node, idx=0, copy=True)
        params = node.layer.power_param
        power = params.power
        scale = params.scale
        shift = params.shift
        attr = {
            'scale': scale,
            'bias': shift,
            'bias_after_scale': True,
            'name': string(node.layer_name + '_scale')
        }
        node.fluid_code.add_layer(
            "scale", inputs=input, output=node, param_attr=attr)
        attr = {'factor': power, 'name': string(node.layer_name)}
        node.fluid_code.add_layer(
            "pow", inputs=node, output=node, param_attr=attr)

    def Reduction(self, node):
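        """Reduce the trailing dimensions starting at axis, then scale the
        result by coeff. Caffe operation codes: 1 = SUM, 2 = ASUM (sum of
        absolute values), 3 = SUMSQ (sum of squares), 4 = MEAN."""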
        assert len(
            node.inputs) == 1, 'The count of Reduction node\'s input is not 1.'
        input = self.graph.get_bottom_node(node, idx=0, copy=True)
        params = node.layer.reduction_param
        operation = params.operation
        axis = params.axis
        coeff = params.coeff
        assert operation >= 1 and operation <= 4, "invalid operation [%s] in Reduction layer" % (
            operation)
        input_len = len(node.input_shape[0])
        if axis < 0:
            axis += input_len + 1
        dim = list(range(input_len))
        if operation == 1:  ## operation = SUM
            attr = {
                'dim': dim[axis:],
                'keep_dim': False,
                'name': string(node.layer_name)
            }
            node.fluid_code.add_layer(
                "reduce_sum", inputs=input, output=node, param_attr=attr)
        elif operation == 2:  ## operation = ASUM
            attr = {'name': string(node.layer_name + '_abs')}
            node.fluid_code.add_layer(
                "abs", inputs=input, output=node, param_attr=attr)
            attr = {
                'dim': dim[axis:],
                'keep_dim': False,
                'name': string(node.layer_name)
            }
            node.fluid_code.add_layer(
                "reduce_sum", inputs=node, output=node, param_attr=attr)
        elif operation == 3:  ## operation = SUMSQ
            attr = {'factor': 2.0, 'name': string(node.layer_name + '_pow')}
            node.fluid_code.add_layer(
                "pow", inputs=input, output=node, param_attr=attr)
            attr = {
                'dim': dim[axis:],
                'keep_dim': False,
                'name': string(node.layer_name)
            }
            node.fluid_code.add_layer(
                "reduce_sum", inputs=node, output=node, param_attr=attr)
        else:  ## operation = MEAN
            attr = {
                'dim': dim[axis:],
                'keep_dim': False,
                'name': string(node.layer_name)
            }
            node.fluid_code.add_layer(
                "reduce_mean", inputs=node, output=node, param_attr=attr)
        attr = {'scale': coeff}
        node.fluid_code.add_layer(
            "scale", inputs=node, output=node, param_attr=attr)

    def deal_custom_layer(self, node):
        op = node.layer_type
        custom_code, func = make_custom_layer(node)
        params = get_params(node.layer, node.layer_type)
        arg_names, kwargs = set_args(func, params)
        kwargs['name'] = string(node.layer_name)
        kwargs['input_shape'] = node.input_shape
        data = node.data
        if data is not None:
            data = self.adjust_parameters(node)
            weights_name = deal_weights(node)
            for i in range(len(data)):
                self.weights[weights_name[i]] = data[i]
        inputs_node = []
        for i in range(len(node.inputs)):
            input = self.graph.get_bottom_node(node, idx=i, copy=True)
            if i == 1 and op == 'DetectionOutput':
                input = self.graph.get_bottom_node(node, idx=i, copy=True)
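                # The second input of DetectionOutput is the confidence blob:
                # walk back to the Softmax/Sigmoid that produces it and use
                # that op's own bottom as the input instead.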
                while input is not None \
                      and input.layer_type != 'Softmax' \
                      and input.layer_type != 'Sigmoid':
                    input = self.graph.get_bottom_node(input, idx=0, copy=True)
                assert input is not None, 'This kind of DetectionOutput is not supported!'
                input = self.graph.get_bottom_node(input, idx=0, copy=True)
            inputs_node.append(input)
        node.fluid_code.add_layer(
            func.__code__.co_name,
            inputs=inputs_node,
            output=node,
            param_attr=kwargs,
            is_custom_layer=True)
        if op not in self.used_custom_layers:
            self.used_custom_layers[op] = custom_code

    def directly_map(self, node):
        assert node.layer_type in self.directly_map_ops
        op_info = self.directly_map_ops[node.layer_type]
        input = self.graph.get_bottom_node(node, idx=0, copy=True)
        attr = {'name': string(node.layer_name)}
        node.fluid_code.add_layer(
            op_info, inputs=input, output=node, param_attr=attr)