#   Copyright (c) 2019  PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import numbers
import copy
import numpy as np
from x2paddle.decoder.caffe_decoder import CaffeGraph
from x2paddle.core.op_mapper import OpMapper
from x2paddle.core.util import *
from x2paddle.op_mapper.static.caffe2paddle import caffe_shape
from x2paddle.op_mapper.static.caffe2paddle.caffe_custom_layer import *
from x2paddle.core.program import PaddleGraph


class CaffeOpMapper(OpMapper):
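    """Map the layers of a parsed CaffeGraph onto an equivalent static
    PaddleGraph, collecting converted weights in self.weights and generated
    custom-layer code in self.used_custom_layers.

    Minimal usage sketch (hedged: assumes the CaffeDecoder interface from
    x2paddle.decoder.caffe_decoder; the exact save entry point may differ
    between x2paddle versions):

        # decoder = CaffeDecoder(prototxt_path, caffemodel_path)
        # mapper = CaffeOpMapper(decoder)
        # mapper.paddle_graph.gen_model(save_dir)  # hypothetical entry point
    """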
    directly_map_ops = {
        'AbsVal': 'paddle.abs',
        'Sigmoid': 'fluid.layers.sigmoid',
        'TanH': 'paddle.tanh',
    }

    def __init__(self, decoder):
        super(CaffeOpMapper, self).__init__()
        self.graph = decoder.caffe_graph
        self.weights = dict()
        resolver = decoder.resolver
        self.used_custom_layers = {}
        self.paddle_graph = PaddleGraph(parent_layer=None, graph_type="static", source_type="caffe")
        self.paddle_graph.inputs = self.graph.input_nodes
        self.paddle_graph.outputs = self.graph.output_nodes

        print("Total nodes: {}".format(len(self.graph.topo_sort)))
        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            if node.layer_type == 'DepthwiseConvolution':
                node.layer_type = 'ConvolutionDepthwise'
            op = node.layer_type
            if hasattr(self, op):
                self.set_node_shape(node)
                func = getattr(self, op)
                func(node)
            elif op in custom_layers:
                self.set_node_shape(node, is_fluid_op=False)
                self.deal_custom_layer(node)
            elif op in self.directly_map_ops:
                self.set_node_shape(node)
                self.directly_map(node)
            else:
                raise Exception(
                    "The op {} in model is not supported yet.".format(op))
        self.paddle_graph.set_parameters(self.weights)
        self.paddle_graph.set_custom(self.used_custom_layers)


    def op_checker(self):
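        """Return True when every layer type in the graph has a handler here
        or in custom_layers; otherwise print the unsupported types and
        return False."""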
        unsupported_ops = set()
        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            op = node.layer_type
            if not hasattr(self, op) and op not in custom_layers:
                unsupported_ops.add(op)
        if len(unsupported_ops) == 0:
            return True
        else:
            print("There are {} ops not supported yet, list as below".format(
                len(unsupported_ops)))
            for op in unsupported_ops:
                print(op)
            return False

    def set_node_shape(self, node, is_fluid_op=True):
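        """Collect the node's input_shape from its producers, then derive its
        output_shape through the matching shape_* helper in caffe_shape (for
        fluid ops) or through the custom layer's compute_output_shape."""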
        inputs = node.inputs
        input_shape = []
        for i, nm in enumerate(inputs):
            last_node = self.graph.get_node(nm)
            tmp = node.layer.bottom[i]
            idx = list(last_node.layer.top).index(tmp)
            input_shape.append(last_node.output_shape[idx])

        node.input_shape = input_shape
        func_name = 'shape_' + node.layer_type.lower()
        if is_fluid_op:
            node.output_shape = getattr(caffe_shape, func_name)(node.layer,
                                                                input_shape)
        else:
            node.output_shape = compute_output_shape(node)

    def adjust_parameters(self, node):
        data = node.data
        # When using the protobuf-backend, each parameter initially has four dimensions.
        # In certain cases (like FC layers), we want to eliminate the singleton dimensions.
        # This implementation takes care of the common cases. However, it does leave the
        # potential for future issues.
        # The Caffe-backend does not suffer from this problem.
        data = list(data)

        squeeze_indices = [1]  # Squeeze biases.
        if node.layer_type == 'InnerProduct':
            squeeze_indices.append(0)  # Squeeze FC.

        for idx in squeeze_indices:
            if idx >= len(data):
                continue

            d = data[idx]
            assert len(
                d.shape
            ) == 4, 'invalid shape[%s] from caffe when adjust_parameters' % (
                str(d.shape))

            shape_old = d.shape
            sq_axis = None
            if idx == 0:
                sq_axis = (0, 1)
            elif idx == 1:
                sq_axis = (0, 1, 2)
            else:
                continue

            data[idx] = np.squeeze(d, axis=sq_axis)
            shape_new = data[idx].shape
        return data

    def get_kernel_parameters(self, kind, params):
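        """Resolve Caffe's kernel/stride/pad conventions (a scalar, a repeated
        field, or explicit *_h/*_w overrides) into [H, W] lists, and read
        num_output, dilation and group for (de)convolutions.

        Returns (c_o, kernel, stride, pad, dilation, group)."""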
        assert kind in ['Convolution', 'Pooling', 'Deconvolution']
        [k_h, k_w] = [1, 1]
        if isinstance(params.kernel_size, numbers.Number):
            [k_h, k_w] = [params.kernel_size] * 2
        elif len(params.kernel_size) > 0:
            k_h = params.kernel_h if params.kernel_h > 0 else params.kernel_size[
                0]
            k_w = params.kernel_w if params.kernel_w > 0 else params.kernel_size[
                len(params.kernel_size) - 1]
        elif params.kernel_h > 0 or params.kernel_w > 0:
            k_h = params.kernel_h
            k_w = params.kernel_w
        [s_h, s_w] = [1, 1]
        if isinstance(params.stride, numbers.Number):
            [s_h, s_w] = [params.stride] * 2
        elif len(params.stride) > 0:
            s_h = params.stride_h if params.stride_h > 0 else params.stride[0]
            s_w = params.stride_w if params.stride_w > 0 else params.stride[len(
                params.stride) - 1]
        elif params.stride_h > 0 or params.stride_w > 0:
            s_h = params.stride_h
            s_w = params.stride_w
        [p_h, p_w] = [0, 0]
        if isinstance(params.pad, numbers.Number):
            [p_h, p_w] = [params.pad] * 2
        elif len(params.pad) > 0:
            p_h = params.pad_h if params.pad_h > 0 else params.pad[0]
            p_w = params.pad_w if params.pad_w > 0 else params.pad[len(
                params.pad) - 1]
        elif params.pad_h > 0 or params.pad_w > 0:
            p_h = params.pad_h
            p_w = params.pad_w
        dila_h = dila_w = 1
        group = 1
        c_o = 1
        if kind in ['Convolution', 'Deconvolution']:
            c_o = params.num_output
            dila_len = len(params.dilation)
            if dila_len == 2:
                dila_h = params.dilation[0]
                dila_w = params.dilation[1]
            elif dila_len == 1:
                dila_h = dila_w = params.dilation[0]
            else:
                assert dila_len == 0, "invalid length[%s] of dilation in convolution" % (
                    dila_len)
        if kind in ['Convolution', 'Deconvolution']:
            group = params.group
        kernel = [k_h, k_w]
        stride = [s_h, s_w]
        pad = [p_h, p_w]
        dilation = [dila_h, dila_w]
        return c_o, kernel, stride, pad, dilation, group

    def get_input_name(self, node):
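        """Return the variable name of the node's output; nodes that carry an
        index (multi-output producers) get a "{name}_{index}" suffix."""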
        if hasattr(node, "index"):
            return "{}_{}".format(node.layer_name, node.index)
        else:
            return node.layer_name

    def Input(self, node):
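        """Map a Caffe Input layer to fluid.data, dropping the batch
        dimension from the prototxt shape and re-adding it as -1."""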
        shape = list(node.layer.input_param.shape[0].dim)[1:]
        dtype = 'float32'
        layer_attrs = {
            "dtype": string(dtype),
            "shape": [-1] + shape,
            "name": string(node.layer_name)
        }
        self.paddle_graph.add_layer(
            kernel="fluid.data",
            inputs={},
            outputs=[node.layer_name],
            **layer_attrs)

    def Convolution(self, node):
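        """Map a Caffe Convolution layer to fluid.layers.conv2d, registering
        its (possibly zero-initialized) weights and optional bias."""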
        data = node.data
        params = node.layer.convolution_param
        channel, kernel, stride, pad, dilation, group = self.get_kernel_parameters(
            node.layer_type, params)
        if data is None:
            data = []
            print(
                "The parameter of {} (type is {}) is not set, so the parameters are set to 0."
                .format(node.layer_name, node.layer_type))
            input_c = node.input_shape[0][1]
            output_c = channel
            data.append(
                np.zeros([output_c, input_c, kernel[0], kernel[1]]).astype(
                    'float32'))
            data.append(np.zeros([output_c, ]).astype('float32'))
        else:
            data = self.adjust_parameters(node)
        self.weights[node.layer_name + '_weights'] = data[0]
        if len(data) == 2:
            self.weights[node.layer_name + '_bias'] = data[1]
        assert len(node.inputs
                   ) == 1, 'The count of Convolution node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_attrs = {
            'filter_size': kernel,
            'num_filters': channel,
            'stride': stride,
            'padding': pad,
            'dilation': dilation,
            'groups': group,
            'name': string(node.layer_name),
            'param_attr': string(node.layer_name + '_weights'),
            'bias_attr': False
            if len(data) == 1 else string(node.layer_name + '_bias'),
        }
        self.paddle_graph.add_layer(
            kernel="fluid.layers.conv2d",
            inputs={"input": self.get_input_name(input)},
            outputs=[node.layer_name],
            **layer_attrs)
        
    def Deconvolution(self, node):
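        """Map a Caffe Deconvolution layer to fluid.layers.conv2d_transpose;
        parameters are handled the same way as in Convolution."""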
        data = node.data
        params = node.layer.convolution_param
        channel, kernel, stride, pad, dilation, group = self.get_kernel_parameters(
            node.layer_type, params)
        if data is None:
            data = []
            print(
                'The parameter of {} (type is {}) is not set, so the parameters are set to 0.'
                .format(node.layer_name, node.layer_type))
            input_c = node.input_shape[0][1]
            output_c = channel
            data.append(
                np.zeros([output_c, input_c, kernel[0], kernel[1]]).astype(
                    'float32'))
            data.append(np.zeros([output_c, ]).astype('float32'))
        else:
            data = self.adjust_parameters(node)
        self.weights[node.layer_name + '_weights'] = data[0]
        if len(data) == 2:
            self.weights[node.layer_name + '_bias'] = data[1]
        assert len(node.inputs
                   ) == 1, 'The count of Deconvolution node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_attrs = {
            'output_size': None,
            'filter_size': kernel,
            'num_filters': channel,
            'stride': stride,
            'padding': pad,
            'dilation': dilation,
            'groups': group,
            'name': string(node.layer_name),
            'param_attr': string(node.layer_name + '_weights'),
            'bias_attr': False
            if len(data) == 1 else string(node.layer_name + '_bias')
        }
        self.paddle_graph.add_layer(
            kernel="fluid.layers.conv2d_transpose",
            inputs={"input": self.get_input_name(input)},
            outputs=[node.layer_name],
            **layer_attrs)

    def Pooling(self, node):
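        """Map a Caffe Pooling layer to fluid.layers.pool2d; pool == 0 selects
        max pooling, anything else average pooling."""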
        params = node.layer.pooling_param
        ceil_mode = getattr(params, 'ceil_mode', True)
        global_pool = getattr(params, 'global_pooling', False)
        kernel_default = [1, 1]
        channel, kernel, stride, pad, dilation, group = self.get_kernel_parameters(
            node.layer_type, params)
        if params.pool == 0:
            pool_type = 'max'
        else:
            pool_type = 'avg'
        assert len(
            node.inputs) == 1, 'The count of Pooling node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_attrs = {
            'pool_size': kernel,
            'pool_stride': stride,
            'pool_padding': pad,
            'ceil_mode': ceil_mode,
            'pool_type': string(pool_type),
            'exclusive': False,
            'global_pooling': global_pool,
            'name': string(node.layer_name)
        }
        self.paddle_graph.add_layer(
            kernel="fluid.layers.pool2d",
            inputs={"input": self.get_input_name(input)},
            outputs=[node.layer_name],
            **layer_attrs)

    def LRN(self, node):
        assert len(node.inputs) == 1, 'The count of LRN node\'s input is not 1.'
        params = node.layer.lrn_param
        # The window size must be an odd value. For a window
        # size of (2*n+1), Paddle defines depth_radius = n.
        assert params.local_size % 2 == 1
        # Caffe scales by (alpha/(2*n+1)), whereas Paddle
        # just scales by alpha (as does Krizhevsky's paper).
        # We'll account for that here.
        alpha = params.alpha / float(params.local_size)
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_attrs = {
            'n': params.local_size,
            'k': params.k,
            'alpha': alpha,
            'beta': params.beta,
            'name': string(node.layer_name)
        }
        self.paddle_graph.add_layer(
            kernel="fluid.layers.lrn",
            inputs={"input": self.get_input_name(input)},
            outputs=[node.layer_name],
            **layer_attrs)

    def InnerProduct(self, node):
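        """Map a Caffe InnerProduct (fully connected) layer to
        fluid.layers.fc, transposing the weights into Paddle's
        (in_features, out_features) layout."""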
        data = node.data
        params = node.layer.inner_product_param
        if data is None:
            print(
                'The parameter of {} (type is {}) is not set, so the parameters are set to 0.'
                .format(node.layer_name, node.layer_type))
            input_c = node.input_shape[0][1]
            output_c = params.num_output
            data = []
            data.append(
                np.zeros([input_c, output_c]).astype('float32'))
            data.append(
                np.zeros([output_c]).astype('float32'))
        else:
            data = self.adjust_parameters(node)
            # Reshape the parameters to Paddle's ordering
            transpose_order = (1, 0)
            w = data[0]
            fc_shape = w.shape
            output_channels = fc_shape[0]
            w = w.reshape((output_channels, -1))
            w = w.transpose(transpose_order)
            data[0] = w

        self.weights[node.layer_name + '_weights'] = data[0]
        if len(data) == 2:
            self.weights[node.layer_name + '_bias'] = data[1]
        assert len(node.inputs
                   ) == 1, 'The count of InnerProduct node\'s input is not 1.'
        assert params.axis == 1
        assert params.bias_term == True
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_attrs = {
            'size': params.num_output,
            'name': string(node.layer_name),
            'act': None,
            'param_attr': string(node.layer_name + '_weights'),
            'bias_attr': False
            if len(data) == 1 else string(node.layer_name + '_bias')
        }
        self.paddle_graph.add_layer(
            kernel="fluid.layers.fc",
            inputs={"input": self.get_input_name(input)},
            outputs=[node.layer_name],
            **layer_attrs)

    def Softmax(self, node):
        assert len(
            node.inputs) == 1, 'The count of Softmax node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.softmax_param
        axis = params.axis
        shape = node.input_shape[0]
        dims = len(shape)
        axis = axis + dims if axis < 0 else axis
        layer_attrs = {'axis': axis, 'name': string(node.layer_name + '_softmax')}
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.softmax",
            inputs={"x": self.get_input_name(input)},
            outputs=[node.layer_name],
            **layer_attrs)

    def Slice(self, node):
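        """Map a Caffe Slice layer to fluid.layers.split, deriving the split
        sections along the slice axis from the precomputed output shapes."""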
        assert len(
            node.inputs) == 1, 'The count of Slice node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        top_len = len(node.layer.top)
        params = node.layer.slice_param
        axis = params.axis
        slice_dim = params.slice_dim
        if slice_dim != 1 and axis == 1:
            axis = slice_dim
        output_shape = node.output_shape
        sections_list = list()
        outputs_list = list()
        for i, s in enumerate(output_shape):
            sections_list.append(s[axis])
            outputs_list.append("{}_{}".format(node.layer_name, i))
        layer_attrs = {
            'num_or_sections': sections_list,
            'dim': axis,
            'name': string(node.layer_name)
        }
        self.paddle_graph.add_layer(
            kernel="fluid.layers.split",
            inputs={"input": self.get_input_name(input)},
            outputs=outputs_list,
            **layer_attrs)

    def Concat(self, node):
        assert len(
            node.inputs
        ) >= 1, 'The count of Concat node\'s inputs should be at least 1.'
        inputs_list = []
        for i in range(len(node.inputs)):
            input = self.graph.get_input_node(node, idx=i, copy=True)
            inputs_list.append(self.get_input_name(input))
        params = node.layer.concat_param
        axis = params.axis
        layer_attrs = {'axis': axis, 'name': string(node.layer_name)}
        self.paddle_graph.add_layer(
            kernel="paddle.concat",
            inputs={"x": inputs_list},
            outputs=[node.layer_name],
            **layer_attrs)

    def ReLU(self, node):
        """

        :param node:
        :return:
        """
        assert len(
            node.inputs) == 1, 'The count of ReLU node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)

        params = node.layer.relu_param
        if params.HasField('negative_slope') and params.negative_slope != 0:
            negative_slope = float(params.negative_slope)
            self.paddle_graph.add_layer(
                kernel="fluid.layers.leaky_relu",
                inputs={"x": self.get_input_name(input)},
                outputs=[node.layer_name],
                alpha=negative_slope)
        else:
            self.paddle_graph.add_layer(
                kernel="fluid.layers.relu",
                inputs={"x": self.get_input_name(input)},
                outputs=[node.layer_name])

    def PReLU(self, node):
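        """Map a Caffe PReLU layer to fluid.layers.prelu; channel_shared
        selects Paddle's 'all' mode, otherwise 'channel' is used."""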
        assert len(
            node.inputs) == 1, 'The count of PReLU node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.prelu_param
        mode_bool = params.channel_shared
        if mode_bool:
            mode = 'all'
        else:
            mode = 'channel'
        data = node.data
        assert data is not None, 'The parameter of {} (type is {}) is not set. You need to use python package of caffe to set the default value.'.format(
            node.layer_name, node.layer_type)
        self.weights[node.layer_name + '_weights'] = data[0]
        layer_attrs = {
            'mode': string(mode),
            'param_attr': string(node.layer_name + '_weights'),
            'name': string(node.layer_name)
        }
        self.paddle_graph.add_layer(
            kernel="fluid.layers.prelu",
            inputs={"x": self.get_input_name(input)},
            outputs=[node.layer_name],
            **layer_attrs)

    def Accuracy(self, node):
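        """Map a Caffe Accuracy layer to fluid.layers.accuracy, telling the
        label input apart from the prediction by its channel size of 1."""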
        assert len(
            node.inputs) == 2, 'The count of Accuracy node\'s input is not 2.'
        inputs_dict = dict()
        for i, shape in enumerate(node.input_shape):
            if shape[1] == 1:
                input = self.graph.get_input_node(node, idx=i, copy=True)
                inputs_dict["label"] = self.get_input_name(input)
            else:
                input = self.graph.get_input_node(node, idx=i, copy=True)
                inputs_dict["input"] = self.get_input_name(input)
        params = node.layer.accuracy_param
        top_k = params.top_k
        axis = params.axis
        ignore_label = params.ignore_label
        assert axis == 1, 'PaddlePaddle can not support the situation when the axis is not 1.'
        assert not ignore_label >= 0, 'PaddlePaddle can not support the situation when the model has ignore label.'
        self.paddle_graph.add_layer(
            kernel="fluid.layers.accuracy",
            inputs=inputs_dict,
            outputs=[node.layer_name],
            k=top_k)

    def Eltwise(self, node):
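        """Map a Caffe Eltwise layer: operation 0 (PROD) becomes an
        elementwise multiply, 1 (SUM, optionally weighted by two
        coefficients) an elementwise add, and anything else (MAX) an
        elementwise max."""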
        assert len(
            node.inputs) == 2, 'The count of Eltwise node\'s input is not 2.'
        params = node.layer.eltwise_param
        mode = params.operation
        inputs = []
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        inputs.append(input0)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        inputs.append(input1)
        if mode == 0:
            inputs_dict = {}
            inputs_dict['x'] = self.get_input_name(inputs[0])
            inputs_dict['y'] = self.get_input_name(inputs[1])
            self.paddle_graph.add_layer(
                kernel="fluid.layers.elementwise_mul",
                inputs=inputs_dict,
                outputs=[node.layer_name])
        elif mode == 1:
            if hasattr(params, 'coeff') and len(params.coeff) == 2:
                coeff = params.coeff
                input1_name = self.get_input_name(inputs[0])
                layer_attrs = {
                    'shape': [1],
                    'value': coeff[0],
                    'dtype': '{}.dtype'.format(input1_name)
                }
                self.paddle_graph.add_layer(
                    kernel="fluid.layers.fill_constant",
                    inputs={},
                    outputs=["{}_const1".format(node.layer_name)],
                    **layer_attrs)
                self.paddle_graph.add_layer(
                    kernel="fluid.layers.elementwise_mul",
                    inputs={"x": input1_name,
                            "y": "{}_const1".format(node.layer_name)},
                    outputs=["{}_mul1".format(node.layer_name)])
                input2_name = self.get_input_name(inputs[1])
                layer_attrs = {
                    'shape': [1],
                    'value': coeff[1],
                    'dtype': '{}.dtype'.format(input2_name)
                }
                self.paddle_graph.add_layer(
                    kernel="fluid.layers.fill_constant",
                    inputs={},
                    outputs=["{}_const2".format(node.layer_name)],
                    **layer_attrs)
                self.paddle_graph.add_layer(
                    kernel="fluid.layers.elementwise_mul",
                    inputs={"x": input2_name,
                            "y": "{}_const2".format(node.layer_name)},
                    outputs=["{}_mul2".format(node.layer_name)])
                self.paddle_graph.add_layer(
                    kernel="fluid.layers.elementwise_add",
                    inputs={"x": "{}_mul1".format(node.layer_name),
                            "y": "{}_mul2".format(node.layer_name)},
                    outputs=[node.layer_name])
            else:
                inputs_dict = {}
                inputs_dict['x'] = self.get_input_name(inputs[0])
                inputs_dict['y'] = self.get_input_name(inputs[1])
                self.paddle_graph.add_layer(
                    kernel="fluid.layers.elementwise_add",
                    inputs=inputs_dict,
                    outputs=[node.layer_name])
        else:
            inputs_dict = {}
            inputs_dict['x'] = self.get_input_name(inputs[0])
            inputs_dict['y'] = self.get_input_name(inputs[1])
            self.paddle_graph.add_layer(
                kernel="fluid.layers.elementwise_max",
                inputs=inputs_dict,
                outputs=[node.layer_name])

    def BatchNorm(self, node):
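        """Map a Caffe BatchNorm layer to fluid.layers.batch_norm, folding
        Caffe's stored scale factor into the moving mean and variance."""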
        assert len(
            node.inputs) == 1, 'The count of BatchNorm node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.batch_norm_param
        if hasattr(params, 'eps'):
            eps = params.eps
        else:
            eps = 1e-5
        if node.data is None or len(node.data) != 3:
            print(
                'The parameter of {} (type is {}) is not set, so the parameters are set to 0.'
                .format(node.layer_name, node.layer_type))
            input_c = node.input_shape[0][1]
            mean = np.zeros([input_c, ]).astype('float32')
            variance = np.zeros([input_c, ]).astype('float32')
            scale = 0
        else:
            node.data = [np.squeeze(i).astype('float32') for i in node.data]
            mean, variance, scale = node.data
        # Prescale the stats
        scaling_factor = 1.0 / scale if scale != 0 else 0
        mean *= scaling_factor
        variance *= scaling_factor
        self.weights[node.layer_name + '_mean'] = mean
        self.weights[node.layer_name + '_variance'] = variance
        layer_attrs = {
            'is_test': True,
            'param_attr': None,
            'bias_attr': None,
            'moving_mean_name': string(node.layer_name + '_mean'),
            'moving_variance_name': string(node.layer_name + '_variance'),
            'epsilon': eps,
            'name': string(node.layer_name)
        }
        self.paddle_graph.add_layer(
            kernel="fluid.layers.batch_norm",
            inputs={"input": self.get_input_name(input)},
            outputs=[node.layer_name],
            **layer_attrs)

    def Scale(self, node):
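        """Map a Caffe Scale layer. With two inputs this is an elementwise
        multiply; with one input, a learned per-channel scale plus offset is
        created through fluid.layers.create_parameter."""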
        if node.data is None:
            print(
                'The parameter of {} (type is {}) is not set, so the parameters are set to 0.'
                .format(node.layer_name, node.layer_type))
            input_c = node.input_shape[0][1]
            self.weights[node.layer_name + '_scale'] = np.zeros([
                input_c,
            ]).astype('float32')
            self.weights[node.layer_name + '_offset'] = np.zeros([
                input_c,
            ]).astype('float32')
        else:
            self.weights[node.layer_name + '_scale'] = np.squeeze(node.data[
                0]).astype('float32')
            self.weights[node.layer_name + '_offset'] = np.squeeze(node.data[
                1]).astype('float32')
        params = node.layer.scale_param
        axis = params.axis
        num_axes = params.num_axes
        inputs = []
        if len(node.inputs) == 2:
            # For two input tensors, axis is reset to 1 here. This may be wrong for unknown cases.
            axis = 1
            bias_shape = node.input_shape[0][axis:axis + num_axes]
            input0 = self.graph.get_input_node(node, idx=0, copy=True)
            input1 = self.graph.get_input_node(node, idx=1, copy=True)
            inputs_dict = {}
            inputs_dict['x'] = self.get_input_name(input0)
            inputs_dict['y'] = self.get_input_name(input1)
            self.paddle_graph.add_layer(
                kernel="fluid.layers.elementwise_mul",
                inputs=inputs_dict,
                outputs=["{}_mul".format(node.layer_name)],
                axis=axis)
        else:
            bias_shape = node.input_shape[0][axis:axis + num_axes]
            input0 = self.graph.get_input_node(node, idx=0, copy=True)
            input0_name = self.get_input_name(input0)
            self.paddle_graph.add_layer(
                kernel="fluid.ParamAttr",
                inputs={},
                outputs=["{}_scale".format(node.layer_name)],
                name=string("{}_scale".format(node.layer_name)))
            layer_attrs = {
                'dtype': '{}.dtype'.format(input0_name),
                'shape': bias_shape,
                'name': string(node.layer_name + '_cparam1'),
                'is_bias': True,
                'default_initializer': 'Constant(value=1.0)'
            }
            self.paddle_graph.add_layer(
                kernel="fluid.layers.create_parameter",
                inputs={"attr": node.layer_name + '_scale',},
                outputs=["{}_cparam1".format(node.layer_name)],
                **layer_attrs)
            inputs_dict = {}
            inputs_dict['x'] = self.get_input_name(input0)
            inputs_dict['y'] = "{}_cparam1".format(node.layer_name)
            self.paddle_graph.add_layer(
                kernel="fluid.layers.elementwise_mul",
                inputs=inputs_dict,
                outputs=["{}_mul".format(node.layer_name)],
                axis=axis)
        scale_shape = bias_shape
        input0_name = self.get_input_name(input0)
        self.paddle_graph.add_layer(
            kernel="fluid.ParamAttr",
            inputs={},
            outputs=["{}_offset".format(node.layer_name)],
            name=string("{}_offset".format(node.layer_name)))
        layer_attrs = {
            'dtype': '{}.dtype'.format(input0_name),
            'shape': scale_shape,
            'name': string(node.layer_name + '_cparam2'),
            'is_bias': True,
            'default_initializer': 'Constant(value=1.0)'
        }
        self.paddle_graph.add_layer(
            kernel="fluid.layers.create_parameter",
            inputs={"attr": node.layer_name + '_offset'},
            outputs=["{}_cparam2".format(node.layer_name)],
            **layer_attrs)
        inputs_dict = {}
        inputs_dict['x'] = "{}_mul".format(node.layer_name)
        inputs_dict['y'] = "{}_cparam2".format(node.layer_name)
        self.paddle_graph.add_layer(
            kernel="fluid.layers.elementwise_add",
            inputs=inputs_dict,
            outputs=[node.layer_name],
            axis=axis)
        

    def Reshape(self, node):
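        """Map a Caffe Reshape layer to fluid.layers.reshape, marking it
        inplace when the producing layer has more than one top."""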
        input = self.graph.get_input_node(node, idx=0, copy=True)
        top_count = len(input.layer.top)
        is_inplace = False if top_count == 1 else True
        output_shape = node.output_shape[0]
        layer_attrs = {
            'shape': output_shape,
            'inplace': is_inplace,
            'act': None,
            'name': string(node.layer_name)
        }
        self.paddle_graph.add_layer(
            kernel="fluid.layers.reshape",
            inputs={"x": self.get_input_name(input)},
            outputs=[node.layer_name],
            **layer_attrs)

    def ArgMax(self, node):
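        """Map a Caffe ArgMax layer onto fluid.layers.topk; when out_max_val
        is set, the top-k values and indices are concatenated along axis."""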
        assert len(node.inputs) == 1 and len(
            node.outputs
        ) == 1, 'The count of ArgMax node\'s input and output is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        input_shape = node.input_shape[0]
        params = node.layer.argmax_param
        out_max_val = params.out_max_val if hasattr(params,
                                                    'out_max_val') else False
        top_k = params.top_k if hasattr(params, 'top_k') else 1
        axis = params.axis if hasattr(params, 'axis') else -1
        if axis < 0:
            axis += len(input_shape)
        if out_max_val is True:
            self.paddle_graph.add_layer(
                kernel="fluid.layers.topk",
                inputs={"input": self.get_input_name(input)},
                outputs=["{}_topk_var".format(node.layer_name),
                         "{}_index_var".format(node.layer_name)],
                k=top_k)
            self.paddle_graph.add_layer(
                kernel="paddle.cast",
                inputs={"x": "{}_topk_var".format(node.layer_name)},
                outputs=["{}_topk_var".format(node.layer_name)],
                dtype="{}_topk_var.dtype".format(node.layer_name))
            self.paddle_graph.add_layer(
                kernel="paddle.concat",
                inputs={"x": "[{}_topk_var, {}_index_var]".format(node.layer_name,
                                                                  node.layer_name)},
                outputs=[node.layer_name],
                axis=axis)
        else:
            self.paddle_graph.add_layer(
                kernel="fluid.layers.topk",
                inputs={"input": self.get_input_name(input)},
                outputs=["_", node.layer_name],
                k=top_k)

    def Crop(self, node):
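        """Map a Caffe Crop layer to fluid.layers.crop_tensor, expanding the
        per-axis offsets (from axis onward) to the input's full rank and
        cropping to the shape of the second input."""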
        assert len(
            node.inputs) == 2, 'The count of Crop node\'s input is not 2.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        example = self.graph.get_input_node(node, idx=1, copy=True)
        params = node.layer.crop_param
        axis = params.axis
        input_shape = node.input_shape[0]
        if axis < 0:
            axis += len(input_shape)
        offset_real = [0] * len(input_shape)
        if hasattr(params, "offset") and len(params.offset) > 0:
            offset = list(params.offset)
            assert (len(input_shape) - axis
                    ) == len(offset), "invalid offset[%s] in crop layer" % (
                        str(offset))
            offset_real = [0] * axis + offset
        layer_attrs = {"offsets": list(offset_real),
                       "shape": node.input_shape[1]}
        self.paddle_graph.add_layer(
            kernel="fluid.layers.crop_tensor",
            inputs={"x": self.get_input_name(input)},
            outputs=[node.layer_name],
            **layer_attrs)
        
    def Flatten(self, node):
        assert len(
            node.inputs) == 1, 'The count of Flatten node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            kernel="fluid.layers.reshape",
            inputs={"x": self.get_input_name(input)},
            outputs=[node.layer_name],
            shape=node.output_shape[0])
        
    def Power(self, node):
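        """Map a Caffe Power layer ((shift + scale * x) ** power) to
        paddle.scale followed by paddle.pow."""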
        assert len(
            node.inputs) == 1, 'The count of Power node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.power_param
        power = params.power
        scale = params.scale
        shift = params.shift
        layer_attrs = {
            'scale': scale,
            'bias': shift,
            'bias_after_scale': True,
            'name': string(node.layer_name + '_scale')
        }
        self.paddle_graph.add_layer(
            kernel="paddle.scale",
            inputs={"x": self.get_input_name(input)},
            outputs=[node.layer_name],
            **layer_attrs)
        self.paddle_graph.add_layer(
            kernel="paddle.pow",
            inputs={"x": node.layer_name},
            outputs=[node.layer_name],
            factor=power)

    def Reduction(self, node):
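        """Map a Caffe Reduction layer; operations 1 (SUM), 2 (ASUM),
        3 (SUMSQ) and 4 (MEAN) reduce over all dims from axis onward,
        and the result is scaled by coeff."""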
        assert len(
            node.inputs) == 1, 'The count of Reduction node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.reduction_param
        operation = params.operation
        axis = params.axis
        coeff = params.coeff
        assert operation >= 1 and operation <= 4, "invalid operation [%s] in Reduction layer" % (
            operation)
        input_len = len(node.input_shape[0])
        if axis < 0:
            axis += input_len + 1
        dim = list(range(input_len))
        if operation == 1:  ## operation = SUM
            layer_attrs = {
                'dim': dim[axis:],
                'keep_dim': False,
                'name': string(node.layer_name)
            }
            self.paddle_graph.add_layer(
                kernel="fluid.layers.reduce_sum",
                inputs={"input": self.get_input_name(input)},
                outputs=[node.layer_name],
                **layer_attrs)
        elif operation == 2:  ## operation = ASUM
            self.paddle_graph.add_layer(
                kernel="paddle.abs",
                inputs={"x": self.get_input_name(input)},
                outputs=[node.layer_name])
            layer_attrs = {
                'dim': dim[axis:],
                'keep_dim': False,
                'name': string(node.layer_name)
            }
            self.paddle_graph.add_layer(
                kernel="fluid.layers.reduce_sum",
                inputs={"input": node.layer_name},
                outputs=[node.layer_name],
                **layer_attrs)
        elif operation == 3:  ## operation = SUMSQ
            self.paddle_graph.add_layer(
                kernel="paddle.pow",
                inputs={"x": self.get_input_name(input)},
                outputs=[node.layer_name],
                factor=2.0)
            layer_attrs = {
                'dim': dim[axis:],
                'keep_dim': False,
                'name': string(node.layer_name)
            }
            self.paddle_graph.add_layer(
                kernel="fluid.layers.reduce_sum",
                inputs={"input": node.layer_name},
                outputs=[node.layer_name],
                **layer_attrs)
        else:  ## operation = MEAN
            layer_attrs = {
                'dim': dim[axis:],
                'keep_dim': False,
                'name': string(node.layer_name)
            }
            self.paddle_graph.add_layer(
                kernel="fluid.layers.reduce_mean",
                inputs={"input": self.get_input_name(input)},
                outputs=[node.layer_name],
                **layer_attrs)
        self.paddle_graph.add_layer(
            kernel="paddle.scale",
            inputs={"x": node.layer_name},
            outputs=[node.layer_name],
            scale=coeff)

    def deal_custom_layer(self, node):
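        """Emit a custom_layer kernel for ops without a fluid equivalent,
        generating the helper code once and registering any weights."""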
        op = node.layer_type
        custom_code, func = make_custom_layer(node)
        params = get_params(node.layer, node.layer_type)
        arg_names, kwargs = set_args(func, params)
        kwargs['name'] = string(node.layer_name)
        kwargs['input_shape'] = node.input_shape
        data = node.data
        if data is not None:
            data = self.adjust_parameters(node)
            weights_name = deal_weights(node)
            for i in range(len(data)):
                self.weights[weights_name[i]] = data[i]
        inputs_list = []
        for i in range(len(node.inputs)):
            input = self.graph.get_input_node(node, idx=i, copy=True)
            if i == 1 and op == 'DetectionOutput':
                input = self.graph.get_input_node(node, idx=i, copy=True)
                while input is not None \
                      and input.layer_type != 'Softmax' \
                      and input.layer_type != 'Sigmoid':
                    input = self.graph.get_input_node(input, idx=0, copy=True)
                assert input is not None, 'This kind of DetectionOutput is not supported!'
                input = self.graph.get_input_node(input, idx=0, copy=True)
            inputs_list.append(self.get_input_name(input))
        kwargs_tmp = copy.deepcopy(kwargs)
        for k, v in kwargs_tmp.items():
            if str(type(v)) == "<class 'caffe_pb2.NonMaximumSuppressionParameter'>":
                kwargs[k] = dict()
                kwargs[k]["nms_threshold"] = v.nms_threshold
                kwargs[k]["top_k"] = v.top_k
                kwargs[k]["eta"] = v.eta
        self.paddle_graph.add_layer(
            kernel="custom_layer:{}".format(op),
            inputs={"inputs": inputs_list},
            outputs=[node.layer_name],
            **kwargs)
        if op not in self.used_custom_layers:
            self.used_custom_layers[op] = custom_code

    def directly_map(self, node):
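        """Map a layer type listed in directly_map_ops straight onto the
        corresponding single-input Paddle kernel."""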
        assert node.layer_type in self.directly_map_ops
        op_info = self.directly_map_ops[node.layer_type]
        input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            kernel=op_info,
            inputs={"x": self.get_input_name(input)},
            outputs=[node.layer_name])