#   Copyright (c) 2019  PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import numbers
import copy
import numpy as np
from x2paddle.decoder.caffe_decoder import CaffeGraph, CaffeGraphNode
from x2paddle.core.op_mapper import OpMapper
from x2paddle.core.util import *
from x2paddle.core.program import PaddleGraph 


def _adjust_parameters(node):
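    """Squeeze the singleton dims that the protobuf backend adds to blob params."""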
    data = node.data
    # When using the protobuf-backend, each parameter initially has four dimensions.
    # In certain cases (like FC layers), we want to eliminate the singleton dimensions.
    # This implementation takes care of the common cases. However, it does leave the
    # potential for future issues.
    # The Caffe-backend does not suffer from this problem.
    data = list(data)

    squeeze_indices = [1]  # Squeeze biases.
    if node.layer_type == 'InnerProduct':
        squeeze_indices.append(0)  # Squeeze FC.

    for idx in squeeze_indices:
        if idx >= len(data):
            continue

        d = data[idx]
        assert len(
            d.shape
        ) == 4, 'invalid shape[%s] from caffe when adjust_parameters' % (
            str(d.shape))

        shape_old = d.shape
        sq_axis = None
        if idx == 0:
            sq_axis = (0, 1)
        elif idx == 1:
            sq_axis = (0, 1, 2)
        else:
            continue

        data[idx] = np.squeeze(d, axis=sq_axis)
        shape_new = data[idx].shape
    return data

def _get_kernel_parameters(kind, params):
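    """Parse (c_o, kernel, stride, pad, dilation, group) from a Caffe
    convolution/pooling parameter message, honoring the *_h/*_w overrides.
    """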
    assert kind in ["Convolution", "Pooling", "Deconvolution", "ConvolutionDepthwise"]
    k_h, k_w = 1, 1
    if isinstance(params.kernel_size, numbers.Number):
        k_h = k_w = params.kernel_size
    elif len(params.kernel_size) > 0:
        k_h = params.kernel_h if params.kernel_h > 0 else params.kernel_size[0]
        k_w = params.kernel_w if params.kernel_w > 0 else params.kernel_size[-1]
    elif params.kernel_h > 0 or params.kernel_w > 0:
        k_h = params.kernel_h
        k_w = params.kernel_w
    s_h, s_w = 1, 1
    if isinstance(params.stride, numbers.Number):
        s_h = s_w = params.stride
    elif len(params.stride) > 0:
        s_h = params.stride_h if params.stride_h > 0 else params.stride[0]
        s_w = params.stride_w if params.stride_w > 0 else params.stride[-1]
    elif params.stride_h > 0 or params.stride_w > 0:
        s_h = params.stride_h
        s_w = params.stride_w
    p_h, p_w = 0, 0
    if isinstance(params.pad, numbers.Number):
        p_h = p_w = params.pad
    elif len(params.pad) > 0:
        p_h = params.pad_h if params.pad_h > 0 else params.pad[0]
        p_w = params.pad_w if params.pad_w > 0 else params.pad[-1]
    elif params.pad_h > 0 or params.pad_w > 0:
        p_h = params.pad_h
        p_w = params.pad_w
    dila_h = dila_w = 1
    group = 1
    c_o = 1
    if kind in ["Convolution", "Deconvolution", "ConvolutionDepthwise"]:
        if kind in ["Convolution", "Deconvolution"]:
            c_o = params.num_output
        dila_len = len(params.dilation)
        if dila_len == 2:
            dila_h = params.dilation[0]
            dila_w = params.dilation[1]
        elif dila_len == 1:
            dila_h = dila_w = params.dilation[0]
        else:
            assert dila_len == 0, "invalid length[%s] of dilation in convolution" % (
                dila_len)
    if kind in ['Convolution', 'Deconvolution']:
        group = params.group
    kernel = [k_h, k_w]
    stride = [s_h, s_w]
    pad = [p_h, p_w]
    dilation = [dila_h, dila_w]
    return c_o, kernel, stride, pad, dilation, group


class CaffeOpMapper(OpMapper):
    directly_map_ops = {
        'AbsVal': 'paddle.abs',
        'Sigmoid': 'paddle.nn.functional.sigmoid',
        'TanH': 'paddle.tanh',
    }

    def __init__(self, decoder):
        super(CaffeOpMapper, self).__init__()
        self.graph = decoder.caffe_graph
        self.params = dict()
        resolver = decoder.resolver
        self.used_custom_layers = {}
        self.paddle_graph = PaddleGraph(
            parent_layer=None, graph_type="static", source_type="caffe")
        self.paddle_graph.inputs = self.graph.input_nodes
        self.paddle_graph.outputs = self.graph.output_nodes

        print("Total nodes: {}".format(
            sum([
                isinstance(node, CaffeGraphNode)
                for name, node in self.graph.node_map.items()
            ])))
        print("Nodes converting ...")
        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            op = node.layer_type
            if hasattr(self, op):
                func = getattr(self, op)
                func(node)
            elif op in self.directly_map_ops:
                self.directly_map(node)
        print("\nNodes converted.")
        self.paddle_graph.set_parameters(self.params)
        self.paddle_graph.set_custom(self.used_custom_layers)

    def op_checker(self):
        unsupported_ops = set()
        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            op = node.layer_type
            if not hasattr(self, op) and \
                op not in self.directly_map_ops and \
                op not in self.elementwise_ops:
                unsupported_ops.add(op)
        if len(unsupported_ops) == 0:
            return True
        else:
            print("\n========= {} OPs are not supported yet ===========".format(
                len(unsupported_ops)))
            for op in unsupported_ops:
                print("========== {} ============".format(op))
            return False
        
    def directly_map(self, node):
        assert node.layer_type in self.directly_map_ops
        op_info = self.directly_map_ops[node.layer_type]
        input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            kernel=op_info,
            inputs={"x": input.name},
            outputs=[node.name])

    def Input(self, node):
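        # The leading dim in the prototxt shape is the batch size; it is
        # dropped here and re-added as -1 below so the generated
        # paddle.static.data accepts a dynamic batch size.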
        shape = list(node.layer.input_param.shape[0].dim)[1:]
        dtype = 'float32'
        layer_attrs = {
            "dtype": string(dtype),
            "shape": [-1] + shape,
            "name": string(node.name)
        }
        self.paddle_graph.add_layer(
            kernel="paddle.static.data",
            inputs={},
            outputs=[node.name],
            **layer_attrs)

    def Convolution(self, node):
        data = node.data
        params = node.layer.convolution_param
        channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
            node.layer_type, params)
        if data is None:
            data = []
            print(
                "The parameter of {} (type is {}) is not set, so the parameters are initialized to 0."
                .format(node.name, node.layer_type))
            input_c = node.in_shapes[0][1]
            output_c = channel
            data.append(
                np.zeros([output_c, input_c, kernel[0], kernel[1]]).astype(
                    'float32'))
            data.append(np.zeros([output_c, ]).astype('float32'))
        else:
            data = _adjust_parameters(node)
        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = data[0]
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=self.params[kernel_weight_name].shape,
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        if len(data) == 2:
            kernel_bias_name = node.name + '_bias'
            self.params[kernel_bias_name] = data[1]
            self.paddle_graph.add_layer(
                kernel="paddle.static.create_parameter",
                inputs={},
                outputs=[kernel_bias_name],
                shape=self.params[kernel_bias_name].shape,
                dtype=string(str(self.params[kernel_bias_name].dtype)),
                name=string(kernel_bias_name))
        assert len(node.inputs
                   ) == 1, 'The count of Convolution node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_inputs = {"x": input.name, 
                        "weight": kernel_weight_name}
        layer_attrs = {'stride': stride,
                       'padding': pad,
                       'dilation': dilation,
                       'groups': group}
        if len(data) == 2:
            layer_inputs["bias"] = kernel_bias_name
        else:
            layer_attrs["bias"] = None
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.conv2d",
            inputs=layer_inputs,
            outputs=[node.name],
            **layer_attrs) 
        
    def Deconvolution(self, node):
        data = node.data
        params = node.layer.convolution_param
        channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
            node.layer_type, params)
        if data is None:
            data = []
            print(
                'The parameter of {} (type is {}) is not set, so the parameters are initialized to 0.'
                .format(node.name, node.layer_type))
            input_c = node.in_shapes[0][1]
            output_c = channel
            data.append(
                np.zeros([output_c, input_c, kernel[0], kernel[1]]).astype(
                    'float32'))
            data.append(np.zeros([output_c, ]).astype('float32'))
        else:
            data = _adjust_parameters(node)
        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = data[0]
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=self.params[kernel_weight_name].shape,
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        if len(data) == 2:
            kernel_bias_name = node.name + '_bias'
            self.params[kernel_bias_name] = data[1]
            self.paddle_graph.add_layer(
                kernel="paddle.static.create_parameter",
                inputs={},
                outputs=[kernel_bias_name],
                shape=self.params[kernel_bias_name].shape,
                dtype=string(str(self.params[kernel_bias_name].dtype)),
                name=string(kernel_bias_name))
        assert len(node.inputs
                   ) == 1, 'The count of Deconvolution node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_inputs = {"x": input.name, 
                        "weight": kernel_weight_name}
        layer_attrs = {'stride': stride,
                       'padding': pad,
                       'dilation': dilation,
                       'groups': group}
        if len(data) == 2:
            layer_inputs["bias"] = kernel_bias_name
        else:
            layer_attrs["bias"] = None
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.conv2d_transpose",
            inputs=layer_inputs,
            outputs=[node.name],
            **layer_attrs)    
        
    def DepthwiseConvolution(self, node):
        node.layer_type = "ConvolutionDepthwise"
        self.ConvolutionDepthwise(node)
        
    def ConvolutionDepthwise(self, node):
        data = node.data
        params = node.layer.convolution_param
        out_channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
            node.layer_type, params)
        out_channel = params.num_output if params.num_output is not None else node.in_shapes[0][1]
        in_channel = node.in_shapes[0][1]
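        # Derive the group count from the channel ratio; in the common
        # depthwise case (in_channel == out_channel) this yields
        # group == in_channel, i.e. one filter group per input channel.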
        if in_channel > out_channel:
            group = int(in_channel / (in_channel / out_channel))
        else:
            group = int(in_channel / (out_channel / in_channel))
        if data is None:
            data = []
            print(
                "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                .format(node.layer_name, node.layer_type))
            data.append(
                np.zeros([out_channel, node.in_shapes[0][1], kernel[0], kernel[1]]).astype(
                    'float32'))
            data.append(np.zeros([out_channel, ]).astype('float32'))
        else:
            data = _adjust_parameters(node)
        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = data[0]
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=self.params[kernel_weight_name].shape,
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        if len(data) == 2:
            kernel_bias_name = node.name + '_bias'
            self.params[kernel_bias_name] = data[1]
            self.paddle_graph.add_layer(
                kernel="paddle.static.create_parameter",
                inputs={},
                outputs=[kernel_bias_name],
                shape=self.params[kernel_bias_name].shape,
                dtype=string(str(self.params[kernel_bias_name].dtype)),
                name=string(kernel_bias_name))
        assert len(node.inputs
                   ) == 1, "The count of ConvolutionDepthwise node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_inputs = {"x": input.name, 
                        "weight": kernel_weight_name}
        layer_attrs = {'stride': stride,
                       'padding': pad,
                       'dilation': dilation,
                       'groups': group}
        if len(data) == 2:
            layer_inputs["bias"] = kernel_bias_name
        else:
            layer_attrs["bias"] = None
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.conv2d",
            inputs=layer_inputs,
            outputs=[node.name],
            **layer_attrs) 

    def Pooling(self, node):
        params = node.layer.pooling_param
        ceil_mode = getattr(params, 'ceil_mode', True)
        global_pool = getattr(params, 'global_pooling', False)
        kernel_default = [1, 1]
        channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
            node.layer_type, params)
        assert len(
            node.inputs) == 1, 'The count of Pooling node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        if global_pool:
            if kernel[0] == 0:
                kernel = [1, 1]
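            # Global pooling collapses each feature map to a single value,
            # so the adaptive pool's output_size falls back to [1, 1].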
            if params.pool == 0:
                self.paddle_graph.add_layer(
                    "paddle.nn.functional.adaptive_max_pool2d",
                    inputs={"x": input.name},
                    outputs=[node.name],
                    output_size=kernel)
            else:
                self.paddle_graph.add_layer(
                    "paddle.nn.functional.adaptive_avg_pool2d",
                    inputs={"x": input.name},
                    outputs=[node.name],
                    output_size=kernel)
        else:
            if params.pool == 0:
                self.paddle_graph.add_layer(
                    kernel="paddle.nn.functional.max_pool2d",
                    inputs={"x": input.name},
                    outputs=[node.name],
                    kernel_size=kernel,
                    stride=stride,
                    padding=pad,
                    ceil_mode=ceil_mode)
            else:
                # TODO(syf): The op has diff.
                self.paddle_graph.add_layer(
                    kernel="fluid.layers.pool2d",
                    inputs={"input": input.name},
                    outputs=[node.name],
                    pool_size=kernel,
                    pool_type=string("avg"),
                    pool_stride=stride,
                    pool_padding=pad,
                    ceil_mode=ceil_mode,
                    exclusive=False,
                    global_pooling=False)

    def LRN(self, node):
        assert len(node.inputs) == 1, 'The count of LRN node\'s input is not 1.'
        params = node.layer.lrn_param
        # The window size must be an odd value. Paddle's lrn takes the full
        # window size n (= local_size) rather than a radius.
        assert params.local_size % 2 == 1
        # Caffe scales by (alpha/(2*n+1)), whereas Paddle
        # just scales by alpha (as does Krizhevsky's paper).
        # We'll account for that here.
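        # e.g. local_size = 5, alpha = 1e-4  ->  1e-4 / 5 = 2e-5 is passed on.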
        alpha = params.alpha / float(params.local_size)
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_attrs = {
            'n': params.local_size,
            'k': params.k,
            'alpha': alpha,
            'beta': params.beta,
            'name': string(node.name)
        }
        self.paddle_graph.add_layer(
            kernel="fluid.layers.lrn",
            inputs={"input": input.name},
            outputs=[node.name],
            **layer_attrs)

    def InnerProduct(self, node):
        data = node.data
        params = node.layer.inner_product_param
        if data is None:
            print(
                'The parameter of {} (type is {}) is not set, so the parameters are initialized to 0.'
                .format(node.layer_name, node.layer_type))
            input_c = node.in_shapes[0][1]
            output_c = params.num_output
            data = []
            data.append(
                np.zeros([input_c, output_c]).astype('float32'))
            data.append(np.zeros([output_c]).astype('float32'))
        else:
            data = _adjust_parameters(node)
            # Transpose the weights from Caffe's (out, in) ordering to
            # Paddle's (in, out) ordering.
            transpose_order = (1, 0)
            w = data[0]
            fc_shape = w.shape
            output_channels = fc_shape[0]
            w = w.reshape((output_channels, -1))
            w = w.transpose(transpose_order)
            data[0] = w

        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = data[0]
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=self.params[kernel_weight_name].shape,
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        if len(data) == 2:
            kernel_bias_name = node.name + '_bias'
            self.params[kernel_bias_name] = data[1]
            self.paddle_graph.add_layer(
                kernel="paddle.static.create_parameter",
                inputs={},
                outputs=[kernel_bias_name],
                shape=self.params[kernel_bias_name].shape,
                dtype=string(str(self.params[kernel_bias_name].dtype)),
                name=string(kernel_bias_name))
        assert len(node.inputs
                   ) == 1, 'The count of InnerProduct node\'s input is not 1.'
        assert params.axis == 1
        assert params.bias_term == True
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_inputs = {"x": input.name, 
                        "weight": kernel_weight_name}
        layer_attrs = dict()
        if len(data) == 2:
            layer_inputs["bias"] = kernel_bias_name
        else:
            layer_attrs["bias"] = None
        if node.in_shapes[0][-1] != data[0].shape[0]:
            self.paddle_graph.add_layer(
                "paddle.reshape",
                inputs={"x": input.name},
                outputs=[input.name],
                shape=[-1, data[0].shape[0]])
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.linear",
            inputs=layer_inputs,
            outputs=[node.name],
            **layer_attrs)

    def Softmax(self, node):
        assert len(
            node.inputs) == 1, 'The count of Softmax node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.softmax_param
        axis = params.axis
        shape = node.in_shapes[0]
        dims = len(shape)
        axis = axis + dims if axis < 0 else axis
        layer_attrs = {'axis': axis, 'name': string(node.layer_name + '_softmax')}
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.softmax",
            inputs={"x": input.name},
            outputs=[node.layer_name],
            **layer_attrs)

    def Slice(self, node):
        assert len(
            node.inputs) == 1, "The count of Slice node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        top_len = len(node.layer.top)
        params = node.layer.slice_param
        axis = params.axis
        slice_dim = params.slice_dim
        if slice_dim != 1 and axis == 1:
            axis = slice_dim
        output_shape = node.out_shapes
        sections_list = list()
        outputs_list = list()
        for i, s in enumerate(output_shape):
            sections_list.append(s[axis])
            outputs_list.append("{}_p{}".format(node.layer_name, i))
        layer_attrs = {
            'num_or_sections': sections_list,
            'axis': axis,
        }
        self.paddle_graph.add_layer(
            "paddle.split",
            inputs={"x": input.name},
            outputs=outputs_list,
            **layer_attrs)

    def Concat(self, node):
        assert len(
            node.inputs
        ) >= 1, 'The count of Concat node\'s input is less than 1.'
        inputs_list = []
        for i in range(len(node.inputs)):
            input = self.graph.get_input_node(node, idx=i, copy=True)
            inputs_list.append(input.name)
        params = node.layer.concat_param
        axis = params.axis
        layer_attrs = {'axis': axis, 'name': string(node.name)}
        self.paddle_graph.add_layer(
            kernel="paddle.concat",
            inputs={"x": inputs_list},
            outputs=[node.name],
            **layer_attrs)

    def ReLU(self, node):
        """
S
SunAhong1993 已提交
578

579 580 581 582 583
        :param node:
        :return:
        """
        assert len(
            node.inputs) == 1, 'The count of ReLU node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)

        params = node.layer.relu_param
        if params.HasField('negative_slope') and params.negative_slope != 0:
            negative_slope = float(params.negative_slope)
            self.paddle_graph.add_layer(
                kernel="paddle.nn.functional.leaky_relu",
                inputs={"x": input.name},
                outputs=[node.name],
                negative_slope=negative_slope)
        else:
            self.paddle_graph.add_layer(
                kernel="paddle.nn.functional.relu",
                inputs={"x": input.name},
                outputs=[node.name])

    def PReLU(self, node):
        assert len(
            node.inputs) == 1, 'The count of PReLU node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.prelu_param
        mode_bool = params.channel_shared
        output_shape = node.out_shapes[0]
        if mode_bool:
            num_parameters = 1
        else:
            num_parameters = output_shape[1]
        data = node.data
        assert data is not None, 'The parameter of {} (type is {}) is not set. You need to use python package of caffe to set the default value.'.format(
            node.name, node.layer_type)
        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = np.squeeze(data[0])
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=[num_parameters],
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.prelu",
            inputs={"x": input.name,
                    "weight": kernel_weight_name},
            outputs=[node.name])

    def Eltwise(self, node):
        assert len(
            node.inputs) == 2, "The count of Eltwise node\'s input is not 2."
        params = node.layer.eltwise_param
        mode = params.operation
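        # Caffe EltwiseOp codes: 0 = PROD, 1 = SUM, 2 = MAX.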
        inputs = []
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        input0_name = input0.name
        input1_name = input1.name
        if mode == 0:
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = input1_name
            self.paddle_graph.add_layer(
                "paddle.multiply",
                inputs=inputs_dict,
                outputs=[node.name])
        elif mode == 1:
            if hasattr(params, 'coeff') and len(params.coeff) == 2:
                coeff = params.coeff
                self.paddle_graph.add_layer(
                    "paddle.scale",
                    inputs={"x": input0_name},
                    outputs=[node.name + '_mul0'],
                    scale=coeff[0])
                self.paddle_graph.add_layer(
                    "paddle.scale",
                    inputs={"x": input1_name},
                    outputs=[node.name + '_mul1'],
                    scale=coeff[1])
                inputs_dict = {}
                inputs_dict['x'] = node.name + '_mul0'
                inputs_dict['y'] = node.name + '_mul1'
                self.paddle_graph.add_layer(
                    "paddle.add",
                    inputs=inputs_dict,
                    outputs=[node.name])
            else:
                inputs_dict = {}
                inputs_dict['x'] = input0_name
                inputs_dict['y'] = input1_name
                self.paddle_graph.add_layer(
                    "paddle.add",
                    inputs=inputs_dict,
                    outputs=[node.name])
        else:
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = input1_name
            self.paddle_graph.add_layer(
                "paddle.maximum",
                inputs=inputs_dict,
                outputs=[node.name])

    def BatchNorm(self, node):
        assert len(
            node.inputs) == 1, 'The count of BatchNorm node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.batch_norm_param
        if hasattr(params, 'eps'):
            eps = params.eps
        else:
            eps = 1e-5
        if hasattr(params, 'moving_average_fraction'):
            momentum = params.moving_average_fraction
        else:
            momentum = 0.9
        if node.data is None or len(node.data) != 3:
            print(
                'The parameter of {} (type is {}) is not set, so the parameters are initialized to 0.'
                .format(node.layer_name, node.layer_type))
            input_c = node.in_shapes[0][1]
            mean = np.zeros([input_c, ]).astype('float32')
            variance = np.zeros([input_c, ]).astype('float32')
            scale = 0
        else:

            node.data = [np.squeeze(i).astype('float32') for i in node.data]
            mean, variance, scale = node.data
        # Prescale the stats
        scaling_factor = 1.0 / scale if scale != 0 else 0
        mean *= scaling_factor
        variance *= scaling_factor
        weight_name = node.name + '_weight'
        self.paddle_graph.add_layer(
            kernel="paddle.ones",
            inputs={},
            outputs=[weight_name],
            shape=mean.shape,
            dtype=string("float32"))
        bias_name = node.name + '_bias'
        self.paddle_graph.add_layer(
            kernel="paddle.zeros",
            inputs={},
            outputs=[bias_name],
            shape=mean.shape,
            dtype=string("float32"))
        mean_name = node.name + '_mean'
        self.params[mean_name] = mean
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[mean_name],
            shape=self.params[mean_name].shape,
            dtype=string(str(self.params[mean_name].dtype)),
            name=string(mean_name))
        variance_name = node.name + '_variance'
        self.params[variance_name] = variance
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[variance_name],
            shape=self.params[variance_name].shape,
            dtype=string(str(self.params[variance_name].dtype)),
            name=string(variance_name))
        layer_attrs = {
            'epsilon': eps,
            'momentum': momentum
        }
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.batch_norm",
            inputs={"x": input.name,
                    "weight": weight_name,
                    "bias": bias_name,
                    "running_mean": mean_name,
                    "running_var": variance_name,},
            outputs=[node.name],
            **layer_attrs)

    def Scale(self, node):
        if node.data is None:
            print(
                "The parameter of {} (type is {}) is not set, so the parameters are initialized to 0."
                .format(node.name, node.layer_type))
            self.params[node.name + "_cparam1"] = np.zeros([
                node.in_shapes[0][1],
            ]).astype("float32")
            self.params[node.name + "_cparam2"] = np.zeros([
                node.in_shapes[0][1],
            ]).astype("float32")
        else:
            self.params[node.name + "_cparam1"] = np.squeeze(node.data[
                0]).astype("float32")
            self.params[node.name + "_cparam2"] = np.squeeze(node.data[
                1]).astype("float32")
        params = node.layer.scale_param
        axis = params.axis
        inputs = []
        if len(node.inputs) == 2:
            input0 = self.graph.get_input_node(node, idx=0, copy=True)
            input1 = self.graph.get_input_node(node, idx=1, copy=True)
            input0_name = input0.name
            input1_name = input1.name
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = input1_name
            self.paddle_graph.add_layer(
                "paddle.multiply",
                inputs=inputs_dict,
                outputs=[node.name + "_mul"],
                axis=1)
        else:
            self.paddle_graph.add_layer(
                "paddle.static.create_parameter",
                inputs={},
                outputs=[node.name + "_cparam1"],
                shape=self.params[node.name + "_cparam1"].shape,
                dtype=string(str(self.params[node.name + "_cparam1"].dtype)),
                name=string(node.name + "_cparam1"))
            input0 = self.graph.get_input_node(node, idx=0, copy=True)
            input0_name = input0.name
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = node.name + "_cparam1"
            self.paddle_graph.add_layer(
                "paddle.multiply",
                inputs=inputs_dict,
                outputs=[node.name + "_mul"],
                axis=axis)
        self.paddle_graph.add_layer(
            "paddle.static.create_parameter",
            inputs={},
            outputs=[node.name + "_cparam2"],
            shape=self.params[node.name + "_cparam2"].shape,
            dtype=string(str(self.params[node.name + "_cparam2"].dtype)),
            name=string(node.name + "_cparam2"))
        inputs_dict = {}
        inputs_dict['x'] = node.name + "_mul"
        inputs_dict['y'] = node.name + "_cparam2"
        output_shape = node.out_shapes[0]
        if axis == -1:
            self.paddle_graph.add_layer(
                "paddle.add",
                inputs=inputs_dict,
                outputs=[node.name])
        else:
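            # Pad the bias shape with trailing 1s so paddle.add can broadcast
            # it against the scaled input along `axis`.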
            if axis < 0:
                axis = axis + len(output_shape)
            param2_shape = self.params[node.name + "_cparam2"].shape
            param2_shape_len = len(param2_shape)
            diff_len = len(output_shape) - axis - param2_shape_len
            new_shape = list(param2_shape) + [1] * diff_len
            self.paddle_graph.add_layer(
                "paddle.reshape",
                inputs={"x": node.name + "_cparam2"},
                outputs=[node.name + "_cparam2"],
                shape=new_shape)
            self.paddle_graph.add_layer(
                "paddle.add",
                inputs=inputs_dict,
                outputs=[node.name])
        

    def Reshape(self, node):
        input = self.graph.get_input_node(node, idx=0, copy=True)
        output_shape = node.out_shapes[0]
        self.paddle_graph.add_layer(
            "paddle.reshape",
            inputs={"x": input.name},
            outputs=[node.name],
            shape=output_shape)

    def ArgMax(self, node):
        assert len(node.inputs) == 1 and len(
            node.outputs
        ) == 1, "The count of ArgMax node\'s input and output is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        in_shapes = node.in_shapes[0]
        params = node.layer.argmax_param
        out_max_val = params.out_max_val if hasattr(params,
                                                    'out_max_val') else False
        top_k = params.top_k if hasattr(params, 'top_k') else 1
        axis = params.axis if hasattr(params, 'axis') else -1
        if axis < 0:
            axis += len(in_shapes)
        if out_max_val is True:
            self.paddle_graph.add_layer(
                "paddle.topk",
                inputs={"x": input.name},
                outputs=[node.name + "_topk_var", node.name + "_index_var"],
                k=top_k)
            self.paddle_graph.add_layer(
                "paddle.cast",
                inputs={"x": node.name + "_index_var"},
                outputs=[node.name + "_index_var"],
                dtype="{}_topk_var.dtype".format(node.name))
            self.paddle_graph.add_layer(
                "paddle.concat",
                inputs={"x": [node.name + "_topk_var", node.name + "_index_var"]},
                outputs=[node.name],
                axis=axis)
        else:
            self.paddle_graph.add_layer(
                "paddle.topk",
                inputs={"x": input.name},
                outputs=["_", node.name],
                k=top_k)

    def Crop(self, node):
        assert len(
            node.inputs) == 2, "The count of Crop node\'s input is not 2."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        example = self.graph.get_input_node(node, idx=1, copy=True)
        params = node.layer.crop_param
        axis = params.axis
        in_shapes = node.in_shapes[0]
        if axis < 0:
            axis += len(in_shapes)
        offset_real = [0] * len(in_shapes)
        if hasattr(params, "offset") and len(params.offset) > 0:
            offset = list(params.offset)
            assert (len(in_shapes) - axis
                    ) == len(offset), "invalid offset[%s] in crop layer" % (
                        str(offset))
            offset_real = [0] * axis + offset
        self.paddle_graph.add_layer(
                "paddle.crop",
                inputs={"x": input.name},
                outputs=[node.name],
                shape=node.in_shapes[1],
                offsets=list(offset_real))

        
    def Flatten(self, node):
        assert len(
            node.
            inputs) == 1, "The count of Flatten node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.reshape",
            inputs={"x": input.name},
            outputs=[node.name],
            shape=node.out_shapes[0])

    def Power(self, node):
        assert len(
            node.inputs) == 1, "The count of Power node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.power_param
        layer_attrs = {
            'scale': params.scale,
            'bias': params.shift,
            'bias_after_scale': True
        }
        self.paddle_graph.add_layer(
            "paddle.scale",
            inputs={"x": input.name},
            outputs=[node.name],
            **layer_attrs)
        self.paddle_graph.add_layer(
            "paddle.pow",
            inputs={"x": node.name},
            outputs=[node.name],
            exponent=params.power)

    def Reduction(self, node):
        assert len(
            node.inputs) == 1, "The count of Reduction node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.reduction_param
        operation = params.operation
        axis = params.axis
        coeff = params.coeff
        assert operation >= 1 and operation <= 4, "invalid reduction operation [%s]" % (
            operation)
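        # Caffe ReductionOp codes: 1 = SUM, 2 = ASUM, 3 = SUMSQ, 4 = MEAN.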
        input_len = len(node.in_shapes[0])
        if axis < 0:
            axis += input_len + 1
        dim = list(range(input_len))
        # operation = SUM
        if operation == 1:  
            layer_attrs = {
                "dim": dim[axis:],
                "keep_dim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.sum",
                inputs={"input": input.name},
                outputs=[node.name],
                **layer_attrs)
        # operation = ASUM
        elif operation == 2:  
            self.paddle_graph.add_layer(
                "paddle.abs",
                inputs={"x": input.name},
                outputs=[node.name])
            layer_attrs = {
                "dim": dim[axis:],
                "keep_dim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.sum",
                inputs={"input": node.name},
                outputs=[node.name],
                **layer_attrs)
        # operation = SUMSQ
        elif operation == 3: 
            self.paddle_graph.add_layer(
                "paddle.pow",
                inputs={"x": input.name},
                outputs=[node.name],
                exponent=2.0)
            layer_attrs = {
                "dim": dim[axis:],
                "keep_dim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.sum",
                inputs={"input": node.name},
                outputs=[node.name],
                **layer_attrs)
        # operation = MEAN
        else: 
            layer_attrs = {
                "dim": dim[axis:],
                "keep_dim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.mean",
                inputs={"input": input.name},
                outputs=[node.name],
                **layer_attrs)
        self.paddle_graph.add_layer(
            "paddle.scale",
            inputs={"x": node.name},
            outputs=[node.name],
            scale=coeff)
        
    def Axpy(self, node):
        assert len(node.inputs) == 3 and len(
            node.outputs
        ) == 1, "The count of Axpy node\'s input is not 3 or output is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.axpy_param
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        input2 = self.graph.get_input_node(node, idx=2, copy=True)
        input0_name = input0.name
        input1_name = input1.name
        input2_name = input2.name
        inputs_dict = {}
        inputs_dict['x'] = input1_name
        inputs_dict['y'] = input0_name
        self.paddle_graph.add_layer(
            "paddle.multiply",
            inputs=inputs_dict,
            outputs=[node.name + "_mul"],
            axis=0)
        inputs_dict = {}
        inputs_dict['x'] = node.name + "_mul"
        inputs_dict['y'] = input2_name
        self.paddle_graph.add_layer(
            "paddle.add",
            inputs=inputs_dict,
            outputs=[node.name + "_mul"])
        
    def DetectionOutput(self, node):
        assert len(
            node.inputs) == 3, "The count of DetectionOutput node\'s input is not 3."
        inputs_dict = dict()
        for i in range(len(node.inputs)):
            input = self.graph.get_input_node(node, idx=i, copy=True)
            if i == 1:
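                # Walk back to the Softmax/Sigmoid that produces the
                # confidence input, then use that activation's own input
                # (the raw scores) instead.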
                input = self.graph.get_input_node(node, idx=i, copy=True)
                while input is not None \
                      and input.layer_type != 'Softmax' \
                      and input.layer_type != 'Sigmoid':
                    input = self.graph.get_input_node(input, idx=0, copy=True)
                assert input is not None, 'This kind of DetectionOutput is not supported!'
                input = self.graph.get_input_node(input, idx=0, copy=True)
            inputs_dict["x{}".format(i)] = input.name
        params = node.layer.detection_output_param
        nms_param = params.nms_param
        nms_param_dict = dict()
        if nms_param is None:
            nms_param_dict = {"nms_threshold": 0.3, "top_k": 10, "eta": 1.0}
        else:
            nms_param_dict["nms_threshold"] = nms_param.nms_threshold
            nms_param_dict["top_k"] = nms_param.top_k
            nms_param_dict["eta"] = nms_param.eta
        default = {"nms_threshold": 0.3, "top_k": 10, "eta": 1.0}
        for f in default.keys():
            if f not in nms_param_dict:
                nms_param_dict[f] = default[f]
        layer_attrs = {
            "background_label": params.background_label_id,
            "nms_threshold": nms_param_dict["nms_threshold"],
            "nms_top_k": nms_param_dict["top_k"],
            "keep_top_k": params.keep_top_k,
            "score_threshold": params.confidence_threshold,
            "nms_eta": nms_param_dict["eta"]}
        self.paddle_graph.add_layer(
            kernel="custom_layer:detectionoutput",
            inputs=inputs_dict,
            outputs=[node.name],
            **layer_attrs)

    def Normalize(self, node):
        assert len(
            node.inputs) == 1, "The count of Normalize node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.norm_param
        scale_name = node.name + "_scale"
        if node.data is None or len(node.data) != 1:
            print(
                "The parameter of {} (type is {}) is not set, so the parameters are initialized to 0."
                .format(scale_name, node.layer_type))
            self.params[scale_name] = \
                np.zeros([1] if params.channel_shared else [1, 1, 1, node.in_shapes[0][1]]).astype("float32")
        else:
            self.params[scale_name] = _adjust_parameters(node)[0]

        layer_attrs = {
            "axis": -1 if params.channel_shared else 1,
            "param_name": scale_name,
            "param_shape": self.params[scale_name].shape,
            "param_dtype": str(self.params[scale_name].dtype)}
        self.paddle_graph.add_layer(
            "custom_layer:normalize",
            inputs={"x": input.name},
            outputs=[node.name],
            **layer_attrs)
        
    def Permute(self, node):
        assert len(
            node.inputs) == 1, "The count of Permute node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.permute_param
        order = list(params.order)    
        self.paddle_graph.add_layer(
            "paddle.transpose",
            inputs={"x": input.name},
            outputs=[node.name],
            perm=order)
        
    def PriorBox(self, node):
        assert len(
            node.inputs) == 2, "The count of PriorBox node\'s input is not 2."
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        inputs_dict = {}
        inputs_dict["x0"] = input0.name
        inputs_dict["x1"] = input1.name
        params = node.layer.prior_box_param
        steps = tuple(params.step) if isinstance(params.step, (list, tuple)) \
                else (params.step, params.step)
        layer_attrs = {
            "min_sizes": params.min_size,
            "max_sizes": params.max_size,
            "aspect_ratios": params.aspect_ratio,
            "variance": params.variance,
            "flip": params.flip,
            "clip": params.clip,
            "steps": steps,
            "offset": params.offset,
            "min_max_aspect_ratios_order": True}
        self.paddle_graph.add_layer(
            "custom_layer:priorbox",
            inputs=inputs_dict,
            outputs=[node.name],
            **layer_attrs)
        
    def ReLU6(self, node):
        assert len(
            node.inputs) == 1, "The count of RelU6 node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.nn.functional.relu6",
            inputs={"x": input.name},
            outputs=[node.name])
        
    def ROIPooling(self, node):
        assert len(
            node.inputs) == 2, "The count of ROIPooling node\'s input is not 2."
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        inputs_dict = {}
        inputs_dict["x0"] = input0.name
        inputs_dict["x1"] = input1.name
        params = node.layer.roi_pooling_param
        layer_attrs = {
            "pooled_height": params.pooled_h,
            "pooled_width": params.pooled_w,
            "spatial_scale": params.spatial_scale}
        self.paddle_graph.add_layer(
            "custom_layer:ROIPooling",
            inputs=inputs_dict,
            outputs=[node.name],
            **layer_attrs)
        
    def ShuffleChannel(self, node):
        assert len(
            node.inputs) == 1, "The count of ShuffleChannel node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.shuffle_channel_param
        self.paddle_graph.add_layer(
            "fluid.layers.shuffle_channel",
            inputs={"x": input.name},
            outputs=[node.layer_name],
            group=params.group)
        
    def Upsample(self, node):
        assert len(
            node.inputs) == 1, "The count of Upsample node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.upsample_param
        layer_attrs = {
            "align_corners": False,
            "scale_factor": params.scale,
            "mode": "nearest"}
        self.paddle_graph.add_layer(
            "paddle.nn.functioanl.interpolate",
            inputs={"input": input.name},
            outputs=[node.layer_name],
            **layer_attrs)
    
    def Select(self, node):
        assert len(
            node.inputs) == 1, "The count of Select node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        in_shapes = node.in_shapes[0]
        params = node.layer.select_param
        layer_attrs = {
            "in_shapes": in_shapes,
            "point": params.slice_point,
            "axis": params.axis}
        self.paddle_graph.add_layer(
            "custom_layer:select",
            inputs={"x": input.name},
            outputs=[node.name],
            **layer_attrs)