#   Copyright (c) 2019  PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import numbers
import copy
import numpy as np
from x2paddle.decoder.caffe_decoder import CaffeGraph, CaffeGraphNode
from x2paddle.core.op_mapper import OpMapper
from x2paddle.core.util import *
from x2paddle.core.program import PaddleGraph


def adjust_parameters(node):
    data = node.data
    # When using the protobuf-backend, each parameter initially has four dimensions.
    # In certain cases (like FC layers), we want to eliminate the singleton dimensions.
    # This implementation takes care of the common cases. However, it does leave the
    # potential for future issues.
    # The Caffe-backend does not suffer from this problem.
    data = list(data)

    squeeze_indices = [1]  # Squeeze biases.
    if node.layer_type == 'InnerProduct':
        squeeze_indices.append(0)  # Squeeze FC.

    for idx in squeeze_indices:
        if idx >= len(data):
            continue

        d = data[idx]
        assert len(
            d.shape
        ) == 4, 'invalid shape[%s] from caffe when adjust_parameters' % (
            str(d.shape))

        shape_old = d.shape
        sq_axis = None
        if idx == 0:
            sq_axis = (0, 1)
        elif idx == 1:
            sq_axis = (0, 1, 2)
        else:
            continue

        data[idx] = np.squeeze(d, axis=sq_axis)
        shape_new = data[idx].shape
    return data
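
# Illustration (hypothetical shapes): with the protobuf backend, an
# InnerProduct layer's weights may arrive as (1, 1, 4096, 1024) and its bias
# as (1, 1, 1, 1024); adjust_parameters squeezes them to (4096, 1024) and
# (1024,) respectively.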

def get_kernel_parameters(kind, params):
    assert kind in ['Convolution', 'Pooling', 'Deconvolution']
    [k_h, k_w] = [1, 1]
    if isinstance(params.kernel_size, numbers.Number):
        [k_h, k_w] = [params.kernel_size] * 2
    elif len(params.kernel_size) > 0:
        k_h = params.kernel_h if params.kernel_h > 0 else params.kernel_size[
            0]
        k_w = params.kernel_w if params.kernel_w > 0 else params.kernel_size[
            len(params.kernel_size) - 1]
    elif params.kernel_h > 0 or params.kernel_w > 0:
        k_h = params.kernel_h
        k_w = params.kernel_w
    [s_h, s_w] = [1, 1]
    if isinstance(params.stride, numbers.Number):
        [s_h, s_w] = [params.stride] * 2
    elif len(params.stride) > 0:
        s_h = params.stride_h if params.stride_h > 0 else params.stride[0]
        s_w = params.stride_w if params.stride_w > 0 else params.stride[len(
            params.stride) - 1]
    elif params.stride_h > 0 or params.stride_w > 0:
        s_h = params.stride_h
        s_w = params.stride_w
    [p_h, p_w] = [0, 0]
    if isinstance(params.pad, numbers.Number):
        [p_h, p_w] = [params.pad] * 2
    elif len(params.pad) > 0:
        p_h = params.pad_h if params.pad_h > 0 else params.pad[0]
        p_w = params.pad_w if params.pad_w > 0 else params.pad[len(
            params.pad) - 1]
    elif params.pad_h > 0 or params.pad_w > 0:
        p_h = params.pad_h
        p_w = params.pad_w
    dila_h = dila_w = 1
    group = 1
    c_o = 1
    if kind in ['Convolution', 'Deconvolution']:
        c_o = params.num_output
        group = params.group
        dila_len = len(params.dilation)
        if dila_len == 2:
            dila_h = params.dilation[0]
            dila_w = params.dilation[1]
        elif dila_len == 1:
            dila_h = dila_w = params.dilation[0]
        else:
            assert dila_len == 0, "invalid length[%s] of dilation in convolution" % (
                dila_len)
    kernel = [k_h, k_w]
    stride = [s_h, s_w]
    pad = [p_h, p_w]
    dilation = [dila_h, dila_w]
    return c_o, kernel, stride, pad, dilation, group
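
# Example (hypothetical prototxt): a Convolution layer with kernel_size: 3,
# stride: 2, pad: 1, num_output: 64 and default dilation/group yields
# c_o=64, kernel=[3, 3], stride=[2, 2], pad=[1, 1], dilation=[1, 1], group=1.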


class CaffeOpMapper(OpMapper):
    directly_map_ops = {
        'AbsVal': 'paddle.abs',
        'Sigmoid': 'paddle.nn.functional.sigmoid',
        'TanH': 'paddle.tanh',
    }

    def __init__(self, decoder):
        super(CaffeOpMapper, self).__init__()
        self.graph = decoder.caffe_graph
        self.params = dict()
        resolver = decoder.resolver
        self.used_custom_layers = {}
        self.paddle_graph = PaddleGraph(parent_layer=None, graph_type="static", source_type="caffe")
        self.paddle_graph.inputs = self.graph.input_nodes
        self.paddle_graph.outputs = self.graph.output_nodes

        print("Total nodes: {}".format(
            sum([
                isinstance(node, CaffeGraphNode)
                for name, node in self.graph.node_map.items()
            ])))
        print("Nodes converting ...")
        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            op = node.layer_type
            if hasattr(self, op):
                func = getattr(self, op)
                func(node)
            elif op in self.directly_map_ops:
                self.directly_map(node)
        print("\nNodes converted.")
        self.paddle_graph.set_parameters(self.params)
        self.paddle_graph.set_custom(self.used_custom_layers)

    def op_checker(self):
        unsupported_ops = set()
        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            op = node.layer_type
            if not hasattr(self, op) and \
                op not in self.directly_map_ops:
                unsupported_ops.add(op)
        if len(unsupported_ops) == 0:
            return True
        else:
            print("\n========= {} OPs are not supported yet ===========".format(
                len(unsupported_ops)))
            for op in unsupported_ops:
                print("========== {} ============".format(op))
            return False

    def directly_map(self, node):
        assert node.layer_type in self.directly_map_ops
        op_info = self.directly_map_ops[node.layer_type]
        input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            kernel=op_info,
            inputs={"x": input.name},
            outputs=[node.name])
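
    # Example (hypothetical layer name): a Caffe 'TanH' layer named 'tanh1'
    # is mapped to a single paddle.tanh call whose output variable is 'tanh1'.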

    def Input(self, node):
        shape = list(node.layer.input_param.shape[0].dim)[1:]
        dtype = 'float32'
        layer_attrs = {
            "dtype": string(dtype),
            "shape": [-1] + shape,
            "name": string(node.name)
        }
        self.paddle_graph.add_layer(
            kernel="paddle.static.data",
            inputs={},
            outputs=[node.name],
            **layer_attrs)
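        # For an input_param shape of (1, 3, 224, 224) and a layer named
        # 'data' (hypothetical), this emits:
        # paddle.static.data(name='data', shape=[-1, 3, 224, 224],
        #                    dtype='float32')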

    def Convolution(self, node):
        data = node.data
        params = node.layer.convolution_param
        channel, kernel, stride, pad, dilation, group = get_kernel_parameters(
            node.layer_type, params)
        if data is None:
            data = []
            print(
                "The parameters of {} (type is {}) are not set, so they are initialized to zero."
                .format(node.name, node.layer_type))
            input_c = node.in_shapes[0][1]
            output_c = channel
            data.append(
                np.zeros([output_c, input_c, kernel[0], kernel[1]]).astype(
                    'float32'))
            data.append(np.zeros([output_c, ]).astype('float32'))
        else:
            data = adjust_parameters(node)
        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = data[0]
        self.paddle_graph.add_layer(
            kernel="paddle.static.nn.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=self.params[kernel_weight_name].shape,
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        if len(data) == 2:
            kernel_bias_name = node.name + '_bias'
            self.params[kernel_bias_name] = data[1]
            self.paddle_graph.add_layer(
                kernel="paddle.static.nn.create_parameter",
                inputs={},
                outputs=[kernel_bias_name],
                shape=self.params[kernel_bias_name].shape,
                dtype=string(str(self.params[kernel_bias_name].dtype)),
                name=string(kernel_bias_name))
        assert len(node.inputs
                   ) == 1, 'The count of Convolution node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_inputs = {"x": input.name,
                        "weight": kernel_weight_name}
        layer_attrs = {'stride': stride,
                       'padding': pad,
                       'dilation': dilation,
                       'groups': group}
        if len(data) == 2:
            layer_inputs["bias"] = kernel_bias_name
        else:
            layer_attrs["bias"] = None
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.conv2d",
            inputs=layer_inputs,
            outputs=[node.name],
            **layer_attrs) 
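        # Note: Caffe convolution weights are stored as
        # (num_output, channels / group, k_h, k_w), which matches the weight
        # layout paddle.nn.functional.conv2d expects, so data[0] is used
        # unchanged.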

    def Deconvolution(self, node):
        data = node.data
        params = node.layer.convolution_param
        channel, kernel, stride, pad, dilation, group = get_kernel_parameters(
            node.layer_type, params)
        if data is None:
            data = []
            print(
                'The parameters of {} (type is {}) are not set, so they are initialized to zero.'
                .format(node.name, node.layer_type))
            input_c = node.in_shapes[0][1]
            output_c = channel
            data.append(
                np.zeros([output_c, input_c, kernel[0], kernel[1]]).astype(
                    'float32'))
            data.append(np.zeros([output_c, ]).astype('float32'))
        else:
            data = adjust_parameters(node)
        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = data[0]
        self.paddle_graph.add_layer(
            kernel="paddle.static.nn.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=self.params[kernel_weight_name].shape,
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        if len(data) == 2:
            kernel_bias_name = node.name + '_bias'
            self.params[kernel_bias_name] = data[1]
            self.paddle_graph.add_layer(
                kernel="paddle.static.nn.create_parameter",
                inputs={},
                outputs=[kernel_bias_name],
                shape=self.params[kernel_bias_name].shape,
                dtype=string(str(self.params[kernel_bias_name].dtype)),
                name=string(kernel_bias_name))
        assert len(node.inputs
                   ) == 1, 'The count of Deconvolution node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_inputs = {"x": input.name,
                        "weight": kernel_weight_name}
        layer_attrs = {'stride': stride,
                       'padding': pad,
                       'dilation': dilation,
                       'groups': group}
        if len(data) == 2:
            layer_inputs["bias"] = kernel_bias_name
        else:
            layer_attrs["bias"] = None
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.conv2d_transpose",
            inputs=layer_inputs,
            outputs=[node.name],
            **layer_attrs)

    def DepthwiseConvolution(self, node):
        node.layer_type = "ConvolutionDepthwise"
        self.ConvolutionDepthwise(node)

    def ConvolutionDepthwise(self, node):
        data = node.data
        params = node.layer.convolution_param
        out_channel, kernel, stride, pad, dilation, group = get_kernel_parameters(
            node.layer_type, params)
        out_channel = params.num_output if params.num_output is not None else node.in_shapes[0][1]
        in_channel = node.in_shapes[0][1]
        if in_channel > out_channel:
            group = int(in_channel / (in_channel / out_channel))
        else:
            group = int(in_channel / (out_channel / in_channel))
        if data is None:
            data = []
            print(
                "The parameters of {} (type is {}) are not set, so they are initialized to zero."
                .format(node.layer_name, node.layer_type))
            data.append(
                np.zeros([out_channel, node.in_shapes[0][1], kernel[0], kernel[1]]).astype(
                    'float32'))
            data.append(np.zeros([out_channel, ]).astype('float32'))
        else:
            data = adjust_parameters(node)
        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = data[0]
        self.paddle_graph.add_layer(
            kernel="paddle.static.nn.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=self.params[kernel_weight_name].shape,
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        if len(data) == 2:
            kernel_bias_name = node.name + '_bias'
            self.params[kernel_bias_name] = data[1]
            self.paddle_graph.add_layer(
                kernel="paddle.static.nn.create_parameter",
                inputs={},
                outputs=[kernel_bias_name],
                shape=self.params[kernel_bias_name].shape,
                dtype=string(str(self.params[kernel_bias_name].dtype)),
                name=string(kernel_bias_name))
        assert len(node.inputs
                   ) == 1, "The count of ConvolutionDepthwise node's input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_inputs = {"x": input.name,
                        "weight": kernel_weight_name}
        layer_attrs = {'stride': stride,
                       'padding': pad,
                       'dilation': dilation,
                       'groups': group}
        if len(data) == 2:
            layer_inputs["bias"] = kernel_bias_name
        else:
            layer_attrs["bias"] = None
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.conv2d",
            inputs=layer_inputs,
            outputs=[node.name],
            **layer_attrs)

    def Pooling(self, node):
        params = node.layer.pooling_param
        ceil_mode = getattr(params, 'ceil_mode', True)
        global_pool = getattr(params, 'global_pooling', False)
        channel, kernel, stride, pad, dilation, group = get_kernel_parameters(
            node.layer_type, params)
        assert len(
            node.inputs) == 1, 'The count of Pooling node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        if global_pool:
            if kernel[0] == 0:
                kernel = [1, 1]
            if params.pool == 0:
                self.paddle_graph.add_layer(
                    "paddle.nn.functional.adaptive_max_pool2d",
                    inputs={"x": input.name},
                    outputs=[node.name],
                    output_size=kernel)
            else:
                self.paddle_graph.add_layer(
                    "paddle.nn.functional.adaptive_avg_pool2d",
                    inputs={"x": input.name},
                    outputs=[node.name],
                    output_size=kernel)
        else:
            if params.pool == 0:
                self.paddle_graph.add_layer(
                    kernel="paddle.nn.functional.max_pool2d",
                    inputs={"x": input.name},
                    outputs=[node.name],
                    kernel_size=kernel,
                    stride=stride,
                    padding=pad,
                    ceil_mode=ceil_mode)
            else:
                # TODO(syf): The op has diff.
                self.paddle_graph.add_layer(
                    kernel="fluid.layers.pool2d",
                    inputs={"input": input.name},
                    outputs=[node.name],
                    pool_size=kernel,
                    pool_type=string("avg"),
                    pool_stride=stride,
                    pool_padding=pad,
                    ceil_mode=ceil_mode,
                    exclusive=False,
                    global_pooling=False)

    def LRN(self, node):
        assert len(node.inputs) == 1, 'The count of LRN node\'s input is not 1.'
        params = node.layer.lrn_param
        # The window size (local_size) must be an odd value.
        assert params.local_size % 2 == 1
        # Caffe scales by (alpha / local_size), whereas Paddle
        # just scales by alpha (as does Krizhevsky's paper).
        # We'll account for that here.
        alpha = params.alpha / float(params.local_size)
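        # e.g. local_size = 5, alpha = 1e-4 gives Paddle n = 5, alpha = 2e-5.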
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_attrs = {
            'n': params.local_size,
            'k': params.k,
            'alpha': alpha,
            'beta': params.beta,
            'name': string(node.name)
        }
        self.paddle_graph.add_layer(
            kernel="fluid.layers.lrn",
            inputs={"input": input.name},
            outputs=[node.name],
            **layer_attrs)

    def InnerProduct(self, node):
        data = node.data
        params = node.layer.inner_product_param
        if data is None:
            print(
                'The parameters of {} (type is {}) are not set, so they are initialized to zero.'
                .format(node.layer_name, node.layer_type))
            input_c = node.in_shapes[0][1]
            output_c = params.num_output
            data = []
            data.append(
                np.zeros([input_c, output_c]).astype('float32'))
            data.append(
                np.zeros([output_c]).astype('float32'))
        else:
            data = adjust_parameters(node)
            # Reshape the parameters to Paddle's ordering
            transpose_order = (1, 0)
            w = data[0]
            fc_shape = w.shape
            output_channels = fc_shape[0]
            w = w.reshape((output_channels, -1))
            w = w.transpose(transpose_order)
            data[0] = w

        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = data[0]
        self.paddle_graph.add_layer(
            kernel="paddle.static.nn.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=self.params[kernel_weight_name].shape,
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        if len(data) == 2:
            kernel_bias_name = node.name + '_bias'
            self.params[kernel_bias_name] = data[1]
            self.paddle_graph.add_layer(
                kernel="paddle.static.nn.create_parameter",
                inputs={},
                outputs=[kernel_bias_name],
                shape=self.params[kernel_bias_name].shape,
                dtype=string(str(self.params[kernel_bias_name].dtype)),
                name=string(kernel_bias_name))
        assert len(node.inputs
                   ) == 1, 'The count of InnerProduct node\'s input is not 1.'
        assert params.axis == 1
        assert params.bias_term == True
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_inputs = {"x": input.name,
                        "weight": kernel_weight_name}
        layer_attrs = dict()
        if len(data) == 2:
            layer_inputs["bias"] = kernel_bias_name
        else:
            layer_attrs["bias"] = None
        if node.in_shapes[0][-1] != data[0].shape[0]:
            self.paddle_graph.add_layer(
                "paddle.reshape",
                inputs={"x": input.name},
                outputs=[input.name],
                shape=[-1, data[0].shape[0]])
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.linear",
            inputs=layer_inputs,
            outputs=[node.name],
            **layer_attrs)

    def Softmax(self, node):
        assert len(
            node.inputs) == 1, 'The count of Softmax node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.softmax_param
        axis = params.axis
        shape = node.in_shapes[0]
        dims = len(shape)
        axis = axis + dims if axis < 0 else axis
        layer_attrs = {'axis': axis, 'name': string(node.layer_name + '_softmax')}
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.softmax",
            inputs={"x": input.name},
            outputs=[node.layer_name],
            **layer_attrs)

    def Slice(self, node):
        assert len(
            node.inputs) == 1, "The count of Slice node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.slice_param
        axis = params.axis
        slice_dim = params.slice_dim
        if slice_dim != 1 and axis == 1:
            axis = slice_dim
        output_shape = node.out_shapes
        sections_list = list()
        outputs_list = list()
        for i, s in enumerate(output_shape):
            sections_list.append(s[axis])
            outputs_list.append("{}_p{}".format(node.layer_name, i))
        layer_attrs = {
            'num_or_sections': sections_list,
            'axis': axis,
        }
        self.paddle_graph.add_layer(
            "paddle.split",
            inputs={"x": input.name},
            outputs=outputs_list,
            **layer_attrs)
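        # e.g. splitting a (N, 12, H, W) blob into three tops along axis 1
        # emits paddle.split(x, num_or_sections=[4, 4, 4], axis=1).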

    def Concat(self, node):
        assert len(
            node.inputs
        ) >= 1, 'The count of Concat node\'s input is less than 1.'
        inputs_list = []
        for i in range(len(node.inputs)):
            input = self.graph.get_input_node(node, idx=i, copy=True)
            inputs_list.append(input.name)
        params = node.layer.concat_param
        axis = params.axis
        layer_attrs = {'axis': axis, 'name': string(node.name)}
        self.paddle_graph.add_layer(
            kernel="paddle.concat",
            inputs={"x": inputs_list},
            outputs=[node.name],
            **layer_attrs)

    def ReLU(self, node):
        assert len(
            node.inputs) == 1, 'The count of ReLU node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)

        params = node.layer.relu_param
        if params.HasField('negative_slope') and params.negative_slope != 0:
            negative_slope = float(params.negative_slope)
            self.paddle_graph.add_layer(
                kernel="paddle.nn.functional.leaky_relu",
                inputs={"x": input.name},
                outputs=[node.name],
                negative_slope=negative_slope)
        else:
            self.paddle_graph.add_layer(
                kernel="paddle.nn.functional.relu",
                inputs={"x": input.name},
                outputs=[node.name])


    def PReLU(self, node):
        assert len(
            node.inputs) == 1, 'The count of PReLU node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.prelu_param
        mode_bool = params.channel_shared
        if mode_bool:
            mode = 'all'
        else:
            mode = 'channel'
        data = node.data
        assert data is not None, 'The parameter of {} (type is {}) is not set. You need to use python package of caffe to set the default value.'.format(
            node.name, node.layer_type)
        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = data[0]
        self.paddle_graph.add_layer(
            kernel="paddle.static.nn.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=self.params[kernel_weight_name].shape,
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.prelu",
            inputs={"x": input.name,
                    "weight": kernel_weight_name},
            outputs=[node.name])

    def Eltwise(self, node):
        assert len(
            node.inputs) == 2, "The count of Eltwise node\'s input is not 2."
        params = node.layer.eltwise_param
        # Caffe EltwiseOp: 0 = PROD, 1 = SUM, 2 = MAX.
        mode = params.operation
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        input0_name = input0.name
        input1_name = input1.name
        if mode == 0:
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = input1_name
            self.paddle_graph.add_layer(
                "paddle.multiply",
                inputs=inputs_dict,
                outputs=[node.name])
        elif mode == 1:
            if hasattr(params, 'coeff') and len(params.coeff) == 2:
                coeff = params.coeff
                self.paddle_graph.add_layer(
                    "paddle.scale",
                    inputs={"x": input0_name},
                    outputs=[node.name + '_mul0'],
                    scale=coeff[0])
                self.paddle_graph.add_layer(
                    "paddle.scale",
                    inputs={"x": input1_name},
                    outputs=[node.name + '_mul1'],
                    scale=coeff[1])
                inputs_dict = {}
                inputs_dict['x'] = node.name + '_mul0'
                inputs_dict['y'] = node.name + '_mul1'
                self.paddle_graph.add_layer(
                    "paddle.add",
                    inputs=inputs_dict,
                    outputs=[node.name])
            else:
                inputs_dict = {}
                inputs_dict['x'] = input0_name
                inputs_dict['y'] = input1_name
                self.paddle_graph.add_layer(
                    "paddle.add",
                    inputs=inputs_dict,
                    outputs=[node.name])
        else:
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = input1_name
            self.paddle_graph.add_layer(
                "paddle.maximum",
                inputs=inputs_dict,
                outputs=[node.name])

    def BatchNorm(self, node):
        assert len(
            node.inputs) == 1, 'The count of BatchNorm node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.batch_norm_param
        if hasattr(params, 'eps'):
            eps = params.eps
        else:
            eps = 1e-5
        if hasattr(params, 'moving_average_fraction'):
            momentum = params.moving_average_fraction
        else:
            momentum = 0.9
        if node.data is None or len(node.data) != 3:
            print(
                'The parameters of {} (type is {}) are not set, so they are initialized to zero.'
                .format(node.layer_name, node.layer_type))
            input_c = node.in_shapes[0][1]
            mean = np.zeros([input_c, ]).astype('float32')
            variance = np.zeros([input_c, ]).astype('float32')
            scale = 0
        else:
            node.data = [np.squeeze(i).astype('float32') for i in node.data]
            mean, variance, scale = node.data
        # Prescale the stats
        scaling_factor = 1.0 / scale if scale != 0 else 0
        mean *= scaling_factor
        variance *= scaling_factor
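        # (Caffe's BatchNorm stores a third blob, the moving-average factor;
        # dividing the saved mean/variance by it recovers the true running
        # statistics.)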
        weight_name = node.name + '_weight'
        self.paddle_graph.add_layer(
            kernel="paddle.ones",
            inputs={},
            outputs=[weight_name],
            shape=mean.shape,
            dtype=string("float32"))
        bias_name = node.name + '_bias'
        self.paddle_graph.add_layer(
            kernel="paddle.zeros",
            inputs={},
            outputs=[bias_name],
            shape=mean.shape,
            dtype=string("float32"))
        mean_name = node.name + '_mean'
        self.params[mean_name] = mean
        self.paddle_graph.add_layer(
            kernel="paddle.static.nn.create_parameter",
            inputs={},
            outputs=[mean_name],
            shape=self.params[mean_name].shape,
            dtype=string(str(self.params[mean_name].dtype)),
            name=string(mean_name))
        variance_name = node.name + '_variance'
        self.params[variance_name] = variance
        self.paddle_graph.add_layer(
            kernel="paddle.static.nn.create_parameter",
            inputs={},
            outputs=[variance_name],
            shape=self.params[variance_name].shape,
            dtype=string(str(self.params[variance_name].dtype)),
            name=string(variance_name))
        layer_attrs = {
            'epsilon': eps,
            'momentum': momentum
        }
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.batch_norm",
            inputs={"x": input.name,
                    "weight": weight_name,
                    "bias": bias_name,
                    "running_mean": mean_name,
                    "running_var": variance_name,},
            outputs=[node.name],
            **layer_attrs)

    def Scale(self, node):
        if node.data is None:
            print(
                "The parameters of {} (type is {}) are not set, so they are initialized to zero."
                .format(node.name, node.layer_type))
            self.params[node.name + "_cparam1"] = np.zeros([
                node.in_shapes[0][1],
            ]).astype("float32")
            self.params[node.name + "_cparam2"] = np.zeros([
                node.in_shapes[0][1],
            ]).astype("float32")
        else:
            self.params[node.name + "_cparam1"] = np.squeeze(node.data[
                0]).astype("float32")
            self.params[node.name + "_cparam2"] = np.squeeze(node.data[
                1]).astype("float32")
        params = node.layer.scale_param
        axis = params.axis
        if len(node.inputs) == 2:
            input0 = self.graph.get_input_node(node, idx=0, copy=True)
            input1 = self.graph.get_input_node(node, idx=1, copy=True)
            input0_name = input0.name
            input1_name = input1.name
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = input1_name
            self.paddle_graph.add_layer(
                "paddle.multiply",
                inputs=inputs_dict,
                outputs=[node.name + "_mul"],
                axis=1)
        else:
            self.paddle_graph.add_layer(
                "paddle.static.nn.create_parameter",
                inputs={},
                outputs=[node.name + "_cparam1"],
                shape=self.params[node.name + "_cparam1"].shape,
                dtype=string(str(self.params[node.name + "_cparam1"].dtype)),
                name=string(node.name + "_cparam1"))
            input0 = self.graph.get_input_node(node, idx=0, copy=True)
            input0_name = input0.name
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = node.name + "_cparam1"
            self.paddle_graph.add_layer(
                "paddle.multiply",
                inputs=inputs_dict,
                outputs=[node.name + "_mul"],
                axis=axis)
        self.paddle_graph.add_layer(
            "paddle.static.nn.create_parameter",
            inputs={},
            outputs=[node.name + "_cparam2"],
            shape=self.params[node.name + "_cparam2"].shape,
            dtype=string(str(self.params[node.name + "_cparam2"].dtype)),
            name=string(node.name + "_cparam2"))
        inputs_dict = {}
        inputs_dict['x'] = node.name + "_mul"
        inputs_dict['y'] = node.name + "_cparam2"
        output_shape = node.out_shapes[0]
        if axis == -1:
            self.paddle_graph.add_layer(
                "paddle.add",
                inputs=inputs_dict,
                outputs=[node.name])
        else:
            if axis < 0:
                axis = axis + len(output_shape)
            param2_shape = self.params[node.name + "_cparam2"].shape
            param2_shape_len = len(param2_shape)
            diff_len = len(output_shape) - axis - param2_shape_len
            new_shape = list(param2_shape) + [1] * diff_len
            self.paddle_graph.add_layer(
                "paddle.reshape",
                inputs={"x": node.name + "_cparam2"},
                outputs=[node.name + "_cparam2"],
                shape=new_shape)
            self.paddle_graph.add_layer(
                "paddle.add",
                inputs=inputs_dict,
                outputs=[node.name])

    def Reshape(self, node):
        input = self.graph.get_input_node(node, idx=0, copy=True)
        output_shape = node.out_shapes[0]
        self.paddle_graph.add_layer(
            "paddle.reshape",
            inputs={"x": input.name},
            outputs=[node.name],
            shape=output_shape)

    def ArgMax(self, node):
        assert len(node.inputs) == 1 and len(
            node.outputs
        ) == 1, "The count of ArgMax node\'s input and output is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        in_shapes = node.in_shapes[0]
        params = node.layer.argmax_param
        out_max_val = params.out_max_val if hasattr(params,
                                                    'out_max_val') else False
        top_k = params.top_k if hasattr(params, 'top_k') else 1
        axis = params.axis if hasattr(params, 'axis') else -1
        if axis < 0:
            axis += len(in_shapes)
        if out_max_val is True:
            self.paddle_graph.add_layer(
                "paddle.topk",
                inputs={"x": input.name},
                outputs=[node.name + "_topk_var", node.name + "_index_var"],
                k=top_k)
            self.paddle_graph.add_layer(
                "paddle.cast",
                inputs={"x": node.name + "_index_var"},
                outputs=[node.name + "_index_var"],
                dtype="{}_topk_var.dtype".format(node.name))
            self.paddle_graph.add_layer(
                "paddle.concat",
                inputs={"x": [node.name + "_topk_var", node.name + "_index_var"]},
                outputs=[node.name],
                axis=axis)
        else:
            self.paddle_graph.add_layer(
                "paddle.topk",
                inputs={"x": input.name},
                outputs=["_", node.name],
                k=top_k)

    def Crop(self, node):
        assert len(
            node.inputs) == 2, "The count of Crop node\'s input is not 2."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        example = self.graph.get_input_node(node, idx=1, copy=True)
        params = node.layer.crop_param
        axis = params.axis
        in_shapes = node.in_shapes[0]
        if axis < 0:
            axis += len(in_shapes)
        offset_real = [0] * len(in_shapes)
        if hasattr(params, "offset") and len(params.offset) > 0:
            offset = list(params.offset)
            assert (len(in_shapes) - axis
                    ) == len(offset), "invalid offset[%s] in crop layer" % (
                        str(offset))
            offset_real = [0] * axis + offset
        self.paddle_graph.add_layer(
                "paddle.crop",
                inputs={"x": input.name},
                outputs=[node.name],
                shape=node.in_shapes[1],
                offsets=list(offset_real))

    def Flatten(self, node):
        assert len(
            node.inputs) == 1, "The count of Flatten node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.reshape",
            inputs={"x": input.name},
            outputs=[node.name],
            shape=node.out_shapes[0])

    def Power(self, node):
        assert len(
            node.inputs) == 1, "The count of Power node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.power_param
        # Caffe Power computes (shift + scale * x) ** power.
        layer_attrs = {
            'scale': params.scale,
            'bias': params.shift,
            'bias_after_scale': True
        }
        self.paddle_graph.add_layer(
            "paddle.scale",
            inputs={"x": input.name},
            outputs=[node.name],
            **layer_attrs)
        self.paddle_graph.add_layer(
            "paddle.pow",
            inputs={"x": node.name},
            outputs=[node.name],
            exponent=params.power)

    def Reduction(self, node):
        assert len(
            node.inputs) == 1, "The count of Reduction node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.reduction_param
        operation = params.operation
        axis = params.axis
        coeff = params.coeff
        assert operation >= 1 and operation <= 4, "invalid reduction operation [%s]" % (
            operation)
        input_len = len(node.in_shapes[0])
        if axis < 0:
            axis += input_len + 1
        dim = list(range(input_len))
        # operation = SUM
        if operation == 1:
            layer_attrs = {
                "axis": dim[axis:],
                "keepdim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.sum",
                inputs={"x": input.name},
                outputs=[node.name],
                **layer_attrs)
        # operation = ASUM
        elif operation == 2:
            self.paddle_graph.add_layer(
                "paddle.abs",
                inputs={"x": input.name},
                outputs=[node.name])
            layer_attrs = {
                "axis": dim[axis:],
                "keepdim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.sum",
                inputs={"x": node.name},
                outputs=[node.name],
                **layer_attrs)
        # operation = SUMSQ
        elif operation == 3:
            self.paddle_graph.add_layer(
                "paddle.pow",
                inputs={"x": input.name},
                outputs=[node.name],
                exponent=2.0)
            layer_attrs = {
                "axis": dim[axis:],
                "keepdim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.sum",
                inputs={"x": node.name},
                outputs=[node.name],
                **layer_attrs)
        # operation = MEAN
        else:
            layer_attrs = {
                "axis": dim[axis:],
                "keepdim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.mean",
                inputs={"x": input.name},
                outputs=[node.name],
                **layer_attrs)
        self.paddle_graph.add_layer(
            "paddle.scale",
            inputs={"x": node.name},
            outputs=[node.name],
            scale=coeff)

    def Axpy(self, node):
        assert len(node.inputs) == 3 and len(
            node.outputs
        ) == 1, "The count of Axpy node\'s input is not 3 or its output is not 1."
        params = node.layer.axpy_param
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        input2 = self.graph.get_input_node(node, idx=2, copy=True)
        input0_name = input0.name
        input1_name = input1.name
        input2_name = input2.name
        # Axpy computes alpha * x + y, with inputs (alpha, x, y).
        inputs_dict = {}
        inputs_dict['x'] = input1_name
        inputs_dict['y'] = input0_name
        self.paddle_graph.add_layer(
            "paddle.multiply",
            inputs=inputs_dict,
            outputs=[node.name + "_mul"],
            axis=0)
        inputs_dict = {}
        inputs_dict['x'] = node.name + "_mul"
        inputs_dict['y'] = input2_name
        self.paddle_graph.add_layer(
            "paddle.add",
            inputs=inputs_dict,
            outputs=[node.name])

    def DetectionOutput(self, node):
        assert len(
            node.inputs) == 3, "The count of DetectionOutput node\'s input is not 3."
        inputs_dict = dict()
        for i in range(len(node.inputs)):
            input = self.graph.get_input_node(node, idx=i, copy=True)
            if i == 1:
                input = self.graph.get_input_node(node, idx=i, copy=True)
                while input is not None \
                      and input.layer_type != 'Softmax' \
                      and input.layer_type != 'Sigmoid':
                    input = self.graph.get_input_node(input, idx=0, copy=True)
                assert input is not None, 'This kind of DetectionOutput is not supported!'
                input = self.graph.get_input_node(input, idx=0, copy=True)
            inputs_dict["x{}".format(i)] = input.name
        params = node.layer.detection_output_param
        nms_param = params.nms_param
        nms_param_dict = dict()
        if nms_param is None:
            nms_param_dict = {"nms_threshold": 0.3, "top_k": 10, "eta": 1.0}
        else:
            nms_param_dict["nms_threshold"] = nms_param.nms_threshold
            nms_param_dict["top_k"] = nms_param.top_k
            nms_param_dict["eta"] = nms_param.eta
        layer_attrs = {
            "background_label": params.background_label_id,
            "nms_threshold": nms_param_dict["nms_threshold"],
            "nms_top_k": nms_param_dict["top_k"],
            "keep_top_k": params.keep_top_k,
            "score_threshold": params.confidence_threshold,
            "nms_eta": nms_param_dict["eta"]}
        self.paddle_graph.add_layer(
            kernel="custom_layer:detectionoutput",
            inputs=inputs_dict,
            outputs=[node.name],
            **layer_attrs)

    def Normalize(self, node):
        assert len(
            node.inputs) == 1, "The count of Normalize node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.norm_param
        scale_name = node.name + "_scale"
        if node.data is None or len(node.data) != 1:
            print(
                "The parameter of {} (type is {}) is not set, so it is initialized to zero."
                .format(scale_name, node.layer_type))
            self.params[scale_name] = \
                np.zeros([1] if params.channel_shared else [1, 1, 1, node.in_shapes[0][1]]).astype("float32")
        else:
            self.params[scale_name] = adjust_parameters(node)[0]

        layer_attrs = {
            "axis": -1 if params.channel_shared else 1,
            "param_name": scale_name,
            "param_shape": self.params[scale_name].shape,
            "param_dtype": str(self.params[scale_name].dtype)}
        self.paddle_graph.add_layer(
            "custom_layer:normalize",
            inputs={"x": input.name},
            outputs=[node.name],
            **layer_attrs)

    def Permute(self, node):
        assert len(
            node.inputs) == 1, "The count of Permute node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.permute_param
        order = list(params.order)
        self.paddle_graph.add_layer(
            "paddle.transpose",
            inputs={"x": input.name},
            outputs=[node.name],
            perm=order)
        
    def PriorBox(self, node):
        assert len(
            node.inputs) == 2, "The count of PriorBox node\'s input is not 2."
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        inputs_dict = {}
        inputs_dict["x0"] = input0.name
        inputs_dict["x1"] = input1.name
        params = node.layer.prior_box_param
        steps = tuple(params.step) if isinstance(params.step, (list, tuple)) \
                else (params.step, params.step)
        layer_attrs = {
            "min_sizes": params.min_size,
            "max_sizes": params.max_size,
            "aspect_ratios": params.aspect_ratio,
            "variance": params.variance,
            "flip": params.flip,
            "clip": params.clip,
            "steps": steps,
            "offset": params.offset,
            "min_max_aspect_ratios_order": True}
        self.paddle_graph.add_layer(
            "custom_layer:priorbox",
            inputs=inputs_dict,
            outputs=[node.name],
            **layer_attrs)

    def ReLU6(self, node):
        assert len(
            node.inputs) == 1, "The count of ReLU6 node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.nn.functional.relu6",
            inputs={"x": input.name},
            outputs=[node.name])

    def ROIPooling(self, node):
        assert len(
            node.inputs) == 2, "The count of ROIPooling node\'s input is not 2."
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        inputs_dict = {}
        inputs_dict["x0"] = input0.name
        inputs_dict["x1"] = input1.name
        params = node.layer.roi_pooling_param
        layer_attrs = {
            "pooled_height": params.pooled_h,
            "pooled_width": params.pooled_w,
            "spatial_scale": params.spatial_scale}
        self.paddle_graph.add_layer(
            "custom_layer:ROIPooling",
            inputs=inputs_dict,
            outputs=[node.name],
            **layer_attrs)

    def ShuffleChannel(self, node):
        assert len(
            node.inputs) == 1, "The count of ShuffleChannel node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.shuffle_channel_param
        self.paddle_graph.add_layer(
            "fluid.layers.shuffle_channel",
            inputs={"x": input.name},
            outputs=[node.layer_name],
            group=params.group)

    def Upsample(self, node):
        assert len(
            node.inputs) == 1, "The count of Upsample node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.upsample_param
        layer_attrs = {
            "align_corners": False,
            "scale_factor": params.scale,
            "mode": "nearest"}
        self.paddle_graph.add_layer(
            "paddle.nn.functional.interpolate",
            inputs={"x": input.name},
            outputs=[node.layer_name],
            **layer_attrs)

    def Select(self, node):
        assert len(
            node.inputs) == 1, "The count of Select node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        in_shapes = node.in_shapes[0]
        params = node.layer.select_param
        layer_attrs = {
            "in_shapes": in_shapes,
            "point": params.slice_point,
            "axis": params.axis}
        self.paddle_graph.add_layer(
            "custom_layer:select",
            inputs={"x": input.name},
            outputs=[node.name],
            **layer_attrs)
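

# Minimal usage sketch (hypothetical paths; assumes x2paddle's CaffeDecoder
# and PaddleGraph.gen_model APIs):
#
#     from x2paddle.decoder.caffe_decoder import CaffeDecoder
#
#     decoder = CaffeDecoder('deploy.prototxt', 'model.caffemodel')
#     mapper = CaffeOpMapper(decoder)
#     mapper.paddle_graph.gen_model('inference_model')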