#   Copyright (c) 2019  PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import numbers
import copy
import numpy as np
from x2paddle.decoder.caffe_decoder import CaffeGraph, CaffeGraphNode
from x2paddle.core.op_mapper import OpMapper
from x2paddle.core.util import *
from x2paddle.core.program import PaddleGraph 
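
# Typical usage (a sketch; the decoder construction below is illustrative and
# the exact CaffeDecoder arguments may differ -- see x2paddle's convert entry
# point for the authoritative call):
#
#     decoder = CaffeDecoder(prototxt_path, caffemodel_path, caffe_proto)
#     mapper = CaffeOpMapper(decoder)
#     mapper.paddle_graph.build()
#     mapper.paddle_graph.gen_model(save_dir)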


def _adjust_parameters(node):
    data = node.data
    # When using the protobuf-backend, each parameter initially has four dimensions.
    # In certain cases (like FC layers), we want to eliminate the singleton dimensions.
    # This implementation takes care of the common cases. However, it does leave the
    # potential for future issues.
    # The Caffe-backend does not suffer from this problem.
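    # Example (illustrative shapes): an InnerProduct weight stored as
    # (1, 1, 4096, 1024) is squeezed over axes (0, 1) to (4096, 1024), and its
    # bias stored as (1, 1, 1, 4096) is squeezed over axes (0, 1, 2) to (4096,).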
    data = list(data)

    squeeze_indices = [1]  # Squeeze biases.
    if node.layer_type == 'InnerProduct':
        squeeze_indices.append(0)  # Squeeze FC.

    for idx in squeeze_indices:
        if idx >= len(data):
            continue

        d = data[idx]
        assert len(
            d.shape
        ) == 4, 'invalid shape[%s] from caffe when adjust_parameters' % (
            str(d.shape))

        shape_old = d.shape
        sq_axis = None
        if idx == 0:
            sq_axis = (0, 1)
        elif idx == 1:
            sq_axis = (0, 1, 2)
        else:
            continue

        data[idx] = np.squeeze(d, axis=sq_axis)
        shape_new = data[idx].shape
    return data

def _get_kernel_parameters(kind, params):
    assert kind in ["Convolution", "Pooling", "Deconvolution", "ConvolutionDepthwise"]
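    # Example (illustrative prototxt): a Convolution layer with
    #   num_output: 64, kernel_size: 3, stride: 2, pad: 1
    # resolves to c_o=64, kernel=[3, 3], stride=[2, 2], pad=[1, 1],
    # dilation=[1, 1], group=1.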
    [k_h, k_w] = [1, 1]
    if isinstance(params.kernel_size, numbers.Number):
        [k_h, k_w] = [params.kernel_size] * 2
    elif len(params.kernel_size) > 0:
        k_h = params.kernel_h if params.kernel_h > 0 else params.kernel_size[
            0]
        k_w = params.kernel_w if params.kernel_w > 0 else params.kernel_size[
            len(params.kernel_size) - 1]
    elif params.kernel_h > 0 or params.kernel_w > 0:
        k_h = params.kernel_h
        k_w = params.kernel_w
    [s_h, s_w] = [1, 1]
    if isinstance(params.stride, numbers.Number):
        [s_h, s_w] = [params.stride] * 2
    elif len(params.stride) > 0:
        s_h = params.stride_h if params.stride_h > 0 else params.stride[0]
        s_w = params.stride_w if params.stride_w > 0 else params.stride[len(
            params.stride) - 1]
    elif params.stride_h > 0 or params.stride_w > 0:
        s_h = params.stride_h
        s_w = params.stride_w
    [p_h, p_w] = [0, 0]
    if isinstance(params.pad, numbers.Number):
        [p_h, p_w] = [params.pad] * 2
    elif len(params.pad) > 0:
        p_h = params.pad_h if params.pad_h > 0 else params.pad[0]
        p_w = params.pad_w if params.pad_w > 0 else params.pad[len(
            params.pad) - 1]
    elif params.pad_h > 0 or params.pad_w > 0:
        p_h = params.pad_h
        p_w = params.pad_w
    dila_h = dila_w = 1
    group = 1
    c_o = 1
    if kind in ["Convolution", "Deconvolution", "ConvolutionDepthwise"]:
        if kind in ["Convolution", "Deconvolution"]:
            c_o = params.num_output
        dila_len = len(params.dilation)
        if dila_len == 2:
            dila_h = params.dilation[0]
            dila_w = params.dilation[1]
        elif dila_len == 1:
            dila_h = dila_w = params.dilation[0]
        else:
            assert dila_len == 0, "invalid length[%s] of dilation in convolution" % (
                dila_len)
    if kind in ['Convolution', 'Deconvolution']:
        group = params.group
    kernel = [k_h, k_w]
    stride = [s_h, s_w]
    pad = [p_h, p_w]
    dilation = [dila_h, dila_w]
    return c_o, kernel, stride, pad, dilation, group


class CaffeOpMapper(OpMapper):
    directly_map_ops = {
        'AbsVal': 'paddle.abs',
        'Sigmoid': 'paddle.nn.functional.sigmoid',
        'TanH': 'paddle.tanh',
    }

    def __init__(self, decoder):
        super(CaffeOpMapper, self).__init__()
        self.graph = decoder.caffe_graph
        if not self.op_checker():
            raise Exception("Model is not supported yet.")
        self.params = dict()
        resolver = decoder.resolver
        self.used_custom_layers = {}
        self.paddle_graph = PaddleGraph(parent_layer=None, graph_type="static", source_type="caffe")
        self.paddle_graph.inputs = self.graph.input_nodes
        self.paddle_graph.outputs = self.graph.output_nodes

        print("Total nodes: {}".format(
            sum([
                isinstance(node, CaffeGraphNode)
                for name, node in self.graph.node_map.items()
            ])))
        print("Nodes converting ...")
        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            op = node.layer_type
            if hasattr(self, op):
                func = getattr(self, op)
                func(node)
            elif op in self.directly_map_ops:
                self.directly_map(node)
        print("\nNodes converted.")
        self.paddle_graph.set_parameters(self.params)
        self.paddle_graph.set_custom(self.used_custom_layers)

    def op_checker(self):
        unsupported_ops = set()
        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            op = node.layer_type
            if not hasattr(self, op) and \
                op not in self.directly_map_ops and \
                op not in self.elementwise_ops:
                unsupported_ops.add(op)
        if len(unsupported_ops) == 0:
            return True
        else:
            print("\n========= {} OPs are not supported yet ===========".format(
                len(unsupported_ops)))
            for op in unsupported_ops:
                print("========== {} ============".format(op))
            return False
        
    def directly_map(self, node):
        assert node.layer_type in self.directly_map_ops
        op_info = self.directly_map_ops[node.layer_type]
        input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            kernel=op_info,
            inputs={"x": input.name},
            outputs=[node.name])

    def Input(self, node):
        shape = list(node.layer.input_param.shape[0].dim)[1:]
        dtype = 'float32'
        layer_attrs = {
            "dtype": string(dtype),
            "shape": [-1] + shape,
            "name": string(node.name)
        }
        self.paddle_graph.add_layer(
            kernel="paddle.static.data",
            inputs={},
            outputs=[node.name],
            **layer_attrs)
        
    def MemoryData(self, node):
        params = node.layer.memory_data_param
        transform_params = node.layer.transform_param
        
        shape = list()
        shape.append(params.batch_size)
        shape.append(params.channels)
        if transform_params.HasField("crop_size"):
            shape.append(transform_params.crop_size)
            shape.append(transform_params.crop_size)
        else:
            # Caffe blobs are laid out NCHW, so height precedes width.
            shape.append(params.height)
            shape.append(params.width)
        dtype = 'float32'
        layer_attrs = {
            "dtype": string(dtype),
            "shape": shape,
            "name": string(node.name)
        }
        self.paddle_graph.add_layer(
            kernel="paddle.static.data",
            inputs={},
            outputs=[node.name],
            **layer_attrs)
        
    def Convolution(self, node):
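        # The kernel (and optional bias) blobs are registered as persistable
        # parameters via paddle.static.create_parameter, then a functional
        # conv2d layer consuming them by name is emitted.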
        data = node.data
        params = node.layer.convolution_param
        channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
            node.layer_type, params)
        if data is None:
            data = []
            print(
                "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                .format(node.name, node.layer_type))
            input_c = node.in_shapes[0][1]
            output_c = channel
            data.append(
                np.zeros([output_c, input_c, kernel[0], kernel[1]]).astype(
                    'float32'))
            data.append(np.zeros([output_c, ]).astype('float32'))
        else:
            data = _adjust_parameters(node)
        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = data[0]
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=self.params[kernel_weight_name].shape,
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        if len(data) == 2:
            kernel_bias_name = node.name + '_bias'
            self.params[kernel_bias_name] = data[1]
            self.paddle_graph.add_layer(
                kernel="paddle.static.create_parameter",
                inputs={},
                outputs=[kernel_bias_name],
                shape=self.params[kernel_bias_name].shape,
                dtype=string(str(self.params[kernel_bias_name].dtype)),
                name=string(kernel_bias_name))
        assert len(node.inputs
                   ) == 1, 'The count of Convolution node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_inputs = {"x": input.name, 
                        "weight": kernel_weight_name}
        layer_attrs = {'stride': stride,
                       'padding': pad,
                       'dilation': dilation,
                       'groups': group}
        if len(data) == 2:
            layer_inputs["bias"] = kernel_bias_name
        else:
            layer_attrs["bias"] = None
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.conv2d",
            inputs=layer_inputs,
            outputs=[node.name],
            **layer_attrs) 
        
    def Deconvolution(self, node):
        data = node.data
        params = node.layer.convolution_param
        channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
            node.layer_type, params)
        if data is None:
            data = []
            print(
                'The parameter of {} (type is {}) is not set. So we set the parameters as 0'
                .format(node.name, node.layer_type))
            input_c = node.in_shapes[0][1]
            output_c = channel
            data.append(
                np.zeros([output_c, input_c, kernel[0], kernel[1]]).astype(
                    'float32'))
            data.append(np.zeros([output_c, ]).astype('float32'))
        else:
            data = _adjust_parameters(node)
        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = data[0]
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=self.params[kernel_weight_name].shape,
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        if len(data) == 2:
            kernel_bias_name = node.name + '_bias'
            self.params[kernel_bias_name] = data[1]
            self.paddle_graph.add_layer(
                kernel="paddle.static.create_parameter",
                inputs={},
                outputs=[kernel_bias_name],
                shape=self.params[kernel_bias_name].shape,
                dtype=string(str(self.params[kernel_bias_name].dtype)),
                name=string(kernel_bias_name))
        assert len(node.inputs
                   ) == 1, 'The count of Deconvolution node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_inputs = {"x": input.name, 
                        "weight": kernel_weight_name}
        layer_attrs = {'stride': stride,
                       'padding': pad,
                       'dilation': dilation,
                       'groups': group}
        if len(data) == 2:
            layer_inputs["bias"] = kernel_bias_name
        else:
            layer_attrs["bias"] = None
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.conv2d_transpose",
            inputs=layer_inputs,
            outputs=[node.name],
            **layer_attrs)    
        
    def DepthwiseConvolution(self, node):
        node.layer_type = "ConvolutionDepthwise"
        self.ConvolutionDepthwise(node)
        
    def ConvolutionDepthwise(self, node):
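        # Caffe's ConvolutionDepthwise layer carries no explicit group field;
        # the group count is derived from the input/output channel ratio below
        # so that the emitted conv2d behaves depthwise.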
        data = node.data
        params = node.layer.convolution_param
        out_channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
            node.layer_type, params)
        out_channel = params.num_output if params.num_output > 0 \
            else node.in_shapes[0][1]
        in_channel = node.in_shapes[0][1]
        group = int(in_channel / (in_channel / out_channel)) if in_channel > out_channel \
            else int(in_channel / (out_channel / in_channel))
        if data is None:
            data = []
            print(
                "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                .format(node.layer_name, node.layer_type))
            data.append(
                np.zeros([out_channel, node.in_shapes[0][1], kernel[0], kernel[1]]).astype(
                    'float32'))
            data.append(np.zeros([out_channel, ]).astype('float32'))
        else:
            data = _adjust_parameters(node)
        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = data[0]
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=self.params[kernel_weight_name].shape,
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        if len(data) == 2:
            kernel_bias_name = node.name + '_bias'
            self.params[kernel_bias_name] = data[1]
            self.paddle_graph.add_layer(
                kernel="paddle.static.create_parameter",
                inputs={},
                outputs=[kernel_bias_name],
                shape=self.params[kernel_bias_name].shape,
                dtype=string(str(self.params[kernel_bias_name].dtype)),
                name=string(kernel_bias_name))
        assert len(node.inputs
                   ) == 1, "The count of ConvolutionDepthwise node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_inputs = {"x": input.name, 
                        "weight": kernel_weight_name}
        layer_attrs = {'stride': stride,
                       'padding': pad,
                       'dilation': dilation,
                       'groups': group}
        if len(data) == 2:
            layer_inputs["bias"] = kernel_bias_name
        else:
            layer_attrs["bias"] = None
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.conv2d",
            inputs=layer_inputs,
            outputs=[node.name],
            **layer_attrs) 

    def Pooling(self, node):
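        # Caffe PoolingParameter.pool: 0 = MAX, otherwise treated as AVE here;
        # global_pooling maps to the adaptive pooling functional ops.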
        params = node.layer.pooling_param
        ceil_mode = getattr(params, 'ceil_mode', True)
        global_pool = getattr(params, 'global_pooling', False)
        kernel_default = [1, 1]
        channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
            node.layer_type, params)
        assert len(
            node.inputs) == 1, 'The count of Pooling node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        if global_pool:
            if kernel[0] == 0:
                kernel = [1, 1]
            if params.pool == 0:
                self.paddle_graph.add_layer(
                    "paddle.nn.functional.adaptive_max_pool2d",
                    inputs={"x": input.name},
                    outputs=[node.name],
                    output_size=kernel)
            else:
                self.paddle_graph.add_layer(
                    "paddle.nn.functional.adaptive_avg_pool2d",
                    inputs={"x": input.name},
                    outputs=[node.name],
                    output_size=kernel)
        else:
            if params.pool == 0:
                self.paddle_graph.add_layer(
                    kernel="paddle.nn.functional.max_pool2d",
                    inputs={"x": input.name},
                    outputs=[node.name],
                    kernel_size=kernel,
                    stride=stride,
                    padding=pad,
                    ceil_mode=ceil_mode)
            else:
                self.paddle_graph.add_layer(
                    kernel="paddle.nn.functional.avg_pool2d",
                    inputs={"x": input.name},
                    outputs=[node.name],
                    kernel_size=kernel,
                    stride=stride,
                    padding=pad,
                    ceil_mode=ceil_mode)

    def LRN(self, node):
        assert len(node.inputs) == 1, 'The count of LRN node\'s input is not 1.'
        params = node.layer.lrn_param
        # The window size must be an odd value. Paddle's lrn takes the full
        # window size (params.local_size) as its `n` argument.
        assert params.local_size % 2 == 1
        # Caffe scales by (alpha/(2*n+1)), whereas Paddle
        # just scales by alpha (as does Krizhevsky's paper).
        # We'll account for that here.
        alpha = params.alpha / float(params.local_size)
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_attrs = {
            'n': params.local_size,
            'k': params.k,
            'alpha': alpha,
            'beta': params.beta,
            'name': string(node.name)
        }
        self.paddle_graph.add_layer(
            kernel="paddle.fluid.layers.lrn",
            inputs={"input": input.name},
            outputs=[node.name],
            **layer_attrs)

    def InnerProduct(self, node):
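        # Caffe stores FC weights as (num_output, input_dim); they are
        # transposed below to the (input_dim, num_output) layout that
        # paddle.nn.functional.linear expects.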
        data = node.data
        params = node.layer.inner_product_param
        if data is None:
            print(
                'The parameter of {} (type is {}) is not set. So we set the parameters as 0.'
                .format(node.layer_name, node.layer_type))
            input_c = node.in_shapes[0][1]
            output_c = params.num_output
            data = []
            data.append(
                np.zeros([input_c, output_c]).astype('float32'))
            data.append(np.zeros([output_c]).astype('float32'))
        else:
            data = _adjust_parameters(node)
            # Reshape the parameters to Paddle's ordering
            transpose_order = (1, 0)
            w = data[0]
            fc_shape = w.shape
            output_channels = fc_shape[0]
            w = w.reshape((output_channels, -1))
            w = w.transpose(transpose_order)
            data[0] = w

        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = data[0]
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=self.params[kernel_weight_name].shape,
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        if len(data) == 2:
            kernel_bias_name = node.name + '_bias'
            self.params[kernel_bias_name] = data[1]
            self.paddle_graph.add_layer(
                kernel="paddle.static.create_parameter",
                inputs={},
                outputs=[kernel_bias_name],
                shape=self.params[kernel_bias_name].shape,
                dtype=string(str(self.params[kernel_bias_name].dtype)),
                name=string(kernel_bias_name))
        assert len(node.inputs
                   ) == 1, 'The count of InnerProduct node\'s input is not 1.'
        assert params.axis == 1
        assert params.bias_term == True
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_inputs = {"x": input.name, 
                        "weight": kernel_weight_name}
        layer_attrs = dict()
        if len(data) == 2:
            layer_inputs["bias"] = kernel_bias_name
        else:
            layer_attrs["bias"] = None
        if node.in_shapes[0][-1] != data[0].shape[0]:
            self.paddle_graph.add_layer(
                "paddle.reshape",
                inputs={"x": input.name},
                outputs=[input.name],
                shape=[-1, data[0].shape[0]])
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.linear",
            inputs=layer_inputs,
            outputs=[node.name],
            **layer_attrs)

    def Softmax(self, node):
        assert len(
            node.inputs) == 1, 'The count of Softmax node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.softmax_param
        axis = params.axis
        shape = node.in_shapes[0]
        dims = len(shape)
        axis = axis + dims if axis < 0 else axis
        layer_attrs = {'axis': axis, 'name': string(node.layer_name + '_softmax')}
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.softmax",
            inputs={"x": input.name},
            outputs=[node.layer_name],
            **layer_attrs)

    def Slice(self, node):
        assert len(
            node.inputs) == 1, "The count of Slice node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        top_len = len(node.layer.top)
        params = node.layer.slice_param
        axis = params.axis
        slice_dim = params.slice_dim
        if slice_dim != 1 and axis == 1:
            axis = slice_dim
        output_shape = node.out_shapes
        sections_list = list()
        outputs_list = list()
        for i, s in enumerate(output_shape):
            sections_list.append(s[axis])
            outputs_list.append("{}_p{}".format(node.layer_name, i))
        layer_attrs = {
            'num_or_sections': sections_list,
            'axis': axis,
        }
        self.paddle_graph.add_layer(
            "paddle.split",
            inputs={"x": input.name},
            outputs=outputs_list,
            **layer_attrs)

    def Concat(self, node):
        assert len(
            node.inputs
        ) >= 1, 'The count of Concat node\'s input is less than 1.'
        inputs_list = []
        for i in range(len(node.inputs)):
            input = self.graph.get_input_node(node, idx=i, copy=True)
            inputs_list.append(input.name)
        params = node.layer.concat_param
        axis = params.axis
        layer_attrs = {'axis': axis, 'name': string(node.name)}
        self.paddle_graph.add_layer(
            kernel="paddle.concat",
            inputs={"x": inputs_list},
            outputs=[node.name],
            **layer_attrs)

    def ReLU(self, node):
        """
S
SunAhong1993 已提交
601

602 603 604 605 606
        :param node:
        :return:
        """
        assert len(
            node.inputs) == 1, 'The count of ReLU node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)

        params = node.layer.relu_param
        if params.HasField('negative_slope') and params.negative_slope != 0:
            negative_slope = float(params.negative_slope)
            self.paddle_graph.add_layer(
                kernel="paddle.nn.functional.leaky_relu",
                inputs={"x": input.name},
                outputs=[node.name],
                negative_slope=negative_slope)
        else:
            self.paddle_graph.add_layer(
                kernel="paddle.nn.functional.relu",
                inputs={"x": input.name},
                outputs=[node.name])

    def PReLU(self, node):
        assert len(
            node.inputs) == 1, 'The count of PReLU node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.prelu_param
        mode_bool = params.channel_shared
        output_shape = node.out_shapes[0]
        if mode_bool:
            num_parameters = 1
        else:
            num_parameters = output_shape[1]
        data = node.data
        assert data is not None, 'The parameter of {} (type is {}) is not set. You need to use python package of caffe to set the default value.'.format(
            node.name, node.layer_type)
        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = np.squeeze(data[0])
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=[num_parameters],
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.prelu",
            inputs={"x": input.name,
                    "weight": kernel_weight_name},
            outputs=[node.name])

    def Eltwise(self, node):
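        # Caffe EltwiseParameter.operation: 0 = PROD, 1 = SUM (optionally with
        # two per-input coefficients), 2 = MAX.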
        assert len(
            node.inputs) == 2, "The count of Eltwise node\'s input is not 2."
        params = node.layer.eltwise_param
        mode = params.operation
        inputs = []
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        input0_name = input0.name
        input1_name = input1.name
        if mode == 0:
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = input1_name
            self.paddle_graph.add_layer(
                "paddle.multiply",
                inputs=inputs_dict,
                outputs=[node.name])
        elif mode == 1:
            if hasattr(params, 'coeff') and len(params.coeff) == 2:
                coeff = params.coeff
                self.paddle_graph.add_layer(
                    "paddle.scale",
                    inputs={"x": input0_name},
                    outputs=[node.name + '_mul0'],
                    scale=coeff[0])
                self.paddle_graph.add_layer(
                    "paddle.scale",
                    inputs={"x": input1_name},
                    outputs=[node.name + '_mul1'],
                    scale=coeff[1])
                inputs_dict = {}
                inputs_dict['x'] = node.name + '_mul0'
                inputs_dict['y'] = node.name + '_mul1'
                self.paddle_graph.add_layer(
                    "paddle.add",
                    inputs=inputs_dict,
                    outputs=[node.name])
            else:
                inputs_dict = {}
                inputs_dict['x'] = input0_name
                inputs_dict['y'] = input1_name
                self.paddle_graph.add_layer(
                    "paddle.add",
                    inputs=inputs_dict,
                    outputs=[node.name])
        else:
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = input1_name
            self.paddle_graph.add_layer(
                "paddle.max",
                inputs=inputs_dict,
                outputs=[node.name])

    def BatchNorm(self, node):
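        # Caffe BatchNorm stores three blobs: running mean, running variance,
        # and a scale factor that both statistics are divided by before use.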
        assert len(
            node.inputs) == 1, 'The count of BatchNorm node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.batch_norm_param
        if hasattr(params, 'eps'):
            eps = params.eps
        else:
            eps = 1e-5
        if hasattr(params, 'moving_average_fraction'):
            momentum = params.moving_average_fraction
        else:
            momentum = 0.9
        if node.data is None or len(node.data) != 3:
            print(
                'The parameter of {} (type is {}) is not set. So we set the parameters as 0'
                .format(node.layer_name, node.layer_type))
            input_c = node.in_shapes[0][1]
            mean = np.zeros([input_c, ]).astype('float32')
            variance = np.zeros([input_c, ]).astype('float32')
            scale = 0
        else:
            node.data = [np.squeeze(i).astype('float32') for i in node.data]
            mean, variance, scale = node.data
        # Prescale the stats
        scaling_factor = 1.0 / scale if scale != 0 else 0
        mean *= scaling_factor
        variance *= scaling_factor
        weight_name = node.name + '_weight'
        self.paddle_graph.add_layer(
            kernel="paddle.ones",
            inputs={},
            outputs=[weight_name],
            shape=mean.shape,
            dtype=string("float32"))
        bias_name = node.name + '_bias'
        self.paddle_graph.add_layer(
            kernel="paddle.zeros",
            inputs={},
            outputs=[bias_name],
            shape=mean.shape,
            dtype=string("float32"))
        mean_name = node.name + '_mean'
        self.params[mean_name] = mean
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[mean_name],
            shape=self.params[mean_name].shape,
            dtype=string(str(self.params[mean_name].dtype)),
            name=string(mean_name))
        variance_name = node.name + '_variance'
        self.params[variance_name] = variance
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[variance_name],
            shape=self.params[variance_name].shape,
            dtype=string(str(self.params[variance_name].dtype)),
            name=string(variance_name))
        layer_attrs = {
            'epsilon': eps,
            'momentum': momentum
        }
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.batch_norm",
            inputs={"x": input.name,
                    "weight": weight_name,
                    "bias": bias_name,
                    "running_mean": mean_name,
                    "running_var": variance_name,},
            outputs=[node.name],
            **layer_attrs)

    def Scale(self, node):
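        # Caffe's Scale layer either multiplies two input blobs
        # (len(node.inputs) == 2) or multiplies by a learned per-channel
        # weight; in both cases a learned bias is added afterwards.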
        if node.data is None:
            print(
                "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                .format(node.name, node.layer_type))
            self.params[node.name + "_cparam1"] = np.zeros([
                node.in_shapes[0][1],
            ]).astype("float32")
            self.params[node.name + "_cparam2"] = np.zeros([
                node.in_shapes[0][1],
            ]).astype("float32")
        else:
            self.params[node.name + "_cparam1"] = np.squeeze(node.data[
                0]).astype("float32")
            self.params[node.name + "_cparam2"] = np.squeeze(node.data[
                1]).astype("float32")
        params = node.layer.scale_param
        axis = params.axis
        inputs = []
        if len(node.inputs) == 2:
            input0 = self.graph.get_input_node(node, idx=0, copy=True)
            input1 = self.graph.get_input_node(node, idx=1, copy=True)
            input0_name = input0.name
            input1_name = input1.name
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = input1_name
            self.paddle_graph.add_layer(
                "paddle.multiply",
                inputs=inputs_dict,
                outputs=[node.name + "_mul"],
                axis=1)
        else:
            self.paddle_graph.add_layer(
                "paddle.static.create_parameter",
                inputs={},
                outputs=[node.name + "_cparam1"],
                shape=self.params[node.name + "_cparam1"].shape,
                dtype=string(str(self.params[node.name + "_cparam1"].dtype)),
                name=string(node.name + "_cparam1"))
            input0 = self.graph.get_input_node(node, idx=0, copy=True)
            input0_name = input0.name
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = node.name + "_cparam1"
            self.paddle_graph.add_layer(
                "paddle.multiply",
                inputs=inputs_dict,
                outputs=[node.name + "_mul"],
                axis=axis)
        self.paddle_graph.add_layer(
            "paddle.static.create_parameter",
            inputs={},
            outputs=[node.name + "_cparam2"],
            shape=self.params[node.name + "_cparam2"].shape,
            dtype=string(str(self.params[node.name + "_cparam2"].dtype)),
            name=string(node.name + "_cparam2"))
        inputs_dict = {}
        inputs_dict['x'] = node.name + "_mul"
        inputs_dict['y'] = node.name + "_cparam2"
        output_shape = node.out_shapes[0]
        if axis == -1:
            self.paddle_graph.add_layer(
                "paddle.add",
                inputs=inputs_dict,
                outputs=[node.name])
        else:
            if axis < 0:
                axis = axis + len(output_shape)
            param2_shape = self.params[node.name + "_cparam2"].shape
            param2_shape_len = len(param2_shape)
            diff_len = len(output_shape) - axis - param2_shape_len
            new_shape = list(param2_shape) + [1] * diff_len
            self.paddle_graph.add_layer(
                "paddle.reshape",
                inputs={"x": node.name + "_cparam2"},
                outputs=[node.name + "_cparam2"],
                shape=new_shape)
            self.paddle_graph.add_layer(
                "paddle.add",
                inputs=inputs_dict,
                outputs=[node.name])
        

    def Reshape(self, node):
        input = self.graph.get_input_node(node, idx=0, copy=True)
        output_shape = node.out_shapes[0]
        self.paddle_graph.add_layer(
            "paddle.reshape",
            inputs={"x": input.name},
            outputs=[node.name],
            shape=output_shape)

    def ArgMax(self, node):
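        # Caffe ArgMaxParameter: top_k selects the k largest entries along
        # `axis`; when out_max_val is true, values and indices are
        # concatenated, otherwise only the indices are returned.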
        assert len(node.inputs) == 1 and len(
            node.outputs
        ) == 1, "The count of ArgMax node\'s input and output is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        in_shapes = node.in_shapes[0]
        params = node.layer.argmax_param
        out_max_val = params.out_max_val if hasattr(
            params, 'out_max_val') else False
        top_k = params.top_k if hasattr(params, 'top_k') else 1
        axis = params.axis if hasattr(params, 'axis') else -1
        if axis < 0:
            axis += len(in_shapes)
        if out_max_val is True:
            self.paddle_graph.add_layer(
                "paddle.topk",
                inputs={"x": input.name},
                outputs=[node.name + "_topk_var", node.name + "_index_var"],
                k=top_k)
            self.paddle_graph.add_layer(
                "paddle.cast",
                inputs={"x": node.name + "_index_var"},
                outputs=[node.name + "_index_var"],
                dtype="{}_topk_var.dtype".format(node.name))
            self.paddle_graph.add_layer(
                "paddle.concat",
                inputs={"x": [node.name + "_topk_var", node.name + "_index_var"]},
                outputs=[node.name],
                axis=axis)
        else:
            self.paddle_graph.add_layer(
                "paddle.topk",
                inputs={"x": input.name},
                outputs=["_", node.name],
                k=top_k)

    def Crop(self, node):
        assert len(
            node.inputs) == 2, "The count of Crop node\'s input is not 2."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        example = self.graph.get_input_node(node, idx=1, copy=True)
        params = node.layer.crop_param
        axis = params.axis
        in_shapes = node.in_shapes[0]
        if axis < 0:
            axis += len(in_shapes)
        offset_real = [0] * len(in_shapes)
        if hasattr(params, "offset") and len(params.offset) > 0:
            offset = list(params.offset)
            assert (len(in_shapes) - axis
                    ) == len(offset), "invalid offset[%s] in crop layer" % (
                        str(offset))
            offset_real = [0] * axis + offset
        self.paddle_graph.add_layer(
                "paddle.crop",
                inputs={"x": input.name},
                outputs=[node.name],
                shape=node.in_shapes[1],
                offsets=list(offset_real))

        
    def Flatten(self, node):
        assert len(
            node.
            inputs) == 1, "The count of DetectionOutput node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.reshape",
            inputs={"x": input.name},
            outputs=[node.name],
            shape=node.out_shapes[0])

    def Power(self, node):
        assert len(
            node.inputs) == 1, "The count of Permute node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.power_param
        layer_attrs = {
            'scale': params.scale,
            'bias': params.shift,
            'bias_after_scale': True
        }
        self.paddle_graph.add_layer(
            "paddle.scale",
            inputs={"x": input.name},
            outputs=[node.name],
            **layer_attrs)
        self.paddle_graph.add_layer(
            "paddle.pow",
            inputs={"x": node.name},
            outputs=[node.name],
            exponent=params.power)

    def Reduction(self, node):
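        # Caffe ReductionParameter.operation: 1 = SUM, 2 = ASUM, 3 = SUMSQ,
        # 4 = MEAN. The reduction runs over all trailing axes starting at
        # `axis`, and the result is scaled by `coeff`.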
        assert len(
            node.inputs) == 1, "The count of Reduction node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.reduction_param
        operation = params.operation
        axis = params.axis
        coeff = params.coeff
        assert operation >= 1 and operation <= 4, "invalid reduction operation [%s]" % (
            operation)
        input_len = len(node.in_shapes[0])
        if axis < 0:
            axis += input_len + 1
        dim = list(range(input_len))
        # operation = SUM
        if operation == 1:  
            layer_attrs = {
                "dim": dim[axis:],
                "keep_dim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.sum",
                inputs={"input": input.name},
                outputs=[node.name],
                **layer_attrs)
        # operation = ASUM
        elif operation == 2:  
            self.paddle_graph.add_layer(
                "paddle.abs",
                inputs={"x": input.name},
                outputs=[node.name])
            layer_attrs = {
                "dim": dim[axis:],
                "keep_dim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.sum",
                inputs={"input": node.name},
                outputs=[node.name],
                **layer_attrs)
        # operation = SUMSQ
        elif operation == 3: 
            self.paddle_graph.add_layer(
                "paddle.pow",
                inputs={"x": input.name},
                outputs=[node.name],
                exponent=2.0)
            layer_attrs = {
                "dim": dim[axis:],
                "keep_dim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.sum",
                inputs={"input": node.name},
                outputs=[node.name],
                **layer_attrs)
        # operation = MEAN
        else: 
            layer_attrs = {
                "dim": dim[axis:],
                "keep_dim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.mean",
                inputs={"input": input.name},
                outputs=[node.name],
                **layer_attrs)
        self.paddle_graph.add_layer(
            "paddle.scale",
            inputs={"x": node.name},
            outputs=[node.name],
            scale=coeff)
        
    def Axpy(self, node):
        assert len(
            node.inputs) == 3, "The count of Axpy node\'s input is not 3."
        params = node.layer.axpy_param
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        input2 = self.graph.get_input_node(node, idx=2, copy=True)
        input0_name = input0.name
        input1_name = input1.name
        input2_name = input2.name
        inputs_dict = {}
        inputs_dict['x'] = input1_name
        inputs_dict['y'] = input0_name
        self.paddle_graph.add_layer(
            "paddle.multiply",
            inputs=inputs_dict,
            outputs=[node.name + "_mul"],
            axis=0)
        inputs_dict = {}
        inputs_dict['x'] = node.name + "_mul"
        inputs_dict['y'] = input2_name
        self.paddle_graph.add_layer(
            "paddle.add",
            inputs=inputs_dict,
            outputs=[node.name + "_mul"])
        
    def DetectionOutput(self, node):
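        # Inputs are (location predictions, confidence scores, prior boxes).
        # For the confidence input, the graph is walked back to the nearest
        # Softmax/Sigmoid and the tensor feeding that activation is used.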
        assert len(
            node.inputs) == 3, "The count of DetectionOutput node\'s input is not 3."
        inputs_dict = dict()
        for i in range(len(node.inputs)):
            input = self.graph.get_input_node(node, idx=i, copy=True)
            if i == 1:
                input = self.graph.get_input_node(node, idx=i, copy=True)
                while input is not None \
                      and input.layer_type != 'Softmax' \
                      and input.layer_type != 'Sigmoid':
                    input = self.graph.get_input_node(input, idx=0, copy=True)
                assert input is not None, 'This kind of DetectionOutput is not supported!'
                input = self.graph.get_input_node(input, idx=0, copy=True)
            inputs_dict["x{}".format(i)] = input.name
        params = node.layer.detection_output_param
        nms_param = params.nms_param
        nms_param_dict = dict()
        if nms_param is None:
            nms_param_dict = {"nms_threshold": 0.3, "top_k": 10, "eta": 1.0}
        else:
            nms_param_dict["nms_threshold"] = nms_param.nms_threshold
            nms_param_dict["top_k"] = nms_param.top_k
            nms_param_dict["eta"] = nms_param.eta
        default = {"nms_threshold": 0.3, "top_k": 10, "eta": 1.0}
        for f in default.keys():
            if f not in nms_param_dict:
                nms_param_dict[f] = default[f]
        layer_attrs = {
            "background_label": params.background_label_id,
            "nms_threshold": nms_param_dict["nms_threshold"],
            "nms_top_k": nms_param_dict["top_k"],
            "keep_top_k": params.keep_top_k,
            "score_threshold": params.confidence_threshold,
            "nms_eta": nms_param_dict["eta"]}
        self.paddle_graph.add_layer(
            kernel="custom_layer:detectionoutput",
            inputs=inputs_dict,
            outputs=[node.name],
            **layer_attrs)

    def Normalize(self, node):
        assert len(
            node.inputs) == 1, "The count of Normalize node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.norm_param
        scale_name = node.name + "_scale"
        if node.data is None or len(node.data) != 1:
            print(
                "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                .format(scale_name, node.layer_type))
            self.params[scale_name] = \
                np.zeros([1] if params.channel_shared else [node.in_shapes[0][1]]).astype("float32")
        else:
            self.params[scale_name] = _adjust_parameters(node)[0]
        
        layer_attrs = {
            "axis": -1 if params.channel_shared else 1,
            "param_name": string(scale_name),
            "param_shape": self.params[scale_name].shape,
            "param_dtype": string(self.params[scale_name].dtype)}
        self.paddle_graph.add_layer(
            "custom_layer:normalize",
            inputs={"x": input.name},
            outputs=[node.name],
            **layer_attrs)
        
    def Permute(self, node):
        assert len(
            node.inputs) == 1, "The count of Permute node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.permute_param
        order = list(params.order)    
        self.paddle_graph.add_layer(
            "paddle.transpose",
            inputs={"x": input.name},
            outputs=[node.name],
            perm=order)
        
    def PriorBox(self, node):
        assert len(
            node.inputs) == 2, "The count of PriorBox node\'s input is not 2."
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        inputs_dict = {}
        inputs_dict["x0"] = input0.name
        inputs_dict["x1"] = input1.name
        params = node.layer.prior_box_param
        steps = tuple(params.step) if isinstance(params.step, (list, tuple)) \
                else (params.step, params.step)
        layer_attrs = {
            "min_sizes": params.min_size,
            "max_sizes": params.max_size,
            "aspect_ratios": params.aspect_ratio,
            "variance": params.variance,
            "flip": params.flip,
            "clip": params.clip,
            "steps": steps,
            "offset": params.offset,
            "min_max_aspect_ratios_order": True}
        self.paddle_graph.add_layer(
            "custom_layer:priorbox",
            inputs=inputs_dict,
            outputs=[node.name],
            **layer_attrs)
        
    def ReLU6(self, node):
        assert len(
            node.inputs) == 1, "The count of RelU6 node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.nn.functional.relu6",
            inputs={"x": input.name},
            outputs=[node.name])
        
    def ROIPooling(self, node):
        assert len(
            node.inputs) == 2, "The count of ROIPooling node\'s input is not 2."
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        inputs_dict = {}
        inputs_dict["x0"] = input0.name
        inputs_dict["x1"] = input1.name
        params = node.layer.roi_pooling_param
        layer_attrs = {
            "pooled_height": params.pooled_h,
            "pooled_width": params.pooled_w,
            "spatial_scale": params.spatial_scale}
        self.paddle_graph.add_layer(
            "custom_layer:ROIPooling",
            inputs=inputs_dict,
            outputs=[node.name],
            **layer_attrs)
        
    def ShuffleChannel(self, node):
        assert len(
            node.inputs) == 1, "The count of ShuffleChannel node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.shuffle_channel_param
        self.paddle_graph.add_layer(
            "paddle.fluid.layers.shuffle_channel",
            inputs={"x": input.name},
            outputs=[node.layer_name],
            group=params.group)
        
    def Upsample(self, node):
        assert len(
            node.inputs) == 1, "The count of Upsample node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.upsample_param
        layer_attrs = {
            "align_corners": False,
            "scale_factor": params.scale,
            "mode": "nearest"}
        self.paddle_graph.add_layer(
            "paddle.nn.functioanl.interpolate",
            inputs={"input": input.name},
            outputs=[node.layer_name],
            **layer_attrs)
    
    def Select(self, node):
        assert len(
            node.inputs) == 1, "The count of Select node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        in_shapes = node.in_shapes[0]
        params = node.layer.select_param
        layer_attrs = {
            "in_shapes": in_shapes,
            "point": params.slice_point,
            "axis": params.axis}
        self.paddle_graph.add_layer(
            "custom_layer:select",
            inputs={"x": input.name},
            outputs=[node.name],
            **layer_attrs)