#   Copyright (c) 2019  PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import numbers
import copy
import numpy as np
from x2paddle.decoder.caffe_decoder import CaffeGraph, CaffeGraphNode
from x2paddle.core.op_mapper import OpMapper
from x2paddle.core.util import *
from x2paddle.core.program import PaddleGraph 


def _adjust_parameters(node):
    data = node.data
    # When using the protobuf-backend, each parameter initially has four dimensions.
    # In certain cases (like FC layers), we want to eliminate the singleton dimensions.
    # This implementation takes care of the common cases. However, it does leave the
    # potential for future issues.
    # The Caffe-backend does not suffer from this problem.
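    # For example, a protobuf-backend FC weight blob of shape (1, 1, 64, 128)
    # would be squeezed to (64, 128), and a bias blob of shape (1, 1, 1, 64)
    # to (64,). (Shapes here are illustrative, not taken from a real model.)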
    data = list(data)

    squeeze_indices = [1]  # Squeeze biases.
    if node.layer_type == 'InnerProduct':
        squeeze_indices.append(0)  # Squeeze FC.

    for idx in squeeze_indices:
        if idx >= len(data):
            continue

        d = data[idx]
        assert len(
            d.shape
        ) == 4, 'invalid shape[%s] from caffe when adjust_parameters' % (
            str(d.shape))

        shape_old = d.shape
        sq_axis = None
        if idx == 0:
            sq_axis = (0, 1)
        elif idx == 1:
            sq_axis = (0, 1, 2)
        else:
            continue

        data[idx] = np.squeeze(d, axis=sq_axis)
        shape_new = data[idx].shape
    return data

def _get_kernel_parameters(kind, params):
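    """Normalize the kernel/stride/pad/dilation settings of a Caffe layer
    into explicit [h, w] pairs.

    Caffe allows each of kernel_size/stride/pad to be a scalar, a repeated
    field, or separate *_h/*_w overrides; the *_h/*_w values take precedence
    when they are positive. Returns (c_o, kernel, stride, pad, dilation,
    group), where c_o is num_output for conv-like layers and 1 otherwise.
    """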
    assert kind in ["Convolution", "Pooling", "Deconvolution", "ConvolutionDepthwise"]
    [k_h, k_w] = [1, 1]
    if isinstance(params.kernel_size, numbers.Number):
        [k_h, k_w] = [params.kernel_size] * 2
    elif len(params.kernel_size) > 0:
        k_h = params.kernel_h if params.kernel_h > 0 else params.kernel_size[
            0]
        k_w = params.kernel_w if params.kernel_w > 0 else params.kernel_size[
            len(params.kernel_size) - 1]
    elif params.kernel_h > 0 or params.kernel_w > 0:
        k_h = params.kernel_h
        k_w = params.kernel_w
    [s_h, s_w] = [1, 1]
    if isinstance(params.stride, numbers.Number):
        [s_h, s_w] = [params.stride] * 2
    elif len(params.stride) > 0:
        s_h = params.stride_h if params.stride_h > 0 else params.stride[0]
        s_w = params.stride_w if params.stride_w > 0 else params.stride[len(
            params.stride) - 1]
    elif params.stride_h > 0 or params.stride_w > 0:
        s_h = params.stride_h
        s_w = params.stride_w
    [p_h, p_w] = [0, 0]
    if isinstance(params.pad, numbers.Number):
        [p_h, p_w] = [params.pad] * 2
    elif len(params.pad) > 0:
        p_h = params.pad_h if params.pad_h > 0 else params.pad[0]
        p_w = params.pad_w if params.pad_w > 0 else params.pad[len(
            params.pad) - 1]
    elif params.pad_h > 0 or params.pad_w > 0:
        p_h = params.pad_h
        p_w = params.pad_w
    dila_h = dila_w = 1
    group = 1
    c_o = 1
    if kind in ["Convolution", "Deconvolution", "ConvolutionDepthwise"]:
        if kind in ["Convolution", "Deconvolution"]:
            c_o = params.num_output
        dila_len = len(params.dilation)
        if dila_len == 2:
            dila_h = params.dilation[0]
            dila_w = params.dilation[1]
        elif dila_len == 1:
            dila_h = dila_w = params.dilation[0]
        else:
            assert dila_len == 0, "invalid length[%s] of dilation in convolution" % (
                dila_len)
    if kind in ['Convolution', 'Deconvolution']:
        group = params.group
    kernel = [k_h, k_w]
    stride = [s_h, s_w]
    pad = [p_h, p_w]
    dilation = [dila_h, dila_w]
    return c_o, kernel, stride, pad, dilation, group


class CaffeOpMapper(OpMapper):
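    """Maps the ops of a CaffeGraph onto a static PaddleGraph.

    Each supported Caffe layer type is handled by a same-named method of
    this class; trivial one-to-one activations are listed in
    `directly_map_ops` and routed through `directly_map` instead.
    """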
    directly_map_ops = {
        'AbsVal': 'paddle.abs',
        'Sigmoid': 'paddle.nn.functional.sigmoid',
        'TanH': 'paddle.tanh',
    }

    def __init__(self, decoder):
        super(CaffeOpMapper, self).__init__()
        self.graph = decoder.caffe_graph
        self.params = dict()
        resolver = decoder.resolver
        self.used_custom_layers = {}
        self.paddle_graph = PaddleGraph(parent_layer=None, graph_type="static", source_type="caffe")
        self.paddle_graph.inputs = self.graph.input_nodes
        self.paddle_graph.outputs = self.graph.output_nodes

        print("Total nodes: {}".format(
            sum([
                isinstance(node, CaffeGraphNode)
                for name, node in self.graph.node_map.items()
            ])))
        print("Nodes converting ...")
        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            op = node.layer_type
            if hasattr(self, op):
                func = getattr(self, op)
                func(node)
            elif op in self.directly_map_ops:
                self.directly_map(node)
        print("\nNodes converted.")
        self.paddle_graph.set_parameters(self.params)
        self.paddle_graph.set_custom(self.used_custom_layers)

    def op_checker(self):
        unsupported_ops = set()
        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            op = node.layer_type
            if not hasattr(self, op) and \
                op not in self.directly_map_ops and \
                op not in self.elementwise_ops:
                unsupported_ops.add(op)
        if len(unsupported_ops) == 0:
            return True
        else:
            print("\n========= {} OPs are not supported yet ===========".format(
                len(unsupported_ops)))
            for op in unsupported_ops:
                print("========== {} ============".format(op))
            return False
        
    def directly_map(self, node):
        assert node.layer_type in self.directly_map_ops
        op_info = self.directly_map_ops[node.layer_type]
        input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            kernel=op_info,
            inputs={"x": input.name},
            outputs=[node.name])

    def Input(self, node):
        shape = list(node.layer.input_param.shape[0].dim)[1:]
        dtype = 'float32'
        layer_attrs = {
            "dtype": string(dtype),
            "shape": [-1] + shape,
            "name": string(node.name)
        }
        self.paddle_graph.add_layer(
            kernel="paddle.static.data",
            inputs={},
            outputs=[node.name],
            **layer_attrs)

    def Convolution(self, node):
        data = node.data
        params = node.layer.convolution_param
        channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
            node.layer_type, params)
        if data is None:
            data = []
            print(
                "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                .format(node.name, node.layer_type))
            input_c = node.in_shapes[0][1]
            output_c = channel
            data.append(
                np.zeros([output_c, input_c, kernel[0], kernel[1]]).astype(
                    'float32'))
            data.append(np.zeros([output_c, ]).astype('float32'))
        else:
            data = _adjust_parameters(node)
        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = data[0]
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=self.params[kernel_weight_name].shape,
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        if len(data) == 2:
            kernel_bias_name = node.name + '_bias'
            self.params[kernel_bias_name] = data[1]
            self.paddle_graph.add_layer(
                kernel="paddle.static.create_parameter",
                inputs={},
                outputs=[kernel_bias_name],
                shape=self.params[kernel_bias_name].shape,
                dtype=string(str(self.params[kernel_bias_name].dtype)),
                name=string(kernel_bias_name))
        assert len(node.inputs
                   ) == 1, 'The count of Convolution node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_inputs = {"x": input.name, 
                        "weight": kernel_weight_name}
        layer_attrs = {'stride': stride,
                       'padding': pad,
                       'dilation': dilation,
                       'groups': group}
        if len(data) == 2:
            layer_inputs["bias"] = kernel_bias_name
        else:
            layer_attrs["bias"] = None
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.conv2d",
            inputs=layer_inputs,
            outputs=[node.name],
            **layer_attrs) 
        
    def Deconvolution(self, node):
        data = node.data
        params = node.layer.convolution_param
        channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
            node.layer_type, params)
        if data is None:
            data = []
            print(
                'The parameter of {} (type is {}) is not set. So we set the parameters as 0'
                .format(node.name, node.layer_type))
            input_c = node.in_shapes[0][1]
            output_c = channel
            data.append(
                np.zeros([output_c, input_c, kernel[0], kernel[1]]).astype(
                    'float32'))
            data.append(np.zeros([output_c, ]).astype('float32'))
        else:
            data = _adjust_parameters(node)
        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = data[0]
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=self.params[kernel_weight_name].shape,
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        if len(data) == 2:
            kernel_bias_name = node.name + '_bias'
            self.params[kernel_bias_name] = data[1]
            self.paddle_graph.add_layer(
                kernel="paddle.static.create_parameter",
                inputs={},
                outputs=[kernel_bias_name],
                shape=self.params[kernel_bias_name].shape,
                dtype=string(str(self.params[kernel_bias_name].dtype)),
                name=string(kernel_bias_name))
        assert len(node.inputs
                   ) == 1, 'The count of Deconvolution node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_inputs = {"x": input.name, 
                        "weight": kernel_weight_name}
        layer_attrs = {'stride': stride,
                       'padding': pad,
                       'dilation': dilation,
                       'groups': group}
        if len(data) == 2:
            layer_inputs["bias"] = kernel_bias_name
        else:
            layer_attrs["bias"] = None
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.conv2d_transpose",
            inputs=layer_inputs,
            outputs=[node.name],
            **layer_attrs)    
        
    def DepthwiseConvolution(self, node):
        node.layer_type = "ConvolutionDepthwise"
        self.ConvolutionDepthwise(node)
        
    def ConvolutionDepthwise(self, node):
        data = node.data
        params = node.layer.convolution_param
        out_channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
            node.layer_type, params)
        out_channel = params.num_output if params.num_output is not None else node.in_shapes[0][1]
        in_channel = node.in_shapes[0][1]
        group = int(in_channel / (in_channel / out_channel)) if in_channel > out_channel else int(in_channel /
                                                                (out_channel / in_channel))
        if data is None:
            data = []
            print(
                "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                .format(node.layer_name, node.layer_type))
            data.append(
                np.zeros([out_channel, node.in_shapes[0][1], kernel[0], kernel[1]]).astype(
                    'float32'))
            data.append(np.zeros([out_channel, ]).astype('float32'))
        else:
            data = _adjust_parameters(node)
        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = data[0]
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=self.params[kernel_weight_name].shape,
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        if len(data) == 2:
            kernel_bias_name = node.name + '_bias'
            self.params[kernel_bias_name] = data[1]
            self.paddle_graph.add_layer(
                kernel="paddle.static.create_parameter",
                inputs={},
                outputs=[kernel_bias_name],
                shape=self.params[kernel_bias_name].shape,
                dtype=string(str(self.params[kernel_bias_name].dtype)),
                name=string(kernel_bias_name))
        assert len(node.inputs
                   ) == 1, "The count of Deconvolution node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_inputs = {"x": input.name, 
                        "weight": kernel_weight_name}
        layer_attrs = {'stride': stride,
                       'padding': pad,
                       'dilation': dilation,
                       'groups': group}
        if len(data) == 2:
            layer_inputs["bias"] = kernel_bias_name
        else:
            layer_attrs["bias"] = None
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.conv2d",
            inputs=layer_inputs,
            outputs=[node.name],
            **layer_attrs) 

    def Pooling(self, node):
        params = node.layer.pooling_param
        ceil_mode = getattr(params, 'ceil_mode', True)
        global_pool = getattr(params, 'global_pooling', False)
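        # global_pooling in Caffe collapses each channel to a single value,
        # which maps to Paddle's adaptive pooling with output_size = [1, 1].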
        kernel_default = [1, 1]
        channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
            node.layer_type, params)
        assert len(
            node.inputs) == 1, 'The count of Pooling node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        if global_pool:
            if kernel[0] == 0:
                kernel = [1, 1]
            if params.pool == 0:
                self.paddle_graph.add_layer(
                    "paddle.nn.functional.adaptive_max_pool2d",
                    inputs={"x": input.name},
                    outputs=[node.name],
                    output_size=kernel)
            else:
                self.paddle_graph.add_layer(
                    "paddle.nn.functional.adaptive_avg_pool2d",
                    inputs={"x": input.name},
                    outputs=[node.name],
                    output_size=kernel)
        else:
            if params.pool == 0:
                self.paddle_graph.add_layer(
                    kernel="paddle.nn.functional.max_pool2d",
                    inputs={"x": input.name},
                    outputs=[node.name],
                    kernel_size=kernel,
                    stride=stride,
                    padding=pad,
                    ceil_mode=ceil_mode)
            else:
                self.paddle_graph.add_layer(
                    kernel="paddle.nn.functional.avg_pool2d",
                    inputs={"x": input.name},
                    outputs=[node.name],
                    kernel_size=kernel,
                    stride=stride,
                    padding=pad,
                    ceil_mode=ceil_mode)

    def LRN(self, node):
        assert len(node.inputs) == 1, 'The count of LRN node\'s input is not 1.'
        params = node.layer.lrn_param
        # The window size must be an odd value. For a window
        # size of (2*n+1), Paddle defines depth_radius = n.
        assert params.local_size % 2 == 1
        # Caffe scales by (alpha/(2*n+1)), whereas Paddle
        # just scales by alpha (as does Krizhevsky's paper).
        # We'll account for that here.
        alpha = params.alpha / float(params.local_size)
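        # e.g. local_size = 5 with alpha = 1e-4 in the prototxt becomes
        # n = 5 with alpha = 2e-5 on the Paddle side.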
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_attrs = {
            'n': params.local_size,
            'k': params.k,
            'alpha': alpha,
            'beta': params.beta,
            'name': string(node.name)
        }
        self.paddle_graph.add_layer(
            kernel="paddle.fluid.layers.lrn",
            inputs={"input": input.name},
            outputs=[node.name],
            **layer_attrs)

    def InnerProduct(self, node):
        data = node.data
        params = node.layer.inner_product_param
        if data is None:
            print(
                'The parameter of {} (type is {}) is not set. So we set the parameters as 0.'
                .format(node.layer_name, node.layer_type))
            input_c = node.in_shapes[0][1]
            output_c = params.num_output
            data = []
            data.append(
                np.zeros([input_c, output_c]).astype('float32'))
            data.append(
                np.zeros([output_c]).astype('float32'))
        else:
            data = _adjust_parameters(node)
            # Reshape the parameters to Paddle's ordering
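            # Caffe stores FC weights as (num_output, input_dim), while
            # paddle.nn.functional.linear expects (input_dim, num_output),
            # hence the flatten-and-transpose below.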
            transpose_order = (1, 0)
            w = data[0]
            fc_shape = w.shape
            output_channels = fc_shape[0]
            w = w.reshape((output_channels, -1))
            w = w.transpose(transpose_order)
            data[0] = w

        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = data[0]
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=self.params[kernel_weight_name].shape,
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        if len(data) == 2:
            kernel_bias_name = node.name + '_bias'
            self.params[kernel_bias_name] = data[1]
            self.paddle_graph.add_layer(
                kernel="paddle.static.create_parameter",
                inputs={},
                outputs=[kernel_bias_name],
                shape=self.params[kernel_bias_name].shape,
                dtype=string(str(self.params[kernel_bias_name].dtype)),
                name=string(kernel_bias_name))
        assert len(node.inputs
                   ) == 1, 'The count of InnerProduct node\'s input is not 1.'
        assert params.axis == 1
        assert params.bias_term == True
        input = self.graph.get_input_node(node, idx=0, copy=True)
        layer_inputs = {"x": input.name, 
                        "weight": kernel_weight_name}
        layer_attrs = dict()
        if len(data) == 2:
            layer_inputs["bias"] = kernel_bias_name
        else:
            layer_attrs["bias"] = None
        if node.in_shapes[0][-1] != data[0].shape[0]:
            self.paddle_graph.add_layer(
                "paddle.reshape",
                inputs={"x": input.name},
                outputs=[input.name],
                shape=[-1, data[0].shape[0]])
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.linear",
            inputs=layer_inputs,
            outputs=[node.name],
            **layer_attrs)

    def Softmax(self, node):
        assert len(
            node.inputs) == 1, 'The count of Softmax node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.softmax_param
        axis = params.axis
        shape = node.in_shapes[0]
        dims = len(shape)
        axis = axis + dims if axis < 0 else axis
        layer_attrs = {'axis': axis, 'name': string(node.layer_name + '_softmax')}
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.softmax",
            inputs={"x": input.name},
            outputs=[node.layer_name],
            **layer_attrs)

    def Slice(self, node):
        assert len(
            node.inputs) == 1, "The count of Slice node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        top_len = len(node.layer.top)
        params = node.layer.slice_param
        axis = params.axis
        slice_dim = params.slice_dim
        if slice_dim != 1 and axis == 1:
            axis = slice_dim
        output_shape = node.out_shapes
        sections_list = list()
        outputs_list = list()
        for i, s in enumerate(output_shape):
            sections_list.append(s[axis])
            outputs_list.append("{}_p{}".format(node.layer_name, i))
        layer_attrs = {
            'num_or_sections': sections_list,
            'axis': axis,
        }
        self.paddle_graph.add_layer(
            "paddle.split",
            inputs={"x": input.name},
            outputs=outputs_list,
            **layer_attrs)

    def Concat(self, node):
        assert len(
            node.inputs
        ) >= 1, 'The count of Concat node\'s input is less than 1.'
        inputs_list = []
        for i in range(len(node.inputs)):
            input = self.graph.get_input_node(node, idx=i, copy=True)
            inputs_list.append(input.name)
        params = node.layer.concat_param
        axis = params.axis
        layer_attrs = {'axis': axis, 'name': string(node.name)}
        self.paddle_graph.add_layer(
            kernel="paddle.concat",
            inputs={"x": inputs_list},
            outputs=[node.name],
            **layer_attrs)

    def ReLU(self, node):
        """
S
SunAhong1993 已提交
574

575 576 577 578 579
        :param node:
        :return:
        """
        assert len(
            node.inputs) == 1, 'The count of ReLU node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)

        params = node.layer.relu_param
        if params.HasField('negative_slope') and params.negative_slope != 0:
            negative_slope = float(params.negative_slope)
            self.paddle_graph.add_layer(
                kernel="paddle.nn.functional.leaky_relu",
                inputs={"x": input.name},
                outputs=[node.name],
                negative_slope=negative_slope)
        else:
            self.paddle_graph.add_layer(
                kernel="paddle.nn.functional.relu",
                inputs={"x": input.name},
                outputs=[node.name])

    def PReLU(self, node):
        assert len(
            node.inputs) == 1, 'The count of PReLU node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.prelu_param
        mode_bool = params.channel_shared
        output_shape = node.out_shapes[0]
        if mode_bool:
            num_parameters = 1
        else:
            num_parameters = output_shape[1]
        data = node.data
        assert data is not None, 'The parameter of {} (type is {}) is not set. You need to use python package of caffe to set the default value.'.format(
            node.name, node.layer_type)
        kernel_weight_name = node.name + '_weights'
        self.params[kernel_weight_name] = np.squeeze(data[0])
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[kernel_weight_name],
            shape=[num_parameters],
            dtype=string(str(self.params[kernel_weight_name].dtype)),
            name=string(kernel_weight_name))
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.prelu",
            inputs={"x": input.name,
                    "weight": kernel_weight_name},
            outputs=[node.name])

    def Eltwise(self, node):
        assert len(
            node.inputs) == 2, "The count of Eltwise node\'s input is not 2."
        params = node.layer.eltwise_param
        mode = params.operation
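        # EltwiseParameter.EltwiseOp in caffe.proto: 0 = PROD, 1 = SUM, 2 = MAX.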
        inputs = []
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        input0_name = input0.name
        input1_name = input1.name
        if mode == 0:
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = input1_name
            self.paddle_graph.add_layer(
                "paddle.multiply",
J
jiangjiajun 已提交
641
                inputs=inputs_dict,
S
SunAhong1993 已提交
642
                outputs=[node.name])
        elif mode == 1:
            if hasattr(params, 'coeff') and len(params.coeff) == 2:
                coeff = params.coeff
                self.paddle_graph.add_layer(
                    "paddle.scale",
                    inputs={"x": input0_name},
                    outputs=[node.name + '_mul0'],
                    scale=coeff[0])
                self.paddle_graph.add_layer(
                    "paddle.scale",
                    inputs={"x": input1_name},
                    outputs=[node.name + '_mul1'],
                    scale=coeff[1])
                inputs_dict = {}
                inputs_dict['x'] = node.name + '_mul0'
                inputs_dict['y'] = node.name + '_mul1'
                self.paddle_graph.add_layer(
                    "paddle.add",
                    inputs=inputs_dict,
                    outputs=[node.name])
            else:
                inputs_dict = {}
                inputs_dict['x'] = input0_name
                inputs_dict['y'] = input1_name
                self.paddle_graph.add_layer(
                    "paddle.add",
                    inputs=inputs_dict,
                    outputs=[node.name])
        else:
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = input1_name
            self.paddle_graph.add_layer(
                "paddle.max",
                inputs=inputs_dict,
                outputs=[node.name])

    def BatchNorm(self, node):
        assert len(
            node.inputs) == 1, 'The count of BatchNorm node\'s input is not 1.'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.batch_norm_param
        if hasattr(params, 'eps'):
            eps = params.eps
        else:
            eps = 1e-5
        if hasattr(params, 'moving_average_fraction'):
            momentum = params.moving_average_fraction
        else:
            momentum = 0.9
        if node.data is None or len(node.data) != 3:
            print(
                'The parameter of {} (type is {}) is not set. So we set the parameters as 0'
                .format(node.layer_name, node.layer_type))
            input_c = node.in_shapes[0][1]
            mean = np.zeros([input_c, ]).astype('float32')
            variance = np.zeros([input_c, ]).astype('float32')
            scale = 0
        else:
            node.data = [np.squeeze(i).astype('float32') for i in node.data]
            mean, variance, scale = node.data
        # Prescale the stats
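        # Caffe's BatchNorm keeps a third blob holding a moving-average scale
        # factor; the stored mean/variance must be divided by it. A factor of
        # zero is mapped to zero to avoid division by zero.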
        scaling_factor = 1.0 / scale if scale != 0 else 0
        mean *= scaling_factor
        variance *= scaling_factor
        weight_name = node.name + '_weight'
        self.paddle_graph.add_layer(
            kernel="paddle.ones",
            inputs={},
            outputs=[weight_name],
            shape=mean.shape,
            dtype=string("float32"))
        bias_name = node.name + '_bias'
        self.paddle_graph.add_layer(
            kernel="paddle.zeros",
            inputs={},
            outputs=[bias_name],
            shape=mean.shape,
            dtype=string("float32"))
        mean_name = node.name + '_mean'
        self.params[mean_name] = mean
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[mean_name],
            shape=self.params[mean_name].shape,
            dtype=string(str(self.params[mean_name].dtype)),
            name=string(mean_name))
        variance_name = node.name + '_variance'
        self.params[variance_name] = variance
        self.paddle_graph.add_layer(
            kernel="paddle.static.create_parameter",
            inputs={},
            outputs=[variance_name],
            shape=self.params[variance_name].shape,
            dtype=string(str(self.params[variance_name].dtype)),
            name=string(variance_name))
        layer_attrs = {
            'epsilon': eps,
            'momentum': momentum
        }
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.batch_norm",
            inputs={"x": input.name,
                    "weight": weight_name,
                    "bias": bias_name,
                    "running_mean": mean_name,
                    "running_var": variance_name,},
            outputs=[node.name],
            **layer_attrs)

    def Scale(self, node):
        if node.data is None:
            print(
                "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                .format(node.name, node.layer_type))
            self.params[node.name + "_cparam1"] = np.zeros([
                node.in_shapes[0][1],
            ]).astype("float32")
            self.params[node.name + "_cparam2"] = np.zeros([
                node.in_shapes[0][1],
            ]).astype("float32")
        else:
            self.params[node.name + "_cparam1"] = np.squeeze(node.data[
                0]).astype("float32")
            self.params[node.name + "_cparam2"] = np.squeeze(node.data[
                1]).astype("float32")
        params = node.layer.scale_param
        axis = params.axis
        inputs = []
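        # With two inputs, Caffe's Scale multiplies the two blobs; with one
        # input, the multiplier is the learned parameter created below. In
        # both cases the second learned parameter is then added as the bias.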
        if len(node.inputs) == 2:
            input0 = self.graph.get_input_node(node, idx=0, copy=True)
            input1 = self.graph.get_input_node(node, idx=1, copy=True)
            input0_name = input0.name
            input1_name = input1.name
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = input1_name
            self.paddle_graph.add_layer(
                "paddle.multiply",
J
jiangjiajun 已提交
784
                inputs=inputs_dict,
S
SunAhong1993 已提交
785 786
                outputs=[node.name + "_mul"],
                axis=1)
        else:
            self.paddle_graph.add_layer(
                "paddle.static.create_parameter",
                inputs={},
                outputs=[node.name + "_cparam1"],
                shape=self.params[node.name + "_cparam1"].shape,
                dtype=string(str(self.params[node.name + "_cparam1"].dtype)),
                name=string(node.name + "_cparam1"))
            input0 = self.graph.get_input_node(node, idx=0, copy=True)
            input0_name = input0.name
            inputs_dict = {}
            inputs_dict['x'] = input0_name
            inputs_dict['y'] = node.name + "_cparam1"
            self.paddle_graph.add_layer(
                "paddle.multiply",
J
jiangjiajun 已提交
802
                inputs=inputs_dict,
S
SunAhong1993 已提交
803
                outputs=[node.name + "_mul"],
                axis=axis)
        self.paddle_graph.add_layer(
            "paddle.static.create_parameter",
            inputs={},
            outputs=[node.name + "_cparam2"],
            shape=self.params[node.name + "_cparam2"].shape,
            dtype=string(str(self.params[node.name + "_cparam2"].dtype)),
            name=string(node.name + "_cparam2"))
        inputs_dict = {}
        inputs_dict['x'] = node.name + "_mul"
        inputs_dict['y'] = node.name + "_cparam2"
        output_shape = node.out_shapes[0]
        if axis == -1:
            self.paddle_graph.add_layer(
                "paddle.add",
                inputs=inputs_dict,
                outputs=[node.name])
        else:
            if axis < 0:
                axis = axis + len(output_shape)
            param2_shape = self.params[node.name + "_cparam2"].shape
            param2_shape_len = len(param2_shape)
            diff_len = len(output_shape) - axis - param2_shape_len
            new_shape = list(param2_shape) + [1] * diff_len
            self.paddle_graph.add_layer(
                "paddle.reshape",
                inputs={"x": node.name + "_cparam2"},
                outputs=[node.name + "_cparam2"],
                shape=new_shape)
            self.paddle_graph.add_layer(
                "paddle.add",
                inputs=inputs_dict,
                outputs=[node.name])

    def Reshape(self, node):
        input = self.graph.get_input_node(node, idx=0, copy=True)
        output_shape = node.out_shapes[0]
        self.paddle_graph.add_layer(
            "paddle.reshape",
            inputs={"x": input.name},
            outputs=[node.name],
            shape=output_shape)

    def ArgMax(self, node):
        assert len(node.inputs) == 1 and len(
            node.outputs
        ) == 1, "The count of ArgMax node\'s input and output is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        in_shapes = node.in_shapes[0]
        params = node.layer.argmax_param
        out_max_val = params.out_max_val if hasattr(params,
                                                    'out_max_val') else False
        top_k = params.top_k if hasattr(params, 'top_k') else 1
        axis = params.axis if hasattr(params, 'axis') else -1
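        # out_max_val=True asks Caffe to emit (value, index) pairs; this is
        # emulated below with topk + cast + concat.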
        if axis < 0:
            axis += len(in_shapes)
        if out_max_val is True:
            self.paddle_graph.add_layer(
                "paddle.topk",
                inputs={"x": input.name},
                outputs=[node.name + "_topk_var", node.name + "_index_var"],
                k=top_k)
            self.paddle_graph.add_layer(
                "paddle.cast",
                inputs={"x": node.name + "_index_var"},
                outputs=[node.name + "_index_var"],
                dtype="{}_topk_var.dtype".format(node.name))
            self.paddle_graph.add_layer(
                "paddle.concat",
                inputs={"x": [node.name + "_topk_var", node.name + "_index_var"]},
                outputs=[node.name],
                axis=axis)
        else:
            self.paddle_graph.add_layer(
                "paddle.topk",
                inputs={"x": input.name},
                outputs=["_", node.name],
                k=top_k)

    def Crop(self, node):
        assert len(
            node.inputs) == 2, "The count of Crop node\'s input is not 2."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        example = self.graph.get_input_node(node, idx=1, copy=True)
        params = node.layer.crop_param
        axis = params.axis
        in_shapes = node.in_shapes[0]
        if axis < 0:
            axis += len(in_shapes)
        offset_real = [0] * len(in_shapes)
        if hasattr(params, "offset") and len(params.offset) > 0:
            offset = list(params.offset)
            assert (len(in_shapes) - axis
                    ) == len(offset), "invalid offset[%s] in crop layer" % (
                        str(offset))
            offset_real = [0] * axis + offset
        self.paddle_graph.add_layer(
                "paddle.crop",
                inputs={"x": input.name},
                outputs=[node.name],
                shape=node.in_shapes[1],
                offsets=list(offset_real))

    def Flatten(self, node):
        assert len(
            node.
            inputs) == 1, "The count of DetectionOutput node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.reshape",
            inputs={"x": input.name},
            outputs=[node.name],
            shape=node.out_shapes[0])

    def Power(self, node):
        assert len(
            node.inputs) == 1, "The count of Permute node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.power_param
        layer_attrs = {
            'scale': params.scale,
            'bias': params.shift,
            'bias_after_scale': True
        }
        self.paddle_graph.add_layer(
            "paddle.scale",
            inputs={"x": input.name},
            outputs=[node.name],
            **layer_attrs)
        self.paddle_graph.add_layer(
            "paddle.pow",
            inputs={"x": node.name},
            outputs=[node.name],
            exponent=params.power)

    def Reduction(self, node):
        assert len(
            node.inputs) == 1, "The count of Reduction node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.reduction_param
        operation = params.operation
        axis = params.axis
        coeff = params.coeff
        assert operation >= 1 and operation <= 4, "invalid reduction operation [%s]" % (
            operation)
        input_len = len(node.in_shapes[0])
        if axis < 0:
            axis += input_len + 1
        dim = list(range(input_len))
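        # Caffe's Reduction reduces over all axes from `axis` to the last one,
        # hence dim[axis:] below; coeff is applied afterwards via paddle.scale.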
        # operation = SUM
        if operation == 1:  
            layer_attrs = {
                "dim": dim[axis:],
                "keep_dim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.sum",
                inputs={"input": input.name},
                outputs=[node.name],
                **layer_attrs)
        # operation = ASUM
        elif operation == 2:  
            self.paddle_graph.add_layer(
                "paddle.abs",
                inputs={"x": input.name},
                outputs=[node.name])
            layer_attrs = {
                "dim": dim[axis:],
                "keep_dim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.sum",
                inputs={"input": node.name},
                outputs=[node.name],
                **layer_attrs)
        # operation = SUMSQ
        elif operation == 3: 
            self.paddle_graph.add_layer(
                "paddle.pow",
                inputs={"x": input.name},
                outputs=[node.name],
                exponent=2.0)
            layer_attrs = {
                "dim": dim[axis:],
                "keep_dim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.sum",
                inputs={"input": node.name},
                outputs=[node.name],
                **layer_attrs)
        # operation = MEAN
        else: 
            layer_attrs = {
                "dim": dim[axis:],
                "keep_dim": False,
            }
            self.paddle_graph.add_layer(
                "paddle.mean",
                inputs={"input": input.name},
                outputs=[node.name],
                **layer_attrs)
        self.paddle_graph.add_layer(
            "paddle.scale",
            inputs={"x": node.name},
            outputs=[node.name],
            scale=coeff)
        
    def Axpy(self, node):
        assert len(
            node.inputs) == 3, "The count of Axpy node\'s input is not 3."
        params = node.layer.axpy_param
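        # Axpy computes a * x + y elementwise: input0 is the per-channel
        # scale a, input1 is x, and input2 is the addend y.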
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        input2 = self.graph.get_input_node(node, idx=2, copy=True)
        input0_name = input0.name
        input1_name = input1.name
        input2_name = input2.name
        inputs_dict = {}
        inputs_dict['x'] = input1_name
        inputs_dict['y'] = input0_name
        self.paddle_graph.add_layer(
            "paddle.multiply",
            inputs=inputs_dict,
            outputs=[node.name + "_mul"],
            axis=0)
        inputs_dict = {}
        inputs_dict['x'] = node.name + "_mul"
        inputs_dict['y'] = input2_name
        self.paddle_graph.add_layer(
            "paddle.add",
            inputs=inputs_dict,
            outputs=[node.name + "_mul"])
        
    def DetectionOutput(self, node):
        assert len(
            node.inputs) == 3, "The count of DetectionOutput node\'s input is not 3."
        inputs_dict = dict()
        for i in range(len(node.inputs)):
            input = self.graph.get_input_node(node, idx=i, copy=True)
            if i == 1:
                input = self.graph.get_input_node(node, idx=i, copy=True)
                while input is not None \
                      and input.layer_type != 'Softmax' \
                      and input.layer_type != 'Sigmoid':
                    input = self.graph.get_input_node(input, idx=0, copy=True)
                assert input is not None, 'This kind of DetectionOutput is not supported!'
                input = self.graph.get_input_node(input, idx=0, copy=True)
            inputs_dict["x{}".format(i)] = input.name
        params = node.layer.detection_output_param
        nms_param = params.nms_param
        if nms_param is None:
            nms_param_dict = {"nms_threshold": 0.3, "top_k": 10, "eta": 1.0}
        else:
            nms_param_dict = {
                "nms_threshold": nms_param.nms_threshold,
                "top_k": nms_param.top_k,
                "eta": nms_param.eta}
        layer_attrs = {
            "background_label": params.background_label_id,
            "nms_threshold": nms_param_dict["nms_threshold"],
            "nms_top_k": nms_param_dict["top_k"],
            "keep_top_k": params.keep_top_k,
            "score_threshold": params.confidence_threshold,
            "nms_eta": nms_param_dict["eta"]}
        self.paddle_graph.add_layer(
            kernel="custom_layer:detectionoutput",
            inputs=inputs_dict,
            outputs=[node.name],
            **layer_attrs)

    def Normalize(self, node):
        assert len(
            node.inputs) == 1, "The count of Normalize node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.norm_param
        scale_name = node.name + "_scale"
        if node.data is None or len(node.data) != 1:
            print(
                "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                .format(scale_name, node.layer_type))
            self.params[scale_name] = \
                np.zeros([1] if params.channel_shared else [1, 1, 1, node.in_shapes[0][1]]).astype("float32")
        else:
            self.params[scale_name] = _adjust_parameters(node)[0]

        layer_attrs = {
            "axis": -1 if params.channel_shared else 1,
            "param_name": scale_name,
            "param_shape": self.params[scale_name].shape,
            "param_dtype": str(self.params[scale_name].dtype)}
        self.paddle_graph.add_layer(
            "custom_layer:normalize",
            inputs={"x": input.name},
            outputs=[node.name],
            **layer_attrs)
        
    def Permute(self, node):
        assert len(
            node.inputs) == 1, "The count of Permute node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.permute_param
        order = list(params.order)    
        self.paddle_graph.add_layer(
            "paddle.transpose",
            inputs={"x": input.name},
            outputs=[node.name],
            perm=order)
        
    def PriorBox(self, node):
        assert len(
            node.inputs) == 2, "The count of PriorBox node\'s input is not 2."
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        inputs_dict = {}
        inputs_dict["x0"] = input0.name
        inputs_dict["x1"] = input1.name
        params = node.layer.prior_box_param
        steps = tuple(params.step) if type(params.step) \
                is list or type(params.step) is tuple \
                else (params.step, params.step)
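        # A scalar step in the prototxt is applied to both the h and w
        # directions of the prior-box grid.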
        layer_attrs = {
            "min_sizes": params.min_size,
            "max_sizes": params.max_size,
            "aspect_ratios": params.aspect_ratio,
            "variance": params.variance,
            "flip": params.flip,
            "clip": params.clip,
            "steps": steps,
            "offset": params.offset,
            "min_max_aspect_ratios_order": True}
        self.paddle_graph.add_layer(
            "custom_layer:priorbox",
            inputs=inputs_dict,
            outputs=[node.name],
            **layer_attrs)
        
    def ReLU6(self, node):
        assert len(
            node.inputs) == 1, "The count of RelU6 node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.nn.functional.relu6",
            inputs={"x": input.name},
            outputs=[node.name])
        
    def ROIPooling(self, node):
        assert len(
            node.inputs) == 2, "The count of ROIPooling node\'s input is not 2."
        input0 = self.graph.get_input_node(node, idx=0, copy=True)
        input1 = self.graph.get_input_node(node, idx=1, copy=True)
        inputs_dict = {}
        inputs_dict["x0"] = input0.name
        inputs_dict["x1"] = input1.name
        params = node.layer.roi_pooling_param
        layer_attrs = {
            "pooled_height": params.pooled_h,
            "pooled_width": params.pooled_w,
            "spatial_scale": params.spatial_scale}
        self.paddle_graph.add_layer(
            "custom_layer:ROIPooling",
            inputs=inputs_dict,
            outputs=[node.name],
            **layer_attrs)
        
    def ShuffleChannel(self, node):
        assert len(
            node.inputs) == 1, "The count of ShuffleChannel node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.shuffle_channel_param
        self.paddle_graph.add_layer(
            "paddle.fluid.layers.shuffle_channel",
            inputs={"x": input.name},
            outputs=[node.layer_name],
            group=params.group)
        
    def Upsample(self, node):
        assert len(
            node.inputs) == 1, "The count of Upsample node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        params = node.layer.upsample_param
        layer_attrs = {
            "align_corners": False,
            "scale_factor": params.scale,
            "mode": "nearest"}
        self.paddle_graph.add_layer(
            "paddle.nn.functioanl.interpolate",
            inputs={"input": input.name},
            outputs=[node.layer_name],
            **layer_attrs)
    
    def Select(self, node):
        assert len(
            node.inputs) == 1, "The count of Select node\'s input is not 1."
        input = self.graph.get_input_node(node, idx=0, copy=True)
        in_shapes = node.in_shapes[0]
        params = node.layer.select_param
        layer_attrs = {
            "in_shapes": in_shapes,
            "point": params.slice_point,
            "axis": params.axis}
        self.paddle_graph.add_layer(
            "custom_layer:select",
            inputs={"x": input.name},
            outputs=[node.name],
            **layer_attrs)