# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from x2paddle.decoder.onnx_decoder import ONNXGraph, ONNXGraphNode, ONNXGraphDataNode
from x2paddle.core.graph import GraphNode
from x2paddle.core.util import string
from functools import reduce
import numpy as np
import onnx
import onnx.numpy_helper as numpy_helper
from onnx.mapping import TENSOR_TYPE_TO_NP_TYPE
import logging as _logging
from collections import OrderedDict
import math
import os
import copy
import sys
import shutil

_logger = _logging.getLogger(__name__)


def _const_weight_or_none(node, necessary=False):
    # Return the constant value for a Constant op or an initializer
    # (ONNXGraphDataNode); otherwise return None, unless the caller marks the
    # weight as necessary.
    if 'Constant' in node.layer_type:
        return node.value
    if isinstance(node, ONNXGraphDataNode):
        return node.weight
    if necessary:
        raise Exception('{} should be an initializer or Constant operator.'.
                        format(node.layer_name))
    return None


def _is_static_shape(shape):
    # A shape is considered static if it has at most one unknown dim (-1) and
    # no invalid dims (below -1).
    negative_dims = 0
    error_dims = 0
    for dim in shape:
        if dim < 0:
            negative_dims += 1
        if dim < -1:
            error_dims += 1
    if negative_dims > 1:
        return False
    if error_dims > 0:
        return False
    return True


def _get_same_padding(in_size, kernel_size, stride):
    # TF-style SAME padding: pick pads so the output size is
    # ceil(in_size / stride). E.g. in_size=7, kernel_size=3, stride=2 gives
    # new_size=4, pad_size=2, and returns [1, 1].
    new_size = int(math.ceil(in_size * 1.0 / stride))
    pad_size = (new_size - 1) * stride + kernel_size - in_size
    pad0 = int(pad_size / 2)
    pad1 = pad_size - pad0
    return [pad0, pad1]


def print_mapping_info(func):
    def run_mapping(*args, **kwargs):
        node = args[1]
        try:
            res = func(*args, **kwargs)
        except:
            print("convert failed node:{}, op_type is {}".format(
                node.layer_name[9:], node.layer_type))
            raise
        else:
            #print("convert successfully node:{}, op_type is {}".format(
            #    node.layer_name[9:], node.layer_type))
            return res

    return run_mapping


class OpSet9():
    elementwise_ops = {
        'Add': 'paddle.add',
        'Div': 'paddle.divide',
        'Sub': 'paddle.subtract',
        'Mul': 'paddle.multiply',
        'Pow': 'paddle.pow',
        'Less': 'paddle.less_than',
    }
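    # elementwise_map() lowers each ONNX op above to a single Paddle binary
    # call, e.g. an ONNX Add over inputs X and Y becomes paddle.add(x=X, y=Y).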

    directly_map_ops = {
        'Ceil': ['paddle.ceil'],
        # reduce function
        'ReduceMean': [
            'paddle.mean', dict(
                axes='axis', keepdims='keepdim'), dict(
                    axes=None, keepdims=1)
        ],
        'ReduceSum': [
            'paddle.sum', dict(
                axes='axis', keepdims='keepdim'), dict(
                    axes=None, keepdims=1)
        ],
        'ReduceMin': [
            'paddle.min', dict(
                axes='axis', keepdims='keepdim'), dict(
                    axes=None, keepdims=1)
        ],
        'ReduceMax': [
            'paddle.max', dict(
                axes='axis', keepdims='keepdim'), dict(
                    axes=None, keepdims=1)
        ],
        'ReduceProd': [
            'paddle.prod', dict(
                axes='axis', keepdims='keepdim'), dict(
                    axes=None, keepdims=1)
        ],
        # active function
        'Relu': ['paddle.nn.functional.relu'],
        'LeakyRelu': [
            'paddle.nn.functional.leaky_relu', dict(alpha='negative_slope'),
            dict(negative_slope=.01)
        ],
        'Elu':
        ['paddle.nn.functional.elu', dict(alpha='alpha'), dict(alpha=1.)],
        'ThresholdedRelu': [
            'paddle.nn.functional.thresholded_relu', dict(alpha='threshold'),
            dict(alpha=1.)
        ],
        'Tanh': ['paddle.nn.functional.tanh'],
        'Sigmoid': ['paddle.nn.functional.sigmoid'],
        'Softsign': ['paddle.nn.functional.softsign'],
        'Softplus': [
            'paddle.nn.functional.softplus', dict(threshold='threshold'),
            dict(threshold=float(sys.maxsize))
        ],
        'Exp': ['paddle.exp'],
        'Log': ['paddle.log'],
        'Softmax':
        ['paddle.nn.functional.softmax', dict(axis='axis'), dict(axis=1)],
        'Sqrt': ['paddle.sqrt'],
        'Floor': ['paddle.floor'],
        'Abs': ['paddle.abs'],
        'Erf': ['paddle.erf'],
    }
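    # Each entry above is [paddle_api] or [paddle_api, {onnx attr -> paddle
    # attr}, {onnx attr -> default value}]; directly_map() walks this
    # structure to rename attributes and fill in defaults for one-to-one ops.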

    def __init__(self, decoder, paddle_graph):
        super(OpSet9, self).__init__()
        self.graph = decoder.graph
        self.paddle_graph = paddle_graph
        self.input_index = 0
        self.inputs_info = dict()
        self.params = dict()

    @print_mapping_info
    def directly_map(self, node, *args, **kwargs):
        inputs = node.layer.input
        assert len(inputs) == 1, 'directly_map error with multiple inputs'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        onnx_attrs = node.attr_map
        if '' in onnx_attrs:
            onnx_attrs.pop('')
        if '_' in onnx_attrs:
            onnx_attrs.pop('_')
        op_info = self.directly_map_ops[node.layer_type]
        paddle_op = op_info[0]
        layer_attrs = dict()
        if len(op_info) > 1:
            attrs_name_map_dict = op_info[1]
            for onnx_attr_name, pd_attr_name in attrs_name_map_dict.items():
                if onnx_attr_name in onnx_attrs:
                    layer_attrs[pd_attr_name] = onnx_attrs[onnx_attr_name]
                else:
                    layer_attrs[pd_attr_name] = op_info[2][onnx_attr_name]
        self.paddle_graph.add_layer(
            kernel=paddle_op,
            inputs={"x": input.name},
            outputs=[node.name],
            **layer_attrs)

    @print_mapping_info
    def elementwise_map(self, node):
        op_type = self.elementwise_ops[node.layer_type]
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        inputs_dict = {'x': val_x.name, 'y': val_y.name}
        self.paddle_graph.add_layer(
            op_type, inputs=inputs_dict, outputs=[node.name])

    @print_mapping_info
    def place_holder(self, node):
        shape = node.out_shapes[0]
        for i, dim_shape in enumerate(shape):
            # A zero batch dim is treated as unknown and defaults to 1; a zero
            # in any other position means the input shape was never assigned.
            if dim_shape == 0 and i == 0:
                shape[i] = 1
            if dim_shape == 0 and i != 0:
                raise Exception('shape of input is not assigned')
        self.paddle_graph.add_layer(
            kernel="paddle.static.data",
            inputs={},
            outputs=[node.name],
            dtype=string(node.dtype),
            shape=shape,
            name=string(node.name))
        self.inputs_info["x{}".format(self.input_index)] = [shape, node.dtype]
        self.input_index += 1

    @print_mapping_info
    def create_parameter(self, node, parameter=None):
        if parameter is not None:
            node = parameter
        dtype = node.dtype
        shape = node.out_shapes[0]
        if hasattr(node.weight, "shape") and len(node.weight.shape) == 0:
            self.paddle_graph.add_layer(
                "paddle.full",
                inputs={},
                outputs=[node.name],
                dtype=string(dtype),
                shape=[1],
                fill_value=node.weight)
        else:
            self.params[node.name] = node.weight
            self.paddle_graph.add_layer(
                kernel="paddle.static.create_parameter",
                inputs={},
                outputs=[node.name],
                dtype=string(dtype),
                shape=shape,
                name=string(node.name),
                default_initializer="paddle.nn.initializer.Constant(value=0.0)")

    def _pad_if_asymmetric(self, node, pads, val_name):  # pads: SSEE
        assert len(pads) & 1 == 0
        symmetric = True
        ndims = len(pads) // 2
        for idx_dim in range(ndims):
            if pads[idx_dim] != pads[ndims + idx_dim]:
                symmetric = False
                break
        if symmetric:
            return pads[:ndims], val_name
        val_padded = self.Pad(node, op_independent=False)
        return [0] * ndims, val_padded
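
    # SSEE layout sketch: for a 2-D kernel, pads is [H_begin, W_begin, H_end,
    # W_end]. pads=[1, 1, 1, 1] is symmetric and returns ([1, 1], val_name);
    # pads=[0, 1, 2, 1] is asymmetric, so the padding is materialized with an
    # explicit Pad layer and zero pads are passed to the consuming op instead.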

    def _interpolate(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        inputs = {'x': val_x.name}
        attrs = dict()
        if node.layer_type == 'Resize':
            if len(node.layer.input) == 2:
                # opset 10
                val_scales = self.graph.get_input_node(node, idx=1, copy=True)
                # TODO(syf): paddle.nn.functional.interpolate will support a
                # scale_factor whose length matches the rank of the input.
                #                 inputs['scale_factor'] = val_scales.name
                attrs['scale_factor'] = self.params[val_scales.name].tolist()[
                    2:]
            elif len(node.layer.input) == 3:
                # opset 11
                val_scales = self.graph.get_input_node(node, idx=2, copy=True)
                # TODO(syf): paddle.nn.functional.interpolate will support a
                # scale_factor whose length matches the rank of the input.
                #                 inputs['scale_factor'] = val_scales.name
                attrs['scale_factor'] = self.params[val_scales.name].tolist()[
                    2:]
            elif len(node.layer.input) == 4:
                # opset 11
                val_sizes = self.graph.get_input_node(node, idx=3, copy=True)
                var_nc, var_hw = val_sizes.name + '_nc', val_sizes.name + '_hw'
                self.paddle_graph.add_layer(
                    'paddle.split',
                    inputs={"x": val_sizes.name},
                    outputs=[var_nc, var_hw],
                    num_or_sections=[2, 2],
                    axis=0)
                self.paddle_graph.add_layer(
                    "paddle.cast",
                    inputs={"x": var_hw},
                    outputs=[var_hw],
                    dtype=string('int32'))
                inputs['size'] = var_hw
                attrs = {
                    "align_corners": False,
                    "mode": string(node.get_attr('mode', 'nearest'))
                }
                mode = node.get_attr('mode', 'nearest')
                if mode == "linear":
                    attrs["mode"] = string("bilinear")
                if node.get_attr('coordinate_transformation_mode',
                                 'half_pixel') == 'pytorch_half_pixel':
                    attrs["align_corners"] = False
                    attrs["align_mode"] = 0
                self.paddle_graph.add_layer(
                    kernel="paddle.nn.functional.interpolate",
                    inputs=inputs,
                    outputs=[node.name],
                    **attrs)
                return
        elif node.layer_type == 'Upsample':
            val_scales = self.graph.get_input_node(node, idx=1, copy=True)
            self.paddle_graph.add_layer(
                "paddle.slice",
                inputs={"input": val_scales.name},
                outputs=[val_scales.name],
                axes=[0],
                starts=[2],
                ends=[4])
            inputs['scale_factor'] = val_scales.name

        mode = node.get_attr('mode', 'nearest')
        attrs.update({
            "align_corners": False,
            "mode": string(mode),
            "align_mode": 1
        })
        val_x_shape = val_x.out_shapes[0]
        if mode == "linear" and len(val_x_shape) == 4:
            attrs["mode"] = string("bilinear")
            if node.get_attr('coordinate_transformation_mode',
                             'half_pixel') == 'pytorch_half_pixel':
                attrs["align_corners"] = False
                attrs["align_mode"] = 0
            else:
                attrs["align_corners"] = True
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.interpolate",
            inputs=inputs,
            outputs=[node.name],
            **attrs)

    @print_mapping_info
    def HardSigmoid(self, node):
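        # HardSigmoid(x) = clip(alpha * x + beta, 0, 1), composed below from
        # paddle.scale (alpha * x + beta) followed by paddle.clip.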
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        alpha = node.get_attr('alpha', 0.2)
        beta = node.get_attr('beta', 0.5)
        self.paddle_graph.add_layer(
            kernel="paddle.scale",
            inputs={"x": val_x.name},
            outputs=[node.name + "_val"],
            scale=alpha,
            bias=beta)
        self.paddle_graph.add_layer(
            kernel="paddle.clip",
            inputs={"x": node.name + "_val"},
            outputs=[node.name],
            min=0.0,
            max=1.0)

    @print_mapping_info
    def Shape(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            kernel="paddle.shape",
            inputs={"input": val_x.name},
            outputs=[node.name])
        self.paddle_graph.add_layer(
            'paddle.cast',
            inputs={"x": node.name},
            outputs=[node.name],
            dtype=string('int64'))

    @print_mapping_info
    def RoiAlign(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_rois = self.graph.get_input_node(node, idx=1, copy=True)

        pooled_height = node.get_attr('output_height')
        pooled_width = node.get_attr('output_width')
        spatial_scale = node.get_attr('spatial_scale')
        sampling_ratio = node.get_attr('sampling_ratio')
        layer_attrs = {
            'pooled_height': pooled_height,
            'pooled_width': pooled_width,
            'spatial_scale': spatial_scale,
            'sampling_ratio': sampling_ratio,
        }
        self.paddle_graph.add_layer(
            'paddle.fluid.layers.roi_align',
            inputs={'input': val_x.name,
                    'rois': val_rois.name},
            outputs=[node.name],
            **layer_attrs)

    @print_mapping_info
    def MaxRoiPool(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_rois = self.graph.get_input_node(node, idx=1, copy=True)

        spatial_scale = node.get_attr('spatial_scale')
        pooled_height, pooled_width = node.get_attr('pooled_shape')
        layer_attrs = {
            'pooled_height': pooled_height,
            'pooled_width': pooled_width,
            'spatial_scale': spatial_scale,
        }
        self.paddle_graph.add_layer(
            'paddle.fluid.layers.roi_pool',
            inputs={'input': val_x.name,
                    'rois': val_rois.name},
            outputs=[node.name],
            **layer_attrs)

    @print_mapping_info
    def Pad(self, node, op_independent=True):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        pads = node.get_attr('pads')
        is_pads_attr = True
        if pads is None:
            val_pad = self.graph.get_input_node(node, idx=1, copy=True)
            pad_shape = val_pad.out_shapes[0]
            is_pads_attr = False
            pads = _const_weight_or_none(val_pad)
            if pads is not None:
                is_pads_attr = True
        mode = node.get_attr('mode', 'constant')
        value = node.get_attr('value', 0.)
        data_shape = val_x.out_shapes[0]
        output_shape = node.out_shapes[0]
        assume_pad = False
        layer_attrs = {}
        layer_attrs['mode'] = string(mode)
        layer_attrs['value'] = value
        if not op_independent:
            output_name = node.name + '_paded'
        else:
            output_name = node.name
        layer_outputs = [output_name]
        if is_pads_attr:
            paddings = []
            paddle_op = 'paddle.nn.functional.pad'
            if len(pads) == 10 and sum(pads) == 0:
                pads = pads[0:6]
            if len(pads) in [2, 4, 6]:
                if data_shape:
                    assume_pad |= data_shape and 2 * (len(data_shape) - 2
                                                      ) == len(pads)  # NCHW
                if output_shape:
                    assume_pad |= output_shape and 2 * (len(output_shape) - 2
                                                        ) == len(pads)  # NCHW
                if assume_pad:
                    if len(pads) == 2:
                        data_format = "NCL"
                    elif len(pads) == 4:
                        data_format = "NCHW"
                    else:
                        data_format = "NCDHW"

                    paddings = np.array(pads).reshape(
                        (2, -1)).transpose().astype("int32")
                    paddings = np.flip(paddings, axis=0).flatten().tolist()
                    layer_attrs['pad'] = paddings
                    layer_attrs['data_format'] = string(data_format)
                else:
                    if data_shape:
                        assume_pad |= data_shape and 2 * len(data_shape) == len(
                            pads)  # NCHW
                    if output_shape:
                        assume_pad |= output_shape and 2 * len(
                            output_shape) == len(pads)  # NCHW
                    if assume_pad:
                        paddings = np.array(pads).reshape(
                            (2,
                             -1)).transpose().astype("int32").flatten().tolist()
                        layer_attrs['pad'] = paddings
                    else:
                        raise Exception("The padding value {} is wrong!".format(
                            pads))
            elif len(pads) == 8:
                if data_shape:
                    assume_pad |= data_shape and 2 * len(data_shape) == len(
                        pads)  # NCHW
                if output_shape:
                    assume_pad |= output_shape and 2 * len(output_shape) == len(
                        pads)  # NCHW
                if assume_pad:
                    paddings = np.array(pads).reshape(
                        (2, -1)).transpose().astype("int32")
                    paddings = np.flip(paddings, axis=0).flatten().tolist()
                    if sum(paddings[:4]) == 0:
                        paddings = paddings[4:]
                        layer_attrs['pad'] = paddings
                    else:
                        layer_attrs['pad'] = paddings
                        paddle_op = "custom_layer:pad_all_dim4_one_input"
            else:
                raise Exception("The padding value {} is wrong!".format(pads))
            self.paddle_graph.add_layer(
                paddle_op,
                inputs={'x': val_x.name},
                outputs=layer_outputs,
                **layer_attrs)
            if not op_independent:
                return node.name + '_paded'
        else:
            pads_len = val_pad.out_shapes[0][0]
            if pads_len in [2, 4, 6]:
                if data_shape:
                    assume_pad |= data_shape and 2 * (len(data_shape) - 2
                                                      ) == pads_len  # NCHW
                if output_shape:
                    assume_pad |= output_shape and 2 * (len(output_shape) - 2
                                                        ) == pads_len  # NCHW
                if assume_pad:
                    if pads_len == 2:
                        data_format = "NCL"
                    elif pads_len == 4:
                        data_format = "NCHW"
                    else:
                        data_format = "NCDHW"
                    self.paddle_graph.add_layer(
                        "custom_layer:pad_with_two_input",
                        inputs={'x': val_x.name,
                                'pad': val_pad.name},
                        outputs=layer_outputs,
                        value=value,
                        mode=string(mode),
                        data_format=string(data_format))
                else:
                    if data_shape:
                        assume_pad |= data_shape and 2 * len(
                            data_shape) == pads_len  # NCHW
                    if output_shape:
                        assume_pad |= output_shape and 2 * len(
                            output_shape) == pads_len  # NCHW
                    if assume_pad:
                        if pads_len == 4:
                            self.paddle_graph.add_layer(
                                "custom_layer:pad_all_dim2",
                                inputs={'x': val_x.name,
                                        'pad': val_pad.name},
                                outputs=layer_outputs,
                                value=value,
                                mode=string(mode))
                        else:
                            raise Exception("The padding value is wrong!")
            elif pads_len == 8:
                if data_shape:
                    assume_pad |= data_shape and 2 * len(
                        data_shape) == pads_len  # NCHW
                if output_shape:
                    assume_pad |= output_shape and 2 * len(
                        output_shape) == pads_len  # NCHW
                if assume_pad:
                    self.paddle_graph.add_layer(
                        "custom_layer:pad_all_dim4",
                        inputs={'x': val_x.name,
                                'pad': val_pad.name},
                        outputs=layer_outputs,
                        value=value,
                        mode=string(mode))
            else:
                raise Exception(
                    "The padding length {} is wrong!".format(pads_len))
            if not op_independent:
                return node.name + '_paded'

    @print_mapping_info
    def Unsqueeze(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        axes = node.get_attr('axes')
        layer_attrs = {'axis': axes}
        if len(val_x.out_shapes[0]) == 0:
            if node.name:
                self.paddle_graph.add_layer(
                    'paddle.reshape',
                    inputs={"x": val_x.name},
                    outputs=[node.name],
                    shape=[1])
        else:
            self.paddle_graph.add_layer(
                'paddle.unsqueeze',
                inputs={"x": val_x.name},
                outputs=[node.name],
                **layer_attrs)

    @print_mapping_info
    def Shrink(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        bias = node.get_attr('bias')
        lambd = node.get_attr('lambd')
        assert bias == 0.0, 'not support bias!=0'
        self.paddle_graph.add_layer(
            'paddle.nn.functional.hardshrink',
            inputs={"x": val_x.name},
            outputs=[node.name],
            threshold=lambd)

    @print_mapping_info
    def Constant(self, node):
        val_output = self.graph.get_node(node.layer.output[0], copy=True)

        value = node.get_attr('value')
        dtype = np.dtype(value.dtype)
        output_dtype = val_output.dtype
        if output_dtype:
            assert dtype == output_dtype, 'tensor dtype does not match storage dtype'

        shape = node.get_attr('shape', None)

        if shape is None:
            shape = val_output.out_shapes[0]
        if shape is None:
            shape = list(value.shape)
            _logger.warning('in (Constant -> %s): '
                            'attribute "shape" of %s not inferred, '
                            'using value as a 1-D tensor may lead to failure',
                            val_output.name, val_output.name)
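        # A single-element constant is emitted as a paddle.full scalar fill;
        # anything larger is registered in self.params and declared with
        # paddle.static.create_parameter below.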
        if len(value) == 1:
            value = value.tolist()
            value = value[0]
            self.paddle_graph.add_layer(
                "paddle.full",
                inputs={},
                outputs=[node.name],
                dtype=string(dtype),
                shape=[1],
                fill_value=value)
        else:
            value = np.reshape(value, shape)
            self.params[node.name] = value
            self.paddle_graph.add_layer(
                kernel="paddle.static.create_parameter",
                inputs={},
                outputs=[node.name],
                dtype=string(dtype),
                shape=shape,
                name=string(node.name),
                default_initializer="paddle.nn.initializer.Constant(value=0.0)")

    @print_mapping_info
    def Resize(self, node):
        self._interpolate(node)

    @print_mapping_info
    def Upsample(self, node):
        self._interpolate(node)

    @print_mapping_info
    def InstanceNormalization(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_scale = self.graph.get_input_node(node, idx=1, copy=True)
        val_b = self.graph.get_input_node(node, idx=2, copy=True)
        epsilon = node.get_attr('epsilon', 1e-5)
        layer_attrs = {'eps': epsilon, }
        dim = len(val_x.out_shapes[0])
        if dim == 2:
            layer_attrs["data_format"] = string("NC")
        elif dim == 3:
            layer_attrs["data_format"] = string("NCL")
        elif dim == 4:
            layer_attrs["data_format"] = string("NCHW")
        elif dim == 5:
            layer_attrs["data_format"] = string("NCDHW")
        else:
            raise Exception(
                "Paddle only supports 2D, 3D, 4D or 5D input for InstanceNormalization."
            )
        self.paddle_graph.add_layer(
            "paddle.nn.functional.instance_norm",
            inputs={
                "x": val_x.name,
                "weight": val_scale.name,
                "bias": val_b.name
            },
            outputs=[node.name],
            **layer_attrs)

    @print_mapping_info
    def Expand(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_shape = self.graph.get_input_node(node, idx=1, copy=True)
        val_x_dtype = val_x.dtype
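        # Expand is emulated via broadcasting: build a ones tensor of the
        # target shape and multiply, so paddle.multiply's broadcast rules tile
        # val_x up to the requested shape.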
        name_ones = node.name + '_ones'
        attr_ones = {
            'shape': val_shape.name,
            'dtype': string(val_x_dtype),
            'fill_value': 1
        }
        self.paddle_graph.add_layer(
            'paddle.full', inputs={}, outputs=[name_ones], **attr_ones)
        inputs_dict = {'x': name_ones, 'y': val_x.name}
        self.paddle_graph.add_layer(
            'paddle.multiply', inputs=inputs_dict, outputs=[node.name])

    @print_mapping_info
    def Gather(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        indices = self.graph.get_input_node(node, idx=1, copy=True)
        indices_shape = indices.out_shapes[0]
        axis = node.get_attr('axis', 0)
        #assert len(
        #    indices_shape) <= 2, "Gather op don't support dim of indice >2 "
        if axis == 0 and len(indices_shape) <= 1:
            if len(val_x.out_shapes[0]) <= 1:
                self.paddle_graph.add_layer(
                    'paddle.gather',
                    inputs={'x': val_x.name,
                            'index': indices.name},
                    outputs=[node.name])
            elif len(val_x.out_shapes[0]) > 1:
                if len(indices_shape) == 0:
                    gather_ = node.name + '_1'
                    self.paddle_graph.add_layer(
                        'paddle.gather',
                        inputs={'x': val_x.name,
                                'index': indices.name},
                        outputs=[gather_])
                    self.paddle_graph.add_layer(
                        'paddle.squeeze',
                        inputs={'x': gather_},
                        outputs=[node.name],
                        axis=[0])
                else:
                    self.paddle_graph.add_layer(
                        'paddle.gather',
                        inputs={'x': val_x.name,
                                'index': indices.name},
                        outputs=[node.name])
        elif axis > 0 and len(indices_shape) <= 1:
            perm = list(range(len(val_x.out_shapes[0])))
            perm = [axis] + perm[:axis] + perm[axis + 1:]
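            # paddle.gather only works along axis 0, so move `axis` to the
            # front, gather there, then undo the move with the inverse
            # permutation computed below.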
            name_trans = val_x.name + '_trans'
            self.paddle_graph.add_layer(
                'paddle.transpose',
                inputs={"x": val_x.name},
                outputs=[name_trans],
                perm=perm)
            self.paddle_graph.add_layer(
                'paddle.gather',
                inputs={'x': name_trans,
                        'index': indices.name},
                outputs=[node.name])
            new_perm = [0] * len(perm)
            for i in range(len(perm)):
                new_perm[perm[i]] = i
            self.paddle_graph.add_layer(
                'paddle.transpose',
                inputs={"x": node.name},
                outputs=[node.name],
                perm=new_perm)
            if len(indices_shape) < 1:
                self.paddle_graph.add_layer(
                    'paddle.squeeze',
                    inputs={'x': node.name},
                    outputs=[node.name],
                    axis=[axis])
        elif axis == 0 and len(indices_shape) > 1:
            if val_x.out_shapes[0] is not None and isinstance(
                    val_x, ONNXGraphDataNode):
                indices_cast = indices.name + '_cast'
                self.paddle_graph.add_layer(
                    'paddle.cast',
                    inputs={"x": indices.name},
                    outputs=[indices_cast],
                    dtype=string('int64'))
                self.paddle_graph.add_layer(
                    'paddle.nn.functional.embedding',
                    inputs={"x": indices_cast,
                            "weight": val_x.name},
                    outputs=[node.name])
            else:
                from functools import reduce
                reshape_shape = reduce(lambda x, y: x * y, indices_shape)
                indices_reshape = indices.name + '_shape'
                self.paddle_graph.add_layer(
                    'paddle.reshape',
                    inputs={"x": indices.name},
                    outputs=[indices_reshape],
                    shape=[reshape_shape, ])

                perm = list(range(len(val_x.out_shapes[0])))
                self.paddle_graph.add_layer(
                    'paddle.gather',
                    inputs={'x': val_x.name,
                            'index': indices_reshape},
                    outputs=[node.name])
                val_x_shape = val_x.out_shapes[0]
                reshaped_shape = []
                for i in perm:
                    reshaped_shape.append(indices_shape[i])
                for i in val_x_shape[:axis] + val_x_shape[axis + 1:]:
                    reshaped_shape.append(i)
                self.paddle_graph.add_layer(
                    'paddle.reshape',
                    inputs={"x": node.name},
                    outputs=[node.name],
                    shape=reshaped_shape)
        elif axis > 0 and len(indices_shape) > 1:
            from functools import reduce
            reshape_shape = reduce(lambda x, y: x * y, indices_shape)
            indices_reshape = indices.name + '_shape'
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={"x": indices.name},
                outputs=[indices_reshape],
                shape=[reshape_shape, ])

            perm = list(range(len(val_x.out_shapes[0])))
            perm = [axis] + perm[:axis] + perm[axis + 1:]
            name_trans = val_x.name + '_transpose'
            self.paddle_graph.add_layer(
                'paddle.transpose',
                inputs={"x": val_x.name},
                outputs=[name_trans],
                perm=perm)
            self.paddle_graph.add_layer(
                'paddle.gather',
                inputs={'x': name_trans,
                        'index': indices_reshape},
                outputs=[node.name])
            input_transpose = node.name + '_transpose'
            new_perm = [0] * len(perm)
            for i in range(len(perm)):
                new_perm[perm[i]] = i
            self.paddle_graph.add_layer(
                'paddle.transpose',
                inputs={"x": node.name},
                outputs=[input_transpose],
                perm=new_perm)
            perm = new_perm
            val_x_shape = val_x.out_shapes[0]
            reshaped_shape = []
            for i in perm:
                reshaped_shape.append(indices_shape[i])
            for i in val_x_shape[:axis] + val_x_shape[axis + 1:]:
                reshaped_shape.append(i)
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={"x": input_transpose},
                outputs=[node.name],
                shape=reshaped_shape)

    @print_mapping_info
    def ScatterND(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        indices = self.graph.get_input_node(node, idx=1, copy=True)
        updates = self.graph.get_input_node(node, idx=2, copy=True)
        if len(indices.out_shapes[0]) == 1:
            self.paddle_graph.add_layer(
                'paddle.scatter',
                inputs={
                    'x': val_x.name,
                    'index': indices.name,
                    'updates': updates.name
                },
                outputs=[node.name])
        else:
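            # ONNX ScatterND overwrites values at `indices`, while
            # paddle.scatter_nd_add accumulates. The workaround below builds
            # scatter_nd_add(zeros, indices, updates) plus the original input
            # masked to zero at the scattered positions (via a -1/+1 mask), so
            # the sum reproduces overwrite semantics.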
            input_inner_indices = node.name + '_input_inner_indices'
            shape = val_x.out_shapes[0]
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={"x": indices.name},
                outputs=[indices.name],
                shape=indices.out_shapes[0])

            zeros_like_val_x = val_x.name + '_zeros'
            self.paddle_graph.add_layer(
                'paddle.zeros_like',
                inputs={"x": val_x.name},
                outputs=[zeros_like_val_x])
            self.paddle_graph.add_layer(
                'paddle.scatter_nd_add',
                inputs={
                    'x': zeros_like_val_x,
                    'index': indices.name,
                    'updates': updates.name
                },
                outputs=[input_inner_indices])
            indices_mask = node.name + '_indices_mask'
            constant_minus_one = node.name + '_constant_minus_one'
            # full_like supports creating a tensor shaped like the input tensor
            self.paddle_graph.add_layer(
                'paddle.full_like',
                inputs={"x": updates.name},
                outputs=[constant_minus_one],
                dtype=string(updates.dtype),
                fill_value=-1)
            self.paddle_graph.add_layer(
                'paddle.scatter_nd_add',
                inputs={
                    'x': zeros_like_val_x,
                    'index': indices.name,
                    'updates': constant_minus_one
                },
                outputs=[indices_mask])
            constant_one = node.name + '_constant_1'
            # full_like supports creating a tensor shaped like the input tensor
            self.paddle_graph.add_layer(
                'paddle.full_like',
                inputs={"x": val_x.name},
                outputs=[constant_one],
                dtype=string(val_x.dtype),
                fill_value=1)
            input_out_indices_mask = node.name + '_input_out_indices_mask'
            self.paddle_graph.add_layer(
                "paddle.add",
                inputs={"x": indices_mask,
                        "y": constant_one},
                outputs=[input_out_indices_mask])

            input_out_indices = node.name + '_input_out_indices'
            self.paddle_graph.add_layer(
                "paddle.multiply",
                inputs={"x": val_x.name,
                        "y": input_out_indices_mask},
                outputs=[input_out_indices])

            self.paddle_graph.add_layer(
                "paddle.add",
                inputs={"x": input_inner_indices,
                        "y": input_out_indices},
                outputs=[node.name])

    @print_mapping_info
    def Range(self, node):
        val_start = self.graph.get_input_node(node, idx=0, copy=True)
        val_limit = self.graph.get_input_node(node, idx=1, copy=True)
        val_delta = self.graph.get_input_node(node, idx=2, copy=True)
        dtype = val_start.dtype
        inputs = {
            'start': val_start.name,
            'end': val_limit.name,
            'step': val_delta.name
        }
        self.paddle_graph.add_layer(
            'paddle.arange',
            inputs=inputs,
            outputs=[node.name],
            dtype=string(dtype))

    @print_mapping_info
    def Slice(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        starts, ends, axes, steps = None, None, None, None
        layer_attrs = {}
        if len(node.inputs) > 1:
            starts = self.graph.get_input_node(node, idx=1, copy=True)
            ends = self.graph.get_input_node(node, idx=2, copy=True)
            starts_value = _const_weight_or_none(starts)
            if starts_value is not None:
                starts_value = starts_value.tolist()
            ends_value = _const_weight_or_none(ends)
            if ends_value is not None:
                ends_value = ends_value.tolist()
            if len(node.inputs) > 2:
                s_len = len(val_x.out_shapes[0])
                axes = list(range(s_len))
            if len(node.inputs) > 3:
                axes_node = self.graph.get_input_node(node, idx=3, copy=True)
                axes = _const_weight_or_none(axes_node, necessary=True).tolist()
            if len(node.inputs) > 4:
                steps = self.graph.get_input_node(node, idx=4, copy=True)
                steps = _const_weight_or_none(steps).tolist()

            layer_attrs = {
                "axes": axes,
                "starts": starts.name,
                "ends": ends.name
            }
            if starts_value is not None and ends_value is not None and axes is not None:
                starts_value = starts_value.copy()
                ends_value = ends_value.copy()
                #for idx in range(len(ends_value)):
                #    if ends_value[idx] > 2**31 - 1:
                #        ends_value[idx] = 2**31 - 1
                #print(val_x.out_shapes)
                for idx in range(len(ends_value)):
                    if starts_value[idx] >= val_x.out_shapes[0][axes[
                            idx]] and val_x.out_shapes[0][axes[idx]] > 0:
                        starts_value[idx] = val_x.out_shapes[0][axes[idx]] - 1
                        ends_value[idx] = val_x.out_shapes[0][axes[idx]]
                    elif ends_value[idx] > 2**31 - 1:
                        ends_value[idx] = 2**31 - 1
                layer_attrs = {
                    "axes": axes,
                    "starts": starts_value,
                    "ends": ends_value
                }
            else:
                if starts.dtype != 'int32':
                    starts_cast = starts.name + '_cast'
                    self.paddle_graph.add_layer(
                        'paddle.cast',
                        inputs={"x": starts.name},
                        outputs=[starts_cast],
                        dtype=string('int32'))
                    layer_attrs['starts'] = starts_cast
                if ends.dtype != 'int32':
                    ends_cast = ends.name + '_cast'
                else:
                    ends_cast = ends.name
                self.paddle_graph.add_layer(
                    'paddle.cast',
                    inputs={"x": ends.name},
                    outputs=[ends_cast],
                    dtype=string('int32'))
                layer_attrs['ends'] = ends_cast
        else:
            starts = node.get_attr('starts')
            ends = node.get_attr('ends')
            axes = node.get_attr('axes')
            for idx in range(len(ends)):
                if ends[idx] > 2**31 - 1:
                    ends[idx] = 2**31 - 1
            layer_attrs = {"axes": axes, "starts": starts, "ends": ends}

        if steps is not None:
            layer_attrs['strides'] = steps
            self.paddle_graph.add_layer(
                'paddle.strided_slice',
                inputs={"x": val_x.name},
                outputs=[node.name],
                **layer_attrs)
        else:
            self.paddle_graph.add_layer(
                'paddle.slice',
                inputs={"input": val_x.name},
                outputs=[node.name],
                **layer_attrs)

    @print_mapping_info
    def ConstantOfShape(self, node):
        val_shape = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_node(node.layer.output[0], copy=True)

        value = node.get_attr('value')
        dtype = value.dtype
        value = value.tolist()
        assert len(value) == 1, ('given value not Scalar, shape of value > 1, '
                                 'this is not supported')
        if len(value) == 1:
            value = value[0]
            layer_attrs = {'dtype': string(dtype), 'fill_value': value}
            self.paddle_graph.add_layer(
                "paddle.full",
                inputs={'shape': val_shape.name},
                outputs=[node.name],
                **layer_attrs)

    @print_mapping_info
    def Clip(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_node(node.layer.output[0], copy=True)
        max_value, min_value = None, None
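        # Older opsets carry min/max as attributes; newer ones (opset 11+)
        # pass them as the second and third inputs, handled in the else branch.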
        if len(node.inputs) == 1:
            max_value = node.get_attr('max')
            min_value = node.get_attr('min')
            layer_attrs = {
                'max': max_value,
                'min': min_value,
            }
            self.paddle_graph.add_layer(
                'paddle.clip',
                inputs={"x": val_x.name},
                outputs=[node.name],
                **layer_attrs)
        else:
            min_ipt = self.graph.get_input_node(node, idx=1, copy=True)
            max_ipt = self.graph.get_input_node(node, idx=2, copy=True)
            min_value = _const_weight_or_none(min_ipt)
            max_value = _const_weight_or_none(max_ipt)
            if max_value.shape == (1, ):
                max_value = max_value[0]
            if min_value.shape == (1, ):
                min_value = min_value[0]
        if max_value is not None and min_value is not None:
            layer_attrs = {'max': max_value, 'min': min_value}
            self.paddle_graph.add_layer(
                'paddle.clip',
                inputs={"x": val_x.name},
                outputs=[node.name],
                **layer_attrs)
        else:
            raise Exception('min_value and max_value of Clip should not be None.')

    @print_mapping_info
    def Split(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        paddle_op = 'split'
        split = node.get_attr('split')
        axis = node.get_attr('axis', 0)
        layer_attrs = {
            'num_or_sections': split,
            'axis': axis,
        }
        outputs_list = list()
        if isinstance(split, list) or isinstance(split, tuple):
            if len(split) == 1:
                outputs_list.append(node.name)
            else:
                for i in range(len(split)):
                    outputs_list.append("{}_p{}".format(node.layer_name, i))
        else:
            outputs_list.append(node.name)
        self.paddle_graph.add_layer(
            'paddle.split',
            inputs={"x": val_x.name},
            outputs=outputs_list,
            **layer_attrs)

    @print_mapping_info
    def Reshape(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_shape = self.graph.get_input_node(node, idx=1, copy=True)
        val_reshaped = self.graph.get_node(node.layer.output[0], copy=True)
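        # Three cases, in order of preference: the target shape is a constant,
        # the output shape was statically inferred, or the shape is fully
        # dynamic and must be fed to paddle.reshape as a tensor input.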
        shape_value = _const_weight_or_none(val_shape)
        shape_dims = len(val_shape.out_shapes[0])

        if shape_value is not None:
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={'x': val_x.name},
                outputs=[node.name],
                shape=shape_value.tolist())
        elif len(node.out_shapes[0]) > 0 and _is_static_shape(node.out_shapes[
                0]):
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={'x': val_x.name},
                outputs=[node.name],
                shape=node.out_shapes[0])
        else:
            # shape may be [], coming from Gather with scalar indices
            if len(val_shape.out_shapes[0]) > 0:
                self.paddle_graph.add_layer(
                    'paddle.reshape',
                    inputs={'x': val_shape.name},
                    outputs=[val_shape.name],
                    shape=val_shape.out_shapes[0])
            if val_shape.dtype != "int32":
                self.paddle_graph.add_layer(
                    'paddle.cast',
                    inputs={'x': val_shape.name},
                    outputs=[val_shape.name],
                    dtype=string("int32"))
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={'x': val_x.name,
                        'shape': val_shape.name},
S
SunAhong1993 已提交
1146
                outputs=[node.name])

    @print_mapping_info
    def Cast(self, node):
        val_input = self.graph.get_input_node(node, idx=0, copy=True)
        val_output = self.graph.get_node(node.layer.output[0], copy=True)

        dtype = node.get_attr('to')
        if not isinstance(dtype, np.dtype):
            dtype = TENSOR_TYPE_TO_NP_TYPE[dtype]

        output_dtype = val_output.dtype
        if output_dtype:
            assert dtype == output_dtype, "the 'to' dtype does not match the output dtype"
        self.paddle_graph.add_layer(
            'paddle.cast',
            inputs={'x': val_input.name},
            outputs=[node.name],
            dtype=string(dtype))

    @print_mapping_info
    def Not(self, node):
        val_input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            'paddle.logical_not',
            inputs={'x': val_input.name},
            outputs=[node.name])

    @print_mapping_info
    def AveragePool(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)

        auto_pad = node.get_attr('auto_pad', 'NOTSET')
        kernel_shape = node.get_attr("kernel_shape")
        poolnd = len(kernel_shape)
        strides = node.get_attr("strides")
        ceil_mode = bool(node.get_attr('ceil_mode', 0))
        pads = node.get_attr('pads', [0] * (poolnd * 2))

        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

        if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
            input_shape = val_x.out_shapes[0]
            pad_h = _get_same_padding(input_shape[2], kernel_shape[0],
                                      strides[0])
            pad_w = _get_same_padding(input_shape[3], kernel_shape[1],
                                      strides[1])
            paddings = pad_h + pad_w

        paddle_op = 'paddle.nn.functional.avg_pool{}d'.format(poolnd)
        assert 1 <= poolnd <= 3, 'only avg_pool1d, avg_pool2d and avg_pool3d are supported'
        layer_attrs = {
            "kernel_size": kernel_shape,
            "stride": strides,
            "padding": paddings,
            "ceil_mode": ceil_mode,
            "exclusive": True,
            "name": string(node.name)
        }
        self.paddle_graph.add_layer(
            paddle_op,
            inputs={'x': val_x if isinstance(val_x, str) else val_x.name},
            outputs=[node.name],
            **layer_attrs)

    @print_mapping_info
    def Concat(self, node):
        inputs_list = []
        dtypes = set()
        for i in range(len(node.layer.input)):
            ipt = self.graph.get_input_node(node, idx=i, copy=True)
            inputs_list.append(ipt.name)
            dtypes.add(ipt.dtype)
        assert len(dtypes) <= 1, 'Unsupported situation happened, please create issue on https://github.com/PaddlePaddle/X2Paddle/issues.'
        axis = node.get_attr('axis')
        self.paddle_graph.add_layer(
            'paddle.concat',
            inputs={"x": inputs_list},
            outputs=[node.name],
            axis=axis)

    @print_mapping_info
    def Flatten(self, node):
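        # ONNX Flatten collapses dims before/after 'axis' into a 2-D matrix,
        # so it is emitted as a reshape to the two computed extents.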
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        output_shape = node.out_shapes[0]
        axis = node.get_attr('axis', 1)
        shape_list = [1, 1]
        if axis == 0:
            for s in output_shape:
                shape_list[1] *= s
        else:
            for s in output_shape[:axis]:
                shape_list[0] *= s
            for s in output_shape[axis:]:
                shape_list[1] *= s
        self.paddle_graph.add_layer(
            'paddle.reshape',
            inputs={"x": val_x.name},
            outputs=[node.name],
            shape=shape_list)

    @print_mapping_info
    def Gemm(self, node):
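        # ONNX Gemm is alpha * op(A) @ op(B) + beta * C, lowered here to
        # paddle.matmul followed by paddle.scale and paddle.add.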
        val_a = self.graph.get_input_node(node, idx=0, copy=True)
        val_b = self.graph.get_input_node(node, idx=1, copy=True)
        val_c = self.graph.get_input_node(node, idx=2, copy=True)

        alpha = node.get_attr('alpha', 1.)  # optional
        beta = node.get_attr('beta', 1.)  # optional
        trans_a = bool(node.get_attr('transA', 0))  # optional
        trans_b = bool(node.get_attr('transB', 0))  # optional
        val_mm = node.name + '_mm'
        matmul_inputs = {"x": val_a.name, "y": val_b.name}
        attr_matmul = {
            "transpose_x": trans_a,
            "transpose_y": trans_b,
        }
        self.paddle_graph.add_layer(
            'paddle.matmul',
            inputs=matmul_inputs,
            outputs=[val_mm],
            **attr_matmul)
        self.paddle_graph.add_layer(
            "paddle.scale", inputs={"x": val_mm}, outputs=[val_mm], scale=alpha)

        if beta != 0:
            if beta == 1.:
                add_inputs = {"x": val_mm, "y": val_c.name}
                self.paddle_graph.add_layer(
                    "paddle.add", inputs=add_inputs, outputs=[node.name])
            else:
                var_beta = node.name + '_beta'
                self.paddle_graph.add_layer(
                    "paddle.scale",
                    inputs={"x": val_c.name},
                    outputs=[var_beta],
                    scale=beta)
                add_inputs = {"x": val_mm, "y": var_beta}
                self.paddle_graph.add_layer(
                    "paddle.add", inputs=add_inputs, outputs=[node.name])
        else:
            # beta == 0: C is ignored, so the scaled matmul is the whole result
            self.paddle_graph.add_layer(
                "paddle.assign", inputs={"x": val_mm}, outputs=[node.name])

    @print_mapping_info
    def Sum(self, node):
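        # ONNX Sum takes any number of inputs; the first two are added, then
        # the remaining inputs are folded in one paddle.add at a time.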
        val_inps = node.layer.input
        inputs_dict = {
            "x": self.graph.get_input_node(
                node, idx=0, copy=True).name,
            "y": self.graph.get_input_node(
                node, idx=1, copy=True).name,
        }
        self.paddle_graph.add_layer(
            "paddle.add", inputs=inputs_dict, outputs=[node.name])

        for idx, ipt in enumerate(val_inps[2:]):
            # enumerate restarts at 0, so offset by the two inputs already added
            y = self.graph.get_input_node(node, idx=idx + 2, copy=True)
            inputs_dict = {
                "x": node.name,
                "y": y.name,
            }
            self.paddle_graph.add_layer(
                "paddle.add", inputs=inputs_dict, outputs=[node.name])

    @print_mapping_info
    def MatMul(self, node):
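        # When y carries a leading broadcast dim of 1, squeeze it first so
        # paddle.matmul sees the intended 2-D operand.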
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        x_shape = val_x.out_shapes[0]
        y_shape = val_y.out_shapes[0]
        inputs_dict = {"x": val_x.name, "y": val_y.name}
        if y_shape[0] == 1 and x_shape[-1] != 1 and x_shape[0] != 1:
            y_squeeze = val_y.name + '_squeeze'
            self.paddle_graph.add_layer(
                "paddle.squeeze",
                inputs={"x": val_y.name},
                outputs=[y_squeeze],
                axis=[0])
            inputs_dict['y'] = y_squeeze
            self.paddle_graph.add_layer(
                "paddle.matmul", inputs=inputs_dict, outputs=[node.name])
        else:
            self.paddle_graph.add_layer(
                "paddle.matmul", inputs=inputs_dict, outputs=[node.name])

    @print_mapping_info
    def BatchNormalization(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_scale = self.graph.get_input_node(node, idx=1, copy=True)
        val_b = self.graph.get_input_node(node, idx=2, copy=True)
        val_mean = self.graph.get_input_node(node, idx=3, copy=True)
        val_var = self.graph.get_input_node(node, idx=4, copy=True)

        momentum = node.get_attr('momentum', .9)
        epsilon = node.get_attr('epsilon', 1e-5)

        # Attribute: spatial is used in BatchNormalization-1,6,7
        spatial = bool(node.get_attr('spatial'))
        layer_attrs = {
            "momentum": momentum,
            "epsilon": epsilon,
        }
        self.paddle_graph.add_layer(
            "paddle.nn.functional.batch_norm",
            inputs={
                "x": val_x.name,
                "weight": val_scale.name,
                "bias": val_b.name,
                "running_mean": val_mean.name,
                "running_var": val_var.name
            },
            outputs=[node.name],
            **layer_attrs)

    @print_mapping_info
    def Transpose(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        s_len = len(val_x.out_shapes[0])
        perm_default = list(range(s_len))
        perm_default.reverse()
        perm = node.get_attr('perm', perm_default)
        self.paddle_graph.add_layer(
            "paddle.transpose",
            inputs={"x": val_x.name},
            outputs=[node.name],
            perm=perm)

    @print_mapping_info
    def PRelu(self, node):
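        # A slope of shape [1] is a single shared alpha ('all' mode); otherwise
        # the slope is flattened to one weight per channel.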
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_slope = self.graph.get_input_node(node, idx=1, copy=True)

        mode = 'channel'
        shape_slope = val_slope.out_shapes[0]
        if shape_slope == [1]:
            mode = 'all'

        if mode == 'channel':
            if len(shape_slope) > 1:
                self.paddle_graph.add_layer(
                    "paddle.reshape",
                    inputs={"x": val_slope.name},
                    outputs=[val_slope.name],
                    shape=[shape_slope[0]])
        self.paddle_graph.add_layer(
            "paddle.nn.functional.prelu",
            inputs={"x": val_x.name,
                    "weight": val_slope.name},
            outputs=[node.name])

    @print_mapping_info
    def Squeeze(self, node):
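        # A 1-D tensor cannot be squeezed further, so a same-dtype cast is
        # emitted as a no-op copy under the node's output name.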
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        axes = node.get_attr('axes')
        if len(val_x.out_shapes[0]) == 1:
            self.paddle_graph.add_layer(
                "paddle.cast",
                inputs={"x": val_x.name},
                outputs=[node.name],
                dtype=string(val_x.dtype))
        else:
            self.paddle_graph.add_layer(
                "paddle.squeeze",
                inputs={"x": val_x.name},
                outputs=[node.name],
                axis=axes)

    @print_mapping_info
    def Equal(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        self.paddle_graph.add_layer(
            "paddle.equal",
            inputs={'x': val_x.name,
                    'y': val_y.name},
            outputs=[node.name])

    @print_mapping_info
    def Greater(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        self.paddle_graph.add_layer(
            "paddle.greater_than",
            inputs={'x': val_x.name,
                    'y': val_y.name},
            outputs=[node.name])

    @print_mapping_info
    def Where(self, node):
        condition = self.graph.get_input_node(node, idx=0, copy=True)
        val_x = self.graph.get_input_node(node, idx=1, copy=True)
        val_y = self.graph.get_input_node(node, idx=2, copy=True)
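
        # Emulate where(cond, x, y) as cond * x + (1 - cond) * y: the boolean
        # mask is cast to x's dtype and combined with multiply/add.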
        not_condition = condition.name + '_not'
        self.paddle_graph.add_layer(
            "paddle.logical_not",
            inputs={"x": condition.name},
            outputs=[not_condition])
        cast_not_condition = not_condition + '_cast'
        self.paddle_graph.add_layer(
            "paddle.cast",
            inputs={"x": not_condition},
            outputs=[cast_not_condition],
            dtype=string(val_x.dtype))
        cast_condition = condition.name + '_cast'
        self.paddle_graph.add_layer(
            "paddle.cast",
            inputs={"x": condition.name},
            outputs=[cast_condition],
            dtype=string(val_x.dtype))
        mul_val_x = val_x.name + '_mul'
        self.paddle_graph.add_layer(
            "paddle.multiply",
            inputs={'x': val_x.name,
                    'y': cast_condition},
            outputs=[mul_val_x])
        mul_val_y = val_y.name + '_mul'
        self.paddle_graph.add_layer(
            "paddle.multiply",
            inputs={'x': val_y.name,
                    'y': cast_not_condition},
            outputs=[mul_val_y])

        self.paddle_graph.add_layer(
            "paddle.add",
            inputs={'x': mul_val_x,
                    'y': mul_val_y},
            outputs=[node.name])

    @print_mapping_info
    def NonZero(self, node):
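        # paddle.nonzero yields indices shaped (N, rank) while ONNX NonZero
        # wants (rank, N), hence the transpose (1-D) or split+concat (n-D).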
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_x_dim = len(val_x.out_shapes[0])
        if val_x_dim == 1:
            self.paddle_graph.add_layer(
                "paddle.nonzero",
                inputs={"x": val_x.name},
                outputs=[val_x.name])
            self.paddle_graph.add_layer(
                "paddle.transpose",
                inputs={"x": val_x.name},
                outputs=[node.layer_name],
                perm=[1, 0])
        elif val_x_dim > 1:
            self.paddle_graph.add_layer(
                "paddle.nonzero",
                inputs={"x": val_x.name},
                outputs=[val_x.name])
            self.paddle_graph.add_layer(
                "paddle.split",
                inputs={"x": val_x.name},
                outputs=[val_x.name],
                num_or_sections=1,
                axis=val_x_dim)
            self.paddle_graph.add_layer(
                "paddle.concat", inputs={"x": val_x.name}, outputs=[node.name])

    @print_mapping_info
    def Identity(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.assign", inputs={"x": val_x.name}, outputs=[node.name])

    @print_mapping_info
    def Tile(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_repeats = self.graph.get_input_node(node, idx=1, copy=True)
        repeats = _const_weight_or_none(val_repeats)
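
        # 'repeats' may fold to a list/ndarray constant, be a plain int, or
        # stay a runtime tensor name; each case is normalized for paddle.tile.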
        if repeats is None:
            repeats = val_repeats.name
            if val_repeats.dtype != 'int32':
                self.paddle_graph.add_layer(
                    "paddle.cast",
                    inputs={"x": repeats},
                    outputs=["{}_tmp".format(repeats)],
                    dtype=string("int32"))
                repeats = "{}_tmp".format(repeats)

        elif isinstance(repeats, int):
            repeats = [repeats]

        elif isinstance(repeats, np.ndarray):
            repeats = repeats.tolist()

        self.paddle_graph.add_layer(
            "paddle.tile",
            inputs={"x": val_x.name},
            outputs=[node.name],
            repeat_times=repeats)

    @print_mapping_info
    def MaxPool(self, node):
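        # SAME_UPPER/SAME_LOWER auto_pad recomputes symmetric paddings from
        # the input shape; asymmetric pads are split into an explicit pad op.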
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        auto_pad = node.get_attr('auto_pad', 'NOTSET')
        assert node.get_attr(
            "dilations") is None, 'MaxPool with dilations is not supported'  # optional

        kernel_shape = node.get_attr("kernel_shape")
        poolnd = len(kernel_shape)
        strides = node.get_attr("strides")
        ceil_mode = bool(node.get_attr('ceil_mode', 0))  # optional
        pads = node.get_attr('pads', [0] * (poolnd * 2))  # optional
        paddle_op = 'paddle.nn.functional.max_pool{}d'.format(poolnd)
        assert 1 <= poolnd <= 3, 'only max_pool1d, max_pool2d and max_pool3d are supported'

        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

        if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
            input_shape = val_x.out_shapes[0]
            pad_h = _get_same_padding(input_shape[2], kernel_shape[0],
                                      strides[0])
            pad_w = _get_same_padding(input_shape[3], kernel_shape[1],
                                      strides[1])
            paddings = pad_h + pad_w

        layer_attrs = {
            "kernel_size": kernel_shape,
            "stride": strides,
            "padding": paddings,
            "ceil_mode": ceil_mode,
        }
        self.paddle_graph.add_layer(
            paddle_op,
            inputs={'x': val_x if isinstance(val_x, str) else val_x.name},
            outputs=[node.name],
            **layer_attrs)

    @print_mapping_info
    def GlobalMaxPool(self, node):
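        # Global pooling is expressed as adaptive pooling with the spatial
        # output size read from the inferred output shape.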
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        input_shape = val_x.out_shapes[0]
        if len(input_shape) == 4:
            poolnd = 2
        elif len(input_shape) == 5:
            poolnd = 3
        elif len(input_shape) == 3:
            poolnd = 1
        else:
            # unexpected rank: let the assert below report it
            poolnd = 0
        paddle_op = 'paddle.nn.functional.adaptive_max_pool{}d'.format(poolnd)
        assert 1 <= poolnd <= 3, 'only adaptive_max_pool1d, adaptive_max_pool2d and adaptive_max_pool3d are supported'
        output_shape = node.out_shapes[0]
        self.paddle_graph.add_layer(
            paddle_op,
            inputs={'x': val_x.name},
            outputs=[node.name],
            output_size=output_shape[2:])

    @print_mapping_info
    def GlobalAveragePool(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        input_shape = val_x.out_shapes[0]
        if len(input_shape) == 4:
            poolnd = 2
        elif len(input_shape) == 5:
            poolnd = 3
        elif len(input_shape) == 3:
            poolnd = 1
        else:
            # unexpected rank: let the assert below report it
            poolnd = 0
        paddle_op = 'paddle.nn.functional.adaptive_avg_pool{}d'.format(poolnd)
        assert 1 <= poolnd <= 3, 'only adaptive_avg_pool1d, adaptive_avg_pool2d and adaptive_avg_pool3d are supported'
        output_shape = node.out_shapes[0]
        self.paddle_graph.add_layer(
            paddle_op,
            inputs={'x': val_x.name},
            outputs=[node.name],
            output_size=output_shape[2:])

    @print_mapping_info
    def Conv(self, node):
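        # Weights arrive as a graph input, so the functional conv{2,3}d form
        # is used; asymmetric pads are pre-applied by _pad_if_asymmetric.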
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_w = self.graph.get_input_node(node, idx=1, copy=True)
        has_bias = len(node.layer.input) == 3
        if has_bias:
            val_b = self.graph.get_input_node(node, idx=2, copy=True)
        auto_pad = node.get_attr('auto_pad', 'NOTSET')

        kernel_shape = node.get_attr('kernel_shape')
        convnd = len(kernel_shape)
        assert 2 <= convnd <= 3, 'only conv2d and conv3d are supported'
        num_out_channels = val_w.out_shapes[0][0]
        num_in_channels = val_w.out_shapes[0][1]
        paddle_op = 'paddle.nn.functional.conv{}d'.format(convnd)

        num_groups = node.get_attr('group', 1)
        strides = node.get_attr('strides', [1] * convnd)
        dilations = node.get_attr('dilations', [1] * convnd)
        pads = node.get_attr('pads', [0] * (convnd * 2))

        input_shape = val_x.out_shapes[0]
        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

        if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
            pad_h = _get_same_padding(input_shape[2], kernel_shape[0],
                                      strides[0])
            pad_w = _get_same_padding(input_shape[3], kernel_shape[1],
                                      strides[1])
            paddings = pad_h + pad_w

        layer_attrs = {
            "stride": strides,
            "padding": paddings,
            "dilation": dilations,
            "groups": num_groups,
        }
        layer_inputs = {
            "x": val_x if isinstance(val_x, str) else val_x.name,
            "weight": val_w.name
        }
        if has_bias:
            layer_inputs["bias"] = val_b.name
        if reduce(lambda x, y: x * y,
                  input_shape) in [1, -1] and 1 not in input_shape:
            input_shape[1] = num_in_channels * num_groups
            input_shape[0] = 0
            input_shape[2] = 0
            self.paddle_graph.add_layer(
                "paddle.reshape",
                inputs={"x": layer_inputs["x"]},
                outputs=[layer_inputs["x"]],
                shape=input_shape)
        self.paddle_graph.add_layer(
            paddle_op, inputs=layer_inputs, outputs=[node.name], **layer_attrs)

    @print_mapping_info
    def ConvTranspose(self, node):
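        # ConvTranspose weights are laid out with input channels on dim 0 and
        # output channels on dim 1; output_size comes from shape inference.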
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_w = self.graph.get_input_node(node, idx=1, copy=True)
        val_b = None
        if len(node.layer.input) > 2:
            val_b = self.graph.get_input_node(node, idx=2, copy=True)
        auto_pad = node.get_attr('auto_pad', 'NOTSET')
        out_padding = node.get_attr('output_padding', [0, 0])
        kernel_shape = node.get_attr('kernel_shape')
        assert kernel_shape, 'kernel_shape not inferred'
        convnd = len(kernel_shape)
        assert 2 <= convnd <= 3, 'only conv2d_transpose and conv3d_transpose are supported'
        num_in_channels = val_w.out_shapes[0][0]
        num_out_channels = val_w.out_shapes[0][1]
        paddle_op = 'paddle.nn.functional.conv{}d_transpose'.format(convnd)

        num_groups = node.get_attr('group', 1)
        strides = node.get_attr('strides', [1] * convnd)
        dilations = node.get_attr('dilations', [1] * convnd)
        pads = node.get_attr('pads', [0] * (convnd * 2))

        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

        layer_inputs = {'x': val_x.name, "weight": val_w.name}
        layer_attrs = {
            "stride": strides,
            "dilation": dilations,
            "padding": paddings,
            "groups": num_groups,
            "output_size": node.out_shapes[0][2:]
        }
        if val_b is not None:
            layer_inputs["bias"] = val_b.name
        self.paddle_graph.add_layer(
            kernel=paddle_op,
            inputs=layer_inputs,
            outputs=[node.name],
            **layer_attrs)

    @print_mapping_info
    def ArgMax(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        axis = node.get_attr('axis')
        keepdims = False if node.get_attr('keepdims') == 0 else True
        layer_attrs = {'axis': axis, 'keepdim': keepdims}
        self.paddle_graph.add_layer(
            'paddle.argmax',
            inputs={"x": val_x.name},
            outputs=[node.name],
            **layer_attrs)

    @print_mapping_info
    def Size(self, node):
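        # Size is the element count: take the shape, cast it to int64, then
        # reduce it with paddle.prod.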
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.shape", inputs={"input": val_x.name}, outputs=[node.name])
        self.paddle_graph.add_layer(
            'paddle.cast',
            inputs={"x": node.name},
            outputs=[node.name],
            dtype=string('int64'))
        self.paddle_graph.add_layer(
            "paddle.prod", inputs={"x": node.name}, outputs=[node.name])

    @print_mapping_info
    def Sign(self, node):
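        # paddle.sign is applied on a float tensor here: non-float inputs are
        # cast to float32 first and the result is cast back to node.dtype.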
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        if node.dtype not in ["float16", "float32", "float64"]:
            self.paddle_graph.add_layer(
                "paddle.cast",
                inputs={"x": val_x.name},
                outputs=[val_x.name],
                dtype=string("float32"))
        self.paddle_graph.add_layer(
            "paddle.sign", inputs={"x": val_x.name}, outputs=[node.name])
        if node.dtype not in ["float16", "float32", "float64"]:
            self.paddle_graph.add_layer(
                "paddle.cast",
                inputs={"x": node.name},
                outputs=[node.name],
                dtype=string(node.dtype))

    @print_mapping_info
    def OneHot(self, node):
        indices = self.graph.get_input_node(node, idx=0, copy=True)
        depth = self.graph.get_input_node(node, idx=1, copy=True)
        values = self.graph.get_input_node(node, idx=2, copy=True)
        axis = node.get_attr('axis', -1)
        self.paddle_graph.add_layer(
            "custom_layer:one_hot",
            inputs={
                "indices": indices.name,
                "depth": depth.name,
                "values": values.name
            },
            outputs=[node.name],
            axis=axis)

    @print_mapping_info
    def Reciprocal(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.reciprocal", inputs={"x": val_x.name}, outputs=[node.name])

    @print_mapping_info
    def TopK(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_k = self.graph.get_input_node(node, idx=1, copy=True)
        layer_attrs = dict()
        layer_attrs["axis"] = node.get_attr('axis', -1)
        layer_attrs["largest"] = node.get_attr('largest', 1) == 1
        layer_attrs["sorted"] = node.get_attr('sorted', 1) == 1
        self.paddle_graph.add_layer(
            "paddle.topk",
            inputs={"x": val_x.name,
                    "k": val_k.name},
            outputs=[
                "{}_p{}".format(node.layer_name, 0),
                "{}_p{}".format(node.layer_name, 1)
            ],
            **layer_attrs)

    @print_mapping_info
    def LRN(self, node):
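        # LRN is delegated to the project's custom local_response_norm layer;
        # the ONNX 'bias' attribute maps to the 'k' argument.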
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        alpha = node.get_attr('alpha', 0.0001)
        beta = node.get_attr('beta', 0.75)
        bias = node.get_attr('bias', 1.0)
        size = node.get_attr('size')
        layer_attrs = {'size': size, 'alpha': alpha, 'beta': beta, 'k': bias}
        self.paddle_graph.add_layer(
            "custom_layer:local_response_norm",
            inputs={"x": val_x.name},
            outputs=[node.name],
            **layer_attrs)

    @print_mapping_info
    def DepthToSpace(self, node):
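        # DepthToSpace is a reshape-transpose-reshape pipeline; DCR and CRD
        # modes differ only in how blocks are ordered within the channel dim.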
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        blocksize = node.get_attr('blocksize')
        mode = node.get_attr('mode', "DCR")
        val_x_shape = val_x.out_shapes[0]
        b, c, h, w = val_x_shape
        if mode == "DCR":
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={"x": val_x.name},
                outputs=[node.name],
                shape=[b, blocksize, blocksize, c // (blocksize**2), h, w])
            self.paddle_graph.add_layer(
                'paddle.transpose',
                inputs={"x": node.name},
                outputs=[node.name],
                perm=[0, 3, 4, 1, 5, 2])
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={"x": node.name},
                outputs=[node.name],
                shape=[b, c // (blocksize**2), h * blocksize, w * blocksize])
        else:
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={"x": val_x.name},
                outputs=[node.name],
                shape=[b, c // (blocksize**2), blocksize, blocksize, h, w])
            self.paddle_graph.add_layer(
                'paddle.transpose',
                inputs={"x": node.name},
                outputs=[node.name],
                perm=[0, 1, 4, 2, 5, 3])
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={"x": node.name},
                outputs=[node.name],
                shape=[b, c // (blocksize**2), h * blocksize, w * blocksize])