# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from x2paddle.decoder.onnx_decoder import ONNXGraph, ONNXGraphNode, ONNXGraphDataNode
from x2paddle.core.graph import GraphNode
from x2paddle.core.fluid_code import Layer
from x2paddle.core.fluid_code import FluidCode
from x2paddle.core.util import string
from functools import reduce
import numpy as np
import onnx
import onnx.numpy_helper as numpy_helper
from onnx.mapping import TENSOR_TYPE_TO_NP_TYPE
import logging as _logging
from collections import OrderedDict
import math
import os
import copy
import sys
import shutil

_logger = _logging.getLogger(__name__)


def _const_weight_or_none(node, necessary=False):
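    """Return the constant value carried by `node`, or None.

    Constant operators expose their value as `node.value`; graph
    initializers (ONNXGraphDataNode) expose it as `node.weight`.
    With necessary=True a node that is neither fails the assertion below.
    """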
    if 'Constant' in node.layer_type:
        return node.value
    if isinstance(node, ONNXGraphDataNode):
        return node.weight
    if necessary:
        assert False, '{} should be an initializer or Constant operator.'.format(
            node.layer_name)
    return None


def _is_static_shape(shape):
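    """A shape counts as static when it has at most one -1 (unknown)
    dimension and no dimension below -1, e.g. [-1, 3, 224, 224] -> True,
    [-1, -1, 3] -> False.
    """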
    negative_dims = 0
    error_dims = 0
    for dim in shape:
        if dim < 0:
            negative_dims += 1
        if dim < -1:
            error_dims += 1
    if negative_dims > 1:
        return False
    if error_dims > 0:
        return False
    return True


def _get_same_padding(in_size, kernel_size, stride):
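    """Compute ONNX-style SAME padding [pad_before, pad_after] for one
    spatial dimension, e.g. in_size=5, kernel_size=3, stride=2 gives
    new_size=3 and pad_size=2, hence [1, 1].
    """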
    new_size = int(math.ceil(in_size * 1.0 / stride))
    pad_size = (new_size - 1) * stride + kernel_size - in_size
    pad0 = int(pad_size / 2)
    pad1 = pad_size - pad0
    return [pad0, pad1]


def print_mapping_info(func):
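    """Decorator that reports the failing node's name and op type before
    re-raising, so a broken conversion is easy to locate."""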
    def run_mapping(*args, **kwargs):
        node = args[1]
        try:
            res = func(*args, **kwargs)
        except:
            print("convert failed node:{}, op_type is {}".format(
                node.layer_name[9:], node.layer_type))
            raise
        else:
            #print("convert successfully node:{}, op_type is {}".format(
            #    node.layer_name[9:], node.layer_type))
            return res

    return run_mapping


class OpSet9():
    elementwise_ops = {
        'Add': 'paddle.add',
        'Div': 'paddle.divide',
        'Sub': 'fluid.layers.elementwise_sub',
        'Mul': 'paddle.multiply',
        'Pow': 'paddle.pow',
    }

    # op_info layout: [paddle_op, {onnx_attr: paddle_attr}, {onnx_attr: default}];
    # directly_map looks up the defaults by the ONNX attribute name.
    directly_map_ops = {
        'Ceil': ['paddle.ceil'],
        # reduce function
        'ReduceMean': ['paddle.mean',
                       dict(axes='axis', keepdims='keepdim'),
                       dict(keepdims=1)],
        'ReduceSum': ['paddle.sum',
                      dict(axes='axis', keepdims='keepdim'),
                      dict(keepdims=1)],
        'ReduceMin': ['paddle.min',
                      dict(axes='axis', keepdims='keepdim'),
                      dict(keepdims=1)],
        'ReduceMax': ['paddle.max',
                      dict(axes='axis', keepdims='keepdim'),
                      dict(keepdims=1)],
        # active function
        'Relu': ['paddle.nn.functional.relu'],
        'LeakyRelu': ['paddle.nn.functional.leaky_relu',
                      dict(alpha='negative_slope'),
                      dict(alpha=.01)],
        'Elu': ['paddle.nn.functional.elu',
                dict(alpha='alpha'),
                dict(alpha=1.)],
        'ThresholdedRelu': ['paddle.nn.functional.thresholded_relu',
                            dict(alpha='threshold'),
                            dict(alpha=1.)],
        'Tanh': ['paddle.nn.functional.tanh'],
        'Sigmoid': ['paddle.nn.functional.sigmoid'],
        'Softsign': ['paddle.nn.functional.softsign'],
        'Softplus': ['paddle.nn.functional.softplus',
                     dict(threshold='threshold'),
                     dict(threshold=float(sys.maxsize))],
        'Exp': ['paddle.exp'],
        'Softmax': ['paddle.nn.functional.softmax',
                    dict(axis='axis'),
                    dict(axis=1)],
        'Sqrt': ['paddle.sqrt'],
        'Floor': ['paddle.floor'],
        'Abs': ['paddle.abs'],
        'Erf': ['paddle.erf'],
    }

    def __init__(self, decoder, paddle_graph):
        super(OpSet9, self).__init__()
        self.graph = decoder.graph
        self.paddle_graph = paddle_graph
        self.input_index = 0
        self.inputs_info = dict()
        self.params = dict()

    @print_mapping_info
    def directly_map(self, node, *args, **kwargs):
        inputs = node.layer.input
        assert len(inputs) == 1, 'directly_map error with multi inputs'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        onnx_attrs = node.attr_map
        if '' in onnx_attrs:
            onnx_attrs.pop('')
        if '_' in onnx_attrs:
            onnx_attrs.pop('_')
        op_info = self.directly_map_ops[node.layer_type]
        paddle_op = op_info[0]
        layer_attrs = dict()
        if len(op_info) > 1:
            attrs_name_map_dict = op_info[1]
            for onnx_attr_name, pd_attr_name in attrs_name_map_dict.items():
                if onnx_attr_name in onnx_attrs:
                    layer_attrs[pd_attr_name] = onnx_attrs[onnx_attr_name]
                else:
                    layer_attrs[pd_attr_name] = op_info[2][onnx_attr_name]
        self.paddle_graph.add_layer(
            kernel=paddle_op,
            inputs={"x": input.name},
            outputs=[node.name],
            **layer_attrs)

    @print_mapping_info
    def elementwise_map(self, node):
        op_type = self.elementwise_ops[node.layer_type]
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        inputs_dict = {'x': val_x.name, 
                       'y': val_y.name}
        self.paddle_graph.add_layer(
            op_type, 
            inputs=inputs_dict, 
            outputs=[node.name])
        
    @print_mapping_info
    def place_holder(self, node):
        shape = node.out_shapes[0]
        for i, dim_shape in enumerate(shape):
            if dim_shape == 0 and i == 0:
                shape[i] = 1
            if dim_shape == 0 and i != 0:
                assert False, 'shape of input is not assigned'
        self.paddle_graph.add_layer(
            kernel="paddle.static.data",
            inputs={},
            outputs=[node.name],
            dtype=string(node.dtype),
            shape=shape,
            name=string(node.name))
        self.inputs_info["x{}".format(self.input_index)] = [shape, node.dtype]
        self.input_index += 1

    @print_mapping_info
    def create_parameter(self, node, parameter=None):
        if parameter is not None:
            node = parameter
        dtype = node.dtype
        shape = node.out_shapes[0]
        if len(node.weight.shape) == 0:
            self.paddle_graph.add_layer(
                "paddle.full",
                inputs={},
                outputs=[node.name],
                dtype=string(dtype),
                shape=[1],
                fill_value=node.weight)
        else:
            self.params[node.name] = node.weight
            self.paddle_graph.add_layer(
                kernel="paddle.static.create_parameter",
                inputs={},
                outputs=[node.name],
                dtype=string(dtype),
                shape=shape,
                name=string(node.name),
                default_initializer="paddle.nn.initializer.Constant(value=0.0)")

    def _pad_if_asymmetric(self, node, pads, val_name):  # pads: SSEE
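        # `pads` is laid out SSEE (all starts, then all ends, per spatial
        # dim). Symmetric padding is returned as-is for the pooling/conv op
        # to consume; asymmetric padding is materialized as an explicit Pad
        # layer first and the padded tensor's name is returned instead.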
        assert len(pads) & 1 == 0
        symmetric = True
        ndims = len(pads) // 2
        for idx_dim in range(ndims):
            if pads[idx_dim] != pads[ndims + idx_dim]:
                symmetric = False
                break
        if symmetric:
            return pads[:ndims], val_name
        val_padded = self.Pad(node, op_independent=False)
        return [0] * ndims, val_padded

    def _interpolate(self, node):
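        # Resize carries scales/sizes differently per opset: 2 inputs
        # (X, scales) in opset 10; 3 inputs (X, roi, scales) or 4 inputs
        # (X, roi, scales, sizes) in opset 11.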
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        inputs = {'x': val_x.name}
        attrs = dict()
        if node.layer_type == 'Resize':
            if len(node.layer.input) == 2:
                # opset 10
                val_scales = self.graph.get_input_node(node, idx=1, copy=True)
                inputs['scale_factor'] = val_scales.name
            elif len(node.layer.input) == 3:
                # opset 11
                val_scales = self.graph.get_input_node(node, idx=2, copy=True)
                attrs['scale_factor'] = self.params[val_scales.name].tolist()[2:]
            elif len(node.layer.input) == 4:
                # opset 11
                val_sizes = self.graph.get_input_node(node, idx=3, copy=True)
                var_nc, var_hw = val_sizes.name + '_nc', val_sizes.name + '_hw'
                self.paddle_graph.add_layer(
                    'paddle.split',
                    inputs={"x": val_sizes.name},
                    outputs=[var_nc, var_hw],
                    num_or_sections=[2, 2],
                    axis=0)
                self.paddle_graph.add_layer(
                    "paddle.cast",
                    inputs={"x": var_hw},
                    outputs=[var_hw],
                    dtype=string('int32'))
#                 inputs['size'] = var_hw

                # TODO(syf): all use
                inputs['out_shape'] = var_hw
                ipt = inputs.pop("x")
                inputs["input"] = ipt
                mode = node.get_attr('mode', 'nearest')
                attrs.update({"align_corners": False})
                self.paddle_graph.add_layer(
                    kernel="fluid.layers.resize_nearest",
                    inputs=inputs,
                    outputs=[node.name],
                    **attrs)
                return
        elif node.layer_type == 'Upsample':
            val_scales = self.graph.get_input_node(node, idx=1, copy=True)
            inputs['scale_factor'] = val_scales.name

        mode = node.get_attr('mode', 'nearest')
        attrs.update({"align_corners": False,
                      "mode": string(mode),
                      "align_mode": 1})
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.interpolate",
            inputs=inputs,
            outputs=[node.name],
            **attrs)
        
    @print_mapping_info
    def HardSigmoid(self, node):
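        # HardSigmoid(x) = clip(alpha * x + beta, 0, 1), emitted below as
        # paddle.scale followed by paddle.clip.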
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        alpha = node.get_attr('alpha', 0.2)
        beta = node.get_attr('beta', 0.5)
        self.paddle_graph.add_layer(
            kernel="paddle.scale",
            inputs={"x": val_x.name},
            outputs=[node.name + "_val"],
            scale=alpha,
            bias=beta)
        self.paddle_graph.add_layer(
            kernel="paddle.clip",
            inputs={"x": node.name + "_val"},
            outputs=[node.name],
            min=0.0,
            max=1.0)  
        
    @print_mapping_info
    def Shape(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            kernel="paddle.shape",
            inputs={"input": val_x.name},
            outputs=[node.name])
        self.paddle_graph.add_layer(
                'paddle.cast',
                inputs={"x": node.name},
                outputs=[node.name],
                dtype=string('int64'))   

    @print_mapping_info
    def RoiAlign(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_rois = self.graph.get_input_node(node, idx=1, copy=True)

        pooled_height = node.get_attr('output_height')
        pooled_width = node.get_attr('output_width')
        spatial_scale = node.get_attr('spatial_scale')
        sampling_ratio = node.get_attr('sampling_ratio')
        layer_attrs = {
            'pooled_height': pooled_height,
            'pooled_width': pooled_width,
            'spatial_scale': spatial_scale,
            'sampling_ratio': sampling_ratio,
        }
        self.paddle_graph.add_layer(
            'fluid.layers.roi_align',
            inputs={'input': val_x.name,
                    'rois': val_rois.name},
            outputs=[node.name],
            **layer_attrs)

    @print_mapping_info
    def MaxRoiPool(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_rois = self.graph.get_input_node(node, idx=1, copy=True)

        spatial_scale = node.get_attr('spatial_scale')
        pooled_height, pooled_width = node.get_attr('pooled_shape')
        layer_attrs = {
            'pooled_height': pooled_height,
            'pooled_width': pooled_width,
            'spatial_scale': spatial_scale,
        }
        self.paddle_graph.add_layer(
            'fluid.layers.roi_pool',
            inputs={'input': val_x.name,
                    'rois': val_rois.name},
            outputs=[node.name],
            **layer_attrs)

    @print_mapping_info
    def Pad(self, node, op_independent=True):
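        # ONNX pads are ordered SSEE (starts then ends); Paddle expects the
        # per-dimension SESE interleaving, hence the reshape/transpose
        # below. With op_independent=False the padded tensor's name is
        # returned so the calling op can consume it directly.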
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        pads = node.get_attr('pads')
        mode = node.get_attr('mode', 'constant')
        value = node.get_attr('value', 0.)
        data_shape = val_x.out_shapes[0]
        output_shape = node.out_shapes[0]
        assume_pad2d = False
        layer_attrs = {}
        layer_attrs['mode'] = string(mode)
        paddings = []
        if len(pads) == 4:
            assume_pad2d |= mode != 'constant'
            if data_shape:
                assume_pad2d |= data_shape and len(data_shape) == 4  # NCHW
            if output_shape:
                assume_pad2d |= output_shape and len(output_shape) == 4  # NCHW
        if assume_pad2d:
            paddle_op = 'paddle.nn.functional.pad'
            layer_attrs['data_format'] = string('NCHW')
            layer_attrs['value'] = value
        else:
            paddle_op = 'fluid.layers.pad'
            layer_attrs["pad_value"] = value
        if len(pads) == 4:
            paddings = np.array(pads).reshape(
                (-1, 2)).transpose().flatten().tolist()  # SSEE -> SESE
        elif len(pads) == 8:
            paddings = np.array(pads).reshape(
                (-1, 4)).transpose().flatten().tolist()  # SSEE -> SESE
            if sum(paddings[:4]) == 0:
                paddle_op = 'paddle.nn.functional.pad'
                paddings = paddings[4:]
                layer_attrs['value'] = value
                if 'pad_value' in layer_attrs:
                    layer_attrs.pop('pad_value')
        tmp_paddings = copy.deepcopy(paddings)
        paddings[0] = tmp_paddings[2]
        paddings[1] = tmp_paddings[3]
        paddings[2] = tmp_paddings[0]
        paddings[3] = tmp_paddings[1]
        if paddle_op == 'paddle.nn.functional.pad':
            layer_attrs['pad'] = paddings
        else:
            layer_attrs['paddings'] = paddings
        if op_independent:
            self.paddle_graph.add_layer(
                paddle_op,
                inputs={'x': val_x.name},
                outputs=[node.name],
                **layer_attrs)
        else:
            self.paddle_graph.add_layer(
                paddle_op,
                inputs={'x': val_x.name},
                outputs=[node.name + '_padded'],
                **layer_attrs)
            return node.name + '_padded'

    @print_mapping_info
    def Unsqueeze(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        axes = node.get_attr('axes')
        layer_attrs = {'axis': axes}
        if len(val_x.out_shapes[0]) == 0:
            if node.name:
                self.paddle_graph.add_layer(
                    'paddle.reshape',
                    inputs={"x": val_x.name},
                    outputs=[node.name],
                    shape=[1])
        else:
            self.paddle_graph.add_layer(
                'paddle.unsqueeze',
                inputs={"x": val_x.name},
                outputs=[node.name],
                **layer_attrs)

    @print_mapping_info
    def Shrink(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        bias = node.get_attr('bias')
        lambd = node.get_attr('lambd')
        assert bias == 0.0, 'not support bias!=0'
        self.paddle_graph.add_layer(
            'paddle.nn.functional.hardshrink',
            inputs={"x": val_x.name},
            outputs=[node.name],
            threshold=lambd)

    @print_mapping_info
    def Constant(self, node):
        val_output = self.graph.get_node(node.layer.output[0], copy=True)

        value = node.get_attr('value')
        dtype = np.dtype(value.dtype)
        output_dtype = val_output.dtype
        if output_dtype:
            assert dtype == output_dtype, 'tensor dtype does not match storage dtype'

        shape = node.get_attr('shape', None)

        if shape is None:
            shape = val_output.out_shapes[0]
        if shape is None:
            shape = list(value.shape)
            _logger.warning('in (Constant -> %s): '
                            'attribute "shape" of %s not inferred, '
                            'using value as 1-D tensor may lead to failure',
                            val_output.name, val_output.name)
        if len(value) == 1:
            value = value.tolist()
            value = value[0]
            self.paddle_graph.add_layer(
                "paddle.full",
                inputs={},
                outputs=[node.name],
                dtype=string(dtype),
                shape=[1],
                fill_value=value)
        else:
            value = np.reshape(value, shape)
            self.params[node.name] = value
            self.paddle_graph.add_layer(
                kernel="paddle.static.create_parameter",
                inputs={},
                outputs=[node.name],
                dtype=string(dtype),
                shape=shape,
                name=string(node.name),
                default_initializer="paddle.nn.initializer.Constant(value=0.0)")

    @print_mapping_info
    def Resize(self, node):
        self._interpolate(node)

    @print_mapping_info
    def Upsample(self, node):
        self._interpolate(node)

    @print_mapping_info
    def InstanceNormalization(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_scale = self.graph.get_input_node(node, idx=1, copy=True)
        val_b = self.graph.get_input_node(node, idx=2, copy=True)
        epsilon = node.get_attr('epsilon', 1e-5)
        layer_attrs = {
            'eps': epsilon,
        }
        dim = len(val_x.out_shapes[0])
        if dim == 2:
            layer_attrs["data_format"] = string("NC")
        elif dim == 3:
            layer_attrs["data_format"] = string("NCL")
        elif dim == 4:
            layer_attrs["data_format"] = string("NCHW")
        elif dim == 5:
            layer_attrs["data_format"] = string("NCDHW")
        else:
            raise Exception(
                "Paddle only supports 2D, 3D, 4D or 5D input in InstanceNormalization.")
        self.paddle_graph.add_layer(
            "paddle.nn.functional.instance_norm",
            inputs={"x": val_x.name,
                    "weight": val_scale.name,
                    "bias": val_b.name},
            outputs=[node.name],
            **layer_attrs)

    @print_mapping_info
    def Expand(self, node):
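        # Expand is emulated via broadcasting: build a ones tensor of the
        # target shape, then multiply the input by it.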
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_shape = self.graph.get_input_node(node, idx=1, copy=True)
        val_x_dtype = val_x.dtype
        name_ones = node.name + '_ones'
        attr_ones = {
            'shape': val_shape.name,
            'dtype': string(val_x_dtype),
            'fill_value': 1
        }
        self.paddle_graph.add_layer(
            'paddle.full',
            inputs={},
            outputs=[name_ones],
            **attr_ones)
        inputs_dict = {'x': name_ones,
                       'y': val_x.name}
        self.paddle_graph.add_layer(
            'paddle.multiply',
            inputs=inputs_dict,
            outputs=[node.name])

    @print_mapping_info
    def Gather(self, node):
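        # Four cases by axis and indices rank: axis 0 maps directly to
        # paddle.gather (or to an embedding lookup when indexing an
        # initializer with 2-D indices); axis > 0 transposes the target
        # axis to the front, gathers, and transposes back; multi-dim
        # indices are flattened first and the result reshaped afterwards.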
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        indices = self.graph.get_input_node(node, idx=1, copy=True)
        indices_shape = indices.out_shapes[0]
        axis = node.get_attr('axis', 0)
        #assert len(
        #    indices_shape) <= 2, "Gather op don't support dim of indice >2 "
        if axis == 0 and len(indices_shape) <= 1:
            if len(val_x.out_shapes[0]) <= 1:
                self.paddle_graph.add_layer(
                    'paddle.gather',
                    inputs={'x': val_x.name,
                            'index': indices.name},
                    outputs=[node.name])
            elif len(val_x.out_shapes[0]) > 1:
                if len(indices_shape) == 0:
                    gather_ = node.name + '_1'
                    self.paddle_graph.add_layer(
                        'paddle.gather',
                        inputs={'x': val_x.name,
                                'index': indices.name},
                        outputs=[gather_])
                    self.paddle_graph.add_layer(
                        'paddle.squeeze',
                        inputs={'x': gather_},
                        outputs=[node.name],
                        axis=[0])
                else:
                    self.paddle_graph.add_layer(
                        'paddle.gather',
                        inputs={'x': val_x.name,
                                'index': indices.name},
                        outputs=[node.name])
        elif axis > 0 and len(indices_shape) <= 1:
            perm = list(range(len(val_x.out_shapes[0])))
            perm = [axis] + perm[:axis] + perm[axis + 1:]
            name_trans = val_x.name + '_trans'
            self.paddle_graph.add_layer(
                'paddle.transpose',
                inputs={"x": val_x.name},
                outputs=[name_trans],
                perm=perm)
            self.paddle_graph.add_layer(
                'paddle.gather',
                inputs={'x': name_trans,
                        'index': indices.name},
                outputs=[node.name])
            self.paddle_graph.add_layer(
                'paddle.transpose',
                inputs={"x": node.name},
                outputs=[node.name],
                perm=perm)
            if len(indices_shape) < 1:
                self.paddle_graph.add_layer(
                    'paddle.squeeze',
                    inputs={'x': node.name},
                    outputs=[node.name],
                    axis=[axis])
        elif axis == 0 and len(indices_shape) > 1:
            if val_x.out_shapes[0] is not None and isinstance(
                    val_x, ONNXGraphDataNode):
                indices_cast = indices.name + '_cast'
                self.paddle_graph.add_layer(
                    'paddle.cast',
                    inputs={"x": indices.name},
                    outputs=[indices_cast],
                    dtype=string('int64'))
                op_name = name_generator("embedding", self.nn_name2id)
                output_name = node.name
                layer_outputs = [op_name, output_name]
                self.paddle_graph.add_layer(
                    'paddle.nn.Embedding',
                    inputs={"x": indices_cast},
                    outputs=layer_outputs,
                    param_attr=string(val_x.name),
                    size=val_x.out_shapes[0])
            else:
                from functools import reduce
                reshape_shape = reduce(lambda x, y: x * y, indices_shape)
                indices_reshape = indices.name + '_shape'
                self.paddle_graph.add_layer(
                    'paddle.reshape',
                    inputs={"x": indices.name},
                    outputs=[indices_reshape],
                    shape=[reshape_shape, ])

                perm = list(range(len(val_x.out_shapes[0])))
                self.paddle_graph.add_layer(
                    'paddle.gather',
                    inputs={'x': val_x.name,
                            'index': indices_reshape},
                    outputs=[node.name])
                val_x_shape = val_x.out_shapes[0]
                reshaped_shape = []
                for i in perm:
                    reshaped_shape.append(indices_shape[i])
                for i in val_x_shape[:axis] + val_x_shape[axis + 1:]:
                    reshaped_shape.append(i)
                self.paddle_graph.add_layer(
                    'paddle.reshape',
                    inputs={"x": node.name},
                    outputs=[node.name],
                    shape=reshaped_shape)
        elif axis > 0 and len(indices_shape) > 1:
            from functools import reduce
            reshape_shape = reduce(lambda x, y: x * y, indices_shape)
            indices_reshape = indices.name + '_shape'
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={"x": indices.name},
                outputs=[indices_reshape],
                shape=[reshape_shape, ])

            perm = list(range(len(val_x.out_shapes[0])))
            perm = [axis] + perm[:axis] + perm[axis + 1:]
            name_trans = val_x.name + '_transpose'
            self.paddle_graph.add_layer(
                'paddle.transpose',
                inputs={"x": val_x.name},
                outputs=[name_trans],
                perm=perm)
            self.paddle_graph.add_layer(
                'paddle.gather',
                inputs={'x': name_trans,
                        'index': indices_reshape},
                outputs=[node.name])
            input_transpose = node.name + '_transpose'
            self.paddle_graph.add_layer(
                'paddle.transpose',
                inputs={"x": node.name},
                outputs=[input_transpose],
                perm=perm)
            val_x_shape = val_x.out_shapes[0]
            reshaped_shape = []
            for i in perm:
                reshaped_shape.append(indices_shape[i])
            for i in val_x_shape[:axis] + val_x_shape[axis + 1:]:
                reshaped_shape.append(i)
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={"x": input_transpose},
                outputs=[node.name],
                shape=reshaped_shape)

    @print_mapping_info
    def ScatterND(self, node):
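        # Paddle has no direct ScatterND equivalent, so for N-D indices it
        # is emulated with two scatter_nd_add calls on a zeros tensor: one
        # scatters the new values, the other builds a 0/1 mask that erases
        # the overwritten positions from the original input before the two
        # parts are added together.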
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        indices = self.graph.get_input_node(node, idx=1, copy=True)
        updates = self.graph.get_input_node(node, idx=2, copy=True)
        if len(indices.out_shapes[0]) == 1:
            self.paddle_graph.add_layer(
                'paddle.scatter',
                inputs={'x': val_x.name,
                        'index': indices.name,
                        'updates': updates.name},
                outputs=[node.name])
        else:
            input_inner_indices = node.name + '_input_inner_indices'
            shape = val_x.out_shapes[0]
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={"x": indices.name},
                outputs=[indices.name],
                shape=indices.out_shapes[0])

            zeros_like_val_x = val_x.name + '_zeros'
            self.paddle_graph.add_layer(
                'paddle.zeros_like',
                inputs={"x": val_x.name},
                outputs=[zeros_like_val_x])
            self.paddle_graph.add_layer(
                'paddle.scatter_nd_add',
                inputs={
                    'x': zeros_like_val_x,
                    'index': indices.name,
                    'updates': updates.name
                },
                outputs=[input_inner_indices])
            indices_mask = node.name + '_indices_mask'
            constant_minus_one = node.name + '_constant_minus_one'
            # full_like supports creating a tensor with the same shape as the input tensor
            self.paddle_graph.add_layer(
                'paddle.full_like',
                inputs={"x": updates.name},
                outputs=[constant_minus_one],
                dtype=string(updates.dtype),
                fill_value=-1)
            self.paddle_graph.add_layer(
                'paddle.scatter_nd_add',
                inputs={
                    'x': zeros_like_val_x,
                    'index': indices.name,
                    'updates': constant_minus_one
                },
                outputs=[indices_mask])
            constant_one = node.name + '_constant_1'
            # full_like supports creating a tensor with the same shape as the input tensor
            self.paddle_graph.add_layer(
                'paddle.full_like',
                inputs={"x": val_x.name},
                outputs=[constant_one],
                dtype=string(val_x.dtype),
                fill_value=1)
            input_out_indices_mask = node.name + '_input_out_indices_mask'
            self.paddle_graph.add_layer(
                "paddle.add",
                inputs={"x": indices_mask,
                        "y": constant_one},
                outputs=[input_out_indices_mask])

            input_out_indices = node.name + '_input_out_indices'
            self.paddle_graph.add_layer(
                "paddle.multiply",
                inputs={"x": val_x.name,
                        "y": input_out_indices_mask},
                outputs=[input_out_indices])

            self.paddle_graph.add_layer(
                "paddle.add",
                inputs={"x": input_inner_indices,
                        "y": input_out_indices},
                outputs=[node.name])

    @print_mapping_info
    def Range(self, node):
        val_start = self.graph.get_input_node(node, idx=0, copy=True)
        val_limit = self.graph.get_input_node(node, idx=1, copy=True)
        val_delta = self.graph.get_input_node(node, idx=2, copy=True)
        dtype = val_start.dtype
        inputs = {'start': val_start.name,
                  'end': val_limit.name,
                  'step': val_delta.name}
        self.paddle_graph.add_layer(
            'paddle.arange',
            inputs=inputs,
            outputs=[node.name],
            dtype=string(dtype))

    @print_mapping_info
    def Slice(self, node):
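        # Slice-1 keeps starts/ends/axes as attributes; from Slice-10 they
        # arrive as extra inputs (optionally with axes and steps), and
        # constant inputs are folded back into attributes where possible.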
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        starts, ends, axes, steps = None, None, None, None
        layer_attrs = {}
        if len(node.inputs) > 1:
            starts = self.graph.get_input_node(node, idx=1, copy=True)
            ends = self.graph.get_input_node(node, idx=2, copy=True)
            starts_value = _const_weight_or_none(starts)
            ends_value = _const_weight_or_none(ends)

            if len(node.inputs) > 3:
                axes = self.graph.get_input_node(node, idx=3, copy=True)
                axes = _const_weight_or_none(axes, necessary=True)
            if len(node.inputs) > 4:
                steps = self.graph.get_input_node(node, idx=4, copy=True)
                steps = _const_weight_or_none(steps)
            layer_attrs = {
                "axes": axes,
                "starts": starts.name,
                "ends": ends.name
            }
            if starts_value is not None and ends_value is not None:
                starts_value = starts_value.copy()
                ends_value = ends_value.copy()
                #for idx in range(len(ends_value)):
                #    if ends_value[idx] > 2**31 - 1:
                #        ends_value[idx] = 2**31 - 1
                #print(val_x.out_shapes)
                for idx in range(len(ends_value)):
                    if starts_value[idx] >= val_x.out_shapes[0][axes[idx]]:
                        starts_value[idx] = val_x.out_shapes[0][axes[idx]] - 1
                        ends_value[idx] = val_x.out_shapes[0][axes[idx]]
                    elif ends_value[idx] > 2**31 - 1:
                        ends_value[idx] = 2**31 - 1
                layer_attrs = {
                    "axes": axes,
                    "starts": starts_value,
                    "ends": ends_value
                }
            else:
                if starts.dtype != 'int32':
                    starts_cast = starts.name + '_cast'
                    self.paddle_graph.add_layer(
                        'paddle.cast',
                        inputs={"x": starts.name},
                        outputs=[starts_cast],
                        dtype=string('int32'))
                    layer_attrs['starts'] = starts_cast
                if ends.dtype != 'int32':
                    ends_cast = ends.name + '_cast'
                    self.paddle_graph.add_layer(
                        'paddle.cast',
                        inputs={"x": ends.name},
                        outputs=[ends_cast],
                        dtype=string('int32'))
                    layer_attrs['ends'] = ends_cast
        else:
            starts = node.get_attr('starts')
            ends = node.get_attr('ends')
            axes = node.get_attr('axes')
            for idx in range(len(ends)):
                if ends[idx] > 2**31 - 1:
                    ends[idx] = 2**31 - 1
            layer_attrs = {"axes": axes, "starts": starts, "ends": ends}

        if steps is not None:
            layer_attrs['strides'] = steps
            self.paddle_graph.add_layer(
                'paddle.strided_slice',
                inputs={"x": val_x.name},
                outputs=[node.name],
                **layer_attrs)
        else:
            self.paddle_graph.add_layer(
                'paddle.slice',
                inputs={"input": val_x.name},
                outputs=[node.name],
                **layer_attrs)

    @print_mapping_info
    def ConstantOfShape(self, node):
        val_shape = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_node(node.layer.output[0], copy=True)

        value = node.get_attr('value')
        dtype = value.dtype
        value = value.tolist()
        assert len(value) == 1, ('given value not Scalar, shape of value > 1, '
                                 'this is not supported')
        if len(value) == 1:
            value = value[0]
            layer_attrs = {
                'shape': val_shape.name,
                'dtype': string(dtype),
                'fill_value': value
            }
            self.paddle_graph.add_layer(
                "paddle.full",
                inputs={},
                outputs=[node.name],
                **layer_attrs)

    @print_mapping_info
    def Clip(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_node(node.layer.output[0], copy=True)
        max_value, min_value = None, None
        if len(node.inputs) == 1:
            max_value = node.get_attr('max')
            min_value = node.get_attr('min')
            layer_attrs = {
                'max': max_value,
                'min': min_value,
            }
            self.paddle_graph.add_layer(
                'paddle.clip',
                inputs={"x": val_x.name},
                outputs=[node.name],
                **layer_attrs)
        else:
            min_ipt = self.graph.get_input_node(node, idx=1, copy=True)
            max_ipt = self.graph.get_input_node(node, idx=2, copy=True)
            min_value = _const_weight_or_none(min_ipt)
            max_value = _const_weight_or_none(max_ipt)
            if max_value.shape == (1, ):
                max_value = max_value[0]
            if min_value.shape == (1, ):
                min_value = min_value[0]
        if max_value is not None and min_value is not None:
            layer_attrs = {'max': max_value, 'min': min_value}
            self.paddle_graph.add_layer(
                'paddle.clip',
                inputs={"x": val_x.name},
                outputs=[node.name],
                **layer_attrs)
        else:
            raise Exception("min_value and max_value of Clip should not be None.")

    @print_mapping_info
    def Split(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        paddle_op = 'split'
        split = node.get_attr('split')
        axis = node.get_attr('axis', 0)
        layer_attrs = {
            'num_or_sections': split,
            'axis': axis,
        }
        outputs_list = list()
        if isinstance(split, list) or isinstance(split, tuple):
            for i in range(len(split)):
                outputs_list.append("{}_p{}".format(node.layer_name, i))
        else:
            outputs_list.append(node.name)
        self.paddle_graph.add_layer(
            'paddle.split',
            inputs={"x": val_x.name},
            outputs=outputs_list,
            **layer_attrs)

    @print_mapping_info
    def Reshape(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_shape = self.graph.get_input_node(node, idx=1, copy=True)
        val_reshaped = self.graph.get_node(node.layer.output[0], copy=True)
        shape_value = _const_weight_or_none(val_shape)
        shape_dims = len(val_shape.out_shapes[0])

        if shape_value is not None:
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={'x': val_x.name},
                outputs=[node.name],
                shape=shape_value.tolist())
        elif len(node.out_shapes[0]) > 0 and _is_static_shape(node.out_shapes[
                0]):
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={'x': val_x.name},
                outputs=[node.name],
                shape=node.out_shapes[0])
        else:
            # shape may be [], come from Gather by scalar indices
            if len(val_shape.out_shapes[0]) > 0:
                self.paddle_graph.add_layer(
                    'paddle.reshape',
                    inputs={'x': val_shape.name},
                    outputs=[val_shape.name],
                    shape=val_shape.out_shapes[0])
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={'x': val_x.name,
                        'shape': val_shape.name},
                outputs=[node.name])

    @print_mapping_info
    def Cast(self, node):
        val_input = self.graph.get_input_node(node, idx=0, copy=True)
        val_output = self.graph.get_node(node.layer.output[0], copy=True)

        dtype = node.get_attr('to')
        if not isinstance(dtype, np.dtype):
            dtype = TENSOR_TYPE_TO_NP_TYPE[dtype]

        output_dtype = val_output.dtype
        if output_dtype:
            assert dtype == output_dtype, "dtype of 'to' does not match output dtype"
        self.paddle_graph.add_layer(
            'paddle.cast',
            inputs={'x': val_input.name},
            outputs=[node.name],
            dtype=string(dtype))

    @print_mapping_info
    def Not(self, node):
        val_input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer('paddle.logical_not',
                                    inputs={'x': val_input.name},
                                    outputs=[node.name])

    @print_mapping_info
    def AveragePool(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)

        auto_pad = node.get_attr('auto_pad', 'NOTSET')
        kernel_shape = node.get_attr("kernel_shape")
        poolnd = len(kernel_shape)
        strides = node.get_attr("strides")
        ceil_mode = bool(node.get_attr('ceil_mode', 0))
        pads = node.get_attr('pads', [0] * (poolnd * 2))

        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

        if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
            input_shape = val_x.out_shapes[0]
            pad_h = _get_same_padding(input_shape[2], kernel_shape[0],
                                      strides[0])
            pad_w = _get_same_padding(input_shape[3], kernel_shape[1],
                                      strides[1])
            paddings = pad_h + pad_w

        paddle_op = 'fluid.layers.pool{}d'.format(poolnd)
        assert 2 <= poolnd <= 3, 'only pool2d and pool3d are supported'
        layer_attrs = {
            "pool_size": kernel_shape,
            "pool_type": string('avg'),
            "pool_stride": strides,
            "pool_padding": paddings,
            "ceil_mode": ceil_mode,
            "exclusive": 'True',
            "name": string(node.name)
        }
        self.paddle_graph.add_layer(
            paddle_op,
            inputs={'input': val_x if isinstance(val_x, str) else val_x.name},
            outputs=[node.name],
            **layer_attrs)
        # TODO(syf): op has diff

    @print_mapping_info
    def Concat(self, node):
        inputs_list = []
        dtypes = set()
        for i in range(len(node.layer.input)):
            ipt = self.graph.get_input_node(node, idx=i, copy=True)
            inputs_list.append(ipt.name)
            dtypes.add(ipt.dtype)
        if len(dtypes) > 1:
            assert False, 'Unsupported situation happened, please create issue on https://github.com/PaddlePaddle/X2Paddle/issues.'
        axis = node.get_attr('axis')
        self.paddle_graph.add_layer(
            'paddle.concat',
            inputs={"x": inputs_list},
            outputs=[node.name],
            axis=axis)

    @print_mapping_info
    def Flatten(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        output_shape = node.out_shapes[0]
        axis = node.get_attr('axis', 1)
        shape_list = [1, 1]
        if axis == 0:
            for s in output_shape:
                shape_list[1] *= s
        else:
            for s in output_shape[:axis]:
                shape_list[0] *= s
            for s in output_shape[axis:]:
                shape_list[1] *= s
        self.paddle_graph.add_layer(
            'paddle.reshape',
            inputs={"x": val_x.name},
            outputs=[node.name],
            shape=shape_list)

    @print_mapping_info
    def Gemm(self, node):
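        # Gemm computes alpha * A' * B' + beta * C, decomposed below into
        # paddle.matmul, paddle.scale and paddle.add.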
        val_a = self.graph.get_input_node(node, idx=0, copy=True)
        val_b = self.graph.get_input_node(node, idx=1, copy=True)
        val_c = self.graph.get_input_node(node, idx=2, copy=True)

        alpha = node.get_attr('alpha', 1.)  # optional
        beta = node.get_attr('beta', 1.)  # optional
        trans_a = bool(node.get_attr('transA', 0))  # optional
        trans_b = bool(node.get_attr('transB', 0))  # optional
        val_mm = node.name + '_mm'
        matmul_inputs = {"x": val_a.name,
                         "y": val_b.name}
        attr_matmul = {
            "transpose_x": trans_a,
            "transpose_y": trans_b,
        }
        self.paddle_graph.add_layer(
            'paddle.matmul',
            inputs=matmul_inputs,
            outputs=[val_mm],
            **attr_matmul)
        self.paddle_graph.add_layer(
            "paddle.scale",
            inputs={"x": val_mm},
            outputs=[val_mm],
            scale=alpha)

        if beta != 0:
            if beta == 1.:
                add_inputs = {"x": val_mm,
                              "y": val_c.name}
                self.paddle_graph.add_layer(
                    "paddle.add",
                    inputs=add_inputs,
                    outputs=[node.name])
            else:
                var_beta = node.name + '_beta'
                self.paddle_graph.add_layer(
                    "paddle.scale",
                    inputs={"x": val_c.name},
                    outputs=[var_beta],
                    scale=beta)
                add_inputs = {"x": val_mm, "y": var_beta}
                self.paddle_graph.add_layer(
                    "paddle.add",
                    inputs=add_inputs,
                    outputs=[node.name])

    @print_mapping_info
    def Sum(self, node):
        val_inps = node.layer.input
        inputs_dict = {
            "x": self.graph.get_input_node(
                node, idx=0, copy=True).name,
            "y": self.graph.get_input_node(
                node, idx=1, copy=True).name,
        }
        self.paddle_graph.add_layer("paddle.add",
                                    inputs=inputs_dict,
                                    outputs=[node.name])

        for idx, ipt in enumerate(val_inps[2:]):
            y = self.graph.get_input_node(node, idx=idx + 2, copy=True)
            inputs_dict = {
                "x": node.name,
                "y": y.name,
            }
            self.paddle_graph.add_layer(
                "paddle.add",
                inputs=inputs_dict,
                outputs=[node.name])

    @print_mapping_info
    def MatMul(self, node):
C
channingss 已提交
1173 1174
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
C
Channingss 已提交
1175 1176
        x_shape = val_x.out_shapes[0]
        y_shape = val_y.out_shapes[0]
        inputs_dict = {"x": val_x.name, 
                       "y": val_y.name}
        if y_shape[0] == 1 and x_shape[-1] != 1 and x_shape[0] != 1:
            y_squeeze = val_y.name + '_squeeze'
            self.paddle_graph.add_layer(
                "paddle.squeeze",
                inputs={"x": val_y.name},
                outputs=[y_squeeze],
                axis=[0])
            inputs_dict['y'] = y_squeeze
            self.paddle_graph.add_layer(
                "paddle.matmul", 
                inputs=inputs_dict, 
                outputs=[node.name])
        else:
            self.paddle_graph.add_layer(
                "paddle.matmul", 
                inputs=inputs_dict, 
                outputs=[node.name])
            
    @print_mapping_info
    def BatchNormalization(self, node):
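        """Map ONNX BatchNormalization (inference form) to
        paddle.nn.functional.batch_norm using the stored running statistics:

            y = scale * (x - running_mean) / sqrt(running_var + epsilon) + bias
        """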
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_scale = self.graph.get_input_node(node, idx=1, copy=True)
        val_b = self.graph.get_input_node(node, idx=2, copy=True)
        val_mean = self.graph.get_input_node(node, idx=3, copy=True)
        val_var = self.graph.get_input_node(node, idx=4, copy=True)

        momentum = node.get_attr('momentum', .9)
        epsilon = node.get_attr('epsilon', 1e-5)

        # Attribute: spatial is used in BatchNormalization-1,6,7
        spatial = bool(node.get_attr('spatial'))
        layer_attrs = {
            "momentum": momentum,
            "epsilon": epsilon,
        }
        self.paddle_graph.add_layer(
            "paddle.nn.functional.batch_norm", 
            inputs={"x": val_x.name,
                    "weight": val_scale.name,
                    "bias": val_b.name,
                    "running_mean": val_mean.name,
                    "running_var": val_var.name}, 
            outputs=[node.name], 
            **layer_attrs)
        
    @print_mapping_info
    def Transpose(self, node):
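        """Map ONNX Transpose to paddle.transpose; per the ONNX spec, a
        missing 'perm' attribute reverses the dimensions."""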
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        perm = node.get_attr('perm')
        if perm is None:
            # ONNX default when 'perm' is absent: reverse the dimensions
            perm = list(reversed(range(len(val_x.out_shapes[0]))))
        self.paddle_graph.add_layer(
            "paddle.transpose", 
            inputs={"x": val_x.name},
            outputs=[node.name], 
            perm=perm)

    @print_mapping_info
    def PRelu(self, node):
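        """Map ONNX PRelu to paddle.nn.functional.prelu:
        out = x if x >= 0 else slope * x (elementwise).

        A scalar slope means 'all' mode; a 1-D per-channel slope is reshaped
        to the [1, C] layout stored in self.params; a full elementwise slope
        ('element' mode) is not supported here.
        """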
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_slope = self.graph.get_input_node(node, idx=1, copy=True)

        mode = 'channel'
        shape_slope = val_slope.out_shapes[0]
        if shape_slope == [1]:
            mode = 'all'
        elif len(shape_slope) > 2:
            raise Exception("The 'element' mode is not supported yet!")

        if mode == 'channel' and len(shape_slope) == 1:
            # paddle params shape need be [1, channel]
            slope_data = _const_weight_or_none(val_slope)
            slope_data = np.reshape(slope_data, [1] + shape_slope)
            self.params[val_slope.name] = slope_data
  
        self.paddle_graph.add_layer(
            "paddle.nn.functional.prelu", 
            inputs={"x": val_x.name,
                    "weight": val_slope.name}, 
            outputs=[node.name])

    @print_mapping_info
    def Squeeze(self, node):
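        """Map ONNX Squeeze to paddle.squeeze.

        A 1-D input is passed through unchanged via a same-dtype paddle.cast,
        since squeezing its only axis would produce a 0-D tensor.
        """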
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        axes = node.get_attr('axes')
        if len(val_x.out_shapes[0]) == 1:
            self.paddle_graph.add_layer(
                "paddle.cast",
                inputs={"x": val_x.name},
                outputs=[node.name],
                dtype=string(val_x.dtype))
        else:
            self.paddle_graph.add_layer(
                "paddle.squeeze", 
                inputs={"x": val_x.name}, 
                outputs=[node.name], 
                axis=axes)

    @print_mapping_info
    def Equal(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        self.paddle_graph.add_layer(
            "paddle.equal",
            inputs={'x': val_x.name,
                    'y': val_y.name},
            outputs=[node.name])

    @print_mapping_info
    def Greater(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        self.paddle_graph.add_layer(
            "paddle.greater_than",
            inputs={'x': val_x.name,
                    'y': val_y.name},
            outputs=[node.name])

    @print_mapping_info
    def Where(self, node):
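        """Map ONNX Where without a direct select op, using cast masks:

            out = cast(cond) * x + cast(!cond) * y

        where both casts are to the dtype of x.
        """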
        condition = self.graph.get_input_node(node, idx=0, copy=True)
        val_x = self.graph.get_input_node(node, idx=1, copy=True)
        val_y = self.graph.get_input_node(node, idx=2, copy=True)

        not_condition = condition.name + '_not'
        self.paddle_graph.add_layer(
            "paddle.logical_not",
            inputs={"x": condition.name},
            outputs=[not_condition])
        cast_not_condition = not_condition + '_cast'
        self.paddle_graph.add_layer(
            "paddle.cast",
            inputs={"x": not_condition},
            outputs=[cast_not_condition],
            dtype=string(val_x.dtype))
        cast_condition = condition.name + '_cast'
        self.paddle_graph.add_layer(
            "paddle.cast",
            inputs={"x": condition.name},
            outputs=[cast_condition],
            dtype=string(val_x.dtype))
        mul_val_x = val_x.name + '_mul'
        self.paddle_graph.add_layer(
            "paddle.multiply",
            inputs={'x': val_x.name,
                    'y': cast_condition},
            outputs=[mul_val_x])
        mul_val_y = val_y.name + '_mul'
        self.paddle_graph.add_layer(
            "paddle.multiply",
            inputs={'x': val_y.name,
                    'y': cast_not_condition},
            outputs=[mul_val_y])

        self.paddle_graph.add_layer(
            "paddle.add",
            inputs={'x': mul_val_x,
                    'y': mul_val_y},
            outputs=[node.name])

    @print_mapping_info
    def NonZero(self, node):
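        """Map ONNX NonZero, which returns indices of shape [rank, n], while
        paddle.nonzero returns [n, rank]; the result is therefore rearranged
        (a transpose for 1-D inputs, split/concat otherwise).
        """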
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_x_dim = len(val_x.out_shapes[0])
        if val_x_dim == 1:
            self.paddle_graph.add_layer(
                "paddle.nonzero", 
                inputs={"x": val_x.name}, 
                outputs=[val_x.name])
            self.paddle_graph.add_layer(
                "paddle.transpose",
                inputs={"x": val_x.name},
                outputs=[node.name],
                perm=[1, 0])
        if val_x_dim > 1:
            self.paddle_graph.add_layer(
                "paddle.nonzero", 
                inputs={"x": val_x.name}, 
                outputs=[val_x.name])
            self.paddle_graph.add_layer(
                "paddle.split",
                inputs={"x": val_x.name}, 
                outputs=[val_x.name],
                num_or_sections=1,
                axis=val_x_dim)
            self.paddle_graph.add_layer(
                "paddle.concat", 
                inputs={"x": val_x.name}, 
                outputs=[node.name])

    @print_mapping_info
    def Identity(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.assign", 
            inputs={"x": val_x.name}, 
            outputs=[node.name])
        
    @print_mapping_info
    def Tile(self, node):
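        """Map ONNX Tile to paddle.tile.

        'repeats' may be a compile-time constant (initializer/Constant node)
        or a runtime tensor; constants become plain Python lists, and runtime
        tensors are cast to int32 first.
        """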
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_repeats = self.graph.get_input_node(node, idx=1, copy=True)
        repeats = _const_weight_or_none(val_repeats)

        if repeats is None:
            repeats = val_repeats.name
            if val_repeats.dtype != 'int32':
                self.paddle_graph.add_layer(
                    "paddle.cast",
                    inputs={"x": repeats},
                    outputs=["{}.tmp".format(repeats)],
                    dtype=string("int32"))
                repeats = "{}.tmp".format(repeats)

        elif isinstance(repeats, int):
            repeats = [repeats]
        elif isinstance(repeats, np.ndarray):
            # constant repeats comes back as a numpy array; make it a plain
            # list so it serializes cleanly into the generated code
            repeats = repeats.tolist()

        self.paddle_graph.add_layer(
            "paddle.tile", 
            inputs={"x": val_x.name}, 
            outputs=[node.name], 
            repeat_times=repeats)

    @print_mapping_info
    def MaxPool(self, node):
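        """Map ONNX MaxPool to paddle.nn.functional.max_pool1d/2d/3d.

        For auto_pad SAME_UPPER/SAME_LOWER the per-side padding is derived
        from the input shape as in _get_same_padding (sketch):
            out = ceil(in / stride)
            pad_total = (out - 1) * stride + kernel - in
        with pad_total split into floor(pad_total / 2) and the remainder.
        """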
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        auto_pad = node.get_attr('auto_pad', 'NOTSET')
        assert node.get_attr(
            "dilations") is None, 'only the default dilations (all 1) is supported'  # optional

        kernel_shape = node.get_attr("kernel_shape")
        poolnd = len(kernel_shape)
        strides = node.get_attr("strides")
        ceil_mode = bool(node.get_attr('ceil_mode', 0))  # optional
        pads = node.get_attr('pads', [0] * (poolnd * 2))  # optional
        paddle_op = 'paddle.nn.functional.max_pool{}d'.format(poolnd)
        assert 1 <= poolnd <= 3, 'only max_pool1d, max_pool2d and max_pool3d are supported'

        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

        if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
            input_shape = val_x.out_shapes[0]
C
Channingss 已提交
1428 1429 1430 1431 1432
            pad_h = _get_same_padding(input_shape[2], kernel_shape[0],
                                      strides[0])
            pad_w = _get_same_padding(input_shape[3], kernel_shape[1],
                                      strides[1])
            paddings = pad_h + pad_w
S
SunAhong1993 已提交
1433 1434 1435 1436 1437
            
        layer_attrs = {
            "kernel_size": kernel_shape,
            "stride": strides,
            "padding": paddings,
            "ceil_mode": ceil_mode,
        }
        self.paddle_graph.add_layer(
            paddle_op, 
            inputs={'x': val_x if isinstance(val_x, str) else val_x.name}, 
            outputs=[node.name], 
            **layer_attrs)

    @print_mapping_info
    def GlobalMaxPool(self, node):
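        """Map ONNX GlobalMaxPool to adaptive max pooling.

        The adaptive output size is read from the inferred output shape
        (node.out_shapes[0][2:]), which for a true global pool is all ones.
        """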
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        input_shape = val_x.out_shapes[0]
        if len(input_shape) == 4:
            poolnd = 2
        elif len(input_shape) == 5:
            poolnd = 3
        elif len(input_shape) == 3:
            poolnd = 1
        paddle_op = 'paddle.nn.functional.adaptive_max_pool{}d'.format(poolnd)
        assert 1 <= poolnd <= 3, 'only adaptive_max_pool1d, adaptive_max_pool2d and adaptive_max_pool3d are supported'
        output_shape = node.out_shapes[0]
        self.paddle_graph.add_layer(
            paddle_op, 
            inputs={'x': val_x.name}, 
            outputs=[node.name], 
            output_size=output_shape[2:])
        
    @print_mapping_info
    def GlobalAveragePool(self, node):
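        """Map ONNX GlobalAveragePool to adaptive average pooling; same
        output-shape handling as GlobalMaxPool above."""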
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        input_shape = val_x.out_shapes[0]
        if len(input_shape) == 4:
            poolnd = 2
        elif len(input_shape) == 5:
            poolnd = 3
        elif len(input_shape) == 3:
            poolnd = 1
        paddle_op = 'paddle.nn.functional.adaptive_avg_pool{}d'.format(poolnd)
        assert 1 <= poolnd <= 3, 'only adaptive_avg_pool1d, adaptive_avg_pool2d and adaptive_avg_pool3d are supported'
        output_shape = node.out_shapes[0]
        self.paddle_graph.add_layer(
            paddle_op, 
            inputs={'x': val_x.name}, 
            outputs=[node.name], 
            output_size=output_shape[2:])

    @print_mapping_info
    def Conv(self, node):
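        """Map ONNX Conv to paddle.nn.functional.conv2d/conv3d.

        strides/dilations/group carry over directly; explicit 'pads' are made
        symmetric via _pad_if_asymmetric, and auto_pad SAME_UPPER/SAME_LOWER
        is resolved to explicit per-side paddings from the input shape.
        """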
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_w = self.graph.get_input_node(node, idx=1, copy=True)
        has_bias = len(node.layer.input) == 3
        if has_bias:
            val_b = self.graph.get_input_node(node, idx=2, copy=True)
        auto_pad = node.get_attr('auto_pad', 'NOTSET')

        kernel_shape = node.get_attr('kernel_shape')
        convnd = len(kernel_shape)
        assert 2 <= convnd <= 3, 'only conv2d and conv3d are supported'
        num_out_channels = val_w.out_shapes[0][0]
        num_in_channels = val_w.out_shapes[0][1]
        paddle_op = 'paddle.nn.functional.conv{}d'.format(convnd)

        num_groups = node.get_attr('group', 1)
        strides = node.get_attr('strides', [1] * convnd)
        dilations = node.get_attr('dilations', [1] * convnd)
        pads = node.get_attr('pads', [0] * (convnd * 2))

        input_shape = val_x.out_shapes[0]
        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

        if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
            pad_h = _get_same_padding(input_shape[2], kernel_shape[0],
                                      strides[0])
            pad_w = _get_same_padding(input_shape[3], kernel_shape[1],
                                      strides[1])
            paddings = pad_h + pad_w

        layer_attrs = {
            "stride": strides,
            "padding": paddings,
            "dilation": dilations,
            "groups": num_groups,
        }
        layer_inputs = {
            "x": val_x.name,
            "weight": val_w.name
        }
        if has_bias:
            layer_inputs["bias"] = val_b.name
        self.paddle_graph.add_layer(
            paddle_op, 
            inputs=layer_inputs, 
            outputs=[node.name], 
            **layer_attrs)

    @print_mapping_info
    def ConvTranspose(self, node):
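        """Map ONNX ConvTranspose to paddle.nn.functional.conv2d/3d_transpose.

        The spatial output size follows the usual deconvolution relation
        (also computed below):
            out = (in - 1) * stride - 2 * pad + dilation * (kernel - 1) + 1
                  + output_padding
        """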
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_w = self.graph.get_input_node(node, idx=1, copy=True)
        val_b = None
        if len(node.layer.input) > 2:
            val_b = self.graph.get_input_node(node, idx=2, copy=True)
        auto_pad = node.get_attr('auto_pad', 'NOTSET')
        out_padding = node.get_attr('output_padding', [0, 0])
        kernel_shape = node.get_attr('kernel_shape')
        assert kernel_shape, 'kernel_shape not inferred'
        convnd = len(kernel_shape)
        assert 2 <= convnd <= 3, 'only conv2d_transpose and conv3d_transpose are supported'
        num_in_channels = val_w.out_shapes[0][0]
        num_out_channels = val_w.out_shapes[0][1]
        paddle_op = 'paddle.nn.functional.conv{}d_transpose'.format(convnd)

        num_groups = node.get_attr('group', 1)
        strides = node.get_attr('strides', [1] * convnd)
        dilations = node.get_attr('dilations', [1] * convnd)
        output_size = node.get_attr('output_shape', [])
        pads = node.get_attr('pads', [0] * (convnd * 2))

        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

        output_size = [0, 0]

        output_size[0] = (val_x.out_shapes[0][2] - 1
                          ) * strides[0] - 2 * paddings[0] + dilations[0] * (
                              kernel_shape[0] - 1) + 1 + out_padding[0]
        output_size[1] = (val_x.out_shapes[0][3] - 1
                          ) * strides[1] - 2 * paddings[1] + dilations[1] * (
                              kernel_shape[1] - 1) + 1 + out_padding[1]
        layer_inputs = {'x': val_x.name,
                       "weight": val_w.name}
        layer_attrs = {
            "stride": strides,
            "dilation": dilations,
            "padding": paddings,
            "groups": num_groups,
            "output_size": node.out_shapes[0][2:]}
        if val_b is not None:
            layer_inputs["bias"] = val_b.name
        self.paddle_graph.add_layer(
            kernel=paddle_op,
            inputs=layer_inputs,
            outputs=[node.name],
            **layer_attrs)
        
    @print_mapping_info
    def ArgMax(self, node):
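        """Map ONNX ArgMax to paddle.argmax; keepdims follows the ONNX
        default of 1 when the attribute is absent."""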
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        axis = node.get_attr('axis')
        keepdims = bool(node.get_attr('keepdims', 1))  # ONNX default keepdims is 1
        layer_attrs = {'axis': axis,
                       'keepdim': keepdims}
        self.paddle_graph.add_layer(
            'paddle.argmax', 
            inputs={"x": val_x.name}, 
            outputs=[node.name],
            **layer_attrs)