# Copyright (c) 2019  PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from x2paddle.decoder.onnx_decoder import ONNXGraph, ONNXGraphNode, ONNXGraphDataNode
from x2paddle.core.graph import GraphNode
from x2paddle.core.util import *
from functools import reduce
import numpy as np
import onnx
import onnx.numpy_helper as numpy_helper
from onnx.mapping import TENSOR_TYPE_TO_NP_TYPE
import logging as _logging
from collections import OrderedDict
import math
import os
import copy
import sys
import shutil

_logger = _logging.getLogger(__name__)


def _const_weight_or_none(node, necessary=False):
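    '''
    Return the constant value bound to ``node``: a Constant op's value or a
    graph initializer's weight; otherwise return None. When ``necessary`` is
    True, a non-constant node raises an AssertionError.
    '''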
    if 'Constant' in node.layer_type:
        return node.value
    if isinstance(node, ONNXGraphDataNode):
        return node.weight
    if necessary:
        assert False, '{} should be an initializer or Constant operator.'.format(
            node.name)
    return None


def _rename_or_remove_weight(weights, origin_name, target_name=None, is_remove=True):
    '''
    Rename a parameter following Paddle's parameter naming rules.

    Args:
        weights(dict[String:np.ndarray]): Dict storing parameters; each key is a parameter name.
        origin_name(String): Name of the parameter to rename or remove.
        target_name(String, optional): If not None, add the new key-value pair
            {target_name: weights[origin_name]} to weights; target_name must follow
            Paddle's parameter naming rules. Default: None.
        is_remove(bool, optional): If True, remove the original key-value pair. Default: True.
    Returns:
        None
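
    Example (illustrative, with hypothetical names):
        _rename_or_remove_weight(weights, 'conv1_W', 'conv0.weight')
        # moves weights['conv1_W'] to weights['conv0.weight'] and removes the old key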
    '''
    if origin_name not in weights:
        raise KeyError('{} is not a key in weights'.format(origin_name))
    if is_remove:
        # remove weight
        data = weights.pop(origin_name)
    else:
        data = weights[origin_name]
    if target_name is not None:
        # rename weight
        weights[target_name] = data

def _is_static_shape(shape):
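    # A shape is treated as static when it has at most one unknown (-1)
    # dimension and no invalid dimensions (values below -1).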
    negative_dims = 0
    error_dims = 0
    for dim in shape:
        if dim < 0:
            negative_dims += 1
        if dim < -1:
            error_dims += 1
    if negative_dims > 1:
        return False
    if error_dims > 0:
        return False
    return True


def _get_same_padding(in_size, kernel_size, stride):
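    # Split SAME padding across both sides of one spatial dim: e.g. in_size=7,
    # kernel_size=3, stride=2 gives new_size=4 and pad_size=2, returned as [1, 1].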
    new_size = int(math.ceil(in_size * 1.0 / stride))
    pad_size = (new_size - 1) * stride + kernel_size - in_size
    pad0 = int(pad_size / 2)
    pad1 = pad_size - pad0
    return [pad0, pad1]


def print_mapping_info(func):
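    # Print the failing node's name and op type before re-raising any
    # exception thrown by the wrapped mapping function.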
    def run_mapping(*args, **kwargs):
        node = args[1]
        try:
            res = func(*args, **kwargs)
        except:
            print("convert failed node:{}, op_type is {}".format(
                node.name[9:], node.layer_type))
            raise
        else:
            return res

    return run_mapping


class OpSet9():
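    '''Convert ONNX operators (opset 9) into equivalent PaddlePaddle layers.'''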
    elementwise_ops = {
        'Add': 'paddle.add',
        'Div': 'paddle.divide',
        'Sub': 'paddle.subtract',
        'Mul': 'paddle.multiply',
        'Pow': 'paddle.pow',
    }

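    # Each value is [paddle_api, {onnx_attr: paddle_attr}, {onnx_attr: default}];
    # the attribute-name map and the defaults dict are optional.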
    directly_map_ops = {
        'Ceil': ['paddle.ceil'],
        # reduce function
        'ReduceMean': ['paddle.mean',
                       dict(axes='axis', keepdims='keepdim'), 
                       dict(axes=None, keepdims=1)],
        'ReduceSum': ['paddle.sum', 
                      dict(axes='axis', keepdims='keepdim'), 
                      dict(axes=None, keepdims=1)],
        'ReduceMin': ['paddle.min', 
                      dict(axes='axis', keepdims='keepdim'), 
                      dict(axes=None, keepdims=1)],
        'ReduceMax': ['paddle.max', 
                      dict(axes='axis', keepdims='keepdim'), 
                      dict(axes=None, keepdims=1)],
        'ReduceProd': ['paddle.prod', 
                      dict(axes='axis', keepdims='keepdim'), 
                      dict(axes=None, keepdims=1)],
        # active function
        'Relu': ['paddle.nn.ReLU'],
        'LeakyRelu': ['paddle.nn.LeakyReLU', 
                      dict(alpha='negative_slope'), 
                      dict(negative_slope=.01)],
        'Elu': ['paddle.nn.functional.elu', 
                dict(alpha='alpha'), 
                dict(alpha=1.)],
        'ThresholdedRelu': ['paddle.nn.functional.thresholded_relu', 
                            dict(alpha='threshold'),
                            dict(alpha=1.)],
        'Tanh': ['paddle.nn.Tanh'],
        'Sigmoid': ['paddle.nn.Sigmoid'],
        'Softsign': ['paddle.nn.Softsign'],
        'Softplus': ['paddle.nn.Softplus', 
                     dict(threshold='threshold'), 
                     dict(threshold=float(sys.maxsize))],
        'Exp': ['paddle.exp'],
        'Log': ['paddle.log'],
        'LogSoftmax': ['paddle.nn.functional.log_softmax', 
                    dict(axis='axis'), 
                    dict(axis=1)],
        'Softmax': ['paddle.nn.Softmax', 
                    dict(axis='axis'), 
                    dict(axis=1)],
        'Sqrt': ['paddle.sqrt'],
        'Floor': ['paddle.floor'],
        'Abs': ['paddle.abs'],
        'Erf': ['paddle.erf'],
    }

    def __init__(self, decoder, paddle_graph):
        super(OpSet9, self).__init__()
        self.graph = decoder.graph
        self.paddle_graph = paddle_graph
        self.input_index = 0
        self.inputs_info = dict()
        self.weights = dict()
        self.nn_name2id = dict()
        self.done_weight_list = list()

    @print_mapping_info
    def directly_map(self, node, *args, **kwargs):
        inputs = node.layer.input
        assert len(inputs) == 1, 'directly_map error with multi inputs'
        input = self.graph.get_input_node(node, idx=0, copy=True)
        onnx_attrs = node.attr_map
        if '' in onnx_attrs:
            onnx_attrs.pop('')
        if '_' in onnx_attrs:
            onnx_attrs.pop('_')
        op_info = self.directly_map_ops[node.layer_type]
        paddle_op = op_info[0]
        layer_attrs = dict()
        if len(op_info) > 1:
            attrs_name_map_dict = op_info[1]
            for onnx_attr_name, pd_attr_name in attrs_name_map_dict.items():
                if onnx_attr_name in onnx_attrs:
                    layer_attrs[pd_attr_name] = onnx_attrs[onnx_attr_name]
                else:
                    layer_attrs[pd_attr_name] = op_info[2][onnx_attr_name]
        if paddle_op.startswith("paddle.nn") and 'functional' not in paddle_op:
            op_name = paddle_op[10:].lower()
            op_name = name_generator(op_name, self.nn_name2id)
            output_name = node.name
            layer_outputs = [op_name, output_name]

            self.paddle_graph.add_layer(
                kernel=paddle_op,
                inputs={"x": input.name},
                outputs=layer_outputs,
                **layer_attrs)
        else:
            self.paddle_graph.add_layer(
                kernel=paddle_op,
                inputs={"x": input.name},
                outputs=[node.name],
                **layer_attrs)
    @print_mapping_info
    def elementwise_map(self, node):
        op_type = self.elementwise_ops[node.layer_type]
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        inputs_dict = {'x': val_x.name, 
                       'y': val_y.name}
        self.paddle_graph.add_layer(
            op_type, 
            inputs=inputs_dict, 
            outputs=[node.name])

    @print_mapping_info
    def place_holder(self, node):
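        # Register a graph input as a data placeholder; a batch dim of 0 is
        # rewritten to 1, while 0 in any other dim means the shape was never assigned.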
        shape = node.out_shapes[0]
        for i, dim_shape in enumerate(shape):
            if dim_shape == 0 and i == 0:
                shape[i] = 1
            if dim_shape == 0 and i != 0:
                assert False, 'shape of input is not assigned'
        self.paddle_graph.add_layer(
            kernel="paddle.to_tensor",
            inputs={},
            outputs=[node.name],
            data="x{}".format(self.input_index))
        self.inputs_info["x{}".format(self.input_index)] = [shape, node.dtype]
        self.input_index += 1

    @print_mapping_info
    def create_parameter(self, node, parameter=None):
        if parameter is not None:
            node = parameter
        dtype = node.dtype
        shape = node.out_shapes[0]
        if hasattr(node.weight, "shape") and len(node.weight.shape) == 0:
            self.paddle_graph.add_layer(
                "paddle.full", 
                inputs={}, 
                outputs=[node.name],
                dtype=string(dtype),
                shape=[1],
                fill_value=node.weight)
        else:
            self.weights[node.name] = node.weight
            self.paddle_graph.add_layer(
                "self.create_parameter",
                inputs={},
                outputs=[node.name],
                shape=shape,
                attr=string(node.name),
                dtype=string(dtype),
                default_initializer="paddle.nn.initializer.Constant(value=0.0)")

    def _pad_if_asymmetric(self, node, pads, val_name):  # pads: SSEE
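        # ONNX pads are ordered [x1_begin, x2_begin, ..., x1_end, x2_end, ...];
        # if any axis pads asymmetrically, emit an explicit Pad op and return
        # zero padding for the consuming layer.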
        assert len(pads) & 1 == 0
        symmetric = True
        ndims = len(pads) // 2
        for idx_dim in range(ndims):
            if pads[idx_dim] != pads[ndims + idx_dim]:
                symmetric = False
                break
        if symmetric:
            return pads[:ndims], val_name
        val_padded = self.Pad(node, op_independent=False)
        return [0] * ndims, val_padded

    def _interpolate(self, node):
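        # For Resize/Upsample, which input slot carries scales or sizes depends
        # on the opset version, hence the branches on input arity below.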
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        inputs = {'x': val_x.name}
        attrs = dict()
        if node.layer_type == 'Resize':
            if len(node.layer.input) == 2:
                # opset 10
                val_scales = self.graph.get_input_node(node, idx=1, copy=True)
                # TODO(syf): paddle.nn.functional.interpolate will support the length  
                # which is the same as the rank of input.
                attrs['scale_factor'] = self.weights[val_scales.name].tolist()[2:]
            elif len(node.layer.input) == 3:
                # opset 11
                val_scales = self.graph.get_input_node(node, idx=2, copy=True)
                # TODO(syf): paddle.nn.functional.interpolate will support the length  
                # which is the same as the rank of input.
                attrs['scale_factor'] = self.weights[val_scales.name].tolist()[2:]
            elif len(node.layer.input) == 4:
                # opset 11
                val_sizes = self.graph.get_input_node(node, idx=3, copy=True)
                var_nc, var_hw = val_sizes.name + '_nc', val_sizes.name + '_hw'
                self.paddle_graph.add_layer(
                    'paddle.split',
                    inputs={"x": val_sizes.name},
                    outputs=[var_nc, var_hw],
                    num_or_sections=[2, 2],
                    axis=0)
                self.paddle_graph.add_layer(
                    "paddle.cast",
                    inputs={"x": var_hw},
                    outputs=[var_hw],
                    dtype=string('int32'))
                inputs['size'] = var_hw
                attrs = {"align_corners": False,
                         "mode": string(node.get_attr('mode', 'nearest'))}
                self.paddle_graph.add_layer(
                    kernel="paddle.nn.functional.interpolate",
                    inputs=inputs,
                    outputs=[node.name],
                    **attrs)
                return
        elif node.layer_type == 'Upsample':
            val_scales = self.graph.get_input_node(node, idx=1, copy=True)
            self.paddle_graph.add_layer(
                "paddle.slice",
                inputs={"input": val_scales.name},
                outputs=[val_scales.name],
                axes=[0],
                starts=[2],
                ends=[4])
            inputs['scale_factor'] = val_scales.name
 
        mode = node.get_attr('mode', 'nearest')
        attrs.update({"align_corners": False,
                      "mode": string(mode),
                      "align_mode": 1})
        val_x_shape = val_x.out_shapes[0]
        if mode == "linear" and len(val_x_shape) == 4:
            attrs["mode"] = string("bilinear")
            attrs["align_corners"] = True
        self.paddle_graph.add_layer(
            kernel="paddle.nn.functional.interpolate",
            inputs=inputs,
            outputs=[node.name],
            **attrs)
        
    @print_mapping_info
    def HardSigmoid(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        alpha = node.get_attr('alpha', 0.2)
        beta = node.get_attr('beta', 0.5)
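        # HardSigmoid(x) = clip(alpha * x + beta, 0, 1), emitted as a scale
        # layer followed by a clip.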
        self.paddle_graph.add_layer(
            kernel="paddle.scale",
            inputs={"x": val_x.name},
            outputs=[node.name + "_val"],
            scale=alpha,
            bias=beta)
        self.paddle_graph.add_layer(
            kernel="paddle.clip",
            inputs={"x": node.name + "_val"},
            outputs=[node.name],
            min=0.0,
            max=1.0)  
        
    @print_mapping_info
    def Shape(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            kernel="paddle.shape",
            inputs={"input": val_x.name},
            outputs=[node.name])
        self.paddle_graph.add_layer(
                'paddle.cast',
                inputs={"x": node.name},
                outputs=[node.name],
                dtype=string('int64'))   

    @print_mapping_info
    def RoiAlign(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_rois = self.graph.get_input_node(node, idx=1, copy=True)

        pooled_height = node.get_attr('output_height')
        pooled_width = node.get_attr('output_width')
        spatial_scale = node.get_attr('spatial_scale')
        sampling_ratio = node.get_attr('sampling_ratio')
        layer_attrs = {
            'pooled_height': pooled_height,
            'pooled_width': pooled_width,
            'spatial_scale': spatial_scale,
            'sampling_ratio': sampling_ratio,
        }
        self.paddle_graph.add_layer(
            'paddle.fluid.layers.roi_align',
            inputs={'input': val_x.name,
                    'rois': val_rois.name},
            outputs=[node.name],
            **layer_attrs)
                       

    @print_mapping_info
    def MaxRoiPool(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_rois = self.graph.get_input_node(node, idx=1, copy=True)

        spatial_scale = node.get_attr('spatial_scale')
        pooled_height, pooled_width = node.get_attr('pooled_shape')
        layer_attrs = {
            'pooled_height': pooled_height,
            'pooled_width': pooled_width,
            'spatial_scale': spatial_scale,
        }
        self.paddle_graph.add_layer(
            'paddle.fluid.layers.roi_pool',
            inputs={'input': val_x.name,
                    'rois': val_rois.name},
            outputs=[node.name],
            **layer_attrs)

    @print_mapping_info
    def Pad(self, node, op_independent=True):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        pads = node.get_attr('pads')
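        # Since opset 11, 'pads' moves from an attribute to the second input;
        # constant-fold that input when possible so it can still be handled as
        # an attribute.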
        is_pads_attr = True
        if pads is None:
            val_pad = self.graph.get_input_node(node, idx=1, copy=True)
            pad_shape = val_pad.out_shapes[0]
            is_pads_attr = False
            pads = _const_weight_or_none(val_pad)
            if pads is not None:
                is_pads_attr = True
        mode = node.get_attr('mode', 'constant')
        value = node.get_attr('value', 0.)
        data_shape = val_x.out_shapes[0]
        output_shape = node.out_shapes[0]
        assume_pad = False
        layer_attrs = {}
        layer_attrs['mode'] = string(mode)
        layer_attrs['value'] = value
        if not op_independent:
            output_name = node.name + '_paded'
        else:
            output_name = node.name
        nn_op_name = name_generator("pad", self.nn_name2id)
        layer_outputs = [nn_op_name, output_name]
        if is_pads_attr:
            paddings = []
            if len(pads) in [2, 4, 6]:
                if data_shape:
                    assume_pad |= data_shape and 2 * (len(data_shape) - 2) == len(pads) # NCHW
                if output_shape:
                    assume_pad |= output_shape and 2 * (len(output_shape) - 2) == len(pads)  # NCHW
                if assume_pad:
                    paddle_op = 'paddle.nn.Pad{}D'.format(len(output_shape) - 2)
                    paddings = np.array(pads).reshape(
                        (2, -1)).transpose().astype("int32")
                    paddings = np.flip(paddings, axis=0).flatten().tolist()
                    layer_attrs['padding'] = paddings
                else:
                    if data_shape:
                        assume_pad |= data_shape and 2 * len(data_shape) == len(pads) # NCHW
                    if output_shape:
                        assume_pad |= output_shape and 2 * len(output_shape) == len(pads)  # NCHW
                    if assume_pad:
                        paddle_op = 'paddle.nn.functional.pad'
                        paddings = np.array(pads).reshape(
                            (2, -1)).transpose().astype("int32").flatten().tolist()
                        layer_attrs['pad'] = paddings
                    else:
                        raise Exception("The padding value {} is wrong!".format(pads))
            elif len(pads) == 8:
                if data_shape:
                    assume_pad |= data_shape and 2 * len(data_shape) == len(pads) # NCHW
                if output_shape:
                    assume_pad |= output_shape and 2 * len(output_shape) == len(pads)  # NCHW
                if assume_pad:
                    paddle_op = 'paddle.nn.Pad2D'
                    paddings = np.array(pads).reshape(
                        (2, -1)).transpose().astype("int32")
                    paddings = np.flip(paddings, axis=0).flatten().tolist()
                    if sum(paddings[:4]) == 0:
                        paddings = paddings[4:]
                        layer_attrs['padding'] = paddings
                    else:
                        layer_attrs["pad"] = paddings
                        paddle_op = "custom_layer:PadAllDim4WithOneInput"
            else:
                 raise Exception("The padding value {} is wrong!".format(pads))
            self.paddle_graph.add_layer(
                paddle_op, 
                inputs={'x': val_x.name}, 
                outputs=layer_outputs[1:] if paddle_op == 'paddle.nn.functional.pad' else layer_outputs, 
                **layer_attrs)
            if not op_independent:
                return node.name + '_paded'
        else:
            pads_len = val_pad.out_shapes[0][0]
            if pads_len in [2, 4, 6]:
                if data_shape:
                    assume_pad |= data_shape and 2 * (len(data_shape) - 2) == pads_len # NCHW
                if output_shape:
                    assume_pad |= output_shape and 2 * (len(output_shape) - 2) == pads_len  # NCHW 
                if assume_pad:
                    if pads_len == 2:
                        data_format = "NCL"
                    elif pads_len == 4:
                        data_format = "NCHW"
                    else:
                        data_format = "NCDHW"
                    self.paddle_graph.add_layer(
                        "custom_layer:PadWithTwoInput", 
                        inputs={'x': val_x.name, 'pad': val_pad.name}, 
                        outputs=layer_outputs,
                        value=value,
                        mode=string(mode),
                        data_format=string(data_format))
                else:
                    if data_shape:
                        assume_pad |= data_shape and 2 * len(data_shape) == pads_len # NCHW
                    if output_shape:
                        assume_pad |= output_shape and 2 * len(output_shape) == pads_len  # NCHW
                    if assume_pad:
                        if pads_len == 4:
                            self.paddle_graph.add_layer(
                                "custom_layer:PadAllDim2", 
                                inputs={'x': val_x.name, 'pad': val_pad.name}, 
                                outputs=layer_outputs, 
                                value=value,
                                mode=string(mode))
                        else:
                            raise Exception("The padding value is wrong!")
            elif pads_len == 8:
                if data_shape:
                    assume_pad |= data_shape and 2 * len(data_shape) == pads_len # NCHW
                if output_shape:
                    assume_pad |= output_shape and 2 * len(output_shape) == pads_len  # NCHW
                if assume_pad:
                    self.paddle_graph.add_layer(
                        "custom_layer:PadAllDim4", 
                        inputs={'x': val_x.name, 'pad': val_pad.name}, 
                        outputs=layer_outputs, 
                        value=value,
                        mode=string(mode))
            else:
                raise Exception("The padding length {} is wrong!".format(pads_len))
            if not op_independent:
                return node.name + '_paded'

    @print_mapping_info
    def Unsqueeze(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        axes = node.get_attr('axes')
        layer_attrs = {'axis': axes}
        if len(val_x.out_shapes[0]) == 0:
            if node.name:
                self.paddle_graph.add_layer(
                    'paddle.reshape',
                    inputs={"x": val_x.name},
                    outputs=[node.name],
                    shape=[1])
        else:
            self.paddle_graph.add_layer(
                'paddle.unsqueeze', 
                inputs={"x": val_x.name}, 
                outputs=[node.name],
                **layer_attrs)

    @print_mapping_info
    def Shrink(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        bias = node.get_attr('bias')
        lambd = node.get_attr('lambd')
        assert bias == 0.0, 'not support bias!=0'
        self.paddle_graph.add_layer(
            'paddle.nn.functional.hardshrink', 
            inputs={"x": val_x.name}, 
            outputs=[node.name], 
            threshold=lambd)

    @print_mapping_info
    def Constant(self, node):
        val_output = self.graph.get_node(node.layer.output[0], copy=True)

        value = node.get_attr('value')
        dtype = np.dtype(value.dtype)
        output_dtype = val_output.dtype
        if output_dtype:
            assert dtype == output_dtype, 'tensor dtype does not match storage dtype'

        shape = node.get_attr('shape', None)

        if shape is None:
            shape = val_output.out_shapes[0]
        if shape is None:
            shape = list(value.shape)
            _logger.warning('in (Constant -> %s): '
                            'attribute "shape" of %s not inferred, '
                            'using value as 1-D tensor may lead to fails',
                            val_output.name, val_output.name)
        if len(value) == 1:
            value = value.tolist()
            value = value[0]
            self.paddle_graph.add_layer(
                "paddle.full", 
                inputs={}, 
                outputs=[node.name],
                dtype=string(dtype),
                shape=[1],
                fill_value=value)
        else:
            value = np.reshape(value, shape)
            self.weights[node.name] = value
            self.paddle_graph.add_layer(
                "self.create_parameter",
                inputs={},
                outputs=[node.name],
                shape=shape,
                attr=string(node.name),
                dtype=string(dtype),
                default_initializer="paddle.nn.initializer.Constant(value=0.0)")

    @print_mapping_info
    def Resize(self, node):
        self._interpolate(node)

    @print_mapping_info
    def Upsample(self, node):
        self._interpolate(node)

    @print_mapping_info
    def InstanceNormalization(self, node):
        op_name = name_generator("instanse_norm", self.nn_name2id)
        output_name = node.name
        layer_outputs = [op_name, output_name]
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_scale = self.graph.get_input_node(node, idx=1, copy=True)
        val_b = self.graph.get_input_node(node, idx=2, copy=True)
        epsilon = node.get_attr('epsilon', 1e-5)
        self.weights[op_name+'.scale'] = self.weights[val_scale.name]
        self.weights[op_name+'.bias'] = self.weights[val_b.name]
        layer_attrs = {
            'num_features': node.out_shapes[0][1],
            'epsilon': epsilon,
        }
        dim = len(val_x.out_shapes[0])
        if dim == 3:
            paddle_op = "paddle.nn.InstanceNorm1D"
        elif dim == 4:
            paddle_op = "paddle.nn.InstanceNorm2D"
        elif dim == 5:
            paddle_op = "paddle.nn.InstanceNorm3D"
        else:
            raise Exception("The paddle only support 2D, 3D, 4D or 5D input in InstanceNormalization.")
        self.paddle_graph.add_layer(
            paddle_op, 
            inputs={"x": val_x.name}, 
            outputs=layer_outputs, 
            **layer_attrs)

    @print_mapping_info
    def Expand(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_shape = self.graph.get_input_node(node, idx=1, copy=True)
        val_x_dtype = val_x.dtype
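        # Emulate ONNX Expand via broadcasting: build a ones tensor of the
        # target shape and multiply it with the input.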
        name_ones = node.name + '_ones'
        attr_ones = {
            'shape': val_shape.name,
            'dtype': string(val_x_dtype),
            'fill_value': 1
        }
        self.paddle_graph.add_layer(
            'paddle.full',
            inputs={},
            outputs=[name_ones],
            **attr_ones)
        inputs_dict = {'x': name_ones, 
                       'y': val_x.name}
        self.paddle_graph.add_layer(
            'paddle.multiply',
            inputs=inputs_dict,
            outputs=[node.name])

    @print_mapping_info
    def Gather(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        indices = self.graph.get_input_node(node, idx=1, copy=True)
        indices_shape = indices.out_shapes[0]
        axis = node.get_attr('axis', 0)
        #assert len(
        #    indices_shape) <= 2, "Gather op don't support dim of indice >2 "
        if axis == 0 and len(indices_shape) <= 1:
            if len(val_x.out_shapes[0]) <= 1:
                self.paddle_graph.add_layer(
                    'paddle.gather',
                    inputs={'x': val_x.name,
                            'index': indices.name},
                    outputs=[node.name])
            elif len(val_x.out_shapes[0]) > 1:
                if len(indices_shape) == 0:
                    gather_ = node.name + '_1'
                    self.paddle_graph.add_layer(
                        'paddle.gather',
                        inputs={'x': val_x.name,
                                'index': indices.name},
                        outputs=[gather_])
                    self.paddle_graph.add_layer(
                        'paddle.squeeze',
                        inputs={'x': gather_},
                        outputs=[node.name],
                        axis=[0])
                else:
                    self.paddle_graph.add_layer(
                        'paddle.gather',
                        inputs={'x': val_x.name,
                                'index': indices.name},
                        outputs=[node.name])
        elif axis > 0 and len(indices_shape) <= 1:
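            # To gather along a non-zero axis, transpose that axis to the
            # front, gather along axis 0, then transpose back.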
            perm = list(range(len(val_x.out_shapes[0])))
            perm = [axis] + perm[:axis] + perm[axis + 1:]
            name_trans = val_x.name + '_trans'
            self.paddle_graph.add_layer(
                'paddle.transpose',
                inputs={"x": val_x.name},
                outputs=[name_trans],
                perm=perm)
            self.paddle_graph.add_layer(
                'paddle.gather',
                inputs={'x': name_trans,
                        'index': indices.name},
                outputs=[node.name])
            self.paddle_graph.add_layer(
                'paddle.transpose', 
                inputs={"x": node.name}, 
                outputs=[node.name], 
                perm=perm)
            if len(indices_shape) < 1:
                self.paddle_graph.add_layer(
                    'paddle.squeeze',
                    inputs={'x': node.name},
                    outputs=[node.name],
                    axis=[axis])
        elif axis == 0 and len(indices_shape) > 1:
            if val_x.out_shapes[0] is not None and isinstance(
                    val_x, ONNXGraphDataNode):
                indices_cast = indices.name + '_cast'
                self.paddle_graph.add_layer(
                    'paddle.cast',
                    inputs={"x": indices.name},
                    outputs=[indices_cast],
                    dtype=string('int64'))
                op_name = name_generator("embedding", self.nn_name2id)
                output_name = node.name
                layer_outputs = [op_name, output_name]
                self.weights[op_name + '.weight'] = _const_weight_or_none(val_x)
                self.paddle_graph.add_layer(
                    'paddle.nn.Embedding',
                    inputs={"x": indices_cast},
                    outputs=layer_outputs,
                    num_embeddings=val_x.out_shapes[0][0],
                    embedding_dim=val_x.out_shapes[0][1])
            else:
                from functools import reduce
                reshape_shape = reduce(lambda x, y: x * y, indices_shape)
                indices_reshape = indices.name + '_shape'
                self.paddle_graph.add_layer(
                    'paddle.reshape',
                    inputs={"x": indices.name},
                    outputs=[indices_reshape],
                    shape=[reshape_shape, ])

                perm = list(range(len(val_x.out_shapes[0])))
                self.paddle_graph.add_layer(
                    'paddle.gather',
                    inputs={'x': val_x.name,
                            'index': indices_reshape},
                    outputs=[node.name])
                val_x_shape = val_x.out_shapes[0]
                reshaped_shape = []
                for i in perm:
                    reshaped_shape.append(indices_shape[i])
                for i in val_x_shape[:axis] + val_x_shape[axis + 1:]:
                    reshaped_shape.append(i)
                self.paddle_graph.add_layer(
                    'paddle.reshape',
                    inputs={"x": node.name},
                    outputs=[node.name],
                    shape=reshaped_shape)
        elif axis > 0 and len(indices_shape) > 1:
            from functools import reduce
            reshape_shape = reduce(lambda x, y: x * y, indices_shape)
            indices_reshape = indices.name + '_shape'
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={"x": indices.name},
                outputs=[indices_reshape],
                shape=[reshape_shape, ])

            perm = list(range(len(val_x.out_shapes[0])))
            perm = [axis] + perm[:axis] + perm[axis + 1:]
            name_trans = val_x.name + '_transpose'
            self.paddle_graph.add_layer(
                'paddle.transpose',
                inputs={"x": val_x.name},
                outputs=[name_trans],
                perm=perm)
            self.paddle_graph.add_layer(
                'paddle.gather',
                inputs={'x': name_trans,
                        'index': indices_reshape},
                outputs=[node.name])
            input_transpose = node.name + '_transpose'
            self.paddle_graph.add_layer(
                'paddle.transpose',
                inputs={"x": node.name},
                outputs=[input_transpose],
                perm=perm)
            val_x_shape = val_x.out_shapes[0]
            reshaped_shape = []
            for i in perm:
                reshaped_shape.append(indices_shape[i])
            for i in val_x_shape[:axis] + val_x_shape[axis + 1:]:
                reshaped_shape.append(i)
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={"x": input_transpose},
                outputs=[node.name],
                shape=reshaped_shape)

    @print_mapping_info
    def ScatterND(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        indices = self.graph.get_input_node(node, idx=1, copy=True)
        updates = self.graph.get_input_node(node, idx=2, copy=True)
        if len(indices.out_shapes[0]) == 1:
            self.paddle_graph.add_layer(
                'paddle.scatter',
                inputs={'x': val_x.name,
                        'index': indices.name,
                        'updates': updates.name},
                outputs=[node.name])
        else:
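            # Emulate scatter_nd: scatter the updates into a zero tensor, build
            # a 0/1 mask that zeroes the scattered positions of x, then add
            # x * mask to the scattered values.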
            input_inner_indices = node.name + '_input_inner_indices'
            shape = val_x.out_shapes[0]
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={"x": indices.name},
                outputs=[indices.name],
                shape=indices.out_shapes[0])

            zeros_like_val_x = val_x.name + '_zeros'
            self.paddle_graph.add_layer(
                'paddle.zeros_like',
                inputs={"x": val_x.name},
                outputs=[zeros_like_val_x])
            self.paddle_graph.add_layer(
                'paddle.scatter_nd_add',
                inputs={
                    'x': zeros_like_val_x,
                    'index': indices.name,
                    'updates': updates.name
                },
                outputs=[input_inner_indices])
            indices_mask = node.name + '_indices_mask'
            constant_minus_one = node.name + '_constant_minus_one'
            # paddle.full_like creates a tensor with the same shape as the input tensor
            self.paddle_graph.add_layer(
                'paddle.full_like',
                inputs={"x": updates.name},
                outputs=[constant_minus_one],
                dtype=string(updates.dtype),
                fill_value=-1)
            self.paddle_graph.add_layer(
                'paddle.scatter_nd_add',
                inputs={
                    'x': zeros_like_val_x,
                    'index': indices.name,
                    'updates': constant_minus_one
                },
                outputs=[indices_mask])
            constant_one = node.name + '_constant_1'
            # paddle.full_like creates a tensor with the same shape as the input tensor
            self.paddle_graph.add_layer(
                'paddle.full_like',
                inputs={"x": val_x.name},
                outputs=[constant_one],
                dtype=string(val_x.dtype),
                fill_value=1)
            input_out_indices_mask = node.name + '_input_out_indices_mask'
            self.paddle_graph.add_layer(
                "paddle.add",
                inputs={"x": indices_mask,
                        "y": constant_one},
                outputs=[input_out_indices_mask])

            input_out_indices = node.name + '_input_out_indices'
            self.paddle_graph.add_layer(
                "paddle.multiply",
                inputs={"x": val_x.name,
                        "y": input_out_indices_mask},
                outputs=[input_out_indices])

            self.paddle_graph.add_layer(
                "paddle.add",
                inputs={"x": input_inner_indices,
                        "y": input_out_indices},
                outputs=[node.name])

    @print_mapping_info
    def Range(self, node):
        val_start = self.graph.get_input_node(node, idx=0, copy=True)
        val_limit = self.graph.get_input_node(node, idx=1, copy=True)
        val_delta = self.graph.get_input_node(node, idx=2, copy=True)
        dtype = val_start.dtype
        inputs = {'start': val_start.name, 
                  'end': val_limit.name, 
                  'step': val_delta.name}
        self.paddle_graph.add_layer(
            'paddle.arange',
            inputs=inputs,
            outputs=[node.name],
            dtype=string(dtype))

    @print_mapping_info
    def Slice(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        starts, ends, axes, steps = None, None, None, None
        layer_attrs = {}
        if len(node.inputs) > 1:
            starts = self.graph.get_input_node(node, idx=1, copy=True)
            ends = self.graph.get_input_node(node, idx=2, copy=True)
            starts_value = _const_weight_or_none(starts)
            if starts_value is not None:
                starts_value = starts_value.tolist()
            ends_value = _const_weight_or_none(ends)
            if ends_value is not None:
                ends_value = ends_value.tolist()
            if len(node.inputs) > 2:
                s_len = len(val_x.out_shapes[0])
                axes = list(range(s_len))
            if len(node.inputs) > 3:
                axes_node = self.graph.get_input_node(node, idx=3, copy=True)
                axes = _const_weight_or_none(axes_node, necessary=True).tolist()
            if len(node.inputs) > 4:
                steps = self.graph.get_input_node(node, idx=4, copy=True)
                steps = _const_weight_or_none(steps).tolist()
            
            layer_attrs = {
                "axes": axes,
                "starts": starts.name,
                "ends": ends.name
            }
            if starts_value is not None and ends_value is not None and axes is not None:
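                # Clamp constant starts/ends against the static input shape;
                # oversized ONNX end sentinels are capped at INT32_MAX.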
                starts_value = starts_value.copy()
                ends_value = ends_value.copy()
                for idx in range(len(ends_value)):
                    if starts_value[idx] >= val_x.out_shapes[0][axes[idx]] and val_x.out_shapes[0][axes[idx]] > 0:
                        starts_value[idx] = val_x.out_shapes[0][axes[idx]] - 1
                        ends_value[idx] = val_x.out_shapes[0][axes[idx]]
                    elif ends_value[idx] > 2**31 - 1:
                        ends_value[idx] = 2**31 - 1
                  
                layer_attrs = {
                    "axes": axes,
                    "starts": starts_value,
                    "ends": ends_value
                }
            else:
                if starts.dtype != 'int32':
                    starts_cast = starts.name + '_cast'
                    self.paddle_graph.add_layer(
                        'paddle.cast',
                        inputs={"x": starts.name},
                        outputs=[starts_cast],
                        dtype=string('int32'))
                    layer_attrs['starts'] = starts_cast
                if ends.dtype != 'int32':
                    ends_cast = ends.name + '_cast'
                else:
                    ends_cast = ends.name
                self.paddle_graph.add_layer(
                    'paddle.cast',
                    inputs={"x": ends.name},
                    outputs=[ends_cast],
                    dtype=string('int32'))
                layer_attrs['ends'] = ends_cast
        else:
            starts = node.get_attr('starts')
            ends = node.get_attr('ends')
            axes = node.get_attr('axes')
            for idx in range(len(ends)):
                if ends[idx] > 2**31 - 1:
                    ends[idx] = 2**31 - 1
            layer_attrs = {"axes": axes, "starts": starts, "ends": ends}

        if steps is not None:
            layer_attrs['strides'] = steps
            self.paddle_graph.add_layer(
                'paddle.strided_slice', 
                inputs={"x": val_x.name}, 
                outputs=[node.name], 
                **layer_attrs)
        else:
            self.paddle_graph.add_layer(
                'paddle.slice', 
                inputs={"input": val_x.name}, 
                outputs=[node.name],  
                **layer_attrs)

    @print_mapping_info
    def ConstantOfShape(self, node):
        val_shape = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_node(node.layer.output[0], copy=True)

        value = node.get_attr('value')
        dtype = value.dtype
        value = value.tolist()
        assert len(value) == 1, ('given value not Scalar, shape of value > 1, '
                                 'this is not supported')
        if len(value) == 1:
            value = value[0]
            layer_attrs = {
                'dtype': string(dtype),
                'fill_value': value
            }
            self.paddle_graph.add_layer(
                "paddle.full", 
                inputs={'shape': val_shape.name}, 
                outputs=[node.name],
                **layer_attrs)

    @print_mapping_info
    def Clip(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_node(node.layer.output[0], copy=True)
        max_value, min_value = None, None
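        # opset < 11 carries min/max as attributes; opset >= 11 passes them as
        # extra inputs, which are constant-folded below.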
        if len(node.inputs) == 1:
            max_value = node.get_attr('max')
            min_value = node.get_attr('min')
            layer_attrs = {
                'max': max_value,
                'min': min_value,
            }
            self.paddle_graph.add_layer(
                'paddle.clip', 
                inputs={"x": val_x.name}, 
                outputs=[node.name], 
                **layer_attrs)
        else:
            min_ipt = self.graph.get_input_node(node, idx=1, copy=True)
            max_ipt = self.graph.get_input_node(node, idx=2, copy=True)
            min_value = _const_weight_or_none(min_ipt)
            max_value = _const_weight_or_none(max_ipt)
            if max_value.shape == (1, ):
                max_value = max_value[0]
            if min_value.shape == (1, ):
                min_value = min_value[0]
        if max_value is not None and min_value is not None:
            layer_attrs = {'max': max_value, 'min': min_value}
            self.paddle_graph.add_layer(
                'paddle.clip', 
                inputs={"x": val_x.name}, 
                outputs=[node.name], 
                **layer_attrs)
        else:
            raise Exception("max_value and min_value of Clip should not be None.")

    @print_mapping_info
    def Split(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        paddle_op = 'split'
        split = node.get_attr('split')
        axis = node.get_attr('axis', 0)
        layer_attrs = {
            'num_or_sections': split,
            'axis': axis,
        }
        outputs_list = list()
        if isinstance(split, list) or isinstance(split, tuple):
            if len(split) == 1:
                outputs_list.append(node.name)
            else:
                for i in range(len(split)):
                    outputs_list.append("{}_p{}".format(node.layer_name, i))
        else:
            outputs_list.append(node.name)
        self.paddle_graph.add_layer(
            'paddle.split', 
            inputs={"x": val_x.name}, 
            outputs=outputs_list, 
            **layer_attrs)

    @print_mapping_info
    def Reshape(self, node):
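        # Prefer a constant-folded shape, then the inferred static output
        # shape; otherwise pass the shape tensor to paddle.reshape at runtime.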
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_shape = self.graph.get_input_node(node, idx=1, copy=True)
        val_reshaped = self.graph.get_node(node.layer.output[0], copy=True)
        shape_value = _const_weight_or_none(val_shape)
        shape_dims = len(val_shape.out_shapes[0])

        if shape_value is not None:
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={'x': val_x.name},
                outputs=[node.name],
                shape=shape_value.tolist())
        elif len(node.out_shapes[0]) > 0 and _is_static_shape(node.out_shapes[
                0]):
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={'x': val_x.name},
                outputs=[node.name],
                shape=node.out_shapes[0])
        else:
            # shape may be [], come form Gather by scalar indices
            if len(val_shape.out_shapes[0]) > 0:
                self.paddle_graph.add_layer(
                    'paddle.reshape',
                    inputs={'x': val_shape.name},
                    outputs=[val_shape.name],
                    shape=val_shape.out_shapes[0])
            if val_shape.dtype != "int32":
                self.paddle_graph.add_layer(
                    'paddle.cast',
                    inputs={'x': val_shape.name},
                    outputs=[val_shape.name],
                    dtype=string("int32"))
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={'x': val_x.name,
                        'shape': val_shape.name},
                outputs=[node.name])

    @print_mapping_info
    def Cast(self, node):
        val_input = self.graph.get_input_node(node, idx=0, copy=True)
        val_output = self.graph.get_node(node.layer.output[0], copy=True)

        dtype = node.get_attr('to')
        if not isinstance(dtype, np.dtype):
            dtype = TENSOR_TYPE_TO_NP_TYPE[dtype]

        output_dtype = val_output.dtype
        if output_dtype:
            assert dtype == output_dtype, 'dtype of "to" attribute does not match the output dtype'
        self.paddle_graph.add_layer(
            'paddle.cast', 
            inputs={'x': val_input.name}, 
            outputs=[node.name], 
            dtype=string(dtype))

    @print_mapping_info
    def Not(self, node):
        val_input = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer('paddle.logical_not', 
                                    inputs={'x': val_input.name}, 
                                    outputs=[node.name])

    @print_mapping_info
    def AveragePool(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)

        auto_pad = node.get_attr('auto_pad', 'NOTSET')
        kernel_shape = node.get_attr("kernel_shape")
        poolnd = len(kernel_shape)
        strides = node.get_attr("strides")
        pad_mode = node.get_attr("pads")
        ceil_mode = bool(node.get_attr('ceil_mode', 0))
        pads = node.get_attr('pads', [0] * (poolnd * 2))

        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)
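        # ONNX allows asymmetric pads; _pad_if_asymmetric inserts an explicit
        # pad op when needed. For auto_pad SAME_UPPER/SAME_LOWER the symmetric
        # padding is recomputed below from input size, kernel and stride.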

        if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
            input_shape = val_x.out_shapes[0]
            pad_h = _get_same_padding(input_shape[2], kernel_shape[0],
                                      strides[0])
            pad_w = _get_same_padding(input_shape[3], kernel_shape[1],
                                      strides[1])
            paddings = pad_h + pad_w

        op_name = name_generator("pool", self.nn_name2id)
        output_name = node.name
        layer_outputs = [op_name, output_name]
        paddle_op = 'paddle.nn.AvgPool{}D'.format(poolnd)
        assert 1 <= poolnd <= 3, 'only Pool1D, Pool2D and Pool3D are supported'
        layer_attrs = {
            "kernel_size": kernel_shape,
            "stride": strides,
            "padding": paddings,
            "ceil_mode": ceil_mode,
            "exclusive": 'True',
        }
        self.paddle_graph.add_layer(
            paddle_op, 
            inputs={'x': val_x.name}, 
            outputs=layer_outputs, 
            **layer_attrs)

    @print_mapping_info
    def Concat(self, node):
        inputs_list = []
        dtypes = set()
        for i in range(len(node.layer.input)):
            ipt = self.graph.get_input_node(node, idx=i, copy=True)
            inputs_list.append(ipt.name)
            dtypes.add(ipt.dtype)
        if len(dtypes) > 1:
            raise Exception(
                'Unsupported situation (mixed input dtypes), please create an '
                'issue on https://github.com/PaddlePaddle/X2Paddle/issues.')
        axis = node.get_attr('axis')
        self.paddle_graph.add_layer(
            'paddle.concat', 
            inputs={"x": inputs_list}, 
            outputs=[node.name], 
            axis=axis)

    @print_mapping_info
    def Flatten(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        output_shape = node.out_shapes[0]
        axis = node.get_attr('axis', 1)
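        # ONNX Flatten is a 2-D reshape:
        #   [prod(shape[:axis]), prod(shape[axis:])],
        # with axis == 0 giving [1, prod(shape)].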
        shape_list = [1, 1]
        if axis == 0:
            for s in output_shape:
                shape_list[1] *= s
        else:
            for s in output_shape[:axis]:
                shape_list[0] *= s
            for s in output_shape[axis:]:
                shape_list[1] *= s
        self.paddle_graph.add_layer(
            'paddle.reshape', 
            inputs={"x": val_x.name}, 
            outputs=[node.name],
            shape=shape_list)

    @print_mapping_info
    def Gemm(self, node):
        val_a = self.graph.get_input_node(node, idx=0, copy=True)
        val_b = self.graph.get_input_node(node, idx=1, copy=True)
        val_c = self.graph.get_input_node(node, idx=2, copy=True)

        alpha = node.get_attr('alpha', 1.)  # optional
        beta = node.get_attr('beta', 1.)  # optional
        trans_a = bool(node.get_attr('transA', 0))  # optional
        trans_b = bool(node.get_attr('transB', 0))  # optional
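        # ONNX Gemm: Y = alpha * op(A) @ op(B) + beta * C, decomposed into
        # paddle.matmul + paddle.scale (+ paddle.add for the C term).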
        val_mm = node.name + '_mm'
        matmul_inputs = {"x": val_a.name, 
                         "y": val_b.name}
        attr_matmul = {
            "transpose_x": trans_a,
            "transpose_y": trans_b,
        }
        self.paddle_graph.add_layer(
            'paddle.matmul',
            inputs=matmul_inputs,
            outputs=[val_mm],
            **attr_matmul)
        self.paddle_graph.add_layer(
            "paddle.scale", 
            inputs={"x": val_mm}, 
            outputs=[val_mm],
            scale=alpha)

        if beta != 0:
            if beta == 1.:
                add_inputs = {"x": val_mm, 
                              "y": val_c.name}
                self.paddle_graph.add_layer(
                    "paddle.add",
                    inputs=add_inputs,
                    outputs=[node.name])
            else:
                var_beta = node.name + '_beta'
                self.paddle_graph.add_layer(
                    "paddle.scale",
                    inputs={"x": val_c.name},
                    outputs=[var_beta],
                    scale=beta)
                add_inputs = {"x": val_mm, "y": var_beta}
                self.paddle_graph.add_layer(
                    "paddle.add",
                    inputs=add_inputs,
                    outputs=[node.name])
        else:
            # beta == 0: the C term vanishes, so the scaled matmul is the result.
            self.paddle_graph.add_layer(
                "paddle.assign",
                inputs={"x": val_mm},
                outputs=[node.name])

    @print_mapping_info
    def Sum(self, node):
        val_inps = node.layer.input
        inputs_dict = {
            "x": self.graph.get_input_node(
                node, idx=0, copy=True).name,
            "y": self.graph.get_input_node(
                node, idx=1, copy=True).name,
        }
        self.paddle_graph.add_layer("paddle.add", 
                                    inputs=inputs_dict, 
                                    outputs=[node.name])

        for idx, ipt in enumerate(val_inps[2:]):
            # the first two inputs were consumed above, so offset the index by 2
            y = self.graph.get_input_node(node, idx=idx + 2, copy=True)
            inputs_dict = {
                "x": node.name,
                "y": y.name,
            }
            self.paddle_graph.add_layer(
                "paddle.add", 
                inputs=inputs_dict, 
                outputs=[node.name])

    @print_mapping_info
    def MatMul(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        x_shape = val_x.out_shapes[0]
        y_shape = val_y.out_shapes[0]
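        # If y carries a leading dimension of 1 while x does not, squeeze it
        # away first so paddle.matmul sees shapes it can multiply directly.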
        inputs_dict = {"x": val_x.name, 
                       "y": val_y.name}
        if y_shape[0] == 1 and x_shape[-1] != 1 and x_shape[0] != 1:
            y_squeeze = val_y.name + '_squeeze'
            self.paddle_graph.add_layer(
                "paddle.squeeze",
                inputs={"x": val_y.name},
                outputs=[y_squeeze],
                axis=[0])
            inputs_dict['y'] = y_squeeze
            self.paddle_graph.add_layer(
                "paddle.matmul", 
                inputs=inputs_dict, 
                outputs=[node.name])
        else:
            self.paddle_graph.add_layer(
                "paddle.matmul", 
                inputs=inputs_dict, 
                outputs=[node.name])

    @print_mapping_info
    def BatchNormalization(self, node):
        op_name = name_generator("batchnorm", self.nn_name2id)
        output_name = node.name
        layer_outputs = [op_name, output_name]
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_scale = self.graph.get_input_node(node, idx=1, copy=True)
        val_b = self.graph.get_input_node(node, idx=2, copy=True)
        val_mean = self.graph.get_input_node(node, idx=3, copy=True)
        val_var = self.graph.get_input_node(node, idx=4, copy=True)

        momentum = node.get_attr('momentum', .9)
        epsilon = node.get_attr('epsilon', 1e-5)
        c = val_x.out_shapes[0][1]
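        # Rename ONNX initializers to the parameter names paddle.nn.BatchNorm
        # expects: <op>.weight, <op>.bias, <op>._mean and <op>._variance.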

        _rename_or_remove_weight(self.weights, val_scale.name, op_name+'.weight')
        _rename_or_remove_weight(self.weights, val_b.name, op_name+'.bias')
        _rename_or_remove_weight(self.weights, val_var.name, op_name+'._variance')
        _rename_or_remove_weight(self.weights, val_mean.name, op_name+'._mean')

        # Attribute: spatial is used in BatchNormalization-1,6,7
        spatial = bool(node.get_attr('spatial'))
        layer_attrs = {
            "num_channels": c,
            "momentum": momentum,
            "epsilon": epsilon,
            "is_test": True,
            "use_global_stats": False,
        }
        self.paddle_graph.add_layer(
            "paddle.nn.BatchNorm", 
            inputs={"x": val_x.name}, 
            outputs=layer_outputs, 
            **layer_attrs)

    @print_mapping_info
    def Transpose(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        s_len = len(val_x.out_shapes[0])
        perm_default = list(range(s_len))
        perm_default.reverse()
        perm = node.get_attr('perm', perm_default)
        self.paddle_graph.add_layer(
            "paddle.transpose", 
            inputs={"x": val_x.name},
            outputs=[node.name], 
            perm=perm)

    @print_mapping_info
    def PRelu(self, node):
        op_name = name_generator("prelu", self.nn_name2id)
        output_name = node.name
        layer_outputs = [op_name, output_name]
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_slope = self.graph.get_input_node(node, idx=1, copy=True)

        mode = 'channel'
        shape_slope = val_slope.out_shapes[0]
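        # A slope with every dimension equal to 1 is a single shared alpha
        # ('all' mode); otherwise one alpha per channel ('channel' mode).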
        if shape_slope == [1] * len(shape_slope):
            mode = 'all'

        if mode == "element":
            self.paddle_graph.add_layer(
                "paddle.zeros",
                inputs={}, 
                outputs=[output_name + "__zeros"], 
                shape=shape_slope,
                dtype=string(node.dtype))
            self.paddle_graph.add_layer(
                "paddle.maximum",
                inputs={"x": val_x.name, 
                        "y": output_name + "__zeros"}, 
                outputs=[output_name + "__max"])
            self.paddle_graph.add_layer(
                "paddle.minimum",
                inputs={"x": val_x.name, 
                        "y": output_name + "__zeros"}, 
                outputs=[output_name + "__max"])
            self.paddle_graph.add_layer(
                "paddle.multiply",
                inputs={"x": val_slope.name, 
                        "y": output_name + "__min"}, 
                outputs=[output_name + "__mul"])
            self.paddle_graph.add_layer(
                "paddle.add",
                inputs={"x": output_name + "__max", 
                        "y": output_name + "__mul"}, 
                outputs=[output_name])
        else:
            if mode == 'channel':
                slope_data = _const_weight_or_none(val_slope)
                if slope_data is None:
                    self.paddle_graph.add_layer(
                        "paddle.reshape", 
                        inputs={"x": val_slope.name}, 
                        outputs=[val_slope.name],
                        shape=[shape_slope[0]])
                    self.paddle_graph.add_layer(
                        "paddle.nn.functional.prelu", 
                        inputs={"x": val_x.name,
                                "weight": val_slope.name}, 
                        outputs=[node.name])
                    return
                _rename_or_remove_weight(self.weights, val_slope.name)
                if len(shape_slope) > 1:
                    # paddle.nn.PReLU expects a 1-D weight of length num_parameters
                    self.weights[op_name + '._weight'] = np.reshape(
                        slope_data, shape_slope[0])
                else:
                    self.weights[op_name + '._weight'] = slope_data
                num_parameters = val_x.out_shapes[0][1]
            else:
                num_parameters = 1
                # read the slope before it is removed from self.weights
                slope_data = self.weights[val_slope.name]
                _rename_or_remove_weight(self.weights, val_slope.name)
                self.weights[op_name + '._weight'] = np.reshape(slope_data, [1])
            self.paddle_graph.add_layer(
                "paddle.nn.PReLU", 
                inputs={"x": val_x.name}, 
                outputs=layer_outputs, 
                num_parameters=num_parameters)

    @print_mapping_info
    def Squeeze(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        axes = node.get_attr('axes')
        if len(val_x.out_shapes[0]) == 1:
            self.paddle_graph.add_layer(
                "paddle.cast",
                inputs={"x": val_x.name},
                outputs=[node.name],
                dtype=string(val_x.dtype))
        else:
            self.paddle_graph.add_layer(
                "paddle.squeeze", 
                inputs={"x": val_x.name}, 
                outputs=[node.name], 
                axis=axes)

    @print_mapping_info
    def Equal(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        self.paddle_graph.add_layer(
            "paddle.equal",
            inputs={'x': val_x.name,
                    'y': val_y.name},
            outputs=[node.name])

    @print_mapping_info
    def Greater(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        self.paddle_graph.add_layer(
            "paddle.greater_than",
            inputs={'x': val_x.name,
                    'y': val_y.name},
            outputs=[node.name])

    @print_mapping_info
    def Where(self, node):
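        # ONNX Where is decomposed arithmetically:
        #   out = cast(cond) * x + cast(not cond) * y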
        condition = self.graph.get_input_node(node, idx=0, copy=True)
        val_x = self.graph.get_input_node(node, idx=1, copy=True)
        val_y = self.graph.get_input_node(node, idx=2, copy=True)

        not_condition = condition.name + '_not'
        self.paddle_graph.add_layer(
            "paddle.logical_not",
            inputs={"x": condition.name},
            outputs=[not_condition])
        cast_not_condition = not_condition + '_cast'
        self.paddle_graph.add_layer(
            "paddle.cast",
            inputs={"x": not_condition},
            outputs=[cast_not_condition],
            dtype=string(val_x.dtype))
        cast_condition = condition.name + '_cast'
        self.paddle_graph.add_layer(
            "paddle.cast",
            inputs={"x": condition.name},
            outputs=[cast_condition],
            dtype=string(val_x.dtype))
        mul_val_x = val_x.name + '_mul'
        self.paddle_graph.add_layer(
            "paddle.multiply",
            inputs={'x': val_x.name,
                    'y': cast_condition},
            outputs=[mul_val_x])
        mul_val_y = val_y.name + '_mul'
        self.paddle_graph.add_layer(
            "paddle.multiply",
            inputs={'x': val_y.name,
                    'y': cast_not_condition},
            outputs=[mul_val_y])

        self.paddle_graph.add_layer(
            "paddle.add",
            inputs={'x': mul_val_x,
                    'y': mul_val_y},
            outputs=[node.name])

    @print_mapping_info
    def NonZero(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_x_dim = len(val_x.out_shapes[0])
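        # ONNX NonZero returns indices as [rank, num_nonzero], while
        # paddle.nonzero yields [num_nonzero, rank], hence the repacking below.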
        if val_x_dim == 1:
            self.paddle_graph.add_layer(
                "paddle.nonzero", 
                inputs={"x": val_x.name}, 
                outputs=[val_x.name])
            self.paddle_graph.add_layer(
                "paddle.transpose",
                inputs={"x": val_x.name},
                outputs=[node.layer_name],
                perm=[1, 0])
        if val_x_dim > 1:
            self.paddle_graph.add_layer(
                "paddle.nonzero", 
                inputs={"x": val_x.name}, 
                outputs=[val_x.name])
            self.paddle_graph.add_layer(
                "paddle.split",
                inputs={"x": val_x.name}, 
                outputs=[val_x.name],
                num_or_sections=1,
                axis=val_x_dim)
            self.paddle_graph.add_layer(
                "paddle.concat", 
                inputs={"x": val_x.name}, 
                outputs=[node.name])

    @print_mapping_info
    def Identity(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.assign", 
            inputs={"x": val_x.name}, 
            outputs=[node.name])

    @print_mapping_info
    def Tile(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_repeats = self.graph.get_input_node(node, idx=1, copy=True)
        repeats = _const_weight_or_none(val_repeats)

        if repeats is None:
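            # repeats is a runtime tensor here; paddle.tile wants an int32
            # tensor for repeat_times, so insert a cast when necessary.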
            repeats = val_repeats.name
            if val_repeats.dtype != 'int32':
                self.paddle_graph.add_layer(
                    "paddle.cast",
                    inputs={"x": repeats},
                    outputs=["{}.tmp".format(repeats)],
                    dtype=string("int32"))
                repeats = "{}.tmp".format(repeats)

        elif isinstance(repeats, int):
            repeats = [repeats]

        self.paddle_graph.add_layer(
            "paddle.tile",
            inputs={"x": val_x.name},
            outputs=[node.name],
            repeat_times=repeats)

    @print_mapping_info
    def MaxPool(self, node):
        op_name = name_generator("pool", self.nn_name2id)
        output_name = node.name
        layer_outputs = [op_name, output_name]
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        auto_pad = node.get_attr('auto_pad', 'NOTSET')
        assert node.get_attr(
            "dilations") is None, 'only the default dilations is supported'  # optional

        kernel_shape = node.get_attr("kernel_shape")
        poolnd = len(kernel_shape)
        strides = node.get_attr("strides")
        pad_mode = node.get_attr("pads")
        ceil_mode = bool(node.get_attr('ceil_mode', 0))  # optional
        pads = node.get_attr('pads', [0] * (poolnd * 2))  # optional
        paddle_op = 'paddle.nn.MaxPool{}D'.format(poolnd)
        assert 1 <= poolnd <= 3, 'only Pool1D, Pool2D and Pool3D are supported'

        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

        if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
            input_shape = val_x.out_shapes[0]
            pad_h = _get_same_padding(input_shape[2], kernel_shape[0],
                                      strides[0])
            pad_w = _get_same_padding(input_shape[3], kernel_shape[1],
                                      strides[1])
            paddings = pad_h + pad_w
            
        layer_attrs = {
            "kernel_size": kernel_shape,
            "stride": strides,
            "padding": paddings,
            "ceil_mode": ceil_mode,
        }
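        # _pad_if_asymmetric may have replaced val_x with the name (a str) of
        # a padded intermediate tensor, hence the isinstance check below.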
        self.paddle_graph.add_layer(
            paddle_op, 
            inputs={'x': val_x if isinstance(val_x, str) else val_x.name}, 
            outputs=layer_outputs, 
            **layer_attrs)

    @print_mapping_info
    def GlobalMaxPool(self, node):
        op_name = name_generator("pool", self.nn_name2id)
        output_name = node.name
        layer_outputs = [op_name, output_name]
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        input_shape = val_x.out_shapes[0]
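        # Global pooling maps to adaptive pooling whose output_size is taken
        # from the spatial part of the inferred output shape.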
        if len(input_shape) == 4:
            poolnd = 2
        elif len(input_shape) == 5:
            poolnd = 3
        elif len(input_shape) == 3:
            poolnd = 1
        paddle_op = 'paddle.nn.AdaptiveMaxPool{}D'.format(poolnd)
        assert 1 <= poolnd <= 3, 'only Pool1D, Pool2D and Pool3D are supported'
        output_shape = node.out_shapes[0]
        self.paddle_graph.add_layer(
            paddle_op, 
            inputs={'x': val_x.name}, 
            outputs=layer_outputs, 
            output_size=output_shape[2:])

    @print_mapping_info
    def GlobalAveragePool(self, node):
        op_name = name_generator("pool", self.nn_name2id)
        output_name = node.name
        layer_outputs = [op_name, output_name]
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        input_shape = val_x.out_shapes[0]
        if len(input_shape) == 4:
            poolnd = 2
        elif len(input_shape) == 5:
            poolnd = 3
        elif len(input_shape) == 3:
            poolnd = 1
        paddle_op = 'paddle.nn.AdaptiveAvgPool{}D'.format(poolnd)
        assert 1 <= poolnd <= 3, 'only Pool1D, Pool2D and Pool3D are supported'
        output_shape = node.out_shapes[0]
        self.paddle_graph.add_layer(
            paddle_op, 
            inputs={'x': val_x.name}, 
            outputs=layer_outputs, 
            output_size=output_shape[2:])

    @print_mapping_info
    def Conv(self, node):
        op_name = name_generator("conv", self.nn_name2id)
        output_name = node.name
        layer_outputs = [op_name, output_name]
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_w = self.graph.get_input_node(node, idx=1, copy=True)
        has_bias = len(node.layer.input) == 3
        if has_bias:
            val_b = self.graph.get_input_node(node, idx=2, copy=True)
        auto_pad = node.get_attr('auto_pad', 'NOTSET')

        kernel_shape = node.get_attr('kernel_shape')
        convnd = len(kernel_shape)
        assert 2 <= convnd <= 3, 'only Conv2D and Conv3D is supported'
        num_out_channels = val_w.out_shapes[0][0]
        num_in_channels = val_w.out_shapes[0][1]
        paddle_op = 'paddle.nn.Conv{}D'.format(convnd)

        num_groups = node.get_attr('group', 1)
        strides = node.get_attr('strides', [1] * convnd)
        dilations = node.get_attr('dilations', [1] * convnd)
        pads = node.get_attr('pads', [0] * (convnd * 2))

        input_shape = val_x.out_shapes[0]
        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

        if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
            pad_h = _get_same_padding(input_shape[2], kernel_shape[0],
                                      strides[0])
            pad_w = _get_same_padding(input_shape[3], kernel_shape[1],
                                      strides[1])
            paddings = pad_h + pad_w

        layer_inputs = {'x': val_x if isinstance(val_x, str) else val_x.name}
        layer_attrs = {
            "in_channels": num_in_channels * num_groups,
            "out_channels": num_out_channels,
            "kernel_size": kernel_shape,
            "stride": strides,
            "padding": paddings,
            "dilation": dilations,
            "groups": num_groups,
        }
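        # Rename the ONNX initializers to Paddle's parameter naming scheme
        # (<layer>.weight / <layer>.bias); done_weight_list tracks initializers
        # shared by several layers.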
        remove_weight = True if val_w.name in self.done_weight_list else False
        if not remove_weight:
            self.done_weight_list.append(val_w.name)
        _rename_or_remove_weight(self.weights, val_w.name, op_name + '.weight',
                                 remove_weight)
        if has_bias:
            remove_bias = True if val_b.name in self.done_weight_list else False
            if not remove_bias:
                self.done_weight_list.append(val_b.name)
            _rename_or_remove_weight(self.weights, val_b.name, op_name + '.bias',
                                     remove_bias)
        else:
            layer_attrs["bias_attr"] = False
        # If the inferred input shape is degenerate (its product collapses to
        # 1 or -1), rebuild it: fix the channel dim and keep batch/spatial
        # as-is (0 means "copy the original dimension" in paddle.reshape).
        if reduce(lambda x, y: x * y,
                  input_shape) in [1, -1] and 1 not in input_shape:
            input_shape[1] = num_in_channels * num_groups
            input_shape[0] = 0
            input_shape[2] = 0
            self.paddle_graph.add_layer(
                "paddle.reshape", 
                inputs=layer_inputs, 
                outputs=[layer_inputs["x"]], 
                shape=input_shape)
        self.paddle_graph.add_layer(
            paddle_op, 
            inputs=layer_inputs, 
            outputs=layer_outputs, 
            **layer_attrs)

    @print_mapping_info
    def ConvTranspose(self, node):
        op_name = name_generator("conv_trans", self.nn_name2id)
        output_name = node.name
        layer_outputs = [op_name, output_name]
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_w = self.graph.get_input_node(node, idx=1, copy=True)
        val_b = None
        if len(node.layer.input) > 2:
            val_b = self.graph.get_input_node(node, idx=2, copy=True)
        auto_pad = node.get_attr('auto_pad', 'NOTSET')
        out_padding = node.get_attr('output_padding', [0, 0])
        kernel_shape = node.get_attr('kernel_shape')
        assert kernel_shape, 'kernel_shape not inferred'
        convnd = len(kernel_shape)
        assert 2 <= convnd <= 3, 'only Conv2DTranspose and Conv3DTranspose supported'
        num_in_channels = val_w.out_shapes[0][0]
        num_out_channels = val_w.out_shapes[0][1]
        paddle_op = 'paddle.nn.Conv{}DTranspose'.format(convnd)

        num_groups = node.get_attr('group', 1)
        strides = node.get_attr('strides', [1] * convnd)
        dilations = node.get_attr('dilations', [1] * convnd)
        output_size = node.get_attr('output_shape', [])
        pads = node.get_attr('pads', [0] * (convnd * 2))

        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

        output_size = [0, 0]

        output_size[0] = (val_x.out_shapes[0][2] - 1
                          ) * strides[0] - 2 * paddings[0] + dilations[0] * (
                              kernel_shape[0] - 1) + 1 + out_padding[0]
        output_size[1] = (val_x.out_shapes[0][3] - 1
                          ) * strides[1] - 2 * paddings[1] + dilations[1] * (
                              kernel_shape[1] - 1) + 1 + out_padding[1]
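        # Standard transposed-convolution output size:
        #   out = (in - 1) * stride - 2 * pad + dilation * (k - 1) + 1
        #         + output_padding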

        # Conv2DTranspose has no output_size attribute; output_size can only
        # be passed to forward(), so it is not set here.
        inputs_dict = {'x': val_x if isinstance(val_x, str) else val_x.name}
        layer_attrs = {
            "in_channels": num_in_channels,
            "out_channels": num_out_channels,
            "kernel_size": kernel_shape,
            "stride": strides,
            "dilation": dilations,
            "padding": paddings,
            "groups": num_groups,
            "output_padding": out_padding}
        _rename_or_remove_weight(self.weights, val_w.name, op_name + '.weight')
        if val_b is not None:
            _rename_or_remove_weight(self.weights, val_b.name, op_name+'.bias')
        self.paddle_graph.add_layer(
            kernel=paddle_op,
            inputs=inputs_dict,
            outputs=layer_outputs,
            **layer_attrs)
        
    @print_mapping_info
    def ArgMax(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        axis = node.get_attr('axis')
        keepdims = False if node.get_attr('keepdims') == 0 else True
        layer_attrs = {'axis': axis,
                      'keepdim': keepdims}
        self.paddle_graph.add_layer(
            'paddle.argmax', 
            inputs={"x": val_x.name}, 
            outputs=[node.name],
            **layer_attrs)

        
    @print_mapping_info
    def Size(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
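        # ONNX Size is the total element count: shape -> cast(int64) -> prod.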
        self.paddle_graph.add_layer(
            "paddle.shape", 
            inputs={"input": val_x.name}, 
            outputs=[node.name])
        self.paddle_graph.add_layer(
            'paddle.cast',
            inputs={"x": node.name},
            outputs=[node.name],
            dtype=string('int64'))  
        self.paddle_graph.add_layer(
            "paddle.prod",
            inputs={"x": node.name},
            outputs=[node.name])
        
    @print_mapping_info
    def Sign(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
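        # the casts below suggest paddle.sign is float-only here: non-float
        # inputs go through float32 and the result is cast back afterwards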
        if node.dtype not in ["float16", "float32", "float64"]:
            self.paddle_graph.add_layer(
                "paddle.cast", 
                inputs={"x": val_x.name}, 
                outputs=[val_x.name],
                dtype=string("float32"))
        self.paddle_graph.add_layer(
            "paddle.sign", 
            inputs={"x": val_x.name}, 
            outputs=[node.name])
        if node.dtype not in ["float16", "float32", "float64"]:
            self.paddle_graph.add_layer(
                "paddle.cast", 
                inputs={"x": node.name}, 
                outputs=[node.name],
                dtype=string(node.dtype))
        
    @print_mapping_info
    def OneHot(self, node):
        nn_op_name = name_generator("onehot", self.nn_name2id)
        output_name = node.name
        layer_outputs = [nn_op_name, output_name]
        indices = self.graph.get_input_node(node, idx=0, copy=True)
        depth = self.graph.get_input_node(node, idx=1, copy=True)
        values = self.graph.get_input_node(node, idx=2, copy=True)
        axis = node.get_attr('axis', -1)
        self.paddle_graph.add_layer(
            "custom_layer:OneHot", 
            inputs={"indices": indices.name,
                    "depth": depth.name,
                    "values": values.name}, 
            outputs=layer_outputs,
            axis=axis)
    
    @print_mapping_info
    def Reciprocal(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.reciprocal", 
            inputs={"x": val_x.name}, 
            outputs=[node.name])

    @print_mapping_info
    def LSTM(self, node):
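        # ONNX LSTM inputs: X, W, R and optional B, sequence_lens, initial_h,
        # initial_c; the optional ones are located positionally among the
        # non-empty input names.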
        x = self.graph.get_input_node(node, idx=0, copy=True)
        input_weight = self.graph.get_input_node(node, idx=1, copy=True)
        hidden_weight = self.graph.get_input_node(node, idx=2, copy=True)

        input_nums = len(node.layer.input)
        exist_input_nums = 3
        have_bias = False
        if input_nums > 3 and node.layer.input[3] != '':
            bias = self.graph.get_input_node(node, idx=exist_input_nums, copy=True)
            have_bias = True
            exist_input_nums += 1
        if input_nums > 4 and node.layer.input[4] != '':
            sequence_lens = self.graph.get_input_node(node, idx=exist_input_nums, copy=True)
            exist_input_nums += 1
        if input_nums > 5 and node.layer.input[5] != '':
            init_h = self.graph.get_input_node(node, idx=exist_input_nums, copy=True)
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={"x": init_h.name},
                outputs=[init_h.name],
                shape=init_h.out_shapes[0]
                )
            exist_input_nums += 1
        if input_nums > 6 and node.layer.input[6] != '':
            init_c = self.graph.get_input_node(node, idx=exist_input_nums, copy=True)
            self.paddle_graph.add_layer(
                'paddle.reshape',
                inputs={"x": init_c.name},
                outputs=[init_c.name],
                shape=init_c.out_shapes[0]
                )

        input_weight_np = _const_weight_or_none(input_weight)
        _rename_or_remove_weight(self.weights, input_weight.name)
        hidden_size = node.get_attr('hidden_size', input_weight_np.shape[1] // 4)
        input_size = input_weight_np.shape[2]
        hidden_weight_np = _const_weight_or_none(hidden_weight)
        _rename_or_remove_weight(self.weights, hidden_weight.name)
        # B is optional in ONNX LSTM; only fetch and slice it when present.
        if have_bias:
            bias_np = _const_weight_or_none(bias)
            _rename_or_remove_weight(self.weights, bias.name)
            input_bias_np = bias_np[:, :4 * hidden_size]
            hidden_bias_np = bias_np[:, 4 * hidden_size:]

        # Parameter order in paddle.nn.LSTM:
        # 1. gate order in Paddle is: input, forget, cell, output.
        # 2. gate order in ONNX is: input, output, forget, cell.

        def reform_weights(w, n, intervals):
            slices = [w[:, x * n:y * n] for x, y in intervals]
            return np.concatenate(slices, axis=1)

        def transform_weight_with_bias(weights, n, intervals):
            return [reform_weights(w, n, intervals) for w in weights]

        reform_permutation = [(0, 1), (2, 4), (1, 2)]
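        # ONNX packs gates as blocks [i, o, f, c]; the intervals above pick
        # them in Paddle's order: i -> (0, 1), f, c -> (2, 4), o -> (1, 2).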

        weight_list = [input_weight_np, hidden_weight_np]
        if have_bias:
            weight_list.extend([input_bias_np, hidden_bias_np])
        weights = transform_weight_with_bias(weight_list, hidden_size,
                                             reform_permutation)

        op_name = name_generator("lstm", self.nn_name2id)
        y_out = node.output(0)
        yh_out = node.output(1) 
        yc_out = node.output(2)
        direction = node.get_attr('direction', 'forward')

        def generate_paddle_param_names(op_name, suffix=''):
            param_names = []
            param_names.extend(['{}.weight_ih_l0{}', '{}.weight_hh_l0{}'])
            if have_bias:
                param_names.append('{}.bias_ih_l0{}')
                param_names.append('{}.bias_hh_l0{}')
            param_names = [x.format(op_name, suffix) for x in param_names]
            return param_names

        def assign_params(op_name, weights, weight_idx=0, suffix=''):
            param_names = generate_paddle_param_names(op_name, suffix)
            for param_name, weight in zip(param_names, weights):
                self.weights[param_name] = weight[weight_idx]

        if direction == 'backward':
            raise Exception(
                "LSTM supports direction 'forward' or 'bidirectional', but got '{}'.".
                format(direction))
        else:
            assign_params(op_name, weights)
            if direction == 'bidirectional':
                assign_params(op_name, weights, 1, '_reverse')

        self.paddle_graph.add_layer(
            'paddle.nn.LSTM', 
            inputs={'input': x.name, 'initial_states': (init_h.name, init_c.name)},
            outputs=[op_name, y_out, yh_out, yc_out],
            input_size=input_size,
            hidden_size=hidden_size,
            num_layers=1,
            direction=string(direction),
            time_major=True)
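
        # ONNX LSTM's Y output is [seq_len, num_directions, batch, hidden];
        # Paddle's time-major output is [seq_len, batch, num_directions *
        # hidden], so reshape and transpose back to the ONNX layout.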

        self.paddle_graph.add_layer(
            'paddle.reshape',
            inputs={"x": y_out},
            outputs=[y_out],
            shape=[0, 0, -1, hidden_size]
            )
        self.paddle_graph.add_layer(
            'paddle.transpose',
            inputs={"x": y_out},
            outputs=[y_out],
            perm=[0,2,1,3]
            )
        
    @print_mapping_info
    def TopK(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_k = self.graph.get_input_node(node, idx=1, copy=True)
        layer_attrs = dict()
        layer_attrs["axis"] = node.get_attr('axis', -1)
        layer_attrs["largest"] = True if node.get_attr('largest', 1) == 1 else False
        layer_attrs["sorted"] = True if node.get_attr('sorted', 1) == 1 else False
        self.paddle_graph.add_layer(
            "paddle.topk", 
            inputs={"x": val_x.name,
                    "k": val_k.name}, 
            outputs=["{}_p{}".format(node.layer_name, 0), "{}_p{}".format(node.layer_name, 1)],
            **layer_attrs)
        
    @print_mapping_info
    def LRN(self, node):
        op_name = name_generator("lrn", self.nn_name2id)
        output_name = node.name
        layer_outputs = [op_name, output_name]
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        alpha = node.get_attr('alpha', 0.0001)
        beta = node.get_attr('beta', 0.75)
        bias = node.get_attr('bias', 1.0)
        size = node.get_attr('size')
        layer_attrs = {
            'size': size,
            'alpha': alpha,
            'beta': beta,
            'k': bias
        }
        self.paddle_graph.add_layer(
            "paddle.nn.LocalResponseNorm", 
            inputs={"x": val_x.name}, 
            outputs=layer_outputs, 
            **layer_attrs)