# Copyright (c) 2019  PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from x2paddle.decoder.onnx_decoder import ONNXGraph, ONNXGraphNode, ONNXGraphDataNode
from x2paddle.core.graph import GraphNode
from x2paddle.core.fluid_code import Layer
from x2paddle.core.fluid_code import FluidCode
from x2paddle.core.util import string
from x2paddle.op_mapper.onnx2paddle.opset9.custom_layer import *
from functools import reduce
import numpy as np
import onnx
import onnx.numpy_helper as numpy_helper
from onnx.mapping import TENSOR_TYPE_TO_NP_TYPE
import logging as _logging
from collections import OrderedDict
import math
import os
import shutil

_logger = _logging.getLogger(__name__)


def _const_weight_or_none(node):
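    """Return the constant data carried by a node: the value of a Constant
    node or the weight of an initializer (ONNXGraphDataNode), else None."""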
    if 'Constant' in node.layer_type:
        return node.value
    if isinstance(node, ONNXGraphDataNode):
        return node.weight
    return None


def get_same_padding(in_size, kernel_size, stride):
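    """Compute TensorFlow-style SAME padding for one spatial dimension and
    return it split as [pad_before, pad_after]."""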
    new_size = int(math.ceil(in_size * 1.0 / stride))
    pad_size = (new_size - 1) * stride + kernel_size - in_size
    pad0 = int(pad_size / 2)
    pad1 = pad_size - pad0
    return [pad0, pad1]


def print_mapping_info(func):
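    """Decorator for op converters: if conversion raises, print the node name
    and op_type before re-raising, so unsupported ops are easy to spot."""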
    def run_mapping(*args, **kwargs):
        node = args[1]
        try:
            res = func(*args, **kwargs)
        except:
            print("convert failed node:{}, op_type is {}".format(
                node.layer_name[9:], node.layer_type))
            raise
        else:
            #print("convert successfully node:{}, op_type is {}".format(
            #    node.layer_name[9:], node.layer_type))
            return res

    return run_mapping


class OpSet9():
    elementwise_ops = {
        'Add': 'elementwise_add',
        'Div': 'elementwise_div',
        'Sub': 'elementwise_sub',
        'Mul': 'elementwise_mul',
        'Pow': 'elementwise_pow',
    }

    default_op_mapping_field_values = OrderedDict()
    default_op_mapping_field_values['FLUID_OP'] = ''
    default_op_mapping_field_values['FLUID_INPUT_ARGS'] = None
    default_op_mapping_field_values['FLUID_OUTPUT_ARGS'] = None
    default_op_mapping_field_values['ATTR_MAPPING'] = dict()
    default_op_mapping_field_values['DEFAULTS'] = dict()
    default_op_mapping_field_values['INPUT_PERM'] = None
    default_op_mapping_field_values['OUTPUT_PERM'] = None
    default_op_mapping_field_values['FILL_NAME_FIELD'] = True

    default_op_mapping = {
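        # Entries here are converted generically by directly_map(); each value
        # is [fluid_op, input_args, output_args, attr_mapping, default_attrs,
        # ...], with missing fields filled from default_op_mapping_field_values.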
        'Shape': ['shape', ['X'], ['Out']],
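        # The Clip defaults below decode the little-endian byte patterns
        # 0xFF7FFFFF and 0x7F7FFFFF, i.e. -FLT_MAX and +FLT_MAX, as float32.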
        'Clip': [
            'clip', ['X'], ['Out'], dict(), dict(
                min=(np.asarray(
                    [255, 255, 127, 255], dtype=np.uint8).view(np.float32)[0]),
                max=(np.asarray(
                    [255, 255, 127, 127], dtype=np.uint8).view(np.float32)[0]),
            )
        ],
        'Erf': ['erf', ['X'], ['Out']],
        'Ceil': ['ceil', ['X'], ['Out']],
        'ReduceMean': [
            'reduce_mean', ['X'], ['Out'], dict(
                axes='dim', keepdims='keep_dim'), dict(keep_dim=1)
        ],
        'ReduceSum': [
            'reduce_sum', ['X'], ['Out'], dict(
                axes='dim', keepdims='keep_dim'), dict(keep_dim=1)
        ],
        'ReduceMin': [
            'reduce_min', ['X'], ['Out'], dict(
                axes='dim', keepdims='keep_dim'), dict(keep_dim=1)
        ],
        'ReduceMax': [
            'reduce_max', ['X'], ['Out'], dict(
                axes='dim', keepdims='keep_dim'), dict(keep_dim=1)
        ],
        # activation functions
        'Relu': ['relu', ['X'], ['Out']],
        'LeakyRelu': ['leaky_relu', ['X'], ['Out'], dict(), dict(alpha=.01)],
        'Elu': ['elu', ['X'], ['Out'], dict(), dict(alpha=1.)],
        'ThresholdedRelu': [
            'thresholded_relu', ['X'], ['Out'], dict(alpha='threshold'),
            dict(alpha=1.)
        ],
        'Tanh': ['tanh', ['X'], ['Out']],
        'Sigmoid': ['sigmoid', ['X'], ['Out']],
        'HardSigmoid': [
            'hard_sigmoid', ['X'], ['Out'], dict(
                alpha='slope', beta='offset'), dict(
                    slope=.2, offset=.5)
        ],
        'Softsign': ['softsign', ['X'], ['Out']],
        'Softplus': ['softplus', ['X'], ['Out']],
        'Exp': ['exp', ['X'], ['Out']],
        'Softmax': ['softmax', ['X'], ['Out'], dict(), dict(axis=1)],
        'Sqrt': ['sqrt', ['X'], ['Out']],
        'Floor': ['floor', ['X'], ['Out']],
        'Abs': ['abs', ['X'], ['Out']],
    }

    default_ioa_constraint = {}

    def __init__(self, decoder):
        super(OpSet9, self).__init__()
        self.graph = decoder.graph
        self.input_shapes = []
        self.weights = dict()
        self.omit_nodes = list()
        self.used_custom_layers = dict()

    @print_mapping_info
    def directly_map(self, node, name='', *args, **kwargs):
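        """Convert an ONNX op to a single fluid layer according to its entry
        in default_op_mapping."""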
        inputs = node.layer.input
        outputs = node.layer.output
        op_type = node.layer_type
        attrs = node.attr_map
        info = self.default_op_mapping[op_type]
        info.extend(
            list(self.default_op_mapping_field_values.values())[len(info):])
        (
            fluid_op,
            fluid_input_args,
            fluid_output_args,
            attr_mapping,
            default_attrs,
            input_perm,
            output_perm,
            fill_name_field, ) = info

        if fluid_op in self.default_ioa_constraint:
            for predicate, message in self.default_ioa_constraint[fluid_op]:
                assert predicate(inputs, outputs, attrs), message

        mapped_attrs = {
            attr_mapping.get(key, key): value
            for key, value in attrs.items()
        }
        if '' in mapped_attrs:
            mapped_attrs.pop('')
        if '_' in mapped_attrs:
            mapped_attrs.pop('_')
        fluid_attrs = default_attrs.copy()
        fluid_attrs.update(mapped_attrs)
        inputs = inputs if input_perm is None else list(
            map(lambda i: inputs[i], input_perm))
        val_inps = []
        for idx, ipt in enumerate(inputs):
            val_inps.append(self.graph.get_input_node(node, idx=idx, copy=True))

        val_outs = outputs if output_perm is None else list(
            map(lambda i: outputs[i], output_perm))
        attr = fluid_attrs
        assert len(val_inps) == 1, 'directly_map error with multi inputs'
        if fluid_op not in ['shape', 'erf']:
            attr['name'] = string(node.layer_name)
        node.fluid_code.add_layer(
            fluid_op, inputs=val_inps[0], output=val_outs[0], param_attr=attr)
        if fluid_op in ['shape']:
            node.fluid_code.add_layer(
                'cast',
                inputs=val_outs[0],
                output=val_outs[0],
                param_attr={'dtype': string('int64')})

    @print_mapping_info
    def deal_custom_layer(self, node):
        op = node.layer_type
        custom_code, func = make_custom_layer(node)
        child_func_code, child_func = make_custom_child_func(node)
        params = get_params(node.layer, node.layer_type)
        arg_names, kwargs = set_args(func, params)
        kwargs['name'] = string(node.layer_name)
        node.fluid_code.add_layer(
            func.__code__.co_name,
            inputs=node.inputs,
            output=node,
            param_attr=kwargs,
            is_custom_layer=True)
        if op not in self.used_custom_layers:
            self.used_custom_layers[op] = custom_code
            if op + '_child_func' not in self.used_custom_layers:
                if child_func_code is not None:
                    self.used_custom_layers[op +
                                            '_child_func'] = child_func_code

    @print_mapping_info
    def elementwise_map(self, node):
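        """Convert binary elementwise ops; if the smaller operand carries
        leading broadcast dims of size 1, reshape it to its trailing dims
        before emitting the fluid op."""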
        assert node.layer_type in self.elementwise_ops
        op_type = self.elementwise_ops[node.layer_type]

        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        val_y_shape = val_y.out_shapes[0]
        val_x_shape = val_x.out_shapes[0]

        if len(val_x_shape) < len(val_y_shape):
            val_x, val_y = val_y, val_x
            val_y_shape, val_x_shape = val_x_shape, val_y_shape

        str_y_shape = ','.join(str(e) for e in val_y_shape)
        str_x_shape = ','.join(str(e) for e in val_x_shape)
        slice_idx = 0
        if str_y_shape not in str_x_shape:
            for dim in val_y_shape:
                if dim == 1:
                    slice_idx += 1
                else:
                    break
        attr = {"name": string(node.layer_name)}
        if slice_idx < len(val_y_shape) and slice_idx > 0:
            val_y_reshaped = val_y_shape[slice_idx:]
            var_y_reshaped = val_y.layer_name + '_reshaped'
            attr_reshaped = {
                'shape': val_y_reshaped,
                'name': string(var_y_reshaped)
            }
            node.fluid_code.add_layer(
                'reshape',
                inputs=val_y,
                output=var_y_reshaped,
                param_attr=attr_reshaped)
            inputs = {'x': val_x, 'y': var_y_reshaped}
            node.fluid_code.add_layer(
                op_type, inputs=inputs, output=node, param_attr=attr)
        else:
            inputs = {'x': val_x, 'y': val_y}
            node.fluid_code.add_layer(
                op_type, inputs=inputs, output=node, param_attr=attr)

    @print_mapping_info
    def place_holder(self, node):
        self.input_shapes.append(node.out_shapes[0])

        shape = node.out_shapes[0]
        for i, dim_shape in enumerate(shape):
            if dim_shape == 0 and i == 0:
                shape[i] = 1
            if dim_shape == 0 and i != 0:
                assert False, 'shape of input is not assigned'
        attr = {
            "dtype": string(node.dtype),
            "shape": shape,
            "name": string(node.layer_name),
            "append_batch_size": 'False'
        }

        node.fluid_code.add_layer(
            "data", inputs=None, output=node, param_attr=attr)

    @print_mapping_info
    def create_parameter(self, node, parameter=None):
        if parameter is not None:
            node = parameter
        dtype = node.dtype
        shape = node.out_shapes[0]
        if len(node.weight.shape) == 0:
            shape = [1]
        self.weights[node.layer_name] = node.weight
        attr = {
            'dtype': string(dtype),
            'shape': shape,
            'name': string(node.layer_name),
            'default_initializer': 'Constant(0.0)'
        }
        if dtype == 'bool':
            attr['dtype'] = string('int64')
            node.fluid_code.add_layer(
                "create_parameter", inputs=None, output=node, param_attr=attr)
            node.fluid_code.add_layer(
                "cast",
                inputs=node,
                output=node,
                param_attr={'dtype': string('bool')})
        elif dtype == 'uint8':
            attr['dtype'] = string('float32')
            node.fluid_code.add_layer(
                "create_parameter", inputs=None, output=node, param_attr=attr)
        else:
            node.fluid_code.add_layer(
                "create_parameter", inputs=None, output=node, param_attr=attr)

    def _pad_if_asymmetric(self, node, pads, val_name):  # pads: SSEE
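        """Split an ONNX start/end (SSEE) pads list into per-dim values when
        symmetric; otherwise emit an explicit Pad op and return zero pads
        together with the padded variable's name."""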
        assert len(pads) & 1 == 0
        symmetric = True
        ndims = len(pads) // 2
        for idx_dim in range(ndims):
            if pads[idx_dim] != pads[ndims + idx_dim]:
                symmetric = False
                break
        if symmetric:
            return pads[:ndims], val_name
        val_padded = self.Pad(node, op_independent=False)
        return [0] * ndims, val_padded

    def _interpolate(self, node):
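        """Shared converter for Resize/Upsample: emits resize_<mode>, taking
        the scale factor from the ONNX scales input."""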
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        if node.layer_type == 'Resize':
            val_scales = self.graph.get_input_node(node, idx=2, copy=True)
        elif node.layer_type == 'Upsample':
            val_scales = self.graph.get_input_node(node, idx=1, copy=True)

        attr = {'name': string(node.layer_name)}
        mode = node.get_attr('mode', 'nearest')
        fluid_op = 'resize_{}'.format(mode)
        if 'linear' in mode:
            print(
                'Warning: paddle does not support op:resize with mode: linear, using bilinear instead'
            )
            fluid_op = 'resize_bilinear'

        node.fluid_code.add_layer(
            fluid_op,
            inputs={'input': val_x,
                    'scale': val_scales},
            output=node,
            param_attr=attr)

    @print_mapping_info
    def RoiAlign(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_rois = self.graph.get_input_node(node, idx=1, copy=True)

        pooled_height = node.get_attr('output_height')
        pooled_width = node.get_attr('output_width')
        spatial_scale = node.get_attr('spatial_scale')
        sampling_ratio = node.get_attr('sampling_ratio')
        attr = {
            'pooled_height': pooled_height,
            'pooled_width': pooled_width,
            'spatial_scale': spatial_scale,
            'sampling_ratio': sampling_ratio,
        }
        node.fluid_code.add_layer(
            'roi_align',
            inputs={'input': val_x,
                    'rois': val_rois},
            output=node,
            param_attr=attr)

    @print_mapping_info
    def MaxRoiPool(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_rois = self.graph.get_input_node(node, idx=1, copy=True)

        spatial_scale = node.get_attr('spatial_scale')
        pooled_height, pooled_width = node.get_attr('pooled_shape')
        attr = {
            'pooled_height': pooled_height,
            'pooled_width': pooled_width,
            'spatial_scale': spatial_scale,
        }
        node.fluid_code.add_layer(
            'roi_pool',
            inputs={'input': val_x,
                    'rois': val_rois},
            output=node,
            param_attr=attr)

    @print_mapping_info
    def Pad(self, node, op_independent=True):
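        """Convert ONNX Pad, reordering SSEE pads into paddle's SESE layout
        and using pad2d where the layout allows; when op_independent is
        False, return the name of the padded intermediate variable."""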
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        pads = node.get_attr('pads')
        mode = node.get_attr('mode', 'constant')
        value = node.get_attr('value', 0.)
        data_shape = val_x.out_shapes[0]
        output_shape = node.out_shapes[0]
        assume_pad2d = False
        attr = {}
        if len(pads) == 4:
            assume_pad2d |= mode != 'constant'
            if data_shape:
                assume_pad2d |= data_shape and len(data_shape) == 4  # NCHW
            if output_shape:
                assume_pad2d |= output_shape and len(output_shape) == 4  # NCHW
        if assume_pad2d:
            fluid_op = 'pad2d'
            attr['data_format'] = string('NCHW')
            attr['mode'] = string(mode)
        else:
            attr = {'pad_value': value}
            fluid_op = 'pad'
        if len(pads) == 4:
            paddings = np.array(pads).reshape(
                (-1, 2)).transpose().flatten().tolist()  # SSEE -> SESE
        elif len(pads) == 8:
            paddings = np.array(pads).reshape(
                (-1, 4)).transpose().flatten().tolist()  # SSEE -> SESE
            if sum(paddings[:4]) == 0:
                fluid_op = 'pad2d'
                paddings = paddings[4:]
                attr['mode'] = string(mode)
        attr['paddings'] = paddings
        if op_independent:
            attr['name'] = string(node.layer_name)
            node.fluid_code.add_layer(
                fluid_op, inputs=val_x, output=node, param_attr=attr)
        else:
            attr['name'] = string(node.layer_name + '_paded')
            node.fluid_code.add_layer(
                fluid_op,
                inputs=val_x,
                output=node.layer_name + '_paded',
                param_attr=attr)
            return node.layer_name + '_paded'

    @print_mapping_info
    def Unsqueeze(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        axes = node.get_attr('axes')
        attr = {'axes': axes, 'name': string(node.layer_name)}
        if len(val_x.out_shapes[0]) == 0:
            if node.layer_name:
                node.fluid_code.add_layer(
                    'reshape',
                    inputs=val_x,
                    output=node,
                    param_attr={'shape': [1]})
        else:
            node.fluid_code.add_layer(
                'unsqueeze', inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def Shrink(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        bias = node.get_attr('bias')
        lambd = node.get_attr('lambd')
        assert bias == 0.0, 'not support bias!=0'
        attr = {'threshold': lambd, 'name': node.layer_name}
        node.fluid_code.add_layer(
            'hard_shrink', inputs=val_x, output=node, param_attr=attr)

    def Greater(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        node.fluid_code.add_layer(
            'greater_than',
            inputs={'x': val_x,
                    'y': val_y},
            output=node,
            param_attr=None)

    @print_mapping_info
    def Constant(self, node):
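        """Convert Constant: single-element values become fill_constant,
        larger tensors are stored in self.weights and emitted through
        create_parameter."""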
        val_output = self.graph.get_node(node.layer.output[0], copy=True)

        value = node.get_attr('value')
        dtype = np.dtype(value.dtype)
        output_dtype = val_output.dtype
        if output_dtype:
            assert dtype == output_dtype, 'tensor dtype does not match storage dtype'

        shape = node.get_attr('shape', None)

        if shape is None:
            shape = val_output.out_shapes[0]
        if shape is None:
            shape = list(value.shape)
            _logger.warning('in (Constant -> %s): '
                            'attribute "shape" of %s not inferred, '
                            'using value as 1-D tensor may lead to failures',
                            val_output.layer_name, val_output.layer_name)

        if len(value) == 1:
            value = value.tolist()
            shape = [1]
            value = value[0]
            if dtype.name == 'int64':
                dtype = 'int32'
            attr = {'shape': shape, 'dtype': string(dtype), 'value': value}
            node.fluid_code.add_layer(
                'fill_constant', inputs=None, output=node, param_attr=attr)
        else:
            if dtype.name == 'uint8':
                dtype = 'int64'
            value = np.reshape(value, shape)
            self.weights[node.layer_name] = value
            attr = {
                'dtype': string(dtype),
                'shape': shape,
                'name': string(node.layer_name),
                'default_initializer': 'Constant(0.0)'
            }
            node.fluid_code.add_layer(
                "create_parameter", inputs=None, output=node, param_attr=attr)

    @print_mapping_info
    def Resize(self, node):
        self._interpolate(node)

    @print_mapping_info
    def Upsample(self, node):
        self._interpolate(node)

    @print_mapping_info
    def InstanceNormalization(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_scale = self.graph.get_input_node(node, idx=1, copy=True)
        val_b = self.graph.get_input_node(node, idx=2, copy=True)
        epsilon = node.get_attr('epsilon', 1e-5)
        attr = {
            'epsilon': epsilon,
            'param_attr': string(val_scale.layer_name),
            'bias_attr': string(val_b.layer_name)
        }
        node.fluid_code.add_layer(
            "instance_norm", inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def Expand(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_shape = self.graph.get_input_node(node, idx=1, copy=True)

        if len(val_shape.outputs) == 1:
            self.omit_nodes.append(val_shape.layer_name)

        val_y = self.graph.get_node(node.layer.output[0], copy=True)
        out_shape = node.out_shapes[0]
        val_x_dtype = val_x.dtype

        name_ones = node.layer_name + '_ones'
        attr_ones = {'shape': out_shape, 'dtype': string(val_x_dtype)}
        node.fluid_code.add_layer(
            'ones', inputs=None, output=name_ones, param_attr=attr_ones)
        inputs = {'x': name_ones, 'y': val_x}
        attr = {'name': string(node.layer_name)}
        node.fluid_code.add_layer(
            'elementwise_mul',
            inputs=inputs,
            output=node.layer_name,
            param_attr=attr)

    @print_mapping_info
    def Gather(self, node):
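        """Convert Gather, covering axis == 0 / axis > 0 with 1-D or N-D
        indices via transpose/reshape around fluid's gather (or an embedding
        lookup when gathering rows of a parameter tensor)."""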
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        indices = self.graph.get_input_node(node, idx=1, copy=True)
        indices_shape = indices.out_shapes[0]
        axis = node.get_attr('axis', 0)
        #assert len(
        #    indices_shape) <= 2, "Gather op don't support dim of indice >2 "
        if axis == 0 and len(indices_shape) <= 1:
            node.fluid_code.add_layer(
                'gather',
                inputs={'input': val_x,
                        'index': indices},
                output=node,
                param_attr=None)
        elif axis > 0 and len(indices_shape) <= 1:
            perm = list(range(len(val_x.out_shapes[0])))
            perm = [axis] + perm[:axis] + perm[axis + 1:]
            attr_trans = {'perm': perm}
            name_trans = val_x.layer_name + '_trans'
            node.fluid_code.add_layer(
                'transpose',
                inputs=val_x,
                output=name_trans,
                param_attr=attr_trans)
            node.fluid_code.add_layer(
                'gather',
                inputs={'input': name_trans,
                        'index': indices},
                output=node,
                param_attr=None)
            node.fluid_code.add_layer(
                'transpose', inputs=node, output=node, param_attr=attr_trans)
        elif axis == 0 and len(indices_shape) > 1:
            if val_x.out_shapes[0] is not None and isinstance(
                    val_x, ONNXGraphDataNode):
                node.fluid_code.add_layer(
                    'embedding',
                    inputs=indices,
                    output=node,
                    use_fluid=True,
                    param_attr={
                        'param_attr': string(val_x.layer_name),
                        'size': val_x.out_shapes[0]
                    })
            else:
                from functools import reduce
                #indices_shape = [1,7]
                reshape_shape = reduce(lambda x, y: x * y, indices_shape)
                indices_reshape = indices.layer_name + '_shape'
                node.fluid_code.add_layer(
                    'reshape',
                    inputs=indices,
                    output=indices_reshape,
                    param_attr={'shape': [reshape_shape, ]})

                perm = list(range(len(val_x.out_shapes[0])))
                node.fluid_code.add_layer(
                    'gather',
                    inputs={'input': val_x,
                            'index': indices_reshape},
                    output=node,
                    param_attr=None)
                val_x_shape = val_x.out_shapes[0]
                reshaped_shape = []
                for i in perm:
                    reshaped_shape.append(indices_shape[i])
                for i in val_x_shape[:axis] + val_x_shape[axis + 1:]:
                    reshaped_shape.append(i)
                node.fluid_code.add_layer(
                    'reshape',
                    inputs=node,
                    output=node,
                    param_attr={'shape': reshaped_shape})
        elif axis > 0 and len(indices_shape) > 1:
            from functools import reduce
            reshape_shape = reduce(lambda x, y: x * y, indices_shape)
            indices_reshape = indices.layer_name + '_shape'
            node.fluid_code.add_layer(
                'reshape',
                inputs=indices,
                output=indices_reshape,
                param_attr={'shape': [reshape_shape, ]})

            perm = list(range(len(val_x.out_shapes[0])))
            perm = [axis] + perm[:axis] + perm[axis + 1:]
            attr_trans = {'perm': perm}
            name_trans = val_x.layer_name + '_trans'
            node.fluid_code.add_layer(
                'transpose',
                inputs=val_x,
                output=name_trans,
                param_attr=attr_trans)
            node.fluid_code.add_layer(
                'gather',
                inputs={'input': name_trans,
                        'index': indices_reshape},
                output=node,
                param_attr=None)
            node.fluid_code.add_layer(
                'transpose', inputs=node, output=node, param_attr=attr_trans)
            val_x_shape = val_x.out_shapes[0]
            reshaped_shape = []
            for i in perm:
                reshaped_shape.append(indices_shape[i])
            for i in val_x_shape[:axis] + val_x_shape[axis + 1:]:
                reshaped_shape.append(i)
            node.fluid_code.add_layer(
                'reshape',
                inputs=node,
                output=node,
                param_attr={'shape': reshaped_shape})

    @print_mapping_info
    def Range(self, node):
        val_start = self.graph.get_input_node(node, idx=0, copy=True)
        val_limit = self.graph.get_input_node(node, idx=1, copy=True)
        val_delta = self.graph.get_input_node(node, idx=2, copy=True)
        dtype = val_start.dtype
        inputs = {'start': val_start, 'end': val_limit, 'step': val_delta}
        node.fluid_code.add_layer(
            'range',
            inputs=inputs,
            output=node,
            param_attr={'dtype': string(dtype)})

    @print_mapping_info
    def Slice(self, node):
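        """Convert Slice in both the attribute form and the input-tensor form
        (starts/ends/axes/steps as extra inputs); ends are clamped to
        INT32_MAX and only steps == 1 is supported."""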
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        starts, ends, axes, steps = None, None, None, None
        attr = {}
        if len(node.inputs) > 1:
            starts = self.graph.get_input_node(node, idx=1, copy=True)
            ends = self.graph.get_input_node(node, idx=2, copy=True)
            if len(node.inputs) > 3:
                axes = self.graph.get_input_node(node, idx=3, copy=True)
                axes = _const_weight_or_none(axes)
            if len(node.inputs) > 4:
                steps = self.graph.get_input_node(node, idx=4, copy=True)
                steps = _const_weight_or_none(steps)
                if steps is not None:
                    assert steps == 1, "Only support convert op:Slice, which attribute:steps == 1"
            attr = {
                "axes": axes,
                "starts": starts.layer_name,
                "ends": ends.layer_name
            }
            starts_value = _const_weight_or_none(starts)
            ends_value = _const_weight_or_none(ends)
            if starts_value is not None and ends_value is not None:
                self.omit_nodes.append(starts.layer_name)
                self.omit_nodes.append(ends.layer_name)
                ends_value = ends_value.copy()
                for idx in range(len(ends_value)):
                    if ends_value[idx] > 2**31 - 1:
                        ends_value[idx] = 2**31 - 1
                attr = {
                    "axes": axes,
                    "starts": starts_value,
                    "ends": ends_value
                }
            else:
                if starts.dtype != 'int32':
                    node.fluid_code.add_layer(
                        'cast',
                        inputs=starts,
                        output=starts,
                        param_attr={'dtype': string('int32')})
                if ends.dtype != 'int32':
                    node.fluid_code.add_layer(
                        'cast',
                        inputs=ends,
                        output=ends,
                        param_attr={'dtype': string('int32')})
        else:
            starts = node.get_attr('starts')
            ends = node.get_attr('ends')
            axes = node.get_attr('axes')
            for idx in range(len(ends)):
                if ends[idx] > 2**31 - 1:
                    ends[idx] = 2**31 - 1
            attr = {"axes": axes, "starts": starts, "ends": ends}

        node.fluid_code.add_layer(
            'slice', inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def ConstantOfShape(self, node):
        val_shape = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_node(node.layer.output[0], copy=True)

        value = node.get_attr('value')
        dtype = value.dtype
        value = value.tolist()
        assert len(value) == 1, ('given value is not a scalar (len > 1), '
                                 'which is not supported')
        if len(value) == 1:
            value = value[0]
            if dtype.name == 'int64':
                dtype = 'int32'
            attr = {
                'shape': val_shape.layer_name,
                'dtype': string(dtype),
                'value': value
            }
            node.fluid_code.add_layer(
                'fill_constant', inputs=None, output=node, param_attr=attr)

    @print_mapping_info
    def Split(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_node(node.layer.output[0], copy=True)

        fluid_op = 'split'
        split = node.get_attr('split')
        axis = node.get_attr('axis', 0)
        attr = {
            'num_or_sections': split,
            'dim': axis,
            'name': string(node.layer_name)
        }

        node.fluid_code.add_layer(
            'split', inputs=val_x, output=val_y, param_attr=attr)

    @print_mapping_info
    def Reshape(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_shape = self.graph.get_input_node(node, idx=1, copy=True)
        val_reshaped = self.graph.get_node(node.layer.output[0], copy=True)
        attr = {}
        shape_value = _const_weight_or_none(val_shape)
        shape_dims = len(val_shape.out_shapes[0])

        if shape_value is not None:
            node.fluid_code.add_layer(
                'reshape',
                inputs={'x': val_x},
                output=node,
                param_attr={'shape': shape_value.tolist()})
        elif val_shape.dtype == 'int64':
            val_shape_cast = val_shape.layer_name + '_cast'
            node.fluid_code.add_layer(
                'cast',
                inputs=val_shape,
                output=val_shape_cast,
                param_attr={'dtype': string('int32')})
            node.fluid_code.add_layer(
                'reshape',
                inputs=val_shape_cast,
                output=val_shape_cast,
                param_attr={'shape': val_shape.out_shapes[0]})
            node.fluid_code.add_layer(
                'reshape',
                inputs={'x': val_x,
                        'shape': val_shape_cast},
                output=node,
                param_attr=attr)
        else:
            node.fluid_code.add_layer(
                'reshape',
                inputs=val_shape,
                output=val_shape,
                param_attr={'shape': val_shape.out_shapes[0]})
            node.fluid_code.add_layer(
                'reshape',
                inputs={'x': val_x,
                        'shape': val_shape},
                output=node,
                param_attr=attr)

    @print_mapping_info
    def Cast(self, node):
        val_input = self.graph.get_input_node(node, idx=0, copy=True)
        val_output = self.graph.get_node(node.layer.output[0], copy=True)

        dtype = node.get_attr('to')
        if not isinstance(dtype, np.dtype):
            dtype = TENSOR_TYPE_TO_NP_TYPE[dtype]

        output_dtype = val_output.dtype
        if output_dtype:
            assert dtype == output_dtype, 'dtype of "to" does not match output dtype'
        attr = {'dtype': string(dtype)}
        node.fluid_code.add_layer(
            'cast', inputs=val_input, output=node, param_attr=attr)

    @print_mapping_info
    def AveragePool(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)

        auto_pad = node.get_attr('auto_pad', 'NOTSET')
        kernel_shape = node.get_attr("kernel_shape")
        poolnd = len(kernel_shape)
        strides = node.get_attr("strides")
        pad_mode = node.get_attr("pads")
        ceil_mode = bool(node.get_attr('ceil_mode', 0))
        pads = node.get_attr('pads', [0] * (poolnd * 2))
        fluid_op = 'pool{}d'.format(poolnd)
        assert 2 <= poolnd <= 3, 'only pool2d and pool3d are supported'

        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

        if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
            input_shape = val_x.out_shapes[0]
            pad_h = get_same_padding(input_shape[2], kernel_shape[0],
                                     strides[0])
            pad_w = get_same_padding(input_shape[3], kernel_shape[1],
                                     strides[1])
            attr = {"paddings": pad_h + pad_w, "pad_value": 0.0}

        attr = {
            "pool_size": kernel_shape,
            "pool_type": string('avg'),
            "pool_stride": strides,
            "pool_padding": paddings,
            "ceil_mode": ceil_mode,
            "exclusive": 'True',
            "name": string(node.layer_name)
        }

        node.fluid_code.add_layer(
            fluid_op, inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def Concat(self, node):
        inputs = []
        for i in range(len(node.layer.input)):
            ipt = self.graph.get_input_node(node, idx=i, copy=True)
            if isinstance(ipt, str):
                inputs.append(ipt)
            else:
                inputs.append(ipt.layer_name)
        axis = node.get_attr('axis')
        attr = {'axis': axis}
        node.fluid_code.add_layer(
            'concat', inputs=inputs, output=node, param_attr=attr)

    @print_mapping_info
    def Flatten(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        axis = node.get_attr('axis', 1)
        attr = {"axis": str(axis), "name": string(node.layer_name)}
        node.fluid_code.add_layer(
            'flatten', inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def Gemm(self, node):
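        """Convert Gemm as matmul(A, B) scaled by alpha, followed by an
        elementwise_add of the C term when beta is nonzero."""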
        val_a = self.graph.get_input_node(node, idx=0, copy=True)
        val_b = self.graph.get_input_node(node, idx=1, copy=True)
        val_c = self.graph.get_input_node(node, idx=2, copy=True)

        alpha = node.get_attr('alpha', 1.)  # optional
        beta = node.get_attr('beta', 1.)  # optional
        trans_a = bool(node.get_attr('transA', 0))  # optional
        trans_b = bool(node.get_attr('transB', 0))  # optional
        val_mm = node.layer_name + '_mm'
        matmul_inputs = {"x": val_a, "y": val_b}
        attr_matmul = {
            "transpose_x": trans_a,
            "transpose_y": trans_b,
            "alpha": alpha,
            "name": string(val_mm)
        }
        node.fluid_code.add_layer(
            'matmul',
            inputs=matmul_inputs,
            output=val_mm,
            param_attr=attr_matmul)

        if beta != 0:
            if beta == 1.:
                add_inputs = {"x": val_mm, "y": val_c}
                attr = {"name": string(node.layer_name)}
                node.fluid_code.add_layer(
                    "elementwise_add",
                    inputs=add_inputs,
                    output=node,
                    param_attr=attr)
            else:
                var_beta = node.layer_name + '_beta'
                matmul_beta_inputs = {"x": val_c, "y": var_beta}
                node.fluid_code.add_layer(
                    "Constant",
                    inputs=matmul_beta_inputs,
                    output=var_beta,
                    param_attr={'value': beta})

                add_inputs = {"x": val_mm, "y": var_beta}
                attr = {"name": string(node.layer_name)}
                node.fluid_code.add_layer(
                    "elementwise_add",
                    inputs=add_inputs,
                    output=node,
                    param_attr=attr)

    @print_mapping_info
    def Sum(self, node):
        val_inps = node.layer.input
        inputs = {
            "x": self.graph.get_input_node(
                node, idx=0, copy=True),
            "y": self.graph.get_input_node(
                node, idx=1, copy=True),
        }
        node.fluid_code.add_layer("elementwise_add", inputs=inputs, output=node)

        for idx, ipt in enumerate(val_inps[2:]):
            # remaining summands start at input index 2
            y = self.graph.get_input_node(node, idx=idx + 2, copy=True)
            inputs = {
                "x": node.layer_name,
                "y": y,
            }
            node.fluid_code.add_layer(
                "elementwise_add", inputs=inputs, output=node)

    @print_mapping_info
    def MatMul(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        inputs = {"x": val_x, "y": val_y}
        attr = {"name": string(node.layer_name)}
        node.fluid_code.add_layer(
            "matmul", inputs=inputs, output=node, param_attr=attr)

    @print_mapping_info
    def BatchNormalization(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_scale = self.graph.get_input_node(node, idx=1, copy=True)
        val_b = self.graph.get_input_node(node, idx=2, copy=True)
        val_mean = self.graph.get_input_node(node, idx=3, copy=True)
        val_var = self.graph.get_input_node(node, idx=4, copy=True)

        self.omit_nodes.append(val_scale.layer_name)
        self.omit_nodes.append(val_b.layer_name)
        self.omit_nodes.append(val_mean.layer_name)
        self.omit_nodes.append(val_var.layer_name)

        momentum = node.get_attr('momentum', .9)
        epsilon = node.get_attr('epsilon', 1e-5)

        # Attribute: spatial is used in BatchNormalization-1,6,7
        spatial = bool(node.get_attr('spatial'))
        attr = {
            "momentum": momentum,
            "epsilon": epsilon,
            "data_layout": string('NCHW'),
            "is_test": True,
            "param_attr": string(val_scale.layer_name),
            "bias_attr": string(val_b.layer_name),
            "moving_mean_name": string(val_mean.layer_name),
            "moving_variance_name": string(val_var.layer_name),
            "use_global_stats": spatial,
            "name": string(node.layer_name)
        }
        node.fluid_code.add_layer(
            "batch_norm", inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def Transpose(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        perm = node.get_attr('perm')
        attr = {'perm': perm, "name": string(node.layer_name)}
        node.fluid_code.add_layer(
            "transpose", inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def Relu(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        attr = {"name": string(node.layer_name)}
        node.fluid_code.add_layer(
            "relu", inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def PRelu(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_slope = self.graph.get_input_node(node, idx=1, copy=True)

        mode = 'channel'
        shape_slope = val_slope.out_shapes[0]
        if len(shape_slope) == 1:
            mode = 'all'
        elif len(shape_slope) > 2:
            mode = 'element'
        attr = {
            "param_attr": string(val_slope.layer_name),
            'mode': string(mode)
        }
        node.fluid_code.add_layer(
            "prelu", inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def Squeeze(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        axes = node.get_attr('axes')
        attr = {'axes': axes, "name": string(node.layer_name)}
        if len(val_x.out_shapes[0]) == 1:
            node.fluid_code.add_layer(
                "cast",
                inputs=val_x,
                output=node,
                param_attr={'dtype': string(val_x.dtype)})
        else:
            node.fluid_code.add_layer(
                "squeeze", inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def Equal(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        node.fluid_code.add_layer(
            "equal",
            inputs={'x': val_x,
                    'y': val_y},
            output=node,
            param_attr=None)

    @print_mapping_info
    def Greater(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        node.fluid_code.add_layer(
            "greater_than",
            inputs={'x': val_x,
                    'y': val_y},
            output=node,
            param_attr=None)

    @print_mapping_info
    def Where(self, node):
        condition = self.graph.get_input_node(node, idx=0, copy=True)
        val_x = self.graph.get_input_node(node, idx=1, copy=True)
        val_y = self.graph.get_input_node(node, idx=2, copy=True)

        not_condition = condition.layer_name + '_not'
        node.fluid_code.add_layer(
            "logical_not",
            inputs=condition,
            output=not_condition,
            param_attr=None)
        cast_not_condition = not_condition + '_cast'
        node.fluid_code.add_layer(
            "cast",
            inputs=not_condition,
            output=cast_not_condition,
            param_attr={'dtype': string(val_x.dtype)})
        cast_condition = condition.layer_name + '_cast'
        node.fluid_code.add_layer(
            "cast",
            inputs=condition,
            output=cast_condition,
            param_attr={'dtype': string(val_x.dtype)})
        mul_val_x = val_x.layer_name + '_mul'
        node.fluid_code.add_layer(
            "elementwise_mul",
            inputs={'x': val_x,
                    'y': cast_condition},
            output=mul_val_x,
            param_attr=None)

        mul_val_y = val_y.layer_name + '_mul'
        node.fluid_code.add_layer(
            "elementwise_mul",
            inputs={'x': val_y,
                    'y': cast_not_condition},
            output=mul_val_y,
            param_attr=None)

        node.fluid_code.add_layer(
            "elementwise_add",
            inputs={'x': mul_val_x,
                    'y': mul_val_y},
            output=node,
            param_attr=None)

    @print_mapping_info
    def NonZero(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_x_dim = len(val_x.out_shapes[0])
        print(val_x.layer_name, val_x.out_shapes[0])
        if val_x_dim == 1:
            node.fluid_code.add_layer("nonzero", inputs=val_x, output=val_x)
            node.fluid_code.add_layer(
                "transpose",
                inputs=val_x,
                output=node,
                param_attr={'perm': [1, 0]})
        if val_x_dim > 1:
            node.fluid_code.add_layer("nonzero", inputs=val_x, output=val_x)
            node.fluid_code.add_layer(
                "split",
                inputs=val_x,
                output=val_x,
                param_attr={'num_or_sections': 1,
                            'dim': val_x_dim})
            node.fluid_code.add_layer("concat", inputs=val_x, output=node)

    @print_mapping_info
    def Identity(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        node.fluid_code.add_layer("assign", inputs=val_x, output=node)

    @print_mapping_info
    def Tile(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_repeats = self.graph.get_input_node(node, idx=1, copy=True)
        repeats = _const_weight_or_none(val_repeats)

        if repeats is None:
            repeats = val_repeats.layer_name
        elif isinstance(repeats, int):
            repeats = [repeats]

        attr = {
            'expand_times': repeats,
            "name": string(node.layer_name),
        }
        node.fluid_code.add_layer(
            "expand", inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def MaxPool(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        auto_pad = node.get_attr('auto_pad', 'NOTSET')
        assert node.get_attr(
            "dilations") is None, 'dilations are not supported'  # optional

        kernel_shape = node.get_attr("kernel_shape")
        poolnd = len(kernel_shape)
        strides = node.get_attr("strides")
        pad_mode = node.get_attr("pads")
        ceil_mode = bool(node.get_attr('ceil_mode', 0))  # optional
        pads = node.get_attr('pads', [0] * (poolnd * 2))  # optional
        fluid_op = 'pool{}d'.format(poolnd)
        assert 2 <= poolnd <= 3, 'only pool2d and pool3d are supported'

        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

        if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
            input_shape = val_x.out_shapes[0]
            pad_h = get_same_padding(input_shape[2], kernel_shape[0],
                                     strides[0])
            pad_w = get_same_padding(input_shape[3], kernel_shape[1],
                                     strides[1])
            attr = {"paddings": pad_h + pad_w, "pad_value": 0.0}

        attr = {
            "pool_size": kernel_shape,
            "pool_type": string("max"),
            "pool_stride": strides,
            "pool_padding": paddings,
            "ceil_mode": ceil_mode,
            "name": string(node.layer_name),
            "exclusive": False
        }
        node.fluid_code.add_layer(
            fluid_op, inputs=val_x, output=node, param_attr=attr)

    def _global_pool(self, node):
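        """Shared converter for GlobalMaxPool/GlobalAveragePool: a pool2d
        with global_pooling=True."""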
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_node(node.layer.output[0], copy=True)
        fluid_op = 'pool2d'
        pool_type = None
        if node.layer.op_type == 'GlobalMaxPool':
            pool_type = 'max'
        elif node.layer.op_type == 'GlobalAveragePool':
            pool_type = 'avg'

        attr = {
            "pool_type": string(pool_type),
            "global_pooling": True,
            "name": string(node.layer_name)
        }
        node.fluid_code.add_layer(
            fluid_op, inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def GlobalMaxPool(self, node):
        self._global_pool(node)

    @print_mapping_info
    def GlobalAveragePool(self, node):
        self._global_pool(node)

    @print_mapping_info
    def Conv(self, node):
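        # ONNX Conv stores its weights (OI... layout) and optional bias as extra
        # graph inputs; they are added to omit_nodes so they are materialized as
        # Parameters (referenced via param_attr / bias_attr) instead of ops, and
        # the node itself is emitted as a fluid conv2d / conv3d call, roughly:
        #   <out> = fluid.layers.conv2d(<x>, num_filters=..., filter_size=...,
        #                               param_attr='<w_name>', bias_attr=...)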
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_w = self.graph.get_input_node(node, idx=1, copy=True)
        val_y = self.graph.get_node(node.layer.output[0], copy=True)

        self.omit_nodes.append(val_w.layer_name)

        has_bias = len(node.layer.input) == 3
        if has_bias:
            val_b = self.graph.get_input_node(node, idx=2, copy=True)
            self.omit_nodes.append(val_b.layer_name)
        auto_pad = node.get_attr('auto_pad', 'NOTSET')

        kernel_shape = node.get_attr('kernel_shape')
        convnd = len(kernel_shape)
        assert 2 <= convnd <= 3, 'only conv2d and conv3d is supported'
        num_out_channels = val_w.out_shapes[0][0]  # OI...
        fluid_op = 'conv{}d'.format(convnd)

        num_groups = node.get_attr('group', 1)
        strides = node.get_attr('strides', [1] * convnd)  # optional
        dilations = node.get_attr('dilations', [1] * convnd)  # optional
        pads = node.get_attr('pads', [0] * (convnd * 2))  # optional

        input_shape = val_x.out_shapes[0]
        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

        if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
            pad_h = get_same_padding(input_shape[2], kernel_shape[0],
                                     strides[0])
            pad_w = get_same_padding(input_shape[3], kernel_shape[1],
                                     strides[1])
            paddings = pad_h + pad_w

        attr = {
            "num_filters": num_out_channels,
            "filter_size": kernel_shape,
            "stride": strides,
            "padding": paddings,
            "dilation": dilations,
            "groups": num_groups,
            'param_attr': string(val_w.layer_name),
            "name": string(node.layer_name)
        }
        if has_bias:
            attr["bias_attr"] = string(val_b.layer_name)
        else:
            attr["bias_attr"] = False
        node.fluid_code.add_layer(
            fluid_op, inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def ConvTranspose(self, node):
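        # ONNX ConvTranspose weights use an IO... layout, so the output channel
        # count is read from dim 1 of the weight shape; the op is emitted as
        # conv2d_transpose / conv3d_transpose with an explicitly computed
        # output_size (see the formula below).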
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_w = self.graph.get_input_node(node, idx=1, copy=True)
        val_b = None
        if len(node.layer.input) > 2:
            val_b = self.graph.get_input_node(node, idx=2, copy=True)
            self.omit_nodes.append(val_b.layer_name)
        self.omit_nodes.append(val_w.layer_name)

        val_y = self.graph.get_node(node.layer.output[0], copy=True)

        auto_pad = node.get_attr('auto_pad', 'NOTSET')
        out_padding = node.get_attr('output_padding', [0, 0])
        kernel_shape = node.get_attr('kernel_shape')
        assert kernel_shape, 'kernel_shape not inferred'
        convnd = len(kernel_shape)
        assert 2 <= convnd <= 3, 'only conv2d_transpose and conv3d_transpose supported'
        num_out_channels = val_w.out_shapes[0][1]
        fluid_op = 'conv{}d_transpose'.format(convnd)

        num_groups = node.get_attr('group', 1)
        strides = node.get_attr('strides', [1] * convnd)
        dilations = node.get_attr('dilations', [1] * convnd)
        output_size = node.get_attr('output_shape', [])
        pads = node.get_attr('pads', [0] * (convnd * 2))

        paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

        output_size = [0, 0]

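        # Deconvolution output size per spatial dim:
        #   out = (in - 1) * stride - 2 * pad + dilation * (kernel - 1) + 1 + out_padding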
        output_size[0] = (val_x.out_shapes[0][2] - 1
                          ) * strides[0] - 2 * paddings[0] + dilations[0] * (
                              kernel_shape[0] - 1) + 1 + out_padding[0]
        output_size[1] = (val_x.out_shapes[0][3] - 1
                          ) * strides[1] - 2 * paddings[1] + dilations[1] * (
                              kernel_shape[1] - 1) + 1 + out_padding[1]
        attr = {
            'num_filters': num_out_channels,
            'output_size': output_size or None,
            'filter_size': kernel_shape,
            'padding': paddings,
            'stride': strides,
            'dilation': dilations,
            'groups': num_groups,
            'param_attr': string(val_w.layer_name),
            'bias_attr': None if val_b is None else string(val_b.layer_name),
            'name': string(node.layer_name),
        }
        node.fluid_code.add_layer(
            fluid_op, inputs=val_x, output=node, param_attr=attr)

    @print_mapping_info
    def GRU(self, node):
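        # ONNX GRU inputs (single direction assumed below):
        #   X  [seq_len, batch, input_size]
        #   W  [num_directions, 3 * hidden_size, input_size]   (input weights)
        #   R  [num_directions, 3 * hidden_size, hidden_size]  (recurrent weights)
        # plus optional B [num_directions, 6 * hidden_size], sequence_lens and
        # initial_h. The op is rebuilt from squeeze / matmul / dynamic_gru below.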
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_w = self.graph.get_input_node(node, idx=1, copy=True)
        val_r = self.graph.get_input_node(node, idx=2, copy=True)

        val_b = None
        val_len = None
        val_xh = None
        miss_arg_num = 0
        num_ipt = len(node.layer.input)
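        # Optional inputs B (bias), sequence_lens and initial_h may be omitted;
        # track how many are missing so the remaining ones keep the right index.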
        if num_ipt > 3 and node.layer.input[3] != '':
            val_b = self.graph.get_input_node(node, idx=3, copy=True)
        else:
            miss_arg_num += 1
        if num_ipt > 4 and node.layer.input[4] != '':
            val_len = self.graph.get_input_node(
                node, idx=4 - miss_arg_num, copy=True)
        else:
            miss_arg_num += 1
        if num_ipt > 5 and node.layer.input[5] != '':
            val_xh = self.graph.get_input_node(
                node, idx=5 - miss_arg_num, copy=True)

        x_shape = val_x.out_shapes[0]

        assert x_shape[1] == 1, 'only X with batch_size = 1 supported'
        assert node.get_attr('clip', None) is None, 'clipping not supported'

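        # If the hidden_size attribute is absent, recover it from the weight
        # shapes: last dim of R, rows of W / 3, B / 6, or last dim of initial_h.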
        hidden_size = node.get_attr('hidden_size', None)
        if hidden_size is None:
            r_shape = val_r.out_shapes[0]
            if r_shape:
                hidden_size = r_shape[-1]
        if hidden_size is None:
            w_shape = val_w.out_shapes[0]
            if w_shape:
                hidden_size = w_shape[-2] // 3
        if hidden_size is None and val_b:
            b_shape = val_b.out_shapes[0]
            if b_shape:
                hidden_size = b_shape[-1] // 6
        if hidden_size is None and val_xh:
            xh_shape = val_xh.out_shapes[0]
            if xh_shape:
                hidden_size = xh_shape[-1]

        direction = node.get_attr('direction', 'forward')
        assert direction != 'bidirectional', 'direction = bidirectional not supported'

        activations = node.get_attr('activations', ['Sigmoid', 'Tanh'])
        assert len(activations) == 2, 'bidirectional operation not supported'

        assert node.get_attr('linear_before_reset',
                             0) == 0, 'only linear_before_reset = 0 supported'

        activations = [s.lower() for s in activations]
        gate_activation, candidate_activation = activations
        is_reverse = direction == 'reverse'

        var_x0 = node.layer_name + '_x0'
        node.fluid_code.add_layer(
            'squeeze',
            inputs=val_x,
            output=var_x0,
            param_attr={'axes': [1],
                        'name': string(var_x0)})

        var_w0 = node.layer_name + '_w0'
        node.fluid_code.add_layer(
            'squeeze',
            inputs=val_w,
            output=var_w0,
            param_attr={'axes': [0],
                        'name': string(var_w0)})

        var_fc = node.layer_name + '_fc'
        var_mm = (node.layer_name + '_mm') if val_b else var_fc
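        # Pre-compute the input-to-hidden projection x * W^T (transpose_y=1);
        # dynamic_gru expects this product, plus the input bias if any, as its input.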
        node.fluid_code.add_layer(
            'matmul',
            inputs={'x': var_x0,
                    'y': var_w0},
            output=var_mm,
            param_attr={
                'transpose_x': 0,
                'transpose_y': 1,
                'name': string(var_mm)
            })

        var_r0 = node.layer_name + '_r0'
        node.fluid_code.add_layer(
            'squeeze',
            inputs=val_r,
            output=var_r0,
            param_attr={'axes': [0],
                        'name': string(var_r0)})

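        # dynamic_gru's param_attr holds the recurrent weight in
        # [hidden_size, 3 * hidden_size] layout, hence the transpose of R below.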
        var_r0t = node.layer_name + '_r0t'

        node.fluid_code.add_layer(
            'transpose',
            inputs=var_r0,
            output=var_r0t,
            param_attr={'perm': [1, 0],
                        'name': string(var_r0t)})
        if val_b:
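            # ONNX packs the bias as [Wb, Rb] (each 3 * hidden_size wide): the
            # input half is added onto the matmul result, while the recurrent
            # half is handed to dynamic_gru as bias_attr.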
            var_bi = node.layer_name + '_bi'
            var_bh = node.layer_name + '_bh'
            node.fluid_code.add_layer(
                'split',
                inputs=val_b,
                output=var_bi + ',' + var_bh,
                param_attr={
                    'dim': 1,
                    'num_or_sections': [hidden_size * 3, hidden_size * 3],
                    'name': string(node.layer_name + '.b/split')
                })
            var_bi0 = node.layer_name + '_bi0'
            node.fluid_code.add_layer(
                'squeeze',
                inputs=var_bi,
                output=var_bi0,
                param_attr={'axes': [0],
                            'name': string(var_bi0)})

            node.fluid_code.add_layer(
                'elementwise_add',
                inputs=[var_mm, var_bi0],
                output=var_fc,
                param_attr={
                    'axis': 1,
                    'name': string(node.layer_name + '.i/bias')
                })

        if val_xh:
            var_xh0 = node.layer_name + '_xh0'
            node.fluid_code.add_layer(
                'squeeze',
                inputs=val_xh,
                output=var_xh0,
                param_attr={'axes': [1],
                            'name': string(var_xh0)})
        var_y00 = node.layer_name + '_y00'

        attr = {
            'origin_mode': True,
            'h_0': var_xh0 if val_xh else None,
            'is_reverse': is_reverse,
            'gate_activation': string(gate_activation),
            'candidate_activation': string(candidate_activation),
            'param_attr': string(var_r0t),
            'bias_attr': string(var_bh) if val_b else False,
        }
        node.fluid_code.add_layer(
            'dynamic_gru',
            inputs=var_fc + ',' + str(hidden_size),
            output=var_y00,
            param_attr=attr)

        num_opt = len(node.layer.output)
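        # ONNX GRU can expose two outputs: Y (the full hidden-state sequence)
        # and Y_h (the final state). Both are reconstructed here from the
        # dynamic_gru result by unsqueezing the dims squeezed earlier.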

        if num_opt > 0 and node.layer.output[0] != '':
            node.fluid_code.add_layer(
                'unsqueeze',
                inputs=var_y00,
                output=node.layer.output[0],
                param_attr={
                    'axes': [1, 1],
                    'name': string(node.layer.output[0])
                })
        if num_opt > 1 and node.layer.output[1] != '':
            node.fluid_code.add_layer(
                'unsqueeze',
                inputs=var_y00,
                output=node.layer.output[1],
                param_attr={
                    'axes': [1, 1],
                    'name': string(node.layer.output[1])
                })