#   Copyright (c) 2019  PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from x2paddle.decoder.tf_decoder import TFGraph
from x2paddle.core.op_mapper import OpMapper
from x2paddle.core.util import *
import inspect
import math
import numpy
import sys


# compute padding size for SAME mode
def get_same_padding(in_size, kernel_size, stride):
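    # SAME keeps ceil(in_size / stride) output positions; the shortfall is
    # split as evenly as possible, with the extra pixel on the bottom/right.
    # e.g. in_size=7, kernel_size=3, stride=2 -> new_size=4, pad_size=2 -> [1, 1]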
    new_size = int(math.ceil(in_size * 1.0 / stride))
    pad_size = (new_size - 1) * stride + kernel_size - in_size
    if pad_size < 0:
        pad_size = 0
    pad0 = int(pad_size / 2)
    pad1 = pad_size - pad0
    return [pad0, pad1]


def nhwc_dim_to_nchw(node, dim):
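    # Translate axis indices from the node's TF layout to its Paddle layout by
    # matching layout letters, e.g. dim=3 ('C' in 'NHWC') -> 'NCHW'.index('C') == 1.
    # Accepts a single axis or a list of axes.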
    tf_data_format = list(node.tf_data_format)
    pd_data_format = list(node.pd_data_format)
    if isinstance(dim, list):
        for i in range(len(dim)):
            char = tf_data_format[dim[i]]
            dim[i] = pd_data_format.index(char)
    else:
        char = tf_data_format[dim]
        dim = pd_data_format.index(char)
    return dim


class TFOpMapper(OpMapper):
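    # Ops that map one-to-one onto a fluid layer. Each value is
    # [fluid_op_name, {tf_attr_name: fluid_attr_name}, ...]; the optional
    # dicts rename attributes that are copied over unchanged.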
    directly_map_ops = {
        'Relu': ['relu'],
        'Relu6': ['relu6'],
        'Shape': ['shape'],
        'Abs': ['abs'],
        'Sigmoid': ['sigmoid'],
        'Exp': ['exp'],
        'Rsqrt': ['rsqrt'],
        'swish_f32': ['swish'],
        'Tanh': ['tanh'],
        'LeakyRelu': ['leaky_relu', {
            'alpha': 'alpha'
        }]
    }
    elementwise_ops = {
        'Add': 'elementwise_add',
        'RealDiv': 'elementwise_div',
        'Sub': 'elementwise_sub',
        'Maximum': 'elementwise_max',
        'Mul': 'elementwise_mul',
        'FloorDiv': 'elementwise_floordiv'
    }

    def __init__(self, decoder):
        super(TFOpMapper, self).__init__()
        self.decoder = decoder
        self.graph = decoder.tf_graph
        self.batch_node = None
        self.weights = dict()
        self.omit_nodes = list()
        self.used_custom_layers = dict()

        not_placeholder = list()
        for name in self.graph.input_nodes:
            if self.graph.get_node(name).layer_type != "Placeholder":
                not_placeholder.append(name)
        for name in not_placeholder:
            idx = self.graph.input_nodes.index(name)
            del self.graph.input_nodes[idx]

        sys.stderr.write("Total nodes: {}\n".format(len(self.graph.topo_sort)))
        unsupported_ops = set()
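        # Once one unsupported op is found, the remaining iterations only
        # collect further unsupported op types so all of them get reported.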
        for i, node_name in enumerate(self.graph.topo_sort):
            sys.stderr.write("\rConverting node {} ...    ".format(i + 1))
            node = self.graph.get_node(node_name)
            op = node.layer_type
            if op in self.directly_map_ops:
                if len(unsupported_ops) > 0:
                    continue
                self.directly_map(node)
            elif op in self.elementwise_ops:
                if len(unsupported_ops) > 0:
                    continue
                self.elementwise_map(node)
            elif hasattr(self, op):
                if len(unsupported_ops) > 0:
                    continue
                func = getattr(self, op)
                func(node)
            else:
                unsupported_ops.add(op)
        if len(unsupported_ops) > 0:
            sys.stderr.write(
                "=========={} Ops are not supported yet======\n".format(
                    len(unsupported_ops)))
            for op in unsupported_ops:
                sys.stderr.write("========== {} ==========\n".format(op))
            sys.exit(-1)
        sys.stderr.write('\nDone!\n')

    def add_omit_nodes(self, in_node_name, out_node_name):
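        # Detach in_node from out_node and record it as consumed at convert
        # time (e.g. a Const shape/kernel input), so no layer is emitted for it.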
        in_node = self.graph.get_node(in_node_name)
        out_node = self.graph.get_node(out_node_name)
        index = in_node.outputs.index(out_node_name)
        del in_node.outputs[index]
        index = out_node.inputs.index(in_node_name)
        del out_node.inputs[index]
        self.omit_nodes.append(in_node.layer_name)

    def directly_map(self, node):
        assert node.layer_type in self.directly_map_ops
        op_info = self.directly_map_ops[node.layer_type]
        input = self.graph.get_node(node.layer.input[0], copy=True)
        attr = dict()
        for param in op_info[1:]:
            tf_param_name = list(param.keys())[0]
            pd_param_name = list(param.values())[0]
            tf_param = node.get_attr(tf_param_name)
            attr[pd_param_name] = tf_param
        node.fluid_code.add_layer(op_info[0],
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def elementwise_map(self, node):
        assert node.layer_type in self.elementwise_ops
        op_type = self.elementwise_ops[node.layer_type]
        x = self.graph.get_node(node.layer.input[0], copy=True)
        y = self.graph.get_node(node.layer.input[1], copy=True)
        x_shape = x.out_shapes[0]
        y_shape = y.out_shapes[0]
        if len(x_shape) == 0:
            x_shape = [1]
        if len(y_shape) == 0:
            y_shape = [1]
        # work around Paddle's incomplete broadcasting support: make x the
        # higher-rank operand, which is only safe for commutative ops
        x_input = x
        y_input = y
        if len(x_shape) < len(y_shape):
            unrevertable_ops = [
                "elementwise_sub", "elementwise_div", "elementwise_floordiv",
                "elementwise_mod", "elementwise_pow"
            ]
            if op_type not in unrevertable_ops:
                x_input = y
                y_input = x
                x_shape = y.out_shapes[0]
                y_shape = x.out_shapes[0]
            else:
                raise Exception("Unexpected situation happend")

        if len(x_shape) == 4 and len(y_shape) == 1:
            if x_input.tf_data_format == "NHWC":
                axis = 1
            else:
                axis = -1
            attr = {"axis": axis}
            inputs = {"x": x_input, "y": y_input}
            node.fluid_code.add_layer(op_type,
                                      inputs=inputs,
                                      output=node,
                                      param_attr=attr)
            return

        is_sub_seq = True
        for i in range(len(y_shape)):
            index = -1 * i - 1
            if y_shape[index] != x_shape[index]:
                is_sub_seq = False
        if not is_sub_seq:
            if x_shape.count(-1) > 2:
                x_shape = self.decoder.infer_tensor_shape(x_input)
            if y_shape.count(-1) > 2:
                y_shape = self.decoder.infer_tensor_shape(y_input)
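            # Emulate broadcasting: tile every size-1 axis up to the other
            # operand's size, e.g. x_shape=[2, 3, 1] and y_shape=[2, 1, 4]
            # give x_expand_times=[1, 1, 4] and y_expand_times=[1, 3, 1].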
            x_expand_times = [1] * len(x_shape)
            y_expand_times = [1] * len(y_shape)
            x_need_expand = False
            y_need_expand = False
            for i in range(len(y_shape)):
                index = -1 * i - 1
                if y_shape[index] != x_shape[index]:
                    if y_shape[index] == 1:
                        y_expand_times[index] = x_shape[index]
                        y_need_expand = True
                    elif x_shape[index] == 1:
                        x_expand_times[index] = y_shape[index]
                        x_need_expand = True
                    else:
                        raise Exception("Unexpected situation happend")
            if x_need_expand:
                if len(x_expand_times) == 3 and x.tf_data_format == "NHWC":
                    x_expand_times = [x_expand_times[i] for i in [2, 0, 1]]
                if len(x_expand_times) == 4 and x.tf_data_format == "NHWC":
                    x_expand_times = [x_expand_times[i] for i in [0, 3, 1, 2]]
                attr = {"expand_times": x_expand_times}
                node.fluid_code.add_layer("expand",
                                          inputs=x_input,
                                          output="x_tmp",
                                          param_attr=attr)
                x_input = "x_tmp"
            if y_need_expand:
                if len(y_expand_times) == 3 and y.tf_data_format == "NHWC":
                    y_expand_times = [y_expand_times[i] for i in [2, 0, 1]]
                if len(y_expand_times) == 4 and y.tf_data_format == "NHWC":
                    y_expand_times = [y_expand_times[i] for i in [0, 3, 1, 2]]
                attr = {"expand_times": y_expand_times}
                node.fluid_code.add_layer("expand",
                                          inputs=y_input,
                                          output="y_tmp",
                                          param_attr=attr)
                y_input = "y_tmp"
        inputs = {"x": x_input, "y": y_input}
        node.fluid_code.add_layer(op_type,
                                  inputs=inputs,
                                  output=node,
                                  param_attr=None)

    def Placeholder(self, node):
        shape = node.out_shapes[0]
        assert len(shape) != 0, "Unknown shape for input node [{}].".format(
            node.layer_name)
        if node.tf_data_format == "NHWC" and len(shape) == 4:
            shape = [shape[i] for i in [0, 3, 1, 2]]
        elif node.tf_data_format == "NCHW" and len(shape) == 4:
            self.graph.data_format_propagation(node)
        dtype = node.dtype
        attr = {
            'dtype': string(dtype),
            'shape': shape,
            'name': string(node.layer_name),
            'append_batch_size': False
        }
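        # Remember the node carrying a variable batch dimension; ops such as
        # uniform_random_batch_size_like refer back to it.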
        if shape[0] < 0:
            self.batch_node = node

        node.fluid_code.add_layer("data",
                                  inputs=None,
                                  output=node,
                                  param_attr=attr)

    def Const(self, node):
        shape = node.out_shapes[0]
        dtype = node.dtype
        value = node.value
        initializer = "Constant(0.0)"
        if len(shape) == 0:
            assert value.size == 1, "Unexpected situation happened"
            shape = [1]
            initializer = "Constant({})".format(value)

        self.weights[node.layer_name] = node.value

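        # Constants follow the graph-level layout conversion: 4-D values go
        # NHWC -> NCHW and 3-D values HWC -> CHW, so that axis arithmetic
        # stays consistent with the transposed activations.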
        if node.tf_data_format == "NHWC":
            if len(shape) == 4:
                shape = [shape[i] for i in [0, 3, 1, 2]]
            if len(shape) == 3:
                shape = [shape[i] for i in [2, 0, 1]]
                self.weights[node.layer_name] = numpy.transpose(
                    node.value, (2, 0, 1))
        elif node.tf_data_format == "NCHW":
            if len(shape) == 4:
                self.graph.data_format_propagation(node)

        attr = {
            'dtype': string(dtype),
            'shape': shape,
            'name': string(node.layer_name),
            'default_initializer': initializer
        }
        node.fluid_code.add_layer("create_parameter",
                                  inputs=None,
                                  output=node,
                                  param_attr=attr)

    def Transpose(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        perm = self.graph.get_node(node.layer.input[1], copy=True)
        assert perm.layer_type == "Const", "Perm of transpose OP should be Const"
        del self.weights[perm.layer_name.replace('/', '_')]
        perm.fluid_code.clear()
        perm = perm.value.tolist()

        if perm == [0, 3, 1, 2] and input.tf_data_format == "NHWC":
            input_name = input.layer_name
            if hasattr(input, "index"):
                input_name = input_name + "[{}]".format(input.index)
            node.fluid_code.add_layer("{} = {}").format(node.layer_name,
                                                        input_name)
            node.tf_data_format = "NCHW"
            self.graph.data_format_propagation(node)
        elif perm == [0, 2, 3, 1] and input.tf_data_format == "NCHW":
            input_name = input.layer_name
            if hasattr(input, "index"):
                input_name = input_name + "[{}]".format(input.index)
            node.fluid_code.add_layer("{} = {}").format(node.layer_name,
                                                        input_name)
            node.tf_data_format = "NHWC"
            self.graph.data_format_propagation(node)
        elif len(input.out_shapes[0]) > 4:
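            # For rank > 4 the layouts are tracked as letter lists; re-express
            # the TF permutation in Paddle axis order by matching the letters
            # of both layouts, then permute the layout strings themselves.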
            tf_data_format = list(input.tf_data_format)
            pd_data_format = list(input.pd_data_format)
            new_perm = [i for i in range(len(perm))]
            for i in range(len(perm)):
                char0 = tf_data_format[i]
                char1 = tf_data_format[perm[i]]
                index0 = pd_data_format.index(char0)
                index1 = pd_data_format.index(char1)
                new_perm[index0] = index1
            node.tf_data_format = [tf_data_format[i] for i in perm]
            node.pd_data_format = [pd_data_format[i] for i in perm]
            attr = {'perm': new_perm}
            node.fluid_code.add_layer("transpose",
                                      inputs=input,
                                      output=node,
                                      param_attr=attr)
        elif len(node.out_shapes[0]) != 4:
            attr = {'perm': perm}
            node.fluid_code.add_layer("transpose",
                                      inputs=input,
                                      output=node,
                                      param_attr=attr)
        else:
            raise Exception("Unexpected situation happend in Transpose OP")

    def MaxPool(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)

        in_shape = input.out_shapes[0]
        if in_shape.count(-1) > 2:
            in_shape = self.decoder.infer_tensor(input).shape

        k_size = node.get_attr("ksize")
        strides = node.get_attr("strides")
        data_format = node.get_attr("data_format").decode()
        pad_mode = node.get_attr("padding").decode()
        channel_first = data_format == "NCHW"
        padding = 0

        if not channel_first:
            in_shape = [in_shape[i] for i in [0, 3, 1, 2]]
            strides = [strides[i] for i in [0, 3, 1, 2]]
            k_size = [k_size[i] for i in [0, 3, 1, 2]]
        else:
            self.graph.data_format_propagation(node)

        if pad_mode == "SAME":
            pad_h = get_same_padding(in_shape[2], k_size[2], strides[2])
            pad_w = get_same_padding(in_shape[3], k_size[3], strides[3])
            pad_h = pad_h[0] + pad_h[1]
            pad_w = pad_w[0] + pad_w[1]
            if pad_h != 0 or pad_w != 0:
                attr = {"paddings": [0, pad_h, 0, pad_w], "pad_value": -10000.0}
                node.fluid_code.add_layer("pad2d",
                                          inputs=input,
                                          output=node,
                                          param_attr=attr)
                input = node
        attr = {
            "pool_size": k_size[2:4],
            "pool_type": string("max"),
            "pool_padding": padding,
            "pool_stride": strides[2:4]
        }
        node.fluid_code.add_layer("pool2d",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Conv2D(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        kernel = self.graph.get_node(node.layer.input[1], copy=True)
        assert kernel.layer_type == "Const", "Kernel of Conv2D should be Const"
        self.add_omit_nodes(kernel.layer_name, node.layer_name)

        in_shape = input.out_shapes[0]
        if in_shape.count(-1) > 2:
            in_shape = self.decoder.infer_tensor(input).shape
        k_size = kernel.out_shapes[0]
        if k_size.count(-1) > 2:
            k_size = self.decoder.infer_tensor(kernel).shape

        strides = node.get_attr("strides")
        dilations = node.get_attr("dilations")
        data_format = node.get_attr("data_format").decode()
        pad_mode = node.get_attr("padding").decode()
        channel_first = data_format == "NCHW"
        padding = 0

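        # TF Conv2D kernels are laid out HWIO (height, width, in, out);
        # Paddle expects OIHW, hence the (3, 2, 0, 1) transpose.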
        self.weights[kernel.layer_name.replace('/', '_')] = numpy.transpose(
            kernel.value, (3, 2, 0, 1))

        if not channel_first:
            in_shape = [in_shape[i] for i in [0, 3, 1, 2]]
            strides = [strides[i] for i in [0, 3, 1, 2]]
            dilations = [dilations[i] for i in [0, 3, 1, 2]]
        else:
            self.graph.data_format_propagation(node)

        if pad_mode == "SAME":
            pad_h = get_same_padding(in_shape[2], k_size[0], strides[2])
            pad_w = get_same_padding(in_shape[3], k_size[1], strides[3])
            if pad_h[0] == pad_h[1] and pad_w[0] == pad_w[1]:
                padding = [pad_h[0], pad_w[0]]
            else:
                attr = {"paddings": pad_h + pad_w, "pad_value": 0.0}
                node.fluid_code.add_layer("pad2d",
                                          inputs=input,
                                          output=node,
                                          param_attr=attr)
                input = node
        attr = {
            "bias_attr": False,
            "param_attr": string(kernel.layer_name),
            "num_filters": k_size[3],
            "filter_size": k_size[0:2],
            "stride": strides[2:4],
            "dilation": dilations[2:4],
            "padding": padding
        }
        node.fluid_code.add_layer("conv2d",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def BiasAdd(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        bias = self.graph.get_node(node.layer.input[1], copy=True)
        axis = -1
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            axis = 1
        inputs = {"x": input, "y": bias}
        attr = {"axis": axis}
        node.fluid_code.add_layer("elementwise_add",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=attr)

    def FusedBatchNorm(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        gamma = self.graph.get_node(node.layer.input[1], copy=True)
        beta = self.graph.get_node(node.layer.input[2], copy=True)
        moving_mean = self.graph.get_node(node.layer.input[3], copy=True)
        moving_var = self.graph.get_node(node.layer.input[4], copy=True)
        data_format = node.get_attr("data_format").decode()
        channel_first = data_format == "NCHW"

        assert gamma.layer_type == "Const"
        assert beta.layer_type == "Const"
        assert moving_mean.layer_type == "Const"
        assert moving_var.layer_type == "Const"
        self.add_omit_nodes(gamma.layer_name, node.layer_name)
        self.add_omit_nodes(beta.layer_name, node.layer_name)
        self.add_omit_nodes(moving_mean.layer_name, node.layer_name)
        self.add_omit_nodes(moving_var.layer_name, node.layer_name)
        if channel_first:
            self.graph.data_format_propagation(node)

        attr = {
            "epsilon": node.get_attr("epsilon"),
            "param_attr": string(gamma.layer_name),
            "bias_attr": string(beta.layer_name),
            "moving_mean_name": string(moving_mean.layer_name),
            "moving_variance_name": string(moving_var.layer_name),
            "is_test": True
        }

        node.fluid_code.add_layer("batch_norm",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def DepthwiseConv2dNative(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        kernel = self.graph.get_node(node.layer.input[1], copy=True)
        assert kernel.layer_type == "Const", "Kernel of DepthwiseConv2DNative should be Const"
        self.add_omit_nodes(kernel.layer_name, node.layer_name)

        in_shape = input.out_shapes[0]
        if in_shape.count(-1) > 2:
            in_shape = self.decoder.infer_tensor(input).shape
        k_size = kernel.out_shapes[0]
        if k_size.count(-1) > 2:
            k_size = self.decoder.infer_tensor(kernel).shape

        strides = node.get_attr("strides")
        dilations = node.get_attr("dilations")
        data_format = node.get_attr("data_format").decode()
        pad_mode = node.get_attr("padding").decode()
        channel_first = data_format == "NCHW"
        padding = 0

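        # TF depthwise kernels are laid out HWCM (height, width, in_channels,
        # channel_multiplier); (2, 3, 0, 1) reorders them to CMHW to suit the
        # grouped conv2d emitted below.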
        self.weights[kernel.layer_name.replace('/', '_')] = numpy.transpose(
            kernel.value, (2, 3, 0, 1))

        if not channel_first:
            in_shape = [in_shape[i] for i in [0, 3, 1, 2]]
            strides = [strides[i] for i in [0, 3, 1, 2]]
            dilations = [dilations[i] for i in [0, 3, 1, 2]]
        else:
            self.graph.data_format_propagation(node)

        if pad_mode == "SAME":
            pad_h = get_same_padding(in_shape[2], k_size[0], strides[2])
            pad_w = get_same_padding(in_shape[3], k_size[1], strides[3])
            if pad_h[0] == pad_h[1] and pad_w[0] == pad_w[1]:
                padding = [pad_h[0], pad_w[0]]
            else:
                attr = {"paddings": pad_h + pad_w, "pad_value": 0.0}
                node.fluid_code.add_layer("pad2d",
                                          inputs=input,
                                          output=node,
                                          param_attr=attr)
                input = node

        attr = {
            "bias_attr": False,
            "param_attr": string(kernel.layer_name),
            "num_filters": in_shape[1],
            "filter_size": k_size[0:2],
            "stride": strides[2:4],
            "dilation": dilations[2:4],
            "groups": k_size[3] * in_shape[1],
            "use_cudnn": False,
            "padding": padding
        }
        node.fluid_code.add_layer("conv2d",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Reshape(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        param = self.graph.get_node(node.layer.input[1], copy=True)
        is_variable = False
        if param.layer_type == "Const":
            attr = {"shape": param.value.tolist()}
            self.add_omit_nodes(param.layer_name, node.layer_name)
        else:
            # trick to handle a shape that is a runtime tensor in TensorFlow
            shape = self.decoder.infer_shape_tensor(param, node.out_shapes[0])
            if shape.count(-1) <= 1:
                attr = {"shape": shape}
                self.add_omit_nodes(param.layer_name, node.layer_name)
            elif shape.count(-1) == 2 and shape[0] == -1:
                shape[0] = 0
                attr = {"shape": shape}
                self.add_omit_nodes(param.layer_name, node.layer_name)
            else:
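                # The target shape is only known at runtime: flatten it, split
                # it into scalar variables, and splice them back into a list
                # literal like "[<node>[0], <node>[1], ...]" that reshape accepts.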
                assert len(param.out_shapes[0]
                           ) == 1, "Unexpected situation of shape parameter"
                attr = {"shape": [-1]}
                node.fluid_code.add_layer("reshape",
                                          inputs=param,
                                          output="shape_param",
                                          param_attr=attr)
                attr = {"num_or_sections": param.out_shapes[0][0], "dim": 0}
                node.fluid_code.add_layer("split",
                                          inputs="shape_param",
                                          output=node,
                                          param_attr=attr)
                new_param = "["
                for i in range(param.out_shapes[0][0]):
                    new_param += (node.layer_name + "[{}]".format(i) + ", ")
                new_param = new_param.strip(", ") + "]"
                attr = {"shape": new_param}
                is_variable = True

        # to change [192, -1] -> [-1, 192], always put -1 in the first dimension
        # (optimization for Paddle-Lite)
        in_shape = input.out_shapes[0]
        if is_variable and in_shape.count(-1) < 1:
            total_size = 1
            for i in range(len(in_shape)):
                total_size *= in_shape[i]
            for i in range(len(attr["shape"])):
                if attr["shape"][i] == 0:
                    attr["shape"][i] = in_shape[i]
                if attr["shape"][i] != -1:
                    total_size /= attr["shape"][i]
            if attr["shape"].count(-1) > 0:
                index = attr["shape"].index(-1)
                attr["shape"][index] = int(total_size)
                attr["shape"][0] = -1

        if len(input.out_shapes[0]) == 4 and node.tf_data_format == "NHWC":
            if len(attr["shape"]) < 3:
                perm = {"perm": [0, 2, 3, 1]}
                node.fluid_code.add_layer("transpose",
                                          inputs=input,
                                          output=node,
                                          param_attr=perm)
                node.fluid_code.add_layer("reshape",
                                          inputs=node,
                                          output=node,
                                          param_attr=attr)
                return

        if len(attr["shape"]) == 4 and node.tf_data_format == "NHWC":
            input_shape = self.decoder.infer_tensor(input).shape
            if input_shape[1] == attr["shape"][1]:
                attr["shape"] = [attr["shape"][i] for i in [0, 3, 1, 2]]
            else:
                perm = {"perm": [0, 2, 3, 1]}
                node.fluid_code.add_layer("transpose",
                                          inputs=input,
                                          output=node,
                                          param_attr=perm)
                node.fluid_code.add_layer("reshape",
                                          inputs=node,
                                          output=node,
                                          param_attr=attr)
                perm = {"perm": [0, 3, 1, 2]}
                node.fluid_code.add_layer("transpose",
                                          inputs=node,
                                          output=node,
                                          param_attr=perm)
                return
        node.fluid_code.add_layer("reshape",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def AvgPool(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)

        in_shape = input.out_shapes[0]
        if in_shape.count(-1) > 2:
            in_shape = self.decoder.infer_tensor(input).shape

        k_size = node.get_attr("ksize")
        strides = node.get_attr("strides")
        data_format = node.get_attr("data_format").decode()
        pad_mode = node.get_attr("padding").decode()
        channel_first = data_format == "NCHW"

        if not channel_first:
            in_shape = [in_shape[i] for i in [0, 3, 1, 2]]
            strides = [strides[i] for i in [0, 3, 1, 2]]
            k_size = [k_size[i] for i in [0, 3, 1, 2]]
        else:
            self.graph.data_format_propagation(node)

        attr = {
            "pool_size": k_size[2:4],
            "pool_type": string("avg"),
            "pool_stride": strides[2:4]
        }
        if pad_mode == "SAME":
            pad_h = get_same_padding(in_shape[2], k_size[2], strides[2])
            pad_w = get_same_padding(in_shape[3], k_size[3], strides[3])
            assert pad_h[0] == pad_h[1] and pad_w[0] == pad_w[
                1], "Cannot map AvgPool"
            attr["pool_padding"] = [pad_h[0], pad_w[0]]
        node.fluid_code.add_layer("pool2d",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def SplitV(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        num_sections = self.graph.get_node(node.layer.input[1], copy=True)
        dim = self.graph.get_node(node.layer.input[2], copy=True)
        assert num_sections.layer_type == "Const"
        assert dim.layer_type == "Const"
        self.add_omit_nodes(num_sections.layer_name, node.layer_name)
        self.add_omit_nodes(dim.layer_name, node.layer_name)
        dim = dim.value
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            dim = nhwc_dim_to_nchw(input, dim)
        attr = {
            "num_or_sections": num_sections.value.tolist(),
            "dim": dim.value
        }
        node.fluid_code.add_layer("split",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def ConcatV2(self, node):
        inputs = [
            self.graph.get_node(name, copy=True)
            for name in node.layer.input[:-1]
        ]
        axis = self.graph.get_node(node.layer.input[-1], copy=True)
        assert axis.layer_type == "Const"
        self.add_omit_nodes(axis.layer_name, node.layer_name)
        axis = axis.value
        if inputs[0].tf_data_format == "NHWC" and len(
                inputs[0].out_shapes[0]) == 4:
            axis = nhwc_dim_to_nchw(inputs[0], axis)
        attr = {"axis": axis}
        node.fluid_code.add_layer("concat",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=attr)

    def Tile(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        expand_times = self.graph.get_node(node.layer.input[1], copy=True)
        self.add_omit_nodes(expand_times.layer_name, node.layer_name)
        if expand_times.layer_type == "Const":
            expand_times = expand_times.value.tolist()
        else:
            expand_times = self.decoder.infer_shape_tensor(expand_times)
        if input.tf_data_format == "NHWC":
            if len(input.out_shapes[0]) == 4:
                expand_times = [expand_times[i] for i in [0, 3, 1, 2]]
            elif len(input.out_shapes[0]) == 3:
                expand_times = [expand_times[i] for i in [2, 0, 1]]
        for i in range(len(expand_times)):
            if expand_times[i] < 0:
                expand_times[i] = 1

        attr = {"expand_times": expand_times}
        node.fluid_code.add_layer("expand",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Pack(self, node):
        inputs = [
            self.graph.get_node(name, copy=True) for name in node.layer.input
        ]
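        # stack inserts a new axis, so a placeholder letter is spliced into
        # both layout strings to keep later NHWC <-> NCHW lookups consistent.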
        axis = node.get_attr("axis")
        if inputs[0].tf_data_format == "NHWC" and len(
                inputs[0].out_shapes[0]) == 4:
            tf_data_format = list(inputs[0].tf_data_format)
            tf_data_format.insert(axis, str(len(tf_data_format)))
            axis = nhwc_dim_to_nchw(inputs[0], axis)
            pd_data_format = list(inputs[0].pd_data_format)
            pd_data_format.insert(axis, str(len(pd_data_format)))
            node.tf_data_format = "".join(tf_data_format)
            node.pd_data_format = "".join(pd_data_format)

        attr = {"axis": axis}
        node.fluid_code.add_layer("stack",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=attr)

    def Pad(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        paddings = self.graph.get_node(node.layer.input[1], copy=True)
        assert paddings.layer_type == "Const", "Padding should be Const"
        self.add_omit_nodes(paddings.layer_name, node.layer_name)
        paddings = paddings.value.flatten().tolist()
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            paddings = [paddings[i] for i in [0, 1, 6, 7, 2, 3, 4, 5]]

        pad_op = "pad"
        if len(input.out_shapes[0]) == 4:
            if paddings[0] + paddings[1] + paddings[2] + paddings[3] == 0:
                paddings = paddings[4:]
                pad_op = "pad2d"
        attr = {"paddings": paddings}
        node.fluid_code.add_layer(pad_op,
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Range(self, node):
        start = self.graph.get_node(node.layer.input[0], copy=True)
        limit = self.graph.get_node(node.layer.input[1], copy=True)
        delta = self.graph.get_node(node.layer.input[2], copy=True)
        self.add_omit_nodes(start.layer_name, node.layer_name)
        self.add_omit_nodes(limit.layer_name, node.layer_name)
        self.add_omit_nodes(delta.layer_name, node.layer_name)
        if start.layer_type == "Const":
            start = start.value
        else:
            start = self.decoder.infer_tensor(start)
        if limit.layer_type == "Const":
            limit = limit.value
        else:
            limit = self.decoder.infer_tensor(limit)
        if delta.layer_type == "Const":
            delta = delta.value
        else:
            delta = self.decoder.infer_tensor(delta)

        inputs = {"start": start, "end": limit, "step": delta}
        attr = {"dtype": string(node.dtype)}
        node.fluid_code.add_layer("range",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=attr)

    def Mean(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        reduce_idx = self.graph.get_node(node.layer.input[1], copy=True)
        assert reduce_idx.layer_type == "Const", "Only Const parameter [reduce_idx] is supported"
        dims = reduce_idx.value.tolist()
        keep_dims = node.get_attr("keep_dims")

        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            for i in range(len(dims)):
                dims[i] = nhwc_dim_to_nchw(input, dims[i])

        attr = {"dim": dims, "keep_dim": keep_dims}
        node.fluid_code.add_layer("reduce_mean",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def MatMul(self, node):
        x = self.graph.get_node(node.layer.input[0], copy=True)
        y = self.graph.get_node(node.layer.input[1], copy=True)
        transpose_a = node.get_attr('transpose_a')
        transpose_b = node.get_attr('transpose_b')
        inputs = {"x": x, "y": y}
        # work around a Paddle shape-inference problem
        # (can be removed after Paddle 1.6)
        if x.out_shapes[0][-1] < 0 and y.out_shapes[0][0] > 0:
            shape = x.out_shapes[0]
            shape[-1] = y.out_shapes[0][0]
            attr = {"shape": shape}
            node.fluid_code.add_layer("reshape",
                                      inputs=x,
                                      output=x,
                                      param_attr=attr)
        attr = {"transpose_x": transpose_a, "transpose_y": transpose_b}
        node.fluid_code.add_layer("matmul",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=attr)

    def ArgMax(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        axis = self.graph.get_node(node.layer.input[1], copy=True)
        assert axis.layer_type == "Const", "ArgMax only support Const parameter"
        self.add_omit_nodes(axis.layer_name, node.layer_name)
        axis = axis.value
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            axis = nhwc_dim_to_nchw(input, axis)
        attr = {"axis": axis}
        node.fluid_code.add_layer("argmax",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def StridedSlice(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        begin = self.graph.get_node(node.layer.input[1], copy=True)
        end = self.graph.get_node(node.layer.input[2], copy=True)
        strides = self.graph.get_node(node.layer.input[3], copy=True)
        assert begin.layer_type == "Const"
        assert end.layer_type == "Const"
        assert strides.layer_type == "Const"
        self.add_omit_nodes(begin.layer_name, node.layer_name)
        self.add_omit_nodes(end.layer_name, node.layer_name)
        self.add_omit_nodes(strides.layer_name, node.layer_name)
        strides = strides.value.tolist()
        assert len(set(strides)) == 1 and strides[0] == 1

        begin = begin.value.tolist()
        end = end.value.tolist()
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            begin = [begin[i] for i in [0, 3, 1, 2]]
            end = [end[i] for i in [0, 3, 1, 2]]

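        # TF encodes "slice to the end" as end=0 (with masks); fluid's slice
        # has no mask support, so substitute a large sentinel bound instead.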
        for i in range(len(end)):
            if end[i] == 0:
                end[i] = 999999

        attr = {
            "axes": [i for i in range(len(strides))],
            "starts": begin,
            "ends": end
        }
        node.fluid_code.add_layer("slice",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Slice(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        begin = self.graph.get_node(node.layer.input[1], copy=True)
        size = self.graph.get_node(node.layer.input[2], copy=True)
        self.add_omit_nodes(begin.layer_name, node.layer_name)
        self.add_omit_nodes(size.layer_name, node.layer_name)
        if begin.layer_type == "Const":
            begin = begin.value.tolist()
        else:
            begin = self.decoder.infer_tensor(begin).tolist()
        if size.layer_type == "const":
            size = size.value.tolist()
        else:
            size = self.decoder.infer_tensor(size).tolist()

        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            size = [size[i] for i in [0, 3, 1, 2]]
            begin = [begin[i] for i in [0, 3, 1, 2]]

        for i in range(len(size)):
            if size[i] < 0:
                size[i] = 99999999
            else:
                size[i] = size[i] + begin[i]

        attr = {
            "axes": [i for i in range(len(size))],
            "starts": begin,
            "ends": size
        }
        node.fluid_code.add_layer("slice",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Conv2DBackpropInput(self, node):
        out_shape = self.graph.get_node(node.layer.input[0], copy=True)
        kernel = self.graph.get_node(node.layer.input[1], copy=True)
        input = self.graph.get_node(node.layer.input[2], copy=True)

        assert kernel.layer_type == "Const", "Kernel of Conv2DBackpropInput should be Const"

        self.add_omit_nodes(kernel.layer_name, node.layer_name)
        self.add_omit_nodes(out_shape.layer_name, node.layer_name)

        if out_shape.layer_type == "Const":
            out_shape = out_shape.value.tolist()
        else:
            out_shape = self.decoder.infer_shape_tensor(out_shape,
                                                        node.out_shapes[0])

        in_shape = input.out_shapes[0]
        if in_shape.count(-1) > 2:
            in_shape = self.decoder.infer_tensor(input).shape
        k_size = kernel.out_shapes[0]
        if k_size.count(-1) > 2:
            k_size = self.decoder.infer_tensor(kernel).shape

        pad_mode = node.get_attr("padding").decode()
        strides = node.get_attr("strides")
        dilations = node.get_attr("dilations")
        data_format = node.get_attr("data_format").decode()
        channel_first = data_format == "NCHW"

        self.weights[kernel.layer_name.replace('/', '_')] = numpy.transpose(
            kernel.value, (3, 2, 0, 1))
        if not channel_first:
            in_shape = [in_shape[i] for i in [0, 3, 1, 2]]
            strides = [strides[i] for i in [0, 3, 1, 2]]
            dilations = [dilations[i] for i in [0, 3, 1, 2]]
        else:
            self.graph.data_format_propagation(node)

        padding = 0
        if pad_mode == "SAME":
            pad_h = get_same_padding(in_shape[2], k_size[0], strides[2])
            pad_w = get_same_padding(in_shape[3], k_size[1], strides[3])
            if pad_h[0] == pad_h[1] and pad_w[0] == pad_w[1]:
                padding = [pad_h[0], pad_w[0]]
            else:
                attr = {"paddings": pad_h + pad_w, "pad_value": 0.0}
                node.fluid_code.add_layer("pad2d",
                                          inputs=input,
                                          output=node,
                                          param_attr=attr)
                input = node

        attr = {
            "bias_attr": False,
            "param_attr": string(kernel.layer_name),
            "num_filters": k_size[3],
            "filter_size": k_size[0:2],
            "stride": strides[2:4],
            "dilation": dilations[2:4],
            "padding": padding
        }
        node.fluid_code.add_layer("conv2d_transpose",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

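        # Under SAME padding conv2d_transpose can overshoot the requested
        # output shape; slice the result back to out_shape, with 999999
        # standing in for "to the end" on unknown dimensions.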
        if pad_mode == "SAME":
            if node.tf_data_format == "NHWC":
                out_shape = [out_shape[i] for i in [0, 3, 1, 2]]
            for i in range(4):
                if out_shape[i] < 0:
                    out_shape[i] = 999999
            attr = {
                "axes": [0, 1, 2, 3],
                "starts": [0, 0, 0, 0],
                "ends": out_shape
            }
            node.fluid_code.add_layer("slice",
                                      inputs=node,
                                      output=node,
                                      param_attr=attr)

    def Max(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        reduce_idx = self.graph.get_node(node.layer.input[1], copy=True)
        assert reduce_idx.layer_type == "Const", "Only Const parameter [reduce_idx] is supported"
        keep_dims = node.get_attr("keep_dims")
        dim = reduce_idx.value.tolist()
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            dim = nhwc_dim_to_nchw(input, dim)

        attr = {"dim": dim, "keep_dim": keep_dims}
        node.fluid_code.add_layer("reduce_max",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Sum(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        reduce_idx = self.graph.get_node(node.layer.input[1], copy=True)
        assert reduce_idx.layer_type == "Const", "Only Const parameter [reduce_idx] is supported"
        keep_dims = node.get_attr("keep_dims")
        dim = reduce_idx.value.tolist()
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            dim = nhwc_dim_to_nchw(input, dim)

        attr = {"dim": dim, "keep_dim": keep_dims}
        node.fluid_code.add_layer("reduce_sum",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Cast(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        dtype = node.dtype_map[node.get_attr('DstT')]
        attr = {"dtype": string(dtype)}
        node.fluid_code.add_layer("cast",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Split(self, node):
        dim = self.graph.get_node(node.layer.input[0], copy=True)
        input = self.graph.get_node(node.layer.input[1], copy=True)
        self.add_omit_nodes(dim.layer_name, node.layer_name)
        num_split = node.get_attr('num_split')
        dim = dim.value
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            dim = nhwc_dim_to_nchw(input, dim)

        attr = {"num_or_sections": num_split, "dim": dim}
        node.fluid_code.add_layer("split",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Squeeze(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        squeeze_dims = node.get_attr('squeeze_dims')
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            for i in range(len(squeeze_dims)):
                squeeze_dims[i] = nhwc_dim_to_nchw(input, squeeze_dims[i])
        attr = {"axes": squeeze_dims}
        node.fluid_code.add_layer("squeeze",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Softmax(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        axis = node.get_attr("axis")
        if axis is None:
            axis = -1 + len(input.out_shapes[0])
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            axis = nhwc_dim_to_nchw(input, axis)
        attr = {"axis": axis}
        node.fluid_code.add_layer("softmax",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def ResizeNearestNeighbor(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        resize_shape = self.graph.get_node(node.layer.input[1], copy=True)
        self.add_omit_nodes(resize_shape.layer_name, node.layer_name)
        if resize_shape.layer_type == "Const":
            resize_shape = resize_shape.value.tolist()
        else:
            resize_shape = self.decoder.infer_shape_tensor(
                resize_shape, node.out_shapes[0])
        align_corners = node.get_attr("align_corners")
        attr = {"align_corners": align_corners, "out_shape": resize_shape}
        node.fluid_code.add_layer("resize_nearest",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def ResizeBilinear(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        resize_shape = self.graph.get_node(node.layer.input[1], copy=True)
        self.add_omit_nodes(resize_shape.layer_name, node.layer_name)
        if resize_shape.layer_type == "Const":
            resize_shape = resize_shape.value.tolist()
        else:
            resize_shape = self.decoder.infer_shape_tensor(
                resize_shape, node.out_shapes[0])
        align_corners = node.get_attr("align_corners")
        attr = {
            "align_corners": align_corners,
            "out_shape": resize_shape,
            "align_mode": 1
        }
        node.fluid_code.add_layer("resize_bilinear",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def GreaterEqual(self, node):
        x = self.graph.get_node(node.layer.input[0], copy=True)
        y = self.graph.get_node(node.layer.input[1], copy=True)
        inputs = {"x": x, "y": y}
        node.fluid_code.add_layer("greater_equal",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=None)

    def RandomUniform(self, node):
        shape = self.graph.get_node(node.layer.input[0], copy=True)
        self.add_omit_nodes(shape.layer_name, node.layer_name)
        if shape.layer_type == "Const":
            shape = shape.value.tolist()
        else:
            shape = self.decoder.infer_shape_tensor(shape)
        if len(shape) == 4 and node.tf_data_format == "NHWC":
            shape = [shape[i] for i in [0, 3, 1, 2]]
        attr = {"shape": shape, "min": 0.0, "max": 0.9999}
        if shape[0] < 0:
            input = self.batch_node
            node.fluid_code.add_layer("uniform_random_batch_size_like",
                                      inputs=input,
                                      output=node,
                                      param_attr=attr)
        else:
            node.fluid_code.add_layer("uniform_random",
                                      inputs=None,
                                      output=node,
                                      param_attr=attr)

    def SquaredDifference(self, node):
        x = self.graph.get_node(node.layer.input[0], copy=True)
        y = self.graph.get_node(node.layer.input[1], copy=True)
        inputs = {"x": x, "y": y}
        node.fluid_code.add_layer("elementwise_sub",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=None)
        inputs = {"x": node, "y": node}
        node.fluid_code.add_layer("elementwise_mul",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=None)