#   Copyright (c) 2019  PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from x2paddle.decoder.tf_decoder import TFGraph
from x2paddle.core.op_mapper import OpMapper
from x2paddle.core.util import *
import inspect
import math
import numpy
import sys


# compute padding size for SAME mode
def get_same_padding(in_size, kernel_size, stride):
    new_size = int(math.ceil(in_size * 1.0 / stride))
    pad_size = (new_size - 1) * stride + kernel_size - in_size
    pad0 = int(pad_size / 2)
    pad1 = pad_size - pad0
    return [pad0, pad1]
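
# Worked example of the SAME-padding arithmetic above (illustrative only):
#   get_same_padding(in_size=224, kernel_size=3, stride=2)
#     new_size = ceil(224 / 2) = 112
#     pad_size = (112 - 1) * 2 + 3 - 224 = 1
#     -> [0, 1], i.e. the extra pixel is padded on the bottom/right, as in TensorFlow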


def nhwc_dim_to_nchw(node, dim):
    tf_data_format = list(node.tf_data_format)
    pd_data_format = list(node.pd_data_format)
    if isinstance(dim, list):
        for i in range(len(dim)):
            char = tf_data_format[dim[i]]
            dim[i] = pd_data_format.index(char)
    else:
        char = tf_data_format[dim]
        dim = pd_data_format.index(char)
    return dim

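# Example of the axis remapping above (illustrative): for a node whose
# tf_data_format is "NHWC" and pd_data_format is "NCHW", TF axis 3 is the
# channel axis 'C', which sits at index 1 in NCHW, so
# nhwc_dim_to_nchw(node, 3) == 1.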


class TFOpMapper(OpMapper):
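    # Each entry maps a TF op to [paddle_op, {tf_attr_name: paddle_attr_name}, ...];
    # ops listed here need no graph rewriting, only attribute renaming in
    # directly_map() below.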
    directly_map_ops = {
        'Relu': ['relu'],
        'Relu6': ['relu6'],
        'Shape': ['shape'],
        'Abs': ['abs'],
        'Sigmoid': ['sigmoid'],
        'Exp': ['exp'],
        'Rsqrt': ['rsqrt'],
        'swish_f32': ['swish'],
        'LeakyRelu': ['leaky_relu', {
            'alpha': 'alpha'
        }]
    }
    elementwise_ops = {
        'Add': 'elementwise_add',
        'RealDiv': 'elementwise_div',
        'Sub': 'elementwise_sub',
        'Maximum': 'elementwise_max',
        'Mul': 'elementwise_mul'
    }

    def __init__(self, decoder):
        super(TFOpMapper, self).__init__()
        self.decoder = decoder
        self.graph = decoder.tf_graph
        self.weights = dict()
        self.omit_nodes = list()
        self.used_custom_layers = dict()

        not_placeholder = list()
        for name in self.graph.input_nodes:
            if self.graph.get_node(name).layer_type != "Placeholder":
                not_placeholder.append(name)
        for name in not_placeholder:
            idx = self.graph.input_nodes.index(name)
            del self.graph.input_nodes[idx]

        print("Total nodes: {}".format(len(self.graph.topo_sort)))
        unsupported_ops = set()
        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            op = node.layer_type
            if op in self.directly_map_ops:
                if len(unsupported_ops) > 0:
                    continue
                self.directly_map(node)
            elif op in self.elementwise_ops:
                if len(unsupported_ops) > 0:
                    continue
                self.elementwise_map(node)
            elif hasattr(self, op):
                if len(unsupported_ops) > 0:
                    continue
                func = getattr(self, op)
                func(node)
            else:
                unsupported_ops.add(op)
        if len(unsupported_ops) > 0:
            print("=========={} Ops are not supported yet======".format(
                len(unsupported_ops)))
            for op in unsupported_ops:
                print("========== {} ==========".format(op))
            sys.exit(-1)

    def directly_map(self, node):
        assert node.layer_type in self.directly_map_ops
        op_info = self.directly_map_ops[node.layer_type]
        input = self.graph.get_node(node.layer.input[0], copy=True)
        attr = dict()
        for param in op_info[1:]:
            tf_param_name = list(param.keys())[0]
            pd_param_name = list(param.values())[0]
            tf_param = node.get_attr(tf_param_name)
            attr[pd_param_name] = tf_param
        node.fluid_code.add_layer(op_info[0],
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def elementwise_map(self, node):
        assert node.layer_type in self.elementwise_ops
        op_type = self.elementwise_ops[node.layer_type]
        x = self.graph.get_node(node.layer.input[0], copy=True)
        y = self.graph.get_node(node.layer.input[1], copy=True)
        x_shape = x.out_shapes[0]
        y_shape = y.out_shapes[0]
        # work around paddle's incomplete broadcasting support
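        # e.g. x_shape = [3], y_shape = [2, 4, 3]: swap the operands so the
        # higher-rank tensor is x, since paddle broadcasts y onto x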
        x_input = x
        y_input = y
        if len(x_shape) < len(y_shape):
            unrevertable_ops = [
                "elementwise_sub", "elementwise_div", "elementwise_floordiv",
                "elementwise_mod", "elementwise_pow"
            ]
            if op_type not in unrevertable_ops:
                x_input = y
                y_input = x
                x_shape = y.out_shapes[0]
                y_shape = x.out_shapes[0]
            else:
                raise Exception("Unexpected situation happened")

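        # After NHWC data has been converted to NCHW, a channel-sized 1-D
        # operand broadcasts along axis 1; otherwise use the trailing axis.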
        if len(x_shape) == 4 and len(y_shape) == 1:
            if x_input.tf_data_format == "NHWC":
                axis = 1
            else:
                axis = -1
            attr = {"axis": axis}
            inputs = {"x": x_input, "y": y_input}
            node.fluid_code.add_layer(op_type,
                                      inputs=inputs,
                                      output=node,
                                      param_attr=attr)
            return

        is_sub_seq = True
        for i in range(len(y_shape)):
            index = -1 * i - 1
            if y_shape[index] != x_shape[index]:
                is_sub_seq = False
        if not is_sub_seq:
            x_expand_times = [1] * len(x_shape)
            y_expand_times = [1] * len(y_shape)
            x_need_expand = False
            y_need_expand = False
            for i in range(len(y_shape)):
                index = -1 * i - 1
                if y_shape[index] != x_shape[index]:
                    if y_shape[index] == 1:
                        y_expand_times[index] = x_shape[index]
                        y_need_expand = True
                    elif x_shape[index] == 1:
                        x_expand_times[index] = y_shape[index]
                        x_need_expand = True
                    else:
                        raise Exception("Unexpected situation happened")
            if x_need_expand:
                if len(x_expand_times) == 3 and x.tf_data_format == "NHWC":
                    x_expand_times = [x_expand_times[i] for i in [2, 0, 1]]
                if len(x_expand_times) == 4 and x.tf_data_format == "NHWC":
                    x_expand_times = [x_expand_times[i] for i in [0, 3, 1, 2]]
                attr = {"expand_times": x_expand_times}
                node.fluid_code.add_layer("expand",
                                          inputs=x_input,
                                          output="x_tmp",
                                          param_attr=attr)
                x_input = "x_tmp"
            if y_need_expand:
                if len(y_expand_times) == 3 and y.tf_data_format == "NHWC":
                    y_expand_times = [y_expand_times[i] for i in [2, 0, 1]]
                if len(y_expand_times) == 4 and y.tf_data_format == "NHWC":
                    y_expand_times = [y_expand_times[i] for i in [0, 3, 1, 2]]
                attr = {"expand_times": y_expand_times}
                node.fluid_code.add_layer("expand",
                                          inputs=y_input,
                                          output="y_tmp",
                                          param_attr=attr)
                y_input = "y_tmp"
        inputs = {"x": x_input, "y": y_input}
        node.fluid_code.add_layer(op_type,
                                  inputs=inputs,
                                  output=node,
                                  param_attr=None)

    def Placeholder(self, node):
        shape = node.out_shapes[0]
        assert len(shape) != 0, "Unknown shape of input node [{}].".format(
            node.layer_name)
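        # e.g. an NHWC input shape [-1, 224, 224, 3] becomes [-1, 3, 224, 224]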
        if node.tf_data_format == "NHWC" and len(shape) == 4:
            shape = [shape[i] for i in [0, 3, 1, 2]]
        elif node.tf_data_format == "NCHW" and len(shape) == 4:
            self.graph.data_format_propagation(node)
        dtype = node.dtype
        attr = {
            'dtype': string(dtype),
            'shape': shape,
            'name': string(node.layer_name),
            'append_batch_size': False
        }
        node.fluid_code.add_layer("data",
                                  inputs=None,
                                  output=node,
                                  param_attr=attr)

    def Const(self, node):
        shape = node.out_shapes[0]
        dtype = node.dtype
        value = node.value
        initializer = "Constant(0.0)"
        if len(shape) == 0:
            assert value.size == 1, "Unexpected situation happened"
            shape = [1]
            initializer = "Constant({})".format(value)

        self.weights[node.layer_name] = node.value

        if node.tf_data_format == "NHWC":
            if len(shape) == 4:
                shape = [shape[i] for i in [0, 3, 1, 2]]
            if len(shape) == 3:
                shape = [shape[i] for i in [2, 0, 1]]
                self.weights[node.layer_name] = numpy.transpose(
                    node.value, (2, 0, 1))
        elif node.tf_data_format == "NCHW":
            if len(shape) == 4:
                self.graph.data_format_propagation(node)

        attr = {
            'dtype': string(dtype),
            'shape': shape,
            'name': string(node.layer_name),
            'default_initializer': initializer
        }
        node.fluid_code.add_layer("create_parameter",
                                  inputs=None,
                                  output=node,
                                  param_attr=attr)

    def Transpose(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        perm = self.graph.get_node(node.layer.input[1], copy=True)
        assert perm.layer_type == "Const", "Perm of transpose OP should be Const"
        del self.weights[perm.layer_name.replace('/', '_')]
        perm.fluid_code.clear()
        perm = perm.value.tolist()

        if perm == [0, 3, 1, 2] and input.tf_data_format == "NHWC":
            node.fluid_code.add_layer("assign",
                                      inputs=input,
                                      output=node,
                                      param_attr=None)
            node.tf_data_format = "NCHW"
            self.graph.data_format_propagation(node)
        elif perm == [0, 2, 3, 1] and input.tf_data_format == "NCHW":
            node.fluid_code.add_layer("assign",
                                      inputs=input,
                                      output=node,
                                      param_attr=None)
            node.tf_data_format = "NHWC"
            self.graph.data_format_propagation(node)
        elif len(input.out_shapes[0]) > 4:
            print(input.layer_name, input.tf_data_format, input.pd_data_format)
            tf_data_format = list(input.tf_data_format)
            pd_data_format = list(input.pd_data_format)
            new_perm = [i for i in range(len(perm))]
            for i in range(len(perm)):
                char0 = tf_data_format[i]
                char1 = tf_data_format[perm[i]]
                index0 = pd_data_format.index(char0)
                index1 = pd_data_format.index(char1)
                new_perm[index0] = index1
            node.tf_data_format = [tf_data_format[i] for i in perm]
            node.pd_data_format = [pd_data_format[i] for i in perm]
            attr = {'perm': new_perm}
            node.fluid_code.add_layer("transpose",
                                      inputs=input,
                                      output=node,
                                      param_attr=attr)
        elif len(node.out_shapes[0]) != 4:
            attr = {'perm': perm}
            node.fluid_code.add_layer("transpose",
                                      inputs=input,
                                      output=node,
                                      param_attr=attr)
        else:
            raise Exception("Unexpected situation happened in Transpose OP")

    def MaxPool(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)

        in_shape = input.out_shapes[0]
        if in_shape.count(-1) > 2:
            in_shape = self.decoder.infer_tensor(input).shape

        k_size = node.get_attr("ksize")
        strides = node.get_attr("strides")
        data_format = node.get_attr("data_format").decode()
        pad_mode = node.get_attr("padding").decode()
        channel_first = data_format == "NCHW"
        padding = 0

        if not channel_first:
            in_shape = [in_shape[i] for i in [0, 3, 1, 2]]
            strides = [strides[i] for i in [0, 3, 1, 2]]
            k_size = [k_size[i] for i in [0, 3, 1, 2]]
        else:
            self.graph.data_format_propagation(node)

        if pad_mode == "SAME":
            pad_h = get_same_padding(in_shape[2], k_size[2], strides[2])
            pad_w = get_same_padding(in_shape[3], k_size[3], strides[3])
            pad_h = pad_h[0] + pad_h[1]
            pad_w = pad_w[0] + pad_w[1]
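            # pad with a large negative value so padded cells never win the max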
            attr = {"paddings": [0, pad_h, 0, pad_w], "pad_value": -10000.0}
            node.fluid_code.add_layer("pad2d",
                                      inputs=input,
                                      output=node,
                                      param_attr=attr)
            input = node
        attr = {
            "pool_size": k_size[2:4],
            "pool_type": string("max"),
            "pool_padding": padding,
            "pool_stride": strides[2:4]
        }
        node.fluid_code.add_layer("pool2d",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Conv2D(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        kernel = self.graph.get_node(node.layer.input[1], copy=True)
        assert kernel.layer_type == "Const", "Kernel of Conv2D should be Const"
        self.omit_nodes.append(kernel.layer_name)

        node.fluid_code.add_note("#{} : {}".format(node.layer.name,
                                                   node.layer_name))

        in_shape = input.out_shapes[0]
        if in_shape.count(-1) > 2:
            in_shape = self.decoder.infer_tensor(input).shape
        k_size = kernel.out_shapes[0]
        if k_size.count(-1) > 2:
            k_size = self.decoder.infer_tensor(kernel).shape

        strides = node.get_attr("strides")
        dilations = node.get_attr("dilations")
        data_format = node.get_attr("data_format").decode()
        pad_mode = node.get_attr("padding").decode()
        channel_first = data_format == "NCHW"
        padding = 0

        # TF Conv2D kernels are HWIO (height, width, in_channels, out_channels);
        # paddle expects OIHW, hence the (3, 2, 0, 1) transpose
        self.weights[kernel.layer_name.replace('/', '_')] = numpy.transpose(
            kernel.value, (3, 2, 0, 1))

        if not channel_first:
            in_shape = [in_shape[i] for i in [0, 3, 1, 2]]
            strides = [strides[i] for i in [0, 3, 1, 2]]
            dilations = [dilations[i] for i in [0, 3, 1, 2]]
        else:
            self.graph.data_format_propagation(node)

        if pad_mode == "SAME":
            pad_h = get_same_padding(in_shape[2], k_size[0], strides[2])
            pad_w = get_same_padding(in_shape[3], k_size[1], strides[3])
            if pad_h[0] == pad_h[1] and pad_w[0] == pad_w[1]:
                padding = [pad_h[0], pad_w[0]]
            else:
                attr = {"paddings": pad_h + pad_w, "pad_value": 0.0}
                node.fluid_code.add_layer("pad2d",
                                          inputs=input,
                                          output=node,
                                          param_attr=attr)
                input = node
        attr = {
            "bias_attr": False,
            "param_attr": string(kernel.layer_name),
            "num_filters": k_size[3],
            "filter_size": k_size[0:2],
            "stride": strides[2:4],
            "dilation": dilations[2:4],
            "padding": padding
        }
        node.fluid_code.add_layer("conv2d",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def BiasAdd(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        bias = self.graph.get_node(node.layer.input[1], copy=True)
        axis = -1
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            axis = 1
        inputs = {"x": input, "y": bias}
        attr = {"axis": axis}
        node.fluid_code.add_layer("elementwise_add",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=attr)

    def FusedBatchNorm(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        gamma = self.graph.get_node(node.layer.input[1], copy=True)
        beta = self.graph.get_node(node.layer.input[2], copy=True)
        moving_mean = self.graph.get_node(node.layer.input[3], copy=True)
        moving_var = self.graph.get_node(node.layer.input[4], copy=True)
        data_format = node.get_attr("data_format").decode()
        channel_first = data_format == "NCHW"

        assert gamma.layer_type == "Const"
        assert beta.layer_type == "Const"
        assert moving_mean.layer_type == "Const"
        assert moving_var.layer_type == "Const"
        self.omit_nodes.append(gamma.layer_name)
        self.omit_nodes.append(beta.layer_name)
        self.omit_nodes.append(moving_mean.layer_name)
        self.omit_nodes.append(moving_var.layer_name)

        if channel_first:
            self.graph.data_format_propagation(node)

        attr = {
            "epsilon": node.get_attr("epsilon"),
            "param_attr": string(gamma.layer_name),
            "bias_attr": string(beta.layer_name),
            "moving_mean_name": string(moving_mean.layer_name),
            "moving_variance_name": string(moving_var.layer_name),
            "is_test": True
        }

        node.fluid_code.add_layer("batch_norm",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def DepthwiseConv2dNative(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        kernel = self.graph.get_node(node.layer.input[1], copy=True)
        assert kernel.layer_type == "Const", "Kernel of DepthwiseConv2DNative should be Const"
        self.omit_nodes.append(kernel.layer_name)

        node.fluid_code.add_note("#{} : {}".format(node.layer.name,
                                                   node.layer_name))

        in_shape = input.out_shapes[0]
        if in_shape.count(-1) > 2:
            in_shape = self.decoder.infer_tensor(input).shape
        k_size = kernel.out_shapes[0]
        if k_size.count(-1) > 2:
            k_size = self.decoder.infer_tensor(kernel).shape

        strides = node.get_attr("strides")
        dilations = node.get_attr("dilations")
        data_format = node.get_attr("data_format").decode()
        pad_mode = node.get_attr("padding").decode()
        channel_first = data_format == "NCHW"
        padding = 0

        # TF depthwise kernels are (height, width, in_channels, channel_multiplier);
        # transpose to (in_channels, channel_multiplier, height, width) for paddle
        self.weights[kernel.layer_name.replace('/', '_')] = numpy.transpose(
            kernel.value, (2, 3, 0, 1))

        if not channel_first:
            in_shape = [in_shape[i] for i in [0, 3, 1, 2]]
            strides = [strides[i] for i in [0, 3, 1, 2]]
            dilations = [dilations[i] for i in [0, 3, 1, 2]]
        else:
            self.graph.data_format_propagation(node)

        if pad_mode == "SAME":
            pad_h = get_same_padding(in_shape[2], k_size[0], strides[2])
            pad_w = get_same_padding(in_shape[3], k_size[1], strides[3])
            if pad_h[0] == pad_h[1] and pad_w[0] == pad_w[1]:
                padding = [pad_h[0], pad_w[0]]
            else:
                attr = {"paddings": pad_h + pad_w, "pad_value": 0.0}
                node.fluid_code.add_layer("pad2d",
                                          inputs=input,
                                          output=node,
                                          param_attr=attr)
                input = node

        attr = {
            "bias_attr": False,
            "param_attr": string(kernel.layer_name),
            "num_filters": in_shape[1],
            "filter_size": k_size[0:2],
            "stride": strides[2:4],
            "dilation": dilations[2:4],
            "groups": k_size[3] * in_shape[1],
            "use_cudnn": False,
            "padding": padding
        }
        node.fluid_code.add_layer("conv2d",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Reshape(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        param = self.graph.get_node(node.layer.input[1], copy=True)
        if param.layer_type == "Const":
            attr = {"shape": param.value.tolist()}
            self.omit_nodes.append(param.layer_name)
        else:
            # Trick to handle a tensor (non-Const) shape parameter: if shape
            # inference pins it down to at most one -1 use it directly,
            # otherwise split it into scalar variables and assemble the target
            # shape as a Python list expression
            shape = self.decoder.infer_shape_tensor(param, node.out_shapes[0])
            if shape.count(-1) <= 1:
                attr = {"shape": shape}
                self.omit_nodes.append(param.layer_name)
            else:
                assert len(param.out_shapes[0]
                           ) == 1, "Unexpected situation of shape parameter"
                attr = {"shape": [-1]}
                node.fluid_code.add_layer("reshape",
                                          inputs=param,
                                          output="shape_param",
                                          param_attr=attr)
                attr = {"num_or_sections": param.out_shapes[0][0], "dim": 0}
                node.fluid_code.add_layer("split",
                                          inputs="shape_param",
                                          output=node,
                                          param_attr=attr)
                new_param = "["
                for i in range(param.out_shapes[0][0]):
                    new_param += (node.layer_name + "[{}]".format(i) + ", ")
                new_param = new_param.strip(", ") + "]"
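                # e.g. for a rank-4 shape tensor this builds the string
                # "[node_name[0], node_name[1], node_name[2], node_name[3]]",
                # which is pasted into the generated reshape call (illustrative)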
                attr = {"shape": new_param}
        if len(attr["shape"]) == 4 and node.tf_data_format == "NHWC":
            attr["shape"] = [attr["shape"][i] for i in [0, 3, 1, 2]]
        node.fluid_code.add_layer("reshape",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def AvgPool(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)

        in_shape = input.out_shapes[0]
        if in_shape.count(-1) > 2:
            in_shape = self.decoder.infer_tensor(input).shape

        k_size = node.get_attr("ksize")
        strides = node.get_attr("strides")
        data_format = node.get_attr("data_format").decode()
        pad_mode = node.get_attr("padding").decode()
        channel_first = data_format == "NCHW"

        if not channel_first:
            in_shape = [in_shape[i] for i in [0, 3, 1, 2]]
            strides = [strides[i] for i in [0, 3, 1, 2]]
            k_size = [k_size[i] for i in [0, 3, 1, 2]]
        else:
            self.graph.data_format_propagation(node)

        attr = {
            "pool_size": k_size[2:4],
            "pool_type": string("avg"),
            "pool_stride": strides[2:4]
        }
        if pad_mode == "SAME":
            pad_h = get_same_padding(in_shape[2], k_size[2], strides[2])
            pad_w = get_same_padding(in_shape[3], k_size[3], strides[3])
            assert pad_h[0] == pad_h[1] and pad_w[0] == pad_w[
                1], "Cannot map AvgPool"
            attr["pool_padding"] = [pad_h[0], pad_w[0]]
        node.fluid_code.add_layer("pool2d",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def SplitV(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        num_sections = self.graph.get_node(node.layer.input[1], copy=True)
        dim = self.graph.get_node(node.layer.input[2], copy=True)
        assert num_sections.layer_type == "Const"
        assert dim.layer_type == "Const"
        self.omit_nodes.append(num_sections.layer_name)
        self.omit_nodes.append(dim.layer_name)
        dim = dim.value
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            dim = nhwc_dim_to_nchw(input, dim)
        attr = {
            "num_or_sections": num_sections.value.tolist(),
            "dim": dim
        }
        node.fluid_code.add_layer("split",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def ConcatV2(self, node):
        inputs = [
            self.graph.get_node(name, copy=True)
            for name in node.layer.input[:-1]
        ]
        axis = self.graph.get_node(node.layer.input[-1], copy=True)
        assert axis.layer_type == "Const"
        self.omit_nodes.append(axis.layer_name)
        axis = axis.value
        if inputs[0].tf_data_format == "NHWC" and len(
                inputs[0].out_shapes[0]) == 4:
            axis = nhwc_dim_to_nchw(inputs[0], axis)
        attr = {"axis": axis}
        node.fluid_code.add_layer("concat",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=attr)

    def Tile(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        expand_times = self.graph.get_node(node.layer.input[1], copy=True)
        self.omit_nodes.append(expand_times.layer_name)
        if expand_times.layer_type == "Const":
            expand_times = expand_times.value.tolist()
        else:
            expand_times = self.decoder.infer_shape_tensor(expand_times)
        if input.tf_data_format == "NHWC":
            if len(input.out_shapes[0]) == 4:
                expand_times = [expand_times[i] for i in [0, 3, 1, 2]]
            elif len(input.out_shapes[0]) == 3:
                expand_times = [expand_times[i] for i in [2, 0, 1]]
        for i in range(len(expand_times)):
            if expand_times[i] < 0:
                expand_times[i] = 1

        attr = {"expand_times": expand_times}
        node.fluid_code.add_layer("expand",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Pack(self, node):
        inputs = [
            self.graph.get_node(name, copy=True) for name in node.layer.input
        ]
        axis = node.get_attr("axis")
        if inputs[0].tf_data_format == "NHWC" and len(
                inputs[0].out_shapes[0]) == 4:
            tf_data_format = list(inputs[0].tf_data_format)
            tf_data_format.insert(axis, str(len(tf_data_format)))
            axis = nhwc_dim_to_nchw(inputs[0], axis)
            pd_data_format = list(inputs[0].pd_data_format)
            pd_data_format.insert(axis, str(len(pd_data_format)))
            node.tf_data_format = "".join(tf_data_format)
            node.pd_data_format = "".join(pd_data_format)

        attr = {"axis": axis}
        node.fluid_code.add_layer("stack",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=attr)

    def Pad(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        paddings = self.graph.get_node(node.layer.input[1], copy=True)
        assert paddings.layer_type == "Const", "Padding should be Const"
        self.omit_nodes.append(paddings.layer_name)
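        # TF supplies per-axis pads [[n0, n1], [h0, h1], [w0, w1], [c0, c1]] for
        # NHWC; after flattening, reorder to NCHW order [n, n, c, c, h, h, w, w]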
        paddings = paddings.value.flatten().tolist()
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            paddings = [paddings[i] for i in [0, 1, 6, 7, 2, 3, 4, 5]]

        pad_op = "pad"
        if len(input.out_shapes[0]) == 4:
            if paddings[0] + paddings[1] + paddings[2] + paddings[3] == 0:
                paddings = paddings[4:]
                pad_op = "pad2d"
        attr = {"paddings": paddings}
        node.fluid_code.add_layer(pad_op,
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Range(self, node):
        start = self.graph.get_node(node.layer.input[0], copy=True)
        limit = self.graph.get_node(node.layer.input[1], copy=True)
        delta = self.graph.get_node(node.layer.input[2], copy=True)
        # record the const inputs before they are replaced by their values
        self.omit_nodes.append(start.layer_name)
        self.omit_nodes.append(limit.layer_name)
        self.omit_nodes.append(delta.layer_name)
        if start.layer_type == "Const":
            start = start.value
        else:
            start = self.decoder.infer_tensor(start)
        if limit.layer_type == "Const":
            limit = limit.value
        else:
            limit = self.decoder.infer_tensor(limit)
        if delta.layer_type == "Const":
            delta = delta.value
        else:
            delta = self.decoder.infer_tensor(delta)

        inputs = {"start": start, "end": limit, "step": delta}
        attr = {"dtype": string(node.dtype)}
        node.fluid_code.add_layer("range",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=None)

    def Mean(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        reduce_idx = self.graph.get_node(node.layer.input[1], copy=True)
        assert reduce_idx.layer_type == "Const", "Only a Const reduce_idx parameter is supported"
        dims = reduce_idx.value.tolist()
        keep_dims = node.get_attr("keep_dims")

        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            for i in range(len(dims)):
                dims[i] = nhwc_dim_to_nchw(input, dims[i])

        attr = {"dim": dims, "keep_dim": keep_dims}
        node.fluid_code.add_layer("reduce_mean",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def MatMul(self, node):
        x = self.graph.get_node(node.layer.input[0], copy=True)
        y = self.graph.get_node(node.layer.input[1], copy=True)
        transpose_a = node.get_attr('transpose_a')
        transpose_b = node.get_attr('transpose_b')
        inputs = {"x": x, "y": y}
        # fix paddle shape infer problem
        # should be removed after paddle 1.6
        if x.out_shapes[0][-1] < 0 and y.out_shapes[0][0] > 0:
            shape = x.out_shapes[0]
            shape[-1] = y.out_shapes[0][0]
            attr = {"shape": shape}
            node.fluid_code.add_layer("reshape",
                                      inputs=x,
                                      output=x,
                                      param_attr=attr)
        attr = {"transpose_x": transpose_a, "transpose_y": transpose_b}
        node.fluid_code.add_layer("matmul",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=attr)

    def ArgMax(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        axis = self.graph.get_node(node.layer.input[1], copy=True)
        assert axis.layer_type == "Const", "ArgMax only supports a Const axis parameter"
        self.omit_nodes.append(axis.layer_name)
        axis = axis.value
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            axis = nhwc_dim_to_nchw(input, axis)
        attr = {"axis": axis}
        node.fluid_code.add_layer("argmax",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def StridedSlice(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        begin = self.graph.get_node(node.layer.input[1], copy=True)
        end = self.graph.get_node(node.layer.input[2], copy=True)
        strides = self.graph.get_node(node.layer.input[3], copy=True)
        assert begin.layer_type == "Const"
        assert end.layer_type == "Const"
        assert strides.layer_type == "Const"
        self.omit_nodes.append(begin.layer_name)
        self.omit_nodes.append(end.layer_name)
        self.omit_nodes.append(strides.layer_name)
        strides = strides.value.tolist()
        assert len(set(strides)) == 1 and strides[0] == 1

        begin = begin.value.tolist()
        end = end.value.tolist()
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            begin = [begin[i] for i in [0, 3, 1, 2]]
            end = [end[i] for i in [0, 3, 1, 2]]

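        # end == 0 is treated here as "slice to the end of the axis"; use a
        # large sentinel that paddle's slice op clamps to the dimension size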
        for i in range(len(end)):
            if end[i] == 0:
                end[i] = 999999

        attr = {
            "axes": [i for i in range(len(strides))],
            "starts": begin,
            "ends": end
        }
        node.fluid_code.add_layer("slice",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Slice(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        begin = self.graph.get_node(node.layer.input[1], copy=True)
        size = self.graph.get_node(node.layer.input[2], copy=True)
        #        assert begin.layer_type == "Const"
        #        assert size.layer_type == "Const"
        self.omit_nodes.append(begin.layer_name)
        self.omit_nodes.append(size.layer_name)
        if begin.layer_type == "Const":
            begin = begin.value.tolist()
        else:
            begin = self.decoder.infer_tensor(begin).tolist()
        if size.layer_type == "const":
            size = size.value.tolist()
        else:
            size = self.decoder.infer_tensor(size).tolist()

        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            size = [size[i] for i in [0, 3, 1, 2]]
            begin = [begin[i] for i in [0, 3, 1, 2]]

        attr = {"shape": size, "offsets": begin}
        node.fluid_code.add_layer("crop",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Conv2DBackpropInput(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        kernel = self.graph.get_node(node.layer.input[1], copy=True)
        assert kernel.layer_type == "Const", "Kernel of Conv2DBackpropInput should be Const"
        self.omit_nodes.append(kernel.layer_name)

        node.fluid_code.add_note("#{} : {}".format(node.layer.name,
                                                   node.layer_name))

        in_shape = input.out_shapes[0]
        if in_shape.count(-1) > 2:
            in_shape = self.decoder.infer_tensor(input).shape
        k_size = kernel.out_shapes[0]
        if k_size.count(-1) > 2:
            k_size = self.decoder.infer_tensor(kernel).shape

        strides = node.get_attr("strides")
        dilations = node.get_attr("dilations")
        data_format = node.get_attr("data_format").decode()
        pad_mode = node.get_attr("padding").decode()
        channel_first = data_format == "NCHW"
        self.weights[kernel.layer_name.replace('/', '_')] = numpy.transpose(
            kernel.value, (3, 2, 0, 1))

        if not channel_first:
            in_shape = [in_shape[i] for i in [0, 3, 1, 2]]
            strides = [strides[i] for i in [0, 3, 1, 2]]
            dilations = [dilations[i] for i in [0, 3, 1, 2]]
        else:
            self.graph.data_format_propagation(node)

        padding = 0
        if pad_mode == "SAME":
            pad_h = get_same_padding(in_shape[2], k_size[0], strides[2])
            pad_w = get_same_padding(in_shape[3], k_size[1], strides[3])
            if pad_h[0] == pad_h[1] and pad_w[0] == pad_w[1]:
                padding = [pad_h[0], pad_w[0]]
            else:
                attr = {"paddings": pad_h + pad_w, "pad_value": 0.0}
                node.fluid_code.add_layer("pad2d",
                                          inputs=input,
                                          output=node,
                                          param_attr=attr)
                input = node
        attr = {
            "bias_attr": False,
            "param_attr": string(kernel.layer_name),
            "num_filters": k_size[3],
            "filter_size": k_size[0:2],
            "stride": strides[2:4],
            "dilation": dilations[2:4],
            "padding": padding
        }
        # `input` already refers to the pad2d output when padding was emitted
        node.fluid_code.add_layer("conv2d_transpose",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Max(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        reduce_idx = self.graph.get_node(node.layer.input[1], copy=True)
        assert reduce_idx.layer_type == "Const", "Only a Const reduce_idx parameter is supported"
        keep_dims = node.get_attr("keep_dims")
        dim = reduce_idx.value.tolist()
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            dim = nhwc_dim_to_nchw(input, dim)

        attr = {"dim": dim, "keep_dim": keep_dims}
        node.fluid_code.add_layer("reduce_max",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Sum(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        reduce_idx = self.graph.get_node(node.layer.input[1], copy=True)
        assert reduce_idx.layer_type == "Const", "Only a Const reduce_idx parameter is supported"
        keep_dims = node.get_attr("keep_dims")
        dim = reduce_idx.value.tolist()
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            dim = nhwc_dim_to_nchw(input, dim)

        attr = {"dim": dim, "keep_dim": keep_dims}
        node.fluid_code.add_layer("reduce_sum",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Cast(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        dtype = node.dtype_map[node.get_attr('DstT')]
        attr = {"dtype": string(dtype)}
        node.fluid_code.add_layer("cast",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def FloorDiv(self, node):
        x = self.graph.get_node(node.layer.input[0], copy=True)
        y = self.graph.get_node(node.layer.input[1], copy=True)
        inputs = {'x': x, 'y': y}
        node.fluid_code.add_layer("elementwise_div",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=None)
        node.fluid_code.add_layer("floor",
                                  inputs=node,
                                  output=node,
                                  param_attr=None)

    def Split(self, node):
        dim = self.graph.get_node(node.layer.input[0], copy=True)
        input = self.graph.get_node(node.layer.input[1], copy=True)
        assert dim.layer_type == "Const"
        self.omit_nodes.append(dim.layer_name)
        num_split = node.get_attr('num_split')
        dim = dim.value
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            dim = nhwc_dim_to_nchw(input, dim)

        attr = {"num_or_sections": num_split, "dim": dim}
        node.fluid_code.add_layer("split",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Squeeze(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        squeeze_dims = node.get_attr('squeeze_dims')
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            for i in range(len(squeeze_dims)):
                squeeze_dims[i] = nhwc_dim_to_nchw(input, squeeze_dims[i])
        attr = {"axes": squeeze_dims}
        node.fluid_code.add_layer("squeeze",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Softmax(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        axis = node.get_attr("axis")
        if input.tf_data_format == "NHWC" and len(input.out_shapes[0]) == 4:
            axis = nhwc_dim_to_nchw(input, axis)
        attr = {"axis": axis}
        node.fluid_code.add_layer("softmax",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def ResizeNearestNeighbor(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        resize_shape = self.graph.get_node(node.layer.input[1], copy=True)
        self.omit_nodes.append(resize_shape.layer_name)
        if resize_shape.layer_type == "Const":
            resize_shape = resize_shape.value.tolist()
        else:
            resize_shape = self.decoder.infer_shape_tensor(
                resize_shape, node.out_shapes[0])
        align_corners = node.get_attr("align_corners")
        attr = {"align_corners": align_corners, "out_shape": resize_shape}
        node.fluid_code.add_layer("resize_nearest",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def ResizeBilinear(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        resize_shape = self.graph.get_node(node.layer.input[1], copy=True)
        self.omit_nodes.append(resize_shape.layer_name)
        if resize_shape.layer_type == "Const":
            resize_shape = resize_shape.value.tolist()
        else:
            resize_shape = self.decoder.infer_shape_tensor(
                resize_shape, node.out_shapes[0])
        align_corners = node.get_attr("align_corners")
        attr = {
            "align_corners": align_corners,
            "out_shape": resize_shape,
            "align_mode": 1
        }
        node.fluid_code.add_layer("resize_bilinear",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)