#   Copyright (c) 2019  PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from x2paddle.decoder.tf_decoder import TFGraph
from x2paddle.core.op_mapper import OpMapper
from x2paddle.core.util import *
import numpy


class TFOpMapper(OpMapper):
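    """Map TensorFlow ops to equivalent PaddlePaddle fluid code.

    Handler methods are named after the TensorFlow op type they translate
    (e.g. Conv2D, Relu); run() dispatches each node of the decoded graph
    to its handler and collects the generated fluid code.
    """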
    def __init__(self, decoder):
        super(TFOpMapper, self).__init__()
        self.decoder = decoder
        self.graph = decoder.tf_graph
        self.weights = dict()
        self.omit_nodes = list()

    def run(self):
        print("Total nodes: {}".format(len(self.graph.topo_sort)))

        # check if ops in model are all supported
        if not self.op_checker():
            raise Exception("Model are not supported yet.")

        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            op = node.layer_type
            if hasattr(self, op):
                func = getattr(self, op)
                func(node)

        for i in range(len(self.graph.topo_sort)):
            node_name = self.graph.topo_sort[i]
            if node_name in self.omit_nodes:
                continue
            node = self.graph.get_node(node_name)
            self.net_code += node.fluid_code.gen_codes()

    def elementwise_operator(self, node, op_type):
        x = self.graph.get_node(node.layer.input[0], copy=True)
        y = self.graph.get_node(node.layer.input[1], copy=True)
        x_shape = x.out_shapes[0]
        y_shape = y.out_shapes[0]
        # work around paddle's incomplete broadcasting support by expanding explicitly
        x_input = x
        y_input = y
        if len(x_shape) < len(y_shape):
            unrevertable_ops = [
                "elementwise_sub", "elementwise_div", "elementwise_floordiv",
                "elementwise_mod", "elementwise_pow"
            ]
            if op_type not in unrevertable_ops:
                x_input = y
                y_input = x
                x_shape = y.out_shapes[0]
                y_shape = x.out_shapes[0]
            else:
                raise Exception("Unexpected situation happend")

        is_sub_seq = True
        for i in range(len(y_shape)):
            index = -1 * i - 1
            if y_shape[index] != x_shape[index]:
                is_sub_seq = False
        if not is_sub_seq:
            x_expand_times = [1] * len(x_shape)
            y_expand_times = [1] * len(y_shape)
            x_need_expand = False
            y_need_expand = False
            for i in range(len(y_shape)):
                index = -1 * i - 1
                if y_shape[index] != x_shape[index]:
                    if y_shape[index] == 1:
                        y_expand_times[index] = x_shape[index]
                        y_need_expand = True
                    elif x_shape[index] == 1:
                        x_expand_times[index] = y_shape[index]
                        x_need_expand = True
                    else:
                        raise Exception("Unexpected situation happend")
            if x_need_expand:
                attr = {"expand_times": x_expand_times}
                node.fluid_code.add_layer("expand",
                                          inputs=x_input,
                                          output="x_tmp",
                                          param_attr=attr)
                x_input = "x_tmp"
            if y_need_expand:
                attr = {"expand_times": y_expand_times}
                node.fluid_code.add_layer("expand",
                                          inputs=y_input,
                                          output="y_tmp",
                                          param_attr=attr)
                y_input = "y_tmp"
        inputs = {"x": x_input, "y": y_input}
        node.fluid_code.add_layer(op_type,
                                  inputs=inputs,
                                  output=node,
                                  param_attr=None)

    def Placeholder(self, node):
        shape = node.out_shapes[0]
        assert len(shape) != 0, "Unknown shape of input node [{}].".format(
            node.layer_name)
        dtype = node.dtype
        attr = {
            'dtype': string(dtype),
            'shape': shape,
            'name': string(node.layer_name),
            'append_batch_size': False
        }
        node.fluid_code.add_layer("data",
                                  inputs=None,
                                  output=node,
                                  param_attr=attr)

    def Const(self, node):
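        # Non-scalar constants are registered in self.weights and loaded as
        # parameters; a scalar is baked into default_initializer instead,
        # since create_parameter needs a non-empty shape.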
        shape = node.out_shapes[0]
        dtype = node.dtype
        value = node.value
        initializer = "Constant(0.0)"
        if len(shape) == 0:
            assert value.size == 1, "Unexpected situation happened"
            shape = [1]
            initializer = "Constant({})".format(value)

        attr = {
            'dtype': string(dtype),
            'shape': shape,
            'name': string(node.layer_name),
            'default_initializer': initializer
        }
        node.fluid_code.add_layer("create_parameter",
                                  inputs=None,
                                  output=node,
                                  param_attr=attr)
        self.weights[node.layer_name.replace('/', '_')] = node.value

    def Transpose(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        perm = self.graph.get_node(node.layer.input[1], copy=True)
        assert perm.layer_type == "Const", "Perm of transpose OP should be Const"
        del self.weights[perm.layer_name.replace('/', '_')]
        perm.fluid_code.clear()
        perm = perm.value.tolist()

        attr = {'perm': perm}
        node.fluid_code.add_layer("transpose",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def RealDiv(self, node):
        self.elementwise_operator(node, "elementwise_div")

    def Relu(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        node.fluid_code.add_layer("relu",
                                  inputs=input,
                                  output=node,
                                  param_attr=None)

    def Squeeze(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        squeeze_dims = node.get_attr('squeeze_dims')
        attr = {'axes': squeeze_dims}
        node.fluid_code.add_layer("squeeze",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def BiasAdd(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        bias = self.graph.get_node(node.layer.input[1], copy=True)
        inputs = {'x': input, 'y': bias}
        node.fluid_code.add_layer("elementwise_add",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=None)

    def Identity(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        node.fluid_code.add_layer("assign",
                                  inputs=input,
                                  output=node,
                                  param_attr=None)

    def MaxPool(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)

        in_shape = input.out_shapes[0]
        if in_shape.count(-1) > 2:
            in_shape = self.decoder.infer_tensor(input).shape

        k_size = node.get_attr("ksize")
        strides = node.get_attr("strides")
        data_format = node.get_attr("data_format").decode()
        pad_mode = node.get_attr("padding").decode()
        channel_first = data_format == "NCHW"

        if not channel_first:
            attr = {"perm": [0, 3, 1, 2]}
            node.fluid_code.add_layer("transpose",
                                      inputs=input,
                                      output=node,
                                      param_attr=attr)
            in_shape = [in_shape[i] for i in [0, 3, 1, 2]]
            strides = [strides[i] for i in [0, 3, 1, 2]]
            k_size = [k_size[i] for i in [0, 3, 1, 2]]

        if pad_mode == "SAME":
            pad_h = get_same_padding(in_shape[2], k_size[2], strides[2])
            pad_w = get_same_padding(in_shape[3], k_size[3], strides[3])
            pad_h = pad_h[0] + pad_h[1]
            pad_w = pad_w[0] + pad_w[1]
            attr = {"paddings": [0, pad_h, 0, pad_w], "pad_value": -10000.0}
            if pad_h + pad_w != 0:
                node.fluid_code.add_layer(
                    "pad2d",
                    inputs=input if channel_first else node,
                    output=node,
                    param_attr=attr)
        attr = {
            "pool_size": k_size[2:4],
            "pool_type": string("max"),
            "pool_stride": strides[2:4]
        }
        node.fluid_code.add_layer(
            "pool2d",
            inputs=input if channel_first and pad_mode != "SAME" else node,
            output=node,
            param_attr=attr)

        if not channel_first:
            attr = {"perm": [0, 2, 3, 1]}
            node.fluid_code.add_layer("transpose",
                                      inputs=node,
                                      output=node,
                                      param_attr=attr)

    def Conv2D(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        kernel = self.graph.get_node(node.layer.input[1], copy=True)
        assert kernel.layer_type == "Const", "Kernel of Conv2D should be Const"
        self.omit_nodes.append(kernel.layer_name)

        node.fluid_code.add_note("#{} : {}".format(node.layer.name,
                                                   node.layer_name))

        in_shape = input.out_shapes[0]
        if in_shape.count(-1) > 2:
            in_shape = self.decoder.infer_tensor(input).shape
        k_size = kernel.out_shapes[0]
        if k_size.count(-1) > 2:
            k_size = self.decoder.infer_tensor(kernel).shape

        strides = node.get_attr("strides")
        dilations = node.get_attr("dilations")
        data_format = node.get_attr("data_format").decode()
        pad_mode = node.get_attr("padding").decode()
        channel_first = data_format == "NCHW"

        if not channel_first:
            self.weights[kernel.layer_name.replace('/', '_')] = numpy.transpose(
                kernel.value, (3, 2, 0, 1))
            attr = {"perm": [0, 3, 1, 2]}
            node.fluid_code.add_layer("transpose",
                                      inputs=input,
                                      output=node,
                                      param_attr=attr)
            in_shape = [in_shape[i] for i in [0, 3, 1, 2]]
            strides = [strides[i] for i in [0, 3, 1, 2]]
            dilations = [dilations[i] for i in [0, 3, 1, 2]]

        if pad_mode == "SAME":
            pad_h = get_same_padding(in_shape[2], k_size[0], strides[2])
            pad_w = get_same_padding(in_shape[3], k_size[1], strides[3])
            attr = {"paddings": pad_h + pad_w, "pad_value": 0.0}
            if pad_h[0] + pad_h[1] + pad_w[0] + pad_w[1] != 0:
                node.fluid_code.add_layer(
                    "pad2d",
                    inputs=input if channel_first else node,
                    output=node,
                    param_attr=attr)
        attr = {
            "bias_attr": False,
            "param_attr": string(kernel.layer_name),
            "num_filters": k_size[3],
            "filter_size": k_size[0:2],
            "stride": strides[2:4],
            "dilation": dilations[2:4]
        }
        node.fluid_code.add_layer(
            "conv2d",
            inputs=input if channel_first and pad_mode != "SAME" else node,
            output=node,
            param_attr=attr)

        if not channel_first:
            attr = {"perm": [0, 2, 3, 1]}
            node.fluid_code.add_layer("transpose",
                                      inputs=node,
                                      output=node,
                                      param_attr=attr)

    def Relu6(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        node.fluid_code.add_layer("relu6",
                                  inputs=input,
                                  output=node,
                                  param_attr=None)

    def FusedBatchNorm(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        gamma = self.graph.get_node(node.layer.input[1], copy=True)
        beta = self.graph.get_node(node.layer.input[2], copy=True)
        moving_mean = self.graph.get_node(node.layer.input[3], copy=True)
        moving_var = self.graph.get_node(node.layer.input[4], copy=True)
        data_format = node.get_attr("data_format").decode()
        channel_first = data_format == "NCHW"

        assert gamma.layer_type == "Const"
        assert beta.layer_type == "Const"
        assert moving_mean.layer_type == "Const"
        assert moving_var.layer_type == "Const"
        self.omit_nodes.append(gamma.layer_name)
        self.omit_nodes.append(beta.layer_name)
        self.omit_nodes.append(moving_mean.layer_name)
        self.omit_nodes.append(moving_var.layer_name)

        if not channel_first:
            attr = {"perm": [0, 3, 1, 2]}
            node.fluid_code.add_layer("transpose",
                                      inputs=input,
                                      output=node,
                                      param_attr=attr)

        attr = {
            "epsilon": node.get_attr("epsilon"),
            "param_attr": string(gamma.layer_name),
            #            "data_layout": string(node.get_attr("data_format").decode()),
            "bias_attr": string(beta.layer_name),
            "moving_mean_name": string(moving_mean.layer_name),
            "moving_variance_name": string(moving_var.layer_name),
            "is_test": True
        }

        node.fluid_code.add_layer("batch_norm",
                                  inputs=input if channel_first else node,
                                  output=node,
                                  param_attr=attr)

        if not channel_first:
            attr = {"perm": [0, 2, 3, 1]}
            node.fluid_code.add_layer("transpose",
                                      inputs=node,
                                      output=node,
                                      param_attr=attr)

    def DepthwiseConv2dNative(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        kernel = self.graph.get_node(node.layer.input[1], copy=True)
        assert kernel.layer_type == "Const", "Kernel of DepthwiseConv2DNative should be Const"
        self.omit_nodes.append(kernel.layer_name)

        node.fluid_code.add_note("#{} : {}".format(node.layer.name,
                                                   node.layer_name))

        in_shape = input.out_shapes[0]
        if in_shape.count(-1) > 2:
            in_shape = self.decoder.infer_tensor(input).shape
        k_size = kernel.out_shapes[0]
        if k_size.count(-1) > 2:
            k_size = self.decoder.infer_tensor(kernel).shape

        strides = node.get_attr("strides")
        dilations = node.get_attr("dilations")
        data_format = node.get_attr("data_format").decode()
        pad_mode = node.get_attr("padding").decode()
        channel_first = data_format == "NCHW"

        if not channel_first:
            self.weights[kernel.layer_name.replace('/', '_')] = numpy.transpose(
                kernel.value, (2, 3, 0, 1))
            attr = {"perm": [0, 3, 1, 2]}
            node.fluid_code.add_layer("transpose",
                                      inputs=input,
                                      output=node,
                                      param_attr=attr)
            in_shape = [in_shape[i] for i in [0, 3, 1, 2]]
            strides = [strides[i] for i in [0, 3, 1, 2]]
            dilations = [dilations[i] for i in [0, 3, 1, 2]]

        if pad_mode == "SAME":
            pad_h = get_same_padding(in_shape[2], k_size[0], strides[2])
            pad_w = get_same_padding(in_shape[3], k_size[1], strides[3])
            attr = {"paddings": pad_h + pad_w, "pad_value": 0.0}
            if pad_h[0] + pad_h[1] + pad_w[0] + pad_w[1] != 0:
                node.fluid_code.add_layer("pad2d",
                                          inputs=input if channel_first
                                          and pad_mode != "SAME" else node,
                                          output=node,
                                          param_attr=attr)
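        # Depthwise convolution is expressed via fluid's grouped conv2d:
        # groups is derived from the input channels and the kernel's channel
        # multiplier so that the convolution acts per input channel.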
        attr = {
            "bias_attr": False,
            "param_attr": string(kernel.layer_name),
            "num_filters": in_shape[1],
            "filter_size": k_size[0:2],
            "stride": strides[2:4],
            "dilation": dilations[2:4],
            "groups": k_size[3] * in_shape[1]
        }
        node.fluid_code.add_layer("conv2d",
                                  inputs=input if channel_first else node,
                                  output=node,
                                  param_attr=attr)

        if not channel_first:
            attr = {"perm": [0, 2, 3, 1]}
            node.fluid_code.add_layer("transpose",
                                      inputs=node,
                                      output=node,
                                      param_attr=attr)

    def Shape(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        node.fluid_code.add_layer("shape",
                                  inputs=input,
                                  output=node,
                                  param_attr=None)

    def Reshape(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        param = self.graph.get_node(node.layer.input[1], copy=True)
        if param.layer_type == "Const":
            attr = {"shape": param.value.tolist()}
            self.omit_nodes.append(param.layer_name)
        else:
            # Trick to handle a shape tensor whose value is only known at
            # run time: infer it where possible, otherwise split it into
            # scalars and rebuild the target shape in the generated code.
            shape = self.decoder.infer_shape_tensor(param, node.out_shapes[0])
            if shape.count(-1) <= 1:
                attr = {"shape": shape}
                self.omit_nodes.append(param.layer_name)
            else:
                assert len(param.out_shapes[0]
                           ) == 1, "Unexpected situation of shape parameter"
                attr = {"shape": [-1]}
                node.fluid_code.add_layer("reshape",
                                          inputs=param,
                                          output="shape_param",
                                          param_attr=attr)
                attr = {"num_or_sections": param.out_shapes[0][0], "dim": 0}
                node.fluid_code.add_layer("split",
                                          inputs="shape_param",
                                          output=node,
                                          param_attr=attr)
                new_param = "["
                for i in range(param.out_shapes[0][0]):
                    new_param += (node.layer_name + "[{}]".format(i) + ", ")
                new_param = new_param.strip(", ") + "]"
                attr = {"shape": new_param}
        node.fluid_code.add_layer("reshape",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)
        # temporary shape inference fix


#        if param.layer_type == "Pack":
#            shape_slices = list()
#            for i in range(len(param.layer.input)):
#                slice = self.graph.get_node(param.layer.input[i], copy=True)
#                if slice.layer_type == "Const":
#                    shape_slices.append(slice.value.tolist())
#                else:
#                    shape_slices.append(0)
#            if shape_slices.count(-1) == 0:
#                shape_slices[shape_slices.index(0)] = -1
#            attr = {"shape": shape_slices}
#            node.fluid_code.add_layer("reshape",
#                                      inputs=node,
#                                      output=node,
#                                      param_attr=attr)

    def Add(self, node):
        self.elementwise_operator(node, "elementwise_add")

    def AvgPool(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)

        in_shape = input.out_shapes[0]
        if in_shape.count(-1) > 2:
            in_shape = self.decoder.infer_tensor(input).shape

        k_size = node.get_attr("ksize")
        strides = node.get_attr("strides")
        data_format = node.get_attr("data_format").decode()
        pad_mode = node.get_attr("padding").decode()
        channel_first = data_format == "NCHW"

        if not channel_first:
            attr = {"perm": [0, 3, 1, 2]}
            node.fluid_code.add_layer("transpose",
                                      inputs=input,
                                      output=node,
                                      param_attr=attr)
            in_shape = [in_shape[i] for i in [0, 3, 1, 2]]
            strides = [strides[i] for i in [0, 3, 1, 2]]
            k_size = [k_size[i] for i in [0, 3, 1, 2]]

        attr = {
            "pool_size": k_size[2:4],
            "pool_type": string("avg"),
            "pool_stride": strides[2:4]
        }
        if pad_mode == "SAME":
            pad_h = get_same_padding(in_shape[2], k_size[2], strides[2])
            pad_w = get_same_padding(in_shape[3], k_size[3], strides[3])
            assert pad_h[0] == pad_h[1] and pad_w[0] == pad_w[
                1], "Cannot map AvgPool"
            attr["pool_padding"] = [pad_h[0], pad_w[0]]
        node.fluid_code.add_layer("pool2d",
                                  inputs=input if channel_first else node,
                                  output=node,
                                  param_attr=attr)

        if not channel_first:
            attr = {"perm": [0, 2, 3, 1]}
            node.fluid_code.add_layer("transpose",
                                      inputs=node,
                                      output=node,
                                      param_attr=attr)

    def Softmax(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        node.fluid_code.add_layer("softmax",
                                  inputs=input,
                                  output=node,
                                  param_attr=None)

    def Sigmoid(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        node.fluid_code.add_layer("sigmoid",
                                  inputs=input,
                                  output=node,
                                  param_attr=None)

    def Maximum(self, node):
        self.elementwise_operator(node, "elementwise_max")

    def SplitV(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        num_sections = self.graph.get_node(node.layer.input[1], copy=True)
        dim = self.graph.get_node(node.layer.input[2], copy=True)
        assert num_sections.layer_type == "Const"
        assert dim.layer_type == "Const"
        self.omit_nodes.append(num_sections.layer_name)
        self.omit_nodes.append(dim.layer_name)
        attr = {
            "num_or_sections": num_sections.value.tolist(),
            "dim": dim.value
        }
        node.fluid_code.add_layer("split",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Exp(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        node.fluid_code.add_layer("exp",
                                  inputs=input,
                                  output=node,
                                  param_attr=None)

    def ConcatV2(self, node):
        inputs = [
            self.graph.get_node(name, copy=True)
            for name in node.layer.input[:-1]
        ]
        axis = self.graph.get_node(node.layer.input[-1], copy=True)
        assert axis.layer_type == "Const"
        self.omit_nodes.append(axis.layer_name)
        attr = {"axis": axis.value}
        node.fluid_code.add_layer("concat",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=attr)

    def Tile(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        expand_times = self.graph.get_node(node.layer.input[1], copy=True)
        assert expand_times.layer_type == "Const"
        self.omit_nodes.append(expand_times.layer_name)
        attr = {"expand_times": expand_times.value.tolist()}
        node.fluid_code.add_layer("expand",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Pack(self, node):
        inputs = [
            self.graph.get_node(name, copy=True) for name in node.layer.input
        ]
        attr = {"axis": node.get_attr("axis")}
        node.fluid_code.add_layer("stack",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=attr)

    def Pad(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        paddings = self.graph.get_node(node.layer.input[1], copy=True)
        assert paddings.layer_type == "Const", "Padding should be Const"
        self.omit_nodes.append(paddings.layer_name)
        attr = {"paddings": paddings.value.tolist()}
        node.fluid_code.add_layer("pad",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Range(self, node):
        start = self.graph.get_node(node.layer.input[0], copy=True)
        limit = self.graph.get_node(node.layer.input[1], copy=True)
        delta = self.graph.get_node(node.layer.input[2], copy=True)
        if start.layer_type == "Const":
            self.omit_nodes.append(start.layer_name)
            start = start.value
        if limit.layer_type == "Const":
            self.omit_nodes.append(limit.layer_name)
            limit = limit.value
        if delta.layer_type == "Const":
            self.omit_nodes.append(delta.layer_name)
            delta = delta.value
        inputs = {"start": start, "end": limit, "step": delta}
        attr = {"dtype": string(node.dtype)}
        node.fluid_code.append("range",
                               inputs=inputs,
                               output=node,
                               param_attr=None)

    def Mul(self, node):
        self.elementwise_operator(node, "elementwise_mul")

    def Sub(self, node):
        self.elementwise_operator(node, "elementwise_sub")

    def Rsqrt(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        node.fluid_code.add_layer("rsqrt",
                                  inputs=input,
                                  output=node,
                                  param_attr=None)

    def swish_f32(self, node):
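        # swish(x) = x * sigmoid(x): emitted as a sigmoid layer followed by
        # an elementwise multiply of the original input with its result.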
        input = self.graph.get_node(node.layer.input[0], copy=True)
        node.fluid_code.add_layer("sigmoid",
                                  inputs=input,
                                  output=node,
                                  param_attr=None)
        inputs = {"x": input, "y": node}
        node.fluid_code.add_layer("elementwise_mul",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=None)

    def Mean(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        reduce_idx = self.graph.get_node(node.layer.input[1], copy=True)
        assert reduce_idx.layer_type == "Const", "Only Const parameter [reduce_idx] is supported"
        keep_dims = node.get_attr("keep_dims")
        attr = {"dim": reduce_idx.value.tolist(), "keep_dim": keep_dims}
        node.fluid_code.add_layer("reduce_mean",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def MatMul(self, node):
        x = self.graph.get_node(node.layer.input[0], copy=True)
        y = self.graph.get_node(node.layer.input[1], copy=True)
        transpose_a = node.get_attr('transpose_a')
        transpose_b = node.get_attr('transpose_b')
        inputs = {"x": x, "y": y}
        # fix paddle shape infer problem
        # should be removed after paddle 1.6
        if x.out_shapes[0][-1] < 0 and y.out_shapes[0][0] > 0:
            shape = x.out_shapes[0]
            shape[-1] = y.out_shapes[0][0]
            attr = {"shape": shape}
            node.fluid_code.add_layer("reshape",
                                      inputs=x,
                                      output=x,
                                      param_attr=attr)
        attr = {"transpose_x": transpose_a, "transpose_y": transpose_b}
        node.fluid_code.add_layer("matmul",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=attr)

    def ArgMax(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        axis = self.graph.get_node(node.layer.input[1], copy=True)
        assert axis.layer_type == "Const", "ArgMax only support Const parameter"
        self.omit_nodes.append(axis.layer_name)
        attr = {"axis": axis.value}
        node.fluid_code.add_layer("argmax",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def StridedSlice(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        begin = self.graph.get_node(node.layer.input[1], copy=True)
        end = self.graph.get_node(node.layer.input[2], copy=True)
        strides = self.graph.get_node(node.layer.input[3], copy=True)
        assert begin.layer_type == "Const"
        assert end.layer_type == "Const"
        assert strides.layer_type == "Const"
        self.omit_nodes.append(begin.layer_name)
        self.omit_nodes.append(end.layer_name)
        self.omit_nodes.append(strides.layer_name)
        strides = strides.value.tolist()
        assert len(set(strides)) == 1 and strides[0] == 1

        attr = {
            "axes": list(range(len(strides))),
            "starts": begin.value.tolist(),
            "ends": end.value.tolist()
        }
        node.fluid_code.add_layer("slice",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Slice(self, node):
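        # TF Slice(input, begin, size) maps onto fluid's crop with
        # offsets = begin and shape = size; non-Const begin/size values are
        # resolved ahead of time through tensor inference.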
        input = self.graph.get_node(node.layer.input[0], copy=True)
        begin = self.graph.get_node(node.layer.input[1], copy=True)
        size = self.graph.get_node(node.layer.input[2], copy=True)
        #        assert begin.layer_type == "Const"
        #        assert size.layer_type == "Const"
        self.omit_nodes.append(begin.layer_name)
        self.omit_nodes.append(size.layer_name)
        if begin.layer_type == "Const":
            begin = begin.value.tolist()
        else:
            begin = self.decoder.infer_tensor(begin).tolist()
        if size.layer_type == "const":
            size = size.value.tolist()
        else:
            size = self.decoder.infer_tensor(size).tolist()

        attr = {"shape": size, "offsets": begin}
        node.fluid_code.add_layer("crop",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Abs(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        node.fluid_code.add_layer("abs",
                                  inputs=input,
                                  output=node,
                                  param_attr=None)

    def Conv2DBackpropInput(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        kernel = self.graph.get_node(node.layer.input[1], copy=True)
        assert kernel.layer_type == "Const", "Kernel of Conv2DBackpropInput should be Const"
        self.omit_nodes.append(kernel.layer_name)

        node.fluid_code.add_note("#{} : {}".format(node.layer.name,
                                                   node.layer_name))

        in_shape = input.out_shapes[0]
        if in_shape.count(-1) > 2:
            in_shape = self.decoder.infer_tensor(input).shape
        k_size = kernel.out_shapes[0]
        if k_size.count(-1) > 2:
            k_size = self.decoder.infer_tensor(kernel).shape

        strides = node.get_attr("strides")
        dilations = node.get_attr("dilations")
        data_format = node.get_attr("data_format").decode()
        pad_mode = node.get_attr("padding").decode()
        channel_first = data_format == "NCHW"

        if not channel_first:
            self.weights[kernel.layer_name.replace('/', '_')] = numpy.transpose(
                kernel.value, (3, 2, 0, 1))
            attr = {"perm": [0, 3, 1, 2]}
            node.fluid_code.add_layer("transpose",
                                      inputs=input,
                                      output=node,
                                      param_attr=attr)
            in_shape = [in_shape[i] for i in [0, 3, 1, 2]]
            strides = [strides[i] for i in [0, 3, 1, 2]]
            dilations = [dilations[i] for i in [0, 3, 1, 2]]

        if pad_mode == "SAME":
            pad_h = get_same_padding(in_shape[2], k_size[0], strides[2])
            pad_w = get_same_padding(in_shape[3], k_size[1], strides[3])
            attr = {"paddings": pad_h + pad_w, "pad_value": 0.0}
            if pad_h[0] + pad_h[1] + pad_w[0] + pad_w[1] != 0:
                node.fluid_code.add_layer(
                    "pad2d",
                    inputs=input if channel_first else node,
                    output=node,
                    param_attr=attr)
        attr = {
            "bias_attr": False,
            "param_attr": string(kernel.layer_name),
            "num_filters": k_size[3],
            "filter_size": k_size[0:2],
            "stride": strides[2:4],
            "dilation": dilations[2:4]
        }
        node.fluid_code.add_layer(
            "conv2d_transpose",
            inputs=input if channel_first and pad_mode != "SAME" else node,
            output=node,
            param_attr=attr)

        if not channel_first:
            attr = {"perm": [0, 2, 3, 1]}
            node.fluid_code.add_layer("transpose",
                                      inputs=node,
                                      output=node,
                                      param_attr=attr)

    def Max(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        reduce_idx = self.graph.get_node(node.layer.input[1], copy=True)
        assert reduce_idx.layer_type == "Const", "Only Const parameter [reduce_idx] is supported"
        keep_dims = node.get_attr("keep_dims")
        attr = {"dim": reduce_idx.value.tolist(), "keep_dim": keep_dims}
        node.fluid_code.add_layer("reduce_max",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Sum(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        reduce_idx = self.graph.get_node(node.layer.input[1], copy=True)
        assert reduce_idx.layer_type == "Const", "Only Const parameter [reduce_idx] is supported"
        keep_dims = node.get_attr("keep_dims")
        attr = {"dim": reduce_idx.value.tolist(), "keep_dim": keep_dims}
        node.fluid_code.add_layer("reduce_sum",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def Cast(self, node):
        input = self.graph.get_node(node.layer.input[0], copy=True)
        dtype = node.dtype_map[node.get_attr('DstT')]
        attr = {"dtype": string(dtype)}
        node.fluid_code.add_layer("cast",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)

    def FloorDiv(self, node):
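        # x // y is emulated as elementwise_div followed by floor, which
        # matches TF's FloorDiv semantics for floating-point inputs.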
        x = self.graph.get_node(node.layer.input[0], copy=True)
        y = self.graph.get_node(node.layer.input[1], copy=True)
        inputs = {'x': x, 'y': y}
        node.fluid_code.add_layer("elementwise_div",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=None)
        node.fluid_code.add_layer("floor",
                                  inputs=node,
                                  output=node,
                                  param_attr=None)

    def Split(self, node):
        dim = self.graph.get_node(node.layer.input[0], copy=True)
        input = self.graph.get_node(node.layer.input[1], copy=True)
        assert dim.layer_type == "Const"
        self.omit_nodes.append(dim.layer_name)
        num_split = node.get_attr('num_split')
        attr = {"num_or_sections": num_split, "dim": dim.value}
        node.fluid_code.add_layer("split",
                                  inputs=input,
                                  output=node,
                                  param_attr=attr)
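

# Minimal usage sketch (hedged: assumes TFDecoder accepts a frozen-model
# path; the exact decoder construction may differ):
#
#     from x2paddle.decoder.tf_decoder import TFDecoder
#     decoder = TFDecoder("frozen_model.pb")
#     mapper = TFOpMapper(decoder)
#     mapper.run()  # generate fluid code for every node in topological order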