diff --git a/x2paddle/decoder/tf_decoder.py b/x2paddle/decoder/tf_decoder.py
index e7d63aa39a009e4b8dda3b9e625a8e81a045f640..dc04172ac9e3fbc659cf5ce05ec297a1fc4764ab 100644
--- a/x2paddle/decoder/tf_decoder.py
+++ b/x2paddle/decoder/tf_decoder.py
@@ -91,12 +91,8 @@ class TFGraphNode(GraphNode):
 
     @property
     def name(self):
-        multi_out_ops = ['Split', 'SplitV', 'IteratorV2']
-        if self.layer_type in multi_out_ops:
-            if self.layer_name.count(':') > 0:
-                return self.layer_name.replace(':', '_p')
-            else:
-                return "{}_p0".format(self.layer_name)
+        if hasattr(self, 'index'):
+            return self.layer_name + "_p{}".format(self.index)
         return self.layer_name
 
     def get_attr(self, name):
diff --git a/x2paddle/op_mapper/tf_op_mapper_nhwc.py b/x2paddle/op_mapper/tf_op_mapper_nhwc.py
index 53e52a4b177665e0b376f93b3da21423ed9ecc48..6b00c5c212d4d0cd92067dc4ec83d8f543f244c9 100644
--- a/x2paddle/op_mapper/tf_op_mapper_nhwc.py
+++ b/x2paddle/op_mapper/tf_op_mapper_nhwc.py
@@ -197,26 +197,33 @@ class TFOpMapperNHWC(OpMapper):
             perm=perm)
 
     def Fill(self, node):
-        dims = self.graph.get_node(node.layer.input[0], copy=True)
-        input_value = self.graph.get_node(node.layer.input[1], copy=True)
+        dims = self.graph.get_node(node.layer.input[0])
+        input_value = self.graph.get_node(node.layer.input[1])
+        inputs = dict()
+        attr = dict()
 
         assert input_value.layer_type == "Const", "Value of fill OP should be Const"
+        if dims.layer_type == "Const":
+            attr["shape"] = dims.value.tolist()
+        else:
+            inputs["shape"] = dims.name
+        attr["dtype"] = string(input_value.dtype)
+        attr["value"] = input_value.value
 
-        input_value = input_value.value
-        input_dtype = string(input_value.dtype)
         program.add_layer(
             "fluid.layers.fill_constant",
-            inputs={},
+            inputs=inputs,
             outputs=[node.name],
-            shape=dims,
-            dtype=string(input_dtype),
-            value=input_value)
+            **attr)
 
     def DepthToSpace(self, node):
-        input = self.graph.get_node(node.layer.input[0], copy=True)
+        input = self.graph.get_node(node.layer.input[0])
         block_size = node.get_attr("block_size")
         data_format = node.get_attr("data_format").decode()
-        n, h, w, c = input.out_shapes[0]
+        if data_format == "NHWC":
+            n, h, w, c = input.out_shapes[0]
+        else:
+            n, c, h, w = input.out_shapes[0]
 
         input_name = input.name
         if data_format == "NHWC":
@@ -251,8 +258,8 @@
             shape=[0, c, h, w])
 
         program.add_layer(
-            kernel="fluid.layers.pixed_shuffle",
-            inputs={"input": reshape_name},
+            kernel="fluid.layers.pixel_shuffle",
+            inputs={"x": reshape_name},
             outputs=[node.name],
             upscale_factor=block_size)
 
@@ -264,7 +271,7 @@
             perm=[0, 2, 3, 1])
 
     def MaxPool(self, node):
-        input = self.graph.get_node(node.layer.input[0], copy=True)
+        input = self.graph.get_node(node.layer.input[0])
 
         k_size = node.get_attr("ksize")
         strides = node.get_attr("strides")
@@ -308,17 +315,21 @@
         dilations = node.get_attr("dilations")
         data_format = node.get_attr("data_format").decode()
         pad_mode = node.get_attr("padding").decode()
+        if data_format == "NHWC":
+            n, h, w, c = input.out_shapes[0]
+        else:
+            n, c, h, w = input.out_shapes[0]
 
         if kernel.layer_type == 'Const':
             kernel_value = kernel.value
-            kernel_weight_name = kernel.layer_name.replace('/', '_')
+            kernel_weight_name = kernel.name.replace('/', '_')
         else:
             kernel_value = self.decoder.infer_tensor(kernel)
             if kernel.layer_type == 'Split':
-                kernel_weight_name = "{}_{}_kernel".format(node.layer_name,
-                                                           kernel.layer_name)
+                kernel_weight_name = "{}_{}_kernel".format(node.name,
+                                                           kernel.name)
            else:
-                kernel_weight_name = kernel.layer_name.replace('/', '_')
+                kernel_weight_name = kernel.name.replace('/', '_')
 
         program.parameters[kernel_weight_name] = numpy.transpose(kernel_value,
                                                                  (3, 2, 0, 1))
@@ -334,6 +345,16 @@
                 perm=[0, 3, 1, 2])
             input_name = transpose_name
 
+        if c == -1:
+            attr = {"shape": [0, k_size[2], 0, 0]}
+            node.fluid_code.add_layer(
+                "reshape", inputs=input, output=input, param_attr=attr)
+            program.add_layer(
+                kernel="fluid.layers.reshape",
+                inputs={"x": input_name},
+                outputs=[input_name],
+                shape=[0, k_size[2], 0, 0])
+
         program.add_layer(
             kernel="fluid.layers.conv2d",
             inputs={"input": input_name},
@@ -529,6 +550,12 @@
             transpose_x=transpose_a,
             transpose_y=transpose_b)
 
+    def BatchMatMul(self, node):
+        return self.MatMul(node)
+
+    def BatchMatMulV2(self, node):
+        return self.MatMul(node)
+
     def DepthwiseConv2dNative(self, node):
         input = self.graph.get_node(node.layer.input[0])
         kernel = self.graph.get_node(node.layer.input[1])
@@ -578,7 +605,7 @@
             perm=[0, 2, 3, 1])
 
     def AvgPool(self, node):
-        input = self.graph.get_node(node.layer.input[0], copy=True)
+        input = self.graph.get_node(node.layer.input[0])
 
         k_size = node.get_attr("ksize")
         strides = node.get_attr("strides")
@@ -701,7 +728,7 @@
         if len(new_axes) > 0:
             program.add_layer(
                 kernel="fluid.layers.unsqueeze",
-                inputs={"x": node.name},
+                inputs={"input": node.name},
                 outputs=[node.name],
                 axes=new_axes)
         if len(shrink_axes) > 0:
@@ -710,7 +737,7 @@
             else:
                 program.add_layer(
                     kernel="fluid.layers.unsqueeze",
-                    inputs={"x": node.name},
+                    inputs={"input": node.name},
                     outputs=[node.name],
                     axes=new_axes)
 
@@ -741,14 +768,16 @@
             begin = begin.value.tolist()
             attrs['offsets'] = begin
         else:
-            shape = begin.out_shapes[0]
-            reshape_name = gen_name("slice", "reshape")
-            program.add_layer(
-                kernel="fluid.layers.reshape",
-                inputs={"x": begin.name},
-                outputs=[reshape_name],
-                shape=shape)
-            inputs['offsets'] = reshape_name
+            # shape = begin.out_shapes[0]
+            # reshape_name = gen_name("slice", "reshape")
+            # program.add_layer(
+            #     kernel="fluid.layers.reshape",
+            #     inputs={"x": begin.name},
+            #     outputs=[reshape_name],
+            #     shape=shape)
+            # inputs['offsets'] = reshape_name
+            begin = self.decoder.infer_tensor(begin).tolist()
+            attrs['offsets'] = begin
         if size.layer_type == "Const":
             size = size.value.tolist()
             attrs['shape'] = size
@@ -888,7 +917,7 @@
             keep_dim=keep_dims)
 
     def RandomUniform(self, node):
-        shape = self.graph.get_node(node.layer.input[0], copy=True)
+        shape = self.graph.get_node(node.layer.input[0])
         if shape.layer_type == "Const":
             shape = shape.value.tolist()
             program.add_layer(
@@ -966,3 +995,147 @@
             inputs={"x": node.name},
             outputs=[node.name],
             perm=[0, 2, 3, 1])
+
+    def Tile(self, node):
+        input = self.graph.get_node(node.layer.input[0])
+        expand_times = self.graph.get_node(node.layer.input[1])
+        inputs = {"x": input.name}
+        attr = dict()
+        if expand_times.layer_type == "Const":
+            expand_times = expand_times.value.tolist()
+            attr["expand_times"] = expand_times
+        else:
+            inputs["expand_times"] = expand_times.name
+
+        program.add_layer(
+            kernel="fluid.layers.expand",
+            inputs=inputs,
+            outputs=[node.name],
+            **attr)
+
+    def Range(self, node):
+        start = self.graph.get_node(node.layer.input[0])
+        limit = self.graph.get_node(node.layer.input[1])
+        delta = self.graph.get_node(node.layer.input[2])
+        inputs = dict()
+        attr = dict()
+
+        if start.layer_type == "Const":
+            attr["start"] = start.value
+        else:
+            inputs["start"] = start.name
+        if limit.layer_type == "Const":
+            attr["end"] = limit.value
+        else:
+            inputs["end"] = limit.name
+        if delta.layer_type == "Const":
+            attr["step"] = delta.value
+        else:
+            inputs["step"] = delta.name
+        attr["dtype"] = string(node.dtype)
+
+        program.add_layer(
+            kernel="fluid.layers.range",
+            inputs=inputs,
+            outputs=[node.name],
+            **attr)
+
+    def SquaredDifference(self, node):
+        x = self.graph.get_node(node.layer.input[0])
+        y = self.graph.get_node(node.layer.input[1])
+        inputs = {"x": x.name, "y": y.name}
+        program.add_layer(
+            "fluid.layers.elementwise_sub", inputs=inputs, outputs=[node.name])
+        inputs = {"x": node.name, "y": node.name}
+        program.add_layer(
+            "fluid.layers.elementwise_mul", inputs=inputs, outputs=[node.name])
+
+    def OneHot(self, node):
+        input = self.graph.get_node(node.layer.input[0])
+        depth = self.graph.get_node(node.layer.input[1])
+        on_value = self.graph.get_node(node.layer.input[2])
+        off_value = self.graph.get_node(node.layer.input[3])
+        assert depth.layer_type == 'Const', 'Parameter depth should be Const in OneHot'
+        assert on_value.layer_type == 'Const', 'Parameter on_value should be Const in OneHot'
+        assert off_value.layer_type == 'Const', 'Parameter off_value should be Const in OneHot'
+
+        attr = {'depth': depth.value}
+        on_value = on_value.value
+        off_value = off_value.value
+        assert math.fabs(on_value -
+                         1.0) < 1e-06, "on_value should be 1 in OneHot"
+        assert math.fabs(off_value -
+                         0.0) < 1e-06, "off_value should be 0 in OneHot"
+
+        program.add_layer(
+            "fluid.one_hot",
+            inputs={"input": input.name},
+            outputs=[node.name],
+            depth=depth.value)
+
+    def Pow(self, node):
+        x = self.graph.get_node(node.layer.input[0])
+        factor = self.graph.get_node(node.layer.input[1])
+        inputs = {"x": x.name}
+        attr = dict()
+        if factor.layer_type == 'Const':
+            attr["factor"] = factor.value.tolist()
+        else:
+            inputs["factor"] = factor.name
+        program.add_layer(
+            "fluid.layers.pow", inputs=inputs, outputs=[node.name], **attr)
+
+    def All(self, node):
+        input = self.graph.get_node(node.layer.input[0])
+        reduce_idx = self.graph.get_node(node.layer.input[1])
+        assert reduce_idx.layer_type == "Const", "Only support Const parameter[reduce_idx]"
+        attr = dict()
+        attr["dim"] = reduce_idx.value.tolist()
+        attr["keep_dim"] = node.get_attr("keep_dims")
+
+        program.add_layer(
+            "fluid.layers.reduce_all",
+            inputs={"input": input.name},
+            outputs=[node.name],
+            **attr)
+
+    def GatherV2(self, node):
+        embeddings = self.graph.get_node(node.layer.input[0])
+        index = self.graph.get_node(node.layer.input[1])
+        axis = self.graph.get_node(node.layer.input[2])
+        assert axis.layer_type == 'Const', "Only support Const parameter[axis]"
+        axis = axis.value.tolist()
+        assert axis == 0, "Only support axis=0 in GatherV2 OP"
+        index_name = index.name
+        if len(index.out_shapes[0]) != 1:
+            reshape_name = gen_name("gather", "reshape")
+            index_name = reshape_name
+            program.add_layer(
+                "fluid.layers.reshape",
+                inputs={"x": index.name},
+                outputs=[reshape_name],
+                shape=[-1])
+        inputs = {'input': embeddings.name, 'index': index_name}
+        program.add_layer(
+            "fluid.layers.gather",
+            inputs=inputs,
+            outputs=[node.name],
+            overwrite=False)
+
+    def ExpandDims(self, node):
+        x = self.graph.get_node(node.layer.input[0], copy=True)
+        y = self.graph.get_node(node.layer.input[1], copy=True)
+        inputs = {"input": x.name}
+        attr = dict()
+        if y.layer_type == 'Const':
+            dim = y.value.tolist()
+            if not isinstance(dim, list):
+                dim = [dim]
+            attr['axes'] = dim
+        else:
+            inputs['axes'] = y.name
+        program.add_layer(
+            "fluid.layers.unsqueeze",
+            inputs=inputs,
+            outputs=[node.name],
+            **attr)