diff --git a/op_list.md b/op_list.md
index 167ab5064dfb1785685013269ce2478db75258d7..3c4002c3ba8d7f2bf1ab88ee183f46d675af9b03 100644
--- a/op_list.md
+++ b/op_list.md
@@ -19,6 +19,8 @@
 | 37 | Slice | 38 | Sum | 39 | Max | 40 | Conv2DBackpropInput |
 | 41 | Cast | 42 | Split | 43 | Squeeze | 44 | ResizeNearestNeighbor |
 | 45 | Softmax | 46 | Range | 47 | ConcatV2 | 48 | MirrorPad |
+| 49 | Identity | 50 | GreaterEqual | 51 | StopGradient | 52 | Minimum |
+| 53 | RandomUniform | | | | | | |
 
 ## Caffe
diff --git a/x2paddle/__init__.py b/x2paddle/__init__.py
index 906d362f7de4fa4a809146a1ea93fb01d22ceab9..49e0fc1e09447ed8f5dd92db74c9d289f547a1a3 100644
--- a/x2paddle/__init__.py
+++ b/x2paddle/__init__.py
@@ -1 +1 @@
-__version__ = "0.6.0"
+__version__ = "0.7.0"
diff --git a/x2paddle/core/op_mapper.py b/x2paddle/core/op_mapper.py
index d311e3093f2697137dc334bf4b32a21465bb6328..77876bc6db485d91d66aa125d2efb30828e06bc0 100644
--- a/x2paddle/core/op_mapper.py
+++ b/x2paddle/core/op_mapper.py
@@ -25,7 +25,8 @@ def export_paddle_param(param, param_name, dir):
         "int64": [framework_pb2.VarType.INT64, 'q'],
         "float16": [framework_pb2.VarType.FP16, 'e'],
         "float32": [framework_pb2.VarType.FP32, 'f'],
-        "float64": [framework_pb2.VarType.FP64, 'd']
+        "float64": [framework_pb2.VarType.FP64, 'd'],
+        "bool": [framework_pb2.VarType.BOOL, None]
     }
     shape = param.shape
     if len(shape) == 0:
diff --git a/x2paddle/decoder/tf_decoder.py b/x2paddle/decoder/tf_decoder.py
index 06c30529b3a8a68691301324bd143c2e3db0e80d..acd149e52bbc844dd11b76e4a70a0967ff5fbf45 100644
--- a/x2paddle/decoder/tf_decoder.py
+++ b/x2paddle/decoder/tf_decoder.py
@@ -25,20 +25,26 @@ import sys
 class TFGraphNode(GraphNode):
     def __init__(self, layer, layer_name=None, data_format="NHWC"):
         if layer_name is None:
-            super(TFGraphNode,
-                  self).__init__(layer,
-                                 layer.name.replace('/', '_').replace('-', '_'))
+            super(TFGraphNode, self).__init__(
+                layer,
+                layer.name.replace('/', '_').replace('-', '_').replace('^', ''))
         else:
-            super(TFGraphNode,
-                  self).__init__(layer,
-                                 layer_name.replace('/', '_').replace('-', '_'))
+            super(TFGraphNode, self).__init__(
+                layer,
+                layer_name.replace('/', '_').replace('-', '_').replace('^', ''))
 
         self.layer_type = layer.op
         self.tf_data_format = data_format
         self.pd_data_format = "NCHW"
         self.fluid_code = FluidCode()
-        self.dtype_map = {1: "float32", 3: "int32", 4: "uint8", 9: "int64"}
+        self.dtype_map = {
+            1: "float32",
+            3: "int32",
+            4: "uint8",
+            9: "int64",
+            10: "bool"
+        }
 
     @property
     def out_shapes(self):
@@ -113,7 +119,9 @@
 
         for layer_name, node in self.node_map.items():
             for in_node in node.layer.input:
-                in_node = in_node.replace('/', '_').replace('-', '_')
+                in_node = in_node.replace('/',
+                                          '_').replace('-',
+                                                       '_').replace('^', '')
                 if in_node not in self.node_map:
                     if in_node.strip().split(':')[0] in self.node_map:
                         self.connect(in_node.strip().split(':')[0], layer_name)
@@ -140,6 +148,9 @@
         node = super(TFGraph, self).get_node(new_node_name, copy)
         if node is None:
             return None
+        if node.layer_type == "Switch":
+            if hasattr(node, 'index'):
+                del node.index
         if len(items) == 1 and node.layer_type in self.multi_out_ops:
             node.index = 0
         return node
@@ -184,9 +195,13 @@
             del self.topo_sort[idx]
 
     def _remove_identity_node(self):
+        identity_ops = [
+            'Identity', 'StopGradient', 'Switch', 'Merge',
+            'PlaceholderWithDefault'
+        ]
         identity_node = list()
         for node_name, node in self.node_map.items():
-            if node.layer_type == "Identity" or node.layer_type == "StopGradient":
+            if node.layer_type in identity_ops:
                 identity_node.append(node_name)
 
         for node_name in identity_node:
diff --git a/x2paddle/op_mapper/caffe_custom_layer/detectionoutput.py b/x2paddle/op_mapper/caffe_custom_layer/detectionoutput.py
index 173f5f31d5f26545a112d11b0994d73097ebb16b..6411833213d37ebc87bae0e0f565bfd1cb0d9131 100644
--- a/x2paddle/op_mapper/caffe_custom_layer/detectionoutput.py
+++ b/x2paddle/op_mapper/caffe_custom_layer/detectionoutput.py
@@ -12,6 +12,7 @@ def detectionoutput_layer(inputs,
                           share_location=True,
                           keep_top_k=100,
                           confidence_threshold=0.1,
+                          num_classes=2,
                           input_shape=None,
                           name=None):
     nms_param_str = nms_param
@@ -36,9 +37,9 @@
     pb = fluid.layers.reshape(x=pb, shape=[-1, 4])
     pbv = fluid.layers.reshape(x=pbv, shape=[-1, 4])
     mbox_loc = inputs[0]
-    mbox_loc = fluid.layers.reshape(x=mbox_loc, shape=[-1, pb.shape[0], 4])
+    mbox_loc = fluid.layers.reshape(x=mbox_loc, shape=[0, -1, 4])
     mbox_conf_flatten = fluid.layers.reshape(x=mbox_conf_flatten,
-                                             shape=[0, pb.shape[0], -1])
+                                             shape=[0, -1, num_classes])
 
     default = {"nms_threshold": 0.3, "top_k": 10, "eta": 1.0}
     fields = ['eta', 'top_k', 'nms_threshold']
diff --git a/x2paddle/op_mapper/caffe_op_mapper.py b/x2paddle/op_mapper/caffe_op_mapper.py
index abdbbaef11a45af55657a15d1c2b2536383df8f5..e398f2b120d306733759d10eb0a90eb3da7c7e86 100644
--- a/x2paddle/op_mapper/caffe_op_mapper.py
+++ b/x2paddle/op_mapper/caffe_op_mapper.py
@@ -940,7 +940,9 @@ class CaffeOpMapper(OpMapper):
             input = self.graph.get_bottom_node(node, idx=i, copy=True)
             if i == 1 and op == 'DetectionOutput':
                 input = self.graph.get_bottom_node(node, idx=i, copy=True)
-                while input is not None and input.layer_type != 'Softmax':
+                while input is not None \
+                        and input.layer_type != 'Softmax' \
+                        and input.layer_type != 'Sigmoid':
                     input = self.graph.get_bottom_node(input, idx=0, copy=True)
                 assert input is not None, 'This kind of DetectionOutput is not supported!'
                 input = self.graph.get_bottom_node(input, idx=0, copy=True)
diff --git a/x2paddle/op_mapper/caffe_shape.py b/x2paddle/op_mapper/caffe_shape.py
index 608a9d242aa7b703a070916f6cfaebc29e574dad..b1b18e4c27cb4c464af75d15bfd515bc076d01d7 100644
--- a/x2paddle/op_mapper/caffe_shape.py
+++ b/x2paddle/op_mapper/caffe_shape.py
@@ -293,15 +293,12 @@
             explicit_count *= count(l)
         for i in range(len(copy_axes)):
            explicit_count *= outshape[start_axis + copy_axes[i]]
-        assert input_count % explicit_count == 0, "[Reshape]botom count[%d] "\
-            "must be divisible by product of the specified dimensions[%d] "\
-            % (input_count, explicit_count)
-        outshape[start_axis + inferred_axis] = int(input_count / explicit_count)
+        outshape[start_axis + inferred_axis] = -1
+        outshape[0] = 0
+    else:
+        outshape[0] = -1
 
     output_count = count(outshape)
-    assert output_count == input_count, "[Reshape]output count[%d] must match input count[%d]" % (
-        output_count, input_count)
-    outshape[0] = -1
 
     return [outshape]
@@ -345,9 +342,10 @@
     output_shape = inshape[0:start_axis]
     if len(inshape[start_axis:end_axis]) != 0:
         flat_sz = reduce(lambda a, b: a * b, inshape[start_axis:end_axis])
+        flat_sz = -1
+        output_shape[0] = 0
         output_shape += [flat_sz]
     output_shape += inshape[end_axis:len(inshape)]
-    output_shape[0] = -1
     return [output_shape]
diff --git a/x2paddle/op_mapper/tf_op_mapper.py b/x2paddle/op_mapper/tf_op_mapper.py
index 58807a5145a1502fb593856e6725fb1ff3ca7ab2..0512c6edc520b44a2ea98546c21dad4cfb2de5cc 100644
--- a/x2paddle/op_mapper/tf_op_mapper.py
+++ b/x2paddle/op_mapper/tf_op_mapper.py
@@ -125,9 +125,9 @@ class TFOpMapper(OpMapper):
         in_node = self.graph.get_node(in_node_name)
         out_node = self.graph.get_node(out_node_name)
         index = in_node.outputs.index(out_node_name)
-        del in_node.outputs[index]
+        # del in_node.outputs[index]
         index = out_node.inputs.index(in_node_name)
-        del out_node.inputs[index]
+        # del out_node.inputs[index]
         self.omit_nodes.append(in_node.layer_name)
 
     def directly_map(self, node):
@@ -624,6 +624,9 @@
                                       output=node,
                                       param_attr=perm)
             return
+        if len(attr["shape"]) == 5:
+            attr["shape"] = [attr["shape"][i] for i in [0, 1, 4, 2, 3]]
+
         node.fluid_code.add_layer("reshape",
                                   inputs=input,
                                   output=node,
@@ -893,10 +896,23 @@
             "starts": begin,
             "ends": end
         }
+
+        shrink_axis_mask = node.get_attr('shrink_axis_mask')
+        squeeze_dims = list()
+        for i in range(len(begin)):
+            x = shrink_axis_mask >> i & 1
+            if x == 1:
+                squeeze_dims.append(i)
         node.fluid_code.add_layer("slice",
                                   inputs=input,
                                   output=node,
                                   param_attr=attr)
+        if shrink_axis_mask > 0 and len(input.out_shapes[0]) == 5:
+            attr = {"axes": squeeze_dims}
+            node.fluid_code.add_layer("squeeze",
+                                      inputs=node,
+                                      output=node,
+                                      param_attr=attr)
 
     def Slice(self, node):
         input = self.graph.get_node(node.layer.input[0], copy=True)