Unverified commit 2ed0c371 authored by channings, committed by GitHub

Merge pull request #8 from PaddlePaddle/develop

pull 
@@ -19,6 +19,8 @@
 | 37 | Slice | 38 | Sum | 39 | Max | 40 | Conv2DBackpropInput |
 | 41 | Cast | 42 | Split | 43 | Squeeze | 44 | ResizeNearestNeighbor |
 | 45 | Softmax | 46 | Range | 47 | ConcatV2 | 48 | MirrorPad |
+| 49 | Identity | 50 | GreaterEqual | 51 | StopGradient | 52 | Minimum |
+| 53 | RandomUniform | | | | | | |
 ## Caffe
......
-__version__ = "0.6.0"
+__version__ = "0.7.0"
@@ -25,7 +25,8 @@ def export_paddle_param(param, param_name, dir):
         "int64": [framework_pb2.VarType.INT64, 'q'],
         "float16": [framework_pb2.VarType.FP16, 'e'],
         "float32": [framework_pb2.VarType.FP32, 'f'],
-        "float64": [framework_pb2.VarType.FP64, 'd']
+        "float64": [framework_pb2.VarType.FP64, 'd'],
+        "bool": [framework_pb2.VarType.BOOL, None]
     }
     shape = param.shape
     if len(shape) == 0:
......
@@ -25,20 +25,26 @@ import sys
 class TFGraphNode(GraphNode):
     def __init__(self, layer, layer_name=None, data_format="NHWC"):
         if layer_name is None:
-            super(TFGraphNode,
-                  self).__init__(layer,
-                                 layer.name.replace('/', '_').replace('-', '_'))
+            super(TFGraphNode, self).__init__(
+                layer,
+                layer.name.replace('/', '_').replace('-', '_').replace('^', ''))
         else:
-            super(TFGraphNode,
-                  self).__init__(layer,
-                                 layer_name.replace('/', '_').replace('-', '_'))
+            super(TFGraphNode, self).__init__(
+                layer,
+                layer_name.replace('/', '_').replace('-', '_').replace('^', ''))

         self.layer_type = layer.op
         self.tf_data_format = data_format
         self.pd_data_format = "NCHW"
         self.fluid_code = FluidCode()
-        self.dtype_map = {1: "float32", 3: "int32", 4: "uint8", 9: "int64"}
+        self.dtype_map = {
+            1: "float32",
+            3: "int32",
+            4: "uint8",
+            9: "int64",
+            10: "bool"
+        }

     @property
     def out_shapes(self):
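The integer keys in dtype_map follow TensorFlow's DataType enum from
tensorflow/core/framework/types.proto (DT_FLOAT = 1, DT_INT32 = 3,
DT_UINT8 = 4, DT_INT64 = 9, DT_BOOL = 10), which is how dtypes are encoded
on a serialized NodeDef. A minimal sketch of the lookup this map enables
(standalone Python, not X2Paddle's actual accessor):

    # Map TensorFlow DataType enum values to numpy-style dtype strings.
    dtype_map = {1: "float32", 3: "int32", 4: "uint8", 9: "int64", 10: "bool"}

    def lookup_dtype(enum_value):
        # Raise on enum values the converter does not handle yet.
        if enum_value not in dtype_map:
            raise Exception("Unsupported dtype enum: {}".format(enum_value))
        return dtype_map[enum_value]

    print(lookup_dtype(10))  # -> "bool"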
@@ -113,7 +119,9 @@ class TFGraph(Graph):
         for layer_name, node in self.node_map.items():
             for in_node in node.layer.input:
-                in_node = in_node.replace('/', '_').replace('-', '_')
+                in_node = in_node.replace('/',
+                                          '_').replace('-',
+                                                       '_').replace('^', '')
                 if in_node not in self.node_map:
                     if in_node.strip().split(':')[0] in self.node_map:
                         self.connect(in_node.strip().split(':')[0], layer_name)
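In a TensorFlow GraphDef, an input name prefixed with "^" marks a control
dependency rather than a data edge, and a ":n" suffix selects output slot n.
Stripping "^" with the same replace chain used for node names keeps both
sides of the edge table consistent. A small sketch of the normalization:

    # "^" = control-dependency input; ":1" = output slot selector.
    def normalize(name):
        return name.replace('/', '_').replace('-', '_').replace('^', '')

    print(normalize('^cond/Switch:1'))  # -> 'cond_Switch:1'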
@@ -140,6 +148,9 @@ class TFGraph(Graph):
         node = super(TFGraph, self).get_node(new_node_name, copy)
         if node is None:
             return None
+        if node.layer_type == "Switch":
+            if hasattr(node, 'index'):
+                del node.index
         if len(items) == 1 and node.layer_type in self.multi_out_ops:
             node.index = 0
         return node
@@ -184,9 +195,13 @@ class TFGraph(Graph):
             del self.topo_sort[idx]

     def _remove_identity_node(self):
+        identity_ops = [
+            'Identity', 'StopGradient', 'Switch', 'Merge',
+            'PlaceholderWithDefault'
+        ]
         identity_node = list()
         for node_name, node in self.node_map.items():
-            if node.layer_type == "Identity" or node.layer_type == "StopGradient":
+            if node.layer_type in identity_ops:
                 identity_node.append(node_name)
         for node_name in identity_node:
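At inference time each of these five ops forwards an input through
unchanged, so the converter can delete the node and splice its producer
directly to its consumers. A toy sketch of that splicing, assuming a plain
dict-of-input-lists graph (names and structure are illustrative, not
X2Paddle's internal API):

    # edges maps each node to the list of its input node names.
    edges = {'conv': ['x'], 'ident': ['conv'], 'relu': ['ident']}
    pass_through = {'ident'}

    for name in list(edges):
        if name in pass_through:
            src = edges[name][0]            # the forwarded input
            for node in edges:              # rewire every consumer
                edges[node] = [src if i == name else i for i in edges[node]]
            del edges[name]

    print(edges)  # -> {'conv': ['x'], 'relu': ['conv']}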
......
@@ -12,6 +12,7 @@ def detectionoutput_layer(inputs,
                           share_location=True,
                           keep_top_k=100,
                           confidence_threshold=0.1,
+                          num_classes=2,
                           input_shape=None,
                           name=None):
     nms_param_str = nms_param
@@ -36,9 +37,9 @@ def detectionoutput_layer(inputs,
     pb = fluid.layers.reshape(x=pb, shape=[-1, 4])
     pbv = fluid.layers.reshape(x=pbv, shape=[-1, 4])
     mbox_loc = inputs[0]
-    mbox_loc = fluid.layers.reshape(x=mbox_loc, shape=[-1, pb.shape[0], 4])
+    mbox_loc = fluid.layers.reshape(x=mbox_loc, shape=[0, -1, 4])
     mbox_conf_flatten = fluid.layers.reshape(x=mbox_conf_flatten,
-                                             shape=[0, pb.shape[0], -1])
+                                             shape=[0, -1, num_classes])
     default = {"nms_threshold": 0.3, "top_k": 10, "eta": 1.0}
     fields = ['eta', 'top_k', 'nms_threshold']
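In fluid.layers.reshape, a 0 in the target shape copies that dimension from
the input and a -1 is inferred from the remaining element count, so
[0, -1, 4] reads as "keep the batch dimension, infer the box count, four
coordinates per box". This removes the dependence on pb.shape[0], which may
be unknown when the model is converted. A plain-Python check of the rule:

    # Resolve a reshape spec the way the 0 / -1 conventions do.
    def resolve(in_shape, spec):
        out = [in_shape[i] if s == 0 else s for i, s in enumerate(spec)]
        total = 1
        for d in in_shape:
            total *= d
        known = 1
        for d in out:
            if d != -1:
                known *= d
        return [total // known if d == -1 else d for d in out]

    print(resolve([8, 7668], [0, -1, 4]))  # -> [8, 1917, 4]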
......
@@ -940,7 +940,9 @@ class CaffeOpMapper(OpMapper):
             input = self.graph.get_bottom_node(node, idx=i, copy=True)
             if i == 1 and op == 'DetectionOutput':
                 input = self.graph.get_bottom_node(node, idx=i, copy=True)
-                while input is not None and input.layer_type != 'Softmax':
+                while input is not None \
+                        and input.layer_type != 'Softmax' \
+                        and input.layer_type != 'Sigmoid':
                     input = self.graph.get_bottom_node(input, idx=0, copy=True)
                 assert input is not None, 'This kind of DetectionOutput is not supported!'
                 input = self.graph.get_bottom_node(input, idx=0, copy=True)
......
@@ -293,15 +293,12 @@ def shape_reshape(layer, input_shape):
             explicit_count *= count(l)
         for i in range(len(copy_axes)):
             explicit_count *= outshape[start_axis + copy_axes[i]]
-        assert input_count % explicit_count == 0, "[Reshape]botom count[%d] "\
-            "must be divisible by product of the specified dimensions[%d] "\
-            % (input_count, explicit_count)
-        outshape[start_axis + inferred_axis] = int(input_count / explicit_count)
+        outshape[start_axis + inferred_axis] = -1
+        outshape[0] = 0
+    else:
+        outshape[0] = -1

     output_count = count(outshape)
-    assert output_count == input_count, "[Reshape]output count[%d] must match input count[%d]" % (
-        output_count, input_count)
-    outshape[0] = -1
     return [outshape]
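Instead of computing the inferred axis statically and asserting element-count
divisibility, the converter now emits -1 for the inferred axis and 0 for the
batch axis, deferring both decisions to Paddle's runtime reshape. One
plausible motivation: a static element count breaks down once the batch
dimension is symbolic, as this quick sketch shows:

    # With an unknown batch (conventionally -1), the static element count
    # is not a usable positive number, so a divisibility assert cannot hold.
    inshape = [-1, 7668]                  # batch unknown at convert time
    input_count = inshape[0] * inshape[1]
    print(input_count)                    # -> -7668, meaningless as a count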
@@ -345,9 +342,10 @@ def shape_flatten(layer, input_shape):
     output_shape = inshape[0:start_axis]
     if len(inshape[start_axis:end_axis]) != 0:
         flat_sz = reduce(lambda a, b: a * b, inshape[start_axis:end_axis])
+        flat_sz = -1
+        output_shape[0] = 0
         output_shape += [flat_sz]
     output_shape += inshape[end_axis:len(inshape)]
-    output_shape[0] = -1
     return [output_shape]
......
@@ -125,9 +125,9 @@ class TFOpMapper(OpMapper):
         in_node = self.graph.get_node(in_node_name)
         out_node = self.graph.get_node(out_node_name)
         index = in_node.outputs.index(out_node_name)
-        del in_node.outputs[index]
+        # del in_node.outputs[index]
         index = out_node.inputs.index(in_node_name)
-        del out_node.inputs[index]
+        # del out_node.inputs[index]
         self.omit_nodes.append(in_node.layer_name)

     def directly_map(self, node):
@@ -624,6 +624,9 @@ class TFOpMapper(OpMapper):
                                       output=node,
                                       param_attr=perm)
             return
+        if len(attr["shape"]) == 5:
+            attr["shape"] = [attr["shape"][i] for i in [0, 1, 4, 2, 3]]
+
         node.fluid_code.add_layer("reshape",
                                   inputs=input,
                                   output=node,
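The new branch reorders a 5-D target shape from TensorFlow's channels-last
layout toward the channels-first layout used on the Paddle side: index list
[0, 1, 4, 2, 3] keeps the first two entries in place and moves the last
(channel) entry ahead of the two remaining spatial entries. A quick check:

    # Effect of the [0, 1, 4, 2, 3] reorder on a sample 5-D shape.
    shape = [8, 4, 14, 14, 256]              # channels-last style
    shape = [shape[i] for i in [0, 1, 4, 2, 3]]
    print(shape)                             # -> [8, 4, 256, 14, 14]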
@@ -893,10 +896,23 @@ class TFOpMapper(OpMapper):
             "starts": begin,
             "ends": end
         }
+        shrink_axis_mask = node.get_attr('shrink_axis_mask')
+        squeeze_dims = list()
+        for i in range(len(begin)):
+            x = shrink_axis_mask >> i & 1
+            if x == 1:
+                squeeze_dims.append(i)
         node.fluid_code.add_layer("slice",
                                   inputs=input,
                                   output=node,
                                   param_attr=attr)
+        if shrink_axis_mask > 0 and len(input.out_shapes[0]) == 5:
+            attr = {"axes": squeeze_dims}
+            node.fluid_code.add_layer("squeeze",
+                                      inputs=node,
+                                      output=node,
+                                      param_attr=attr)

     def Slice(self, node):
         input = self.graph.get_node(node.layer.input[0], copy=True)
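shrink_axis_mask comes from TensorFlow's StridedSlice: if bit i is set,
axis i is sliced down to size 1 and then removed from the result. Paddle's
slice keeps the size-1 axis, so the converter follows up with a squeeze over
exactly those axes. A sketch of the bit decoding:

    # Decode StridedSlice's shrink_axis_mask into the axes to squeeze.
    def shrink_axes(mask, rank):
        return [i for i in range(rank) if (mask >> i) & 1]

    print(shrink_axes(0b101, 4))  # -> [0, 2]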
......