未验证 提交 1dc2a6b8 编写于 作者: M mamingjie-China 提交者: GitHub

Merge pull request #1 from PaddlePaddle/develop

更新数据
...@@ -100,8 +100,3 @@ class Graph(object): ...@@ -100,8 +100,3 @@ class Graph(object):
raise Exception("node[{}] not in graph".format(dst)) raise Exception("node[{}] not in graph".format(dst))
self.node_map[dst].inputs.append(src) self.node_map[dst].inputs.append(src)
self.node_map[src].outputs.append(dst) self.node_map[src].outputs.append(dst)
def print(self):
for i, tmp in enumerate(self.topo_sort):
print(tmp, self.node_map[tmp].layer_type, self.node_map[tmp].inputs,
self.node_map[tmp].outputs)
...@@ -24,6 +24,8 @@ import sys ...@@ -24,6 +24,8 @@ import sys
def get_same_padding(in_size, kernel_size, stride):
    """Compute TensorFlow-style "SAME" padding for one spatial dimension.

    Args:
        in_size (int): input size along this dimension.
        kernel_size (int): kernel size along this dimension.
        stride (int): stride along this dimension.

    Returns:
        list[int]: ``[pad_before, pad_after]``; when the total padding is
        odd, the extra unit goes to ``pad_after``, matching TF semantics.
    """
    new_size = int(math.ceil(in_size * 1.0 / stride))
    pad_size = (new_size - 1) * stride + kernel_size - in_size
    # The computed total can be negative (e.g. kernel smaller than the
    # stride remainder); clamp to zero so callers never emit negative pads.
    if pad_size < 0:
        pad_size = 0
    pad0 = int(pad_size / 2)
    pad1 = pad_size - pad0
    return [pad0, pad1]
...@@ -369,12 +371,13 @@ class TFOpMapper(OpMapper): ...@@ -369,12 +371,13 @@ class TFOpMapper(OpMapper):
pad_w = get_same_padding(in_shape[3], k_size[3], strides[3]) pad_w = get_same_padding(in_shape[3], k_size[3], strides[3])
pad_h = pad_h[0] + pad_h[1] pad_h = pad_h[0] + pad_h[1]
pad_w = pad_w[0] + pad_w[1] pad_w = pad_w[0] + pad_w[1]
attr = {"paddings": [0, pad_h, 0, pad_w], "pad_value": -10000.0} if pad_h != 0 or pad_w != 0:
node.fluid_code.add_layer("pad2d", attr = {"paddings": [0, pad_h, 0, pad_w], "pad_value": -10000.0}
inputs=input, node.fluid_code.add_layer("pad2d",
output=node, inputs=input,
param_attr=attr) output=node,
input = node param_attr=attr)
input = node
attr = { attr = {
"pool_size": k_size[2:4], "pool_size": k_size[2:4],
"pool_type": string("max"), "pool_type": string("max"),
...@@ -551,6 +554,7 @@ class TFOpMapper(OpMapper): ...@@ -551,6 +554,7 @@ class TFOpMapper(OpMapper):
def Reshape(self, node): def Reshape(self, node):
input = self.graph.get_node(node.layer.input[0], copy=True) input = self.graph.get_node(node.layer.input[0], copy=True)
param = self.graph.get_node(node.layer.input[1], copy=True) param = self.graph.get_node(node.layer.input[1], copy=True)
is_variable = False
if param.layer_type == "Const": if param.layer_type == "Const":
attr = {"shape": param.value.tolist()} attr = {"shape": param.value.tolist()}
self.add_omit_nodes(param.layer_name, node.layer_name) self.add_omit_nodes(param.layer_name, node.layer_name)
...@@ -582,6 +586,24 @@ class TFOpMapper(OpMapper): ...@@ -582,6 +586,24 @@ class TFOpMapper(OpMapper):
new_param += (node.layer_name + "[{}]".format(i) + ", ") new_param += (node.layer_name + "[{}]".format(i) + ", ")
new_param = new_param.strip(", ") + "]" new_param = new_param.strip(", ") + "]"
attr = {"shape": new_param} attr = {"shape": new_param}
is_variable = True
        # to change [192, -1] -> [-1, 192]: always put -1 in the first dimension
# optimization for Paddle-Lite
in_shape = input.out_shapes[0]
if is_variable and in_shape.count(-1) < 1:
total_size = 1
for i in range(len(in_shape)):
total_size *= in_shape[i]
for i in range(len(attr["shape"])):
if attr["shape"][i] == 0:
attr["shape"][i] = in_shape[i]
if attr["shape"][i] != -1:
total_size /= attr["shape"][i]
if attr["shape"].count(-1) > 0:
index = attr["shape"].index(-1)
attr["shape"][index] = int(total_size)
attr["shape"][0] = -1
if len(input.out_shapes[0]) == 4 and node.tf_data_format == "NHWC": if len(input.out_shapes[0]) == 4 and node.tf_data_format == "NHWC":
if len(attr["shape"]) < 3: if len(attr["shape"]) < 3:
......
...@@ -24,6 +24,8 @@ import sys ...@@ -24,6 +24,8 @@ import sys
def get_same_padding(in_size, kernel_size, stride):
    """Return TF "SAME" padding ``[pad_before, pad_after]`` for one axis.

    Args:
        in_size (int): input size along this axis.
        kernel_size (int): kernel size along this axis.
        stride (int): stride along this axis.

    Returns:
        list[int]: two-element padding; an odd total is split with the
        larger half after the input, as TensorFlow does.
    """
    new_size = int(math.ceil(in_size * 1.0 / stride))
    pad_size = (new_size - 1) * stride + kernel_size - in_size
    # Clamp: the formula goes negative when the kernel is smaller than
    # the stride remainder, and negative padding is invalid downstream.
    if pad_size < 0:
        pad_size = 0
    pad0 = int(pad_size / 2)
    pad1 = pad_size - pad0
    return [pad0, pad1]
...@@ -500,6 +502,7 @@ class TFOpMapperNHWC(OpMapper): ...@@ -500,6 +502,7 @@ class TFOpMapperNHWC(OpMapper):
def Reshape(self, node): def Reshape(self, node):
input = self.graph.get_node(node.layer.input[0], copy=True) input = self.graph.get_node(node.layer.input[0], copy=True)
param = self.graph.get_node(node.layer.input[1], copy=True) param = self.graph.get_node(node.layer.input[1], copy=True)
is_variable = False
if param.layer_type == "Const": if param.layer_type == "Const":
attr = {"shape": param.value.tolist()} attr = {"shape": param.value.tolist()}
self.add_omit_nodes(param.layer_name, node.layer_name) self.add_omit_nodes(param.layer_name, node.layer_name)
...@@ -527,6 +530,24 @@ class TFOpMapperNHWC(OpMapper): ...@@ -527,6 +530,24 @@ class TFOpMapperNHWC(OpMapper):
new_param += (node.layer_name + "[{}]".format(i) + ", ") new_param += (node.layer_name + "[{}]".format(i) + ", ")
new_param = new_param.strip(", ") + "]" new_param = new_param.strip(", ") + "]"
attr = {"shape": new_param} attr = {"shape": new_param}
is_variable = True
        # to change [192, -1] -> [-1, 192]: always put -1 in the first dimension
# optimization for Paddle-Lite
in_shape = input.out_shapes[0]
if not is_variable and in_shape.count(-1) < 1:
total_size = 1
for i in range(len(in_shape)):
total_size *= in_shape[i]
for i in range(len(attr["shape"])):
if attr["shape"][i] == 0:
attr["shape"][i] = in_shape[i]
if attr["shape"][i] != -1:
total_size /= attr["shape"][i]
if attr["shape"].count(-1) > 0:
index = attr["shape"].index(-1)
attr["shape"][index] = int(total_size)
attr["shape"][0] = -1
node.fluid_code.add_layer("reshape", node.fluid_code.add_layer("reshape",
inputs=input, inputs=input,
output=node, output=node,
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册