未验证 提交 9f27c2cc 编写于 作者: J Jason 提交者: GitHub

Merge pull request #722 from wjj19950828/fixed_for_convtranspose

fixed convtranspose bug
...@@ -81,7 +81,7 @@ ...@@ -81,7 +81,7 @@
"source": [ "source": [
"## 模型迁移\n", "## 模型迁移\n",
"### 1. 获取MobileNetV1的FrozenModel\n", "### 1. 获取MobileNetV1的FrozenModel\n",
"由于X2Paddle只支持TensorFlow中FrozenModel的转换,如果为纯checkpoint模型,需要参考X2Paddle官方[文档](https://github.com/PaddlePaddle/X2Paddle/blob/develop/docs/user_guides/export_tf_model.md),将其转换为FrozenModel,本示例中提供的模型为FrozenModel,所以无需转换。" "由于X2Paddle只支持TensorFlow中FrozenModel的转换,如果为纯checkpoint模型,需要参考X2Paddle官方[文档](https://github.com/PaddlePaddle/X2Paddle/blob/release-1.1/docs/user_guides/export_tf_model.md),将其转换为FrozenModel,本示例中提供的模型为FrozenModel,所以无需转换。"
] ]
}, },
{ {
......
...@@ -532,9 +532,9 @@ class PaddleGraph(object): ...@@ -532,9 +532,9 @@ class PaddleGraph(object):
paddle.save(self.parameters, save_path) paddle.save(self.parameters, save_path)
def dygraph2static(self, save_dir, input_shapes=[], input_types=[]): def dygraph2static(self, save_dir, input_shapes=[], input_types=[]):
sepc_list = list() spec_list = list()
for i, name in enumerate(self.inputs): for i, name in enumerate(self.inputs):
sepc_list.append( spec_list.append(
paddle.static.InputSpec( paddle.static.InputSpec(
shape=input_shapes[i], name=name, dtype=input_types[i])) shape=input_shapes[i], name=name, dtype=input_types[i]))
path = osp.abspath(save_dir) path = osp.abspath(save_dir)
...@@ -548,7 +548,7 @@ class PaddleGraph(object): ...@@ -548,7 +548,7 @@ class PaddleGraph(object):
else: else:
model.set_dict(restore) model.set_dict(restore)
model.eval() model.eval()
static_model = paddle.jit.to_static(model, input_spec=sepc_list) static_model = paddle.jit.to_static(model, input_spec=spec_list)
try: try:
paddle.jit.save(static_model, paddle.jit.save(static_model,
osp.join(save_dir, "inference_model/model")) osp.join(save_dir, "inference_model/model"))
......
...@@ -583,7 +583,8 @@ class ONNXDecoder(object): ...@@ -583,7 +583,8 @@ class ONNXDecoder(object):
item.name = self.make_variable_name(item.name) item.name = self.make_variable_name(item.name)
for node in graph.node: for node in graph.node:
node.name = node.output[0] node.name = node.output[0]
if ":" in node.name and len(node.output) > 1: if ":" in node.name and len(
node.output) > 1 and node.op_type != "LSTM":
node.name = node.name.split(':')[0] node.name = node.name.split(':')[0]
node.name = self.make_variable_name(node.name) node.name = self.make_variable_name(node.name)
for i in range(len(node.input)): for i in range(len(node.input)):
......
...@@ -169,6 +169,8 @@ class OpSet9(): ...@@ -169,6 +169,8 @@ class OpSet9():
'Floor': ['paddle.floor'], 'Floor': ['paddle.floor'],
'Abs': ['paddle.abs'], 'Abs': ['paddle.abs'],
'Erf': ['paddle.erf'], 'Erf': ['paddle.erf'],
'Sin': ['paddle.sin'],
'Cos': ['paddle.cos'],
} }
def __init__(self, decoder, paddle_graph): def __init__(self, decoder, paddle_graph):
...@@ -2128,12 +2130,33 @@ class OpSet9(): ...@@ -2128,12 +2130,33 @@ class OpSet9():
paddings, var_x = self._pad_if_asymmetric(node, pads, val_x) paddings, var_x = self._pad_if_asymmetric(node, pads, val_x)
if len(output_size) != 0:
paddings = [0] * 4
total_paddings = list()
total_paddings.append((val_x.out_shapes[0][2] - 1) * strides[
0] + dilations[0] * (kernel_shape[0] - 1) + 1 + out_padding[0] -
output_size[0])
total_paddings.append((val_x.out_shapes[0][3] - 1) * strides[
1] + dilations[1] * (kernel_shape[1] - 1) + 1 + out_padding[1] -
output_size[1])
if auto_pad == "SAME_UPPER":
for i in range(len(total_paddings)):
paddings[2 * i] = total_paddings[0] - total_paddings[0] // 2
paddings[2 * i + 1] = total_paddings[0] // 2
else:
for i in range(len(total_paddings)):
paddings[2 * i] = total_paddings[0] // 2
paddings[2 * i + 1] = total_paddings[0] - total_paddings[
0] // 2
else:
output_size = [0, 0] output_size = [0, 0]
output_size[0] = (val_x.out_shapes[0][2] - 1 output_size[0] = (
val_x.out_shapes[0][2] - 1
) * strides[0] - 2 * paddings[0] + dilations[0] * ( ) * strides[0] - 2 * paddings[0] + dilations[0] * (
kernel_shape[0] - 1) + 1 + out_padding[0] kernel_shape[0] - 1) + 1 + out_padding[0]
output_size[1] = (val_x.out_shapes[0][3] - 1 output_size[1] = (
val_x.out_shapes[0][3] - 1
) * strides[1] - 2 * paddings[1] + dilations[1] * ( ) * strides[1] - 2 * paddings[1] + dilations[1] * (
kernel_shape[1] - 1) + 1 + out_padding[1] kernel_shape[1] - 1) + 1 + out_padding[1]
...@@ -2178,6 +2201,8 @@ class OpSet9(): ...@@ -2178,6 +2201,8 @@ class OpSet9():
if val_b is not None: if val_b is not None:
_rename_or_remove_weight(self.weights, val_b.name, _rename_or_remove_weight(self.weights, val_b.name,
op_name + '.bias') op_name + '.bias')
else:
layer_attrs["bias_attr"] = False
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel=paddle_op, kernel=paddle_op,
inputs=inputs_dict, inputs=inputs_dict,
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册