From eb2eb0b86a12a986ef010f32d090a8102aefd6e8 Mon Sep 17 00:00:00 2001
From: wjj19950828
Date: Thu, 30 Dec 2021 16:28:37 +0800
Subject: [PATCH] fixed for CI

---
 docs/inference_model_convertor/demo/tensorflow2paddle.ipynb | 4 ++--
 x2paddle/core/program.py                                     | 6 +++---
 x2paddle/decoder/onnx_decoder.py                             | 3 ++-
 3 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/docs/inference_model_convertor/demo/tensorflow2paddle.ipynb b/docs/inference_model_convertor/demo/tensorflow2paddle.ipynb
index e9d4d07..30122de 100644
--- a/docs/inference_model_convertor/demo/tensorflow2paddle.ipynb
+++ b/docs/inference_model_convertor/demo/tensorflow2paddle.ipynb
@@ -81,7 +81,7 @@
    "source": [
     "## 模型迁移\n",
     "### 1. 获取MobileNetV1的FrozenModel\n",
-    "由于X2Paddle只支持TensorFlow中FrozenModel的转换,如果为纯checkpoint模型,需要参考参考X2Paddle官方[文档](https://github.com/PaddlePaddle/X2Paddle/blob/develop/docs/user_guides/export_tf_model.md),将其转换为FrozenModel,本示例中提供的模型为FrozenModel,所以无需转换。"
+    "由于X2Paddle只支持TensorFlow中FrozenModel的转换,如果为纯checkpoint模型,需要参考参考X2Paddle官方[文档](https://github.com/PaddlePaddle/X2Paddle/blob/release-1.1/docs/user_guides/export_tf_model.md),将其转换为FrozenModel,本示例中提供的模型为FrozenModel,所以无需转换。"
    ]
   },
   {
@@ -210,4 +210,4 @@
  },
  "nbformat": 4,
  "nbformat_minor": 4
-}
+}
\ No newline at end of file
diff --git a/x2paddle/core/program.py b/x2paddle/core/program.py
index 1841c7c..9bc895a 100644
--- a/x2paddle/core/program.py
+++ b/x2paddle/core/program.py
@@ -532,9 +532,9 @@ class PaddleGraph(object):
         paddle.save(self.parameters, save_path)

     def dygraph2static(self, save_dir, input_shapes=[], input_types=[]):
-        sepc_list = list()
+        spec_list = list()
         for i, name in enumerate(self.inputs):
-            sepc_list.append(
+            spec_list.append(
                 paddle.static.InputSpec(
                     shape=input_shapes[i], name=name, dtype=input_types[i]))
         path = osp.abspath(save_dir)
@@ -548,7 +548,7 @@ class PaddleGraph(object):
         else:
             model.set_dict(restore)
         model.eval()
-        static_model = paddle.jit.to_static(model, input_spec=sepc_list)
+        static_model = paddle.jit.to_static(model, input_spec=spec_list)
         try:
             paddle.jit.save(static_model,
                             osp.join(save_dir, "inference_model/model"))
diff --git a/x2paddle/decoder/onnx_decoder.py b/x2paddle/decoder/onnx_decoder.py
index ae3e61e..db4ed90 100755
--- a/x2paddle/decoder/onnx_decoder.py
+++ b/x2paddle/decoder/onnx_decoder.py
@@ -583,7 +583,8 @@ class ONNXDecoder(object):
             item.name = self.make_variable_name(item.name)
         for node in graph.node:
             node.name = node.output[0]
-            if ":" in node.name and len(node.output) > 1:
+            if ":" in node.name and len(
+                    node.output) > 1 and node.op_type != "LSTM":
                 node.name = node.name.split(':')[0]
             node.name = self.make_variable_name(node.name)
             for i in range(len(node.input)):
-- 
GitLab