提交 8e1a081b 编写于 作者: C Channingss

update code

上级 b99dfb81
...@@ -17,29 +17,9 @@ ...@@ -17,29 +17,9 @@
"type": "PATH" "type": "PATH"
}, },
{ {
"name": "PADDLE_DIR", "name": "OPENVINO_DIR",
"value": "C:/projects/fluid_install_dir_win_cpu_1.6/fluid_install_dir_win_cpu_1.6", "value": "C:/projects/inference_engine",
"type": "PATH" "type": "PATH"
},
{
"name": "CMAKE_BUILD_TYPE",
"value": "Release",
"type": "STRING"
},
{
"name": "WITH_STATIC_LIB",
"value": "True",
"type": "BOOL"
},
{
"name": "WITH_MKL",
"value": "True",
"type": "BOOL"
},
{
"name": "WITH_GPU",
"value": "False",
"type": "BOOL"
} }
] ]
} }
......
...@@ -17,6 +17,7 @@ import paddle.fluid as fluid ...@@ -17,6 +17,7 @@ import paddle.fluid as fluid
import os import os
import sys import sys
import paddlex as pdx import paddlex as pdx
import paddlex.utils.logging as logging
__all__ = ['export_onnx'] __all__ = ['export_onnx']
...@@ -45,10 +46,10 @@ def export_onnx_model(model, save_dir): ...@@ -45,10 +46,10 @@ def export_onnx_model(model, save_dir):
from fluid_onnx.variables import paddle_variable_to_onnx_tensor, paddle_onnx_weight from fluid_onnx.variables import paddle_variable_to_onnx_tensor, paddle_onnx_weight
from debug.model_check import debug_model, Tracker from debug.model_check import debug_model, Tracker
except Exception as e: except Exception as e:
print(e) logging.error(e)
print( logging.error(
"Import Module Failed! Please install paddle2onnx. Related requirements \ "Import Module Failed! Please install paddle2onnx. Related requirements see https://github.com/PaddlePaddle/paddle2onnx."
see https://github.com/PaddlePaddle/paddle2onnx.") )
sys.exit(-1) sys.exit(-1)
place = fluid.CPUPlace() place = fluid.CPUPlace()
exe = fluid.Executor(place) exe = fluid.Executor(place)
...@@ -74,7 +75,7 @@ def export_onnx_model(model, save_dir): ...@@ -74,7 +75,7 @@ def export_onnx_model(model, save_dir):
paddle_variable_to_onnx_tensor(v, global_block) paddle_variable_to_onnx_tensor(v, global_block)
for v in test_input_names for v in test_input_names
] ]
print("load the model parameter done.") logging.info("load the model parameter done.")
onnx_nodes = [] onnx_nodes = []
op_check_list = [] op_check_list = []
op_trackers = [] op_trackers = []
...@@ -107,8 +108,8 @@ def export_onnx_model(model, save_dir): ...@@ -107,8 +108,8 @@ def export_onnx_model(model, save_dir):
else: else:
if op.type not in ['feed', 'fetch']: if op.type not in ['feed', 'fetch']:
op_check_list.append(op.type) op_check_list.append(op.type)
print('The operator sets to run test case.') logging.info('The operator sets to run test case.')
print(set(op_check_list)) logging.info(set(op_check_list))
# Create outputs # Create outputs
# Get the new names for outputs if they've been renamed in nodes' making # Get the new names for outputs if they've been renamed in nodes' making
...@@ -145,4 +146,4 @@ def export_onnx_model(model, save_dir): ...@@ -145,4 +146,4 @@ def export_onnx_model(model, save_dir):
os.mkdir(save_dir) os.mkdir(save_dir)
with open(onnx_model_file, 'wb') as f: with open(onnx_model_file, 'wb') as f:
f.write(onnx_model.SerializeToString()) f.write(onnx_model.SerializeToString())
print("Saved converted model to path: %s" % onnx_model_file) logging.info("Saved converted model to path: %s" % onnx_model_file)
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册