Error when using with_inference_optimize
Created by: tianjie491
- Version / environment info:
  1) PaddlePaddle version: paddlepaddle 1.5.0
  2) CPU: i5-7500 CPU @ 3.40GHz
  3) GPU: V100
  4) System environment: Windows 10; Python version: 3.5
- Inference info: whether predicting on CPU or GPU, the error occurs whenever `with_inference_optimize` is used; removing the code block below makes it run normally.

```python
prog_file = "{}/__model__".format(args.init_checkpoints_type + "/inference_model")
params_file = "{}/__params__".format(args.init_checkpoints_type + "/inference_model")
config = fluid.core.AnalysisConfig(prog_file, params_file)
type_infer_program = compiler.CompiledProgram(type_infer_program)
type_infer_program.with_inference_optimize(config)
```
The error message is as follows:

```
--- Running analysis [ir_graph_build_pass]
--- Running analysis [ir_analysis_pass]
--- Running IR pass [infer_clean_graph_pass]
--- Running IR pass [attention_lstm_fuse_pass]
--- Running IR pass [seqconv_eltadd_relu_fuse_pass]
--- Running IR pass [fc_lstm_fuse_pass]
--- Running IR pass [mul_lstm_fuse_pass]
--- Running IR pass [fc_gru_fuse_pass]
--- Running IR pass [mul_gru_fuse_pass]
--- Running IR pass [seq_concat_fc_fuse_pass]
--- Running IR pass [fc_fuse_pass]
--- detected 74 subgraphs
--- Running IR pass [repeated_fc_relu_fuse_pass]
--- Running IR pass [squared_mat_sub_fuse_pass]
--- Running IR pass [conv_bn_fuse_pass]
--- Running IR pass [conv_eltwiseadd_bn_fuse_pass]
--- Running IR pass [is_test_pass]
--- Running IR pass [runtime_context_cache_pass]
--- Running analysis [ir_params_sync_among_devices_pass]
--- Running analysis [adjust_cudnn_workspace_size_pass]
--- Running analysis [inference_op_replace_pass]
--- Running analysis [ir_graph_to_program_pass]
Traceback (most recent call last):
  File "E:\tools\PyCharm\helpers\pydev\pydevd.py", line 1758, in <module>
    main()
  File "E:\tools\PyCharm\helpers\pydev\pydevd.py", line 1752, in main
    globals = debugger.run(setup['file'], None, None, is_module)
  File "E:\tools\PyCharm\helpers\pydev\pydevd.py", line 1147, in run
    pydev_imports.execfile(file, globals, locals)  # execute the script
  File "E:\tools\PyCharm\helpers\pydev\_pydev_imps\_pydev_execfile.py", line 18, in execfile
    exec(compile(contents+"\n", file, 'exec'), glob, loc)
  File "E:/Project/DBQA/predict_with_csv2.py", line 291, in <module>
    main(args)
  File "E:/Project/DBQA/predict_with_csv2.py", line 239, in main
    type_probs, type_scope)
  File "E:/Project/DBQA/predict_with_csv2.py", line 157, in prediction
    scope=scope)
  File "E:\tools\anaconda\lib\site-packages\paddle\fluid\executor.py", line 668, in run
    return self._run_inference(program._executor, feed)
  File "E:\tools\anaconda\lib\site-packages\paddle\fluid\executor.py", line 758, in _run_inference
    return exe.run(feed)
TypeError: run(): incompatible function arguments. The following argument types are supported:
    1. (self: paddle.fluid.core_avx.AnalysisPredictor, arg0: List[paddle.fluid.core_avx.PaddleTensor]) -> List[paddle.fluid.core_avx.PaddleTensor]
Invoked with: <paddle.fluid.core_avx.AnalysisPredictor object at 0x000000843BB78A08>, {'read_file_0.tmp_0': array([[[ 1], [ 226], [ 170], [ 4], [ 75], [ 10], [1464], [ 17], [ 11], ... (the values in between are also numbers, omitted) ... [1.]]], dtype=float32)}
I0725 10:35:23.495776  6368 analysis_predictor.cc:451] == optimize end ==
```
Relevant code snippet:

```python
ernie_config = ErnieConfig(args.ernie_config_path)  # load the ERNIE config
if args.use_cuda:  # run on CUDA
    place = fluid.CUDAPlace(int(os.getenv('FLAGS_selected_gpus', '0')))
else:
    os.environ['CPU_NUM'] = str(1)
    place = fluid.CPUPlace()
exe = fluid.Executor(place)

reader = task_reader.ClassifyReader(
    vocab_path=args.vocab_path,
    label_map_config=args.label_map_config,
    max_seq_len=args.max_seq_len,
    do_lower_case=args.do_lower_case,
    in_tokens=args.in_tokens,
    random_seed=args.random_seed)

startup_prog = fluid.Program()
exe.run(startup_prog)

type_scope = fluid.core.Scope()
with fluid.scope_guard(type_scope):
    type_model_path = os.path.join(args.init_checkpoints_type, "inference_model")
    type_infer_program, type_feed_target_names, type_probs = fluid.io.load_inference_model(
        type_model_path, exe, model_filename='__model__', params_filename='__params__')
    prog_file = "{}/__model__".format(args.init_checkpoints_type + "/inference_model")
    params_file = "{}/__params__".format(args.init_checkpoints_type + "/inference_model")
    config = fluid.core.AnalysisConfig(prog_file, params_file)
    type_infer_program = compiler.CompiledProgram(type_infer_program)
    type_infer_program.with_inference_optimize(config)

wrapper, example_type = reader.pridict_generator(args.law_file,
                                                 question,
                                                 batch_size=args.batch_size)
index_type = prediction(exe, type_infer_program, wrapper, type_feed_target_names,
                        type_probs, type_scope)


def prediction(exe, prog, predict_data_generator, feed_target_names, probs, scope):
    index = []
    src_ids = feed_target_names[0]
    sent_ids = feed_target_names[2]
    pos_ids = feed_target_names[1]
    input_mask = feed_target_names[3]
    for sample in predict_data_generator():
        src_ids_data = sample[0]
        sent_ids_data = sample[1]
        pos_ids_data = sample[2]
        input_mask_data = sample[3]
        probs[0].persistable = True
        np_probs = exe.run(
            prog,
            feed={src_ids: src_ids_data,
                  sent_ids: sent_ids_data,
                  pos_ids: pos_ids_data,
                  input_mask: input_mask_data},
            fetch_list=probs,
            scope=scope)
        index.extend(np_probs[0])
    lists = list(np.array(index)[:, 1])
    list_new = []
    for index, values in enumerate(lists):
        if values > 0.10:
            list_new.append((index, values))
    index = sorted(list_new, key=lambda x: x[1], reverse=True)[:args.beam_size]
    return index
```
Looking at the error, it says:

```
TypeError: run(): incompatible function arguments. The following argument types are supported:
    1. (self: paddle.fluid.core_avx.AnalysisPredictor, arg0: List[paddle.fluid.core_avx.PaddleTensor]) -> List[paddle.fluid.core_avx.PaddleTensor]
```

But everything runs fine when `with_inference_optimize` is not used. Why is that, and how can it be fixed?
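From the traceback, once `with_inference_optimize(config)` is applied, `Executor.run` forwards the call to the native `AnalysisPredictor` built from the `AnalysisConfig`, and that predictor's `run()` only accepts a `List[PaddleTensor]`, not the `{name: ndarray}` feed dict that `Executor.run` normally takes. Below is a minimal sketch of driving the predictor directly through the `fluid.core` inference bindings instead, assuming `create_paddle_predictor`, the numpy-backed `PaddleTensor` constructor, and `as_ndarray()` are available in the 1.5 bindings; the feed order shown is an assumption and must match the order the inference model was saved with.

```python
import numpy as np
from paddle.fluid.core import AnalysisConfig, PaddleTensor, create_paddle_predictor

# Same config as above; builds the native predictor directly,
# without going through CompiledProgram / Executor at all.
config = AnalysisConfig(prog_file, params_file)
predictor = create_paddle_predictor(config)

# run() wants a list of PaddleTensor, one per feed variable.
# NOTE: the order (src_ids, pos_ids, sent_ids, input_mask) is an assumption;
# it must match the feed order of the saved inference model.
inputs = [PaddleTensor(np.asarray(src_ids_data)),
          PaddleTensor(np.asarray(pos_ids_data)),
          PaddleTensor(np.asarray(sent_ids_data)),
          PaddleTensor(np.asarray(input_mask_data))]

outputs = predictor.run(inputs)     # returns List[PaddleTensor]
np_probs = outputs[0].as_ndarray()  # convert the first output back to numpy
```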