使用save_inference_model保存后的模型进行预测时,结果与用代码直接预测不一致
Created by: yc556600
如果您没有查询到相似问题,为快速解决您的提问,建立issue时请提供如下细节信息:
-
# Version / environment info: PaddlePaddle 1.6.2.
# Symptom: for the same input text, the predicted probability and the
# corresponding label differ between the two prediction paths.
# Prediction code:
place = fluid.CUDAPlace(0) if args.use_cuda == True else fluid.CPUPlace()
exe = fluid.Executor(place)
exe.run(predict_startup)

if args.init_checkpoint:
    # init_pretraining_params(exe, args.init_checkpoint, predict_prog, args.use_fp16)
    init_checkpoint(exe, args.init_checkpoint, predict_prog, args.use_fp16)
else:
    raise ValueError("args 'init_checkpoint' should be set for prediction!")

predict_exe = fluid.ParallelExecutor(
    use_cuda=args.use_cuda, main_program=predict_prog)

# Feed the test split once, in order, so results are reproducible.
predict_data_loader.set_batch_generator(
    processor.data_generator(
        batch_size=args.batch_size, phase='test', epoch=1, shuffle=False))

predict_data_loader.start()
all_results = []
xxx_results = []
time_begin = time.time()
while True:
    try:
        results = predict_exe.run(fetch_list=[probs.name])
        all_results.extend(results[0])
        xxx_results.extend(results)
    except fluid.core.EOFException:
        # End of data: reset the loader and leave the loop.
        predict_data_loader.reset()
        break
# Code that saves the inference model:
if args.save_inference_model_path:
    # Derive the output directory name from the checkpoint directory name.
    _, ckpt_dir = os.path.split(args.init_checkpoint.rstrip('/'))
    dir_name = ckpt_dir + '_inference_model'
    model_path = os.path.join(args.save_inference_model_path, dir_name)
    print("save inference model to %s" % model_path)
    # Persist program + parameters as combined "model" / "params" files.
    fluid.io.save_inference_model(
        model_path,
        feed_target_names,
        [probs],
        exe,
        params_filename="params",
        model_filename="model",
        main_program=predict_prog)
# Code that loads the saved inference model:
def load_model(self):
    # Paths of the combined model/params files written by save_inference_model.
    print(os.path.join(self.init_checkpoint, "params"))
    print(os.path.join(self.init_checkpoint, "model"))
    self.config = AnalysisConfig(
        os.path.join(self.init_checkpoint, "model"),
        os.path.join(self.init_checkpoint, "params"))
    # Enable GPU with a 20000 MB initial memory pool on device 0.
    self.config.enable_use_gpu(20000, 0)
    print(self.config.gpu_device_id())
    # self.config.disable_gpu()
    # Create the PaddlePredictor.
    self.predictor = create_paddle_predictor(self.config)