Error when saving the best model while finetuning with PaddleHub
Created by: x286202
Version
1.6.0
Environment
AI Studio Advanced Edition; Python version: python3.7; framework version: PaddlePaddle 1.7.0
Code executed
The code is taken from the official demo.
```python
import paddlehub as hub

module = hub.Module(name="bert_uncased_L-12_H-768_A-12")
inputs, outputs, program = module.context(trainable=True, max_seq_len=256)

dataset = hub.dataset.SQUAD(version_2_with_negative=False)
reader = hub.reader.ReadingComprehensionReader(
    dataset=dataset,
    vocab_path=module.get_vocab_path(),
    max_seq_len=256,
    doc_stride=128,
    max_query_length=64)

seq_output = outputs["sequence_output"]

feed_list = [
    inputs["input_ids"].name,
    inputs["position_ids"].name,
    inputs["segment_ids"].name,
    inputs["input_mask"].name,
]

strategy = hub.AdamWeightDecayStrategy(
    weight_decay=0.01,
    learning_rate=0.00001,
    warmup_proportion=0.1)

config = hub.RunConfig(
    eval_interval=100,
    log_interval=20,
    num_epoch=5,
    batch_size=32,
    checkpoint_dir="test_dir",
    strategy=strategy)

reading_comprehension_task = hub.ReadingComprehensionTask(
    data_reader=reader,
    feature=seq_output,
    feed_list=feed_list,
    config=config,
    sub_task="squad",
)

reading_comprehension_task.finetune_and_eval()
```
Error message:
```
[2020-04-03 15:36:44,364] [   TRAIN] - step 400 / 15168: loss=4.22770 [step/sec: 1.75]
[2020-04-03 15:36:44,365] [    INFO] - Evaluation on dev dataset start
share_vars_from is set, scope is ignored.
[2020-04-03 15:39:01,824] [    EVAL] - [dev dataset evaluation result] loss=4.13981 exact_match=7.82403 f1=15.55682 [step/sec: 5.86]
[2020-04-03 15:39:01,825] [    EVAL] - best model saved to test_dir/best_model [best exact_match=7.82403]
---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
in
     50
     51 # Finetune by PaddleHub's API
---> 52 reading_comprehension_task.finetune_and_eval()

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddlehub/finetune/task/base_task.py in finetune_and_eval(self)
    861
    862     def finetune_and_eval(self):
--> 863         return self.finetune(do_eval=True)
    864
    865     def finetune(self, do_eval=False):

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddlehub/finetune/task/base_task.py in finetune(self, do_eval)
    882             while self.current_epoch <= self.config.num_epoch:
    883                 self.config.strategy.step()
--> 884                 run_states = self._run(do_eval=do_eval)
    885                 self.env.current_epoch += 1
    886

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddlehub/finetune/task/base_task.py in _run(self, do_eval)
   1095
   1096                 if do_eval and self.current_step % self.config.eval_interval == 0:
-> 1097                     self._eval_interval_event()
   1098
   1099                 self._run_step_event(step_run_state)

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddlehub/finetune/task/base_task.py in hook_function(self, *args)
    628             for name, func in self._hooks[hook_type].items():
    629                 if inspect.ismethod(func):
--> 630                     func(*args)
    631                 else:
    632                     partial(func, self)(*args)

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddlehub/finetune/task/base_task.py in _default_eval_interval_event(self)
    780
    781     def _default_eval_interval_event(self):
--> 782         self.eval(phase="dev")
    783
    784     def _default_run_step_event(self, run_state):

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddlehub/finetune/task/base_task.py in eval(self, phase, load_best_model)
    920         self._eval_start_event()
    921         run_states = self._run()
--> 922         self._eval_end_event(run_states)
    923         return run_states
    924

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddlehub/finetune/task/base_task.py in hook_function(self, *args)
    628             for name, func in self._hooks[hook_type].items():
    629                 if inspect.ismethod(func):
--> 630                     func(*args)
    631                 else:
    632                     partial(func, self)(*args)

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddlehub/finetune/task/base_task.py in _default_eval_end_event(self, run_states)
    751                 logger.eval("best model saved to %s [best %s=%.5f]" %
    752                             (model_saved_dir, main_metric, main_value))
--> 753                 self.save_inference_model(dirname=model_saved_dir)
    754
    755     def _default_log_interval_event(self, run_states):

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddlehub/finetune/task/base_task.py in save_inference_model(self, dirname, model_filename, params_filename)
    858             main_program=self.main_program,
    859             model_filename=model_filename,
--> 860             params_filename=params_filename)
    861
    862     def finetune_and_eval(self):

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/io.py in save_inference_model(dirname, feeded_var_names, target_vars, executor, main_program, model_filename, params_filename, export_for_deployment, program_only)
   1216
   1217         main_program = main_program._prune_with_input(
-> 1218             feeded_var_names=feeded_var_names, targets=target_vars)
   1219         main_program = main_program._inference_optimize(prune_read_op=True)
   1220         fetch_var_names = [v.name for v in target_vars]

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/framework.py in _prune_with_input(self, feeded_var_names, targets)
   4120                 if t is None:
   4121                     raise ValueError(
-> 4122                         "The target variable must have an "
   4123                         "associated operator that generates it.")
   4124                 else:

ValueError: The target variable must have an associated operator that generates it.
```
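For context on what the final ValueError means (independent of PaddleHub): in the Paddle 1.x static-graph API, `fluid.io.save_inference_model` prunes `main_program` down to the given `target_vars`, and every target must be produced by an operator inside that program; otherwise `_prune_with_input` raises exactly this error. Below is a minimal sketch of that behavior, not the PaddleHub code path itself; the names `x`, `y`, `z`, `ok_model`, and `bad_model` are made up for illustration.

```python
import paddle.fluid as fluid

startup = fluid.Program()
main = fluid.Program()
with fluid.program_guard(main, startup):
    x = fluid.data(name="x", shape=[None, 4], dtype="float32")
    y = fluid.layers.fc(input=x, size=2)  # y is generated by an operator inside `main`

exe = fluid.Executor(fluid.CPUPlace())
exe.run(startup)

# Works: `y` has a generating operator in `main`, so the program can be pruned
# and exported.
fluid.io.save_inference_model(
    dirname="ok_model",
    feeded_var_names=["x"],
    target_vars=[y],
    executor=exe,
    main_program=main)

# A variable built in a different program has no generating operator in `main`,
# so passing it as a target raises the same
# "The target variable must have an associated operator that generates it."
other = fluid.Program()
with fluid.program_guard(other, fluid.Program()):
    z = fluid.data(name="z", shape=[None, 2], dtype="float32")

# fluid.io.save_inference_model(
#     dirname="bad_model",
#     feeded_var_names=["x"],
#     target_vars=[z],
#     executor=exe,
#     main_program=main)
```

In the trace above, the same check fires inside PaddleHub's `save_inference_model(dirname=model_saved_dir)` when the best model is exported, so one of the target variables the task registers for export apparently has no generating operator in the program being saved.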