Commit ef0880b9 authored by: W WenmuZhou

Delete the save_inference_mode function, because the dev version of Paddle now supports exporting the CRNN model directly.

Parent 606a3873
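For reference, the export that the removed helper performed can now be done directly with Paddle's dynamic-to-static API. The sketch below is illustrative only and is not part of this commit: the function name and save path are assumptions, and it simply mirrors the same paddle.jit.to_static / paddle.jit.save calls the deleted code used, with batch size and image width left dynamic for variable-width text lines.

import paddle
from paddle.static import InputSpec

def export_rec_model(model, save_path):
    # Switch to eval mode before tracing so dropout/BN behave as in inference.
    model.eval()
    # Batch size and image width are left as None so the exported CRNN model
    # accepts variable-width text line images (shape: N x 3 x 32 x W).
    spec = [InputSpec(shape=[None, 3, 32, None], dtype='float32')]
    static_model = paddle.jit.to_static(model, input_spec=spec)
    # Writes save_path.pdmodel / save_path.pdiparams for inference deployment.
    paddle.jit.save(static_model, save_path)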
@@ -333,22 +333,6 @@ def eval(model, valid_dataloader, post_process_class, eval_class):
     return metirc
 
 
-def save_inference_mode(model, config, logger):
-    if dist.get_rank() == 0:
-        model.eval()
-        print('infer')
-        save_path = '{}/infer/{}'.format(config['Global']['save_model_dir'],
-                                         config['Architecture']['model_type'])
-        if config['Architecture']['model_type'] == 'rec':
-            input_shape = [None, 3, 32, None]
-            jit_model = paddle.jit.to_static(
-                model, input_spec=[paddle.static.InputSpec(input_shape)])
-            paddle.jit.save(jit_model, save_path)
-            logger.info('inference model save to {}'.format(save_path))
-        model.train()
-
-
 def preprocess():
     FLAGS = ArgsParser().parse_args()
     config = load_config(FLAGS.config)
@@ -89,7 +89,6 @@ def main(config, device, logger, vdl_writer):
     program.train(config, train_dataloader, valid_dataloader, device, model,
                   loss_class, optimizer, lr_scheduler, post_process_class,
                   eval_class, pre_best_model_dict, logger, vdl_writer)
-    program.save_inference_mode(model, config, logger)
 
 
 def test_reader(config, device, logger):