diff --git a/configs/rec/ch_ppocr_v2.0/rec_chinese_lite_train_distillation_v2.1.yml b/configs/rec/ch_ppocr_v2.1/rec_chinese_lite_train_distillation_v2.1.yml
similarity index 94%
rename from configs/rec/ch_ppocr_v2.0/rec_chinese_lite_train_distillation_v2.1.yml
rename to configs/rec/ch_ppocr_v2.1/rec_chinese_lite_train_distillation_v2.1.yml
index 016788ea72be3d9b4536c0c410354449c7de84ae..6b60ae0860959405fd512913d022c02e2e2dae05 100644
--- a/configs/rec/ch_ppocr_v2.0/rec_chinese_lite_train_distillation_v2.1.yml
+++ b/configs/rec/ch_ppocr_v2.1/rec_chinese_lite_train_distillation_v2.1.yml
@@ -8,9 +8,9 @@ Global:
   save_epoch_step: 3
   eval_batch_step: [0, 2000]
   cal_metric_during_train: true
-  pretrained_model: null
-  checkpoints: null
-  save_inference_dir: null
+  pretrained_model:
+  checkpoints:
+  save_inference_dir:
   use_visualdl: false
   infer_img: doc/imgs_words/ch/word_1.jpg
   character_dict_path: ppocr/utils/ppocr_keys_v1.txt
@@ -38,7 +38,7 @@ Architecture:
   algorithm: Distillation
   Models:
     Student:
-      pretrained: null
+      pretrained:
       freeze_params: false
       return_all_feats: true
       model_type: rec
@@ -57,7 +57,7 @@ Architecture:
         name: CTCHead
         fc_decay: 0.00001
     Teacher:
-      pretrained: null
+      pretrained:
       freeze_params: false
       return_all_feats: true
       model_type: rec
@@ -118,8 +118,8 @@ Train:
       - DecodeImage:
           img_mode: BGR
           channel_first: false
-      - RecAug: null
-      - CTCLabelEncode: null
+      - RecAug:
+      - CTCLabelEncode:
       - RecResizeImg:
           image_shape: [3, 32, 320]
       - KeepKeys:
@@ -143,7 +143,7 @@ Eval:
       - DecodeImage:
           img_mode: BGR
           channel_first: false
-      - CTCLabelEncode: null
+      - CTCLabelEncode:
       - RecResizeImg:
           image_shape: [3, 32, 320]
       - KeepKeys:
diff --git a/ppocr/modeling/architectures/distillation_model.py b/ppocr/modeling/architectures/distillation_model.py
index 255ff32b23743f34bc6bd7cabde9d607d8416928..2e512331afcfc20e422dbef4ba1a4acd581df9e7 100644
--- a/ppocr/modeling/architectures/distillation_model.py
+++ b/ppocr/modeling/architectures/distillation_model.py
@@ -21,7 +21,7 @@ from ppocr.modeling.backbones import build_backbone
 from ppocr.modeling.necks import build_neck
 from ppocr.modeling.heads import build_head
 from .base_model import BaseModel
-from ppocr.utils.save_load import load_dygraph_pretrain
+from ppocr.utils.save_load import init_model
 
 __all__ = ['DistillationModel']
 
@@ -46,7 +46,7 @@ class DistillationModel(nn.Layer):
                 pretrained = model_config.pop("pretrained")
             model = BaseModel(model_config)
             if pretrained is not None:
-                load_dygraph_pretrain(model, path=pretrained)
+                init_model(model, path=pretrained)
             if freeze_params:
                 for param in model.parameters():
                     param.trainable = False
diff --git a/ppocr/utils/save_load.py b/ppocr/utils/save_load.py
index 951132c3ab8b137a5023617f83036b428003cc97..23f5401bb71a2ef50ff2ff2c3c27275d7e10b3c0 100644
--- a/ppocr/utils/save_load.py
+++ b/ppocr/utils/save_load.py
@@ -23,6 +23,8 @@ import six
 
 import paddle
 
+from ppocr.utils.logging import get_logger
+
 __all__ = ['init_model', 'save_model', 'load_dygraph_pretrain']
 
 
@@ -42,19 +44,11 @@ def _mkdir_if_not_exist(path, logger):
                 raise OSError('Failed to mkdir {}'.format(path))
 
 
-def load_dygraph_pretrain(model, logger=None, path=None):
-    if not (os.path.isdir(path) or os.path.exists(path + '.pdparams')):
-        raise ValueError("Model pretrain path {} does not "
-                         "exists.".format(path))
-    param_state_dict = paddle.load(path + '.pdparams')
-    model.set_state_dict(param_state_dict)
-    return
-
-
-def init_model(config, model, logger, optimizer=None, lr_scheduler=None):
+def init_model(config, model, optimizer=None, lr_scheduler=None):
     """
     load model from checkpoint or pretrained_model
     """
+    logger = get_logger()
     global_config = config['Global']
     checkpoints = global_config.get('checkpoints')
     pretrained_model = global_config.get('pretrained_model')
@@ -77,13 +71,17 @@ def init_model(config, model, logger, optimizer=None, lr_scheduler=None):
             best_model_dict = states_dict.get('best_model_dict', {})
             if 'epoch' in states_dict:
                 best_model_dict['start_epoch'] = states_dict['epoch'] + 1
-
         logger.info("resume from {}".format(checkpoints))
     elif pretrained_model:
         if not isinstance(pretrained_model, list):
             pretrained_model = [pretrained_model]
         for pretrained in pretrained_model:
-            load_dygraph_pretrain(model, logger, path=pretrained)
+            if not (os.path.isdir(pretrained) or
+                    os.path.exists(pretrained + '.pdparams')):
+                raise ValueError("Model pretrain path {} does not "
+                                 "exists.".format(pretrained))
+            param_state_dict = paddle.load(pretrained + '.pdparams')
+            model.set_state_dict(param_state_dict)
         logger.info("load pretrained model from {}".format(
             pretrained_model))
     else:
diff --git a/tools/eval.py b/tools/eval.py
index 9817fa75093dd5127e3d11501ebc0473c9b53365..66eb315f9b37ed681f6a899613fa43c1313bc654 100755
--- a/tools/eval.py
+++ b/tools/eval.py
@@ -49,7 +49,7 @@ def main():
     model = build_model(config['Architecture'])
     use_srn = config['Architecture']['algorithm'] == "SRN"
 
-    best_model_dict = init_model(config, model, logger)
+    best_model_dict = init_model(config, model)
     if len(best_model_dict):
         logger.info('metric in ckpt ***************')
         for k, v in best_model_dict.items():
diff --git a/tools/export_model.py b/tools/export_model.py
index 1d4538c829672d7780fdf01868e544311f6cd312..625c82468edff7c3eeb787422bdef07b4b274460 100755
--- a/tools/export_model.py
+++ b/tools/export_model.py
@@ -95,7 +95,7 @@ def main():
         else:  # base rec model
             config["Architecture"]["Head"]["out_channels"] = char_num
     model = build_model(config["Architecture"])
-    init_model(config, model, logger)
+    init_model(config, model)
     model.eval()
 
     save_path = config["Global"]["save_inference_dir"]
diff --git a/tools/infer_cls.py b/tools/infer_cls.py
index 496964826b0b063f9f937c31342932c6cd95502f..a588cab433442695e3bd395da63e35a2052de501 100755
--- a/tools/infer_cls.py
+++ b/tools/infer_cls.py
@@ -47,7 +47,7 @@ def main():
     # build model
     model = build_model(config['Architecture'])
 
-    init_model(config, model, logger)
+    init_model(config, model)
 
     # create data ops
     transforms = []
diff --git a/tools/infer_det.py b/tools/infer_det.py
index 913d617defea18fe881e6fd2212b1df20f7d26d3..674f52ee35aab25356ccdbf371f8bac5b52b871a 100755
--- a/tools/infer_det.py
+++ b/tools/infer_det.py
@@ -61,7 +61,7 @@ def main():
     # build model
     model = build_model(config['Architecture'])
 
-    init_model(config, model, logger)
+    init_model(config, model)
 
     # build post process
     post_process_class = build_post_process(config['PostProcess'])
diff --git a/tools/infer_e2e.py b/tools/infer_e2e.py
index 9c079f6074f088ef0298cab839f74faefad82abb..1cd468b8e552237af31d985b8b68ddbeecba9c96 100755
--- a/tools/infer_e2e.py
+++ b/tools/infer_e2e.py
@@ -68,7 +68,7 @@ def main():
     # build model
     model = build_model(config['Architecture'])
 
-    init_model(config, model, logger)
+    init_model(config, model)
 
     # build post process
     post_process_class = build_post_process(config['PostProcess'],
diff --git a/tools/infer_rec.py b/tools/infer_rec.py
index 6894207d4bb7eaa2aa84f4a0a30ee878d389b5cc..09f5a0c767b15c312cdfbe8ed695ea06bdc8cdc4 100755
--- a/tools/infer_rec.py
+++ b/tools/infer_rec.py
@@ -58,7 +58,7 @@ def main():
 
     model = build_model(config['Architecture'])
 
-    init_model(config, model, logger)
+    init_model(config, model)
 
     # create data ops
     transforms = []
diff --git a/tools/train.py b/tools/train.py
index 555d33671a2b66e82e04fa59d54a1d6c7e80d33e..b024240b4d5d4973645336c62d3762087ec7bbeb 100755
--- a/tools/train.py
+++ b/tools/train.py
@@ -97,7 +97,7 @@ def main(config, device, logger, vdl_writer):
     # build metric
     eval_class = build_metric(config['Metric'])
     # load pretrain model
-    pre_best_model_dict = init_model(config, model, logger, optimizer)
+    pre_best_model_dict = init_model(config, model, optimizer)
 
     logger.info('train dataloader has {} iters'.format(len(train_dataloader)))
     if valid_dataloader is not None:
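For reference, a minimal usage sketch (not taken from the diff above) of how a caller invokes the updated `init_model`, mirroring the `tools/eval.py` change: the logger argument is dropped because `init_model` now obtains its own logger via `ppocr.utils.logging.get_logger()` and reads `Global.checkpoints` / `Global.pretrained_model` from the config. The `load_for_eval` helper and its `config` argument are hypothetical; only `build_model` and `init_model` come from the patch.

```python
from ppocr.modeling.architectures import build_model
from ppocr.utils.save_load import init_model


def load_for_eval(config):
    """Hypothetical helper: build a model from the parsed YAML config dict and
    restore its weights the way tools/eval.py does after this patch."""
    model = build_model(config['Architecture'])
    # No logger is passed any more; init_model fetches one itself and returns
    # the best-metric dict stored alongside a resumed checkpoint (empty if the
    # model was loaded from pretrained_model or trained from scratch).
    best_model_dict = init_model(config, model)
    return model, best_model_dict
```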