Global:
  use_gpu: true
  epoch_num: 1200
  log_smooth_window: 20
  print_batch_step: 2
  save_model_dir: ./output/ch_db_mv3/
  save_epoch_step: 1200
  # evaluation is run every 2000 iterations after the 3000th iteration
  eval_batch_step: [3000, 2000]
  cal_metric_during_train: False
  pretrained_model: ./pretrain_models/ch_PP-OCRv2_det_distill_train/best_accuracy
  checkpoints:
  save_inference_dir:
  use_visualdl: False
  infer_img: doc/imgs_en/img_10.jpg
  save_res_path: ./output/det_db/predicts_db.txt

Architecture:
  name: DistillationModel
  algorithm: Distillation
  Models:
    Teacher:
      freeze_params: true
      return_all_feats: false
      model_type: det
      algorithm: DB
      Transform:
      Backbone:
        name: ResNet
        layers: 18
      Neck:
        name: DBFPN
        out_channels: 256
      Head:
        name: DBHead
        k: 50
    Student:
      freeze_params: false
      return_all_feats: false
      model_type: det
      algorithm: DB
      Backbone:
        name: MobileNetV3
        scale: 0.5
        model_name: large
        disable_se: True
      Neck:
        name: DBFPN
        out_channels: 96
      Head:
        name: DBHead
        k: 50
    Student2:
      freeze_params: false
      return_all_feats: false
      model_type: det
      algorithm: DB
      Transform:
      Backbone:
        name: MobileNetV3
        scale: 0.5
        model_name: large
        disable_se: True
      Neck:
        name: DBFPN
        out_channels: 96
      Head:
        name: DBHead
        k: 50

Loss:
  name: CombinedLoss
  loss_config_list:
  - DistillationDilaDBLoss:
      weight: 1.0
      model_name_pairs:
      - ["Student", "Teacher"]
      - ["Student2", "Teacher"]
      key: maps
      balance_loss: true
      main_loss_type: DiceLoss
      alpha: 5
      beta: 10
      ohem_ratio: 3
  - DistillationDMLLoss:
      model_name_pairs:
      - ["Student", "Student2"]
      maps_name: "thrink_maps"
      weight: 1.0
      # act: None
      key: maps
  - DistillationDBLoss:
      weight: 1.0
      model_name_list: ["Student", "Student2"]
      # key: maps
      # name: DBLoss
      balance_loss: true
      main_loss_type: DiceLoss
      alpha: 5
      beta: 10
      ohem_ratio: 3

Optimizer:
  name: Adam
  beta1: 0.9
  beta2: 0.999
  lr:
    name: Cosine
    learning_rate: 0.001
    warmup_epoch: 2
  regularizer:
    name: 'L2'
    factor: 0

PostProcess:
  name: DistillationDBPostProcess
  model_name: ["Student", "Student2", "Teacher"]
  # key: maps
  thresh: 0.3
  box_thresh: 0.6
  max_candidates: 1000
  unclip_ratio: 1.5

Metric:
  name: DistillationMetric
  base_metric_name: DetMetric
  main_indicator: hmean
  key: "Student"

Train:
  dataset:
    name: SimpleDataSet
    data_dir: ./train_data/icdar2015/text_localization/
    label_file_list:
      - ./train_data/icdar2015/text_localization/train_icdar2015_label.txt
    ratio_list: [1.0]
    transforms:
      - DecodeImage: # load image
          img_mode: BGR
          channel_first: False
      - DetLabelEncode: # Class handling label
      - IaaAugment:
          augmenter_args:
            - { 'type': Fliplr, 'args': { 'p': 0.5 } }
            - { 'type': Affine, 'args': { 'rotate': [-10, 10] } }
            - { 'type': Resize, 'args': { 'size': [0.5, 3] } }
      - EastRandomCropData:
          size: [960, 960]
          max_tries: 50
          keep_ratio: true
      - MakeBorderMap:
          shrink_ratio: 0.4
          thresh_min: 0.3
          thresh_max: 0.7
      - MakeShrinkMap:
          shrink_ratio: 0.4
          min_text_size: 8
      - NormalizeImage:
          scale: 1./255.
          mean: [0.485, 0.456, 0.406]
          std: [0.229, 0.224, 0.225]
          order: 'hwc'
      - ToCHWImage:
      - KeepKeys:
          keep_keys: ['image', 'threshold_map', 'threshold_mask', 'shrink_map', 'shrink_mask'] # the order of the dataloader list
  loader:
    shuffle: True
    drop_last: False
    batch_size_per_card: 8
    num_workers: 4

Eval:
  dataset:
    name: SimpleDataSet
    data_dir: ./train_data/icdar2015/text_localization/
    label_file_list:
      - ./train_data/icdar2015/text_localization/test_icdar2015_label.txt
    transforms:
      - DecodeImage: # load image
          img_mode: BGR
          channel_first: False
      - DetLabelEncode: # Class handling label
      - DetResizeForTest:
          # image_shape: [736, 1280]
      - NormalizeImage:
          scale: 1./255.
          mean: [0.485, 0.456, 0.406]
          std: [0.229, 0.224, 0.225]
          order: 'hwc'
      - ToCHWImage:
      - KeepKeys:
          keep_keys: ['image', 'shape', 'polys', 'ignore_tags']
  loader:
    shuffle: False
    drop_last: False
    batch_size_per_card: 1 # must be 1
    num_workers: 2
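
# Usage sketch (an assumption, not part of the original config): with the standard
# PaddleOCR repository layout, a distillation config like this one is typically
# launched with tools/train.py; the config path below is assumed, so point -c at
# wherever this file is actually saved.
#
#   python3 tools/train.py \
#       -c configs/det/ch_PP-OCRv2/ch_PP-OCRv2_det_cml.yml \
#       -o Global.pretrained_model=./pretrain_models/ch_PP-OCRv2_det_distill_train/best_accuracy
#
# The -o flag overrides individual keys (here Global.pretrained_model) without
# editing the file.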