From 53014d34810779994c035388a3de93a78e16a804 Mon Sep 17 00:00:00 2001 From: LDOUBLEV Date: Thu, 26 Aug 2021 09:21:19 +0000 Subject: [PATCH] add fpgm to ocr_det ci --- deploy/slim/prune/sensitivity_anal.py | 19 +++++----- tests/ocr_det_params.txt | 8 ++--- tests/ocr_det_params_prune.txt | 52 +++++++++++++++++++++++++++ tests/prepare.sh | 3 +- 4 files changed, 68 insertions(+), 14 deletions(-) create mode 100644 tests/ocr_det_params_prune.txt diff --git a/deploy/slim/prune/sensitivity_anal.py b/deploy/slim/prune/sensitivity_anal.py index bd2b9649..f80ddd9f 100644 --- a/deploy/slim/prune/sensitivity_anal.py +++ b/deploy/slim/prune/sensitivity_anal.py @@ -75,7 +75,7 @@ def main(config, device, logger, vdl_writer): model = build_model(config['Architecture']) flops = paddle.flops(model, [1, 3, 640, 640]) - logger.info(f"FLOPs before pruning: {flops}") + logger.info("FLOPs before pruning: {}".format(flops)) from paddleslim.dygraph import FPGMFilterPruner model.train() @@ -106,8 +106,8 @@ def main(config, device, logger, vdl_writer): def eval_fn(): metric = program.eval(model, valid_dataloader, post_process_class, - eval_class) - logger.info(f"metric['hmean']: {metric['hmean']}") + eval_class, False) + logger.info("metric['hmean']: {}".format(metric['hmean'])) return metric['hmean'] params_sensitive = pruner.sensitive( @@ -123,16 +123,17 @@ def main(config, device, logger, vdl_writer): # calculate pruned params's ratio params_sensitive = pruner._get_ratios_by_loss(params_sensitive, loss=0.02) for key in params_sensitive.keys(): - logger.info(f"{key}, {params_sensitive[key]}") + logger.info("{}, {}".format(key, params_sensitive[key])) + + #params_sensitive = {} + #for param in model.parameters(): + # if 'transpose' not in param.name and 'linear' not in param.name: + # params_sensitive[param.name] = 0.1 plan = pruner.prune_vars(params_sensitive, [0]) - for param in model.parameters(): - if ("weights" in param.name and "conv" in param.name) or ( - "w_0" in param.name and "conv2d" in param.name): - logger.info(f"{param.name}: {param.shape}") flops = paddle.flops(model, [1, 3, 640, 640]) - logger.info(f"FLOPs after pruning: {flops}") + logger.info("FLOPs after pruning: {}".format(flops)) # start train diff --git a/tests/ocr_det_params.txt b/tests/ocr_det_params.txt index 6aff66c6..be29a0f5 100644 --- a/tests/ocr_det_params.txt +++ b/tests/ocr_det_params.txt @@ -1,10 +1,10 @@ ===========================train_params=========================== model_name:ocr_det python:python3.7 -gpu_list:0|0,1 +gpu_list:3 Global.use_gpu:True|True Global.auto_cast:null -Global.epoch_num:lite_train_infer=2|whole_train_infer=300 +Global.epoch_num:lite_train_infer=1|whole_train_infer=300 Global.save_model_dir:./output/ Train.loader.batch_size_per_card:lite_train_infer=2|whole_train_infer=4 Global.pretrained_model:null @@ -15,7 +15,7 @@ null:null trainer:norm_train|pact_train norm_train:tools/train.py -c configs/det/det_mv3_db.yml -o Global.pretrained_model=./pretrain_models/MobileNetV3_large_x0_5_pretrained pact_train:deploy/slim/quantization/quant.py -c configs/det/det_mv3_db.yml -o -fpgm_train:null +fpgm_train:deploy/slim/prune/sensitivity_anal.py -c configs/det/det_mv3_db.yml -o distill_train:null null:null null:null @@ -29,7 +29,7 @@ Global.save_inference_dir:./output/ Global.pretrained_model: norm_export:tools/export_model.py -c configs/det/det_mv3_db.yml -o quant_export:deploy/slim/quantization/export_model.py -c configs/det/det_mv3_db.yml -o -fpgm_export:deploy/slim/prune/export_prune_model.py 
+fpgm_export:deploy/slim/prune/export_prune_model.py -c configs/det/det_mv3_db.yml -o distill_export:null export1:null export2:null diff --git a/tests/ocr_det_params_prune.txt b/tests/ocr_det_params_prune.txt new file mode 100644 index 00000000..de218c1b --- /dev/null +++ b/tests/ocr_det_params_prune.txt @@ -0,0 +1,52 @@ +===========================train_params=========================== +model_name:ocr_det +python:python3.7 +gpu_list:3 +Global.use_gpu:True +Global.auto_cast:null +Global.epoch_num:lite_train_infer=1|whole_train_infer=300 +Global.save_model_dir:./output/ +Train.loader.batch_size_per_card:lite_train_infer=2|whole_train_infer=4 +Global.pretrained_model:null +train_model_name:latest +train_infer_img_dir:./train_data/icdar2015/text_localization/ch4_test_images/ +null:null +## +trainer:fpgm_train +norm_train:null +pact_train:null +fpgm_train:deploy/slim/prune/sensitivity_anal.py -c configs/det/det_mv3_db.yml -o +distill_train:null +null:null +null:null +## +===========================eval_params=========================== +eval:tools/eval.py -c configs/det/det_mv3_db.yml -o +null:null +## +===========================infer_params=========================== +Global.save_inference_dir:./output/ +Global.pretrained_model: +norm_export:tools/export_model.py -c configs/det/det_mv3_db.yml -o +quant_export:deploy/slim/quantization/export_model.py -c configs/det/det_mv3_db.yml -o +fpgm_export:deploy/slim/prune/export_prune_model.py -c configs/det/det_mv3_db.yml -o +distill_export:null +export1:null +export2:null +## +infer_model:./inference/ch_ppocr_mobile_v2.0_det_infer/ +infer_export:null +infer_quant:False +inference:tools/infer/predict_det.py +--use_gpu:True|False +--enable_mkldnn:True|False +--cpu_threads:1|6 +--rec_batch_num:1 +--use_tensorrt:False|True +--precision:fp32|fp16|int8 +--det_model_dir: +--image_dir:./inference/ch_det_data_50/all-sum-510/ +--save_log_path:null +--benchmark:True +null:null + diff --git a/tests/prepare.sh b/tests/prepare.sh index 8138f26d..1b1130d1 100644 --- a/tests/prepare.sh +++ b/tests/prepare.sh @@ -38,7 +38,8 @@ if [ ${MODE} = "lite_train_infer" ];then rm -rf ./train_data/ic15_data wget -nc -P ./train_data/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/test/icdar2015_lite.tar wget -nc -P ./train_data/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/test/ic15_data.tar # todo change to bcebos - + wget -nc -P ./deploy/slim/prune https://paddleocr.bj.bcebos.com/dygraph_v2.0/test/sen.pickle + cd ./train_data/ && tar xf icdar2015_lite.tar && tar xf ic15_data.tar ln -s ./icdar2015_lite ./icdar2015 cd ../ -- GitLab
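
For context on what the new fpgm_train / fpgm_export CI steps exercise, below is a minimal, self-contained sketch of the sensitivity-pruning flow that deploy/slim/prune/sensitivity_anal.py runs with PaddleSlim's FPGMFilterPruner. The tiny stand-in model, the constant eval_fn, and the pruner constructor arguments are illustrative assumptions (the real script builds the detection model from det_mv3_db.yml and returns hmean from evaluation on the validation set); the sensitive, _get_ratios_by_loss, prune_vars, and paddle.flops calls mirror the ones visible in the patch, and sen.pickle is the sensitivity file that prepare.sh now downloads.

# Minimal sketch of the FPGM sensitivity-pruning flow (assumptions noted in comments).
import paddle
import paddle.nn as nn
from paddleslim.dygraph import FPGMFilterPruner


class TinyDetBackbone(nn.Layer):
    """Illustrative stand-in for the detection model built from det_mv3_db.yml."""

    def __init__(self):
        super().__init__()
        self.conv1 = nn.Conv2D(3, 16, 3, padding=1)
        self.conv2 = nn.Conv2D(16, 32, 3, padding=1)
        self.head = nn.Conv2D(32, 1, 1)

    def forward(self, x):
        return self.head(self.conv2(self.conv1(x)))


def eval_fn():
    # In sensitivity_anal.py this calls program.eval(...) and returns metric['hmean'];
    # a constant stands in for it here.
    return 0.5


model = TinyDetBackbone()
model.train()
print("FLOPs before pruning: {}".format(paddle.flops(model, [1, 3, 640, 640])))

# Assumed constructor form (model, input_shape), as in PaddleSlim's dygraph pruning examples;
# the constructor call itself is not shown in the patch hunks.
pruner = FPGMFilterPruner(model, [1, 3, 640, 640])

# Compute per-filter sensitivity, or reload it from sen.pickle if the file already exists.
sens = pruner.sensitive(eval_func=eval_fn, sen_file="./sen.pickle")

# Convert sensitivities into per-parameter prune ratios for a tolerated metric loss of 0.02,
# then prune along axis 0 (output channels), as the patched script does.
ratios = pruner._get_ratios_by_loss(sens, loss=0.02)
plan = pruner.prune_vars(ratios, [0])

print("FLOPs after pruning: {}".format(paddle.flops(model, [1, 3, 640, 640])))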