From 15ae5176a3aebd7ccb85a45d287b2d1089af0607 Mon Sep 17 00:00:00 2001
From: tink2123
Date: Wed, 10 Jun 2020 15:41:16 +0800
Subject: [PATCH] add cosine decay

---
 configs/rec/rec_icdar15_train.yml |  4 ++++
 ppocr/optimizer.py                | 10 ++++++++++
 2 files changed, 14 insertions(+)

diff --git a/configs/rec/rec_icdar15_train.yml b/configs/rec/rec_icdar15_train.yml
index 934a9410..8aa96160 100755
--- a/configs/rec/rec_icdar15_train.yml
+++ b/configs/rec/rec_icdar15_train.yml
@@ -41,3 +41,7 @@ Optimizer:
   base_lr: 0.0005
   beta1: 0.9
   beta2: 0.999
+  decay:
+    function: cosine_decay
+    step_each_epoch: 20
+    total_epoch: 1000
diff --git a/ppocr/optimizer.py b/ppocr/optimizer.py
index a6ad1eb7..8598e48b 100755
--- a/ppocr/optimizer.py
+++ b/ppocr/optimizer.py
@@ -28,6 +28,16 @@ def AdamDecay(params, parameter_list=None):
     base_lr = params['base_lr']
     beta1 = params['beta1']
     beta2 = params['beta2']
+    if 'decay' in params:
+        params = params['decay']
+        decay_mode = params['function']
+        step_each_epoch = params['step_each_epoch']
+        total_epoch = params['total_epoch']
+        if decay_mode == "cosine_decay":
+            base_lr = fluid.layers.cosine_decay(
+                learning_rate=base_lr,
+                step_each_epoch=step_each_epoch,
+                epochs=total_epoch)
     optimizer = fluid.optimizer.Adam(
         learning_rate=base_lr,
         beta1=beta1,
--
GitLab
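
Note: as a reference for reviewers, below is a minimal standalone sketch of the learning-rate schedule this patch wires up, assuming fluid.layers.cosine_decay follows its documented formula lr = base_lr * 0.5 * (cos(floor(step / step_each_epoch) * pi / epochs) + 1). The helper name cosine_decay_lr and the sampled epochs are illustrative only; the constants mirror the values added to rec_icdar15_train.yml. It does not require Paddle to run.

    # Sketch of the cosine decay schedule enabled by this patch (no Paddle needed).
    import math

    def cosine_decay_lr(base_lr, step_each_epoch, epochs, global_step):
        """Learning rate after `global_step` iterations under cosine decay."""
        cur_epoch = math.floor(global_step / step_each_epoch)
        return base_lr * 0.5 * (math.cos(cur_epoch * math.pi / epochs) + 1)

    if __name__ == "__main__":
        # Values taken from the decay block added to rec_icdar15_train.yml.
        base_lr, step_each_epoch, total_epoch = 0.0005, 20, 1000
        for epoch in (0, 250, 500, 750, 999):
            lr = cosine_decay_lr(base_lr, step_each_epoch, total_epoch,
                                 epoch * step_each_epoch)
            print("epoch %4d  lr %.6f" % (epoch, lr))

Running this prints the rate starting at 0.0005, falling to half (0.00025) at the schedule midpoint and approaching 0 near total_epoch, which is the behavior the new decay config is meant to give AdamDecay.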