diff --git a/configs/rec/rec_icdar15_train.yml b/configs/rec/rec_icdar15_train.yml
index 934a94109cb304c5dd5e8db281f1fbf00d928e39..8aa96160f4182f94b79cf0340ade7698d4bf7e55 100755
--- a/configs/rec/rec_icdar15_train.yml
+++ b/configs/rec/rec_icdar15_train.yml
@@ -41,3 +41,7 @@ Optimizer:
   base_lr: 0.0005
   beta1: 0.9
   beta2: 0.999
+  decay:
+    function: cosine_decay
+    step_each_epoch: 20
+    total_epoch: 1000
diff --git a/ppocr/optimizer.py b/ppocr/optimizer.py
index a6ad1eb702b8e1c999249d0a3dd98de0efc06baf..8598e48b4df09408ed3259bcf5ba55b6dbc698ff 100755
--- a/ppocr/optimizer.py
+++ b/ppocr/optimizer.py
@@ -28,6 +28,16 @@ def AdamDecay(params, parameter_list=None):
     base_lr = params['base_lr']
     beta1 = params['beta1']
     beta2 = params['beta2']
+    if 'decay' in params:
+        params = params['decay']
+        decay_mode = params['function']
+        step_each_epoch = params['step_each_epoch']
+        total_epoch = params['total_epoch']
+        if decay_mode == "cosine_decay":
+            base_lr = fluid.layers.cosine_decay(
+                learning_rate=base_lr,
+                step_each_epoch=step_each_epoch,
+                epochs=total_epoch)
     optimizer = fluid.optimizer.Adam(
         learning_rate=base_lr,
         beta1=beta1,
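
For reference, a minimal sketch of how the new `decay` block is consumed, assuming PaddlePaddle 1.x with the static-graph `fluid` API and that `ppocr.optimizer` is importable from the repo root; the dict below mirrors the `Optimizer` section of `rec_icdar15_train.yml` after this change, and the values are the ones added in the diff.

```python
import paddle.fluid as fluid
from ppocr.optimizer import AdamDecay

# Mirrors the Optimizer section of configs/rec/rec_icdar15_train.yml.
optimizer_params = {
    'base_lr': 0.0005,
    'beta1': 0.9,
    'beta2': 0.999,
    'decay': {
        'function': 'cosine_decay',
        'step_each_epoch': 20,   # steps (batches) per epoch
        'total_epoch': 1000,     # epochs over which the cosine schedule anneals
    },
}

# Build inside a fresh program so the cosine_decay ops are added to it;
# when 'decay' is present, AdamDecay wraps base_lr with fluid.layers.cosine_decay.
with fluid.program_guard(fluid.Program(), fluid.Program()):
    optimizer = AdamDecay(optimizer_params)
```

Without a `decay` entry in the config, `AdamDecay` falls back to the constant `base_lr`, so existing configs keep their previous behavior.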