From b27207ba7df65b111d2b2262a381da8adbac0fd5 Mon Sep 17 00:00:00 2001 From: tink2123 Date: Fri, 12 Jun 2020 14:07:58 +0800 Subject: [PATCH] update cosine decay doc --- doc/doc_ch/config.md | 2 +- doc/doc_en/config_en.md | 2 +- ppocr/optimizer.py | 5 +++++ 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/doc/doc_ch/config.md b/doc/doc_ch/config.md index fae0c677..ae16263e 100644 --- a/doc/doc_ch/config.md +++ b/doc/doc_ch/config.md @@ -58,6 +58,6 @@ | beta1 | 设置一阶矩估计的指数衰减率 | 0.9 | \ | | beta2 | 设置二阶矩估计的指数衰减率 | 0.999 | \ | | decay | 是否使用decay | \ | \ | -| function(decay) | 设置decay方式 | cosine_decay | \ | +| function(decay) | 设置decay方式 | cosine_decay | 目前只支持cosine_decay | | step_each_epoch | 每个epoch包含多少次迭代 | 20 | 计算方式:total_image_num / (batch_size_per_card * card_size) | | total_epoch | 总共迭代多少个epoch | 1000 | 与Global.epoch_num 一致 | diff --git a/doc/doc_en/config_en.md b/doc/doc_en/config_en.md index 80558e7a..41c2bb86 100644 --- a/doc/doc_en/config_en.md +++ b/doc/doc_en/config_en.md @@ -58,6 +58,6 @@ Take `rec_icdar15_train.yml` as an example: | beta1 | Set the exponential decay rate for the 1st moment estimates | 0.9 | \ | | beta2 | Set the exponential decay rate for the 2nd moment estimates | 0.999 | \ | | decay | Whether to use decay | \ | \ | -| function(decay) | Set the decay function | cosine_decay | \ | +| function(decay) | Set the decay function | cosine_decay | Only support cosine_decay | | step_each_epoch | The number of steps in an epoch. 
| 20 | Calculation :total_image_num / (batch_size_per_card * card_size) | | total_epoch | The number of epochs | 1000 | Consistent with Global.epoch_num | diff --git a/ppocr/optimizer.py b/ppocr/optimizer.py index 8598e48b..c50b14c8 100755 --- a/ppocr/optimizer.py +++ b/ppocr/optimizer.py @@ -15,6 +15,9 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function import paddle.fluid as fluid +from ppocr.utils.utility import initial_logger + +logger = initial_logger() def AdamDecay(params, parameter_list=None): @@ -38,6 +41,8 @@ def AdamDecay(params, parameter_list=None): learning_rate=base_lr, step_each_epoch=step_each_epoch, epochs=total_epoch) + else: + logger.info("Only support Cosine decay currently") optimizer = fluid.optimizer.Adam( learning_rate=base_lr, beta1=beta1, -- GitLab