From 8a28962cd7bd617bf672e93232921996bcb942dd Mon Sep 17 00:00:00 2001
From: WenmuZhou <572459439@qq.com>
Date: Sat, 12 Feb 2022 13:35:13 +0000
Subject: [PATCH] add Const lr

---
 configs/vqa/re/layoutlmv2.yml    | 10 ++++++----
 configs/vqa/re/layoutxlm.yml     |  6 ++----
 ppocr/optimizer/__init__.py      |  7 ++-----
 ppocr/optimizer/learning_rate.py | 36 +++++++++++++++++++++++++++++++++++-
 4 files changed, 45 insertions(+), 14 deletions(-)

diff --git a/configs/vqa/re/layoutlmv2.yml b/configs/vqa/re/layoutlmv2.yml
index 9daa2a96..b213212f 100644
--- a/configs/vqa/re/layoutlmv2.yml
+++ b/configs/vqa/re/layoutlmv2.yml
@@ -34,10 +34,12 @@ Optimizer:
   beta2: 0.999
   clip_norm: 10
   lr:
-    name: Piecewise
-    values: [0.000005, 0.00005]
-    decay_epochs: [10]
-    warmup_epoch: 0
+    # name: Piecewise
+    # values: [0.000005, 0.00005]
+    # decay_epochs: [10]
+    # warmup_epoch: 0
+    learning_rate: 0.00005
+    warmup_epoch: 10
   regularizer:
     name: L2
     factor: 0.00000
diff --git a/configs/vqa/re/layoutxlm.yml b/configs/vqa/re/layoutxlm.yml
index d413b174..ff16120a 100644
--- a/configs/vqa/re/layoutxlm.yml
+++ b/configs/vqa/re/layoutxlm.yml
@@ -34,10 +34,8 @@ Optimizer:
   beta2: 0.999
   clip_norm: 10
   lr:
-    name: Piecewise
-    values: [0.000005, 0.00005]
-    decay_epochs: [10]
-    warmup_epoch: 0
+    learning_rate: 0.00005
+    warmup_epoch: 10
   regularizer:
     name: L2
     factor: 0.00000
diff --git a/ppocr/optimizer/__init__.py b/ppocr/optimizer/__init__.py
index e0c6b903..4110fb47 100644
--- a/ppocr/optimizer/__init__.py
+++ b/ppocr/optimizer/__init__.py
@@ -25,11 +25,8 @@ __all__ = ['build_optimizer']
 def build_lr_scheduler(lr_config, epochs, step_each_epoch):
     from . import learning_rate
     lr_config.update({'epochs': epochs, 'step_each_epoch': step_each_epoch})
-    if 'name' in lr_config:
-        lr_name = lr_config.pop('name')
-        lr = getattr(learning_rate, lr_name)(**lr_config)()
-    else:
-        lr = lr_config['learning_rate']
+    lr_name = lr_config.pop('name', 'Const')
+    lr = getattr(learning_rate, lr_name)(**lr_config)()
     return lr
 
 
diff --git a/ppocr/optimizer/learning_rate.py b/ppocr/optimizer/learning_rate.py
index b1879f3e..fe251f36 100644
--- a/ppocr/optimizer/learning_rate.py
+++ b/ppocr/optimizer/learning_rate.py
@@ -275,4 +275,38 @@ class OneCycle(object):
             start_lr=0.0,
             end_lr=self.max_lr,
             last_epoch=self.last_epoch)
-        return learning_rate
\ No newline at end of file
+        return learning_rate
+
+
+class Const(object):
+    """
+    Constant learning rate schedule, with optional linear warmup
+    Args:
+        learning_rate(float): initial learning rate
+        step_each_epoch(int): steps each epoch
+        warmup_epoch(int): number of warmup epochs. Default: 0, means no warmup
+        last_epoch (int, optional): The index of last epoch. Can be set to restart training. Default: -1, means initial learning rate.
+    """
+
+    def __init__(self,
+                 learning_rate,
+                 step_each_epoch,
+                 warmup_epoch=0,
+                 last_epoch=-1,
+                 **kwargs):
+        super(Const, self).__init__()
+        self.learning_rate = learning_rate
+        self.last_epoch = last_epoch
+        # warmup length is given in epochs; convert it to optimizer steps
+        self.warmup_epoch = round(warmup_epoch * step_each_epoch)
+
+    def __call__(self):
+        learning_rate = self.learning_rate
+        if self.warmup_epoch > 0:
+            learning_rate = lr.LinearWarmup(
+                learning_rate=learning_rate,
+                warmup_steps=self.warmup_epoch,
+                start_lr=0.0,
+                end_lr=self.learning_rate,
+                last_epoch=self.last_epoch)
+        return learning_rate
--
GitLab
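
With this patch, an lr section that omits "name" no longer falls back to a bare
float: build_lr_scheduler defaults the scheduler name to 'Const', so constant
learning rates get the same warmup handling as the named schedulers. A minimal
sketch of that resolution, assuming the PaddleOCR package layout above (the
epochs/step_each_epoch values are illustrative; the lr values mirror
configs/vqa/re/layoutxlm.yml):

    from ppocr.optimizer import build_lr_scheduler

    # No 'name' key: pop('name', 'Const') selects the new Const class.
    lr_config = {'learning_rate': 0.00005, 'warmup_epoch': 10}
    sched = build_lr_scheduler(lr_config, epochs=200, step_each_epoch=100)
    # warmup_epoch > 0, so Const returns the constant rate wrapped in
    # lr.LinearWarmup: 10 * 100 = 1000 linear warmup steps, then 5e-5.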
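
Configs that keep an explicit name, such as the Piecewise block left commented
out in layoutlmv2.yml, still resolve through the unchanged getattr dispatch.
A sketch reusing the values removed above (again with illustrative epoch counts):

    # 'name' present: dispatches to learning_rate.Piecewise as before.
    lr_config = {
        'name': 'Piecewise',
        'values': [0.000005, 0.00005],
        'decay_epochs': [10],
        'warmup_epoch': 0,
    }
    sched = build_lr_scheduler(lr_config, epochs=200, step_each_epoch=100)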
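
For intuition, the schedule Const produces is a linear ramp over the warmup
steps followed by a flat line. A standalone, Paddle-free sketch
(const_with_warmup is a hypothetical helper, not part of this patch; the
defaults mirror the config values above):

    def const_with_warmup(step, end_lr=0.00005, warmup_steps=1000, start_lr=0.0):
        # Linear ramp from start_lr to end_lr during warmup ...
        if step < warmup_steps:
            return start_lr + (end_lr - start_lr) * step / warmup_steps
        # ... then constant for the rest of training.
        return end_lr

    assert const_with_warmup(0) == 0.0
    assert const_with_warmup(500) == 0.000025
    assert const_with_warmup(5000) == 0.00005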