From 3d1dfea9eb39b09b149ee8d097aec732a76f2d25 Mon Sep 17 00:00:00 2001
From: shangliang Xu
Date: Tue, 15 Mar 2022 21:00:45 +0800
Subject: [PATCH] [dev] add use_epoch in LinearWarmup (#5366)

---
 ppdet/optimizer.py | 13 +++++++++----
 1 file changed, 9 insertions(+), 4 deletions(-)

diff --git a/ppdet/optimizer.py b/ppdet/optimizer.py
index 84d28e7c6..fd46c949a 100644
--- a/ppdet/optimizer.py
+++ b/ppdet/optimizer.py
@@ -132,19 +132,24 @@ class LinearWarmup(object):
     Args:
         steps (int): warm up steps
         start_factor (float): initial learning rate factor
+        epochs (int|None): use epochs as warm up steps, the priority
+            of `epochs` is higher than `steps`. Default: None.
     """
 
-    def __init__(self, steps=500, start_factor=1. / 3):
+    def __init__(self, steps=500, start_factor=1. / 3, epochs=None):
         super(LinearWarmup, self).__init__()
         self.steps = steps
         self.start_factor = start_factor
+        self.epochs = epochs
 
     def __call__(self, base_lr, step_per_epoch):
         boundary = []
         value = []
-        for i in range(self.steps + 1):
-            if self.steps > 0:
-                alpha = i / self.steps
+        warmup_steps = self.epochs * step_per_epoch \
+            if self.epochs is not None else self.steps
+        for i in range(warmup_steps + 1):
+            if warmup_steps > 0:
+                alpha = i / warmup_steps
                 factor = self.start_factor * (1 - alpha) + alpha
                 lr = base_lr * factor
                 value.append(lr)
--
GitLab
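
For reference, below is a minimal standalone sketch of the schedule this patch produces. Only the logic visible in the hunk is taken from the diff; the `boundary` bookkeeping and the return statement fall outside the hunk, so they are assumptions here, as are the example numbers (base_lr=0.01, step_per_epoch=100, epochs=2). This is an illustration, not the PaddleDetection source.

    class LinearWarmupSketch(object):
        def __init__(self, steps=500, start_factor=1. / 3, epochs=None):
            self.steps = steps
            self.start_factor = start_factor
            self.epochs = epochs  # new in this patch; overrides `steps` when set

        def __call__(self, base_lr, step_per_epoch):
            boundary = []
            value = []
            # `epochs` has priority: convert epochs to iterations using the
            # per-epoch step count supplied by the caller.
            warmup_steps = self.epochs * step_per_epoch \
                if self.epochs is not None else self.steps
            for i in range(warmup_steps + 1):
                if warmup_steps > 0:
                    # linear ramp: factor goes from start_factor at i=0
                    # to 1 at i=warmup_steps
                    alpha = i / warmup_steps
                    factor = self.start_factor * (1 - alpha) + alpha
                    value.append(base_lr * factor)
                    if i > 0:
                        # assumed: step boundaries paired with `value`,
                        # as the hunk ends before this point
                        boundary.append(i)
            return boundary, value

    # Hypothetical usage: 2 warm-up epochs at 100 iterations per epoch
    # yields the same ramp as steps=200.
    boundary, value = LinearWarmupSketch(epochs=2)(base_lr=0.01, step_per_epoch=100)
    assert len(value) == 201
    assert abs(value[0] - 0.01 / 3) < 1e-12 and value[-1] == 0.01

Note that the ternary gives `epochs` strict priority: when `epochs` is set, `steps` is ignored entirely, which matches the added docstring.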