From 74fa0cc2035af01da9a8cb43a48b7babcb9137b8 Mon Sep 17 00:00:00 2001
From: tianyi1997 <93087391+tianyi1997@users.noreply.github.com>
Date: Fri, 17 Feb 2023 02:21:03 +0800
Subject: [PATCH] Modify docstring

---
 ppcls/arch/gears/metabnneck.py   |  6 +++---
 ppcls/optimizer/learning_rate.py | 22 +++++++++++-----------
 2 files changed, 14 insertions(+), 14 deletions(-)

diff --git a/ppcls/arch/gears/metabnneck.py b/ppcls/arch/gears/metabnneck.py
index d2f743da..a27a7a85 100644
--- a/ppcls/arch/gears/metabnneck.py
+++ b/ppcls/arch/gears/metabnneck.py
@@ -99,9 +99,9 @@ class MetaBNNeck(nn.Layer):
 
     def setup_opt(self, opt):
         """
-        enable_inside_update: enable inside updating for `gate` in MetaBIN
-        lr_gate: learning rate of `gate` during meta-train phase
-        bn_mode: control the running stats & updating of BN
+        Args:
+            opt (dict): Optional settings that change the behavior of MetaBIN during training.
+                It includes three keys: `enable_inside_update`, `lr_gate` and `bn_mode`.
         """
         self.check_opt(opt)
         self.opt = copy.deepcopy(opt)
diff --git a/ppcls/optimizer/learning_rate.py b/ppcls/optimizer/learning_rate.py
index d875d8bd..f1d9074e 100644
--- a/ppcls/optimizer/learning_rate.py
+++ b/ppcls/optimizer/learning_rate.py
@@ -257,31 +257,31 @@ class Cyclic(LRBase):
     """Cyclic learning rate decay
 
     Args:
-        epochs (int): Total epoch(s)
-        step_each_epoch (int): Number of iterations within an epoch
+        epochs (int): Total epoch(s).
+        step_each_epoch (int): Number of iterations within an epoch.
         base_learning_rate (float): Initial learning rate, which is the lower boundary in the cycle. The paper recommends
             that set the base_learning_rate to 1/3 or 1/4 of max_learning_rate.
         max_learning_rate (float): Maximum learning rate in the cycle. It defines the cycle amplitude as above.
             Since there is some scaling operation during process of learning rate adjustment,
             max_learning_rate may not actually be reached.
-        warmup_epoch (int): Number of warmup epoch(s)
-        warmup_start_lr (float): Start learning rate within warmup
+        warmup_epoch (int): Number of warmup epoch(s).
+        warmup_start_lr (float): Start learning rate within warmup.
         step_size_up (int): Number of training steps, which is used to increase learning rate in a cycle.
             The step size of one cycle will be defined by step_size_up + step_size_down. According to the paper, step
             size should be set as at least 3 or 4 times steps in one epoch.
         step_size_down (int, optional): Number of training steps, which is used to decrease learning rate in a cycle.
-            If not specified, it's value will initialize to `` step_size_up `` . Default: None
+            If not specified, its value will be initialized to ``step_size_up``. Default: None.
         mode (str, optional): One of 'triangular', 'triangular2' or 'exp_range'.
-            If scale_fn is specified, this argument will be ignored. Default: 'triangular'
-        exp_gamma (float): Constant in 'exp_range' scaling function: exp_gamma**iterations. Used only when mode = 'exp_range'. Default: 1.0
+            If scale_fn is specified, this argument will be ignored. Default: 'triangular'.
+        exp_gamma (float): Constant in 'exp_range' scaling function: exp_gamma**iterations. Used only when mode = 'exp_range'. Default: 1.0.
         scale_fn (function, optional): A custom scaling function, which is used to replace three build-in methods.
             It should only have one argument. For all x >= 0, 0 <= scale_fn(x) <= 1.
-            If specified, then 'mode' will be ignored. Default: None
+            If specified, then 'mode' will be ignored. Default: None.
         scale_mode (str, optional): One of 'cycle' or 'iterations'. Defines whether scale_fn is evaluated on cycle
-            number or cycle iterations (total iterations since start of training). Default: 'cycle'
+            number or cycle iterations (total iterations since start of training). Default: 'cycle'.
         last_epoch (int, optional): The index of last epoch. Can be set to restart training.
             Default: -1, means initial learning rate.
-        by_epoch (bool): Learning rate decays by epoch when by_epoch is True, else by iter
-        verbose: (bool, optional): If True, prints a message to stdout for each update. Defaults to False
+        by_epoch (bool): Learning rate decays by epoch when by_epoch is True, else by iter.
+        verbose (bool, optional): If True, prints a message to stdout for each update. Defaults to False.
     """
 
     def __init__(self,
--
GitLab
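For context, a minimal sketch of the `opt` dict that the new `setup_opt` docstring describes. Only the three key names come from the patch; every value below, and the pre-built `neck` instance, are illustrative assumptions (the per-key meanings are taken from the removed docstring lines).

    # Hypothetical example values; only the key names are documented in the patch.
    opt = {
        "enable_inside_update": True,  # enable inside updating for `gate` in MetaBIN
        "lr_gate": 0.01,               # learning rate of `gate` during the meta-train phase
        "bn_mode": "general",          # controls the running stats & updating of BN; this value is a guess
    }
    # `neck` is assumed to be an already-constructed MetaBNNeck instance:
    # neck.setup_opt(opt)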
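Likewise, a hedged sketch of constructing the `Cyclic` schedule with the arguments the revised docstring lists. Every concrete number is a placeholder; only the argument names and the stated guidance come from the docstring itself.

    from ppcls.optimizer.learning_rate import Cyclic

    # Placeholder numbers chosen to satisfy the docstring's guidance.
    lr = Cyclic(
        epochs=120,
        step_each_epoch=500,
        base_learning_rate=0.025,  # ~1/4 of max_learning_rate, as the paper recommends
        max_learning_rate=0.1,
        warmup_epoch=5,
        warmup_start_lr=0.0,
        step_size_up=2000,         # at least 3-4x the iterations in one epoch, per the paper
        step_size_down=2000,       # would default to step_size_up if omitted
        mode="triangular",         # ignored when a custom scale_fn is supplied
        by_epoch=False,
    )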