Commit 6149e719 authored by HydrogenSulfate

refine code

Parent 674447f6
@@ -25,7 +25,7 @@ Arch:
   infer_add_softmax: False
   Backbone:
     name: "ResNet50_last_stage_stride1"
-    pretrained: "./dataset/resnet50-19c8e357_for_strong_baseline"
+    pretrained: True
     stem_act: null
   BackboneStopLayer:
     name: "flatten"
@@ -73,7 +73,7 @@ Loss:
 Optimizer:
   - Adam:
-      scope: model
+      scope: RecModel
       lr:
         name: Piecewise
         decay_epochs: [30, 60]
......
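
The two YAML hunks above make the backbone load its pretrained weights via `pretrained: True` instead of a hard-coded local checkpoint path, and rename the optimizer `scope` from `model` to `RecModel`, the class name of the model being trained. A minimal sketch of how an `Optimizer` entry like this could be parsed into a name, a scope, and the remaining keyword settings; the dict mirrors the YAML above, and the parsing loop is an illustration, not the PaddleClas implementation:

```python
import copy

# Mirrors the Optimizer section of the YAML above; values are illustrative.
optimizer_config = [
    {"Adam": {
        "scope": "RecModel",
        "lr": {"name": "Piecewise", "decay_epochs": [30, 60]},
    }},
]

for optim_item in copy.deepcopy(optimizer_config):
    optim_name = list(optim_item.keys())[0]   # e.g. "Adam"
    optim_cfg = optim_item[optim_name]        # remaining optimizer settings
    optim_scope = optim_cfg.pop("scope")      # which module(s) to optimize
    lr_cfg = optim_cfg.pop("lr")              # consumed by the LR scheduler
    print(optim_name, optim_scope, lr_cfg)
```
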
@@ -71,7 +71,7 @@ def build_optimizer(config, epochs, step_each_epoch, model_list=None):
         optim_cfg = optim_item[optim_name]  # get optim_cfg
         lr = build_lr_scheduler(optim_cfg.pop('lr'), epochs, step_each_epoch)
-        logger.debug("build lr ({}) for scope ({}) success..".format(
+        logger.info("build lr ({}) for scope ({}) success..".format(
             lr, optim_scope))
         # step2 build regularization
         if 'regularizer' in optim_cfg and optim_cfg['regularizer'] is not None:
@@ -83,7 +83,7 @@ def build_optimizer(config, epochs, step_each_epoch, model_list=None):
             reg_name = reg_config.pop('name') + 'Decay'
             reg = getattr(paddle.regularizer, reg_name)(**reg_config)
             optim_cfg["weight_decay"] = reg
-            logger.debug("build regularizer ({}) for scope ({}) success..".
+            logger.info("build regularizer ({}) for scope ({}) success..".
                 format(reg, optim_scope))
         # step3 build optimizer
         if 'clip_norm' in optim_cfg:
@@ -115,7 +115,9 @@ def build_optimizer(config, epochs, step_each_epoch, model_list=None):
                     optim_model.append(m)
             else:
                 # optimizer for module in model, such as backbone, neck, head...
-                if hasattr(model_list[i], optim_scope):
+                if optim_scope == model_list[i].__class__.__name__:
+                    optim_model.append(model_list[i])
+                elif hasattr(model_list[i], optim_scope):
                     optim_model.append(getattr(model_list[i], optim_scope))
         assert len(optim_model) == 1, \
@@ -123,7 +125,7 @@ def build_optimizer(config, epochs, step_each_epoch, model_list=None):
         optim = getattr(optimizer, optim_name)(
             learning_rate=lr, grad_clip=grad_clip,
             **optim_cfg)(model_list=optim_model)
-        logger.debug("build optimizer ({}) for scope ({}) success..".format(
+        logger.info("build optimizer ({}) for scope ({}) success..".format(
             optim, optim_scope))
         optim_list.append(optim)
         lr_list.append(lr)
......
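
The substantive change in `build_optimizer` is how `optim_scope` is resolved: a scope equal to a model's class name (e.g. `RecModel`) now selects the whole model, while any other scope is still looked up as an attribute (`backbone`, `neck`, `head`, ...). A self-contained sketch of that resolution logic, using hypothetical `RecModel`/`Backbone` stand-ins rather than real Paddle layers:

```python
class Backbone:
    pass

class RecModel:
    def __init__(self):
        self.backbone = Backbone()

def resolve_scope(model_list, optim_scope):
    """Collect the module(s) an optimizer scope refers to."""
    optim_model = []
    for m in model_list:
        if optim_scope == m.__class__.__name__:
            # scope names the model itself (the branch added in this commit)
            optim_model.append(m)
        elif hasattr(m, optim_scope):
            # scope names a sub-module such as backbone, neck, head...
            optim_model.append(getattr(m, optim_scope))
    # the real build_optimizer asserts exactly one match per scope
    assert len(optim_model) == 1
    return optim_model

models = [RecModel()]
print(resolve_scope(models, "RecModel"))  # -> [the whole RecModel]
print(resolve_scope(models, "backbone"))  # -> [its Backbone sub-module]
```

Matching on the class name gives a uniform way to target the entire network, which is why the config above switches `scope: model` to `scope: RecModel`.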