diff --git a/ppcls/configs/Pedestrian/strong_baseline_m1_centerloss.yaml b/ppcls/configs/Pedestrian/strong_baseline_m1_centerloss.yaml
index c4ea70b26672025f75aa4ce6dc1b6760e5fe5fef..042cdffdec49852200ecdfe4dd9d2e52434eb873 100644
--- a/ppcls/configs/Pedestrian/strong_baseline_m1_centerloss.yaml
+++ b/ppcls/configs/Pedestrian/strong_baseline_m1_centerloss.yaml
@@ -25,7 +25,7 @@ Arch:
   infer_add_softmax: False
   Backbone:
     name: "ResNet50_last_stage_stride1"
-    pretrained: "./dataset/resnet50-19c8e357_for_strong_baseline"
+    pretrained: True
     stem_act: null
   BackboneStopLayer:
     name: "flatten"
@@ -73,7 +73,7 @@ Loss:
 
 Optimizer:
   - Adam:
-      scope: model
+      scope: RecModel
       lr:
         name: Piecewise
         decay_epochs: [30, 60]
diff --git a/ppcls/optimizer/__init__.py b/ppcls/optimizer/__init__.py
index d27f1100eef871db48b8da9ab86eba6af8aecee8..c354f6cb9c7e38bca0fd5df6e0cf7846a2ef8794 100644
--- a/ppcls/optimizer/__init__.py
+++ b/ppcls/optimizer/__init__.py
@@ -71,7 +71,7 @@ def build_optimizer(config, epochs, step_each_epoch, model_list=None):
         optim_cfg = optim_item[optim_name]  # get optim_cfg
 
         lr = build_lr_scheduler(optim_cfg.pop('lr'), epochs, step_each_epoch)
-        logger.debug("build lr ({}) for scope ({}) success..".format(
+        logger.info("build lr ({}) for scope ({}) success..".format(
             lr, optim_scope))
         # step2 build regularization
         if 'regularizer' in optim_cfg and optim_cfg['regularizer'] is not None:
@@ -83,8 +83,8 @@ def build_optimizer(config, epochs, step_each_epoch, model_list=None):
             reg_name = reg_config.pop('name') + 'Decay'
             reg = getattr(paddle.regularizer, reg_name)(**reg_config)
             optim_cfg["weight_decay"] = reg
-            logger.debug("build regularizer ({}) for scope ({}) success..".
-                         format(reg, optim_scope))
+            logger.info("build regularizer ({}) for scope ({}) success..".
+                        format(reg, optim_scope))
         # step3 build optimizer
         if 'clip_norm' in optim_cfg:
             clip_norm = optim_cfg.pop('clip_norm')
@@ -115,7 +115,9 @@ def build_optimizer(config, epochs, step_each_epoch, model_list=None):
                     optim_model.append(m)
         else:
             # opmizer for module in model, such as backbone, neck, head...
-            if hasattr(model_list[i], optim_scope):
+            if optim_scope == model_list[i].__class__.__name__:
+                optim_model.append(model_list[i])
+            elif hasattr(model_list[i], optim_scope):
                 optim_model.append(getattr(model_list[i], optim_scope))
 
         assert len(optim_model) == 1, \
@@ -123,7 +125,7 @@ def build_optimizer(config, epochs, step_each_epoch, model_list=None):
         optim = getattr(optimizer, optim_name)(
             learning_rate=lr, grad_clip=grad_clip,
             **optim_cfg)(model_list=optim_model)
-        logger.debug("build optimizer ({}) for scope ({}) success..".format(
+        logger.info("build optimizer ({}) for scope ({}) success..".format(
             optim, optim_scope))
         optim_list.append(optim)
         lr_list.append(lr)
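
In the config, `pretrained: True` replaces the hard-coded local weight file with the backbone's built-in pretrained weights, and `scope: RecModel` names the Adam optimizer's target by the model's class name. The `__init__.py` change is what makes that work: scope resolution now first compares `optim_scope` against each model's class name and only then falls back to the old attribute lookup (`backbone`, `head`, ...). Below is a minimal, hedged sketch of the resolution order after this patch; the toy `RecModel`/`Backbone` layers are hypothetical stand-ins, not the real PaddleClas architecture, and `resolve_scope` condenses the relevant branch of `build_optimizer` rather than reproducing it verbatim:

```python
# Sketch of the scope-resolution order introduced by this patch.
# Backbone/RecModel below are hypothetical stand-ins, not PaddleClas classes.
import paddle.nn as nn


class Backbone(nn.Layer):
    def __init__(self):
        super().__init__()
        self.fc = nn.Linear(8, 8)


class RecModel(nn.Layer):
    def __init__(self):
        super().__init__()
        self.backbone = Backbone()


def resolve_scope(optim_scope, model_list):
    """Collect the (sub)layers an optimizer with this scope should update."""
    optim_model = []
    for m in model_list:
        if optim_scope == "all":
            # one optimizer for every parameter
            optim_model.append(m)
        elif optim_scope.endswith("Loss"):
            # e.g. scope: CenterLoss -> pick the matching loss sublayers
            for s in m.sublayers(True):
                if s.__class__.__name__ == optim_scope:
                    optim_model.append(s)
        elif optim_scope == m.__class__.__name__:
            # NEW branch: scope names the model's class, e.g. scope: RecModel
            optim_model.append(m)
        elif hasattr(m, optim_scope):
            # old fallback: scope names an attribute, e.g. scope: backbone
            optim_model.append(getattr(m, optim_scope))
    return optim_model


model = RecModel()
assert resolve_scope("RecModel", [model]) == [model]           # whole model
assert resolve_scope("backbone", [model]) == [model.backbone]  # submodule
```

Because the class-name check precedes `hasattr`, a scope matching both the model's class name and one of its attributes now selects the whole model. The `logger.debug` to `logger.info` switch simply surfaces the per-scope lr/regularizer/optimizer build messages at the default (INFO) log level.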