diff --git a/ppcls/optimizer/optimizer.py b/ppcls/optimizer/optimizer.py
index 8e4ca9e9983abcb642ab838f47c86e25cf74bd70..17c04b8fd2aeaf2192d94f3190b1f7438018e15c 100644
--- a/ppcls/optimizer/optimizer.py
+++ b/ppcls/optimizer/optimizer.py
@@ -310,8 +310,9 @@ class AdamW(object):
 
         if self.one_dim_param_no_weight_decay:
             self.no_weight_decay_param_name_list += [
-                p.name for model in model_list
-                for n, p in model.named_parameters() if len(p.shape) == 1
+                p.name
+                for model in model_list for n, p in model.named_parameters()
+                if len(p.shape) == 1
             ] if model_list else []
 
         opt = optim.AdamW(
@@ -374,7 +375,7 @@ class AdamWDL(object):
                      name_dict=None,
                      name=None):
             if not isinstance(layerwise_decay, float) and \
-                    not isinstance(layerwise_decay, fluid.framework.Variable):
+                    not isinstance(layerwise_decay, paddle.static.Variable):
                 raise TypeError("coeff should be float or Tensor.")
             self.layerwise_decay = layerwise_decay
             self.name_dict = name_dict
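
Note for reviewers: the second hunk only swaps the legacy fluid.framework.Variable reference for paddle.static.Variable; the type check itself is unchanged. Below is a minimal standalone sketch of that check, assuming Paddle 2.x where paddle.static.Variable is available. The helper name validate_layerwise_decay is hypothetical and not part of this PR.

# Minimal sketch of the updated type check (assumes Paddle 2.x, where
# paddle.static.Variable replaces the legacy fluid.framework.Variable alias).
# validate_layerwise_decay is a hypothetical helper, for illustration only.
import paddle


def validate_layerwise_decay(layerwise_decay):
    # Accept a Python float or a static-graph Variable (Tensor), mirroring
    # the isinstance check in AdamWDL's __init__ touched by this diff.
    if not isinstance(layerwise_decay, float) and \
            not isinstance(layerwise_decay, paddle.static.Variable):
        raise TypeError("coeff should be float or Tensor.")
    return layerwise_decay


# Usage: a plain float passes; anything else (e.g. a string) raises TypeError.
validate_layerwise_decay(0.75)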