From 032c45c1d328c0401048d2978f9a7e46c89f0fe0 Mon Sep 17 00:00:00 2001
From: zhiboniu
Date: Wed, 18 May 2022 11:58:53 +0000
Subject: [PATCH] delete norm_decay in resnet

---
 .../arch/backbone/legendary_models/resnet.py | 23 ++-----------------
 ppcls/configs/Attr/StrongBaselineAttr.yaml   |  1 -
 2 files changed, 2 insertions(+), 22 deletions(-)

diff --git a/ppcls/arch/backbone/legendary_models/resnet.py b/ppcls/arch/backbone/legendary_models/resnet.py
index 313f16a3..ca75c2ea 100644
--- a/ppcls/arch/backbone/legendary_models/resnet.py
+++ b/ppcls/arch/backbone/legendary_models/resnet.py
@@ -117,7 +117,6 @@ class ConvBNLayer(TheseusLayer):
                  is_vd_mode=False,
                  act=None,
                  lr_mult=1.0,
-                 norm_decay=0.,
                  data_format="NCHW"):
         super().__init__()
         self.is_vd_mode = is_vd_mode
@@ -135,14 +134,8 @@ class ConvBNLayer(TheseusLayer):
             bias_attr=False,
             data_format=data_format)
 
-        weight_attr = ParamAttr(
-            learning_rate=lr_mult,
-            regularizer=L2Decay(norm_decay),
-            trainable=True)
-        bias_attr = ParamAttr(
-            learning_rate=lr_mult,
-            regularizer=L2Decay(norm_decay),
-            trainable=True)
+        weight_attr = ParamAttr(learning_rate=lr_mult, trainable=True)
+        bias_attr = ParamAttr(learning_rate=lr_mult, trainable=True)
 
         self.bn = BatchNorm2D(
             num_filters, weight_attr=weight_attr, bias_attr=bias_attr)
@@ -166,7 +159,6 @@ class BottleneckBlock(TheseusLayer):
                  shortcut=True,
                  if_first=False,
                  lr_mult=1.0,
-                 norm_decay=0.,
                  data_format="NCHW"):
         super().__init__()
 
@@ -176,7 +168,6 @@ class BottleneckBlock(TheseusLayer):
             filter_size=1,
             act="relu",
             lr_mult=lr_mult,
-            norm_decay=norm_decay,
             data_format=data_format)
         self.conv1 = ConvBNLayer(
             num_channels=num_filters,
@@ -185,7 +176,6 @@ class BottleneckBlock(TheseusLayer):
             stride=stride,
             act="relu",
             lr_mult=lr_mult,
-            norm_decay=norm_decay,
             data_format=data_format)
         self.conv2 = ConvBNLayer(
             num_channels=num_filters,
@@ -193,7 +183,6 @@ class BottleneckBlock(TheseusLayer):
             filter_size=1,
             act=None,
             lr_mult=lr_mult,
-            norm_decay=norm_decay,
             data_format=data_format)
 
         if not shortcut:
@@ -204,7 +193,6 @@ class BottleneckBlock(TheseusLayer):
                 stride=stride if if_first else 1,
                 is_vd_mode=False if if_first else True,
                 lr_mult=lr_mult,
-                norm_decay=norm_decay,
                 data_format=data_format)
 
         self.relu = nn.ReLU()
@@ -233,7 +221,6 @@ class BasicBlock(TheseusLayer):
                  shortcut=True,
                  if_first=False,
                  lr_mult=1.0,
-                 norm_decay=0.,
                  data_format="NCHW"):
         super().__init__()
 
@@ -245,7 +232,6 @@ class BasicBlock(TheseusLayer):
             stride=stride,
             act="relu",
             lr_mult=lr_mult,
-            norm_decay=norm_decay,
             data_format=data_format)
         self.conv1 = ConvBNLayer(
             num_channels=num_filters,
@@ -253,7 +239,6 @@ class BasicBlock(TheseusLayer):
             filter_size=3,
             act=None,
             lr_mult=lr_mult,
-            norm_decay=norm_decay,
             data_format=data_format)
         if not shortcut:
             self.short = ConvBNLayer(
@@ -263,7 +248,6 @@ class BasicBlock(TheseusLayer):
                 stride=stride if if_first else 1,
                 is_vd_mode=False if if_first else True,
                 lr_mult=lr_mult,
-                norm_decay=norm_decay,
                 data_format=data_format)
         self.shortcut = shortcut
         self.relu = nn.ReLU()
@@ -300,7 +284,6 @@ class ResNet(TheseusLayer):
                  stem_act="relu",
                  class_num=1000,
                  lr_mult_list=[1.0, 1.0, 1.0, 1.0, 1.0],
-                 norm_decay=0.,
                  data_format="NCHW",
                  input_image_channel=3,
                  return_patterns=None,
@@ -340,7 +323,6 @@ class ResNet(TheseusLayer):
                 stride=s,
                 act=stem_act,
                 lr_mult=self.lr_mult_list[0],
-                norm_decay=norm_decay,
                 data_format=data_format)
             for in_c, out_c, k, s in self.stem_cfg[version]
         ])
@@ -359,7 +341,6 @@ class ResNet(TheseusLayer):
                     shortcut=shortcut,
                     if_first=block_idx == i == 0 if version == "vd" else True,
                     lr_mult=self.lr_mult_list[block_idx + 1],
-                    norm_decay=norm_decay,
                     data_format=data_format))
                 shortcut = True
         self.blocks = nn.Sequential(*block_list)
diff --git a/ppcls/configs/Attr/StrongBaselineAttr.yaml b/ppcls/configs/Attr/StrongBaselineAttr.yaml
index 97017a14..7f90e745 100644
--- a/ppcls/configs/Attr/StrongBaselineAttr.yaml
+++ b/ppcls/configs/Attr/StrongBaselineAttr.yaml
@@ -20,7 +20,6 @@ Arch:
   name: "ResNet50"
   pretrained: True
   class_num: 26
-  norm_decay: 0.0005
 
 # loss function config for traing/eval process
 Loss:
--
GitLab
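
What the patch changes in practice: in Paddle, a regularizer attached to a
parameter's ParamAttr overrides the optimizer's global weight decay for that
parameter, so the old ConvBNLayer pinned the decay of the batch-norm scale and
shift to norm_decay (default 0.). With the per-parameter L2Decay removed, the
BN parameters follow whatever weight decay the optimizer is configured with.
A minimal before/after sketch, assuming Paddle 2.x; the variable names and the
learning-rate/channel values are illustrative, not taken from the patch:

    import paddle
    from paddle import ParamAttr
    from paddle.nn import BatchNorm2D
    from paddle.regularizer import L2Decay

    # Old construction: the per-parameter L2Decay(norm_decay) overrides the
    # optimizer's global weight decay for the BN scale (weight) and shift (bias).
    old_weight_attr = ParamAttr(
        learning_rate=1.0, regularizer=L2Decay(0.0), trainable=True)
    old_bias_attr = ParamAttr(
        learning_rate=1.0, regularizer=L2Decay(0.0), trainable=True)

    # New construction: no per-parameter regularizer, so the optimizer's global
    # weight decay (if any) now applies to the BN parameters as well.
    new_weight_attr = ParamAttr(learning_rate=1.0, trainable=True)
    new_bias_attr = ParamAttr(learning_rate=1.0, trainable=True)

    bn = BatchNorm2D(64, weight_attr=new_weight_attr, bias_attr=new_bias_attr)

The companion YAML change is required rather than cosmetic: once the
constructor argument is gone, a leftover norm_decay: 0.0005 under Arch would be
forwarded to ResNet50(...) when PaddleClas builds the model and would
presumably fail with a TypeError for an unexpected keyword argument.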