From 1e696ac22ba5b25bbc1a51b756bd1b9cd45849f2 Mon Sep 17 00:00:00 2001
From: gaotingquan
Date: Thu, 23 Dec 2021 09:18:27 +0000
Subject: [PATCH] fix: remove unnecessary register_hook() call & pre-commit

---
 ppcls/arch/backbone/legendary_models/esnet.py    |  6 +++++-
 ppcls/arch/backbone/legendary_models/hrnet.py    | 11 +++++++----
 .../backbone/legendary_models/inception_v3.py    |  1 -
 .../backbone/legendary_models/mobilenet_v1.py    |  3 +--
 .../backbone/legendary_models/mobilenet_v3.py    |  1 -
 ppcls/arch/backbone/legendary_models/pp_lcnet.py | 16 ++++++++++------
 ppcls/arch/backbone/legendary_models/resnet.py   |  1 -
 ppcls/arch/backbone/legendary_models/vgg.py      |  7 +++++--
 8 files changed, 28 insertions(+), 18 deletions(-)

diff --git a/ppcls/arch/backbone/legendary_models/esnet.py b/ppcls/arch/backbone/legendary_models/esnet.py
index cf9c9626..3a8d6690 100644
--- a/ppcls/arch/backbone/legendary_models/esnet.py
+++ b/ppcls/arch/backbone/legendary_models/esnet.py
@@ -217,7 +217,8 @@ class ESNet(TheseusLayer):
                  class_num=1000,
                  scale=1.0,
                  dropout_prob=0.2,
-                 class_expand=1280):
+                 class_expand=1280,
+                 return_patterns=None):
         super().__init__()
         self.scale = scale
         self.class_num = class_num
@@ -268,6 +269,9 @@ class ESNet(TheseusLayer):
         self.flatten = nn.Flatten(start_axis=1, stop_axis=-1)
         self.fc = Linear(self.class_expand, self.class_num)
 
+        if return_patterns is not None:
+            self.update_res(return_patterns)
+
     def forward(self, x):
         x = self.conv1(x)
         x = self.max_pool(x)
diff --git a/ppcls/arch/backbone/legendary_models/hrnet.py b/ppcls/arch/backbone/legendary_models/hrnet.py
index 7c4898a1..da6c5f67 100644
--- a/ppcls/arch/backbone/legendary_models/hrnet.py
+++ b/ppcls/arch/backbone/legendary_models/hrnet.py
@@ -244,7 +244,7 @@ class HighResolutionModule(TheseusLayer):
 
         for i in range(len(num_filters)):
             self.basic_block_list.append(
-                nn.Sequential(*[
+                nn.Sequential(* [
                     BasicBlock(
                         num_channels=num_filters[i],
                         num_filters=num_filters[i],
@@ -367,7 +367,11 @@ class HRNet(TheseusLayer):
         model: nn.Layer. Specific HRNet model depends on args.
     """
 
-    def __init__(self, width=18, has_se=False, class_num=1000, return_patterns=None):
+    def __init__(self,
+                 width=18,
+                 has_se=False,
+                 class_num=1000,
+                 return_patterns=None):
         super().__init__()
 
         self.width = width
@@ -394,7 +398,7 @@ class HRNet(TheseusLayer):
             stride=2,
             act="relu")
 
-        self.layer1 = nn.Sequential(*[
+        self.layer1 = nn.Sequential(* [
             BottleneckBlock(
                 num_channels=64 if i == 0 else 256,
                 num_filters=64,
@@ -458,7 +462,6 @@ class HRNet(TheseusLayer):
             weight_attr=ParamAttr(initializer=Uniform(-stdv, stdv)))
         if return_patterns is not None:
             self.update_res(return_patterns)
-        self.register_forward_post_hook(self._return_dict_hook)
 
     def forward(self, x):
         x = self.conv_layer1_1(x)
diff --git a/ppcls/arch/backbone/legendary_models/inception_v3.py b/ppcls/arch/backbone/legendary_models/inception_v3.py
index 50fbcb4c..c5ccc3dc 100644
--- a/ppcls/arch/backbone/legendary_models/inception_v3.py
+++ b/ppcls/arch/backbone/legendary_models/inception_v3.py
@@ -498,7 +498,6 @@ class Inception_V3(TheseusLayer):
             bias_attr=ParamAttr())
         if return_patterns is not None:
             self.update_res(return_patterns)
-        self.register_forward_post_hook(self._return_dict_hook)
 
     def forward(self, x):
         x = self.inception_stem(x)
diff --git a/ppcls/arch/backbone/legendary_models/mobilenet_v1.py b/ppcls/arch/backbone/legendary_models/mobilenet_v1.py
index 944bdb14..8bda78d5 100644
--- a/ppcls/arch/backbone/legendary_models/mobilenet_v1.py
+++ b/ppcls/arch/backbone/legendary_models/mobilenet_v1.py
@@ -128,7 +128,7 @@ class MobileNet(TheseusLayer):
                   [int(512 * scale), 512, 1024, 512, 2],
                   [int(1024 * scale), 1024, 1024, 1024, 1]]
 
-        self.blocks = nn.Sequential(*[
+        self.blocks = nn.Sequential(* [
             DepthwiseSeparable(
                 num_channels=params[0],
                 num_filters1=params[1],
@@ -147,7 +147,6 @@ class MobileNet(TheseusLayer):
             weight_attr=ParamAttr(initializer=KaimingNormal()))
         if return_patterns is not None:
             self.update_res(return_patterns)
-        self.register_forward_post_hook(self._return_dict_hook)
 
     def forward(self, x):
         x = self.conv(x)
diff --git a/ppcls/arch/backbone/legendary_models/mobilenet_v3.py b/ppcls/arch/backbone/legendary_models/mobilenet_v3.py
index 438e48a4..1ad42d5c 100644
--- a/ppcls/arch/backbone/legendary_models/mobilenet_v3.py
+++ b/ppcls/arch/backbone/legendary_models/mobilenet_v3.py
@@ -202,7 +202,6 @@ class MobileNetV3(TheseusLayer):
         self.fc = Linear(self.class_expand, class_num)
         if return_patterns is not None:
             self.update_res(return_patterns)
-        self.register_forward_post_hook(self._return_dict_hook)
 
     def forward(self, x):
         x = self.conv(x)
diff --git a/ppcls/arch/backbone/legendary_models/pp_lcnet.py b/ppcls/arch/backbone/legendary_models/pp_lcnet.py
index 05bbccd3..327980f3 100644
--- a/ppcls/arch/backbone/legendary_models/pp_lcnet.py
+++ b/ppcls/arch/backbone/legendary_models/pp_lcnet.py
@@ -171,7 +171,8 @@ class PPLCNet(TheseusLayer):
                  scale=1.0,
                  class_num=1000,
                  dropout_prob=0.2,
-                 class_expand=1280):
+                 class_expand=1280,
+                 return_patterns=None):
         super().__init__()
         self.scale = scale
         self.class_expand = class_expand
@@ -182,7 +183,7 @@ class PPLCNet(TheseusLayer):
             num_filters=make_divisible(16 * scale),
             stride=2)
 
-        self.blocks2 = nn.Sequential(*[
+        self.blocks2 = nn.Sequential(* [
             DepthwiseSeparable(
                 num_channels=make_divisible(in_c * scale),
                 num_filters=make_divisible(out_c * scale),
@@ -192,7 +193,7 @@ class PPLCNet(TheseusLayer):
             for i, (k, in_c, out_c, s, se) in enumerate(NET_CONFIG["blocks2"])
         ])
 
-        self.blocks3 = nn.Sequential(*[
+        self.blocks3 = nn.Sequential(* [
             DepthwiseSeparable(
                 num_channels=make_divisible(in_c * scale),
                 num_filters=make_divisible(out_c * scale),
@@ -202,7 +203,7 @@ class PPLCNet(TheseusLayer):
             for i, (k, in_c, out_c, s, se) in enumerate(NET_CONFIG["blocks3"])
         ])
 
-        self.blocks4 = nn.Sequential(*[
+        self.blocks4 = nn.Sequential(* [
             DepthwiseSeparable(
                 num_channels=make_divisible(in_c * scale),
                 num_filters=make_divisible(out_c * scale),
@@ -212,7 +213,7 @@ class PPLCNet(TheseusLayer):
             for i, (k, in_c, out_c, s, se) in enumerate(NET_CONFIG["blocks4"])
         ])
 
-        self.blocks5 = nn.Sequential(*[
+        self.blocks5 = nn.Sequential(* [
             DepthwiseSeparable(
                 num_channels=make_divisible(in_c * scale),
                 num_filters=make_divisible(out_c * scale),
@@ -222,7 +223,7 @@ class PPLCNet(TheseusLayer):
             for i, (k, in_c, out_c, s, se) in enumerate(NET_CONFIG["blocks5"])
         ])
 
-        self.blocks6 = nn.Sequential(*[
+        self.blocks6 = nn.Sequential(* [
             DepthwiseSeparable(
                 num_channels=make_divisible(in_c * scale),
                 num_filters=make_divisible(out_c * scale),
@@ -248,6 +249,9 @@ class PPLCNet(TheseusLayer):
 
         self.fc = Linear(self.class_expand, class_num)
 
+        if return_patterns is not None:
+            self.update_res(return_patterns)
+
     def forward(self, x):
         x = self.conv1(x)
 
diff --git a/ppcls/arch/backbone/legendary_models/resnet.py b/ppcls/arch/backbone/legendary_models/resnet.py
index 4f79c0d7..f37cfef9 100644
--- a/ppcls/arch/backbone/legendary_models/resnet.py
+++ b/ppcls/arch/backbone/legendary_models/resnet.py
@@ -340,7 +340,6 @@ class ResNet(TheseusLayer):
         self.data_format = data_format
         if return_patterns is not None:
             self.update_res(return_patterns)
-        self.register_forward_post_hook(self._return_dict_hook)
 
     def forward(self, x):
         with paddle.static.amp.fp16_guard():
diff --git a/ppcls/arch/backbone/legendary_models/vgg.py b/ppcls/arch/backbone/legendary_models/vgg.py
index 9b1750d5..9316e12d 100644
--- a/ppcls/arch/backbone/legendary_models/vgg.py
+++ b/ppcls/arch/backbone/legendary_models/vgg.py
@@ -111,7 +111,11 @@ class VGGNet(TheseusLayer):
         model: nn.Layer. Specific VGG model depends on args.
     """
 
-    def __init__(self, config, stop_grad_layers=0, class_num=1000, return_patterns=None):
+    def __init__(self,
+                 config,
+                 stop_grad_layers=0,
+                 class_num=1000,
+                 return_patterns=None):
         super().__init__()
 
         self.stop_grad_layers = stop_grad_layers
@@ -139,7 +143,6 @@ class VGGNet(TheseusLayer):
         self.fc3 = Linear(4096, class_num)
         if return_patterns is not None:
             self.update_res(return_patterns)
-        self.register_forward_post_hook(self._return_dict_hook)
 
     def forward(self, inputs):
         x = self.conv_block_1(inputs)
-- 
GitLab
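
Note for reviewers: a minimal usage sketch of the `return_patterns` argument this patch wires into the remaining backbones. Only the constructor parameter and the `self.update_res(return_patterns)` call are confirmed by the diff; the choice of backbone, the input shape, and the pattern string "blocks[0]" are illustrative assumptions, not part of the patch.

# Sketch only. Assumptions: PaddleClas is on PYTHONPATH, and "blocks[0]"
# is a valid TheseusLayer pattern naming one sublayer of the backbone.
import paddle
from ppcls.arch.backbone.legendary_models.resnet import ResNet50

# return_patterns is now consumed once in __init__ via update_res(),
# rather than the model also registering a forward-post hook on itself.
model = ResNet50(return_patterns=["blocks[0]"])

x = paddle.rand([1, 3, 224, 224])
out = model(x)  # hooks installed by update_res() capture matched sublayer outputs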