Commit 1e696ac2 authored by gaotingquan, committed by Tingquan Gao

fix: remove unnecessary register_hook() call & pre-commit

Parent a86c4b29
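
All of the backbones touched below inherit from TheseusLayer, whose `return_patterns` mechanism lets callers collect intermediate feature maps by layer-name pattern. The following is a minimal sketch of the pattern this commit converges on; `TinyNet` is hypothetical, and the bodies of `update_res` and `_return_dict_hook` are assumptions rather than code from this repository. The key assumption is that `update_res` registers the forward post-hook itself, which is why the explicit `register_forward_post_hook(self._return_dict_hook)` call in each model's `__init__` became unnecessary.

    import paddle
    from paddle import nn


    class TheseusLayer(nn.Layer):
        # Simplified stand-in for PaddleClas's TheseusLayer; details are assumptions.
        def __init__(self):
            super().__init__()
            self.res_dict = {}

        def _return_dict_hook(self, layer, inputs, outputs):
            # Bundle the final output with any captured intermediate results.
            # In Paddle, a forward post-hook's return value replaces the output.
            return {"output": outputs, **self.res_dict}

        def update_res(self, return_patterns):
            # The real implementation would also hook the sub-layers matched by
            # `return_patterns` so their outputs land in self.res_dict.
            # Assumed: the post-hook is registered here, once, so subclasses no
            # longer need their own register_forward_post_hook(...) call.
            self.register_forward_post_hook(self._return_dict_hook)


    class TinyNet(TheseusLayer):
        # Hypothetical mini-backbone following the same __init__ pattern that
        # this commit gives ESNet and PPLCNet.
        def __init__(self, class_num=1000, return_patterns=None):
            super().__init__()
            self.fc = nn.Linear(8, class_num)
            if return_patterns is not None:
                self.update_res(return_patterns)

        def forward(self, x):
            return self.fc(x)

Under that assumption, constructing a model with `return_patterns` is enough to make `forward` return a dict instead of a bare tensor, while models built without it keep their original behavior.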
@@ -217,7 +217,8 @@ class ESNet(TheseusLayer):
                  class_num=1000,
                  scale=1.0,
                  dropout_prob=0.2,
-                 class_expand=1280):
+                 class_expand=1280,
+                 return_patterns=None):
         super().__init__()
         self.scale = scale
         self.class_num = class_num
@@ -268,6 +269,9 @@ class ESNet(TheseusLayer):
         self.flatten = nn.Flatten(start_axis=1, stop_axis=-1)
         self.fc = Linear(self.class_expand, self.class_num)
 
+        if return_patterns is not None:
+            self.update_res(return_patterns)
+
     def forward(self, x):
         x = self.conv1(x)
         x = self.max_pool(x)
......
@@ -244,7 +244,7 @@ class HighResolutionModule(TheseusLayer):
 
         for i in range(len(num_filters)):
             self.basic_block_list.append(
-                nn.Sequential(*[
+                nn.Sequential(* [
                     BasicBlock(
                         num_channels=num_filters[i],
                         num_filters=num_filters[i],
@@ -367,7 +367,11 @@ class HRNet(TheseusLayer):
         model: nn.Layer. Specific HRNet model depends on args.
     """
 
-    def __init__(self, width=18, has_se=False, class_num=1000, return_patterns=None):
+    def __init__(self,
+                 width=18,
+                 has_se=False,
+                 class_num=1000,
+                 return_patterns=None):
         super().__init__()
 
         self.width = width
@@ -394,7 +398,7 @@ class HRNet(TheseusLayer):
             stride=2,
             act="relu")
 
-        self.layer1 = nn.Sequential(*[
+        self.layer1 = nn.Sequential(* [
             BottleneckBlock(
                 num_channels=64 if i == 0 else 256,
                 num_filters=64,
@@ -458,7 +462,6 @@ class HRNet(TheseusLayer):
             weight_attr=ParamAttr(initializer=Uniform(-stdv, stdv)))
 
         if return_patterns is not None:
             self.update_res(return_patterns)
-            self.register_forward_post_hook(self._return_dict_hook)
 
     def forward(self, x):
         x = self.conv_layer1_1(x)
......
@@ -498,7 +498,6 @@ class Inception_V3(TheseusLayer):
             bias_attr=ParamAttr())
 
         if return_patterns is not None:
             self.update_res(return_patterns)
-            self.register_forward_post_hook(self._return_dict_hook)
 
     def forward(self, x):
         x = self.inception_stem(x)
......
@@ -128,7 +128,7 @@ class MobileNet(TheseusLayer):
                    [int(512 * scale), 512, 1024, 512, 2],
                    [int(1024 * scale), 1024, 1024, 1024, 1]]
 
-        self.blocks = nn.Sequential(*[
+        self.blocks = nn.Sequential(* [
             DepthwiseSeparable(
                 num_channels=params[0],
                 num_filters1=params[1],
@@ -147,7 +147,6 @@ class MobileNet(TheseusLayer):
             weight_attr=ParamAttr(initializer=KaimingNormal()))
 
         if return_patterns is not None:
             self.update_res(return_patterns)
-            self.register_forward_post_hook(self._return_dict_hook)
 
     def forward(self, x):
         x = self.conv(x)
......
@@ -202,7 +202,6 @@ class MobileNetV3(TheseusLayer):
         self.fc = Linear(self.class_expand, class_num)
 
         if return_patterns is not None:
             self.update_res(return_patterns)
-            self.register_forward_post_hook(self._return_dict_hook)
 
     def forward(self, x):
         x = self.conv(x)
......
@@ -171,7 +171,8 @@ class PPLCNet(TheseusLayer):
                  scale=1.0,
                  class_num=1000,
                  dropout_prob=0.2,
-                 class_expand=1280):
+                 class_expand=1280,
+                 return_patterns=None):
         super().__init__()
         self.scale = scale
         self.class_expand = class_expand
@@ -182,7 +183,7 @@ class PPLCNet(TheseusLayer):
             num_filters=make_divisible(16 * scale),
             stride=2)
 
-        self.blocks2 = nn.Sequential(*[
+        self.blocks2 = nn.Sequential(* [
             DepthwiseSeparable(
                 num_channels=make_divisible(in_c * scale),
                 num_filters=make_divisible(out_c * scale),
@@ -192,7 +193,7 @@ class PPLCNet(TheseusLayer):
             for i, (k, in_c, out_c, s, se) in enumerate(NET_CONFIG["blocks2"])
         ])
 
-        self.blocks3 = nn.Sequential(*[
+        self.blocks3 = nn.Sequential(* [
             DepthwiseSeparable(
                 num_channels=make_divisible(in_c * scale),
                 num_filters=make_divisible(out_c * scale),
@@ -202,7 +203,7 @@ class PPLCNet(TheseusLayer):
             for i, (k, in_c, out_c, s, se) in enumerate(NET_CONFIG["blocks3"])
         ])
 
-        self.blocks4 = nn.Sequential(*[
+        self.blocks4 = nn.Sequential(* [
             DepthwiseSeparable(
                 num_channels=make_divisible(in_c * scale),
                 num_filters=make_divisible(out_c * scale),
@@ -212,7 +213,7 @@ class PPLCNet(TheseusLayer):
             for i, (k, in_c, out_c, s, se) in enumerate(NET_CONFIG["blocks4"])
         ])
 
-        self.blocks5 = nn.Sequential(*[
+        self.blocks5 = nn.Sequential(* [
             DepthwiseSeparable(
                 num_channels=make_divisible(in_c * scale),
                 num_filters=make_divisible(out_c * scale),
@@ -222,7 +223,7 @@ class PPLCNet(TheseusLayer):
             for i, (k, in_c, out_c, s, se) in enumerate(NET_CONFIG["blocks5"])
         ])
 
-        self.blocks6 = nn.Sequential(*[
+        self.blocks6 = nn.Sequential(* [
             DepthwiseSeparable(
                 num_channels=make_divisible(in_c * scale),
                 num_filters=make_divisible(out_c * scale),
@@ -248,6 +249,9 @@ class PPLCNet(TheseusLayer):
         self.fc = Linear(self.class_expand, class_num)
 
+        if return_patterns is not None:
+            self.update_res(return_patterns)
+
     def forward(self, x):
         x = self.conv1(x)
......
@@ -340,7 +340,6 @@ class ResNet(TheseusLayer):
         self.data_format = data_format
 
         if return_patterns is not None:
             self.update_res(return_patterns)
-            self.register_forward_post_hook(self._return_dict_hook)
 
     def forward(self, x):
         with paddle.static.amp.fp16_guard():
......
@@ -111,7 +111,11 @@ class VGGNet(TheseusLayer):
         model: nn.Layer. Specific VGG model depends on args.
     """
 
-    def __init__(self, config, stop_grad_layers=0, class_num=1000, return_patterns=None):
+    def __init__(self,
+                 config,
+                 stop_grad_layers=0,
+                 class_num=1000,
+                 return_patterns=None):
         super().__init__()
 
         self.stop_grad_layers = stop_grad_layers
@@ -139,7 +143,6 @@ class VGGNet(TheseusLayer):
         self.fc3 = Linear(4096, class_num)
 
         if return_patterns is not None:
             self.update_res(return_patterns)
-            self.register_forward_post_hook(self._return_dict_hook)
 
     def forward(self, inputs):
         x = self.conv_block_1(inputs)
......
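
For reference, a hedged usage sketch of the updated constructors; the import path and the exact pattern syntax accepted by `update_res` are assumptions based on PaddleClas's layout, not something shown in this diff:

    import paddle

    # Assumed import path for the PPLCNet class modified above.
    from ppcls.arch.backbone.legendary_models.pp_lcnet import PPLCNet

    # Ask the backbone to also return the outputs of blocks3 and blocks4.
    model = PPLCNet(scale=1.0, return_patterns=["blocks3", "blocks4"])
    out = model(paddle.rand([1, 3, 224, 224]))
    # Expected (under the assumptions above): a dict holding the logits plus
    # the requested intermediate features, rather than a bare tensor.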