diff --git a/dygraph/paddleseg/models/ann.py b/dygraph/paddleseg/models/ann.py
index a9d805a5ed02aef4326191b42c04745ae515867e..1e123552f6fe85e1f0d206ff186e41d9b0f5ee01 100644
--- a/dygraph/paddleseg/models/ann.py
+++ b/dygraph/paddleseg/models/ann.py
@@ -73,7 +73,6 @@ class ANN(nn.Layer):
         utils.load_entire_model(self, pretrained)
 
     def forward(self, input):
-
         feat_list = self.backbone(input)
         logit_list = self.head(feat_list)
         return [
@@ -154,7 +153,6 @@ class ANNHead(nn.Layer):
         self.init_weight()
 
     def forward(self, feat_list):
-
         logit_list = []
         low_level_x = feat_list[self.backbone_indices[0]]
         high_level_x = feat_list[self.backbone_indices[1]]
diff --git a/dygraph/paddleseg/models/common/activation.py b/dygraph/paddleseg/models/common/activation.py
index 69af72e0ea96cd389e48511ff7f7d4bee8680a8a..3c9a4115c2f8f6798aef5a23a593a848019792a4 100644
--- a/dygraph/paddleseg/models/common/activation.py
+++ b/dygraph/paddleseg/models/common/activation.py
@@ -53,7 +53,6 @@ class Activation(nn.Layer):
                     act, act_dict.keys()))
 
     def forward(self, x):
-
         if self._act is not None:
             return self.act_func(x)
         else:
diff --git a/dygraph/paddleseg/models/common/pyramid_pool.py b/dygraph/paddleseg/models/common/pyramid_pool.py
index d1c643382d8f4e2a02f3f158a2eea09d57c4adaf..d768a36449b90d5f8160ce07a2304e2ba1b01d77 100644
--- a/dygraph/paddleseg/models/common/pyramid_pool.py
+++ b/dygraph/paddleseg/models/common/pyramid_pool.py
@@ -44,7 +44,6 @@ class ASPPModule(nn.Layer):
         self.aspp_blocks = []
 
         for ratio in aspp_ratios:
-
             if sep_conv and ratio > 1:
                 conv_func = layer_libs.SeparableConvBNReLU
             else:
@@ -76,7 +75,6 @@ class ASPPModule(nn.Layer):
         self.dropout = nn.Dropout(p=0.1) # drop rate
 
     def forward(self, x):
-
         outputs = []
         for block in self.aspp_blocks:
             y = block(x)
@@ -141,7 +139,6 @@ class PPModule(nn.Layer):
 
         After pooling, the channels are reduced to 1/len(bin_sizes) immediately, while some
         other implementations keep the channels to be same.
-
         Args:
             in_channels (int): the number of intput channels to pyramid pooling module.
             size (int): the out size of the pooled layer.
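For context on the pyramid_pool.py hunks above: the PPModule docstring states that each pooled branch immediately reduces the channels to in_channels // len(bin_sizes). The following is a minimal sketch of that pooling stage, assuming the paddle 2.x nn API; the helper name make_pool_stage and the exact layer composition are illustrative, not the file's actual implementation.

# A minimal sketch (assumption: paddle 2.x nn API; `make_pool_stage` is a
# hypothetical helper, not the function defined in pyramid_pool.py).
import paddle
import paddle.nn as nn
import paddle.nn.functional as F

def make_pool_stage(in_channels, bin_sizes, size):
    # Each bin pools the feature map to size x size, then a 1x1 conv reduces
    # the channels to in_channels // len(bin_sizes), as the docstring describes.
    reduced_channels = in_channels // len(bin_sizes)
    return nn.Sequential(
        nn.AdaptiveAvgPool2D(output_size=size),
        nn.Conv2D(in_channels, reduced_channels, kernel_size=1))

# Usage: pool to a 3x3 bin, then upsample back for concatenation with the input.
feat = paddle.rand([1, 2048, 32, 32])
stage = make_pool_stage(in_channels=2048, bin_sizes=(1, 2, 3, 6), size=3)
out = F.interpolate(stage(feat), size=feat.shape[2:], mode='bilinear')
print(out.shape)  # [1, 512, 32, 32]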
diff --git a/dygraph/paddleseg/models/deeplab.py b/dygraph/paddleseg/models/deeplab.py
index 56c23b5cdc572abd45228eef5f415ecd61211c29..00f218c06ffcd5dd844f7d2c324687763920e885 100644
--- a/dygraph/paddleseg/models/deeplab.py
+++ b/dygraph/paddleseg/models/deeplab.py
@@ -73,7 +73,6 @@ class DeepLabV3P(nn.Layer):
         utils.load_entire_model(self, pretrained)
 
     def forward(self, input):
-
         feat_list = self.backbone(input)
         logit_list = self.head(feat_list)
         return [
@@ -122,7 +121,6 @@ class DeepLabV3PHead(nn.Layer):
         self.init_weight()
 
     def forward(self, feat_list):
-
         logit_list = []
         low_level_feat = feat_list[self.backbone_indices[0]]
         x = feat_list[self.backbone_indices[1]]
@@ -171,7 +169,6 @@ class DeepLabV3(nn.Layer):
         utils.load_entire_model(self, pretrained)
 
     def forward(self, input):
-
         feat_list = self.backbone(input)
         logit_list = self.head(feat_list)
         return [
@@ -205,9 +202,7 @@ class DeepLabV3Head(nn.Layer):
         self.init_weight()
 
     def forward(self, feat_list):
-
         logit_list = []
-
         x = feat_list[self.backbone_indices[0]]
         x = self.aspp(x)
         logit = self.cls(x)
diff --git a/dygraph/paddleseg/models/fast_scnn.py b/dygraph/paddleseg/models/fast_scnn.py
index 2a916835241581f9f3cab4616bcbf39330ad70fb..b4c6eac739d8ba5b531735a4b53e719704493738 100644
--- a/dygraph/paddleseg/models/fast_scnn.py
+++ b/dygraph/paddleseg/models/fast_scnn.py
@@ -61,7 +61,6 @@ class FastSCNN(nn.Layer):
         utils.load_entire_model(self, pretrained)
 
     def forward(self, input, label=None):
-
         logit_list = []
         higher_res_features = self.learning_to_downsample(input)
         x = self.global_feature_extractor(higher_res_features)
@@ -274,9 +273,7 @@ class FeatureFusionModule(nn.Layer):
         low_res_input = F.resize_bilinear(input=low_res_input, scale=4)
         low_res_input = self.dwconv(low_res_input)
         low_res_input = self.conv_low_res(low_res_input)
-
         high_res_input = self.conv_high_res(high_res_input)
-
         x = high_res_input + low_res_input
         return self.relu(x)
 
diff --git a/dygraph/paddleseg/models/gcnet.py b/dygraph/paddleseg/models/gcnet.py
index 1f89a67014ce5d9487b55cc2c2969d23048c554a..ed5b0e8643a331aa2209bdba0c2808126d1075eb 100644
--- a/dygraph/paddleseg/models/gcnet.py
+++ b/dygraph/paddleseg/models/gcnet.py
@@ -70,7 +70,6 @@ class GCNet(nn.Layer):
         utils.load_entire_model(self, pretrained)
 
     def forward(self, input):
-
         feat_list = self.backbone(input)
         logit_list = self.head(feat_list)
         return [
@@ -142,7 +141,6 @@ class GCNetHead(nn.Layer):
         self.init_weight()
 
     def forward(self, feat_list):
-
         logit_list = []
 
         x = feat_list[self.backbone_indices[1]]
diff --git a/dygraph/paddleseg/models/pspnet.py b/dygraph/paddleseg/models/pspnet.py
index c05b5423ba9d8c083bc5d16e5beaeefe06139178..db5a99b136aa117b9e0f786b654d39ad58a08d60 100644
--- a/dygraph/paddleseg/models/pspnet.py
+++ b/dygraph/paddleseg/models/pspnet.py
@@ -70,7 +70,6 @@ class PSPNet(nn.Layer):
         utils.load_entire_model(self, pretrained)
 
     def forward(self, input):
-
         feat_list = self.backbone(input)
         logit_list = self.head(feat_list)
         return [
@@ -130,9 +129,7 @@ class PSPNetHead(nn.Layer):
         self.init_weight()
 
     def forward(self, feat_list):
-
         logit_list = []
-
         x = feat_list[self.backbone_indices[1]]
         x = self.psp_module(x)
         x = F.dropout(x, p=0.1) # dropout_prob
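All of the model-level hunks above touch the same forward pattern: the backbone returns a list of feature maps, the head consumes the entries selected by backbone_indices, and each logit map is resized back to the input resolution before being returned. Below is a minimal sketch of that pattern, assuming the paddle 2.x API (F.interpolate in place of the F.resize_bilinear call the files themselves use); TinySegNet and the stand-in backbone/head callables are hypothetical, not classes from this repo.

# A minimal sketch of the shared forward pattern (assumption: paddle 2.x API;
# `TinySegNet` and the stand-in callables below are hypothetical).
import paddle
import paddle.nn as nn
import paddle.nn.functional as F

class TinySegNet(nn.Layer):
    def __init__(self, backbone, head):
        super().__init__()
        self.backbone = backbone  # returns a list of multi-scale feature maps
        self.head = head          # consumes selected features, returns logit maps

    def forward(self, input):
        feat_list = self.backbone(input)
        logit_list = self.head(feat_list)
        # Resize every logit map back to the input resolution, as in the hunks above.
        return [
            F.interpolate(logit, size=input.shape[2:], mode='bilinear')
            for logit in logit_list
        ]

# Usage with trivial stand-ins: two downsampled "features", head keeps index 1.
net = TinySegNet(
    backbone=lambda x: [F.avg_pool2d(x, 4), F.avg_pool2d(x, 8)],
    head=lambda feats: [feats[1]])
out = net(paddle.rand([1, 3, 64, 64]))
print(out[0].shape)  # [1, 3, 64, 64]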