Commit 5a88b62a authored by lijianshe02

refine psgan code according to the reviewer's comment

Parent 12e819ab
@@ -151,8 +151,6 @@ class MDNet(paddle.nn.Layer):
         # Bottleneck
         for i in range(repeat_num):
             layers.append(ResidualBlock(dim_in=curr_dim, dim_out=curr_dim))
-            #layers.append(nn.InstanceNorm2d(curr_dim, weight_attr=None, bias_attr=None))
-            #layers.append(PONO())
         self.main = nn.Sequential(*layers)
@@ -201,7 +199,6 @@ class TNetDown(paddle.nn.Layer):
         for i in range(repeat_num):
             layers.append(
                 ResidualBlock(dim_in=curr_dim, dim_out=curr_dim, mode='t'))
-            #layers.append(nn.InstanceNorm2d(curr_dim, weight_attr=False, bias_attr=False))
         self.main = nn.Sequential(*layers)
@@ -268,7 +265,6 @@ class MANet(paddle.nn.Layer):
                     bias_attr=False))
             setattr(self, "up_acts_" + str(i), nn.ReLU())
-            #setattr(self, "up_betas_" + str(i), Conv2d(y_dim, curr_dim//2, kernel_size=3, padding=1))
             setattr(
                 self, "up_betas_" + str(i),
                 nn.ConvTranspose2d(y_dim,
@@ -276,7 +272,6 @@ class MANet(paddle.nn.Layer):
                                    kernel_size=4,
                                    stride=2,
                                    padding=1))
-            #setattr(self, "up_gammas_" + str(i), Conv2d(y_dim, curr_dim//2, kernel_size=3, padding=1))
             setattr(
                 self, "up_gammas_" + str(i),
                 nn.ConvTranspose2d(y_dim,
@@ -317,9 +312,7 @@ class MANet(paddle.nn.Layer):
         # mask softmax
         if consistency_mask is not None:
             a_ = a_ - 100.0 * (1 - consistency_mask)
-            #a_ = a_ * consistency_mask
         a = F.softmax(a_, axis=-1)
-        #a = a * consistency_mask
         gamma, beta = self.simple_spade(y)
...
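A note on the masked-softmax path this cleanup keeps: subtracting a large constant from the attention logits wherever consistency_mask is 0 drives those logits toward negative infinity, so softmax assigns them near-zero weight while the remaining positions still normalize to 1. The removed commented-out variant multiplied the probabilities by the mask after softmax instead, which would leave an unnormalized distribution. A minimal runnable sketch of the kept pattern (toy shapes; the names a_ and consistency_mask mirror the diff, everything else is assumed for illustration):

import paddle
import paddle.nn.functional as F

# a_: raw attention logits; consistency_mask: 1 = keep, 0 = suppress
a_ = paddle.randn([1, 4, 4])
consistency_mask = paddle.to_tensor([[[1., 1., 0., 0.]] * 4])

# Push masked logits far below the rest so softmax gives them ~0 weight;
# the unmasked positions still sum to 1 afterwards.
a_ = a_ - 100.0 * (1 - consistency_mask)
a = F.softmax(a_, axis=-1)
print(a.sum(axis=-1))  # each row sums to 1 over the unmasked entries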