diff --git a/ppcls/modeling/architectures/densenet.py b/ppcls/modeling/architectures/densenet.py
index cc7391554934c7a4bb361292d1c594921436af69..dfef9a6daebcaf6ecf921e81ddfca3c988c7ea69 100644
--- a/ppcls/modeling/architectures/densenet.py
+++ b/ppcls/modeling/architectures/densenet.py
@@ -278,7 +278,7 @@ class DenseNet(nn.Layer):
 
         conv = self.batch_norm(conv)
         y = self.pool2d_avg(conv)
-        y = paddle.reshape(y, shape=[0, -1])
+        y = paddle.reshape(y, shape=[-1, y.shape[1]])
         y = self.out(y)
         return y
 
diff --git a/ppcls/modeling/architectures/hrnet.py b/ppcls/modeling/architectures/hrnet.py
index 697441f9162938aa2bb5eb6f997090281ac560d6..9994a5167bfcb5f9990e9a9e61122b4a7cbd90d3 100644
--- a/ppcls/modeling/architectures/hrnet.py
+++ b/ppcls/modeling/architectures/hrnet.py
@@ -657,7 +657,7 @@ class HRNet(nn.Layer):
 
         y = self.conv_last(y)
         y = self.pool2d_avg(y)
-        y = paddle.reshape(y, shape=[0, -1])
+        y = paddle.reshape(y, shape=[-1, y.shape[1]])
         y = self.out(y)
         return y
 
diff --git a/ppcls/modeling/architectures/mobilenet_v3.py b/ppcls/modeling/architectures/mobilenet_v3.py
index f25782d14364549fb235ffb5d0b8f6592e987411..d179c22156a19c7fd4a349e6026de94642c6e69d 100644
--- a/ppcls/modeling/architectures/mobilenet_v3.py
+++ b/ppcls/modeling/architectures/mobilenet_v3.py
@@ -306,7 +306,7 @@ class SEModule(nn.Layer):
         outputs = F.relu(outputs)
         outputs = self.conv2(outputs)
         outputs = hard_sigmoid(outputs)
-        return paddle.multiply(x=inputs, y=outputs, axis=0)
+        return paddle.multiply(x=inputs, y=outputs)
 
 
 def MobileNetV3_small_x0_35(**args):
diff --git a/ppcls/modeling/architectures/resnest.py b/ppcls/modeling/architectures/resnest.py
index 0820ba2866dd889ed0ba3996c22b668c67487608..eb6f8bef1206ed228410a6315a554cc1de588a80 100644
--- a/ppcls/modeling/architectures/resnest.py
+++ b/ppcls/modeling/architectures/resnest.py
@@ -85,11 +85,11 @@ class rSoftmax(nn.Layer):
             x = paddle.reshape(
                 x=x,
                 shape=[
-                    0, cardinality, radix, int(r * h * w / cardinality / radix)
+                    batch, cardinality, radix, int(r * h * w / cardinality / radix)
                 ])
             x = paddle.transpose(x=x, perm=[0, 2, 1, 3])
             x = nn.functional.softmax(x, axis=1)
-            x = paddle.reshape(x=x, shape=[0, r * h * w])
+            x = paddle.reshape(x=x, shape=[batch, r * h * w])
         else:
             x = nn.functional.sigmoid(x)
         return x
diff --git a/ppcls/modeling/architectures/vgg.py b/ppcls/modeling/architectures/vgg.py
index 27619c314468ce9280076a7f887475f19ef05f69..ccb94a9fccca7a967b653f26d764ed747322212b 100644
--- a/ppcls/modeling/architectures/vgg.py
+++ b/ppcls/modeling/architectures/vgg.py
@@ -113,7 +113,7 @@ class VGGNet(nn.Layer):
         x = self._conv_block_4(x)
         x = self._conv_block_5(x)
 
-        x = paddle.reshape(x, [0, -1])
+        x = paddle.reshape(x, [-1, x.shape[1]*x.shape[2]*x.shape[3]])
         x = self._fc1(x)
         x = F.relu(x)
         x = self._drop(x)
diff --git a/ppcls/modeling/architectures/xception.py b/ppcls/modeling/architectures/xception.py
index cf2ea35a65a58c2ce14e494ea0706ca307c605d4..61c87ab4a11e07da806e75682d9f0447740ad5b7 100644
--- a/ppcls/modeling/architectures/xception.py
+++ b/ppcls/modeling/architectures/xception.py
@@ -305,7 +305,7 @@ class ExitFlow(nn.Layer):
         conv2 = self._conv_2(conv1)
         conv2 = F.relu(conv2)
         pool = self._pool(conv2)
-        pool = paddle.reshape(pool, [0, -1])
+        pool = paddle.reshape(pool, [-1, pool.shape[1]])
         out = self._out(pool)
         return out
 