Commit 2ec4d525 authored by T Topdu

fix svtrnet bug in python3.10

Parent 3e58e517
@@ -12,7 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from collections import Callable
 from paddle import ParamAttr
 from paddle.nn.initializer import KaimingNormal
 import numpy as np
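For context (not part of the commit): Python 3.10 removed the deprecated aliases that re-exported the abstract base classes from collections, so the import removed above fails with an ImportError on 3.10+. A minimal sketch of the behaviour, using only the standard library:

    # Sketch only: why `from collections import Callable` breaks on Python 3.10+.
    # The ABC aliases in collections were deprecated in 3.3 and removed in 3.10;
    # the supported location is collections.abc.
    try:
        from collections import Callable      # works only on Python <= 3.9
    except ImportError:
        from collections.abc import Callable  # required on Python >= 3.10

    print(isinstance(print, Callable))  # True on any version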
@@ -228,11 +227,8 @@ class Block(nn.Layer):
         super().__init__()
         if isinstance(norm_layer, str):
             self.norm1 = eval(norm_layer)(dim, epsilon=epsilon)
-        elif isinstance(norm_layer, Callable):
-            self.norm1 = norm_layer(dim)
         else:
-            raise TypeError(
-                "The norm_layer must be str or paddle.nn.layer.Layer class")
+            self.norm1 = norm_layer(dim)
         if mixer == 'Global' or mixer == 'Local':
             self.mixer = Attention(
                 dim,
@@ -250,15 +246,11 @@ class Block(nn.Layer):
         else:
             raise TypeError("The mixer must be one of [Global, Local, Conv]")
-        # NOTE: drop path for stochastic depth, we shall see if this is better than dropout here
         self.drop_path = DropPath(drop_path) if drop_path > 0. else Identity()
         if isinstance(norm_layer, str):
             self.norm2 = eval(norm_layer)(dim, epsilon=epsilon)
-        elif isinstance(norm_layer, Callable):
-            self.norm2 = norm_layer(dim)
         else:
-            raise TypeError(
-                "The norm_layer must be str or paddle.nn.layer.Layer class")
+            self.norm2 = norm_layer(dim)
         mlp_hidden_dim = int(dim * mlp_ratio)
         self.mlp_ratio = mlp_ratio
         self.mlp = Mlp(in_features=dim,
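The two hunks above replace the isinstance(norm_layer, Callable) branches with a plain else, so the module no longer needs Callable at all: a string is still eval'd to a layer class, and anything else is simply called. A minimal standalone sketch of the resulting behaviour (resolve_norm is an illustrative helper; 'nn.LayerNorm' is assumed to be the string form used by the configs):

    import paddle.nn as nn

    def resolve_norm(norm_layer, dim, epsilon=1e-6):
        # Mirrors the simplified branch: strings are eval'd, everything else is called.
        if isinstance(norm_layer, str):
            return eval(norm_layer)(dim, epsilon=epsilon)
        else:
            return norm_layer(dim)

    norm_a = resolve_norm('nn.LayerNorm', 64)  # string form
    norm_b = resolve_norm(nn.LayerNorm, 64)    # passing the class directly also works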
@@ -330,8 +322,6 @@ class PatchEmbed(nn.Layer):
                 act=nn.GELU,
                 bias_attr=None),
             ConvBNLayer(
-                embed_dim // 2,
-                embed_dim,
                 in_channels=embed_dim // 2,
                 out_channels=embed_dim,
                 kernel_size=3,
...
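The last hunk drops the two positional values that duplicated the in_channels/out_channels keywords in the ConvBNLayer call. Supplying the same parameter both positionally and by keyword raises a TypeError in Python on any version; a minimal sketch with a stand-in class (Demo and the channel numbers are illustrative, not taken from the repository):

    class Demo:
        def __init__(self, in_channels, out_channels, kernel_size=3):
            self.in_channels = in_channels
            self.out_channels = out_channels

    try:
        # Same shape as the pre-fix call: positional and keyword values for the same parameters.
        Demo(32, 64, in_channels=32, out_channels=64, kernel_size=3)
    except TypeError as err:
        print(err)  # ... got multiple values for argument 'in_channels'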