Commit 41b33c9e authored by WenmuZhou

change KaimingNormal to KaimingUniform

Parent 96c91907
@@ -58,7 +58,7 @@ class Head(nn.Layer):
             stride=2,
             weight_attr=ParamAttr(
                 name=name_list[2] + '.w_0',
-                initializer=paddle.nn.initializer.KaimingNormal()),
+                initializer=paddle.nn.initializer.KaimingUniform()),
             bias_attr=get_bias_attr(in_channels // 4, name_list[-1] + "conv2"))
         self.conv_bn2 = nn.BatchNorm(
             num_channels=in_channels // 4,
@@ -78,7 +78,7 @@ class Head(nn.Layer):
             stride=2,
             weight_attr=ParamAttr(
                 name=name_list[4] + '.w_0',
-                initializer=paddle.nn.initializer.KaimingNormal()),
+                initializer=paddle.nn.initializer.KaimingUniform()),
             bias_attr=get_bias_attr(in_channels // 4, name_list[-1] + "conv3"),
         )
...
@@ -26,7 +26,7 @@ class DBFPN(nn.Layer):
     def __init__(self, in_channels, out_channels, **kwargs):
         super(DBFPN, self).__init__()
         self.out_channels = out_channels
-        weight_attr = paddle.nn.initializer.KaimingNormal()
+        weight_attr = paddle.nn.initializer.KaimingUniform()
         self.in2_conv = nn.Conv2D(
             in_channels=in_channels[0],
...
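For readers unfamiliar with the two Paddle initializers: both implement He (Kaiming) initialization and differ only in whether weights are drawn from a normal or a uniform distribution. Below is a minimal sketch of the same swap on a standalone Conv2D layer; it is not part of this commit, and the layer shapes are made up for illustration.

import paddle
from paddle import ParamAttr, nn

# Hypothetical layer shapes, chosen only for this demo.
def make_conv(initializer):
    return nn.Conv2D(
        in_channels=64,
        out_channels=16,
        kernel_size=3,
        stride=2,
        weight_attr=ParamAttr(initializer=initializer))

# Before this commit: weights sampled from a Kaiming (He) normal distribution.
conv_normal = make_conv(paddle.nn.initializer.KaimingNormal())

# After this commit: weights sampled from a Kaiming (He) uniform distribution.
conv_uniform = make_conv(paddle.nn.initializer.KaimingUniform())

# Both layers have the same weight shape; only the sampling distribution differs.
print(conv_normal.weight.shape, conv_uniform.weight.shape)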