Commit 2eeb814f authored by chenguowei01

update layer_libs

Parent 4231b3f7
@@ -21,10 +21,10 @@ from paddle.nn import SyncBatchNorm as BatchNorm
 from paddle.nn.layer import activation
 
-class ConvBnRelu(nn.Layer):
+class ConvBNRelu(nn.Layer):
     def __init__(self, in_channels, out_channels, kernel_size, **kwargs):
-        super(ConvBnRelu, self).__init__()
+        super(ConvBNRelu, self).__init__()
         self.conv = Conv2d(in_channels, out_channels, kernel_size, **kwargs)
@@ -37,10 +37,10 @@ class ConvBnRelu(nn.Layer):
         return x
 
-class ConvBn(nn.Layer):
+class ConvBN(nn.Layer):
     def __init__(self, in_channels, out_channels, kernel_size, **kwargs):
-        super(ConvBn, self).__init__()
+        super(ConvBN, self).__init__()
         self.conv = Conv2d(in_channels, out_channels, kernel_size, **kwargs)
@@ -70,16 +70,16 @@ class ConvReluPool(nn.Layer):
         return x
 
-class DepthwiseConvBnRelu(nn.Layer):
+class DepthwiseConvBNRelu(nn.Layer):
     def __init__(self, in_channels, out_channels, kernel_size, **kwargs):
-        super(DepthwiseConvBnRelu, self).__init__()
-        self.depthwise_conv = ConvBn(
+        super(DepthwiseConvBNRelu, self).__init__()
+        self.depthwise_conv = ConvBN(
             in_channels,
             out_channels=in_channels,
             kernel_size=kernel_size,
             groups=in_channels,
             **kwargs)
-        self.piontwise_conv = ConvBnRelu(
+        self.piontwise_conv = ConvBNRelu(
             in_channels, out_channels, kernel_size=1, groups=1)
 
     def forward(self, x):
@@ -109,7 +109,7 @@ class AuxLayer(nn.Layer):
                 dropout_prob=0.1):
         super(AuxLayer, self).__init__()
 
-        self.conv_bn_relu = ConvBnRelu(
+        self.conv_bn_relu = ConvBNRelu(
             in_channels=in_channels,
             out_channels=inter_channels,
             kernel_size=3,
@@ -127,4 +127,3 @@ class AuxLayer(nn.Layer):
         x = F.dropout(x, p=self.dropout_prob)
         x = self.conv(x)
         return x
-
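
For context, below is a minimal sketch (not part of this commit) of how the renamed ConvBNRelu block is typically built and used. The forward body and the exact Conv/BatchNorm layer names are inferred from the class name and the imports visible at the top of the diff; the repository file may differ, and newer Paddle releases spell the layers Conv2D/BatchNorm2D.

import paddle
import paddle.nn as nn
import paddle.nn.functional as F

class ConvBNRelu(nn.Layer):
    """Conv2D -> BatchNorm -> ReLU, mirroring the renamed block in the diff."""
    def __init__(self, in_channels, out_channels, kernel_size, **kwargs):
        super(ConvBNRelu, self).__init__()
        self.conv = nn.Conv2D(in_channels, out_channels, kernel_size, **kwargs)
        self.batch_norm = nn.BatchNorm2D(out_channels)

    def forward(self, x):
        x = self.conv(x)
        x = self.batch_norm(x)
        return F.relu(x)

# Usage: push a 1x3x64x64 tensor through a 3x3 block with padding=1.
x = paddle.rand([1, 3, 64, 64])
block = ConvBNRelu(in_channels=3, out_channels=16, kernel_size=3, padding=1)
print(block(x).shape)  # [1, 16, 64, 64]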