Commit e3340a1e authored by michaelowenliu

update syncbn package and modify parameters in syncbn

Parent fe1b91b7
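In effect, the change swaps `paddle.fluid.dygraph.BatchNorm` / `paddle.fluid.dygraph.SyncBatchNorm` for `paddle.nn.SyncBatchNorm`, which takes `num_features` and `weight_attr` instead of `num_channels` and `param_attr`, and no longer accepts `act`, `moving_mean_name`, or `moving_variance_name`; the activation therefore has to be applied as a separate layer after normalization. A minimal sketch of the pattern, assuming Paddle 2.x and using illustrative names rather than code from this commit:

import paddle
import paddle.fluid as fluid
from paddle.fluid.param_attr import ParamAttr
from paddle.nn import SyncBatchNorm as BatchNorm

paddle.disable_static()  # dynamic-graph mode (the default in Paddle >= 2.0)


class ConvBN(fluid.dygraph.Layer):
    """Conv + SyncBatchNorm + separate activation, mirroring this commit's pattern."""

    def __init__(self, in_c, out_c, name="demo"):
        super(ConvBN, self).__init__()
        self.conv = fluid.dygraph.Conv2D(
            num_channels=in_c,
            num_filters=out_c,
            filter_size=3,
            padding=1,
            bias_attr=False,
            act=None)
        # paddle.nn.SyncBatchNorm: num_features/weight_attr replace the old
        # num_channels/param_attr; act, moving_mean_name and
        # moving_variance_name are no longer accepted.
        self.bn = BatchNorm(
            num_features=out_c,
            weight_attr=ParamAttr(name=name + "_bn_scale"),
            bias_attr=ParamAttr(name=name + "_bn_offset"))
        # The activation that used to ride on the BatchNorm layer is now a
        # standalone layer applied after normalization.
        self.act = paddle.nn.ReLU()

    def forward(self, x):
        return self.act(self.bn(self.conv(x)))


layer = ConvBN(in_c=3, out_c=16)
print(layer.bn.weight.name, layer.bn.bias.name)  # demo_bn_scale demo_bn_offset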
@@ -16,16 +16,17 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
+import math
 import numpy as np
 import paddle
 import paddle.fluid as fluid
 from paddle.fluid.param_attr import ParamAttr
 from paddle.fluid.layer_helper import LayerHelper
 from paddle.fluid.dygraph.nn import Conv2D, Pool2D, Linear, Dropout
-from paddle.fluid.dygraph import SyncBatchNorm as BatchNorm
-import math
+from paddle.nn import SyncBatchNorm as BatchNorm
+from dygraph.models.architectures import layer_utils
 from dygraph.cvlibs import manager

 __all__ = [
@@ -252,19 +253,18 @@ class ConvBNLayer(fluid.dygraph.Layer):
             bias_attr=False,
             use_cudnn=use_cudnn,
             act=None)
-        self.bn = fluid.dygraph.BatchNorm(
-            num_channels=out_c,
-            act=None,
-            param_attr=ParamAttr(
+        self.bn = BatchNorm(
+            num_features=out_c,
+            weight_attr=ParamAttr(
                 name=name + "_bn_scale",
                 regularizer=fluid.regularizer.L2DecayRegularizer(
                     regularization_coeff=0.0)),
             bias_attr=ParamAttr(
                 name=name + "_bn_offset",
                 regularizer=fluid.regularizer.L2DecayRegularizer(
-                    regularization_coeff=0.0)),
-            moving_mean_name=name + "_bn_mean",
-            moving_variance_name=name + "_bn_variance")
+                    regularization_coeff=0.0)))
+
+        self._act_op = layer_utils.Activation(act=None)

     def forward(self, x):
         x = self.conv(x)
...
@@ -25,10 +25,10 @@ import paddle.fluid as fluid
 from paddle.fluid.param_attr import ParamAttr
 from paddle.fluid.layer_helper import LayerHelper
 from paddle.fluid.dygraph.nn import Conv2D, Pool2D, Linear, Dropout
-from paddle.fluid.dygraph import SyncBatchNorm as BatchNorm
+from paddle.nn import SyncBatchNorm as BatchNorm

 from dygraph.utils import utils
+from dygraph.models.architectures import layer_utils
 from dygraph.cvlibs import manager

 __all__ = [
@@ -70,17 +70,17 @@ class ConvBNLayer(fluid.dygraph.Layer):
         bn_name = "bn" + name[3:]
         self._batch_norm = BatchNorm(
             num_filters,
-            act=act,
-            param_attr=ParamAttr(name=bn_name + '_scale'),
-            bias_attr=ParamAttr(bn_name + '_offset'),
-            moving_mean_name=bn_name + '_mean',
-            moving_variance_name=bn_name + '_variance')
+            weight_attr=ParamAttr(name=bn_name + '_scale'),
+            bias_attr=ParamAttr(bn_name + '_offset'))
+        self._act_op = layer_utils.Activation(act=act)

     def forward(self, inputs):
         if self.is_vd_mode:
             inputs = self._pool2d_avg(inputs)
         y = self._conv(inputs)
         y = self._batch_norm(y)
+        y = self._act_op(y)
         return y
...
@@ -3,8 +3,9 @@ import paddle.fluid as fluid
 from paddle.fluid.param_attr import ParamAttr
 from paddle.fluid.layer_helper import LayerHelper
 from paddle.fluid.dygraph.nn import Conv2D, Pool2D, Linear, Dropout
-from paddle.fluid.dygraph import SyncBatchNorm as BatchNorm
+from paddle.nn import SyncBatchNorm as BatchNorm
+from dygraph.models.architectures import layer_utils
 from dygraph.cvlibs import manager

 __all__ = ["Xception41_deeplab", "Xception65_deeplab", "Xception71_deeplab"]
@@ -80,17 +81,17 @@ class ConvBNLayer(fluid.dygraph.Layer):
             param_attr=ParamAttr(name=name + "/weights"),
             bias_attr=False)
         self._bn = BatchNorm(
-            num_channels=output_channels,
-            act=act,
+            num_features=output_channels,
             epsilon=1e-3,
             momentum=0.99,
-            param_attr=ParamAttr(name=name + "/BatchNorm/gamma"),
-            bias_attr=ParamAttr(name=name + "/BatchNorm/beta"),
-            moving_mean_name=name + "/BatchNorm/moving_mean",
-            moving_variance_name=name + "/BatchNorm/moving_variance")
+            weight_attr=ParamAttr(name=name + "/BatchNorm/gamma"),
+            bias_attr=ParamAttr(name=name + "/BatchNorm/beta"))
+
+        self._act_op = layer_utils.Activation(act=act)

     def forward(self, inputs):
-        return self._bn(self._conv(inputs))
+        return self._act_op(self._bn(self._conv(inputs)))


 class Seperate_Conv(fluid.dygraph.Layer):
@@ -116,13 +117,13 @@ class Seperate_Conv(fluid.dygraph.Layer):
             bias_attr=False)
         self._bn1 = BatchNorm(
             input_channels,
-            act=act,
             epsilon=1e-3,
             momentum=0.99,
-            param_attr=ParamAttr(name=name + "/depthwise/BatchNorm/gamma"),
-            bias_attr=ParamAttr(name=name + "/depthwise/BatchNorm/beta"),
-            moving_mean_name=name + "/depthwise/BatchNorm/moving_mean",
-            moving_variance_name=name + "/depthwise/BatchNorm/moving_variance")
+            weight_attr=ParamAttr(name=name + "/depthwise/BatchNorm/gamma"),
+            bias_attr=ParamAttr(name=name + "/depthwise/BatchNorm/beta"))
+
+        self._act_op1 = layer_utils.Activation(act=act)
+
         self._conv2 = Conv2D(
             input_channels,
             output_channels,
@@ -134,19 +135,21 @@ class Seperate_Conv(fluid.dygraph.Layer):
             bias_attr=False)
         self._bn2 = BatchNorm(
             output_channels,
-            act=act,
             epsilon=1e-3,
             momentum=0.99,
-            param_attr=ParamAttr(name=name + "/pointwise/BatchNorm/gamma"),
-            bias_attr=ParamAttr(name=name + "/pointwise/BatchNorm/beta"),
-            moving_mean_name=name + "/pointwise/BatchNorm/moving_mean",
-            moving_variance_name=name + "/pointwise/BatchNorm/moving_variance")
+            weight_attr=ParamAttr(name=name + "/pointwise/BatchNorm/gamma"),
+            bias_attr=ParamAttr(name=name + "/pointwise/BatchNorm/beta"))
+
+        self._act_op2 = layer_utils.Activation(act=act)

     def forward(self, inputs):
         x = self._conv1(inputs)
         x = self._bn1(x)
+        x = self._act_op1(x)
         x = self._conv2(x)
         x = self._bn2(x)
+        x = self._act_op2(x)
         return x
...
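The diff leans on `layer_utils.Activation` (from `dygraph.models.architectures.layer_utils`) to take over the work of the removed `act=` argument, but that helper's code is not part of this commit. A plausible minimal stand-in, written here as an assumption rather than the repository's actual implementation, builds the named activation layer once and acts as an identity when `act` is None:

import paddle
import paddle.fluid as fluid

paddle.disable_static()


class Activation(fluid.dygraph.Layer):
    """Hypothetical sketch of layer_utils.Activation: apply the activation
    named by `act`, or pass the input through unchanged when act is None."""

    def __init__(self, act=None):
        super(Activation, self).__init__()
        # Map the string names the old BatchNorm `act=` argument accepted.
        act_layers = {"relu": paddle.nn.ReLU, "sigmoid": paddle.nn.Sigmoid}
        self._act_layer = act_layers[act]() if act is not None else None

    def forward(self, x):
        return x if self._act_layer is None else self._act_layer(x)


act_op = Activation(act="relu")
print(act_op(paddle.to_tensor([[-1.0, 2.0]])).numpy())  # [[0. 2.]]

With a helper like this, `self._act_op = layer_utils.Activation(act=act)` followed by `y = self._act_op(y)` reproduces what `act=act` used to do inside the old BatchNorm layer.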