diff --git a/mindspore/nn/layer/normalization.py b/mindspore/nn/layer/normalization.py
index 05e5e54b96b1f23e783d34a6ebe10a3af58fe92c..65df2d78ed0c18db1c435f1af02523c99cce13a5 100644
--- a/mindspore/nn/layer/normalization.py
+++ b/mindspore/nn/layer/normalization.py
@@ -44,7 +44,8 @@ class _BatchNorm(Cell):
                  moving_mean_init='zeros',
                  moving_var_init='ones',
                  use_batch_statistics=None,
-                 device_num_each_group=1):
+                 device_num_each_group=1,
+                 input_dims='1d'):
         super(_BatchNorm, self).__init__()
         if num_features < 1:
             raise ValueError("num_features must be at least 1")
@@ -55,6 +56,7 @@ class _BatchNorm(Cell):
         self.use_batch_statistics = use_batch_statistics
         self.num_features = num_features
         self.eps = eps
+        self.input_dims = input_dims
         self.moving_mean = Parameter(initializer(
             moving_mean_init, num_features), name="mean", requires_grad=False)
         self.moving_variance = Parameter(initializer(
@@ -145,6 +147,8 @@ class _BatchNorm(Cell):
         return y
 
     def construct(self, x):
+        if self.input_dims == '2d':
+            _shape_check(self.shape(x))
         if self.use_batch_statistics is None:
             flag = self.training
         else:
@@ -253,10 +257,10 @@ class BatchNorm1d(_BatchNorm):
             mean and variance. Default: None.
 
     Inputs:
-        - **input** (Tensor) - Tensor of shape :math:`(N, C_{in}, H_{in}, W_{in})`.
+        - **input** (Tensor) - Tensor of shape :math:`(N, C_{in})`.
 
     Outputs:
-        Tensor, the normalized, scaled, offset tensor, of shape :math:`(N, C_{out}, H_{out}, W_{out})`.
+        Tensor, the normalized, scaled, offset tensor, of shape :math:`(N, C_{out})`.
 
     Examples:
         >>> net = nn.BatchNorm1d(num_features=16)
@@ -282,7 +286,8 @@ class BatchNorm1d(_BatchNorm):
                                           beta_init,
                                           moving_mean_init,
                                           moving_var_init,
-                                          use_batch_statistics)
+                                          use_batch_statistics,
+                                          input_dims='1d')
 
     def _check_data_dim(self, x):
         if x.dim() != 2:
@@ -357,7 +362,8 @@ class BatchNorm2d(_BatchNorm):
                                           beta_init,
                                           moving_mean_init,
                                           moving_var_init,
-                                          use_batch_statistics)
+                                          use_batch_statistics,
+                                          input_dims='2d')
 
     def _check_data_dim(self, x):
         if x.dim() != 4:
diff --git a/mindspore/ops/operations/math_ops.py b/mindspore/ops/operations/math_ops.py
index 10dc7cbeec1f4374cffc622b5eb59da4ea3794cd..4e4e9187cb3e3c1ac461dcf0982f6d4c56e21a38 100644
--- a/mindspore/ops/operations/math_ops.py
+++ b/mindspore/ops/operations/math_ops.py
@@ -2931,7 +2931,7 @@ class Round(PrimitiveWithInfer):
 
 class Tan(PrimitiveWithInfer):
     """
-    Computes tan of `input_x` element-wise.
+    Computes tangent of `input_x` element-wise.
 
     Inputs:
         - **input_x** (Tensor) - The shape of tensor is :math:`(x_1, x_2, ..., x_R)`.
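For reviewers, a minimal usage sketch (not part of the patch; shapes and values are illustrative) of the contract this change enforces: BatchNorm1d now documents 2-D (N, C) inputs, while BatchNorm2d keeps 4-D (N, C, H, W) inputs and, via input_dims='2d', shape-checks them in construct():

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor

    # BatchNorm1d: 2-D (N, C) input, matching the corrected docstring.
    net1d = nn.BatchNorm1d(num_features=16)
    x1d = Tensor(np.random.randn(8, 16).astype(np.float32))
    y1d = net1d(x1d)  # shape (8, 16)

    # BatchNorm2d: 4-D (N, C, H, W) input; with input_dims='2d' the base
    # class now runs _shape_check on the input shape before normalizing.
    net2d = nn.BatchNorm2d(num_features=16)
    x2d = Tensor(np.random.randn(8, 16, 32, 32).astype(np.float32))
    y2d = net2d(x2d)  # shape (8, 16, 32, 32)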
diff --git a/tests/ut/python/parallel/test_auto_parallel_tuple_depend.py b/tests/ut/python/parallel/test_auto_parallel_tuple_depend.py
index ab6fdcb5ceffcab150a541548737083ce797595e..a80ccb550a4b7b7b35afb70ae695c902223acb0f 100644
--- a/tests/ut/python/parallel/test_auto_parallel_tuple_depend.py
+++ b/tests/ut/python/parallel/test_auto_parallel_tuple_depend.py
@@ -46,7 +46,7 @@ class GradWrap(nn.Cell):
 
 
 def bn_with_initialize(out_channels):
-    bn = nn.BatchNorm2d(out_channels, momentum=0.1, eps=1e-5)
+    bn = nn.BatchNorm1d(out_channels, momentum=0.1, eps=1e-5)
     return bn
 
 
diff --git a/tests/ut/python/parallel/test_auto_parallel_two_bn.py b/tests/ut/python/parallel/test_auto_parallel_two_bn.py
index 3c73290b1e2662fb1cc0ae9ac1928705d5b15535..029d85ab3ce306d5a4d2ace1485106ab5746dbda 100644
--- a/tests/ut/python/parallel/test_auto_parallel_two_bn.py
+++ b/tests/ut/python/parallel/test_auto_parallel_two_bn.py
@@ -40,7 +40,7 @@ class NetWithLoss(nn.Cell):
 class Blockcell(nn.Cell):
     def __init__(self):
         super(Blockcell, self).__init__()
-        self.bn = nn.BatchNorm2d(64, momentum=0.9)
+        self.bn = nn.BatchNorm1d(64, momentum=0.9)
 
     def construct(self, x):
         out = self.bn(x)
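For context (illustrative, not part of the patch): these parallel tests feed 2-D (N, C) activations into the batch-norm block, so once BatchNorm2d shape-checks for 4-D input they must switch to BatchNorm1d:

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor

    x = Tensor(np.ones([64, 64]).astype(np.float32))  # 2-D input, as in these tests

    bn = nn.BatchNorm1d(64, momentum=0.9)  # accepts (N, C)
    out = bn(x)

    # nn.BatchNorm2d(64, momentum=0.9)(x) would now fail the 2-D shape check,
    # since input_dims='2d' expects a 4-D (N, C, H, W) input.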