diff --git a/paddle/phi/api/yaml/legacy_api.yaml b/paddle/phi/api/yaml/legacy_api.yaml
index ebbe497e8d5055c64dffff02978e8af59719c26d..d06d87ecf8f959499203e28ada3399bf85f3af02 100755
--- a/paddle/phi/api/yaml/legacy_api.yaml
+++ b/paddle/phi/api/yaml/legacy_api.yaml
@@ -2560,7 +2560,7 @@
   backward : swish_grad
 
 # sync_batch_norm
-- api : sync_batch_norm
+- api : sync_batch_norm_
   args : (Tensor x, Tensor scale, Tensor bias, Tensor mean, Tensor variance, float momentum, float epsilon, str data_layout, bool is_test, bool use_global_stats, bool trainable_statistics, bool fuse_with_relu)
   output : Tensor(out), Tensor(mean_out), Tensor(variance_out), Tensor(saved_mean), Tensor(saved_variance), Tensor(reserve_space)
   infer_meta :
@@ -2569,6 +2569,7 @@
     func : sync_batch_norm
     data_type : x
   backward : sync_batch_norm_grad
+  inplace : (mean -> mean_out), (variance -> variance_out)
 
 # take_along_axis
 - api : take_along_axis
diff --git a/python/paddle/nn/layer/norm.py b/python/paddle/nn/layer/norm.py
index d637c6dff8d218429f4c47c6537f78c83ad10bcc..ac65aab07a56c4cd4e0f6592d2dd3f3bc87f759b 100644
--- a/python/paddle/nn/layer/norm.py
+++ b/python/paddle/nn/layer/norm.py
@@ -1110,7 +1110,7 @@ class SyncBatchNorm(_BatchNormBase):
         ### train mode: use mini-batch stats, eval mode: use global stats ###
         ### use_global_stats only support False in sync_batch_norm
         if in_dygraph_mode():
-            sync_batch_norm_out, _, _, _, _, _ = _C_ops.final_state_sync_batch_norm(
+            sync_batch_norm_out, _, _, _, _, _ = _C_ops.final_state_sync_batch_norm_(
                 x, self.weight, self.bias, self._mean, self._variance,
                 self._momentum, self._epsilon, self._data_format,
                 not self.training, False, False, False)