diff --git a/python/paddle/nn/functional/__init__.py b/python/paddle/nn/functional/__init__.py
index c2d6fce670207f65039ee62d3027965df82e7cdf..cec69d6998cb151843a00fb95ee587aaba173b6c 100644
--- a/python/paddle/nn/functional/__init__.py
+++ b/python/paddle/nn/functional/__init__.py
@@ -129,7 +129,6 @@ from .loss import binary_cross_entropy_with_logits #DEFINE_ALIAS
 # from .loss import bpr_loss #DEFINE_ALIAS
 # from .loss import center_loss #DEFINE_ALIAS
 #from .loss import cross_entropy #DEFINE_ALIAS
-from .loss import softmax_cross_entropy #DEFINE_ALIAS
 from .loss import cross_entropy #DEFINE_ALIAS
 from .loss import dice_loss #DEFINE_ALIAS
 from .loss import hsigmoid_loss #DEFINE_ALIAS
diff --git a/python/paddle/nn/functional/loss.py b/python/paddle/nn/functional/loss.py
index b3ed491a54e5a9cfc45ea1b7f4b6f3f5b46bc1e9..c616f7bd221fadc90e23c3beead94e54ca4d0dcd 100755
--- a/python/paddle/nn/functional/loss.py
+++ b/python/paddle/nn/functional/loss.py
@@ -42,7 +42,6 @@ __all__ = [
     'binary_cross_entropy',
     'binary_cross_entropy_with_logits',
     'cross_entropy',
-    'softmax_cross_entropy',
     'dice_loss',
     'hsigmoid_loss',
     'kl_div',
@@ -1125,25 +1124,6 @@ def cross_entropy(input,
                   soft_label=False,
                   axis=-1,
                   name=None):
-    return softmax_cross_entropy(
-        input=input,
-        label=label,
-        weight=weight,
-        ignore_index=ignore_index,
-        reduction=reduction,
-        soft_label=soft_label,
-        axis=axis,
-        name=name)
-
-
-def softmax_cross_entropy(input,
-                          label,
-                          weight=None,
-                          ignore_index=-100,
-                          reduction='mean',
-                          soft_label=False,
-                          axis=-1,
-                          name=None):
     """
     This operator implements the cross entropy loss function with softmax. This function combines
     the calculation of the softmax operation and the cross entropy loss function
diff --git a/python/paddle/nn/layer/loss.py b/python/paddle/nn/layer/loss.py
index a6d1152adfcfb83c82dc993ff16936e0170afdc1..5bc33d0f0fccd4a205bf34c92411e164747aba45 100644
--- a/python/paddle/nn/layer/loss.py
+++ b/python/paddle/nn/layer/loss.py
@@ -238,7 +238,7 @@ class CrossEntropyLoss(fluid.dygraph.Layer):
         self.name = name
 
     def forward(self, input, label):
-        ret = paddle.nn.functional.softmax_cross_entropy(
+        ret = paddle.nn.functional.cross_entropy(
             input,
             label,
             weight=self.weight,
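For reference, a minimal usage sketch of the surviving API after this change. The `logits`/`labels` names and sample values are illustrative, and the eager-mode `paddle.to_tensor` call assumes a Paddle 2.x-style runtime; the `cross_entropy` signature itself (`weight`, `ignore_index`, `reduction`, `soft_label`, `axis`) comes straight from the diff above.

```python
import paddle
import paddle.nn.functional as F

# Illustrative inputs: raw logits for a batch of 2 samples over 3 classes,
# and integer class indices (hard labels, so soft_label stays False).
logits = paddle.to_tensor([[0.2, 1.5, -0.3],
                           [1.1, 0.4, 0.9]], dtype='float32')
labels = paddle.to_tensor([1, 2], dtype='int64')

# The single functional entry point: softmax + cross entropy fused,
# mean-reduced. softmax_cross_entropy no longer exists as a public name.
loss = F.cross_entropy(logits, labels, reduction='mean')

# The layer form now forwards to the same function (see the loss.py hunk),
# so both paths compute the identical value.
layer = paddle.nn.CrossEntropyLoss(reduction='mean')
print(float(loss), float(layer(logits, labels)))
```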