Unverified commit dfaf6b5e authored by chajchaj, committed by GitHub

save one name in cross_entropy and softmax_cross_entropy, test=develop (#29074)

* save one name in cross_entropy and softmax_cross_entropy, test=develop

* change the function used in CrossEntropyLoss from softmax_cross_entropy to cross_entropy, test=develop
Parent a5aa4dc7
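The net effect of the patch below is an API consolidation: the functional alias softmax_cross_entropy is deleted and cross_entropy is kept as the single public name. A minimal before/after sketch, assuming a dygraph-mode Paddle build from this branch (tensor shapes and data are illustrative, not taken from the patch):

import paddle
import paddle.nn.functional as F

logits = paddle.randn([4, 10])                      # 4 samples, 10 classes (illustrative)
labels = paddle.randint(0, 10, [4], dtype='int64')  # hard class indices

# Before this commit the same loss was also reachable as
# F.softmax_cross_entropy(logits, labels); this patch removes that alias.
loss = F.cross_entropy(logits, labels)  # softmax + cross entropy in one call
print(loss.numpy())                     # default reduction='mean' gives a scalar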
@@ -129,7 +129,6 @@ from .loss import binary_cross_entropy_with_logits #DEFINE_ALIAS
 # from .loss import bpr_loss #DEFINE_ALIAS
 # from .loss import center_loss #DEFINE_ALIAS
 #from .loss import cross_entropy #DEFINE_ALIAS
-from .loss import softmax_cross_entropy #DEFINE_ALIAS
 from .loss import cross_entropy #DEFINE_ALIAS
 from .loss import dice_loss #DEFINE_ALIAS
 from .loss import hsigmoid_loss #DEFINE_ALIAS
......
@@ -42,7 +42,6 @@ __all__ = [
     'binary_cross_entropy',
     'binary_cross_entropy_with_logits',
     'cross_entropy',
-    'softmax_cross_entropy',
     'dice_loss',
     'hsigmoid_loss',
     'kl_div',
@@ -1125,25 +1124,6 @@ def cross_entropy(input,
                   soft_label=False,
                   axis=-1,
                   name=None):
-    return softmax_cross_entropy(
-        input=input,
-        label=label,
-        weight=weight,
-        ignore_index=ignore_index,
-        reduction=reduction,
-        soft_label=soft_label,
-        axis=axis,
-        name=name)
-
-
-def softmax_cross_entropy(input,
-                          label,
-                          weight=None,
-                          ignore_index=-100,
-                          reduction='mean',
-                          soft_label=False,
-                          axis=-1,
-                          name=None):
     """
     This operator implements the cross entropy loss function with softmax. This function
     combines the calculation of the softmax operation and the cross entropy loss function
......
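The merged function keeps the full parameter list visible in the hunk above (weight, ignore_index, reduction, soft_label, axis, name). A short sketch of the soft-label path, assuming the semantics described in the docstring excerpt (synthetic data, illustrative shapes):

import paddle
import paddle.nn.functional as F

logits = paddle.randn([4, 10])
# Soft labels: one probability distribution over the 10 classes per sample.
soft_labels = F.softmax(paddle.randn([4, 10]), axis=-1)

# soft_label=True interprets `label` as per-class probabilities rather than
# class indices; axis=-1 selects the class dimension, matching the default.
loss = F.cross_entropy(logits, soft_labels, soft_label=True, axis=-1)
print(loss.numpy())  # reduction='mean' (the default) yields a scalar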
@@ -238,7 +238,7 @@ class CrossEntropyLoss(fluid.dygraph.Layer):
         self.name = name

     def forward(self, input, label):
-        ret = paddle.nn.functional.softmax_cross_entropy(
+        ret = paddle.nn.functional.cross_entropy(
             input,
             label,
             weight=self.weight,
......
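After this hunk, the CrossEntropyLoss layer forwards to the surviving functional name instead of the deleted alias. A usage sketch of the layer wrapper, assuming the constructor options held by the layer per the context lines above (data is illustrative):

import paddle

logits = paddle.randn([4, 10])
labels = paddle.randint(0, 10, [4], dtype='int64')

# The layer stores weight/reduction/ignore_index and its forward() now calls
# paddle.nn.functional.cross_entropy, as shown in the diff above.
loss_fn = paddle.nn.CrossEntropyLoss(reduction='mean')
loss = loss_fn(logits, labels)
print(loss.numpy())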