Unverified · Commit 35734138 authored by chajchaj, committed by GitHub

Delete the DEFINE_ALIAS of sigmoid_cross_entropy_with_logits (#27883)

* Delete the DEFINE_ALIAS of sigmoid_cross_entropy_with_logits

* Remove sigmoid_cross_entropy_with_logits from python/paddle/nn/functional/loss.py, test=develop

* Call paddle.fluid.layers.sigmoid_cross_entropy_with_logits in bce_with_logits_loss, test=develop
Parent 3ee6ad6e
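For readers hitting the removed alias, a minimal migration sketch, assuming the Paddle 2.0-era API around this commit (tensor values are illustrative):

```python
import paddle
import paddle.nn.functional as F

logit = paddle.to_tensor([5.0, 1.0, 3.0])
label = paddle.to_tensor([1.0, 0.0, 1.0])

# The removed alias (pre-#27883) would have been called like this:
# out = F.sigmoid_cross_entropy_with_logits(logit, label)

# Public 2.x replacement; reduction='none' keeps the elementwise
# behavior that the removed alias produced.
out = F.binary_cross_entropy_with_logits(logit, label, reduction='none')
print(out.numpy())
```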
@@ -143,7 +143,6 @@ from .loss import nll_loss #DEFINE_ALIAS
 from .loss import npair_loss #DEFINE_ALIAS
 from .loss import rank_loss #DEFINE_ALIAS
 from .loss import sampled_softmax_with_cross_entropy #DEFINE_ALIAS
-from .loss import sigmoid_cross_entropy_with_logits #DEFINE_ALIAS
 from .loss import sigmoid_focal_loss #DEFINE_ALIAS
 from .loss import smooth_l1 #DEFINE_ALIAS
 from .loss import smooth_l1_loss #DEFINE_ALIAS
@@ -31,7 +31,6 @@ from ...fluid.layers import log_loss #DEFINE_ALIAS
 from ...fluid.layers import npair_loss #DEFINE_ALIAS
 from ...fluid.layers import rank_loss #DEFINE_ALIAS
 from ...fluid.layers import reshape
-from ...fluid.layers import sigmoid_cross_entropy_with_logits #DEFINE_ALIAS
 from ...fluid.layers import smooth_l1 #DEFINE_ALIAS
 from ...fluid.layers import softmax_with_cross_entropy #DEFINE_ALIAS
 from ...fluid.layers import square_error_cost #DEFINE_ALIAS
@@ -66,7 +65,6 @@ __all__ = [
     'npair_loss',
     'rank_loss',
     'sampled_softmax_with_cross_entropy',
-    'sigmoid_cross_entropy_with_logits',
     'sigmoid_focal_loss',
     'smooth_l1',
     'smooth_l1_loss',
@@ -316,7 +314,7 @@ def binary_cross_entropy_with_logits(logit,
     if reduction == 'none' and pos_weight is None and weight is None:
         sigmoid_name = name

-    out = paddle.nn.functional.sigmoid_cross_entropy_with_logits(
+    out = paddle.fluid.layers.sigmoid_cross_entropy_with_logits(
         logit, label, name=sigmoid_name)

     one = paddle.fill_constant(shape=[1], value=1.0, dtype=logit.dtype)
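The rerouted fast path above is taken only when reduction == 'none' and neither weight nor pos_weight is given; in that case the underlying op returns the elementwise sigmoid cross entropy. A hedged NumPy sketch of the numerically stable form commonly used for this loss (the helper name is illustrative, not a Paddle API):

```python
import numpy as np

def sigmoid_ce_with_logits(x, z):
    # Numerically stable form: max(x, 0) - x*z + log(1 + exp(-|x|)),
    # algebraically equal to -z*log(sigmoid(x)) - (1-z)*log(1-sigmoid(x)).
    return np.maximum(x, 0.0) - x * z + np.log1p(np.exp(-np.abs(x)))

x = np.array([5.0, 1.0, 3.0])  # logits
z = np.array([1.0, 0.0, 1.0])  # binary labels
print(sigmoid_ce_with_logits(x, z))  # elementwise loss, no reduction
```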