diff --git a/python/paddle/nn/functional/loss.py b/python/paddle/nn/functional/loss.py
index 9971df05fbcdc0cef401116f66a0cf73f2406d9a..e7763853bf7c2c9c06c0e94f53c6e8c04fbfc937 100755
--- a/python/paddle/nn/functional/loss.py
+++ b/python/paddle/nn/functional/loss.py
@@ -1818,12 +1818,16 @@ def cross_entropy(input,
     helper = LayerHelper('softmax_with_cross_entropy', **locals())
     softmax = helper.create_variable_for_type_inference(dtype=input.dtype)
     out = helper.create_variable_for_type_inference(dtype=input.dtype)
+
+    outputs = {'Softmax': softmax, 'Loss': out}
+    if core.is_compiled_with_npu() or core.is_compiled_with_mlu():
+        backprop = helper.create_variable_for_type_inference(dtype=input.dtype)
+        outputs['Backprop'] = backprop
     helper.append_op(
         type='softmax_with_cross_entropy',
         inputs={'Logits': input,
                 'Label': label},
-        outputs={'Softmax': softmax,
-                 'Loss': out},
+        outputs=outputs,
         attrs=attrs)
 
     if weight is not None:
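
The hunk makes the op's output map conditional: on NPU or MLU builds, the softmax_with_cross_entropy kernel also emits a 'Backprop' tensor, so the static-graph path must register a third output slot before appending the op. Below is a minimal, framework-free sketch of that pattern; build_softmax_xent_outputs and the make_var callback are hypothetical stand-ins for LayerHelper.create_variable_for_type_inference, used only to illustrate the control flow, not PaddlePaddle's actual API.

    # Sketch of the conditional-outputs pattern introduced by this patch.
    # `build_softmax_xent_outputs` and `make_var` are hypothetical stand-ins
    # for PaddlePaddle's LayerHelper machinery; only the control flow matters.

    def build_softmax_xent_outputs(make_var, dtype, is_npu_or_mlu_build):
        # Base outputs that every build registers for the op.
        outputs = {'Softmax': make_var(dtype), 'Loss': make_var(dtype)}
        if is_npu_or_mlu_build:
            # NPU/MLU kernels also produce a backprop tensor, so those
            # builds append the op with an extra output slot.
            outputs['Backprop'] = make_var(dtype)
        return outputs

    # Usage with a dummy variable factory standing in for inference variables:
    make_var = lambda dtype: f'var<{dtype}>'
    print(build_softmax_xent_outputs(make_var, 'float32', False))
    # {'Softmax': 'var<float32>', 'Loss': 'var<float32>'}
    print(build_softmax_xent_outputs(make_var, 'float32', True))
    # same dict plus a 'Backprop' entry

Building the dict up front and passing it to append_op keeps a single op-registration call site instead of duplicating the append_op invocation per device branch.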