[Code snippet reproducing the issue] Why is it that, after softmax_with_cross_entropy, a gradient can be taken with respect to the loss, but not with respect to the logits?
Created by: OleNet
import unittest
import sys
import math
import numpy as np
import paddle
import paddle.fluid as fluid

mp, sp = fluid.Program(), fluid.Program()
with fluid.program_guard(mp, sp):
    data = fluid.layers.data(name='data', shape=[128], dtype='float32')
    label = fluid.layers.data(name='label', shape=[1], dtype='int64')
    fc = fluid.layers.fc(input=data, size=100)
    # return_softmax=True returns (loss, softmax); the second value is the
    # softmax of `fc`, bound to the name `logits` here
    ce_loss, logits = fluid.layers.softmax_with_cross_entropy(
        logits=fc, label=label, return_softmax=True)
    data.stop_gradient = False
    # taking the gradient w.r.t. the loss works ...
    print(fluid.backward.append_backward(ce_loss, parameter_list=[data]))
    # ... but taking the gradient w.r.t. the softmax output fails
    print(fluid.backward.append_backward(logits, parameter_list=[data]))
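For comparison, a minimal sketch of an alternative I would expect to work: computing the softmax explicitly with fluid.layers.softmax instead of taking the second return value of softmax_with_cross_entropy, then asking for the gradient of data via fluid.gradients. The use of fluid.gradients and reduce_sum here is my own assumption about a workable variant (and assumes a Paddle version that provides fluid.gradients), not an explanation of why the snippet above fails.

# Sketch only: explicit softmax + fluid.gradients, names chosen for illustration.
mp2, sp2 = fluid.Program(), fluid.Program()
with fluid.program_guard(mp2, sp2):
    data = fluid.layers.data(name='data', shape=[128], dtype='float32')
    data.stop_gradient = False
    fc = fluid.layers.fc(input=data, size=100)
    probs = fluid.layers.softmax(fc)            # explicit softmax op with its own grad op
    probs_sum = fluid.layers.reduce_sum(probs)  # scalar target for the backward pass
    # fluid.gradients returns the gradient variables of `inputs` w.r.t. `targets`
    print(fluid.gradients([probs_sum], [data]))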