未验证 提交 fcdf633f 编写于 作者: K kangguangli 提交者: GitHub

replace cross_entropy in python/paddle/fluid/tests/unittests/test_[a-n]*.py...

replace cross_entropy in python/paddle/fluid/tests/unittests/test_[a-n]*.py except test_dist_transpiler.py (#48913)
上级 e729f16c
......@@ -806,7 +806,12 @@ class TestAdamOptimizer(unittest.TestCase):
input=fc_1, size=2, param_attr=weight_attr2, act='softmax'
)
cost = fluid.layers.cross_entropy(input=prediction, label=label)
cost = paddle.nn.functional.cross_entropy(
input=prediction,
label=label,
reduction='none',
use_softmax=False,
)
loss = paddle.mean(cost)
beta1_init = 0.9
beta2_init = 0.999
......@@ -966,7 +971,9 @@ class TestAdamOptimizer(unittest.TestCase):
fc_1 = fluid.layers.fc(input=z, size=128)
prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
cost = fluid.layers.cross_entropy(input=prediction, label=label)
cost = paddle.nn.functional.cross_entropy(
input=prediction, label=label, reduction='none', use_softmax=False
)
loss = paddle.mean(cost)
adam = fluid.optimizer.Adam(use_global_beta_pow=True)
adam.minimize(loss)
......
......@@ -57,7 +57,9 @@ def convolutional_neural_network(use_py_reader):
)
prediction = fluid.layers.fc(input=conv_pool_2, size=10, act='softmax')
loss = fluid.layers.cross_entropy(input=prediction, label=label)
loss = paddle.nn.functional.cross_entropy(
input=prediction, label=label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
acc = paddle.static.accuracy(input=prediction, label=label)
i = fluid.layers.zeros(shape=[1], dtype='int64')
......
......@@ -107,7 +107,9 @@ class TestCompiledProgramError(unittest.TestCase):
)
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
prediction = fluid.layers.fc(input=img, size=10, act='softmax')
loss = fluid.layers.cross_entropy(input=prediction, label=label)
loss = paddle.nn.functional.cross_entropy(
input=prediction, label=label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
def compile_program_not_compiled(self):
......
......@@ -17,6 +17,7 @@ import unittest
import numpy as np
from op_test import OpTest, randomize_probability
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid import Program, program_guard
......@@ -419,7 +420,9 @@ class TestCrossEntropyOpError(unittest.TestCase):
lab1 = fluid.create_lod_tensor(
np.array([-1, 3, 5, 5]), [[1, 1, 1, 1]], fluid.CPUPlace()
)
fluid.layers.cross_entropy(x1, lab1)
paddle.nn.functional.cross_entropy(
x1, lab1, reduction='none', use_softmax=False
)
self.assertRaises(TypeError, test_Variable)
......@@ -432,7 +435,9 @@ class TestCrossEntropyOpError(unittest.TestCase):
lab2 = fluid.layers.data(
name='lab2', shape=[3, 4, 5, 6], dtype="int32"
)
fluid.layers.cross_entropy(x2, lab2)
paddle.nn.functional.cross_entropy(
x2, lab2, reduction='none', use_softmax=False
)
self.assertRaises(TypeError, test_dtype)
......
......@@ -67,7 +67,12 @@ def simple_fc_net(places, use_legacy_py_reader, use_double_buffer):
hidden, size=CLASS_NUM, act='softmax'
)
loss = paddle.mean(
fluid.layers.cross_entropy(input=predict_label, label=label)
paddle.nn.functional.cross_entropy(
input=predict_label,
label=label,
reduction='none',
use_softmax=False,
)
)
optimizer = fluid.optimizer.Adam()
......
......@@ -73,7 +73,9 @@ def get_model(batch_size):
# Train program
predict = cnn_model(images)
cost = fluid.layers.cross_entropy(input=predict, label=label)
cost = paddle.nn.functional.cross_entropy(
input=predict, label=label, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)
# Evaluator
......@@ -188,9 +190,11 @@ class TestCloneWithStopGradient(unittest.TestCase):
hidden1 = fluid.layers.fc(input=img, size=200, act='relu')
hidden1.stop_gradient = True
hidden2 = fluid.layers.dropout(hidden1, dropout_prob=0.5)
loss = fluid.layers.cross_entropy(
loss = paddle.nn.functional.cross_entropy(
input=fluid.layers.fc(hidden2, size=10, act='softmax'),
label=fluid.layers.data(name='label', shape=[1], dtype='int64'),
reduction='none',
use_softmax=False,
)
avg_loss = paddle.mean(loss)
test_program = train_program.clone(for_test=False)
......@@ -226,9 +230,11 @@ class TestCloneWithStopGradientInSubBlock(unittest.TestCase):
hidden2 = fluid.layers.cond(cond, true_fn, false_fn)
loss = fluid.layers.cross_entropy(
loss = paddle.nn.functional.cross_entropy(
input=fluid.layers.fc(hidden2, size=10, act='softmax'),
label=fluid.layers.data(name='label', shape=[1], dtype='int64'),
reduction='none',
use_softmax=False,
)
avg_loss = paddle.mean(loss)
test_program = train_program.clone(for_test=False)
......@@ -266,9 +272,11 @@ class TestCloneWithRaise(unittest.TestCase):
return hidden2
hidden2 = fluid.layers.cond(cond, true_fn, false_fn)
loss = fluid.layers.cross_entropy(
loss = paddle.nn.functional.cross_entropy(
input=fluid.layers.fc(hidden2, size=10, act='softmax'),
label=fluid.layers.data(name='label', shape=[1], dtype='int64'),
reduction='none',
use_softmax=False,
)
avg_loss = paddle.mean(loss)
test_program = train_program.clone(for_test=False)
......
......@@ -52,8 +52,8 @@ class TestFleetGradientMergeMetaOptimizer(unittest.TestCase):
fc_1 = paddle.fluid.layers.fc(input=input_x, size=64, act='tanh')
fc_2 = paddle.fluid.layers.fc(input=fc_1, size=64, act='tanh')
prediction = paddle.fluid.layers.fc(input=[fc_2], size=2, act='softmax')
cost = paddle.fluid.layers.cross_entropy(
input=prediction, label=input_y
cost = paddle.nn.functional.cross_entropy(
input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)
......
......@@ -68,8 +68,8 @@ class TestFleetGradientMergeMetaOptimizer(unittest.TestCase):
fc_1 = paddle.fluid.layers.fc(input=x_embedding, size=64, act='tanh')
fc_2 = paddle.fluid.layers.fc(input=fc_1, size=64, act='tanh')
prediction = paddle.fluid.layers.fc(input=[fc_2], size=2, act='softmax')
cost = paddle.fluid.layers.cross_entropy(
input=prediction, label=input_y
cost = paddle.nn.functional.cross_entropy(
input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)
......
......@@ -56,8 +56,8 @@ class TestFleetGradientMergeMetaOptimizer(unittest.TestCase):
fc_1 = paddle.fluid.layers.fc(input=emb, size=64, act='tanh')
fc_2 = paddle.fluid.layers.fc(input=fc_1, size=64, act='tanh')
prediction = paddle.fluid.layers.fc(input=[fc_2], size=2, act='softmax')
cost = paddle.fluid.layers.cross_entropy(
input=prediction, label=input_y
cost = paddle.nn.functional.cross_entropy(
input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)
os.environ["FLAGS_LAUNCH_BARRIER"] = "0"
......
......@@ -51,8 +51,8 @@ class TestFleetGradientMergeMetaOptimizer(unittest.TestCase):
fc_1 = paddle.fluid.layers.fc(input=input_x, size=64, act='tanh')
fc_2 = paddle.fluid.layers.fc(input=fc_1, size=64, act='tanh')
prediction = paddle.fluid.layers.fc(input=[fc_2], size=2, act='softmax')
cost = paddle.fluid.layers.cross_entropy(
input=prediction, label=input_y
cost = paddle.nn.functional.cross_entropy(
input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)
......@@ -84,8 +84,8 @@ class TestFleetGradientMergeMetaOptimizer(unittest.TestCase):
fc_1 = paddle.fluid.layers.fc(input=input_x, size=64, act='tanh')
fc_2 = paddle.fluid.layers.fc(input=fc_1, size=64, act='tanh')
prediction = paddle.fluid.layers.fc(input=[fc_2], size=2, act='softmax')
cost = paddle.fluid.layers.cross_entropy(
input=prediction, label=input_y
cost = paddle.nn.functional.cross_entropy(
input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)
......
......@@ -150,7 +150,9 @@ class TestDistFleetHeterProgram(unittest.TestCase):
with fluid.device_guard("gpu"):
labels = fluid.layers.cast(inputs[-1], dtype="int64")
cost = fluid.layers.cross_entropy(input=predict, label=labels)
cost = paddle.nn.functional.cross_entropy(
input=predict, label=labels, reduction='none', use_softmax=False
)
avg_cost = paddle.sum(cost)
return avg_cost
......
......@@ -112,7 +112,9 @@ class MNIST(fluid.dygraph.Layer):
x = paddle.reshape(x, shape=[-1, self.pool_2_shape])
cost = self._linear(x)
cost = paddle.nn.functional.softmax(cost)
loss = fluid.layers.cross_entropy(cost, label)
loss = paddle.nn.functional.cross_entropy(
cost, label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
return avg_loss
......
......@@ -138,7 +138,9 @@ class TestDygraphMultiForward(unittest.TestCase):
label.stop_gradient = True
cost = mnist(img)
loss = fluid.layers.cross_entropy(cost, label)
loss = paddle.nn.functional.cross_entropy(
cost, label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
dy_out = avg_loss.numpy()
......@@ -167,7 +169,9 @@ class TestDygraphMultiForward(unittest.TestCase):
)
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
cost = mnist(img)
loss = fluid.layers.cross_entropy(cost, label)
loss = paddle.nn.functional.cross_entropy(
cost, label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
# initialize params and fetch them
......
......@@ -45,7 +45,9 @@ def simple_fc_net():
),
)
prediction = fluid.layers.fc(hidden, size=10, act='softmax')
loss = fluid.layers.cross_entropy(input=prediction, label=label)
loss = paddle.nn.functional.cross_entropy(
input=prediction, label=label, reduction='none', use_softmax=False
)
loss = paddle.mean(loss)
optimizer = fluid.optimizer.Adam(learning_rate=1e-3)
optimizer.minimize(loss)
......
......@@ -43,7 +43,9 @@ def gru_net(
gru_max_tanh = paddle.tanh(gru_max)
fc1 = fluid.layers.fc(input=gru_max_tanh, size=hid_dim2, act='tanh')
prediction = fluid.layers.fc(input=fc1, size=class_dim, act='softmax')
cost = fluid.layers.cross_entropy(input=prediction, label=label)
cost = paddle.nn.functional.cross_entropy(
input=prediction, label=label, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)
return avg_cost
......
......@@ -45,7 +45,9 @@ def lstm_net(
lstm_max_tanh = paddle.tanh(lstm_max)
fc1 = fluid.layers.fc(input=lstm_max_tanh, size=hid_dim2, act='tanh')
prediction = fluid.layers.fc(input=fc1, size=class_dim, act='softmax')
cost = fluid.layers.cross_entropy(input=prediction, label=label)
cost = paddle.nn.functional.cross_entropy(
input=prediction, label=label, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)
return avg_cost
......
......@@ -67,7 +67,12 @@ class TestFeedData(unittest.TestCase):
predict_label = fluid.layers.fc(hidden, size=class_num, act='softmax')
loss = paddle.mean(
fluid.layers.cross_entropy(input=predict_label, label=label)
paddle.nn.functional.cross_entropy(
input=predict_label,
label=label,
reduction='none',
use_softmax=False,
)
)
optimizer = fluid.optimizer.Adam()
......
......@@ -46,7 +46,9 @@ class TestFetchUnmerged(unittest.TestCase):
)
hidden = fluid.layers.fc(input=conv_pool_2, size=32, act='relu')
prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
loss = fluid.layers.cross_entropy(input=prediction, label=label)
loss = paddle.nn.functional.cross_entropy(
input=prediction, label=label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
return avg_loss, prediction
......
......@@ -40,8 +40,8 @@ class TestDistributedStrategyAuto(unittest.TestCase):
fc_1 = paddle.fluid.layers.fc(input=input_x, size=64, act='tanh')
fc_2 = paddle.fluid.layers.fc(input=fc_1, size=64, act='tanh')
prediction = paddle.fluid.layers.fc(input=[fc_2], size=2, act='softmax')
cost = paddle.fluid.layers.cross_entropy(
input=prediction, label=input_y
cost = paddle.nn.functional.cross_entropy(
input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)
......
......@@ -203,7 +203,12 @@ class TestFleetBaseSingleError(unittest.TestCase):
fc_1 = fluid.layers.fc(input=input_x, size=64, act='tanh')
prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
cost = fluid.layers.cross_entropy(input=prediction, label=input_y)
cost = paddle.nn.functional.cross_entropy(
input=prediction,
label=input_y,
reduction='none',
use_softmax=False,
)
avg_cost = paddle.mean(x=cost)
fleet.init(is_collective=True)
......
......@@ -54,8 +54,8 @@ class TestFleetBase(unittest.TestCase):
fc_1 = paddle.fluid.layers.fc(input=input_x, size=64, act='tanh')
fc_2 = paddle.fluid.layers.fc(input=fc_1, size=64, act='tanh')
prediction = paddle.fluid.layers.fc(input=[fc_2], size=2, act='softmax')
cost = paddle.fluid.layers.cross_entropy(
input=prediction, label=input_y
cost = paddle.nn.functional.cross_entropy(
input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)
......
......@@ -40,8 +40,8 @@ class TestFleetBase_1(unittest.TestCase):
fc_1 = paddle.fluid.layers.fc(input=input_x, size=64, act='tanh')
fc_2 = paddle.fluid.layers.fc(input=fc_1, size=64, act='tanh')
prediction = paddle.fluid.layers.fc(input=[fc_2], size=2, act='softmax')
cost = paddle.fluid.layers.cross_entropy(
input=prediction, label=input_y
cost = paddle.nn.functional.cross_entropy(
input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)
......@@ -71,8 +71,8 @@ class TestFleetBase(unittest.TestCase):
fc_1 = paddle.fluid.layers.fc(input=input_x, size=64, act='tanh')
fc_2 = paddle.fluid.layers.fc(input=fc_1, size=64, act='tanh')
prediction = paddle.fluid.layers.fc(input=[fc_2], size=2, act='softmax')
cost = paddle.fluid.layers.cross_entropy(
input=prediction, label=input_y
cost = paddle.nn.functional.cross_entropy(
input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)
......
......@@ -85,7 +85,9 @@ class TestFleetBaseSingleRunCollective(unittest.TestCase):
fc_1 = fluid.layers.fc(input=input_x, size=64, act='tanh')
prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
cost = fluid.layers.cross_entropy(input=prediction, label=input_y)
cost = paddle.nn.functional.cross_entropy(
input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)
fleet.init(is_collective=True)
......@@ -124,7 +126,9 @@ class TestFleetBaseSingleRunPS(unittest.TestCase):
fc_1 = fluid.layers.fc(input=input_x, size=64, act='tanh')
prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
cost = fluid.layers.cross_entropy(input=prediction, label=input_y)
cost = paddle.nn.functional.cross_entropy(
input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)
fleet.init()
......
......@@ -53,7 +53,9 @@ class TestFuseBatchNormActPass(unittest.TestCase):
input=hidden3, act='relu', data_layout='NHWC'
)
prediction = fluid.layers.fc(input=hidden4, size=10, act='softmax')
loss = fluid.layers.cross_entropy(input=prediction, label=y)
loss = paddle.nn.functional.cross_entropy(
input=prediction, label=y, reduction='none', use_softmax=False
)
loss = paddle.mean(loss)
sgd = fluid.optimizer.SGD(learning_rate=0.001)
if use_cuda:
......
......@@ -106,7 +106,9 @@ class TestFusedBnAddActAPI(unittest.TestCase):
act='softmax',
param_attr=self.fc_param_attr,
)
loss = fluid.layers.cross_entropy(input=prediction, label=y)
loss = paddle.nn.functional.cross_entropy(
input=prediction, label=y, reduction='none', use_softmax=False
)
loss = paddle.mean(loss)
sgd = fluid.optimizer.SGD(learning_rate=0.001)
sgd = fluid.contrib.mixed_precision.decorate(
......@@ -162,7 +164,9 @@ class TestFusedBnAddActAPI(unittest.TestCase):
prediction = fluid.layers.fc(
input=out, size=10, act='softmax', param_attr=self.fc_param_attr
)
loss = fluid.layers.cross_entropy(input=prediction, label=y)
loss = paddle.nn.functional.cross_entropy(
input=prediction, label=y, reduction='none', use_softmax=False
)
loss = paddle.mean(loss)
sgd = fluid.optimizer.SGD(learning_rate=0.001)
sgd = fluid.contrib.mixed_precision.decorate(
......
......@@ -61,7 +61,9 @@ def simple_depthwise_net(use_feed):
hidden = sep_conv(hidden, channel=200, stride=2, filter=5)
hidden = fluid.layers.relu(hidden)
prediction = fluid.layers.fc(hidden, size=10, act='softmax')
loss = fluid.layers.cross_entropy(input=prediction, label=label)
loss = paddle.nn.functional.cross_entropy(
input=prediction, label=label, reduction='none', use_softmax=False
)
loss = paddle.mean(loss)
return loss
......
......@@ -68,7 +68,12 @@ def simple_fc_net(places, use_legacy_py_reader, use_double_buffer):
hidden, size=CLASS_NUM, act='softmax'
)
loss = paddle.mean(
fluid.layers.cross_entropy(input=predict_label, label=label)
paddle.nn.functional.cross_entropy(
input=predict_label,
label=label,
reduction='none',
use_softmax=False,
)
)
optimizer = fluid.optimizer.Adam()
......
......@@ -41,7 +41,9 @@ def bow_net(
fc_1 = fluid.layers.fc(input=bow_tanh, size=hid_dim, act="tanh")
fc_2 = fluid.layers.fc(input=fc_1, size=hid_dim2, act="tanh")
prediction = fluid.layers.fc(input=[fc_2], size=class_dim, act="softmax")
cost = fluid.layers.cross_entropy(input=prediction, label=label)
cost = paddle.nn.functional.cross_entropy(
input=prediction, label=label, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)
return avg_cost
......@@ -83,7 +85,9 @@ class TestGradientClip(unittest.TestCase):
hidden = fluid.layers.fc(input=image, size=32, act='relu')
predict = fluid.layers.fc(input=hidden, size=10, act='softmax')
cost = fluid.layers.cross_entropy(input=predict, label=label)
cost = paddle.nn.functional.cross_entropy(
input=predict, label=label, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(cost)
prog_clip = prog.clone()
......
......@@ -90,8 +90,10 @@ class AutoPruneLayer2(fluid.Layer):
label = self.linear2(label)
label = fluid.layers.cast(label, dtype="float32")
label = fluid.layers.cast(label, dtype='int64')
# Note that the label is not persistable in fluid.layers.cross_entropy.
loss = fluid.layers.cross_entropy(input=feature, label=label)
# Note that the label is not persistable in paddle.nn.functional.cross_entropy.
loss = paddle.nn.functional.cross_entropy(
input=feature, label=label, reduction='none', use_softmax=False
)
loss = paddle.mean(loss)
return loss
......@@ -107,7 +109,9 @@ class AutoPruneLayer3(fluid.Layer):
feature, num_or_sections=[10, 10], dim=1
)
# Note that: part2 is not used.
loss = fluid.layers.cross_entropy(input=part1, label=label)
loss = paddle.nn.functional.cross_entropy(
input=part1, label=label, reduction='none', use_softmax=False
)
loss = paddle.mean(loss)
if test_num == 1:
return loss, part2
......
......@@ -159,7 +159,9 @@ class TestImperativeMnist(unittest.TestCase):
cost_static = traced_layer([img])
helper.assertEachVar(cost, cost_static)
loss = fluid.layers.cross_entropy(cost, label)
loss = paddle.nn.functional.cross_entropy(
cost, label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
dy_out = avg_loss.numpy()
......@@ -199,7 +201,9 @@ class TestImperativeMnist(unittest.TestCase):
)
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
cost = mnist(img)
loss = fluid.layers.cross_entropy(cost, label)
loss = paddle.nn.functional.cross_entropy(
cost, label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
sgd.minimize(avg_loss)
......
......@@ -62,7 +62,9 @@ class TestImperativeMnistSortGradient(unittest.TestCase):
label2.stop_gradient = True
cost2 = mnist2(img2)
loss2 = fluid.layers.cross_entropy(cost2, label2)
loss2 = paddle.nn.functional.cross_entropy(
cost2, label2, reduction='none', use_softmax=False
)
avg_loss2 = paddle.mean(loss2)
dy_out2 = avg_loss2.numpy()
......@@ -102,7 +104,9 @@ class TestImperativeMnistSortGradient(unittest.TestCase):
)
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
cost = mnist(img)
loss = fluid.layers.cross_entropy(cost, label)
loss = paddle.nn.functional.cross_entropy(
cost, label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
sgd.minimize(avg_loss)
......
......@@ -477,8 +477,11 @@ class TestDygraphOCRAttention(unittest.TestCase):
dy_prediction = paddle.reshape(
dy_prediction, [label_out.shape[0], -1]
)
loss = fluid.layers.cross_entropy(
input=dy_prediction, label=label_out
loss = paddle.nn.functional.cross_entropy(
input=dy_prediction,
label=label_out,
reduction='none',
use_softmax=False,
)
avg_loss = paddle.sum(loss)
......@@ -555,8 +558,11 @@ class TestDygraphOCRAttention(unittest.TestCase):
static_prediction, shape=[-1, Config.num_classes + 2]
)
cost = fluid.layers.cross_entropy(
input=static_prediction, label=static_label_out
cost = paddle.nn.functional.cross_entropy(
input=static_prediction,
label=static_label_out,
reduction='none',
use_softmax=False,
)
static_avg_loss = paddle.sum(cost)
# param_grad_list = fluid.backward.append_backward(static_avg_loss)
......
......@@ -311,7 +311,9 @@ class TestDygraphResnet(unittest.TestCase):
helper.assertEachVar(out_dygraph, out_static)
resnet.train()
loss = fluid.layers.cross_entropy(input=out, label=label)
loss = paddle.nn.functional.cross_entropy(
input=out, label=label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(x=loss)
dy_out = avg_loss.numpy()
......@@ -364,7 +366,9 @@ class TestDygraphResnet(unittest.TestCase):
)
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
out = resnet(img)
loss = fluid.layers.cross_entropy(input=out, label=label)
loss = paddle.nn.functional.cross_entropy(
input=out, label=label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(x=loss)
optimizer.minimize(avg_loss)
......
......@@ -118,7 +118,9 @@ class TestDygraphResnetSortGradient(unittest.TestCase):
label.stop_gradient = True
out = resnet(img)
loss = fluid.layers.cross_entropy(input=out, label=label)
loss = paddle.nn.functional.cross_entropy(
input=out, label=label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(x=loss)
dy_out = avg_loss.numpy()
......@@ -174,7 +176,9 @@ class TestDygraphResnetSortGradient(unittest.TestCase):
)
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
out = resnet(img)
loss = fluid.layers.cross_entropy(input=out, label=label)
loss = paddle.nn.functional.cross_entropy(
input=out, label=label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(x=loss)
optimizer.minimize(avg_loss)
......
......@@ -373,8 +373,11 @@ class TestImperativeResneXt(unittest.TestCase):
out = se_resnext(img)
softmax_out = paddle.nn.functional.softmax(out)
loss = fluid.layers.cross_entropy(
input=softmax_out, label=label
loss = paddle.nn.functional.cross_entropy(
input=softmax_out,
label=label,
reduction='none',
use_softmax=False,
)
avg_loss = paddle.mean(x=loss)
......@@ -453,7 +456,12 @@ class TestImperativeResneXt(unittest.TestCase):
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
out = se_resnext(img)
softmax_out = paddle.nn.functional.softmax(out)
loss = fluid.layers.cross_entropy(input=softmax_out, label=label)
loss = paddle.nn.functional.cross_entropy(
input=softmax_out,
label=label,
reduction='none',
use_softmax=False,
)
avg_loss = paddle.mean(x=loss)
optimizer.minimize(avg_loss)
......
......@@ -49,7 +49,9 @@ def convolutional_neural_network(img):
def static_train_net(img, label):
prediction = convolutional_neural_network(img)
loss = fluid.layers.cross_entropy(input=prediction, label=label)
loss = paddle.nn.functional.cross_entropy(
input=prediction, label=label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
optimizer = fluid.optimizer.SGD(learning_rate=0.001)
......@@ -172,7 +174,9 @@ class TestImperativeStaticModelRunnerMnist(unittest.TestCase):
cost = mnist(img)
loss = fluid.layers.cross_entropy(cost, label)
loss = paddle.nn.functional.cross_entropy(
cost, label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
avg_loss.backward()
......
......@@ -78,7 +78,9 @@ class TestImperativeStaticModelRunnerWhile(unittest.TestCase):
pred = while_softmax_regression(img)
loss = fluid.layers.cross_entropy(input=pred, label=label)
loss = paddle.nn.functional.cross_entropy(
input=pred, label=label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
optimizer = fluid.optimizer.SGD(learning_rate=0.001)
......@@ -149,7 +151,9 @@ class TestImperativeStaticModelRunnerWhile(unittest.TestCase):
cost = while_net(img)
loss = fluid.layers.cross_entropy(cost, label)
loss = paddle.nn.functional.cross_entropy(
cost, label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
avg_loss.backward()
......@@ -174,7 +178,9 @@ class TestImperativeStaticModelRunnerWhile(unittest.TestCase):
pred = while_softmax_regression(img)
loss = fluid.layers.cross_entropy(input=pred, label=label)
loss = paddle.nn.functional.cross_entropy(
input=pred, label=label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
optimizer = fluid.optimizer.SGD(learning_rate=0.001)
......
......@@ -182,7 +182,9 @@ class TestSaveInferenceModel(unittest.TestCase):
auc_var, batch_auc_var, auc_states = paddle.static.auc(
input=predict, label=y
)
cost = fluid.layers.cross_entropy(input=predict, label=y)
cost = paddle.nn.functional.cross_entropy(
input=predict, label=y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)
place = core.CPUPlace()
......
......@@ -40,7 +40,9 @@ def fc_with_batchnorm(use_feed):
hidden = paddle.static.nn.batch_norm(input=hidden)
prediction = fluid.layers.fc(hidden, size=10, act='softmax')
loss = fluid.layers.cross_entropy(input=prediction, label=label)
loss = paddle.nn.functional.cross_entropy(
input=prediction, label=label, reduction='none', use_softmax=False
)
loss = paddle.mean(loss)
return loss
......
......@@ -46,7 +46,9 @@ def lstm_net(
lstm_max_tanh = paddle.tanh(lstm_max)
fc1 = fluid.layers.fc(input=lstm_max_tanh, size=hid_dim2, act='tanh')
prediction = fluid.layers.fc(input=fc1, size=class_dim, act='softmax')
cost = fluid.layers.cross_entropy(input=prediction, label=label)
cost = paddle.nn.functional.cross_entropy(
input=prediction, label=label, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)
return avg_cost
......
......@@ -35,7 +35,9 @@ def simple_fc_net(use_feed):
for _ in range(hidden_layer):
x = fluid.layers.fc(input=x, size=20, act='relu')
y_predict = fluid.layers.fc(input=x, size=10, act='softmax')
cost = fluid.layers.cross_entropy(input=y_predict, label=y)
cost = paddle.nn.functional.cross_entropy(
input=y_predict, label=y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(cost)
return avg_cost
......@@ -48,7 +50,9 @@ def fc_with_inplace_net(use_feed):
reshape = paddle.reshape(x=fc, shape=[-1, 2, 5])
reshape = paddle.reshape(x=reshape, shape=[-1, 5, 2])
y_predict = fluid.layers.fc(input=reshape, size=10, act='softmax')
cost = fluid.layers.cross_entropy(input=y_predict, label=y)
cost = paddle.nn.functional.cross_entropy(
input=y_predict, label=y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(cost)
return avg_cost
......
......@@ -94,7 +94,9 @@ class LinerNetWithLabel(paddle.nn.Layer):
)
def forward(self, x, label):
out = self._linear(x)
loss = fluid.layers.cross_entropy(out, label)
loss = paddle.nn.functional.cross_entropy(
out, label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
return out, avg_loss
......@@ -112,7 +114,9 @@ class LinerNetWithPruneInput(paddle.nn.Layer):
)
def forward(self, x, label):
out = self._linear(x)
loss = fluid.layers.cross_entropy(out, label)
loss = paddle.nn.functional.cross_entropy(
out, label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
return out
......@@ -312,7 +316,9 @@ def train(layer, input_size=784, label_size=1):
cost = layer(img)
loss = fluid.layers.cross_entropy(cost, label)
loss = paddle.nn.functional.cross_entropy(
cost, label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
avg_loss.backward()
......
......@@ -49,7 +49,9 @@ def convolutional_neural_network(img):
def static_train_net(img, label):
prediction = convolutional_neural_network(img)
loss = fluid.layers.cross_entropy(input=prediction, label=label)
loss = paddle.nn.functional.cross_entropy(
input=prediction, label=label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
optimizer = fluid.optimizer.SGD(learning_rate=0.001)
......
......@@ -28,7 +28,9 @@ img_shape = [1, 28, 28]
def loss_net(hidden, label):
prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
loss = fluid.layers.cross_entropy(input=prediction, label=label)
loss = paddle.nn.functional.cross_entropy(
input=prediction, label=label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
return avg_loss
......
......@@ -99,7 +99,9 @@ class TestDygraphDataLoader(unittest.TestCase):
step = 0
for image, label in dataloader():
out = fc_net(image)
loss = fluid.layers.cross_entropy(out, label)
loss = paddle.nn.functional.cross_entropy(
out, label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
avg_loss.backward()
optimizer.minimize(avg_loss)
......@@ -169,7 +171,9 @@ class TestDygraphDataLoaderWithBatchedDataset(TestDygraphDataLoader):
step = 0
for image, label in dataloader():
out = fc_net(image)
loss = fluid.layers.cross_entropy(out, label)
loss = paddle.nn.functional.cross_entropy(
out, label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
avg_loss.backward()
optimizer.minimize(avg_loss)
......
......@@ -99,7 +99,9 @@ class TestDygraphDataLoader(unittest.TestCase):
step = 0
for image, label in dataloader():
out = fc_net(image)
loss = fluid.layers.cross_entropy(out, label)
loss = paddle.nn.functional.cross_entropy(
out, label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
avg_loss.backward()
optimizer.minimize(avg_loss)
......@@ -167,7 +169,9 @@ class TestDygraphDataLoaderWithBatchedDataset(TestDygraphDataLoader):
step = 0
for image, label in dataloader():
out = fc_net(image)
loss = fluid.layers.cross_entropy(out, label)
loss = paddle.nn.functional.cross_entropy(
out, label, reduction='none', use_softmax=False
)
avg_loss = paddle.mean(loss)
avg_loss.backward()
optimizer.minimize(avg_loss)
......
......@@ -80,7 +80,12 @@ def simple_fc_net_static():
bias_attr=bias_attr,
)
loss = paddle.mean(
fluid.layers.cross_entropy(input=predict_label, label=label)
paddle.nn.functional.cross_entropy(
input=predict_label,
label=label,
reduction='none',
use_softmax=False,
)
)
optimizer = fluid.optimizer.Adam()
......
......@@ -80,7 +80,12 @@ def simple_fc_net_static():
bias_attr=bias_attr,
)
loss = paddle.mean(
fluid.layers.cross_entropy(input=predict_label, label=label)
paddle.nn.functional.cross_entropy(
input=predict_label,
label=label,
reduction='none',
use_softmax=False,
)
)
optimizer = fluid.optimizer.Adam()
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册