Unverified commit 00f20313, authored by kangguangli, committed by GitHub

replace cross_entropy in python/paddle/fluid/tests/unittests/*/*.py except unittests/*.py (#48920)

Parent 16e364d3
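Why each call site pins `reduction='none'` and `use_softmax=False`: the legacy `fluid.layers.cross_entropy` op consumed probabilities (callers applied softmax themselves, often via `act='softmax'`) and returned the per-sample loss with no reduction, so the tests keep applying `paddle.mean` afterwards. A minimal sketch of the equivalent new-API call (tensor names and shapes are illustrative, assuming a Paddle version where `use_softmax` is available):

```python
import paddle

# Probabilities, as the legacy op expected (softmax already applied).
logits = paddle.randn([4, 10])
prob = paddle.nn.functional.softmax(logits)
label = paddle.randint(0, 10, shape=[4, 1])

# Stands in for the removed fluid.layers.cross_entropy(input=prob, label=label):
loss = paddle.nn.functional.cross_entropy(
    input=prob,
    label=label,
    reduction='none',   # legacy op returned per-sample loss; tests reduce it themselves
    use_softmax=False,  # legacy op took probabilities, not logits
)
avg_loss = paddle.mean(loss)  # matches the surrounding test code
```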
@@ -60,7 +60,12 @@ class TestASPHelperPruningBase(unittest.TestCase):
     def run_training_pruning_test(self, get_mask_gen_func, get_mask_check_func):
         with fluid.program_guard(self.main_program, self.startup_program):
             loss = paddle.mean(
-                fluid.layers.cross_entropy(input=self.predict, label=self.label)
+                paddle.nn.functional.cross_entropy(
+                    input=self.predict,
+                    label=self.label,
+                    reduction='none',
+                    use_softmax=False,
+                )
             )
             optimizer = paddle.incubate.asp.decorate(
                 fluid.optimizer.SGD(learning_rate=0.01)
......
@@ -269,7 +269,12 @@ class TestASPStaticCustomerizedPruneFunc(unittest.TestCase):
     def test_training_pruning(self):
         with fluid.program_guard(self.main_program, self.startup_program):
             loss = paddle.mean(
-                fluid.layers.cross_entropy(input=self.predict, label=self.label)
+                paddle.nn.functional.cross_entropy(
+                    input=self.predict,
+                    label=self.label,
+                    reduction='none',
+                    use_softmax=False,
+                )
             )
             optimizer = sparsity.decorate(
                 fluid.optimizer.SGD(learning_rate=0.01)
......
@@ -45,7 +45,12 @@ class TestASPStaticOptimize(unittest.TestCase):
         with fluid.program_guard(self.main_program, self.startup_program):
             self.img, self.label, predict = build_model()
             self.loss = paddle.mean(
-                fluid.layers.cross_entropy(input=predict, label=self.label)
+                paddle.nn.functional.cross_entropy(
+                    input=predict,
+                    label=self.label,
+                    reduction='none',
+                    use_softmax=False,
+                )
             )
             self.optimizer = fluid.optimizer.SGD(learning_rate=0.01)
......
@@ -65,7 +65,12 @@ class TestASPStaticPruningBase(unittest.TestCase):
     def test_training_pruning(self):
         with fluid.program_guard(self.main_program, self.startup_program):
             loss = paddle.mean(
-                fluid.layers.cross_entropy(input=self.predict, label=self.label)
+                paddle.nn.functional.cross_entropy(
+                    input=self.predict,
+                    label=self.label,
+                    reduction='none',
+                    use_softmax=False,
+                )
             )
             optimizer = paddle.incubate.asp.decorate(
                 fluid.optimizer.SGD(learning_rate=0.01)
......
@@ -146,7 +146,12 @@ class TestASPStaticOptimize(unittest.TestCase):
         with fluid.program_guard(self.main_program, self.startup_program):
             self.img, self.label, predict = build_model()
             self.loss = paddle.mean(
-                fluid.layers.cross_entropy(input=predict, label=self.label)
+                paddle.nn.functional.cross_entropy(
+                    input=predict,
+                    label=self.label,
+                    reduction='none',
+                    use_softmax=False,
+                )
             )
             self.optimizer = fluid.optimizer.SGD(learning_rate=0.01)
             self.optimizer = paddle.incubate.asp.decorate(self.optimizer)
......
@@ -60,7 +60,12 @@ class TestFleetWithASPSharding(unittest.TestCase):
             fc_3 = fluid.layers.fc(input=fc_2, size=64, act='tanh')
             fc_4 = fluid.layers.fc(input=fc_3, size=64, act='tanh')
             prediction = fluid.layers.fc(input=fc_4, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=input_y)
+            cost = paddle.nn.functional.cross_entropy(
+                input=prediction,
+                label=input_y,
+                reduction='none',
+                use_softmax=False,
+            )
             avg_cost = paddle.mean(x=cost)
             dist_strategy = paddle.distributed.fleet.DistributedStrategy()
......
@@ -49,7 +49,12 @@ class TestFleetWithASPStatic(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=input_x, size=64, act='tanh')
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=input_y)
+            cost = paddle.nn.functional.cross_entropy(
+                input=prediction,
+                label=input_y,
+                reduction='none',
+                use_softmax=False,
+            )
             avg_cost = paddle.mean(x=cost)
             strategy = paddle.distributed.fleet.DistributedStrategy()
@@ -122,7 +127,12 @@ class TestFleetWithASPAMPStatic(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=input_x, size=64, act='tanh')
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=input_y)
+            cost = paddle.nn.functional.cross_entropy(
+                input=prediction,
+                label=input_y,
+                reduction='none',
+                use_softmax=False,
+            )
             avg_cost = paddle.mean(x=cost)
             strategy = paddle.distributed.fleet.DistributedStrategy()
......
@@ -22,7 +22,9 @@ def add_fn(x):
 def loss_fn(x, lable):
-    loss = fluid.layers.cross_entropy(x, lable)
+    loss = paddle.nn.functional.cross_entropy(
+        x, lable, reduction='none', use_softmax=False
+    )
     return loss
@@ -45,7 +47,9 @@ def dyfunc_with_if_else(x_v, label=None):
     x_v = x_v + 1
     # plain if in python
     if label is not None:
-        loss = fluid.layers.cross_entropy(x_v, label)
+        loss = paddle.nn.functional.cross_entropy(
+            x_v, label, reduction='none', use_softmax=False
+        )
         return loss
     return x_v
@@ -302,7 +306,9 @@ def if_with_and_or(x_v, label=None):
     x_v = x_v + 1
     if label is not None:
-        loss = fluid.layers.cross_entropy(x_v, label)
+        loss = paddle.nn.functional.cross_entropy(
+            x_v, label, reduction='none', use_softmax=False
+        )
         return loss
     return x_v
......
@@ -107,7 +107,9 @@ class MNIST(fluid.dygraph.Layer):
         x = self.inference(inputs)
         if label is not None:
             acc = paddle.static.accuracy(input=x, label=label)
-            loss = fluid.layers.cross_entropy(x, label)
+            loss = paddle.nn.functional.cross_entropy(
+                x, label, reduction='none', use_softmax=False
+            )
             avg_loss = paddle.mean(loss)
             return x, acc, avg_loss
......
@@ -109,7 +109,9 @@ class StaticCode1:
         def true_fn_1():
             nonlocal __return_0, __return_1, __return_value_0, loss
-            loss = fluid.layers.cross_entropy(x_v, label)
+            loss = paddle.nn.functional.cross_entropy(
+                x_v, label, reduction='none', use_softmax=False
+            )
             __return_0 = _jst.create_bool_as_type(label is not None, True)
             __return_value_0 = loss
             return
@@ -178,7 +180,9 @@ class StaticCode2:
         def true_fn_3():
             nonlocal __return_2, __return_3, __return_value_1, loss
-            loss = fluid.layers.cross_entropy(x_v, label)
+            loss = paddle.nn.functional.cross_entropy(
+                x_v, label, reduction='none', use_softmax=False
+            )
             __return_2 = _jst.create_bool_as_type(label is not None, True)
             __return_value_1 = loss
             return
......
@@ -272,7 +272,12 @@ class ResNetHelper:
                 img, label = data
                 pred = resnet(img)
-                loss = fluid.layers.cross_entropy(input=pred, label=label)
+                loss = paddle.nn.functional.cross_entropy(
+                    input=pred,
+                    label=label,
+                    reduction='none',
+                    use_softmax=False,
+                )
                 avg_loss = paddle.mean(x=loss)
                 acc_top1 = paddle.static.accuracy(
                     input=pred, label=label, k=1
......
@@ -74,7 +74,12 @@ def train(to_static, build_strategy=None):
             # FIXME(Aurelius84): The following cross_entropy seems to bring out a
             # precision problem, need to figure out the underlying reason.
             # If we remove it, the loss between dygraph and dy2stat is exactly same.
-            loss = fluid.layers.cross_entropy(input=pred, label=label)
+            loss = paddle.nn.functional.cross_entropy(
+                input=pred,
+                label=label,
+                reduction='none',
+                use_softmax=False,
+            )
             avg_loss = paddle.mean(x=pred)
             acc_top1 = paddle.static.accuracy(input=pred, label=label, k=1)
             acc_top5 = paddle.static.accuracy(input=pred, label=label, k=5)
......
@@ -75,7 +75,9 @@ def train(to_static, build_strategy=None):
                 level='O2',
             ):
                 pred = resnet(img)
-                loss = fluid.layers.cross_entropy(input=pred, label=label)
+                loss = paddle.nn.functional.cross_entropy(
+                    input=pred, label=label, reduction='none', use_softmax=False
+                )
                 avg_loss = paddle.mean(x=pred)
                 acc_top1 = paddle.static.accuracy(input=pred, label=label, k=1)
                 acc_top5 = paddle.static.accuracy(input=pred, label=label, k=5)
......
@@ -340,7 +340,9 @@ class SeResNeXt(fluid.dygraph.Layer):
         out = self.out(y)
         softmax_out = paddle.nn.functional.softmax(out)
-        loss = fluid.layers.cross_entropy(input=softmax_out, label=label)
+        loss = paddle.nn.functional.cross_entropy(
+            input=softmax_out, label=label, reduction='none', use_softmax=False
+        )
         avg_loss = paddle.mean(x=loss)
         acc_top1 = paddle.static.accuracy(input=softmax_out, label=label, k=1)
......
@@ -106,7 +106,9 @@ class CNN(fluid.dygraph.Layer):
         prediction = self._fc_prediction(fc_1)
         prediction = self._fc1_act(prediction)
-        cost = fluid.layers.cross_entropy(input=prediction, label=label)
+        cost = paddle.nn.functional.cross_entropy(
+            input=prediction, label=label, reduction='none', use_softmax=False
+        )
         avg_cost = paddle.mean(x=cost)
         acc = paddle.static.accuracy(input=prediction, label=label)
         return avg_cost, prediction, acc
@@ -149,7 +151,9 @@ class BOW(fluid.dygraph.Layer):
         prediction = self._fc_prediction(fc_2)
         prediction = paddle.nn.functional.softmax(prediction)
-        cost = fluid.layers.cross_entropy(input=prediction, label=label)
+        cost = paddle.nn.functional.cross_entropy(
+            input=prediction, label=label, reduction='none', use_softmax=False
+        )
         avg_cost = paddle.mean(x=cost)
         acc = paddle.static.accuracy(input=prediction, label=label)
         return avg_cost, prediction, acc
@@ -195,7 +199,9 @@ class GRU(fluid.dygraph.Layer):
         fc_2 = paddle.tanh(fc_2)
         prediction = self._fc_prediction(fc_2)
         prediction = paddle.nn.functional.softmax(prediction)
-        cost = fluid.layers.cross_entropy(input=prediction, label=label)
+        cost = paddle.nn.functional.cross_entropy(
+            input=prediction, label=label, reduction='none', use_softmax=False
+        )
         avg_cost = paddle.mean(x=cost)
         acc = paddle.static.accuracy(input=prediction, label=label)
         return avg_cost, prediction, acc
@@ -254,7 +260,9 @@ class BiGRU(fluid.dygraph.Layer):
         prediction = paddle.nn.functional.softmax(prediction)
         # TODO(Aurelius84): Uncomment the following codes when we support return variable-length vars.
         # if label is not None:
-        cost = fluid.layers.cross_entropy(input=prediction, label=label)
+        cost = paddle.nn.functional.cross_entropy(
+            input=prediction, label=label, reduction='none', use_softmax=False
+        )
         avg_cost = paddle.mean(x=cost)
         acc = paddle.static.accuracy(input=prediction, label=label)
         return avg_cost, prediction, acc
......
@@ -34,7 +34,9 @@ class SimpleLayer(paddle.nn.Layer):
         x = paddle.flatten(x, 1, -1)
         if target is not None:
             x = paddle.nn.functional.softmax(x)
-            loss = paddle.fluid.layers.cross_entropy(x, target)
+            loss = paddle.nn.functional.cross_entropy(
+                x, target, reduction='none', use_softmax=False
+            )
             if self.use_ipu:
                 loss = paddle.incubate.identity_loss(loss, 1)
             else:
......
@@ -52,7 +52,9 @@ class SimpleLayer(paddle.nn.Layer):
             if self.loss_op:
                 loss = self.loss_op(x, target)
             else:
-                loss = paddle.fluid.layers.cross_entropy(x, target)
+                loss = paddle.nn.functional.cross_entropy(
+                    x, target, reduction='none', use_softmax=False
+                )
             if self.use_reduction:
                 loss = paddle.mean(loss)
             if self.use_identity_loss:
......
@@ -33,7 +33,9 @@ class SimpleLayer(paddle.nn.Layer):
         x = paddle.flatten(x, 1, -1)
         if target is not None:
             x = paddle.nn.functional.softmax(x)
-            loss = paddle.fluid.layers.cross_entropy(x, target)
+            loss = paddle.nn.functional.cross_entropy(
+                x, target, reduction='none', use_softmax=False
+            )
             return x, loss
         return x
......
@@ -120,7 +120,9 @@ class SimpleLayer(paddle.nn.Layer):
         x = paddle.flatten(x, 1, -1)
         if target is not None:
             x = paddle.nn.functional.softmax(x)
-            loss = paddle.fluid.layers.cross_entropy(x, target)
+            loss = paddle.nn.functional.cross_entropy(
+                x, target, reduction='none', use_softmax=False
+            )
             loss = paddle.incubate.identity_loss(loss, 1)
             return x, loss
         return x
......
@@ -35,7 +35,9 @@ class TestQuantizationSubGraph(unittest.TestCase):
             hidden = data
             for _ in range(num):
                 hidden = fluid.layers.fc(hidden, size=128, act='relu')
-            loss = fluid.layers.cross_entropy(input=hidden, label=label)
+            loss = paddle.nn.functional.cross_entropy(
+                input=hidden, label=label, reduction='none', use_softmax=False
+            )
             loss = paddle.mean(loss)
             return loss
......
@@ -263,7 +263,7 @@ class TestNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=z, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             adam = fluid.optimizer.Adam(learning_rate=0.01)
             adam.minimize(loss)
......
@@ -214,7 +214,7 @@ class TestNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=z, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             adam = paddle.optimizer.AdamW(learning_rate=0.01, weight_decay=0.02)
             adam.minimize(loss)
......
@@ -343,7 +343,7 @@ class TestElementwiseMaxNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=c, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -189,7 +189,7 @@ class TestElementwiseMinOpNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=c, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -112,7 +112,7 @@ class TestGeluNet(unittest.TestCase):
             fc_1_gelu = paddle.nn.functional.gelu(fc_1)
             prediction = fluid.layers.fc(input=fc_1_gelu, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -106,7 +106,7 @@ class TestLeakyReluNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=y, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -125,7 +125,7 @@ class TestRelu6Net(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=z, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -126,7 +126,7 @@ class TestReluNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=z, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -107,7 +107,7 @@ class TestTanhNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=d, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -263,7 +263,7 @@ class TestNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=z, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             adam = fluid.optimizer.Adam(learning_rate=0.01)
             adam.minimize(loss)
@@ -348,7 +348,7 @@ class TestNetWithEpsilonTensor(unittest.TestCase):
                 input=fc_1, size=2, param_attr=weight_attr2, act='softmax'
             )
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             beta1_init = 0.9
             beta2_init = 0.999
......
@@ -214,7 +214,7 @@ class TestNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=z, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             adam = paddle.optimizer.AdamW(learning_rate=0.01, weight_decay=0.02)
             adam.minimize(loss)
......
@@ -104,7 +104,7 @@ class TestCosNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=d, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -138,7 +138,7 @@ class TestElementwiseDivNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=g, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -302,7 +302,7 @@ class TestElementwiseMaxNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=c, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -189,7 +189,7 @@ class TestElementwiseMinOpNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=c, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -313,7 +313,7 @@ class TestElementwisePowNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=c, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -194,7 +194,7 @@ class TestSubtractNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=z, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -112,7 +112,7 @@ class TestGeluNet(unittest.TestCase):
             fc_1_gelu = paddle.nn.functional.gelu(fc_1)
             prediction = fluid.layers.fc(input=fc_1_gelu, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -106,7 +106,7 @@ class TestLeakyReluNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=y, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -104,7 +104,7 @@ class TestLogNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=d, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -247,7 +247,7 @@ class TestMulNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=result, size=8)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
@@ -324,7 +324,7 @@ class TestMulNet3_2(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=result, size=8)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
@@ -404,7 +404,7 @@ class TestMulNet3_2_xc2(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=result_re, size=8)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
@@ -485,7 +485,7 @@ class TestMulNet4_2(unittest.TestCase):
             prediction = fluid.layers.fc(input=result, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -104,7 +104,7 @@ class TestPowNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=z, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -112,7 +112,7 @@ class TestReduceSumNet(unittest.TestCase):
             prediction = fluid.layers.fc(input=z_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -125,7 +125,7 @@ class TestRelu6Net(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=z, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -118,7 +118,7 @@ class TestReluNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=z, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -52,7 +52,7 @@ class TestNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=z, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             rmsprop = fluid.optimizer.RMSProp(learning_rate=0.01)
             rmsprop.minimize(loss)
@@ -115,7 +115,7 @@ class TestCenteredNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=z, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             rmsprop = fluid.optimizer.RMSProp(learning_rate=0.01, centered=True)
             rmsprop.minimize(loss)
......
@@ -77,7 +77,7 @@ class TestNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=z, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -81,7 +81,7 @@ class TestSoftmaxNet(unittest.TestCase):
             # 4 x 2
             prob = paddle.nn.functional.softmax(prediction, axis=1)
-            cost = fluid.layers.cross_entropy(input=prob, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prob, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -107,7 +107,7 @@ class TestSqrtNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=d, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -104,7 +104,7 @@ class TestSquareNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=d, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
@@ -107,7 +107,7 @@ class TestTanhNet(unittest.TestCase):
             fc_1 = fluid.layers.fc(input=d, size=128)
             prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-            cost = fluid.layers.cross_entropy(input=prediction, label=label)
+            cost = paddle.nn.functional.cross_entropy(input=prediction, label=label, reduction='none', use_softmax=False)
             loss = paddle.mean(cost)
             sgd = fluid.optimizer.SGD(learning_rate=0.01)
             sgd.minimize(loss)
......
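As a hedged aside, the pattern repeated in the hunks above (an fc layer with `act='softmax'`, an unreduced loss, then `paddle.mean`) is mathematically the same as calling the new API on the raw logits with its defaults (`reduction='mean'`, `use_softmax=True`); the tests keep the explicit form because `prediction` is reused elsewhere (e.g. for accuracy). A small illustrative sanity check, with made-up tensor names, not part of the commit:

```python
import numpy as np
import paddle

z = paddle.randn([4, 2])                      # stand-in logits
label = paddle.randint(0, 2, shape=[4, 1])

# Pattern used in the tests: external softmax + unreduced loss + explicit mean.
prob = paddle.nn.functional.softmax(z)
a = paddle.mean(
    paddle.nn.functional.cross_entropy(
        input=prob, label=label, reduction='none', use_softmax=False
    )
)

# Same quantity computed from logits with the new API's defaults.
b = paddle.nn.functional.cross_entropy(input=z, label=label)

np.testing.assert_allclose(a.numpy(), b.numpy(), rtol=1e-6)
```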