From 0601f5c4eeefdbff6a33da26c0a1fa2a33cfc215 Mon Sep 17 00:00:00 2001
From: minqiyang
Date: Wed, 9 Jan 2019 22:41:39 +0800
Subject: [PATCH] Add cross_entropy loss to mnist ut

---
 .../fluid/tests/unittests/test_imperative_optimizer.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/python/paddle/fluid/tests/unittests/test_imperative_optimizer.py b/python/paddle/fluid/tests/unittests/test_imperative_optimizer.py
index a0f35ed6ec2..42896336b59 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_optimizer.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_optimizer.py
@@ -125,8 +125,8 @@ class TestImperativeMnist(unittest.TestCase):
                 label._stop_gradient = True
 
                 cost = mnist(img)
-                # loss = fluid.layers.cross_entropy(cost)
-                avg_loss = fluid.layers.reduce_mean(cost)
+                loss = fluid.layers.cross_entropy(cost, label)
+                avg_loss = fluid.layers.mean(loss)
                 dy_out = avg_loss._numpy()
 
                 if batch_id == 0:
@@ -156,8 +156,8 @@ class TestImperativeMnist(unittest.TestCase):
                 name='pixel', shape=[1, 28, 28], dtype='float32')
             label = fluid.layers.data(name='label', shape=[1], dtype='int64')
             cost = mnist(img)
-            # loss = fluid.layers.cross_entropy(cost)
-            avg_loss = fluid.layers.reduce_mean(cost)
+            loss = fluid.layers.cross_entropy(cost, label)
+            avg_loss = fluid.layers.mean(loss)
             sgd.minimize(avg_loss)
 
             # initialize params and fetch them
--
GitLab
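
For reference, the loss pattern the test switches to can be illustrated with the minimal static-graph sketch below. This is not code from the patch: the `fluid.layers.fc` stand-in for the MNIST network, the SGD learning rate, and the random feed data are assumptions made for illustration. The point it shows is that `fluid.layers.cross_entropy` expects softmax-normalized probabilities together with int64 class labels, and `fluid.layers.mean` reduces the per-example loss to the scalar that is passed to `minimize`.

```python
import numpy as np
import paddle.fluid as fluid

# Inputs mirroring the shapes used in the test.
img = fluid.layers.data(name='pixel', shape=[1, 28, 28], dtype='float32')
label = fluid.layers.data(name='label', shape=[1], dtype='int64')

# Stand-in for the MNIST network used by the test (an assumption): any
# network whose last layer is softmax-activated produces a valid input
# for fluid.layers.cross_entropy, which takes probabilities, not logits.
cost = fluid.layers.fc(input=img, size=10, act='softmax')

loss = fluid.layers.cross_entropy(cost, label)  # per-example loss, shape [N, 1]
avg_loss = fluid.layers.mean(loss)              # scalar fed to the optimizer

sgd = fluid.optimizer.SGD(learning_rate=1e-3)   # illustrative learning rate
sgd.minimize(avg_loss)

exe = fluid.Executor(fluid.CPUPlace())
exe.run(fluid.default_startup_program())
out, = exe.run(
    feed={
        'pixel': np.random.random([8, 1, 28, 28]).astype('float32'),
        'label': np.random.randint(0, 10, [8, 1]).astype('int64'),
    },
    fetch_list=[avg_loss])
```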