From 2472d8f9dd49c7865a8a8375eb83810333ac9c2c Mon Sep 17 00:00:00 2001
From: kangguangli
Date: Thu, 15 Dec 2022 10:47:53 +0800
Subject: [PATCH] replace cross_entropy in
 python/paddle/fluid/tests/unittests/*.py (#48975)

---
 .../tests/unittests/test_activation_nn_grad.py       |  2 +-
 .../paddle/fluid/tests/unittests/test_backward.py    |  3 ++-
 .../tests/unittests/test_fuse_bn_add_act_pass.py     |  3 ++-
 .../unittests/test_fuse_elewise_add_act_pass.py      |  3 ++-
 .../unittests/test_fuse_relu_depthwise_conv_pass.py  |  3 ++-
 .../unittests/test_imperative_data_loader_base.py    |  3 ++-
 .../test_imperative_data_loader_exception.py         |  3 ++-
 .../test_imperative_data_loader_fds_clear.py         |  3 ++-
 .../tests/unittests/test_imperative_double_grad.py   | 13 +++++++------
 .../fluid/tests/unittests/test_imperative_gnn.py     |  3 ++-
 .../test_imperative_parallel_coalesce_split.py       |  3 ++-
 .../unittests/test_imperative_reinforcement.py       |  3 ++-
 python/paddle/fluid/tests/unittests/test_layers.py   |  6 +++---
 .../unittests/test_memory_reuse_exclude_feed_var.py  |  3 ++-
 .../tests/unittests/test_op_function_generator.py    |  4 ++--
 .../unittests/test_paddle_imperative_double_grad.py  |  9 +++++----
 ...parallel_executor_inference_feed_partial_data.py  |  7 ++++---
 .../paddle/fluid/tests/unittests/test_var_base.py    |  5 +++--
 .../fluid/tests/unittests/test_while_loop_op.py      |  3 ++-
 19 files changed, 49 insertions(+), 33 deletions(-)

diff --git a/python/paddle/fluid/tests/unittests/test_activation_nn_grad.py b/python/paddle/fluid/tests/unittests/test_activation_nn_grad.py
index f10232cf02b..4e4d6a08bc3 100644
--- a/python/paddle/fluid/tests/unittests/test_activation_nn_grad.py
+++ b/python/paddle/fluid/tests/unittests/test_activation_nn_grad.py
@@ -186,7 +186,7 @@ class TestReluDoubleGradCheck(unittest.TestCase):
 
         x = layers.data('x', shape, False, dtype)
         x.persistable = True
-        y = layers.relu(x)
+        y = F.relu(x)
 
         x_arr = np.random.uniform(-1, 1, shape).astype(dtype)
         x_arr[np.abs(x_arr) < 0.005] = 0.02
diff --git a/python/paddle/fluid/tests/unittests/test_backward.py b/python/paddle/fluid/tests/unittests/test_backward.py
index a24d49b5ead..2409e7a1ad5 100644
--- a/python/paddle/fluid/tests/unittests/test_backward.py
+++ b/python/paddle/fluid/tests/unittests/test_backward.py
@@ -18,6 +18,7 @@ import numpy as np
 
 import paddle
 import paddle.fluid as fluid
+import paddle.nn.functional as F
 import paddle.static as static
 
 
@@ -285,7 +286,7 @@ class TestGradientsError(unittest.TestCase):
         x = fluid.data(name='x', shape=[None, 2, 8, 8], dtype='float32')
         x.stop_gradient = False
         conv = fluid.layers.conv2d(x, 4, 1, bias_attr=False)
-        y = fluid.layers.relu(conv)
+        y = F.relu(conv)
 
         with self.assertRaises(TypeError):
             x_grad = fluid.gradients(y.name, x)
diff --git a/python/paddle/fluid/tests/unittests/test_fuse_bn_add_act_pass.py b/python/paddle/fluid/tests/unittests/test_fuse_bn_add_act_pass.py
index c644391eeea..503433df137 100644
--- a/python/paddle/fluid/tests/unittests/test_fuse_bn_add_act_pass.py
+++ b/python/paddle/fluid/tests/unittests/test_fuse_bn_add_act_pass.py
@@ -18,6 +18,7 @@ import numpy as np
 
 import paddle
 import paddle.fluid as fluid
+import paddle.nn.functional as F
 from paddle.fluid import core
 
 paddle.enable_static()
@@ -160,7 +161,7 @@ class TestFusedBnAddActAPI(unittest.TestCase):
                 data_layout='NHWC',
             )
             out = bn1 + bn2
-            out = fluid.layers.relu(out)
+            out = F.relu(out)
             prediction = fluid.layers.fc(
                 input=out, size=10, act='softmax', param_attr=self.fc_param_attr
             )
diff --git a/python/paddle/fluid/tests/unittests/test_fuse_elewise_add_act_pass.py b/python/paddle/fluid/tests/unittests/test_fuse_elewise_add_act_pass.py
index ffa3fd5cba4..0eea48760e8 100644
--- a/python/paddle/fluid/tests/unittests/test_fuse_elewise_add_act_pass.py
+++ b/python/paddle/fluid/tests/unittests/test_fuse_elewise_add_act_pass.py
@@ -22,6 +22,7 @@ from simple_nets import fc_with_batchnorm, init_data, simple_fc_net
 import paddle
 import paddle.fluid as fluid
 import paddle.fluid.core as core
+import paddle.nn.functional as F
 
 
 class TestMNIST(TestParallelExecutorBase):
@@ -97,7 +98,7 @@ class TestFuseActElewiseAddInplaceGradPass(unittest.TestCase):
             X = fluid.data(name="X", shape=[3, 3], dtype='float32')
             Y = fluid.data(name="Y", shape=[3, 3], dtype='float32')
             Out1 = X * 5
-            Out2 = fluid.layers.relu(Out1)
+            Out2 = F.relu(Out1)
             prediction = paddle.tensor.math._add_with_axis(Y, Out2, axis=1)
             loss = paddle.mean(prediction)
             sgd = fluid.optimizer.SGD(learning_rate=0.001)
diff --git a/python/paddle/fluid/tests/unittests/test_fuse_relu_depthwise_conv_pass.py b/python/paddle/fluid/tests/unittests/test_fuse_relu_depthwise_conv_pass.py
index bb2b22ff18e..649b61af7b5 100644
--- a/python/paddle/fluid/tests/unittests/test_fuse_relu_depthwise_conv_pass.py
+++ b/python/paddle/fluid/tests/unittests/test_fuse_relu_depthwise_conv_pass.py
@@ -20,6 +20,7 @@ from parallel_executor_test_base import DeviceType, TestParallelExecutorBase
 import paddle
 import paddle.fluid as fluid
 import paddle.fluid.core as core
+import paddle.nn.functional as F
 
 
 def norm(*args, **kargs):
@@ -59,7 +60,7 @@ def simple_depthwise_net(use_feed):
     hidden = paddle.reshape(img, (-1, 1, 28, 28))
     for _ in range(4):
         hidden = sep_conv(hidden, channel=200, stride=2, filter=5)
-        hidden = fluid.layers.relu(hidden)
+        hidden = F.relu(hidden)
     prediction = fluid.layers.fc(hidden, size=10, act='softmax')
     loss = paddle.nn.functional.cross_entropy(
         input=prediction, label=label, reduction='none', use_softmax=False
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_data_loader_base.py b/python/paddle/fluid/tests/unittests/test_imperative_data_loader_base.py
index 5e503c51b50..075e04664e1 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_data_loader_base.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_data_loader_base.py
@@ -17,6 +17,7 @@ import unittest
 
 import numpy as np
 
 import paddle.fluid as fluid
+import paddle.nn.functional as F
 from paddle.fluid.reader import use_pinned_memory
 
@@ -45,7 +46,7 @@ class TestDygraphDataLoader(unittest.TestCase):
     def iter_loader_data(self, loader):
         for _ in range(self.epoch_num):
             for image, label in loader():
-                relu = fluid.layers.relu(image)
+                relu = F.relu(image)
                 self.assertEqual(image.shape, [self.batch_size, 784])
                 self.assertEqual(label.shape, [self.batch_size, 1])
                 self.assertEqual(relu.shape, [self.batch_size, 784])
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_data_loader_exception.py b/python/paddle/fluid/tests/unittests/test_imperative_data_loader_exception.py
index 6986d9d7b50..7ecaa41d466 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_data_loader_exception.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_data_loader_exception.py
@@ -18,6 +18,7 @@ import unittest
 
 import numpy as np
 
 import paddle.fluid as fluid
+import paddle.nn.functional as F
 from paddle.fluid import core
 
@@ -112,7 +113,7 @@ class TestDygraphDataLoaderWithException(unittest.TestCase):
         try:
             for _ in range(self.epoch_num):
                 for image, _ in loader():
-                    fluid.layers.relu(image)
+                    F.relu(image)
         except core.EnforceNotMet as ex:
             self.assertIn("Blocking queue is killed", str(ex))
             exception = ex
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_data_loader_fds_clear.py b/python/paddle/fluid/tests/unittests/test_imperative_data_loader_fds_clear.py
index d46df51de16..6bf4ed79732 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_data_loader_fds_clear.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_data_loader_fds_clear.py
@@ -17,6 +17,7 @@ import unittest
 
 import numpy as np
 
 import paddle.fluid as fluid
+import paddle.nn.functional as F
 from paddle.io import DataLoader, Dataset
 
@@ -71,7 +72,7 @@ class TestDygraphDataLoaderMmapFdsClear(unittest.TestCase):
     def run_one_epoch_with_break(self, loader):
         for step_id, data in enumerate(loader()):
             image, label = data
-            relu = fluid.layers.relu(image)
+            relu = F.relu(image)
             self.assertEqual(image.shape, [self.batch_size, 784])
             self.assertEqual(label.shape, [self.batch_size, 1])
             self.assertEqual(relu.shape, [self.batch_size, 784])
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_double_grad.py b/python/paddle/fluid/tests/unittests/test_imperative_double_grad.py
index 39927e0a2da..495940d9788 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_double_grad.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_double_grad.py
@@ -19,6 +19,7 @@ import numpy as np
 
 import paddle
 import paddle.fluid as fluid
+import paddle.nn.functional as F
 from paddle.fluid.framework import _test_eager_guard
 from paddle.fluid.wrapped_decorator import wrap_decorator
 from paddle.vision.models import resnet50, resnet101
@@ -317,8 +318,8 @@ class TestDygraphDoubleGrad(TestCase):
         numel = x_np.size
         x.stop_gradient = False
 
-        y1 = fluid.layers.relu(x)
-        y2 = fluid.layers.relu(x)
+        y1 = F.relu(x)
+        y2 = F.relu(x)
         z = y1 + y2
         w = z * z
 
@@ -436,7 +437,7 @@ class TestDygraphDoubleGrad(TestCase):
         numel = x_np.size
         x.stop_gradient = False
 
-        y = fluid.layers.relu(x)
+        y = F.relu(x)
         z = y + 1
         w = z * z
 
@@ -489,8 +490,8 @@ class TestDygraphDoubleGrad(TestCase):
         numel = x_np.size
         x.stop_gradient = False
 
-        y1 = fluid.layers.relu(x)
-        y2 = fluid.layers.relu(x)
+        y1 = F.relu(x)
+        y2 = F.relu(x)
         z = y1 + y2
         w = z * z
 
@@ -540,7 +541,7 @@ class TestDygraphDoubleGrad(TestCase):
         numel = x_np.size
         x.stop_gradient = False
 
-        y = fluid.layers.relu(x)
+        y = F.relu(x)
         z = y + 1
         w = z * z
 
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_gnn.py b/python/paddle/fluid/tests/unittests/test_imperative_gnn.py
index db750a5aa11..bce19cc20c9 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_gnn.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_gnn.py
@@ -21,6 +21,7 @@ from test_imperative_base import new_program_scope
 import paddle
 import paddle.fluid as fluid
 import paddle.fluid.core as core
+import paddle.nn.functional as F
 from paddle.fluid.dygraph.base import to_variable
 from paddle.fluid.framework import _test_eager_guard
 from paddle.fluid.optimizer import AdamOptimizer
@@ -58,7 +59,7 @@ class GCN(fluid.Layer):
         self.gc2 = GraphConv(self.full_name(), 32, 10)
 
     def forward(self, x, adj):
-        x = fluid.layers.relu(self.gc(x, adj))
+        x = F.relu(self.gc(x, adj))
         return self.gc2(x, adj)
 
 
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_parallel_coalesce_split.py b/python/paddle/fluid/tests/unittests/test_imperative_parallel_coalesce_split.py
index 2482359d745..fdfe2e0c082 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_parallel_coalesce_split.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_parallel_coalesce_split.py
@@ -19,6 +19,7 @@ import numpy as np
 
 import paddle
 import paddle.fluid as fluid
+import paddle.nn.functional as F
 from paddle.fluid import core
 from paddle.fluid.dygraph.base import to_variable
 from paddle.fluid.dygraph.parallel import (
@@ -34,7 +35,7 @@ class MyLayer(fluid.Layer):
         super().__init__(name_scope)
 
     def forward(self, inputs):
-        x = fluid.layers.relu(inputs)
+        x = F.relu(inputs)
         x = paddle.multiply(x, x)
         x = paddle.sum(x)
         return [x]
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_reinforcement.py b/python/paddle/fluid/tests/unittests/test_imperative_reinforcement.py
index ae8ff27e5f8..1c548f5caea 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_reinforcement.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_reinforcement.py
@@ -19,6 +19,7 @@ from test_imperative_base import new_program_scope
 
 import paddle
 import paddle.fluid as fluid
+import paddle.nn.functional as F
 from paddle.fluid import core
 from paddle.fluid.optimizer import SGDOptimizer
 
@@ -38,7 +39,7 @@ class Policy(fluid.dygraph.Layer):
         x = paddle.reshape(inputs, shape=[-1, 4])
         x = self.affine1(x)
         x = paddle.nn.functional.dropout(x, self.dropout_ratio)
-        x = fluid.layers.relu(x)
+        x = F.relu(x)
         action_scores = self.affine2(x)
         return paddle.nn.functional.softmax(action_scores, axis=1)
 
diff --git a/python/paddle/fluid/tests/unittests/test_layers.py b/python/paddle/fluid/tests/unittests/test_layers.py
index 8640db18aa1..ea6c9399c8c 100644
--- a/python/paddle/fluid/tests/unittests/test_layers.py
+++ b/python/paddle/fluid/tests/unittests/test_layers.py
@@ -344,7 +344,7 @@ class TestLayer(LayerTest):
     def test_relu(self):
         with self.static_graph():
             t = layers.data(name='t', shape=[3, 3], dtype='float32')
-            ret = layers.relu(t)
+            ret = F.relu(t)
             static_ret = self.get_static_graph_result(
                 feed={'t': np.ones([3, 3], dtype='float32')}, fetch_list=[ret]
             )[0]
@@ -352,11 +352,11 @@ class TestLayer(LayerTest):
         with self.dynamic_graph():
             with _test_eager_guard():
                 t = np.ones([3, 3], dtype='float32')
-                dy_eager_ret = layers.relu(base.to_variable(t))
+                dy_eager_ret = F.relu(base.to_variable(t))
                 dy_eager_ret_value = dy_eager_ret.numpy()
 
             t = np.ones([3, 3], dtype='float32')
-            dy_ret = layers.relu(base.to_variable(t))
+            dy_ret = F.relu(base.to_variable(t))
             dy_ret_value = dy_ret.numpy()
 
         np.testing.assert_allclose(static_ret, dy_ret_value, rtol=1e-05)
diff --git a/python/paddle/fluid/tests/unittests/test_memory_reuse_exclude_feed_var.py b/python/paddle/fluid/tests/unittests/test_memory_reuse_exclude_feed_var.py
index 232c0f5c492..bdd6b3d3048 100644
--- a/python/paddle/fluid/tests/unittests/test_memory_reuse_exclude_feed_var.py
+++ b/python/paddle/fluid/tests/unittests/test_memory_reuse_exclude_feed_var.py
@@ -18,6 +18,7 @@ import numpy as np
 
 import paddle
 import paddle.fluid as fluid
+import paddle.nn.functional as F
 
 
 class TestMemoryReuseExcludeFeedVar(unittest.TestCase):
@@ -29,7 +30,7 @@ class TestMemoryReuseExcludeFeedVar(unittest.TestCase):
         image = fluid.layers.data(
             name='image', shape=self.image_shape, dtype='float32'
         )
-        relu_image = fluid.layers.relu(image)
+        relu_image = F.relu(image)
         loss = paddle.mean(relu_image)
 
         build_strategy = fluid.BuildStrategy()
diff --git a/python/paddle/fluid/tests/unittests/test_op_function_generator.py b/python/paddle/fluid/tests/unittests/test_op_function_generator.py
index f3991150193..eff73a4548f 100644
--- a/python/paddle/fluid/tests/unittests/test_op_function_generator.py
+++ b/python/paddle/fluid/tests/unittests/test_op_function_generator.py
@@ -18,7 +18,7 @@ import numpy as np
 
 import paddle
 import paddle.fluid as fluid
-import paddle.fluid.layers as layers
+import paddle.nn.functional as F
 from paddle import _legacy_C_ops
 
 
@@ -66,7 +66,7 @@ class TestVariable(unittest.TestCase):
             a = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
             x = fluid.dygraph.to_variable(a)
 
-            res1 = layers.relu(x)
+            res1 = F.relu(x)
             res2 = _legacy_C_ops.relu(x)
 
             np.testing.assert_array_equal(res1.numpy(), res2.numpy())
diff --git a/python/paddle/fluid/tests/unittests/test_paddle_imperative_double_grad.py b/python/paddle/fluid/tests/unittests/test_paddle_imperative_double_grad.py
index 21ad7092f57..ff62b212337 100644
--- a/python/paddle/fluid/tests/unittests/test_paddle_imperative_double_grad.py
+++ b/python/paddle/fluid/tests/unittests/test_paddle_imperative_double_grad.py
@@ -19,6 +19,7 @@ import numpy as np
 
 import paddle
 import paddle.fluid as fluid
+import paddle.nn.functional as F
 from paddle.fluid.framework import _in_legacy_dygraph
 from paddle.fluid.wrapped_decorator import wrap_decorator
 
@@ -220,7 +221,7 @@ class TestDygraphDoubleGrad(TestCase):
         numel = x_np.size
         x.stop_gradient = False
 
-        y = fluid.layers.relu(x)
+        y = F.relu(x)
         z = y + 1
         w = z * z
 
@@ -261,8 +262,8 @@ class TestDygraphDoubleGrad(TestCase):
         numel = x_np.size
         x.stop_gradient = False
 
-        y1 = fluid.layers.relu(x)
-        y2 = fluid.layers.relu(x)
+        y1 = F.relu(x)
+        y2 = F.relu(x)
         z = y1 + y2
         w = z * z
 
@@ -308,7 +309,7 @@ class TestDygraphDoubleGrad(TestCase):
         numel = x_np.size
         x.stop_gradient = False
 
-        y = fluid.layers.relu(x)
+        y = F.relu(x)
         z = y + 1
         w = z * z
 
diff --git a/python/paddle/fluid/tests/unittests/test_parallel_executor_inference_feed_partial_data.py b/python/paddle/fluid/tests/unittests/test_parallel_executor_inference_feed_partial_data.py
index 7d3823a07ee..675e39ececf 100644
--- a/python/paddle/fluid/tests/unittests/test_parallel_executor_inference_feed_partial_data.py
+++ b/python/paddle/fluid/tests/unittests/test_parallel_executor_inference_feed_partial_data.py
@@ -18,6 +18,7 @@ import numpy as np
 
 import paddle
 import paddle.fluid as fluid
+import paddle.nn.functional as F
 
 
 class TestInferencePartialFeed(unittest.TestCase):
@@ -38,9 +39,9 @@ class TestInferencePartialFeed(unittest.TestCase):
         else:
             lr = fluid.data(name='lr', shape=[None], dtype='float32')
 
-        relu_x = fluid.layers.relu(x)
-        relu_y = fluid.layers.relu(y)
-        relu_lr = fluid.layers.relu(lr)
+        relu_x = F.relu(x)
+        relu_y = F.relu(y)
+        relu_lr = F.relu(lr)
 
         exe = fluid.Executor(places[0])
         exe.run(startup_prog)
diff --git a/python/paddle/fluid/tests/unittests/test_var_base.py b/python/paddle/fluid/tests/unittests/test_var_base.py
index 6fe6583b52c..e7fbd7bf814 100644
--- a/python/paddle/fluid/tests/unittests/test_var_base.py
+++ b/python/paddle/fluid/tests/unittests/test_var_base.py
@@ -20,6 +20,7 @@
 import paddle
 import paddle.fluid as fluid
 import paddle.fluid.core as core
+import paddle.nn.functional as F
 from paddle.fluid.framework import _in_legacy_dygraph, _test_eager_guard
 
 
@@ -653,7 +654,7 @@ class TestVarBase(unittest.TestCase):
         with fluid.dygraph.guard():
             var = fluid.dygraph.to_variable(self.array)
             var.stop_gradient = False
-            loss = fluid.layers.relu(var)
+            loss = F.relu(var)
             loss.backward()
             grad_var = var._grad_ivar()
             self.assertEqual(grad_var.shape, self.shape)
@@ -667,7 +668,7 @@
         with fluid.dygraph.guard():
             var = fluid.dygraph.to_variable(self.array)
             var.stop_gradient = False
-            loss = fluid.layers.relu(var)
+            loss = F.relu(var)
             loss.backward()
             grad_var = var.gradient()
             self.assertEqual(grad_var.shape, self.array.shape)
diff --git a/python/paddle/fluid/tests/unittests/test_while_loop_op.py b/python/paddle/fluid/tests/unittests/test_while_loop_op.py
index abc12ef1df8..c5c31ac5717 100644
--- a/python/paddle/fluid/tests/unittests/test_while_loop_op.py
+++ b/python/paddle/fluid/tests/unittests/test_while_loop_op.py
@@ -20,6 +20,7 @@
 import paddle
 import paddle.fluid as fluid
 import paddle.fluid.core as core
 import paddle.fluid.layers as layers
+import paddle.nn.functional as F
 from paddle.fluid.backward import append_backward
 from paddle.fluid.framework import Program, program_guard
@@ -96,7 +97,7 @@ class TestApiWhileLoop(unittest.TestCase):
 
             test_list[0] = paddle.reshape(test_list[0], [2, -1]) + 1
             test_list_dict[0]["test_key"] += 1
-            test_list_dict[0]["test_key"] = fluid.layers.relu(
+            test_list_dict[0]["test_key"] = F.relu(
                 test_list_dict[0]["test_key"]
             )
 
-- 
GitLab
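
Illustration (not part of the patch): every hunk above applies the same
mechanical migration from the deprecated fluid.layers.relu to
paddle.nn.functional.relu. Below is a minimal sketch of the equivalence,
assuming a Paddle 2.x install where dygraph (imperative) mode is the default:

    import numpy as np

    import paddle
    import paddle.nn.functional as F

    # Random input containing both signs, mirroring the tests above.
    x = paddle.to_tensor(np.random.uniform(-1, 1, [3, 3]).astype('float32'))

    # Old API removed by this patch:  y = fluid.layers.relu(x)
    # New API used throughout:
    y = F.relu(x)

    # relu(x) == max(x, 0) elementwise; verify against NumPy.
    np.testing.assert_array_equal(y.numpy(), np.maximum(x.numpy(), 0))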