From db41e39e9e09aadd77d2e8693ebc8dcdc222b4aa Mon Sep 17 00:00:00 2001
From: Weilong Wu
Date: Wed, 23 Mar 2022 11:00:51 +0800
Subject: [PATCH] Support test_layers(group_norm,while_loop) with eager mode
 (#40816)

---
 paddle/fluid/pybind/op_function_generator.h     |  1 +
 python/paddle/fluid/dygraph/nn.py               |  6 ++++++
 .../paddle/fluid/tests/unittests/test_layers.py | 16 ++++++++++++----
 3 files changed, 19 insertions(+), 4 deletions(-)

diff --git a/paddle/fluid/pybind/op_function_generator.h b/paddle/fluid/pybind/op_function_generator.h
index d8750c1d6c1..0a389153b0e 100644
--- a/paddle/fluid/pybind/op_function_generator.h
+++ b/paddle/fluid/pybind/op_function_generator.h
@@ -89,6 +89,7 @@ std::map<std::string, std::vector<std::string>> op_ins_map = {
      {"Input", "Label", "Weight", "Bias", "SampleWeight", "CustomDistProbs",
       "CustomDistAlias", "CustomDistAliasProbs"}},
     {"check_finite_and_unscale", {"X", "Scale", "FloatStatus"}},
+    {"group_norm", {"X", "Scale", "Bias"}},
 };
 
 // NOTE(zhiqiu): Like op_ins_map.
diff --git a/python/paddle/fluid/dygraph/nn.py b/python/paddle/fluid/dygraph/nn.py
index 5bb1aef6d6e..b41e3e0b502 100644
--- a/python/paddle/fluid/dygraph/nn.py
+++ b/python/paddle/fluid/dygraph/nn.py
@@ -2986,6 +2986,12 @@ class GroupNorm(layers.Layer):
             is_bias=True)
 
     def forward(self, input):
+        if in_dygraph_mode():
+            attrs = ('epsilon', self._epsilon, 'groups', self._groups)
+            out, _, _ = _C_ops.group_norm(input, self.weight, self.bias, *attrs)
+
+            return dygraph_utils._append_activation_in_dygraph(out, self._act)
+
         inputs = {'X': input}
         if self.bias is not None:
             inputs['Bias'] = self.bias
diff --git a/python/paddle/fluid/tests/unittests/test_layers.py b/python/paddle/fluid/tests/unittests/test_layers.py
index 36038d656b7..bb244a20bd8 100644
--- a/python/paddle/fluid/tests/unittests/test_layers.py
+++ b/python/paddle/fluid/tests/unittests/test_layers.py
@@ -1819,7 +1819,7 @@ class TestLayer(LayerTest):
 
         self.assertTrue(np.allclose(static_ret, static_ret2))
 
-    def test_group_norm(self):
+    def func_group_norm(self):
         if core.is_compiled_with_cuda():
             place = core.CUDAPlace(0)
         else:
@@ -1873,7 +1873,6 @@ class TestLayer(LayerTest):
                 with_lod=True)[0]
 
         with self.dynamic_graph():
-            # TODO(wuweilong): Add with _test_eager_guard():
             groupNorm = nn.GroupNorm(
                 channels=shape[1],
                 groups=2,
@@ -1886,6 +1885,11 @@ class TestLayer(LayerTest):
         self.assertTrue(np.allclose(static_ret, dy_rlt_value))
         self.assertTrue(np.allclose(static_ret, static_ret2))
 
+    def test_group_norm(self):
+        with _test_eager_guard():
+            self.func_group_norm()
+        self.func_group_norm()
+
     def test_instance_norm(self):
         if core.is_compiled_with_cuda():
             place = core.CUDAPlace(0)
@@ -2348,7 +2352,7 @@ class TestLayer(LayerTest):
         with self.assertRaises(TypeError):
             layers.eye(num_rows=3, batch_shape=[-1])
 
-    def test_while_loop(self):
+    def func_while_loop(self):
         with self.static_graph():
             i = layers.fill_constant(shape=[1], dtype='int64', value=0)
             ten = layers.fill_constant(shape=[1], dtype='int64', value=10)
@@ -2363,7 +2367,6 @@ class TestLayer(LayerTest):
             static_ret = self.get_static_graph_result(feed={}, fetch_list=out)
 
         with self.dynamic_graph():
-            # TODO(wuweilong): Add with _test_eager_guard():
             i = layers.fill_constant(shape=[1], dtype='int64', value=0)
             ten = layers.fill_constant(shape=[1], dtype='int64', value=10)
 
@@ -2384,6 +2387,11 @@ class TestLayer(LayerTest):
 
         self.assertTrue(np.array_equal(static_ret[0], dy_ret[0].numpy()))
 
+    def test_while_loop(self):
+        with _test_eager_guard():
+            self.func_while_loop()
+        self.func_while_loop()
+
     def test_compare(self):
         value_a = np.arange(3)
         value_b = np.arange(3)
--
GitLab
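
The test_layers.py changes above all apply one pattern: each affected test is
split into a func_* body plus a test_* entry point that runs the body twice,
first under _test_eager_guard() (eager mode) and then without it (legacy
dygraph), so both execution modes share one test body. Below is a minimal
self-contained sketch of that pattern, not part of the patch; the MyEagerTest
class and the ReLU check are hypothetical stand-ins, and it assumes a Paddle
build of this era where _test_eager_guard is importable from
paddle.fluid.framework:

import unittest

import numpy as np
import paddle
from paddle.fluid.framework import _test_eager_guard


class MyEagerTest(unittest.TestCase):
    def func_relu(self):
        # The body is mode-agnostic: it runs under whichever mode the
        # caller has enabled before invoking it.
        x = paddle.to_tensor(np.array([-1.0, 0.0, 2.0], dtype='float32'))
        y = paddle.nn.functional.relu(x)
        np.testing.assert_allclose(y.numpy(), [0.0, 0.0, 2.0])

    def test_relu(self):
        # Run once in eager mode, then once in legacy dygraph mode,
        # mirroring the func_*/test_* split used in the patch above.
        with _test_eager_guard():
            self.func_relu()
        self.func_relu()


if __name__ == '__main__':
    unittest.main()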