未验证 提交 47875ba7 编写于 作者: 傅剑寒 提交者: GitHub

remove hard_sigmoid in nn.py under fluid (#47890)

* remove hard_sigmoid in nn.py under fluid

* fix hardsigmoid test case

* fix hardsigmoid test case
上级 7d6a4a54
...@@ -124,7 +124,6 @@ __all__ = [ ...@@ -124,7 +124,6 @@ __all__ = [
'log', 'log',
'crop_tensor', 'crop_tensor',
'pow', 'pow',
'hard_sigmoid',
'prelu', 'prelu',
'brelu', 'brelu',
'leaky_relu', 'leaky_relu',
...@@ -9056,50 +9055,6 @@ def pow(x, factor=1.0, name=None): ...@@ -9056,50 +9055,6 @@ def pow(x, factor=1.0, name=None):
return out return out
@templatedoc()
def hard_sigmoid(x, slope=0.2, offset=0.5, name=None):
    """
    ${comment}
    Parameters:
        x (${x_type}): ${x_comment}
        slope (float, optional): ${slope_comment}
        offset (float, optional): ${offset_comment}
        name (str, optional): The default value is None. Normally there is no
            need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name`
    Returns:
        ${out_type}: ${out_comment}
    Examples:
        .. code-block:: python
            import paddle.fluid as fluid
            import paddle
            paddle.enable_static()
            data = fluid.layers.fill_constant(shape=[3, 2], value=0.5, dtype='float32') # [[0.5, 0.5], [0.5, 0.5], [0.5, 0.5]]
            result = fluid.layers.hard_sigmoid(data) # [[0.6, 0.6], [0.6, 0.6], [0.6, 0.6]]
    """
    # NOTE(review): the ${...} placeholders above are filled in by the
    # @templatedoc() decorator from the C++ operator's registered docs;
    # do not edit them by hand.

    # Dynamic-graph fast path: call the legacy C++ op directly, passing
    # the attributes as flattened ('name', value) pairs.
    if _non_static_mode():
        return _legacy_C_ops.hard_sigmoid(x, 'slope', slope, 'offset', offset)

    # Static-graph path: validate the input dtype up front so the user
    # gets a clear error instead of a failure deep inside the op.
    check_variable_and_dtype(
        x, 'x', ['float16', 'float32', 'float64'], 'hard_sigmoid'
    )

    # Append a 'hard_sigmoid' op to the current program; the output
    # variable inherits the input's dtype.
    helper = LayerHelper('hard_sigmoid', **locals())
    out = helper.create_variable_for_type_inference(dtype=x.dtype)
    helper.append_op(
        type='hard_sigmoid',
        inputs={'X': x},
        outputs={'Out': out},
        attrs={'slope': slope, 'offset': offset},
    )
    return out
@deprecated(since="2.0.0", update_to="paddle.static.nn.prelu") @deprecated(since="2.0.0", update_to="paddle.static.nn.prelu")
def prelu(x, mode, param_attr=None, data_format="NCHW", name=None): def prelu(x, mode, param_attr=None, data_format="NCHW", name=None):
r""" r"""
......
...@@ -92,7 +92,7 @@ class TensorRTSubgraphPassHardSwishTest(TensorRTSubgraphPassActivationTest): ...@@ -92,7 +92,7 @@ class TensorRTSubgraphPassHardSwishTest(TensorRTSubgraphPassActivationTest):
class TensorRTSubgraphPassHardSigmoidTest(TensorRTSubgraphPassActivationTest): class TensorRTSubgraphPassHardSigmoidTest(TensorRTSubgraphPassActivationTest):
def append_act(self, x): def append_act(self, x):
return fluid.layers.hard_sigmoid(x) return paddle.nn.functional.hardsigmoid(x)
class TensorRTSubgraphPassHardSwishPluginTest( class TensorRTSubgraphPassHardSwishPluginTest(
......
...@@ -162,7 +162,7 @@ class TestHardsigmoidAPI(unittest.TestCase): ...@@ -162,7 +162,7 @@ class TestHardsigmoidAPI(unittest.TestCase):
paddle.enable_static() paddle.enable_static()
with fluid.program_guard(fluid.Program()): with fluid.program_guard(fluid.Program()):
x = fluid.data('X', self.x_np.shape, self.x_np.dtype) x = fluid.data('X', self.x_np.shape, self.x_np.dtype)
out = fluid.layers.hard_sigmoid(x) out = paddle.nn.functional.hardsigmoid(x)
exe = fluid.Executor(self.place) exe = fluid.Executor(self.place)
res = exe.run(feed={'X': self.x_np}, fetch_list=[out]) res = exe.run(feed={'X': self.x_np}, fetch_list=[out])
out_ref = ref_hardsigmoid(self.x_np, 0.2, 0.5) out_ref = ref_hardsigmoid(self.x_np, 0.2, 0.5)
...@@ -170,7 +170,7 @@ class TestHardsigmoidAPI(unittest.TestCase): ...@@ -170,7 +170,7 @@ class TestHardsigmoidAPI(unittest.TestCase):
paddle.disable_static(self.place) paddle.disable_static(self.place)
x = paddle.to_tensor(self.x_np) x = paddle.to_tensor(self.x_np)
out = paddle.fluid.layers.hard_sigmoid(x) out = paddle.nn.functional.hardsigmoid(x)
np.testing.assert_allclose(out_ref, out.numpy()) np.testing.assert_allclose(out_ref, out.numpy())
paddle.enable_static() paddle.enable_static()
......
...@@ -123,7 +123,7 @@ class TestHardsigmoidAPI(unittest.TestCase): ...@@ -123,7 +123,7 @@ class TestHardsigmoidAPI(unittest.TestCase):
def test_fluid_api(self): def test_fluid_api(self):
with fluid.program_guard(fluid.Program()): with fluid.program_guard(fluid.Program()):
x = fluid.data('X', self.x_np.shape, self.x_np.dtype) x = fluid.data('X', self.x_np.shape, self.x_np.dtype)
out = fluid.layers.hard_sigmoid(x) out = paddle.nn.functional.hardsigmoid(x)
exe = fluid.Executor(self.place) exe = fluid.Executor(self.place)
res = exe.run(feed={'X': self.x_np}, fetch_list=[out]) res = exe.run(feed={'X': self.x_np}, fetch_list=[out])
out_ref = ref_hardsigmoid(self.x_np, 0.2, 0.5) out_ref = ref_hardsigmoid(self.x_np, 0.2, 0.5)
...@@ -131,7 +131,7 @@ class TestHardsigmoidAPI(unittest.TestCase): ...@@ -131,7 +131,7 @@ class TestHardsigmoidAPI(unittest.TestCase):
paddle.disable_static(self.place) paddle.disable_static(self.place)
x = paddle.to_tensor(self.x_np) x = paddle.to_tensor(self.x_np)
out = paddle.fluid.layers.hard_sigmoid(x) out = paddle.nn.functional.hardsigmoid(x)
np.testing.assert_allclose(out_ref, out.numpy()) np.testing.assert_allclose(out_ref, out.numpy())
paddle.enable_static() paddle.enable_static()
......
...@@ -3338,7 +3338,7 @@ class TestHardsigmoidAPI(unittest.TestCase): ...@@ -3338,7 +3338,7 @@ class TestHardsigmoidAPI(unittest.TestCase):
def test_fluid_api(self): def test_fluid_api(self):
with fluid.program_guard(fluid.Program()): with fluid.program_guard(fluid.Program()):
x = fluid.data('X', self.x_np.shape, self.x_np.dtype) x = fluid.data('X', self.x_np.shape, self.x_np.dtype)
out = fluid.layers.hard_sigmoid(x) out = paddle.nn.functional.hardsigmoid(x, slope=0.2)
exe = fluid.Executor(self.place) exe = fluid.Executor(self.place)
res = exe.run(feed={'X': self.x_np}, fetch_list=[out]) res = exe.run(feed={'X': self.x_np}, fetch_list=[out])
out_ref = ref_hardsigmoid(self.x_np, 0.2, 0.5) out_ref = ref_hardsigmoid(self.x_np, 0.2, 0.5)
...@@ -3346,7 +3346,7 @@ class TestHardsigmoidAPI(unittest.TestCase): ...@@ -3346,7 +3346,7 @@ class TestHardsigmoidAPI(unittest.TestCase):
paddle.disable_static(self.place) paddle.disable_static(self.place)
x = paddle.to_tensor(self.x_np) x = paddle.to_tensor(self.x_np)
out = paddle.fluid.layers.hard_sigmoid(x) out = paddle.nn.functional.hardsigmoid(x, slope=0.2)
np.testing.assert_allclose(out_ref, out.numpy(), rtol=1e-05) np.testing.assert_allclose(out_ref, out.numpy(), rtol=1e-05)
paddle.enable_static() paddle.enable_static()
......
...@@ -896,7 +896,7 @@ class TestDygraphUtils(unittest.TestCase): ...@@ -896,7 +896,7 @@ class TestDygraphUtils(unittest.TestCase):
with fluid.dygraph.guard(): with fluid.dygraph.guard():
a = paddle.to_tensor(a_np) a = paddle.to_tensor(a_np)
res1 = func(a, act="hard_sigmoid") res1 = func(a, act="hard_sigmoid")
res2 = fluid.layers.hard_sigmoid(a) res2 = paddle.nn.functional.hardsigmoid(a, slope=0.2)
np.testing.assert_array_equal(res1.numpy(), res2.numpy()) np.testing.assert_array_equal(res1.numpy(), res2.numpy())
def test_append_activation_in_dygraph1(self): def test_append_activation_in_dygraph1(self):
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册