diff --git a/python/paddle/fluid/dygraph/math_op_patch.py b/python/paddle/fluid/dygraph/math_op_patch.py
index f9fe4198fec3a0a2237c0bcac6e20f4269160589..3aa7b9dfc262810686319819f717f3cfd06b5e50 100644
--- a/python/paddle/fluid/dygraph/math_op_patch.py
+++ b/python/paddle/fluid/dygraph/math_op_patch.py
@@ -285,7 +285,7 @@ def monkey_patch_math_varbase():
         ('__ge__', _binary_creator_('__ge__', 'greater_equal', False, None)),
         ('__array_ufunc__', None),
         ('sigmoid', _method_creator_('sigmoid', 'name=None')),
-        ('logsigmoid', _method_creator_('logsigmoid', 'name=None')),
+        ('log_sigmoid', _method_creator_('logsigmoid', 'name=None')),
         ('exp', _method_creator_('exp', 'name=None')),
         ('tanh', _method_creator_('tanh', 'name=None')),
         ('atan', _method_creator_('atan', 'name=None')),
diff --git a/python/paddle/fluid/layers/ops.py b/python/paddle/fluid/layers/ops.py
index 1efae3ddf1f3422a53f69c4b5b8eeec6183fae96..6cdc617a0dc17ae9f0893083285c404ca73712f7 100644
--- a/python/paddle/fluid/layers/ops.py
+++ b/python/paddle/fluid/layers/ops.py
@@ -20,7 +20,10 @@ from ..framework import convert_np_dtype_to_dtype_, Variable
 from ..data_feeder import convert_dtype, check_variable_and_dtype, check_type, check_dtype
 from paddle.utils import deprecated
 
-__deprecated_func_name__ = {'tanh_shrink': 'tanhshrink', }
+__deprecated_func_name__ = {
+    'tanh_shrink': 'tanhshrink',
+    'logsigmoid': 'log_sigmoid'
+}
 
 __activations_noattr__ = [
     'sigmoid',
@@ -106,7 +109,7 @@ Examples:
 
         paddle.disable_static()
         x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
-        out = F.logsigmoid(x)
+        out = F.log_sigmoid(x)
         print(out.numpy())
         # [-0.91301525 -0.79813887 -0.64439666 -0.55435524]
 
diff --git a/python/paddle/fluid/tests/unittests/test_activation_op.py b/python/paddle/fluid/tests/unittests/test_activation_op.py
index ab61a5b3cfccb0e885debe9786ae91a9754e9345..f6ba03194aa909279aa2cd884fc575041b01a4cd 100755
--- a/python/paddle/fluid/tests/unittests/test_activation_op.py
+++ b/python/paddle/fluid/tests/unittests/test_activation_op.py
@@ -128,7 +128,7 @@ class TestLogSigmoid(TestActivation):
 
 
 class TestLogSigmoidAPI(unittest.TestCase):
-    # test paddle.nn.LogSigmoid, paddle.nn.functional.logsigmoid
+    # test paddle.nn.LogSigmoid, paddle.nn.functional.log_sigmoid
     def setUp(self):
         self.x_np = np.random.uniform(-1, 1, [11, 17]).astype('float32')
         self.place=paddle.CUDAPlace(0) if core.is_compiled_with_cuda() \
@@ -137,36 +137,45 @@ class TestLogSigmoidAPI(unittest.TestCase):
     def test_static_api(self):
         with paddle.static.program_guard(paddle.static.Program()):
             x = paddle.data('X', [11, 17])
-            out1 = F.logsigmoid(x)
+            out1 = F.log_sigmoid(x)
             m = paddle.nn.LogSigmoid()
             out2 = m(x)
             exe = paddle.static.Executor(self.place)
             res = exe.run(feed={'X': self.x_np}, fetch_list=[out1, out2])
         out_ref = np.log(1 / (1 + np.exp(-self.x_np)))
         for r in res:
-            self.assertEqual(np.allclose(out_ref, r), True)
+            self.assertTrue(np.allclose(out_ref, r))
 
     def test_dygraph_api(self):
         paddle.disable_static(self.place)
         x = paddle.to_tensor(self.x_np)
-        out1 = F.logsigmoid(x)
+        out1 = F.log_sigmoid(x)
         m = paddle.nn.LogSigmoid()
         out2 = m(x)
         out_ref = np.log(1 / (1 + np.exp(-self.x_np)))
         for r in [out1, out2]:
-            self.assertEqual(np.allclose(out_ref, r.numpy()), True)
+            self.assertTrue(np.allclose(out_ref, r.numpy()))
         paddle.enable_static()
 
+    def test_fluid_api(self):
+        with paddle.static.program_guard(paddle.static.Program()):
+            x = paddle.data('X', [11, 17])
+            out = paddle.fluid.layers.logsigmoid(x)
+            exe = paddle.static.Executor(self.place)
+            res = exe.run(feed={'X': self.x_np}, fetch_list=[out])
+        out_ref = np.log(1 / (1 + np.exp(-self.x_np)))
+        self.assertTrue(np.allclose(out_ref, res[0]))
+
     def test_errors(self):
         with paddle.static.program_guard(paddle.static.Program()):
             # The input type must be Variable.
-            self.assertRaises(TypeError, F.logsigmoid, 1)
+            self.assertRaises(TypeError, F.log_sigmoid, 1)
             # The input dtype must be float16, float32, float64.
             x_int32 = paddle.data(name='x_int32', shape=[11, 17], dtype='int32')
-            self.assertRaises(TypeError, F.logsigmoid, x_int32)
+            self.assertRaises(TypeError, F.log_sigmoid, x_int32)
             # support the input dtype is float16
             x_fp16 = paddle.data(name='x_fp16', shape=[11, 17], dtype='float16')
-            F.logsigmoid(x_fp16)
+            F.log_sigmoid(x_fp16)
 
 
 class TestTanh(TestActivation, TestParameter):
diff --git a/python/paddle/fluid/tests/unittests/test_layers.py b/python/paddle/fluid/tests/unittests/test_layers.py
index 89e9f7aad8581228411b1983580ced5566e65765..26073f49bdd3d494da7b39346c5bafb2aefba56a 100644
--- a/python/paddle/fluid/tests/unittests/test_layers.py
+++ b/python/paddle/fluid/tests/unittests/test_layers.py
@@ -2677,13 +2677,6 @@ class TestBook(LayerTest):
             out = layers.sigmoid(input, name='sigmoid')
             return (out)
 
-    def make_logsigmoid(self):
-        with program_guard(fluid.default_main_program(),
-                           fluid.default_startup_program()):
-            input = self._get_data(name="input", shape=[16], dtype="float32")
-            out = layers.logsigmoid(input, name='logsigmoid')
-            return (out)
-
     def make_exp(self):
         with program_guard(fluid.default_main_program(),
                            fluid.default_startup_program()):
diff --git a/python/paddle/fluid/tests/unittests/test_math_op_patch_var_base.py b/python/paddle/fluid/tests/unittests/test_math_op_patch_var_base.py
index 9bb12d546550a821e8a133dd9c91d5d41a50b1b2..a70862f40197c513a0cd04753553264708ee2a1c 100644
--- a/python/paddle/fluid/tests/unittests/test_math_op_patch_var_base.py
+++ b/python/paddle/fluid/tests/unittests/test_math_op_patch_var_base.py
@@ -307,7 +307,7 @@ class TestMathOpPatchesVarBase(unittest.TestCase):
             np.array_equal(x.sigmoid().numpy(), fluid.layers.sigmoid(x).numpy(
             )))
         self.assertTrue(
-            np.array_equal(x.logsigmoid().numpy(),
+            np.array_equal(x.log_sigmoid().numpy(),
                            fluid.layers.logsigmoid(x).numpy()))
         self.assertTrue(np.array_equal(x.exp().numpy(), paddle.exp(x).numpy()))
         self.assertTrue(
diff --git a/python/paddle/nn/functional/__init__.py b/python/paddle/nn/functional/__init__.py
index f3cc8c610ff4da16b6333931913396d84cc05981..163c249ab37457d7d4566553c71e3231f384a8b1 100644
--- a/python/paddle/nn/functional/__init__.py
+++ b/python/paddle/nn/functional/__init__.py
@@ -39,7 +39,7 @@ from .activation import hard_sigmoid  #DEFINE_ALIAS
 from .activation import hard_swish  #DEFINE_ALIAS
 from .activation import hsigmoid  #DEFINE_ALIAS
 from .activation import leaky_relu  #DEFINE_ALIAS
-from .activation import logsigmoid  #DEFINE_ALIAS
+from .activation import log_sigmoid  #DEFINE_ALIAS
 from .activation import maxout  #DEFINE_ALIAS
 from .activation import prelu  #DEFINE_ALIAS
 from .activation import relu  #DEFINE_ALIAS
diff --git a/python/paddle/nn/functional/activation.py b/python/paddle/nn/functional/activation.py
index ffedb027330bda94db86dc0943a5c4a7281f254f..f7bbe0c94e03dc48ebfb21a62aeded9f446afc63 100644
--- a/python/paddle/nn/functional/activation.py
+++ b/python/paddle/nn/functional/activation.py
@@ -35,7 +35,7 @@ __all__ = [
     'hard_swish',
     'hsigmoid',
    'leaky_relu',
-    'logsigmoid',
+    'log_sigmoid',
     'maxout',
     'prelu',
     'relu',
@@ -552,13 +552,13 @@ def relu(x, name=None):
     return out
 
 
-def logsigmoid(x, name=None):
+def log_sigmoid(x, name=None):
     """
-    logsigmoid activation.
+    log_sigmoid activation.
 
     .. math::
 
-        logsigmoid(x) = log \\frac{1}{1 + e^{-x}}
+        log\\_sigmoid(x) = log \\frac{1}{1 + e^{-x}}
 
     Parameters:
         x (Tensor): The input Tensor with data type float32, float64.
@@ -573,20 +573,19 @@ Examples:
         .. code-block:: python
 
             import paddle
             import paddle.nn.functional as F
-            import numpy as np
 
             paddle.disable_static()
-            x = paddle.to_tensor(np.array([1.0, 2.0, 3.0, 4.0]))
-            out = F.logsigmoid(x) # [-0.313262 -0.126928 -0.0485874 -0.0181499]
+            x = paddle.to_tensor([1.0, 2.0, 3.0, 4.0])
+            out = F.log_sigmoid(x) # [-0.313262 -0.126928 -0.0485874 -0.0181499]
     """
 
     if in_dygraph_mode():
         return core.ops.logsigmoid(x)
 
     check_variable_and_dtype(x, 'x', ['float16', 'float32', 'float64'],
-                             'logsigmoid')
-    helper = LayerHelper("logsigmoid", **locals())
+                             'log_sigmoid')
+    helper = LayerHelper("log_sigmoid", **locals())
     out = helper.create_variable_for_type_inference(x.dtype)
     helper.append_op(type='logsigmoid', inputs={'X': x}, outputs={'Out': out})
     return out
diff --git a/python/paddle/nn/layer/activation.py b/python/paddle/nn/layer/activation.py
index c38d6018a2500111280a482aa60d072e65e27742..585d369c607e5b6eb6a2a3bcb28bd8999a2e0dca 100644
--- a/python/paddle/nn/layer/activation.py
+++ b/python/paddle/nn/layer/activation.py
@@ -860,11 +860,10 @@ class LogSigmoid(layers.Layer):
         .. code-block:: python
 
             import paddle
-            import numpy as np
 
             paddle.disable_static()
 
-            x = paddle.to_tensor(np.array([1.0, 2.0, 3.0, 4.0]))
+            x = paddle.to_tensor([1.0, 2.0, 3.0, 4.0])
             m = paddle.nn.LogSigmoid()
             out = m(x) # [-0.313262 -0.126928 -0.0485874 -0.0181499]
     """
@@ -874,7 +873,7 @@
         self._name = name
 
     def forward(self, x):
-        return F.logsigmoid(x, self._name)
+        return F.log_sigmoid(x, self._name)
 
 
 class Softmax(layers.Layer):
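
For reference, a minimal usage sketch of the renamed API once this patch is applied; it is not part of the diff and assumes a Paddle build that already includes the rename. The expected values are taken from the docstring examples above.

import paddle
import paddle.nn.functional as F

paddle.disable_static()
x = paddle.to_tensor([1.0, 2.0, 3.0, 4.0])

# Functional form, renamed from F.logsigmoid.
out1 = F.log_sigmoid(x)  # [-0.313262 -0.126928 -0.0485874 -0.0181499]

# Layer form; the class name LogSigmoid is unchanged.
m = paddle.nn.LogSigmoid()
out2 = m(x)  # same values as out1

# The legacy op name paddle.fluid.layers.logsigmoid keeps working through the
# __deprecated_func_name__ mapping added in ops.py; the new test_fluid_api
# case above covers that path.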