From 4dfc375a26affbb6ac92ed69b1b56b879cc28ce2 Mon Sep 17 00:00:00 2001
From: Xiaoyu Zhang <35585791+BBuf@users.noreply.github.com>
Date: Thu, 20 May 2021 00:53:39 +0800
Subject: [PATCH] add elu module (#4924)

Co-authored-by: oneflow-ci-bot <69100618+oneflow-ci-bot@users.noreply.github.com>
---
 docs/source/experimental.rst               |  1 +
 oneflow/python/nn/modules/activation.py    | 49 +++++++++++++++++++
 .../python/test/modules/test_activation.py | 22 +++++++++
 3 files changed, 72 insertions(+)

diff --git a/docs/source/experimental.rst b/docs/source/experimental.rst
index 3cc88d87b7..9cb04f8af6 100644
--- a/docs/source/experimental.rst
+++ b/docs/source/experimental.rst
@@ -9,6 +9,7 @@ Experimental features
 .. autofunction:: oneflow.experimental.nn.Tanh
 .. autofunction:: oneflow.experimental.tanh
 .. autofunction:: oneflow.experimental.Tensor.tanh
+.. autofunction:: oneflow.experimental.nn.ELU
 .. autofunction:: oneflow.experimental.nn.GELU
 .. autofunction:: oneflow.experimental.gelu
 .. autofunction:: oneflow.experimental.Tensor.gelu
diff --git a/oneflow/python/nn/modules/activation.py b/oneflow/python/nn/modules/activation.py
index 95699e5f5f..6f24c8973f 100644
--- a/oneflow/python/nn/modules/activation.py
+++ b/oneflow/python/nn/modules/activation.py
@@ -208,6 +208,55 @@ def tanh_op(x):
     return Tanh()(x)
 
 
+@oneflow_export("nn.ELU")
+@experimental_api
+class ELU(Module):
+    r"""Applies the element-wise function:
+
+    .. math::
+
+        \text{ELU}(x) = \begin{cases}
+            x & \text{ if } x \gt 0 \\
+            \alpha * (\exp(x) - 1) & \text{ if } x \le 0 \\
+        \end{cases}
+
+    Args:
+        alpha: the :math:`\alpha` value for the ELU formulation. Default: 1.0
+        inplace: can optionally do the operation in-place. Default: ``False``
+
+    Shape:
+        - Input: :math:`(N, *)` where `*` means any number of additional
+          dimensions
+        - Output: :math:`(N, *)`, same shape as the input
+
+    For example:
+
+    .. code-block:: python
+
+        import oneflow.experimental as flow
+
+        m = flow.nn.ELU()
+        input = flow.randn(2)
+        output = m(input)
+
+    """
+
+    def __init__(self, alpha: float = 1.0, inplace: bool = False):
+        super().__init__()
+        assert not inplace, "ELU does not support inplace=True yet"
+        self._op = (
+            flow.builtin_op("elu")
+            .Input("in")
+            .Attr("alpha", alpha)
+            .Output("out")
+            .Build()
+        )
+
+    def forward(self, x):
+        res = self._op(x)[0]
+        return res
+
+
 @oneflow_export("nn.GELU")
 @experimental_api
 class GELU(Module):
diff --git a/oneflow/python/test/modules/test_activation.py b/oneflow/python/test/modules/test_activation.py
index 6b61d24a2f..2796b913dd 100644
--- a/oneflow/python/test/modules/test_activation.py
+++ b/oneflow/python/test/modules/test_activation.py
@@ -99,6 +99,28 @@ class TestTanhModule(flow.unittest.TestCase):
         test_case.assertTrue(np.allclose(y.numpy(), z, rtol=1e-4, atol=1e-4))
 
 
+@unittest.skipIf(
+    not flow.unittest.env.eager_execution_enabled(),
+    ".numpy() doesn't work in lazy mode",
+)
+class TestELUModule(flow.unittest.TestCase):
+    def test_elu(test_case):
+        m = flow.nn.ELU()
+        arr = np.random.randn(2, 3, 4, 5)
+        np_out = np.where(arr > 0, arr, 1.0 * (np.exp(arr) - 1))
+        x = flow.Tensor(arr)
+        of_out = m(x)
+        test_case.assertTrue(np.allclose(of_out.numpy(), np_out, rtol=1e-4, atol=1e-4))
+
+    def test_elu_alpha(test_case):
+        m = flow.nn.ELU(alpha=1.2)
+        arr = np.random.randn(2, 3, 4, 5)
+        np_out = np.where(arr > 0, arr, 1.2 * (np.exp(arr) - 1))
+        x = flow.Tensor(arr)
+        of_out = m(x)
+        test_case.assertTrue(np.allclose(of_out.numpy(), np_out, rtol=1e-4, atol=1e-4))
+
+
 @unittest.skipIf(
     not flow.unittest.env.eager_execution_enabled(),
    ".numpy() doesn't work in lazy mode",
-- 
GitLab
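
For reference, a minimal usage sketch of the new `flow.nn.ELU` module added by this patch. It assumes eager execution is enabled (the tests above skip otherwise) and mirrors the `test_elu_alpha` case from the test file; the variable names and tensor shape are illustrative only, not part of the patch.

    import numpy as np
    import oneflow.experimental as flow

    # Construct the module with a non-default alpha; internally it dispatches
    # to the registered "elu" builtin op shown in activation.py above.
    m = flow.nn.ELU(alpha=1.2)

    arr = np.random.randn(2, 3)
    x = flow.Tensor(arr)
    out = m(x)

    # NumPy reference: ELU(x) = x if x > 0 else alpha * (exp(x) - 1)
    ref = np.where(arr > 0, arr, 1.2 * (np.exp(arr) - 1))
    print(np.allclose(out.numpy(), ref, rtol=1e-4, atol=1e-4))  # expected: True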