From 56d624f5be1420cbe48b7d9df696352a9f5eae2d Mon Sep 17 00:00:00 2001
From: Megvii Engine Team
Date: Fri, 9 Oct 2020 22:31:45 +0800
Subject: [PATCH] feat(mge): rename log_softmax -> logsoftmax

GitOrigin-RevId: 40ccfea5a764a2ba37d5e53bbfbac65b8bf84d79
---
 imperative/python/megengine/functional/nn.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/imperative/python/megengine/functional/nn.py b/imperative/python/megengine/functional/nn.py
index ce5cc7841..ae03fb5b7 100644
--- a/imperative/python/megengine/functional/nn.py
+++ b/imperative/python/megengine/functional/nn.py
@@ -40,7 +40,7 @@ __all__ = [
     "local_conv2d",
     "logsigmoid",
     "logsumexp",
-    "log_softmax",
+    "logsoftmax",
     "matmul",
     "max_pool2d",
     "nms",
@@ -421,7 +421,7 @@ def softplus(inp: Tensor) -> Tensor:
     return log1p(exp(-abs(inp))) + relu(inp)


-def log_softmax(inp: Tensor, axis: Union[int, Sequence[int]]) -> Tensor:
+def logsoftmax(inp: Tensor, axis: Union[int, Sequence[int]]) -> Tensor:
     r"""Applies the :math:`\log(\text{Softmax}(x))` function to an
     n-dimensional input Tensor. The LogSoftmax formulation can be
     simplified as:
@@ -437,7 +437,7 @@ def log_softmax(inp: Tensor, axis: Union[int, Sequence[int]]) -> Tensor:
         = x - logsumexp(x)

     :param inp: input tensor.
-    :param axis: axis along which log_softmax will be applied.
+    :param axis: axis along which logsoftmax will be applied.

     Examples:

@@ -448,7 +448,7 @@ def log_softmax(inp: Tensor, axis: Union[int, Sequence[int]]) -> Tensor:
         import megengine.functional as F

         x = tensor(np.arange(-5, 5, dtype=np.float32)).reshape(2,5)
-        y = F.log_softmax(x, axis=1)
+        y = F.logsoftmax(x, axis=1)
         print(y.numpy())

     Outputs:
--
GitLab
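
Note (not part of the patch): the rename changes only the public name; the semantics stay as described in the docstring, where LogSoftmax reduces to x - logsumexp(x) along the chosen axis. Below is a minimal NumPy sketch of that identity, using the same input as the docstring example. The local logsumexp helper is an illustrative stand-in, not MegEngine's implementation.

import numpy as np

def logsumexp(x, axis, keepdims=True):
    # Numerically stable log-sum-exp via the max-shift trick.
    m = np.max(x, axis=axis, keepdims=True)
    out = m + np.log(np.sum(np.exp(x - m), axis=axis, keepdims=True))
    return out if keepdims else np.squeeze(out, axis=axis)

x = np.arange(-5, 5, dtype=np.float32).reshape(2, 5)

# Direct form: log(softmax(x)) along axis 1.
direct = np.log(np.exp(x) / np.exp(x).sum(axis=1, keepdims=True))

# Simplified form from the docstring: x - logsumexp(x).
simplified = x - logsumexp(x, axis=1)

assert np.allclose(direct, simplified, atol=1e-6)
print(simplified)  # matches the rows shown in the docstring example output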