diff --git a/doc/fluid/api/gen_doc.sh b/doc/fluid/api/gen_doc.sh
index f30d5560880385d42b6cd0b60d8b619a90ed771b..5284b277e24cf9ea8eeaf79c0aeb86c8fe5f6904 100644
--- a/doc/fluid/api/gen_doc.sh
+++ b/doc/fluid/api/gen_doc.sh
@@ -30,7 +30,7 @@ python gen_module_index.py framework paddle.framework
 
 # nn
-for module in loss
+for module in loss activation
 do
   python gen_doc.py --module_name ${module} --module_prefix ${module} --output ${module} --output_name nn --to_multiple_files True --output_dir nn
   python gen_module_index.py nn.${module} ${module}
diff --git a/doc/fluid/api/nn.rst b/doc/fluid/api/nn.rst
index 60a4913abe305e8aceb3ff7335e253f47a635cc0..cfea0bdde22c80b86fadb444aaeee731f14d0eda 100644
--- a/doc/fluid/api/nn.rst
+++ b/doc/fluid/api/nn.rst
@@ -82,6 +82,7 @@ paddle.nn
     nn/Linear.rst
     nn/linear_lr_warmup.rst
     nn/log_loss.rst
+    nn/log_softmax.rst
     nn/logsigmoid.rst
     nn/loss.rst
     nn/lrn.rst
diff --git a/doc/fluid/api/nn/log_softmax.rst b/doc/fluid/api/nn/log_softmax.rst
new file mode 100644
index 0000000000000000000000000000000000000000..88e8b52219798fb016f567414ac88157e4e107b6
--- /dev/null
+++ b/doc/fluid/api/nn/log_softmax.rst
@@ -0,0 +1,10 @@
+.. THIS FILE IS GENERATED BY `gen_doc.{py|sh}`
+    !DO NOT EDIT THIS FILE MANUALLY!
+
+.. _api_nn_log_softmax:
+
+log_softmax
+-----------
+
+.. autofunction:: paddle.nn.functional.log_softmax
+    :noindex:
\ No newline at end of file
diff --git a/doc/fluid/api_cn/nn_cn.rst b/doc/fluid/api_cn/nn_cn.rst
index f377c90c7ab19a91228ea2eb3799c58f07b2a329..f7e47d72f084cff5c7bf4879d6ab59019455935d 100644
--- a/doc/fluid/api_cn/nn_cn.rst
+++ b/doc/fluid/api_cn/nn_cn.rst
@@ -94,6 +94,7 @@ paddle.nn
     nn_cn/linear_lr_warmup_cn.rst
     nn_cn/logsigmoid_cn.rst
     nn_cn/log_loss_cn.rst
+    nn_cn/log_softmax_cn.rst
     nn_cn/lrn_cn.rst
     nn_cn/margin_ranking_loss_cn.rst
     nn_cn/maxout_cn.rst
diff --git a/doc/fluid/api_cn/nn_cn/LogSoftmax_cn.rst b/doc/fluid/api_cn/nn_cn/LogSoftmax_cn.rst
deleted file mode 100644
index 72ed06ecc1caa1b4e7296274e50df6dc623da1e3..0000000000000000000000000000000000000000
--- a/doc/fluid/api_cn/nn_cn/LogSoftmax_cn.rst
+++ /dev/null
@@ -1,47 +0,0 @@
-.. _cn_api_nn_LogSoftmax:
-
-LogSoftmax
--------------------------------
-.. py:class:: paddle.nn.LogSoftmax(axis=None)
-
-:alias_main: paddle.nn.LogSoftmax
-:alias: paddle.nn.LogSoftmax,paddle.nn.layer.LogSoftmax,paddle.nn.layer.activation.LogSoftmax
-
-
-
-
-**LogSoftmax activation layer:**
-
-.. math::
-
-    \\output = \frac{1}{1 + e^{-input}}\\
-
-Parameters:
-    - **axis** (int, optional) - Index of the dimension along which to compute LogSoftmax. Its valid range is :math:`[-1, rank-1]`, where rank is the rank of the input variable. Default: None (same effect as -1, i.e. LogSoftmax is applied to the last dimension).
-
-Returns: None
-
-**Code example**
-
-.. code-block:: python
-
-    import paddle.fluid as fluid
-    import paddle.nn as nn
-    import numpy as np
-
-    data = np.array([[[-2.0, 3.0, -4.0, 5.0],
-                      [3.0, -4.0, 5.0, -6.0],
-                      [-7.0, -8.0, 8.0, 9.0]],
-                     [[1.0, -2.0, -3.0, 4.0],
-                      [-5.0, 6.0, 7.0, -8.0],
-                      [6.0, 7.0, 8.0, 9.0]]]).astype('float32')
-    my_log_softnmax = nn.LogSoftmax()
-    with fluid.dygraph.guard():
-        data = fluid.dygraph.to_variable(data)
-        res = my_log_softnmax(data)
-        # [[[ -7.1278396   -2.1278396   -9.127839     -0.12783948]
-        #   [ -2.1270514   -9.127051    -0.12705144  -11.127051  ]
-        #   [-16.313261   -17.313261    -1.3132617    -0.31326184]]
-        #  [[ -3.0518122   -6.051812    -7.051812     -0.051812  ]
-        #   [-12.313267    -1.3132664   -0.3132665   -15.313267  ]
-        #   [ -3.4401896   -2.4401896   -1.4401896    -0.44018966]]]
diff --git a/doc/fluid/api_cn/nn_cn/activation_cn.rst b/doc/fluid/api_cn/nn_cn/activation_cn.rst
index 337a4a8d926c8fdd9062098c283707ed8fac9853..1737e44725ff62785c64d90d52e7c308eee87851 100644
--- a/doc/fluid/api_cn/nn_cn/activation_cn.rst
+++ b/doc/fluid/api_cn/nn_cn/activation_cn.rst
@@ -10,4 +10,5 @@ activation
 
     activation_cn/Hardshrink_cn.rst
     activation_cn/LeakyReLU_cn.rst
+    activation_cn/LogSoftmax_cn.rst
     activation_cn/Sigmoid_cn.rst
diff --git a/doc/fluid/api_cn/nn_cn/activation_cn/LogSoftmax_cn.rst b/doc/fluid/api_cn/nn_cn/activation_cn/LogSoftmax_cn.rst
new file mode 100644
index 0000000000000000000000000000000000000000..96bbc3a886f9535bf02cb1954645d5182e669120
--- /dev/null
+++ b/doc/fluid/api_cn/nn_cn/activation_cn/LogSoftmax_cn.rst
@@ -0,0 +1,47 @@
+.. _cn_api_nn_LogSoftmax:
+
+LogSoftmax
+-------------------------------
+.. py:class:: paddle.nn.LogSoftmax(axis=-1, name=None)
+
+LogSoftmax activation layer, computed as:
+
+.. math::
+
+    Out[i, j] = \log(\mathrm{softmax}(x))
+              = \log\left(\frac{\exp(X[i, j])}{\sum_j \exp(X[i, j])}\right)
+
+Parameters
+::::::::::
+    - axis (int, optional) - The axis along which to compute LogSoftmax for the input Tensor. The valid range of ``axis`` is [-D, D), where D is the number of dimensions of the input Tensor; a negative ``axis`` is equivalent to :math:`axis + D`. Default: -1.
+    - name (str, optional) - Name of the operation (optional; default is None). For more information, please refer to :ref:`api_guide_Name`.
+
+Shape:
+    - input: Tensor of any shape.
+    - output: Tensor with the same shape as input.
+
+Code example
+:::::::::
+
+.. code-block:: python
+
+    import paddle
+    import numpy as np
+
+    paddle.disable_static()
+
+    x = np.array([[[-2.0, 3.0, -4.0, 5.0],
+                   [3.0, -4.0, 5.0, -6.0],
+                   [-7.0, -8.0, 8.0, 9.0]],
+                  [[1.0, -2.0, -3.0, 4.0],
+                   [-5.0, 6.0, 7.0, -8.0],
+                   [6.0, 7.0, 8.0, 9.0]]], 'float32')
+    m = paddle.nn.LogSoftmax()
+    x = paddle.to_tensor(x)
+    out = m(x)
+    # [[[ -7.1278396   -2.1278396   -9.127839     -0.12783948]
+    #   [ -2.1270514   -9.127051    -0.12705144  -11.127051  ]
+    #   [-16.313261   -17.313261    -1.3132617    -0.31326184]]
+    #  [[ -3.0518122   -6.051812    -7.051812     -0.051812  ]
+    #   [-12.313267    -1.3132664   -0.3132665   -15.313267  ]
+    #   [ -3.4401896   -2.4401896   -1.4401896    -0.44018966]]]
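The log-softmax values in the example above are easy to cross-check without Paddle. The short NumPy-only sketch below (an editorial illustration, not part of the patched files) evaluates the documented formula with the standard max-subtraction trick so that ``exp`` stays in a safe range; it reuses the first row of the example input and reproduces the first row of the documented output.

.. code-block:: python

    import numpy as np

    # log(softmax(x)) along the last axis, written out from the formula
    # above; subtracting the maximum first avoids overflow in exp()
    # without changing the result.
    x = np.array([-2.0, 3.0, -4.0, 5.0], dtype='float32')
    shifted = x - x.max()
    out = shifted - np.log(np.exp(shifted).sum())
    print(out)  # approx. [-7.1278396 -2.1278396 -9.127839 -0.12783948]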
diff --git a/doc/fluid/api_cn/nn_cn/log_softmax_cn.rst b/doc/fluid/api_cn/nn_cn/log_softmax_cn.rst
new file mode 100644
index 0000000000000000000000000000000000000000..5509a6a4f18b928ffa4426b7bedfda88926f5017
--- /dev/null
+++ b/doc/fluid/api_cn/nn_cn/log_softmax_cn.rst
@@ -0,0 +1,51 @@
+.. _cn_api_nn_cn_log_softmax:
+
+log_softmax
+-------------------------------
+.. py:function:: paddle.nn.functional.log_softmax(x, axis=-1, dtype=None, name=None)
+
+This OP implements the log_softmax layer. The OP is computed as:
+
+.. math::
+
+    Out[i, j] = \log(\mathrm{softmax}(x)) = \log\left(\frac{\exp(X[i, j])}{\sum_j \exp(X[i, j])}\right)
+
+Parameters
+::::::::::
+    - x (Tensor) - The input ``Tensor``, with data type float32 or float64.
+    - axis (int, optional) - The axis along which to perform the computation on the input ``x``. The valid range of ``axis`` is [-D, D), where D is the number of dimensions of ``x``; a negative ``axis`` is equivalent to :math:`axis + D`. Default: -1.
+    - dtype (str|np.dtype|core.VarDesc.VarType, optional) - The data type of the input Tensor. If ``dtype`` is given, the input Tensor is cast to ``dtype`` before the computation, which can be used to avoid data overflow. If ``dtype`` is None, the output Tensor has the same data type as ``x``. Default: None.
+    - name (str, optional) - Name of the operation (optional; default is None). For more information, please refer to :ref:`api_guide_Name`.
+
+Returns
+::::::::::
+    A ``Tensor`` with the same shape as ``x``; its data type is ``dtype`` if given, otherwise the same as ``x``.
+
+Code example
+::::::::::
+
+.. code-block:: python
+
+    import paddle
+    import paddle.nn.functional as F
+    import numpy as np
+
+    paddle.disable_static()
+
+    x = np.array([[[-2.0, 3.0, -4.0, 5.0],
+                   [3.0, -4.0, 5.0, -6.0],
+                   [-7.0, -8.0, 8.0, 9.0]],
+                  [[1.0, -2.0, -3.0, 4.0],
+                   [-5.0, 6.0, 7.0, -8.0],
+                   [6.0, 7.0, 8.0, 9.0]]]).astype('float32')
+    x = paddle.to_tensor(x)
+    out1 = F.log_softmax(x)
+    out2 = F.log_softmax(x, dtype='float64')
+    # out1's data type is float32; out2's data type is float64
+    # out1 and out2's values are as follows:
+    # [[[ -7.1278396   -2.1278396   -9.127839     -0.12783948]
+    #   [ -2.1270514   -9.127051    -0.12705144  -11.127051  ]
+    #   [-16.313261   -17.313261    -1.3132617    -0.31326184]]
+    #  [[ -3.0518122   -6.051812    -7.051812     -0.051812  ]
+    #   [-12.313267    -1.3132664   -0.3132665   -15.313267  ]
+    #   [ -3.4401896   -2.4401896   -1.4401896    -0.44018966]]]
diff --git a/doc/fluid/api_cn/nn_cn/softmax_cn.rst b/doc/fluid/api_cn/nn_cn/softmax_cn.rst
index 2e7b07ccf9f1a15ce6fb9f148ac5420f57e16eb1..5c2e0cc806c78a831b0a66e6fa89c4bc233a6ecb 100644
--- a/doc/fluid/api_cn/nn_cn/softmax_cn.rst
+++ b/doc/fluid/api_cn/nn_cn/softmax_cn.rst
@@ -2,7 +2,9 @@
 softmax
 -------------------------------
-.. py:functional:: paddle.nn.functional.softmax(x, axis=-1, name=None)
+
+.. py:function:: paddle.nn.functional.softmax(x, axis=-1, name=None)
+
 This OP implements the softmax layer. The OP computes as follows:
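This change documents both the class form (``paddle.nn.LogSoftmax``) and the functional form (``paddle.nn.functional.log_softmax``). The sketch below (an editorial illustration, assuming the same Paddle 2.x imperative mode used in the examples above) shows that the two forms agree on the same input; the class form is convenient when the activation is registered as part of a model, while the functional form suits one-off calls.

.. code-block:: python

    import numpy as np
    import paddle
    import paddle.nn.functional as F

    paddle.disable_static()

    x = paddle.to_tensor(np.array([[-2.0, 3.0, -4.0, 5.0],
                                   [3.0, -4.0, 5.0, -6.0]], 'float32'))

    # Class form: fix the axis once, then call the layer like a function.
    out_layer = paddle.nn.LogSoftmax(axis=-1)(x)
    # Functional form: pass the axis on each call.
    out_func = F.log_softmax(x, axis=-1)

    print(np.allclose(out_layer.numpy(), out_func.numpy()))  # True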