From e9f8af3a1f3054c0f449adf812e43151637e5cae Mon Sep 17 00:00:00 2001
From: nhzlx
Date: Mon, 22 Oct 2018 16:10:26 +0800
Subject: [PATCH] add activation doc

---
 doc/fluid/api_guides/index_cn.rst           |  1 +
 doc/fluid/api_guides/low_level/index_cn.rst |  7 +++++++
 .../low_level/layers/activations.rst        | 23 +++++++++++++++++++
 3 files changed, 31 insertions(+)
 create mode 100644 doc/fluid/api_guides/low_level/index_cn.rst
 create mode 100644 doc/fluid/api_guides/low_level/layers/activations.rst

diff --git a/doc/fluid/api_guides/index_cn.rst b/doc/fluid/api_guides/index_cn.rst
index 9873d55c2..be8d4d5eb 100644
--- a/doc/fluid/api_guides/index_cn.rst
+++ b/doc/fluid/api_guides/index_cn.rst
@@ -7,3 +7,4 @@ API Guide
    :maxdepth: 1
 
    high_low_level_api.md
+   low_level/index_cn.rst
diff --git a/doc/fluid/api_guides/low_level/index_cn.rst b/doc/fluid/api_guides/low_level/index_cn.rst
new file mode 100644
index 000000000..3118eb09a
--- /dev/null
+++ b/doc/fluid/api_guides/low_level/index_cn.rst
@@ -0,0 +1,7 @@
+API Guide
+=========
+
+.. toctree::
+   :maxdepth: 1
+
+   layers/activations.rst
diff --git a/doc/fluid/api_guides/low_level/layers/activations.rst b/doc/fluid/api_guides/low_level/layers/activations.rst
new file mode 100644
index 000000000..6a502525c
--- /dev/null
+++ b/doc/fluid/api_guides/low_level/layers/activations.rst
@@ -0,0 +1,23 @@
+# Activation Function
+
+An activation function introduces non-linearity into a neural network.
+
+PaddlePaddle Fluid supports most of the commonly used activation functions, including:
+`relu`, `tanh`, `sigmoid`, `elu`, `relu6`, `pow`, `stanh`, `hard_sigmoid`, `swish`, `prelu`, `brelu`, `leaky_relu`, `soft_relu`, `thresholded_relu`, `maxout`, `logsigmoid`, `hard_shrink`, `softsign`, `softplus`, `tanh_shrink`, `softshrink`, and `exp`.
+
+
+## Fluid provides two ways to use activation functions:
+
+1. If a layer's interface exposes an `act` parameter (default: `None`), we can use it to specify the activation applied to that layer's output. This approach supports common activations such as `relu`, `tanh`, and `sigmoid`.
+
+   ```
+   conv2d = fluid.layers.conv2d(input=data, num_filters=2, filter_size=3, act="relu")
+   ```
+
+
+2. Fluid also provides a dedicated interface for each activation, so we can call one explicitly:
+
+   ```
+   conv2d = fluid.layers.conv2d(input=data, num_filters=2, filter_size=3)
+   relu1 = fluid.layers.relu(conv2d)
+   ```
-- 
GitLab
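
As a usage note beyond the patch itself: the two styles documented in `activations.rst` can be exercised together in one short script. The sketch below assumes the PaddlePaddle 1.x `paddle.fluid` API that the patch targets; the input variable `data` (its name, shape, and dtype) is hypothetical and only there to make the example self-contained.

```python
# Minimal sketch of the two activation styles documented in the patch.
# Assumes PaddlePaddle 1.x (paddle.fluid); input name/shape are illustrative.
import paddle.fluid as fluid

# Hypothetical input: a batch of 3-channel 32x32 images.
data = fluid.layers.data(name="image", shape=[3, 32, 32], dtype="float32")

# Style 1: fuse the activation into the layer via its `act` argument.
conv_fused = fluid.layers.conv2d(
    input=data, num_filters=2, filter_size=3, act="relu")

# Style 2: apply the activation explicitly as a separate layer call.
conv_plain = fluid.layers.conv2d(input=data, num_filters=2, filter_size=3)
relu_out = fluid.layers.relu(conv_plain)
```

The two styles produce equivalent results for activations supported by `act`; the explicit form is needed for activations (such as `maxout` or `prelu`) that take extra arguments or have no `act` shorthand.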