From 9d1685b35929dc34c615321143bf669e7974b50b Mon Sep 17 00:00:00 2001
From: Chris Yann
Date: Mon, 25 Jun 2018 11:47:15 +0800
Subject: [PATCH] Fix relu and log function by changing input parameter name
 from 'input' to 'x' (#11683)

* Fix relu and log

* Update nn.py
---
 python/paddle/fluid/layers/nn.py | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index 3a803bc4b..d3cd5849d 100644
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -4920,16 +4920,16 @@ def random_crop(x, shape, seed=None):
     return out
 
 
-def log(input):
+def log(x):
     """
     Calculates the natural log of the given input tensor, element-wise.
 
     .. math::
 
-        Out = \\ln(input)
+        Out = \\ln(x)
 
     Args:
-        input (Variable): Input tensor.
+        x (Variable): Input tensor.
 
     Returns:
         Variable: The natural log of the input tensor computed element-wise.
@@ -4938,7 +4938,7 @@ def log(input):
 
         .. code-block:: python
 
-            output = fluid.layers.log(input)
+            output = fluid.layers.log(x)
     """
     helper = LayerHelper('log', **locals())
     dtype = helper.input_dtype(input_param_name='x')
@@ -4947,18 +4947,18 @@ def log(input):
     return out
 
 
-def relu(input):
+def relu(x):
     """
     Relu takes one input data (Tensor) and produces one output data (Tensor)
-    where the rectified linear function, y = max(0, input), is applied to
+    where the rectified linear function, y = max(0, x), is applied to
     the tensor elementwise.
 
     .. math::
 
-        Out = \\max(0, input)
+        Out = \\max(0, x)
 
     Args:
-        input (Variable): The input tensor.
+        x (Variable): The input tensor.
 
     Returns:
         Variable: The output tensor with the same shape as input.
@@ -4967,7 +4967,7 @@ def relu(input):
 
         .. code-block:: python
 
-            output = fluid.layers.relu(input)
+            output = fluid.layers.relu(x)
     """
     helper = LayerHelper('relu', **locals())
     dtype = helper.input_dtype(input_param_name='x')
-- 
GitLab
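
Note on why the rename is a fix rather than a cosmetic change: both helpers
build their kwargs from the function's ``**locals()`` (see
``LayerHelper('log', **locals())``) and then resolve the input's dtype with
``helper.input_dtype(input_param_name='x')``, so that lookup can only succeed
when the parameter is literally named ``x``. A minimal usage sketch against
the post-patch signatures follows; the placeholder name, shape, and feed data
are illustrative and not part of the patch.

.. code-block:: python

    import numpy as np
    import paddle.fluid as fluid

    # Placeholder input; the name 'data' and the shape are illustrative only.
    inp = fluid.layers.data(name='data', shape=[3], dtype='float32')

    # With this patch applied the keyword is x; relu(input=inp) would raise.
    relu_out = fluid.layers.relu(x=inp)
    log_out = fluid.layers.log(x=relu_out)  # element-wise ln; zeros map to -inf

    exe = fluid.Executor(fluid.CPUPlace())
    exe.run(fluid.default_startup_program())

    feed = np.array([[0.5, 1.0, 2.0]], dtype='float32')
    r, l = exe.run(feed={'data': feed}, fetch_list=[relu_out, log_out])
    print(r)  # [[0.5 1.  2. ]]   (inputs already non-negative)
    print(l)  # [[-0.6931  0.      0.6931]]

Positional calls such as ``fluid.layers.relu(inp)`` are unaffected by the
rename; only keyword callers that wrote ``input=...`` need to switch to
``x=...``.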