From f8ce3a2c21d72374f6acb6606bfb1377c984ddfe Mon Sep 17 00:00:00 2001 From: Epsilon Luoo Date: Mon, 27 Feb 2023 23:01:24 +0800 Subject: [PATCH] [Fix Typo] Unify the digital presentation of rrelu's Docs (#50859) Unify the digital presentation of rrelu's English and Chinese documentation. Modify the `Parameters` section of `rrelu`'s default `lower` value from `1.0 / 8.0` to `0.125`. Modify the `Parameters` section of `rrelu`'s default `upper` value from `1.0 / 3.0` to `0.3333333333333333`. --- python/paddle/nn/functional/activation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/paddle/nn/functional/activation.py b/python/paddle/nn/functional/activation.py index 569a5334486..8ee01b5e58f 100644 --- a/python/paddle/nn/functional/activation.py +++ b/python/paddle/nn/functional/activation.py @@ -594,8 +594,8 @@ def rrelu(x, lower=1.0 / 8.0, upper=1.0 / 3.0, training=True, name=None): Parameters: x (Tensor): The input Tensor with data type float16, float32, float64. - lower (float, optional): The lower bound of uniform distribution. Default: 1.0/8.0. - upper (float, optional): The upper bound of uniform distribution. Default: 1.0/3.0. + lower (float, optional): The lower bound of uniform distribution. Default: 0.125. + upper (float, optional): The upper bound of uniform distribution. Default: 0.3333333333333333. training (bool, optional): Current mode is in training or others. Default is True. name (str, optional): For details, please refer to :ref:`api_guide_Name`. Generally, no setting is required. Default: None. -- GitLab