diff --git a/python/paddle/nn/functional/activation.py b/python/paddle/nn/functional/activation.py
index b0faae089142e7c90811b8e4bf5ec6516a04e5d9..34f44fb2390eee3f7574e0c9c671204be2a51d9d 100644
--- a/python/paddle/nn/functional/activation.py
+++ b/python/paddle/nn/functional/activation.py
@@ -252,7 +252,7 @@ def hardtanh(x, min=-1.0, max=1.0, name=None):
     return out
 
 
-def hardsigmoid(x, name=None):
+def hardsigmoid(x, slope=0.1666667, offset=0.5, name=None):
     r"""
     hardsigmoid activation.
 
@@ -266,12 +266,14 @@
             \\begin{aligned}
             &0, & & \\text{if } x \\leq -3 \\\\
             &1, & & \\text{if } x \\geq 3 \\\\
-            &x/6 + 1/2, & & \\text{otherwise}
+            &slope * x + offset, & & \\text{otherwise}
             \\end{aligned}
             \\right.
 
     Parameters:
         x (Tensor): The input Tensor with data type float32, float64.
+        slope (float, optional): The slope of hardsigmoid function. Default is 0.1666667.
+        offset (float, optional): The offset of hardsigmoid function. Default is 0.5.
         name (str, optional): Name for the operation (optional, default is None).
             For more information, please refer to :ref:`api_guide_Name`.
 
@@ -289,8 +291,7 @@
     """
 
     if in_dygraph_mode():
-        return core.ops.hard_sigmoid(x, 'slope', 0.1666666666666667, 'offset',
-                                     0.5)
+        return core.ops.hard_sigmoid(x, 'slope', slope, 'offset', offset)
 
     check_variable_and_dtype(x, 'x', ['float16', 'float32', 'float64'],
                              'hardsigmoid')
@@ -301,8 +302,8 @@
         type='hard_sigmoid',
         inputs={'X': x},
         outputs={'Out': out},
-        attrs={'slope': 0.1666666666666667,
-               'offset': 0.5})
+        attrs={'slope': slope,
+               'offset': offset})
     return out
 
 
diff --git a/python/paddle/nn/layer/activation.py b/python/paddle/nn/layer/activation.py
index 482382300a784a7eea631b9d2e1d7995fb67b6ac..3350ab64057a3832b835070c244dcd7d1ef12164 100644
--- a/python/paddle/nn/layer/activation.py
+++ b/python/paddle/nn/layer/activation.py
@@ -611,7 +611,7 @@ class Hardsigmoid(layers.Layer):
         self.name = name
 
     def forward(self, x):
-        return F.hardsigmoid(x, self.name)
+        return F.hardsigmoid(x, name=self.name)
 
 
 class Softplus(layers.Layer):
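
A minimal usage sketch of the new `slope`/`offset` parameters. The input values and expected outputs are illustrative only, and assume the `hard_sigmoid` kernel computes `min(max(slope * x + offset, 0), 1)`, which reduces to the documented `x/6 + 1/2` piecewise behavior at the defaults:

```python
import paddle
import paddle.nn.functional as F

x = paddle.to_tensor([-4.0, -1.0, 1.0, 6.0])

# Defaults keep the previously hard-coded behavior: x/6 + 1/2, clipped to [0, 1].
print(F.hardsigmoid(x))  # approx. [0.0, 0.3333, 0.6667, 1.0]

# Custom slope/offset reshape the linear region: 0.2 * x + 0.5 here.
print(F.hardsigmoid(x, slope=0.2, offset=0.5))  # approx. [0.0, 0.3, 0.7, 1.0]
```

Note that the keyword fix in `Hardsigmoid.forward` is required by the signature change: the old positional call `F.hardsigmoid(x, self.name)` would now bind `self.name` to `slope`.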