Unverified commit e16d9a8e, authored by 傅剑寒, committed by GitHub

remove_leaky_relu in nn.py under fluid (#47901)

Parent db0ea0ce
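This commit removes the deprecated fluid.layers.leaky_relu wrapper; call sites switch to the paddle.nn.functional API directly. A minimal migration sketch (illustrative only, not part of the diff; the tensor values are made up):

import paddle

x = paddle.to_tensor([[-1.0, 2.0], [3.0, -4.0]], dtype='float32')
# before this commit: y = paddle.fluid.layers.leaky_relu(x, alpha=0.1)
y = paddle.nn.functional.leaky_relu(x, negative_slope=0.1)
print(y)  # [[-0.1, 2.0], [3.0, -0.4]]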
@@ -121,7 +121,6 @@ __all__ = [
'pow',
'prelu',
'brelu',
- 'leaky_relu',
'flatten',
'pad2d',
'unique',
@@ -8238,33 +8237,6 @@ def brelu(x, t_min=0.0, t_max=24.0, name=None):
return out
- @deprecated(since="2.0.0", update_to="paddle.nn.functional.leaky_relu")
- @templatedoc()
- def leaky_relu(x, alpha=0.02, name=None):
- """
- ${comment}
- Args:
- x(${x_type}): ${x_comment}
- alpha(${alpha_type}|0.02): ${alpha_comment}
- name(str|None): The default value is None. Normally there is no need for user to set this property. For more information, please refer to :ref:`api_guide_Name`
- Returns:
- output(${out_type}): ${out_comment}
- Examples:
- .. code-block:: python
- import paddle
- x = paddle.to_tensor([[-1, 2], [3, -4]], dtype='float32')
- y = paddle.fluid.layers.leaky_relu(x, alpha=0.1)
- print(y) # [[-0.1, 2], [3, -0.4]]
- """
- return paddle.nn.functional.leaky_relu(x, alpha, name)
def flatten(x, axis=1, name=None):
r"""
**Flatten op**
@@ -65,7 +65,7 @@ class ConvBNLayer(fluid.dygraph.Layer):
out = self.conv(inputs)
out = self.batch_norm(out)
if self.act == 'leaky':
- out = fluid.layers.leaky_relu(x=out, alpha=0.1)
+ out = paddle.nn.functional.leaky_relu(out, 0.1)
return out
@@ -396,7 +396,7 @@ class conv2d(fluid.dygraph.Layer):
if self.norm:
conv = self.bn(conv)
if self.relu:
- conv = fluid.layers.leaky_relu(conv, alpha=self.relufactor)
+ conv = paddle.nn.functional.leaky_relu(conv, self.relufactor)
return conv
@@ -468,7 +468,7 @@ class DeConv2D(fluid.dygraph.Layer):
if self.norm:
conv = self.bn(conv)
if self.relu:
- conv = fluid.layers.leaky_relu(conv, alpha=self.relufactor)
+ conv = paddle.nn.functional.leaky_relu(conv, self.relufactor)
return conv
@@ -84,7 +84,7 @@ class ElementwiseActivationMkldnnFusePassTest_Add_LeakyRelu(
def set_params(self):
self.operand = fluid.layers.elementwise_add
self.act_alpha = 0.2
- self.act = fluid.layers.leaky_relu
+ self.act = paddle.nn.functional.leaky_relu
class ElementwiseActivationMkldnnFusePassTest_Add_Swish(
@@ -184,7 +184,7 @@ class ElementwiseActivationMkldnnFusePassTest_Sub_LeakyRelu(
def set_params(self):
self.operand = fluid.layers.elementwise_sub
self.act_alpha = 0.2
- self.act = fluid.layers.leaky_relu
+ self.act = paddle.nn.functional.leaky_relu
class ElementwiseActivationMkldnnFusePassTest_Sub_Swish(
@@ -276,7 +276,7 @@ class ElementwiseActivationMkldnnFusePassTest_Mul_LeakyRelu(
def set_params(self):
self.operand = fluid.layers.elementwise_mul
self.act_alpha = 0.2
- self.act = fluid.layers.leaky_relu
+ self.act = paddle.nn.functional.leaky_relu
class ElementwiseActivationMkldnnFusePassTest_Mul_Swish(
@@ -67,7 +67,7 @@ class TensorRTSubgraphPassActivationTest(InferencePassTest):
class TensorRTSubgraphPassLeakyReluTest(TensorRTSubgraphPassActivationTest):
def append_act(self, x):
- return fluid.layers.leaky_relu(x)
+ return paddle.nn.functional.leaky_relu(x)
class TensorRTSubgraphPassRelu6Test(TensorRTSubgraphPassActivationTest):
@@ -217,7 +217,7 @@ class TestLeakyReluDoubleGradCheck(unittest.TestCase):
x = layers.data('x', shape, False, dtype)
x.persistable = True
- y = layers.leaky_relu(x, alpha=alpha)
+ y = paddle.nn.functional.leaky_relu(x, alpha)
x_arr = np.random.uniform(-1, 1, shape).astype(dtype)
x_arr[np.abs(x_arr) < 0.005] = 0.02
@@ -1689,7 +1689,6 @@ class TestLeakyRelu_ZeroDim(TestLeakyRelu):
class TestLeakyReluAPI(unittest.TestCase):
# test paddle.nn.LeakyReLU, paddle.nn.functional.leaky_relu,
- # fluid.layers.leaky_relu
def setUp(self):
np.random.seed(1024)
self.x_np = np.random.uniform(-1, 1, [10, 12]).astype('float32')
@@ -365,7 +365,7 @@ class TestDygraphDoubleGrad(TestCase):
x.stop_gradient = False
alpha = 0.2
- y = fluid.layers.leaky_relu(x, alpha=alpha)
+ y = paddle.nn.functional.leaky_relu(x, alpha)
y = y * y
z = y * y
@@ -163,7 +163,7 @@ class Conv2DLayer(fluid.dygraph.Layer):
conv = self._norm(conv)
if self.relufactor is not None:
- conv = fluid.layers.leaky_relu(conv, alpha=self.relufactor)
+ conv = paddle.nn.functional.leaky_relu(conv, self.relufactor)
return conv
@@ -205,7 +205,7 @@ class Deconv2DLayer(fluid.dygraph.Layer):
deconv = self._norm(deconv)
if self.relufactor is not None:
- deconv = fluid.layers.leaky_relu(deconv, alpha=self.relufactor)
+ deconv = paddle.nn.functional.leaky_relu(deconv, self.relufactor)
return deconv
@@ -78,7 +78,7 @@ class TestInplaceANBOpTraining(unittest.TestCase):
in_place=inplace,
)
if activation == 'leaky_relu':
- bn = fluid.layers.leaky_relu(bn, alpha)
+ bn = paddle.nn.functional.leaky_relu(bn, alpha)
if activation == 'elu':
bn = paddle.nn.functional.elu(bn, alpha)
@@ -164,7 +164,7 @@ class TestDygraphDoubleGrad(TestCase):
x.stop_gradient = False
alpha = 0.2
- y = fluid.layers.leaky_relu(x, alpha=alpha)
+ y = paddle.nn.functional.leaky_relu(x, alpha)
y = y * y
z = y * y