Unverified commit e16d9a8e, authored by 傅剑寒, committed by GitHub

remove_leaky_relu in nn.py under fluid (#47901)
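This commit deletes the deprecated fluid-namespace `leaky_relu` and migrates every call site to the 2.x API. A minimal migration sketch (a hedged illustration, not code taken from this diff; the input values mirror the example in the removed docstring, and a Paddle 2.x install is assumed):

    import paddle
    import paddle.nn.functional as F

    x = paddle.to_tensor([[-1.0, 2.0], [3.0, -4.0]])

    # Before this commit: y = paddle.fluid.layers.leaky_relu(x, alpha=0.1)
    # After: the slope argument is called negative_slope in the 2.x functional API.
    y = F.leaky_relu(x, negative_slope=0.1)
    print(y)  # [[-0.1, 2.0], [3.0, -0.4]]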

Parent db0ea0ce
@@ -121,7 +121,6 @@ __all__ = [
     'pow',
     'prelu',
     'brelu',
-    'leaky_relu',
     'flatten',
     'pad2d',
     'unique',
@@ -8238,33 +8237,6 @@ def brelu(x, t_min=0.0, t_max=24.0, name=None):
     return out
-@deprecated(since="2.0.0", update_to="paddle.nn.functional.leaky_relu")
-@templatedoc()
-def leaky_relu(x, alpha=0.02, name=None):
-    """
-    ${comment}
-    Args:
-        x(${x_type}): ${x_comment}
-        alpha(${alpha_type}|0.02): ${alpha_comment}
-        name(str|None): The default value is None. Normally there is no need for user to set this property. For more information, please refer to :ref:`api_guide_Name`
-    Returns:
-        output(${out_type}): ${out_comment}
-    Examples:
-        .. code-block:: python
-            import paddle
-            x = paddle.to_tensor([[-1, 2], [3, -4]], dtype='float32')
-            y = paddle.fluid.layers.leaky_relu(x, alpha=0.1)
-            print(y)  # [[-0.1, 2], [3, -0.4]]
-    """
-    return paddle.nn.functional.leaky_relu(x, alpha, name)
 def flatten(x, axis=1, name=None):
     r"""
     **Flatten op**
......
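The layer-style counterpart, `paddle.nn.LeakyReLU`, is what `TestLeakyReluAPI` below continues to exercise; a short sketch of both forms for reference (standard Paddle 2.x usage, assumed rather than taken from this commit):

    import paddle

    x = paddle.to_tensor([[-1.0, 2.0], [3.0, -4.0]])

    # Functional form, used by the migrated call sites below.
    y_func = paddle.nn.functional.leaky_relu(x, 0.1)

    # Layer form, covered by TestLeakyReluAPI further down in this diff.
    layer = paddle.nn.LeakyReLU(negative_slope=0.1)
    y_layer = layer(x)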
@@ -65,7 +65,7 @@ class ConvBNLayer(fluid.dygraph.Layer):
         out = self.conv(inputs)
         out = self.batch_norm(out)
         if self.act == 'leaky':
-            out = fluid.layers.leaky_relu(x=out, alpha=0.1)
+            out = paddle.nn.functional.leaky_relu(out, 0.1)
         return out
......
@@ -396,7 +396,7 @@ class conv2d(fluid.dygraph.Layer):
         if self.norm:
             conv = self.bn(conv)
         if self.relu:
-            conv = fluid.layers.leaky_relu(conv, alpha=self.relufactor)
+            conv = paddle.nn.functional.leaky_relu(conv, self.relufactor)
         return conv
@@ -468,7 +468,7 @@ class DeConv2D(fluid.dygraph.Layer):
         if self.norm:
             conv = self.bn(conv)
         if self.relu:
-            conv = fluid.layers.leaky_relu(conv, alpha=self.relufactor)
+            conv = paddle.nn.functional.leaky_relu(conv, self.relufactor)
         return conv
......
@@ -84,7 +84,7 @@ class ElementwiseActivationMkldnnFusePassTest_Add_LeakyRelu(
     def set_params(self):
         self.operand = fluid.layers.elementwise_add
         self.act_alpha = 0.2
-        self.act = fluid.layers.leaky_relu
+        self.act = paddle.nn.functional.leaky_relu
 class ElementwiseActivationMkldnnFusePassTest_Add_Swish(
@@ -184,7 +184,7 @@ class ElementwiseActivationMkldnnFusePassTest_Sub_LeakyRelu(
     def set_params(self):
         self.operand = fluid.layers.elementwise_sub
         self.act_alpha = 0.2
-        self.act = fluid.layers.leaky_relu
+        self.act = paddle.nn.functional.leaky_relu
 class ElementwiseActivationMkldnnFusePassTest_Sub_Swish(
@@ -276,7 +276,7 @@ class ElementwiseActivationMkldnnFusePassTest_Mul_LeakyRelu(
     def set_params(self):
         self.operand = fluid.layers.elementwise_mul
         self.act_alpha = 0.2
-        self.act = fluid.layers.leaky_relu
+        self.act = paddle.nn.functional.leaky_relu
 class ElementwiseActivationMkldnnFusePassTest_Mul_Swish(
......
@@ -67,7 +67,7 @@ class TensorRTSubgraphPassActivationTest(InferencePassTest):
 class TensorRTSubgraphPassLeakyReluTest(TensorRTSubgraphPassActivationTest):
     def append_act(self, x):
-        return fluid.layers.leaky_relu(x)
+        return paddle.nn.functional.leaky_relu(x)
 class TensorRTSubgraphPassRelu6Test(TensorRTSubgraphPassActivationTest):
......
@@ -217,7 +217,7 @@ class TestLeakyReluDoubleGradCheck(unittest.TestCase):
         x = layers.data('x', shape, False, dtype)
         x.persistable = True
-        y = layers.leaky_relu(x, alpha=alpha)
+        y = paddle.nn.functional.leaky_relu(x, alpha)
         x_arr = np.random.uniform(-1, 1, shape).astype(dtype)
         x_arr[np.abs(x_arr) < 0.005] = 0.02
......
@@ -1689,7 +1689,6 @@ class TestLeakyRelu_ZeroDim(TestLeakyRelu):
 class TestLeakyReluAPI(unittest.TestCase):
     # test paddle.nn.LeakyReLU, paddle.nn.functional.leaky_relu,
-    # fluid.layers.leaky_relu
     def setUp(self):
         np.random.seed(1024)
         self.x_np = np.random.uniform(-1, 1, [10, 12]).astype('float32')
......
@@ -365,7 +365,7 @@ class TestDygraphDoubleGrad(TestCase):
         x.stop_gradient = False
         alpha = 0.2
-        y = fluid.layers.leaky_relu(x, alpha=alpha)
+        y = paddle.nn.functional.leaky_relu(x, alpha)
         y = y * y
         z = y * y
......
@@ -163,7 +163,7 @@ class Conv2DLayer(fluid.dygraph.Layer):
         conv = self._norm(conv)
         if self.relufactor is not None:
-            conv = fluid.layers.leaky_relu(conv, alpha=self.relufactor)
+            conv = paddle.nn.functional.leaky_relu(conv, self.relufactor)
         return conv
@@ -205,7 +205,7 @@ class Deconv2DLayer(fluid.dygraph.Layer):
         deconv = self._norm(deconv)
         if self.relufactor is not None:
-            deconv = fluid.layers.leaky_relu(deconv, alpha=self.relufactor)
+            deconv = paddle.nn.functional.leaky_relu(deconv, self.relufactor)
         return deconv
......
@@ -78,7 +78,7 @@ class TestInplaceANBOpTraining(unittest.TestCase):
             in_place=inplace,
         )
         if activation == 'leaky_relu':
-            bn = fluid.layers.leaky_relu(bn, alpha)
+            bn = paddle.nn.functional.leaky_relu(bn, alpha)
         if activation == 'elu':
             bn = paddle.nn.functional.elu(bn, alpha)
......
@@ -164,7 +164,7 @@ class TestDygraphDoubleGrad(TestCase):
         x.stop_gradient = False
         alpha = 0.2
-        y = fluid.layers.leaky_relu(x, alpha=alpha)
+        y = paddle.nn.functional.leaky_relu(x, alpha)
         y = y * y
         z = y * y
......