From 567b711db92260e7c53c2a16cdacde2b0ec8d7a2 Mon Sep 17 00:00:00 2001
From: tensor-tang
Date: Fri, 21 Sep 2018 11:08:17 +0800
Subject: [PATCH] remove kwargs of brelu, leaky_relu and soft_relu

---
 paddle/fluid/API.spec             |  6 +--
 python/paddle/fluid/layers/nn.py  | 71 +++++++++++++++++++++++++++++++
 python/paddle/fluid/layers/ops.py |  3 --
 3 files changed, 74 insertions(+), 6 deletions(-)

diff --git a/paddle/fluid/API.spec b/paddle/fluid/API.spec
index b65f890c391..aecaabcc2f5 100644
--- a/paddle/fluid/API.spec
+++ b/paddle/fluid/API.spec
@@ -162,6 +162,9 @@ paddle.fluid.layers.log ArgSpec(args=['x', 'name'], varargs=None, keywords=None,
 paddle.fluid.layers.crop ArgSpec(args=['x', 'shape', 'offsets', 'name'], varargs=None, keywords=None, defaults=(None, None, None))
 paddle.fluid.layers.rank_loss ArgSpec(args=['label', 'left', 'right', 'name'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.layers.prelu ArgSpec(args=['x', 'mode', 'param_attr', 'name'], varargs=None, keywords=None, defaults=(None, None))
+paddle.fluid.layers.brelu ArgSpec(args=['x', 't_min', 't_max', 'name'], varargs=None, keywords=None, defaults=(0.0, 24.0, None))
+paddle.fluid.layers.leaky_relu ArgSpec(args=['x', 'alpha', 'name'], varargs=None, keywords=None, defaults=(0.02, None))
+paddle.fluid.layers.soft_relu ArgSpec(args=['x', 'threshold', 'name'], varargs=None, keywords=None, defaults=(40.0, None))
 paddle.fluid.layers.flatten ArgSpec(args=['x', 'axis', 'name'], varargs=None, keywords=None, defaults=(1, None))
 paddle.fluid.layers.sequence_mask ArgSpec(args=['x', 'maxlen', 'dtype', 'name'], varargs=None, keywords=None, defaults=(None, 'int64', None))
 paddle.fluid.layers.stack ArgSpec(args=['x', 'axis'], varargs=None, keywords=None, defaults=(0,))
@@ -258,9 +261,6 @@ paddle.fluid.layers.slice ArgSpec(args=[], varargs='args', keywords='kwargs', de
 paddle.fluid.layers.shape ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.maxout ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.softshrink ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.brelu ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.leaky_relu ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.soft_relu ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.elu ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.relu6 ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.pow ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index f896cfa04b3..e99e8afd877 100644
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -108,6 +108,9 @@ __all__ = [
     'crop',
     'rank_loss',
     'prelu',
+    'brelu',
+    'leaky_relu',
+    'soft_relu',
     'flatten',
     'sequence_mask',
     'stack',
@@ -5948,6 +5951,74 @@ def prelu(x, mode, param_attr=None, name=None):
     return out


+@templatedoc()
+def brelu(x, t_min=0.0, t_max=24.0, name=None):
+    """
+    ${comment}
+    Args:
+        x(${x_type}): ${x_comment}
+        t_min(${t_min_type}|0.0): ${t_min_comment}
+        t_max(${t_max_type}|24.0): ${t_max_comment}
+        name(str|None): A name for this layer(optional). If set None, the layer
+                        will be named automatically.
+    Returns:
+        output(${out_type}): ${out_comment}
+    """
+    helper = LayerHelper('brelu', **locals())
+    out = helper.create_tmp_variable(dtype=x.dtype)
+    helper.append_op(
+        type='brelu',
+        inputs={'X': x},
+        outputs={'Out': out},
+        attrs={'t_min': t_min,
+               't_max': t_max})
+    return out
+
+
+@templatedoc()
+def leaky_relu(x, alpha=0.02, name=None):
+    """
+    ${comment}
+    Args:
+        x(${x_type}): ${x_comment}
+        alpha(${alpha_type}|0.02): ${alpha_comment}
+        name(str|None): A name for this layer(optional). If set None, the layer
+                        will be named automatically.
+    Returns:
+        output(${out_type}): ${out_comment}
+    """
+    helper = LayerHelper('leaky_relu', **locals())
+    out = helper.create_tmp_variable(dtype=x.dtype)
+    helper.append_op(
+        type='leaky_relu',
+        inputs={'X': x},
+        outputs={'Out': out},
+        attrs={'alpha': alpha})
+    return out
+
+
+@templatedoc()
+def soft_relu(x, threshold=40.0, name=None):
+    """
+    ${comment}
+    Args:
+        x(${x_type}): ${x_comment}
+        threshold(${threshold_type}|40.0): ${threshold_comment}
+        name(str|None): A name for this layer(optional). If set None, the layer
+                        will be named automatically.
+    Returns:
+        output(${out_type}): ${out_comment}
+    """
+    helper = LayerHelper('soft_relu', **locals())
+    out = helper.create_tmp_variable(dtype=x.dtype)
+    helper.append_op(
+        type='soft_relu',
+        inputs={'X': x},
+        outputs={'Out': out},
+        attrs={'threshold': threshold})
+    return out
+
+
 def flatten(x, axis=1, name=None):
     """
     **Flatten layer**
diff --git a/python/paddle/fluid/layers/ops.py b/python/paddle/fluid/layers/ops.py
index 85476312dbb..6c9cbe740e8 100644
--- a/python/paddle/fluid/layers/ops.py
+++ b/python/paddle/fluid/layers/ops.py
@@ -17,9 +17,6 @@ from .layer_function_generator import generate_layer_fn, generate_layer_fn_noatt

 __activations__ = [
     'softshrink',
-    'brelu',
-    'leaky_relu',
-    'soft_relu',
     'elu',
     'relu6',
     'pow',
--
GitLab
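
Reviewer note: since this patch moves brelu, leaky_relu and soft_relu from the auto-generated kwargs wrappers in ops.py to explicit signatures in nn.py, a minimal usage sketch may help reviewers check the new argument names. This is not part of the patch; it assumes a Fluid build that includes these changes, and the shapes, attribute values and variable names below are illustrative only.

    import numpy as np
    import paddle.fluid as fluid

    # Exercise the three layers with their new explicit attributes.
    x = fluid.layers.data(name='x', shape=[4], dtype='float32')
    y_brelu = fluid.layers.brelu(x, t_min=1.0, t_max=10.0)
    y_leaky = fluid.layers.leaky_relu(x, alpha=0.1)
    y_soft = fluid.layers.soft_relu(x, threshold=20.0)

    place = fluid.CPUPlace()
    exe = fluid.Executor(place)
    exe.run(fluid.default_startup_program())

    data = np.random.uniform(-5, 5, size=(2, 4)).astype('float32')
    outs = exe.run(fluid.default_main_program(),
                   feed={'x': data},
                   fetch_list=[y_brelu, y_leaky, y_soft])

Before this change the attributes could only be passed as **kwargs through the generated wrappers; after it they appear in the Python signature and in API.spec, so misspelled attribute names fail immediately instead of being silently dropped.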