#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function
import os
from .layer_function_generator import generate_layer_fn, generate_activation_fn
from .. import core
from ..framework import convert_np_dtype_to_dtype_

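# Activation ops that take no attributes; each name below is exposed as a
# module-level Python API by the `generate_activation_fn` loop further down.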
__activations_noattr__ = [
    'sigmoid',
    'logsigmoid',
    'exp',
    'tanh',
    'atan',
    'tanh_shrink',
    'softshrink',
    'sqrt',
    'rsqrt',
    'abs',
    'ceil',
    'floor',
    'cos',
    'acos',
    'asin',
    'sin',
    'round',
    'reciprocal',
    'square',
    'softplus',
    'softsign',
]

__all__ = []

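# `__all__` is still empty at this point, so this loop generates nothing by
# itself; the ops exported below are each generated explicitly.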
for _OP in set(__all__):
    globals()[_OP] = generate_layer_fn(_OP)

# It is a hot fix for some unittests that use:
#   fluid.layers.scale(x=x, scale=10.0, out=out_var)
# e.g.: test_program_code.py, test_dist_train.py
globals()['_scale'] = generate_layer_fn('scale')

globals()['_elementwise_div'] = generate_layer_fn('elementwise_div')

__all__ += __activations_noattr__

for _OP in set(__activations_noattr__):
    globals()[_OP] = generate_activation_fn(_OP)
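# Each generated activation appends its op to the current program when called.
# A usage sketch (variable names are illustrative):
#
#   import paddle.fluid as fluid
#   x = fluid.layers.data(name="x", shape=[32])
#   y = fluid.layers.tanh(x)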

__all__ += ["uniform_random"]

_uniform_random_ = generate_layer_fn('uniform_random')


def uniform_random(shape, dtype='float32', min=-1.0, max=1.0, seed=0):
    """
    This operator initializes a variable with random values sampled from a
    uniform distribution. The random result is in the range [min, max].

    Args:
        shape (list): The shape of output variable.
        dtype(np.dtype|core.VarDesc.VarType|str): The type of data, such as
            float32, float64 etc. Default: float32.
        min (float): Minimum value of uniform random. Default -1.0.
        max (float): Maximum value of uniform random. Default 1.0.
        seed (int): Random seed used for generating samples. 0 means use a
            seed generated by the system. Note that if seed is not 0, this
            operator will always generate the same random numbers every time.
            Default 0.

    Examples:
        .. code-block:: python
     
            import paddle.fluid as fluid
            result = fluid.layers.uniform_random(shape=[32, 784])
    """

    if not isinstance(dtype, core.VarDesc.VarType):
        dtype = convert_np_dtype_to_dtype_(dtype)
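    # Forward only the arguments that are not None, so unset arguments fall
    # back to the operator's own defaults.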
    locals_var = locals().copy()
    kwargs = dict()
    for name, val in locals_var.items():
        if val is not None:
            kwargs[name] = val
    return _uniform_random_(**kwargs)


__all__ += ['hard_shrink']

_hard_shrink_ = generate_layer_fn('hard_shrink')


def hard_shrink(x, threshold=None):
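    # Same non-None argument forwarding as `uniform_random` above.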
    locals_var = locals().copy()
    kwargs = dict()
    for name, val in locals_var.items():
        if val is not None:
            kwargs[name] = val
    return _hard_shrink_(**kwargs)


hard_shrink.__doc__ = _hard_shrink_.__doc__ + """
Examples:

    >>> import paddle.fluid as fluid
    >>> data = fluid.layers.data(name="input", shape=[784])
    >>> result = fluid.layers.hard_shrink(x=data, threshold=0.3)
"""

__all__ += ['cumsum']

_cum_sum_ = generate_layer_fn('cumsum')


def cumsum(x, axis=None, exclusive=None, reverse=None):
    locals_var = locals().copy()
    kwargs = dict()
    for name, val in locals_var.items():
        if val is not None:
            kwargs[name] = val
    return _cum_sum_(**kwargs)


cumsum.__doc__ = _cum_sum_.__doc__ + """
Examples:

    >>> import paddle.fluid as fluid
    >>> data = fluid.layers.data(name="input", shape=[32, 784])
    >>> result = fluid.layers.cumsum(data, axis=0)
"""

__all__ += ['thresholded_relu']

_thresholded_relu_ = generate_layer_fn('thresholded_relu')


def thresholded_relu(x, threshold=None):
    locals_var = locals().copy()
    kwargs = dict()
    for name, val in locals_var.items():
        if val is not None:
            kwargs[name] = val

    return _thresholded_relu_(**kwargs)


thresholded_relu.__doc__ = _thresholded_relu_.__doc__ + """
Examples:

    >>> import paddle.fluid as fluid
    >>> data = fluid.layers.data(name="input", shape=[1])
    >>> result = fluid.layers.thresholded_relu(data, threshold=0.4)
"""