#   Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from .layer_function_generator import (
    generate_layer_fn,
    generate_activation_fn,
    generate_inplace_fn,
    add_sample_code,
)
from ..fluid.framework import in_dygraph_mode
from .. import _C_ops

__deprecated_func_name__ = {
    'tanh_shrink': 'tanhshrink',
26
    'logsigmoid': 'log_sigmoid',
27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109
}

# Activation ops that take no extra attributes; each name is turned into a
# module-level function by generate_activation_fn in the loops below.
__activations_noattr__ = [
    'sigmoid',
    'silu',
    'logsigmoid',
    'tanh_shrink',
    'softplus',
    'softsign',
    'tanh',
]

# Element-wise unary math ops, likewise generated via generate_activation_fn.
__unary_func__ = [
    'exp',
    'expm1',
    'atan',
    'sqrt',
    'rsqrt',
    'abs',
    'ceil',
    'floor',
    'cos',
    'tan',
    'acos',
    'sin',
    'sinh',
    'asin',
    'cosh',
    'round',
    'reciprocal',
    'square',
    'acosh',
    'asinh',
    'atanh',
]

# In-place variants (trailing underscore convention) generated via
# generate_inplace_fn.
__inplace_unary_func__ = [
    'exp_',
    'sqrt_',
    'rsqrt_',
    'ceil_',
    'floor_',
    'round_',
    'reciprocal_',
]

__all__ = []

# NOTE: a historical ``for _OP in set(__all__): ...`` loop sat here; since
# ``__all__`` is empty at this point it was a no-op and has been removed.

# It is a hot fix in some unittest using:
#   fluid.layers.scale(x=x, scale=10.0, out=out_var)
# e.g.: test_program_code.py, test_dist_train.py
globals()['_scale'] = generate_layer_fn('scale')

globals()['_elementwise_div'] = generate_layer_fn('elementwise_div')

# Export every generated op name from this module.
__all__ += __activations_noattr__
__all__ += __unary_func__
__all__ += __inplace_unary_func__

# Bind a generated function into this module's namespace for every op name.
# NOTE: the original loops also computed ``_new_OP`` from
# ``__deprecated_func_name__`` but never used it; that dead lookup is removed.
for _OP in set(__activations_noattr__):
    globals()[_OP] = generate_activation_fn(_OP)

for _OP in set(__unary_func__):
    globals()[_OP] = generate_activation_fn(_OP)

for _OP in set(__inplace_unary_func__):
    globals()[_OP] = generate_inplace_fn(_OP)

# Attach runnable example snippets to the generated functions' docstrings.
add_sample_code(
    globals()["sigmoid"],
    r"""
Examples:
    .. code-block:: python

        import paddle
        import paddle.nn.functional as F

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = F.sigmoid(x)
        print(out)
        # [0.40131234 0.450166   0.52497919 0.57444252]

""",
)

add_sample_code(
    globals()["silu"],
    r"""
Examples:
    .. code-block:: python

        import paddle
        import paddle.nn.functional as F

        x = paddle.to_tensor([1.0, 2.0, 3.0, 4.0])
        out = F.silu(x)
        print(out)
        # [ 0.7310586 1.7615942 2.8577224, 3.9280552 ]

""",
)

add_sample_code(
    globals()["logsigmoid"],
    r"""
Examples:
    .. code-block:: python

        import paddle
        import paddle.nn.functional as F

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = F.log_sigmoid(x)
        print(out)
        # [-0.91301525 -0.79813887 -0.64439666 -0.55435524]

""",
)

add_sample_code(
    globals()["exp"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.exp(x)
        print(out)
        # [0.67032005 0.81873075 1.10517092 1.34985881]

""",
)
# Attach runnable example snippets to the generated functions' docstrings.
add_sample_code(
    globals()["expm1"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.expm1(x)
        print(out)
        # [-0.32967997, -0.18126924,  0.10517092,  0.34985882]

""",
)

add_sample_code(
    globals()["tanh"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.tanh(x)
        print(out)
        # [-0.37994896 -0.19737532  0.09966799  0.29131261]

""",
)

add_sample_code(
    globals()["atan"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.atan(x)
        print(out)
        # [-0.38050638 -0.19739556  0.09966865  0.29145679]

""",
)

add_sample_code(
    globals()["tanh_shrink"],
    r"""
Examples:
    .. code-block:: python

        import paddle
        import paddle.nn.functional as F

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = F.tanhshrink(x)
        print(out)
        # [-0.020051, -0.00262468, 0.000332005, 0.00868739]

""",
)
# Attach runnable example snippets to the generated functions' docstrings.
add_sample_code(
    globals()["sqrt"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([0.1, 0.2, 0.3, 0.4])
        out = paddle.sqrt(x)
        print(out)
        # [0.31622777 0.4472136  0.54772256 0.63245553]

""",
)

add_sample_code(
    globals()["rsqrt"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([0.1, 0.2, 0.3, 0.4])
        out = paddle.rsqrt(x)
        print(out)
        # [3.16227766 2.23606798 1.82574186 1.58113883]

""",
)

add_sample_code(
    globals()["abs"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.abs(x)
        print(out)
        # [0.4 0.2 0.1 0.3]

""",
)

add_sample_code(
    globals()["ceil"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.ceil(x)
        print(out)
        # [-0. -0.  1.  1.]

""",
)

add_sample_code(
    globals()["floor"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.floor(x)
        print(out)
        # [-1. -1.  0.  0.]

""",
)
# Attach runnable example snippets to the generated functions' docstrings.
add_sample_code(
    globals()["cos"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.cos(x)
        print(out)
        # [0.92106099 0.98006658 0.99500417 0.95533649]

""",
)

add_sample_code(
    globals()["tan"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.tan(x)
        print(out)
        # [-0.42279324, -0.20271005, 0.10033467, 0.30933627]

""",
)

add_sample_code(
    globals()["acos"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.acos(x)
        print(out)
        # [1.98231317 1.77215425 1.47062891 1.26610367]

""",
)

add_sample_code(
    globals()["sin"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.sin(x)
        print(out)
        # [-0.38941834 -0.19866933  0.09983342  0.29552021]

""",
)

add_sample_code(
    globals()["asin"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.asin(x)
        print(out)
        # [-0.41151685 -0.20135792  0.10016742  0.30469265]

""",
)
# Attach runnable example snippets to the generated functions' docstrings.
add_sample_code(
    globals()["cosh"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.cosh(x)
        print(out)
        # [1.08107237 1.02006676 1.00500417 1.04533851]

""",
)

add_sample_code(
    globals()["sinh"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.sinh(x)
        print(out)
        # [-0.41075233 -0.201336    0.10016675  0.30452029]

""",
)

add_sample_code(
    globals()["asinh"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.asinh(x)
        print(out)
        # [-0.39003533, -0.19869010,  0.09983408,  0.29567307]

""",
)

add_sample_code(
    globals()["acosh"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([1., 3., 4., 5.])
        out = paddle.acosh(x)
        print(out)
        # [0.        , 1.76274729, 2.06343699, 2.29243159]

""",
)

add_sample_code(
    globals()["atanh"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.atanh(x)
        print(out)
        # [-0.42364895, -0.20273256,  0.10033535,  0.30951962]

""",
)
481

482
add_sample_code(
483 484
    globals()["round"],
    r"""
485 486 487 488 489 490 491 492 493 494
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.5, -0.2, 0.6, 1.5])
        out = paddle.round(x)
        print(out)
        # [-1. -0.  1.  2.]

495 496
""",
)
497

498
add_sample_code(
499 500
    globals()["reciprocal"],
    r"""
501 502 503 504 505 506 507 508 509 510
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.reciprocal(x)
        print(out)
        # [-2.5        -5.         10.          3.33333333]

511 512
""",
)
513

514
add_sample_code(
515 516
    globals()["square"],
    r"""
517 518 519 520 521 522 523 524 525 526
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.square(x)
        print(out)
        # [0.16 0.04 0.01 0.09]

527 528
""",
)
529

530
add_sample_code(
531 532
    globals()["softplus"],
    r"""
533 534 535 536 537 538 539
Examples:
    .. code-block:: python

        import paddle
        import paddle.nn.functional as F

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
540
        out = F.softplus(x)
541 542 543
        print(out)
        # [0.513015, 0.598139, 0.744397, 0.854355]

544 545
""",
)
546

547
add_sample_code(
548 549
    globals()["softsign"],
    r"""
550 551 552 553 554 555 556
Examples:
    .. code-block:: python

        import paddle
        import paddle.nn.functional as F

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
557
        out = F.softsign(x)
558 559 560
        print(out)
        # [-0.285714, -0.166667, 0.0909091, 0.230769]

561 562
""",
)
563 564 565 566 567 568 569

__all__ += ['erf']

# Static-graph fallback implementation generated from the op registry.
_erf_ = generate_layer_fn('erf')


def erf(x, name=None):
    # Fast path: in dynamic-graph mode call the C++ op directly.
    if in_dygraph_mode():
        return _C_ops.erf(x)

    # Static-graph path: forward every non-None local (x, name) as a keyword
    # argument to the generated layer function. The snapshot of locals() is
    # taken before the loop, so the loop variable cannot pollute it.
    locals_var = locals().copy()
    kwargs = dict()
    for key, val in locals_var.items():
        if val is not None:
            kwargs[key] = val
    return _erf_(**kwargs)


# Public docstring for erf; assigned here (rather than inline) because the
# dygraph fast path above has no docstring of its own.
erf.__doc__ = r"""
:strong:`Erf Operator`
For more details, see `Error function <https://en.wikipedia.org/wiki/Error_function>`_.

Equation:
    ..  math::
        out = \frac{2}{\sqrt{\pi}} \int_{0}^{x}e^{- \eta^{2}}d\eta

Args:

    x (Tensor): The input tensor, it's data type should be float32, float64.
    name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.

Returns:

    Tensor: The output of Erf, dtype: float32 or float64, the same as the input, shape: the same as the input.

Examples:

    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.erf(x)
        print(out)
        # [-0.42839236 -0.22270259  0.11246292  0.32862676]
"""