#   Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from .layer_function_generator import (
    generate_layer_fn,
    generate_activation_fn,
    generate_inplace_fn,
    add_sample_code,
)
from ..fluid.data_feeder import check_variable_and_dtype
from ..fluid.framework import in_dygraph_mode, _in_legacy_dygraph
from ..framework import LayerHelper
from .. import _C_ops, _legacy_C_ops


# Maps old op names to their current public API names; consulted when
# generating wrapper functions so deprecated spellings keep working.
__deprecated_func_name__ = {
    'tanh_shrink': 'tanhshrink',
    'logsigmoid': 'log_sigmoid',
}

# Activation ops that take no extra attributes; wrappers are generated below.
__activations_noattr__ = [
    'sigmoid',
    'silu',
    'logsigmoid',
    'tanh_shrink',
    'softplus',
    'softsign',
    'tanh',
]

# Element-wise unary math ops exposed directly under `paddle.*`.
__unary_func__ = [
    'expm1',
    'atan',
    'sqrt',
    'rsqrt',
    'abs',
    'ceil',
    'floor',
    'cos',
    'tan',
    'acos',
    'sin',
    'sinh',
    'asin',
    'cosh',
    'round',
    'reciprocal',
    'square',
    'acosh',
    'asinh',
    'atanh',
]

# In-place (trailing underscore) variants of the unary ops above.
__inplace_unary_func__ = [
    'exp_',
    'sqrt_',
    'rsqrt_',
    'ceil_',
    'floor_',
    'round_',
    'reciprocal_',
]

__all__ = []

# `__all__` is still empty at this point, so this loop is a no-op placeholder
# kept for parity with older revisions of this file.
for _OP in set(__all__):
    globals()[_OP] = generate_layer_fn(_OP)

# It is a hot fix in some unittest using:
#   fluid.layers.scale(x=x, scale=10.0, out=out_var)
# e.g.: test_program_code.py, test_dist_train.py
globals()['_scale'] = generate_layer_fn('scale')

globals()['_elementwise_div'] = generate_layer_fn('elementwise_div')

__all__ += __activations_noattr__
__all__ += __unary_func__
__all__ += __inplace_unary_func__

# Both lists go through the same activation-function generator, so a single
# loop over their union suffices.  (The previous per-loop `_new_OP`
# deprecated-name lookup was dead code: its result was never used.)
for _OP in set(__activations_noattr__) | set(__unary_func__):
    globals()[_OP] = generate_activation_fn(_OP)

# In-place variants use the dedicated in-place generator.
for _OP in set(__inplace_unary_func__):
    globals()[_OP] = generate_inplace_fn(_OP)

add_sample_code(
    globals()["sigmoid"],
    r"""
Examples:
    .. code-block:: python

        import paddle
        import paddle.nn.functional as F

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = F.sigmoid(x)
        print(out)
        # [0.40131234 0.450166   0.52497919 0.57444252]

""",
)

add_sample_code(
    globals()["silu"],
    r"""
Examples:
    .. code-block:: python

        import paddle
        import paddle.nn.functional as F

        x = paddle.to_tensor([1.0, 2.0, 3.0, 4.0])
        out = F.silu(x)
        print(out)
        # [ 0.7310586 1.7615942 2.8577224, 3.9280552 ]

""",
)

add_sample_code(
    globals()["logsigmoid"],
    r"""
Examples:
    .. code-block:: python

        import paddle
        import paddle.nn.functional as F

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = F.log_sigmoid(x)
        print(out)
        # [-0.91301525 -0.79813887 -0.64439666 -0.55435524]

""",
)

add_sample_code(
    globals()["expm1"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.expm1(x)
        print(out)
        # [-0.32967997, -0.18126924,  0.10517092,  0.34985882]

""",
)

add_sample_code(
    globals()["tanh"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.tanh(x)
        print(out)
        # [-0.37994896 -0.19737532  0.09966799  0.29131261]

""",
)

add_sample_code(
    globals()["atan"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.atan(x)
        print(out)
        # [-0.38050638 -0.19739556  0.09966865  0.29145679]

""",
)

add_sample_code(
    globals()["tanh_shrink"],
    r"""
Examples:
    .. code-block:: python

        import paddle
        import paddle.nn.functional as F

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = F.tanhshrink(x)
        print(out)
        # [-0.020051, -0.00262468, 0.000332005, 0.00868739]

""",
)

add_sample_code(
    globals()["sqrt"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([0.1, 0.2, 0.3, 0.4])
        out = paddle.sqrt(x)
        print(out)
        # [0.31622777 0.4472136  0.54772256 0.63245553]

""",
)

add_sample_code(
    globals()["rsqrt"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([0.1, 0.2, 0.3, 0.4])
        out = paddle.rsqrt(x)
        print(out)
        # [3.16227766 2.23606798 1.82574186 1.58113883]

""",
)

add_sample_code(
    globals()["abs"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.abs(x)
        print(out)
        # [0.4 0.2 0.1 0.3]

""",
)

add_sample_code(
    globals()["ceil"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.ceil(x)
        print(out)
        # [-0. -0.  1.  1.]

""",
)

add_sample_code(
    globals()["floor"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.floor(x)
        print(out)
        # [-1. -1.  0.  0.]

""",
)

add_sample_code(
    globals()["cos"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.cos(x)
        print(out)
        # [0.92106099 0.98006658 0.99500417 0.95533649]

""",
)

add_sample_code(
    globals()["tan"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.tan(x)
        print(out)
        # [-0.42279324, -0.20271005, 0.10033467, 0.30933627]

""",
)

add_sample_code(
    globals()["acos"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.acos(x)
        print(out)
        # [1.98231317 1.77215425 1.47062891 1.26610367]

""",
)

add_sample_code(
    globals()["sin"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.sin(x)
        print(out)
        # [-0.38941834 -0.19866933  0.09983342  0.29552021]

""",
)

add_sample_code(
    globals()["asin"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.asin(x)
        print(out)
        # [-0.41151685 -0.20135792  0.10016742  0.30469265]

""",
)

add_sample_code(
    globals()["cosh"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.cosh(x)
        print(out)
        # [1.08107237 1.02006676 1.00500417 1.04533851]

""",
)

add_sample_code(
    globals()["sinh"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.sinh(x)
        print(out)
        # [-0.41075233 -0.201336    0.10016675  0.30452029]

""",
)

add_sample_code(
    globals()["asinh"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.asinh(x)
        print(out)
        # [-0.39003533, -0.19869010,  0.09983408,  0.29567307]

""",
)

add_sample_code(
    globals()["acosh"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([1., 3., 4., 5.])
        out = paddle.acosh(x)
        print(out)
        # [0.        , 1.76274729, 2.06343699, 2.29243159]

""",
)

add_sample_code(
    globals()["atanh"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.atanh(x)
        print(out)
        # [-0.42364895, -0.20273256,  0.10033535,  0.30951962]

""",
)

add_sample_code(
    globals()["round"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.5, -0.2, 0.6, 1.5])
        out = paddle.round(x)
        print(out)
        # [-1. -0.  1.  2.]

""",
)

add_sample_code(
    globals()["reciprocal"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.reciprocal(x)
        print(out)
        # [-2.5        -5.         10.          3.33333333]

""",
)

add_sample_code(
    globals()["square"],
    r"""
Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.square(x)
        print(out)
        # [0.16 0.04 0.01 0.09]

""",
)

add_sample_code(
    globals()["softplus"],
    r"""
Examples:
    .. code-block:: python

        import paddle
        import paddle.nn.functional as F

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = F.softplus(x)
        print(out)
        # [0.513015, 0.598139, 0.744397, 0.854355]

""",
)

add_sample_code(
    globals()["softsign"],
    r"""
Examples:
    .. code-block:: python

        import paddle
        import paddle.nn.functional as F

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = F.softsign(x)
        print(out)
        # [-0.285714, -0.166667, 0.0909091, 0.230769]

""",
)

def exp(x, name=None):
    """

    Computes exp of x element-wise with a natural number `e` as the base.

    .. math::
        out = e^x

    Args:
        x (Tensor): Input of Exp operator, an N-D Tensor, with data type float32, float64 or float16.
        name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.

    Returns:
        Tensor. Output of Exp operator, a Tensor with shape same as input.

    Examples:
        .. code-block:: python

            import paddle

            x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
            out = paddle.exp(x)
            print(out)
            # [0.67032005 0.81873075 1.10517092 1.34985881]

    """
    # Dynamic-graph fast paths: dispatch straight to the C++ kernels.
    if in_dygraph_mode():
        return _C_ops.exp(x)
    if _in_legacy_dygraph():
        return _legacy_C_ops.exp(x)

    # Static graph: validate dtype, then append an `exp` op to the program.
    # NOTE: the accepted dtypes here are wider (int/complex) than the ones
    # advertised in the docstring above.
    check_variable_and_dtype(
        x,
        'x',
        [
            'int32',
            'int64',
            'float16',
            'float32',
            'float64',
            'complex64',
            'complex128',
        ],
        'exp',
    )
    # NOTE(review): `**locals()` forwards exactly {x, name} at this point —
    # do not introduce additional locals above this call.
    helper = LayerHelper('exp', **locals())
    out = helper.create_variable_for_type_inference(dtype=x.dtype)
    helper.append_op(type='exp', inputs={"X": x}, outputs={"Out": out})
    return out


__all__ += ['erf']

# Static-graph fallback generated from the `erf` op proto; used by `erf`
# below when not running in dynamic-graph mode.
_erf_ = generate_layer_fn('erf')


def erf(x, name=None):
    # Dynamic graph: call the C++ kernel directly.  (The public docstring is
    # attached separately via `erf.__doc__` after this definition.)
    if in_dygraph_mode():
        return _C_ops.erf(x)

    # Static graph: forward only the non-None arguments to the generated
    # layer function.
    locals_var = locals().copy()
    kwargs = dict()
    for name, val in locals_var.items():
        if val is not None:
            kwargs[name] = val
    return _erf_(**kwargs)


erf.__doc__ = r"""
:strong:`Erf Operator`
For more details, see `Error function <https://en.wikipedia.org/wiki/Error_function>`_.

Equation:
    ..  math::
        out = \frac{2}{\sqrt{\pi}} \int_{0}^{x}e^{- \eta^{2}}d\eta

Args:

    x (Tensor): The input tensor, it's data type should be float32, float64.
    name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.

Returns:

    Tensor: The output of Erf, dtype: float32 or float64, the same as the input, shape: the same as the input.

Examples:

    .. code-block:: python

        import paddle

        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
        out = paddle.erf(x)
        print(out)
        # [-0.42839236 -0.22270259  0.11246292  0.32862676]
"""