# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

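# Common layer builders re-exported from .common; the `# noqa: F401` markers
# silence the unused-import lint for names that are only re-exported here.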
from .common import fc  # noqa: F401
from .common import batch_norm  # noqa: F401
from .common import instance_norm  # noqa: F401
from .common import data_norm  # noqa: F401
from .common import continuous_value_model  # noqa: F401
from .common import group_norm  # noqa: F401
from .common import deform_conv2d  # noqa: F401
from .common import conv2d  # noqa: F401
from .common import conv3d  # noqa: F401
from .common import conv2d_transpose  # noqa: F401
from .common import conv3d_transpose  # noqa: F401
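# Control flow constructs; `cond` is imported from the same module further down.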
from .control_flow import (
    case,
    while_loop,
    switch_case,
)
from .common import bilinear_tensor_product  # noqa: F401
from .common import py_func  # noqa: F401
from .common import row_conv  # noqa: F401
from .common import spectral_norm  # noqa: F401
from ...tensor.creation import create_parameter  # noqa: F401
from .loss import nce  # noqa: F401
from .common import prelu  # noqa: F401
from .common import layer_norm  # noqa: F401


from .common import embedding  # noqa: F401
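# Re-exports that still come from the legacy fluid namespace.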
from ...fluid.contrib.layers import sparse_embedding  # noqa: F401
from ...fluid.layers import StaticRNN  # noqa: F401
from ...fluid.layers.nn import _pull_sparse  # noqa: F401
from ...fluid.layers.nn import _pull_sparse_v2  # noqa: F401

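# Sequence (LoD tensor) operators re-exported from .sequence_lod.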
from .sequence_lod import sequence_conv  # noqa: F401
from .sequence_lod import sequence_softmax  # noqa: F401
from .sequence_lod import sequence_pool  # noqa: F401
from .sequence_lod import sequence_concat  # noqa: F401
from .sequence_lod import sequence_first_step  # noqa: F401
from .sequence_lod import sequence_last_step  # noqa: F401
from .sequence_lod import sequence_slice  # noqa: F401
from .sequence_lod import sequence_expand  # noqa: F401
from .sequence_lod import sequence_expand_as  # noqa: F401
from .sequence_lod import sequence_pad  # noqa: F401
from .sequence_lod import sequence_unpad  # noqa: F401
from .sequence_lod import sequence_reshape  # noqa: F401
from .sequence_lod import sequence_scatter  # noqa: F401
from .sequence_lod import sequence_enumerate  # noqa: F401
from .sequence_lod import sequence_reverse  # noqa: F401

from .control_flow import cond

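# Public API of this package; underscore-prefixed helpers (e.g. _pull_sparse)
# and create_parameter are imported above but intentionally not listed here.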
__all__ = [  # noqa
    'fc',
    'batch_norm',
    'bilinear_tensor_product',
    'embedding',
    'case',
    'cond',
    'conv2d',
    'conv2d_transpose',
    'conv3d',
    'conv3d_transpose',
    'data_norm',
    'deform_conv2d',
    'group_norm',
    'instance_norm',
    'layer_norm',
    'nce',
    'prelu',
    'py_func',
    'row_conv',
    'spectral_norm',
    'switch_case',
    'while_loop',
    'sparse_embedding',
    'sequence_conv',
    'sequence_softmax',
    'sequence_pool',
    'sequence_concat',
    'sequence_first_step',
    'sequence_last_step',
    'sequence_slice',
    'sequence_expand',
    'sequence_expand_as',
    'sequence_pad',
    'sequence_unpad',
    'sequence_reshape',
    'sequence_scatter',
    'sequence_enumerate',
    'sequence_reverse',
    'StaticRNN',
]