# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

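# Core layer APIs re-exported from the private .common module (fully connected,
# normalization, and convolution layers, among others).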
from .common import fc  # noqa: F401
from .common import batch_norm  # noqa: F401
from .common import instance_norm  # noqa: F401
from .common import data_norm  # noqa: F401
from .common import continuous_value_model  # noqa: F401
from .common import group_norm  # noqa: F401
from .common import deform_conv2d  # noqa: F401
from .common import conv2d  # noqa: F401
from .common import conv3d  # noqa: F401
from .common import conv2d_transpose  # noqa: F401
from .common import conv3d_transpose  # noqa: F401
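# Control-flow constructs for building conditional branches and loops in a static graph.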
from .control_flow import (
    case,
    while_loop,
    switch_case,
)
from .common import bilinear_tensor_product  # noqa: F401
from .common import py_func  # noqa: F401
from .common import row_conv  # noqa: F401
from .common import spectral_norm  # noqa: F401
from ...tensor.creation import create_parameter  # noqa: F401
from .loss import nce  # noqa: F401
from .common import prelu  # noqa: F401
from .common import layer_norm  # noqa: F401


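# Embedding and RNN helpers still sourced from the legacy ``fluid`` namespace.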
from ...fluid.input import embedding  # noqa: F401
from ...fluid.contrib.layers import sparse_embedding  # noqa: F401
from ...fluid.layers import StaticRNN  # noqa: F401

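# Sequence (LoD-based) operators re-exported from the .sequence_lod module.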
from .sequence_lod import sequence_conv  # noqa: F401
from .sequence_lod import sequence_softmax  # noqa: F401
from .sequence_lod import sequence_pool  # noqa: F401
from .sequence_lod import sequence_concat  # noqa: F401
from .sequence_lod import sequence_first_step  # noqa: F401
from .sequence_lod import sequence_last_step  # noqa: F401
from .sequence_lod import sequence_slice  # noqa: F401
from .sequence_lod import sequence_expand  # noqa: F401
from .sequence_lod import sequence_expand_as  # noqa: F401
from .sequence_lod import sequence_pad  # noqa: F401
from .sequence_lod import sequence_unpad  # noqa: F401
from .sequence_lod import sequence_reshape  # noqa: F401
from .sequence_lod import sequence_scatter  # noqa: F401
from .sequence_lod import sequence_enumerate  # noqa: F401
from .sequence_lod import sequence_reverse  # noqa: F401

from .control_flow import cond

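# Names exported as the public API of this package.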
__all__ = [  # noqa
    'fc',
    'batch_norm',
    'bilinear_tensor_product',
    'embedding',
    'case',
    'cond',
    'conv2d',
    'conv2d_transpose',
    'conv3d',
    'conv3d_transpose',
    'data_norm',
    'deform_conv2d',
    'group_norm',
    'instance_norm',
    'layer_norm',
    'nce',
    'prelu',
    'py_func',
    'row_conv',
    'spectral_norm',
    'switch_case',
    'while_loop',
    'sparse_embedding',
    'sequence_conv',
    'sequence_softmax',
    'sequence_pool',
    'sequence_concat',
    'sequence_first_step',
    'sequence_last_step',
    'sequence_slice',
    'sequence_expand',
    'sequence_expand_as',
    'sequence_pad',
    'sequence_unpad',
    'sequence_reshape',
    'sequence_scatter',
    'sequence_enumerate',
    'sequence_reverse',
    'StaticRNN',
]
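
# A minimal usage sketch, assuming this package is exposed as ``paddle.static.nn``
# (an assumption based on the relative imports above; adjust if the import path differs):
#
#     import paddle
#
#     paddle.enable_static()
#     x = paddle.static.data(name='x', shape=[None, 128], dtype='float32')
#     out = paddle.static.nn.fc(x, size=10)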