# Build rules for fused operators (paddle/fluid/operators/fused).
include(operators)

if(WITH_UNITY_BUILD)
    # Load Unity Build rules for operators in paddle/fluid/operators/fused.
    include(unity_build_rule.cmake)
endif()
# Register all fused operators except the ones listed below; the excluded
# operators require platform- or cuDNN-version-specific handling and are
# registered conditionally further down in this file.
register_operators(EXCLUDES
    fused_bn_activation_op
    conv_fusion_op
    fusion_transpose_flatten_concat_op
    fusion_conv_inception_op
    fused_fc_elementwise_layernorm_op
    multihead_matmul_op
    skip_layernorm_op
    fused_embedding_eltwise_layernorm_op
    fusion_group_op
    fusion_gru_op
    fusion_lstm_op
    fused_bn_add_activation_op
    fused_attention_op
    fused_feedforward_op
    fused_transformer_op)
# fusion_gru_op and fusion_lstm_op do not have CUDA kernels, so they are
# registered unconditionally as CPU-only operators.
op_library(fusion_gru_op)
op_library(fusion_lstm_op)
file(APPEND ${pybind_file} "USE_CPU_ONLY_OP(fusion_gru);\n")
file(APPEND ${pybind_file} "USE_CPU_ONLY_OP(fusion_lstm);\n")
if(WITH_GPU OR WITH_ROCM)
    # fused_bn_activation_op needs cuDNN 7.4.1 or above.
    # HIP does not support bn+act fusion in MIOpen.
    if((NOT WITH_ROCM) AND (NOT ${CUDNN_VERSION} VERSION_LESS 7401))
        op_library(fused_bn_activation_op)
        file(APPEND ${pybind_file} "USE_CUDA_ONLY_OP(fused_batch_norm_act);\n")
    endif()
    # conv_fusion_op needs cuDNN 7 or above.
    if(NOT ${CUDNN_VERSION} VERSION_LESS 7100)
        op_library(conv_fusion_op)
        file(APPEND ${pybind_file} "USE_CUDA_ONLY_OP(conv2d_fusion);\n")
    endif()
    # fusion_transpose_flatten_concat_op
    # HIP does not support cudnnTransformTensor.
    if(NOT WITH_ROCM)
        op_library(fusion_transpose_flatten_concat_op)
        file(APPEND ${pybind_file} "USE_CUDA_ONLY_OP(fusion_transpose_flatten_concat);\n")
    endif()
    # fusion_conv_inception_op needs cuDNN 7 or above.
    # HIP does not support cudnnConvolutionBiasActivationForward.
    if((NOT WITH_ROCM) AND (NOT ${CUDNN_VERSION} VERSION_LESS 7100))
        op_library(fusion_conv_inception_op)
        file(APPEND ${pybind_file} "USE_CUDA_ONLY_OP(conv2d_inception_fusion);\n")
    endif()
    # fused_fc_elementwise_layernorm_op
    op_library(fused_fc_elementwise_layernorm_op)
    file(APPEND ${pybind_file} "USE_CUDA_ONLY_OP(fused_fc_elementwise_layernorm);\n")
    # multihead_matmul_op
    op_library(multihead_matmul_op)
    file(APPEND ${pybind_file} "USE_CUDA_ONLY_OP(multihead_matmul);\n")
    op_library(skip_layernorm_op)
    file(APPEND ${pybind_file} "USE_CUDA_ONLY_OP(skip_layernorm);\n")
    op_library(fused_embedding_eltwise_layernorm_op)
    file(APPEND ${pybind_file} "USE_CUDA_ONLY_OP(fused_embedding_eltwise_layernorm);\n")
    # fusion_group_op depends on runtime device-code generation, which is not
    # available on macOS or Windows.
    if(NOT APPLE AND NOT WIN32)
        op_library(fusion_group_op DEPS device_code)
        file(APPEND ${pybind_file} "USE_CUDA_ONLY_OP(fusion_group);\n")
        cc_test(test_fusion_group_op SRCS fusion_group_op_test.cc DEPS fusion_group_op)
    endif()
    # fused_bn_add_activation_op needs cuDNN 7.4.1 or above.
    # HIP does not support bn+act fusion in MIOpen.
    if((NOT WITH_ROCM) AND (NOT ${CUDNN_VERSION} VERSION_LESS 7401))
        op_library(fused_bn_add_activation_op)
        file(APPEND ${pybind_file} "USE_CUDA_ONLY_OP(fused_bn_add_activation);\n")
    endif()
    # fused_dropout kernels and the ops built on them only support CUDA.
    if(NOT WITH_ROCM)
        nv_test(test_fused_residual_dropout_bias SRCS fused_residual_dropout_bias_test.cu DEPS tensor op_registry dropout_op layer_norm_op device_context generator memory)
        nv_test(test_fused_dropout_act_bias SRCS fused_dropout_act_bias_test.cu DEPS tensor op_registry dropout_op layer_norm_op device_context generator memory)
        nv_test(test_fused_layernorm_residual_dropout_bias SRCS fused_layernorm_residual_dropout_bias_test.cu DEPS tensor op_registry dropout_op layer_norm_op device_context generator memory)
        # fused_feedforward_op
        op_library(fused_feedforward_op)
        file(APPEND ${pybind_file} "USE_CUDA_ONLY_OP(fused_feedforward);\n")
        # fused_attention_op
        op_library(fused_attention_op)
        file(APPEND ${pybind_file} "USE_CUDA_ONLY_OP(fused_attention);\n")
    endif()
endif()