diff --git a/paddle/fluid/operators/fused/fused_dropout_helper.h b/paddle/fluid/operators/fused/fused_dropout_helper.h
index 4ee01e058af1e68393c4f1133765dade01bdf6da..1e6be41315c61e44ba72dfd3ce7f6763d6945936 100644
--- a/paddle/fluid/operators/fused/fused_dropout_helper.h
+++ b/paddle/fluid/operators/fused/fused_dropout_helper.h
@@ -343,12 +343,12 @@ class FusedDropoutHelper {
 };
 
 template <typename T>
-struct PDDataTypeTraits {
+struct DataTypeTraits {
   using DataType = T;
 };
 
 template <>
-struct PDDataTypeTraits<phi::dtype::float16> {
+struct DataTypeTraits<phi::dtype::float16> {
   // Since LayerNormDirectCUDAFunctor register half type, we need to convert
   // phi::float16 to half.
   using DataType = half;
@@ -390,8 +390,8 @@ class FusedDropoutLayerNormHelper
                  OutType* out,
                  LayerNormParamType<T>* mean,
                  LayerNormParamType<T>* variance) {
-    using InDataType = typename PDDataTypeTraits<InType>::DataType;
-    using OutDataType = typename PDDataTypeTraits<OutType>::DataType;
+    using InDataType = typename DataTypeTraits<InType>::DataType;
+    using OutDataType = typename DataTypeTraits<OutType>::DataType;
 
     phi::LayerNormDirectCUDAFunctor<InDataType, LayerNormParamType<T>>
         layer_norm;