Unverified commit 535ddd3d authored by Ruibiao Chen, committed by GitHub

Fix conflict of CppTypeToDataType (#51919)

Parent commit: f06dd08d
......@@ -68,15 +68,15 @@ class AttnMatMul {
"The output (= input * weight) is expected to be nullptr or the "
"same as bias_out when fused is true."));
auto fused_impl = phi::funcs::MatmulPlanner(
vectorize(input->dims()),
vectorize(weight->dims()),
transA_,
transB_,
paddle::experimental::CppTypeToDataType<T>::Type(),
phi::funcs::MatmulFusedType::kMatmulBias,
static_cast<const void*>(bias->data<T>()),
nullptr);
auto fused_impl =
phi::funcs::MatmulPlanner(vectorize(input->dims()),
vectorize(weight->dims()),
transA_,
transB_,
phi::CppTypeToDataType<T>::Type(),
phi::funcs::MatmulFusedType::kMatmulBias,
static_cast<const void*>(bias->data<T>()),
nullptr);
phi::funcs::MatmulWithCublasLt<T>::Run(dev_ctx_,
input->data<T>(),
weight->data<T>(),
......
......@@ -96,15 +96,15 @@ class FusedGemmEpilogueKernel : public framework::OpKernel<T> {
<< ", activation=" << activation << ", fused_type=" << fused_type
<< ", reserve_space=" << reserve_space;
auto fused_impl = phi::funcs::MatmulPlanner(
vectorize(x->dims()),
vectorize(y->dims()),
trans_x,
trans_y,
paddle::experimental::CppTypeToDataType<T>::Type(),
fused_type,
static_cast<const void*>(bias->data<T>()),
reserve_data);
auto fused_impl =
phi::funcs::MatmulPlanner(vectorize(x->dims()),
vectorize(y->dims()),
trans_x,
trans_y,
phi::CppTypeToDataType<T>::Type(),
fused_type,
static_cast<const void*>(bias->data<T>()),
reserve_data);
phi::funcs::MatmulWithCublasLt<T>::Run(dev_ctx,
x->data<T>(),
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
To comment, please register or sign in.