Unverified commit 535ddd3d, authored by Ruibiao Chen, committed by GitHub

Fix conflict of CppTypeToDataType (#51919)

Parent f06dd08d
@@ -68,12 +68,12 @@ class AttnMatMul {
             "The output (= input * weight) is expected to be nullptr or the "
             "same as bias_out when fused is true."));
-    auto fused_impl = phi::funcs::MatmulPlanner(
-        vectorize(input->dims()),
-        vectorize(weight->dims()),
-        transA_,
-        transB_,
-        paddle::experimental::CppTypeToDataType<T>::Type(),
-        phi::funcs::MatmulFusedType::kMatmulBias,
-        static_cast<const void*>(bias->data<T>()),
-        nullptr);
+    auto fused_impl =
+        phi::funcs::MatmulPlanner(vectorize(input->dims()),
+                                  vectorize(weight->dims()),
+                                  transA_,
+                                  transB_,
+                                  phi::CppTypeToDataType<T>::Type(),
+                                  phi::funcs::MatmulFusedType::kMatmulBias,
+                                  static_cast<const void*>(bias->data<T>()),
+                                  nullptr);
@@ -96,12 +96,12 @@ class FusedGemmEpilogueKernel : public framework::OpKernel<T> {
             << ", activation=" << activation << ", fused_type=" << fused_type
             << ", reserve_space=" << reserve_space;
-    auto fused_impl = phi::funcs::MatmulPlanner(
-        vectorize(x->dims()),
-        vectorize(y->dims()),
-        trans_x,
-        trans_y,
-        paddle::experimental::CppTypeToDataType<T>::Type(),
-        fused_type,
-        static_cast<const void*>(bias->data<T>()),
-        reserve_data);
+    auto fused_impl =
+        phi::funcs::MatmulPlanner(vectorize(x->dims()),
+                                  vectorize(y->dims()),
+                                  trans_x,
+                                  trans_y,
+                                  phi::CppTypeToDataType<T>::Type(),
+                                  fused_type,
+                                  static_cast<const void*>(bias->data<T>()),
+                                  reserve_data);
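For context on the trait being renamed: CppTypeToDataType<T>::Type() maps a C++ scalar type to the corresponding phi::DataType enum value at compile time, and this commit simply points the call sites at the phi:: version instead of the paddle::experimental:: alias that conflicted with it. Below is a minimal sketch of how such a type-to-enum trait can be specialized; the enum members, specializations, and the phi_sketch namespace are illustrative assumptions, not Paddle's actual definitions.

// Hedged sketch of the CppTypeToDataType<T>::Type() pattern used at the call
// sites in the diff above. Enum members and specializations are illustrative
// assumptions, not Paddle's real headers.
#include <cstdint>

namespace phi_sketch {

enum class DataType { UNDEFINED, FLOAT32, FLOAT64, INT32, INT64 };

// Primary template: C++ types without a specialization map to UNDEFINED.
template <typename T>
struct CppTypeToDataType {
  static constexpr DataType Type() { return DataType::UNDEFINED; }
};

// Explicit specializations map each supported scalar type to its enum value.
template <>
struct CppTypeToDataType<float> {
  static constexpr DataType Type() { return DataType::FLOAT32; }
};

template <>
struct CppTypeToDataType<double> {
  static constexpr DataType Type() { return DataType::FLOAT64; }
};

template <>
struct CppTypeToDataType<int64_t> {
  static constexpr DataType Type() { return DataType::INT64; }
};

}  // namespace phi_sketch

// Usage mirrors the call sites in the diff: phi::CppTypeToDataType<T>::Type()
static_assert(phi_sketch::CppTypeToDataType<float>::Type() ==
                  phi_sketch::DataType::FLOAT32,
              "float maps to FLOAT32");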