diff --git a/paddle/fluid/framework/ir/graph_pattern_detector.cc b/paddle/fluid/framework/ir/graph_pattern_detector.cc index d9c351e288891379757ae93338e96fc5a051e374..6591ede1f652c6d86b8439dbfa85da576658dd54 100644 --- a/paddle/fluid/framework/ir/graph_pattern_detector.cc +++ b/paddle/fluid/framework/ir/graph_pattern_detector.cc @@ -2819,6 +2819,7 @@ PDNode *patterns::Bfloat16Placement::operator()( "layer_norm", "matmul", "matmul_v2", + "fused_matmul", "pool2d", "prelu", "relu", diff --git a/paddle/fluid/framework/ir/mkldnn/matmul_activation_mkldnn_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/matmul_activation_mkldnn_fuse_pass.cc index 61bd888715c702fe8974dc93a36626a65a715497..76529d3d1a32a1ab878bfcc55269b313238c62a0 100644 --- a/paddle/fluid/framework/ir/mkldnn/matmul_activation_mkldnn_fuse_pass.cc +++ b/paddle/fluid/framework/ir/mkldnn/matmul_activation_mkldnn_fuse_pass.cc @@ -146,50 +146,6 @@ MatmulActivationMkldnnFusePass::MatmulActivationMkldnnFusePass() { .End() .AddAttr("trans_y") .IsType<bool>() - .End() - .AddAttr("matmul_alpha") - .IsType<float>() - .IsOptional() - .End() - .AddAttr("fuse_activation") - .IsType<std::string>() - .IsOptional() - .End() - .AddAttr("fuse_alpha") - .IsType<float>() - .IsOptional() - .End() - .AddAttr("fuse_beta") - .IsType<float>() - .IsOptional() - .End() - .AddAttr("fused_output_scale") - .IsType<float>() - .IsOptional() - .End() - .AddAttr("fused_reshape_X") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_transpose_X") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_reshape_Y") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_transpose_Y") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_reshape_Out") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_transpose_Out") - .IsType<std::vector<int>>() - .IsOptional() .End(); AddOpCompat(OpCompat("abs")) diff --git a/paddle/fluid/framework/ir/mkldnn/matmul_elementwise_add_mkldnn_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/matmul_elementwise_add_mkldnn_fuse_pass.cc index 
680600a403251548dc47a416d2786653e19bf630..4e6c64ca78905553a018950c8452eace1019b239 100644 --- a/paddle/fluid/framework/ir/mkldnn/matmul_elementwise_add_mkldnn_fuse_pass.cc +++ b/paddle/fluid/framework/ir/mkldnn/matmul_elementwise_add_mkldnn_fuse_pass.cc @@ -150,50 +150,6 @@ MatmulElementwiseAddMKLDNNFusePass::MatmulElementwiseAddMKLDNNFusePass() { .End() .AddAttr("trans_y") .IsType<bool>() - .End() - .AddAttr("matmul_alpha") - .IsType<float>() - .IsOptional() - .End() - .AddAttr("fuse_activation") - .IsType<std::string>() - .IsOptional() - .End() - .AddAttr("fuse_alpha") - .IsType<float>() - .IsOptional() - .End() - .AddAttr("fuse_beta") - .IsType<float>() - .IsOptional() - .End() - .AddAttr("fused_output_scale") - .IsType<float>() - .IsOptional() - .End() - .AddAttr("fused_reshape_X") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_transpose_X") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_reshape_Y") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_transpose_Y") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_reshape_Out") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_transpose_Out") - .IsType<std::vector<int>>() - .IsOptional() .End(); AddOpCompat(OpCompat("elementwise_add")) diff --git a/paddle/fluid/framework/ir/mkldnn/matmul_transpose_reshape_mkldnn_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/matmul_transpose_reshape_mkldnn_fuse_pass.cc index 779c39834c6e3a1c04bd60610208d2ae56fbf252..5bba8606f46dbad62b2aed006ee4353d327faceb 100644 --- a/paddle/fluid/framework/ir/mkldnn/matmul_transpose_reshape_mkldnn_fuse_pass.cc +++ b/paddle/fluid/framework/ir/mkldnn/matmul_transpose_reshape_mkldnn_fuse_pass.cc @@ -174,50 +174,6 @@ MatmulTransposeReshapeMKLDNNPass::MatmulTransposeReshapeMKLDNNPass() { .End() .AddAttr("trans_y") .IsType<bool>() - .End() - .AddAttr("matmul_alpha") - .IsType<float>() - .IsOptional() - .End() - .AddAttr("fuse_activation") - .IsType<std::string>() - .IsOptional() - .End() - .AddAttr("fuse_alpha") - .IsType<float>() - .IsOptional() - .End() - .AddAttr("fuse_beta") - .IsType<float>() - .IsOptional() - 
.End() - .AddAttr("fused_output_scale") - .IsType<float>() - .IsOptional() - .End() - .AddAttr("fused_reshape_X") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_transpose_X") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_reshape_Y") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_transpose_Y") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_reshape_Out") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_transpose_Out") - .IsType<std::vector<int>>() - .IsOptional() .End(); AddOpCompat(OpCompat("transpose2")) diff --git a/paddle/fluid/framework/ir/mkldnn/reshape_transpose_matmul_mkldnn_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/reshape_transpose_matmul_mkldnn_fuse_pass.cc index 508cad94e8136eca50afa0e6c27503aa9335511c..487099e94e4ff827019006a084d0d9772f129e44 100644 --- a/paddle/fluid/framework/ir/mkldnn/reshape_transpose_matmul_mkldnn_fuse_pass.cc +++ b/paddle/fluid/framework/ir/mkldnn/reshape_transpose_matmul_mkldnn_fuse_pass.cc @@ -265,50 +265,6 @@ ReshapeTransposeMatmulMkldnnFusePass::ReshapeTransposeMatmulMkldnnFusePass() { .End() .AddAttr("trans_y") .IsType<bool>() - .End() - .AddAttr("matmul_alpha") - .IsType<float>() - .IsOptional() - .End() - .AddAttr("fuse_activation") - .IsType<std::string>() - .IsOptional() - .End() - .AddAttr("fuse_alpha") - .IsType<float>() - .IsOptional() - .End() - .AddAttr("fuse_beta") - .IsType<float>() - .IsOptional() - .End() - .AddAttr("fused_output_scale") - .IsType<float>() - .IsOptional() - .End() - .AddAttr("fused_reshape_X") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_transpose_X") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_reshape_Y") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_transpose_Y") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_reshape_Out") - .IsType<std::vector<int>>() - .IsOptional() - .End() - .AddAttr("fused_transpose_Out") - .IsType<std::vector<int>>() - .IsOptional() .End(); }