Unverified  Commit 0c9f09b8  authored by Sławomir Siwek, committed by GitHub

Migrate old C++ unit tests to Python framework (#47006)

* softplus+activation

* fc + elementwise_add test refactored

* rename MKLDNN to OneDNN

* fc+activation tests refactored

* remove softplus ut

* whitespace

* whitespace

* codestyle

* codestyle

* add more cases to fc+act

* remove softplus+hard_sigmoid pass

* remove softplus + hard_sigmoid UT

* add approximate for gelu

* swish beta range

* new codestyle

* reduce number of tests
Parent dc85b393
@@ -399,13 +399,6 @@ if(WITH_MKLDNN)
test_params_quantization_mkldnn_pass SRCS
mkldnn/params_quantization_mkldnn_pass_tester.cc DEPS
params_quantization_mkldnn_pass)
cc_test_old(
test_fc_elementwise_add_mkldnn_fuse_pass SRCS
mkldnn/fc_elementwise_add_mkldnn_fuse_pass_tester.cc DEPS
fc_elementwise_add_mkldnn_fuse_pass pass_test_util)
cc_test_old(
test_fc_act_mkldnn_fuse_pass SRCS mkldnn/fc_act_mkldnn_fuse_pass_tester.cc
DEPS fc_act_mkldnn_fuse_pass pass_test_util)
cc_test_old(
test_batch_norm_act_fuse_pass SRCS
mkldnn/batch_norm_act_fuse_pass_tester.cc DEPS batch_norm_act_fuse_pass
......
// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <gtest/gtest.h>
#include "paddle/fluid/framework/ir/mkldnn/fc_act_mkldnn_fuse_pass.h"
#include "paddle/fluid/framework/ir/pass_test_util.h"
#include "paddle/fluid/framework/op_desc.h"
#include "paddle/fluid/framework/op_version_registry.h"
#include "paddle/fluid/framework/program_desc.h"
#include "paddle/fluid/platform/errors.h"
namespace paddle {
namespace framework {
namespace ir {
// ------------------------------ Test cases -----------------------------------
TEST(FuseFCActOneDNNPass, ThrowUseMkldnn) {
auto prog =
test::BuildProgramDesc({"x", "fc_y", "act_y"}, {"weights", "bias"});
test::CreateOp(&prog,
"fc",
{
{"Input", "x"},
{"W", "weights"},
{"Bias", "bias"},
},
{{"Out", "fc_y"}},
false);
test::CreateOp(&prog, "gelu", {{"X", "fc_y"}}, {{"Out", "act_y"}}, false);
Graph graph(prog);
// No fusion in this attribute configuration
constexpr int removed_nodes_count = 0;
EXPECT_THROW(
test::RunPassAndAssert(
&graph, "fc_act_mkldnn_fuse_pass", "x", "act_y", removed_nodes_count),
paddle::platform::EnforceNotMet);
}
TEST(FuseFCActOneDNNPass, FuseWithGeluTanh) {
auto prog =
test::BuildProgramDesc({"x", "fc_y", "act_y"}, {"weights", "bias"});
test::CreateOp(&prog,
"fc",
{
{"Input", "x"},
{"W", "weights"},
{"Bias", "bias"},
},
{{"Out", "fc_y"}});
auto* act_op =
test::CreateOp(&prog, "gelu", {{"X", "fc_y"}}, {{"Out", "act_y"}}, false);
act_op->SetAttr("approximate", true);
Graph graph(prog);
constexpr int removed_nodes_count = 2;
EXPECT_TRUE(test::RunPassAndAssert(
&graph, "fc_act_mkldnn_fuse_pass", "x", "act_y", removed_nodes_count));
EXPECT_TRUE(test::AssertOpsCount(graph, {{"fc", 1}, {"gelu", 0}}));
for (const auto* node : graph.Nodes()) {
if (node->IsOp() && node->Op()->Type() == "fc") {
const auto* op = node->Op();
ASSERT_TRUE(op->HasAttr("use_mkldnn"));
EXPECT_TRUE(PADDLE_GET_CONST(bool, op->GetAttr("use_mkldnn")));
ASSERT_TRUE(op->HasAttr("fuse_activation"));
auto act_type =
PADDLE_GET_CONST(std::string, op->GetAttr("fuse_activation"));
EXPECT_EQ(act_type.compare("gelu_tanh"), 0);
}
}
}
TEST(FuseFCActOneDNNPass, FuseWithGeluErf) {
auto prog =
test::BuildProgramDesc({"x", "fc_y", "act_y"}, {"weights", "bias"});
test::CreateOp(&prog,
"fc",
{
{"Input", "x"},
{"W", "weights"},
{"Bias", "bias"},
},
{{"Out", "fc_y"}});
auto* act_op =
test::CreateOp(&prog, "gelu", {{"X", "fc_y"}}, {{"Out", "act_y"}}, false);
act_op->SetAttr("approximate", false);
Graph graph(prog);
constexpr int removed_nodes_count = 2;
EXPECT_TRUE(test::RunPassAndAssert(
&graph, "fc_act_mkldnn_fuse_pass", "x", "act_y", removed_nodes_count));
EXPECT_TRUE(test::AssertOpsCount(graph, {{"fc", 1}, {"gelu", 0}}));
for (const auto* node : graph.Nodes()) {
if (node->IsOp() && node->Op()->Type() == "fc") {
const auto* op = node->Op();
ASSERT_TRUE(op->HasAttr("use_mkldnn"));
EXPECT_TRUE(PADDLE_GET_CONST(bool, op->GetAttr("use_mkldnn")));
ASSERT_TRUE(op->HasAttr("fuse_activation"));
auto act_type =
PADDLE_GET_CONST(std::string, op->GetAttr("fuse_activation"));
EXPECT_EQ(act_type.compare("gelu_erf"), 0);
}
}
}
TEST(FuseFCActOneDNNPass, FuseWithGeluAuto) {
auto prog =
test::BuildProgramDesc({"x", "fc_y", "act_y"}, {"weights", "bias"});
test::CreateOp(&prog,
"fc",
{
{"Input", "x"},
{"W", "weights"},
{"Bias", "bias"},
},
{{"Out", "fc_y"}});
test::CreateOp(&prog, "gelu", {{"X", "fc_y"}}, {{"Out", "act_y"}}, false);
Graph graph(prog);
constexpr int removed_nodes_count = 2;
EXPECT_TRUE(test::RunPassAndAssert(
&graph, "fc_act_mkldnn_fuse_pass", "x", "act_y", removed_nodes_count));
EXPECT_TRUE(test::AssertOpsCount(graph, {{"fc", 1}, {"gelu", 0}}));
for (const auto* node : graph.Nodes()) {
if (node->IsOp() && node->Op()->Type() == "fc") {
const auto* op = node->Op();
ASSERT_TRUE(op->HasAttr("use_mkldnn"));
EXPECT_TRUE(PADDLE_GET_CONST(bool, op->GetAttr("use_mkldnn")));
ASSERT_TRUE(op->HasAttr("fuse_activation"));
auto act_type =
PADDLE_GET_CONST(std::string, op->GetAttr("fuse_activation"));
EXPECT_EQ(act_type.compare("gelu"), 0);
}
}
}
TEST(FuseFCActOneDNNPass, FuseWithTanh) {
auto prog =
test::BuildProgramDesc({"x", "fc_y", "act_y"}, {"weights", "bias"});
test::CreateOp(&prog,
"fc",
{
{"Input", "x"},
{"W", "weights"},
{"Bias", "bias"},
},
{{"Out", "fc_y"}});
test::CreateOp(&prog, "tanh", {{"X", "fc_y"}}, {{"Out", "act_y"}}, false);
Graph graph(prog);
constexpr int removed_nodes_count = 2;
EXPECT_TRUE(test::RunPassAndAssert(
&graph, "fc_act_mkldnn_fuse_pass", "x", "act_y", removed_nodes_count));
EXPECT_TRUE(test::AssertOpsCount(graph, {{"fc", 1}, {"tanh", 0}}));
for (const auto* node : graph.Nodes()) {
if (node->IsOp() && node->Op()->Type() == "fc") {
const auto* op = node->Op();
ASSERT_TRUE(op->HasAttr("use_mkldnn"));
EXPECT_TRUE(PADDLE_GET_CONST(bool, op->GetAttr("use_mkldnn")));
ASSERT_TRUE(op->HasAttr("fuse_activation"));
auto act_type =
PADDLE_GET_CONST(std::string, op->GetAttr("fuse_activation"));
EXPECT_EQ(act_type.compare("tanh"), 0);
}
}
}
TEST(FuseFCActOneDNNPass, FuseWithSigmoid) {
auto prog =
test::BuildProgramDesc({"x", "fc_y", "act_y"}, {"weights", "bias"});
test::CreateOp(&prog,
"fc",
{
{"Input", "x"},
{"W", "weights"},
{"Bias", "bias"},
},
{{"Out", "fc_y"}});
test::CreateOp(&prog, "sigmoid", {{"X", "fc_y"}}, {{"Out", "act_y"}}, false);
Graph graph(prog);
constexpr int removed_nodes_count = 2;
EXPECT_TRUE(test::RunPassAndAssert(
&graph, "fc_act_mkldnn_fuse_pass", "x", "act_y", removed_nodes_count));
EXPECT_TRUE(test::AssertOpsCount(graph, {{"fc", 1}, {"sigmoid", 0}}));
for (const auto* node : graph.Nodes()) {
if (node->IsOp() && node->Op()->Type() == "fc") {
const auto* op = node->Op();
ASSERT_TRUE(op->HasAttr("use_mkldnn"));
EXPECT_TRUE(PADDLE_GET_CONST(bool, op->GetAttr("use_mkldnn")));
ASSERT_TRUE(op->HasAttr("fuse_activation"));
auto act_type =
PADDLE_GET_CONST(std::string, op->GetAttr("fuse_activation"));
EXPECT_EQ(act_type.compare("sigmoid"), 0);
}
}
}
TEST(FuseFCActOneDNNPass, FuseWithMish) {
auto prog =
test::BuildProgramDesc({"x", "fc_y", "act_y"}, {"weights", "bias"});
test::CreateOp(&prog,
"fc",
{
{"Input", "x"},
{"W", "weights"},
{"Bias", "bias"},
},
{{"Out", "fc_y"}});
test::CreateOp(&prog, "mish", {{"X", "fc_y"}}, {{"Out", "act_y"}}, false);
Graph graph(prog);
constexpr int removed_nodes_count = 2;
EXPECT_TRUE(test::RunPassAndAssert(
&graph, "fc_act_mkldnn_fuse_pass", "x", "act_y", removed_nodes_count));
EXPECT_TRUE(test::AssertOpsCount(graph, {{"fc", 1}, {"mish", 0}}));
for (const auto* node : graph.Nodes()) {
if (node->IsOp() && node->Op()->Type() == "fc") {
const auto* op = node->Op();
ASSERT_TRUE(op->HasAttr("use_mkldnn"));
EXPECT_TRUE(PADDLE_GET_CONST(bool, op->GetAttr("use_mkldnn")));
ASSERT_TRUE(op->HasAttr("fuse_activation"));
auto act_type =
PADDLE_GET_CONST(std::string, op->GetAttr("fuse_activation"));
EXPECT_EQ(act_type.compare("mish"), 0);
}
}
}
TEST(FuseFCActOneDNNPass, FuseWithHardSwish) {
auto prog =
test::BuildProgramDesc({"x", "fc_y", "act_y"}, {"weights", "bias"});
test::CreateOp(&prog,
"fc",
{
{"Input", "x"},
{"W", "weights"},
{"Bias", "bias"},
},
{{"Out", "fc_y"}});
test::CreateOp(
&prog, "hard_swish", {{"X", "fc_y"}}, {{"Out", "act_y"}}, false);
Graph graph(prog);
constexpr int removed_nodes_count = 2;
EXPECT_TRUE(test::RunPassAndAssert(
&graph, "fc_act_mkldnn_fuse_pass", "x", "act_y", removed_nodes_count));
EXPECT_TRUE(test::AssertOpsCount(graph, {{"fc", 1}, {"hard_swish", 0}}));
for (const auto* node : graph.Nodes()) {
if (node->IsOp() && node->Op()->Type() == "fc") {
const auto* op = node->Op();
ASSERT_TRUE(op->HasAttr("use_mkldnn"));
EXPECT_TRUE(PADDLE_GET_CONST(bool, op->GetAttr("use_mkldnn")));
ASSERT_TRUE(op->HasAttr("fuse_activation"));
auto act_type =
PADDLE_GET_CONST(std::string, op->GetAttr("fuse_activation"));
EXPECT_EQ(act_type.compare("hard_swish"), 0);
}
}
}
TEST(FuseFCActOneDNNPass, pass_op_version_check) {
ASSERT_TRUE(
paddle::framework::compatible::PassVersionCheckerRegistrar::GetInstance()
.IsPassCompatible("fc_act_mkldnn_fuse_pass"));
}
} // namespace ir
} // namespace framework
} // namespace paddle
USE_PASS(fc_act_mkldnn_fuse_pass);
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <gtest/gtest.h>
#include "paddle/fluid/framework/ir/mkldnn/fc_elementwise_add_mkldnn_fuse_pass.h"
#include "paddle/fluid/framework/ir/pass_test_util.h"
#include "paddle/fluid/framework/op_proto_maker.h"
#include "paddle/fluid/framework/op_version_registry.h"
namespace paddle {
namespace framework {
namespace ir {
// The elementwise_add node and the FC output variable node are deleted,
// and the original FC node is replaced by a new fused FC node,
// so the graph ends up 2 nodes smaller per applied fusion.
constexpr int nodes_removed = 3;
constexpr int nodes_added = 1;
OpDesc* Create_Op_FC(ProgramDesc* prog,
const std::vector<test::InOutVarNamePair>& inputs,
const std::vector<test::InOutVarNamePair>& outputs) {
auto* op = prog->MutableBlock(0)->AppendOp();
op->SetType("fc");
op->SetAttr("use_mkldnn", true);
op->SetAttr("in_num_col_dims", 1);
for (const auto& input : inputs) {
op->SetInput(input.first, {input.second});
}
for (const auto& output : outputs) {
op->SetOutput(output.first, {output.second});
}
op->SetAttr(OpProtoAndCheckerMaker::OpRoleAttrName(),
static_cast<int>(OpRole::kForward));
return op;
}
OpDesc* Create_Op_elementwise_add(
ProgramDesc* prog,
const std::vector<test::InOutVarNamePair>& inputs,
const std::vector<test::InOutVarNamePair>& outputs,
bool use_mkldnn = true) {
auto* op = prog->MutableBlock(0)->AppendOp();
op->SetType("elementwise_add");
op->SetAttr("use_mkldnn", use_mkldnn);
op->SetAttr("axis", -1);
for (const auto& input : inputs) {
op->SetInput(input.first, {input.second});
}
for (const auto& output : outputs) {
op->SetOutput(output.first, {output.second});
}
op->SetAttr(OpProtoAndCheckerMaker::OpRoleAttrName(),
static_cast<int>(OpRole::kForward));
return op;
}
TEST(FCElementwiseAddMKLDNNFusePass, FCBiasAsY) {
auto prog =
test::BuildProgramDesc({"a", "b", "c", "d", "e"}, {"bias", "weights"});
test::CreateOp(&prog, "sigmoid", {{"X", "a"}}, {{"Out", "b"}});
Create_Op_FC(&prog,
{{"Input", "b"}, {"Bias", "bias"}, {"W", "weights"}},
{{"Out", "c"}});
Create_Op_elementwise_add(&prog, {{"X", "a"}, {"Y", "c"}}, {{"Out", "d"}});
test::CreateOp(&prog, "relu", {{"X", "d"}}, {{"Out", "e"}});
Graph graph(prog);
EXPECT_TRUE(test::RunPassAndAssert(&graph,
"fc_elementwise_add_mkldnn_fuse_pass",
"a",
"e",
nodes_removed,
nodes_added));
EXPECT_TRUE(test::AssertOpsCount(graph, {{"fc", 1}, {"elementwise_add", 0}}));
}
TEST(FCElementwiseAddMKLDNNFusePass, FCBiasAsX) {
auto prog =
test::BuildProgramDesc({"a", "b", "c", "d", "e"}, {"bias", "weights"});
test::CreateOp(&prog, "sigmoid", {{"X", "a"}}, {{"Out", "b"}});
Create_Op_FC(&prog,
{{"Input", "b"}, {"Bias", "bias"}, {"W", "weights"}},
{{"Out", "c"}});
Create_Op_elementwise_add(&prog, {{"X", "c"}, {"Y", "a"}}, {{"Out", "d"}});
test::CreateOp(&prog, "relu", {{"X", "d"}}, {{"Out", "e"}});
Graph graph(prog);
EXPECT_TRUE(test::RunPassAndAssert(&graph,
"fc_elementwise_add_mkldnn_fuse_pass",
"a",
"e",
nodes_removed,
nodes_added));
EXPECT_TRUE(test::AssertOpsCount(graph, {{"fc", 1}, {"elementwise_add", 0}}));
}
TEST(FCElementwiseAddMKLDNNFusePass, NoFusion_NotResidualConnection) {
auto prog = test::BuildProgramDesc({"a", "b", "c", "d", "e", "f", "g"},
{"bias", "weights", "bias2", "weights2"});
test::CreateOp(&prog, "sigmoid", {{"X", "a"}}, {{"Out", "b"}});
Create_Op_FC(&prog,
{{"Input", "b"}, {"Bias", "bias"}, {"W", "weights"}},
{{"Out", "c"}});
Create_Op_FC(&prog,
{{"Input", "d"}, {"Bias", "bias2"}, {"W", "weights2"}},
{{"Out", "e"}});
Create_Op_elementwise_add(&prog, {{"X", "c"}, {"Y", "e"}}, {{"Out", "f"}});
test::CreateOp(&prog, "relu", {{"X", "f"}}, {{"Out", "g"}});
Graph graph(prog);
EXPECT_TRUE(test::RunPassAndAssert(
&graph, "fc_elementwise_add_mkldnn_fuse_pass", "a", "g", 0, 0));
EXPECT_TRUE(test::AssertOpsCount(graph, {{"fc", 2}, {"elementwise_add", 1}}));
}
TEST(FCElementwiseAddMKLDNNFusePass, NoFusion_HasActivationFused) {
auto prog =
test::BuildProgramDesc({"a", "b", "c", "d", "e"}, {"bias", "weights"});
test::CreateOp(&prog, "sigmoid", {{"X", "a"}}, {{"Out", "b"}});
OpDesc* fc =
Create_Op_FC(&prog,
{{"Input", "b"}, {"Bias", "bias"}, {"W", "weights"}},
{{"Out", "c"}});
std::string activation{"relu"};
fc->SetAttr("activation_type", activation);
Create_Op_elementwise_add(&prog, {{"X", "c"}, {"Y", "a"}}, {{"Out", "d"}});
test::CreateOp(&prog, "relu", {{"X", "d"}}, {{"Out", "e"}});
Graph graph(prog);
EXPECT_TRUE(test::RunPassAndAssert(
&graph, "fc_elementwise_add_mkldnn_fuse_pass", "a", "e", 0, 0));
EXPECT_TRUE(test::AssertOpsCount(graph, {{"fc", 1}, {"elementwise_add", 1}}));
}
TEST(FCElementwiseAddMKLDNNFusePass, FC_Residual_VITOCR) {
auto prog = test::BuildProgramDesc(
{"a", "b", "c", "d", "e", "f", "g", "h", "i"},
{"ln_bias", "ln_scale", "bias", "weights", "bias2", "weights2"});
Create_Op_elementwise_add(&prog, {{"X", "a"}, {"Y", "b"}}, {{"Out", "c"}});
test::CreateOp(&prog,
"layer_norm",
{{"X", "c"}, {"Bias", "ln_bias"}, {"Scale", "ln_scale"}},
{{"Y", "d"}});
Create_Op_FC(&prog,
{{"Input", "d"}, {"Bias", "bias"}, {"W", "weights"}},
{{"Out", "e"}});
test::CreateOp(&prog, "gelu", {{"X", "e"}}, {{"Out", "f"}});
Create_Op_FC(&prog,
{{"Input", "f"}, {"Bias", "bias2"}, {"W", "weights2"}},
{{"Out", "g"}});
Create_Op_elementwise_add(&prog, {{"X", "g"}, {"Y", "c"}}, {{"Out", "h"}});
test::CreateOp(&prog, "relu", {{"X", "h"}}, {{"Out", "i"}});
Graph graph(prog);
EXPECT_TRUE(test::RunPassAndAssert(&graph,
"fc_elementwise_add_mkldnn_fuse_pass",
"a",
"i",
nodes_removed,
nodes_added));
EXPECT_TRUE(test::AssertOpsCount(graph, {{"fc", 2}, {"elementwise_add", 1}}));
}
TEST(FCElementwiseAddMKLDNNFusePass, FC_Residual_Sequence) {
auto prog = test::BuildProgramDesc(
{"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m"},
{"ln_bias",
"ln_scale",
"bias",
"weights",
"bias2",
"weights2",
"ln_bias2",
"ln_scale2",
"bias3",
"weights3",
"bias4",
"weights4"});
Create_Op_elementwise_add(&prog, {{"X", "a"}, {"Y", "b"}}, {{"Out", "c"}});
test::CreateOp(&prog,
"layer_norm",
{{"X", "c"}, {"Bias", "ln_bias"}, {"Scale", "ln_scale"}},
{{"Y", "d"}});
Create_Op_FC(&prog,
{{"Input", "d"}, {"Bias", "bias"}, {"W", "weights"}},
{{"Out", "e"}});
test::CreateOp(&prog, "gelu", {{"X", "e"}}, {{"Out", "f"}});
Create_Op_FC(&prog,
{{"Input", "f"}, {"Bias", "bias2"}, {"W", "weights2"}},
{{"Out", "g"}});
Create_Op_elementwise_add(&prog, {{"X", "g"}, {"Y", "c"}}, {{"Out", "h"}});
test::CreateOp(&prog,
"layer_norm",
{{"X", "h"}, {"Bias", "ln_bias2"}, {"Scale", "ln_scale2"}},
{{"Y", "i"}});
Create_Op_FC(&prog,
{{"Input", "i"}, {"Bias", "bias3"}, {"W", "weights3"}},
{{"Out", "j"}});
test::CreateOp(&prog, "gelu", {{"X", "j"}}, {{"Out", "k"}});
Create_Op_FC(&prog,
{{"Input", "k"}, {"Bias", "bias4"}, {"W", "weights4"}},
{{"Out", "l"}});
Create_Op_elementwise_add(&prog, {{"X", "h"}, {"Y", "l"}}, {{"Out", "m"}});
Graph graph(prog);
EXPECT_TRUE(test::RunPassAndAssert(&graph,
"fc_elementwise_add_mkldnn_fuse_pass",
"a",
"m",
nodes_removed * 2,
nodes_added * 2));
EXPECT_TRUE(test::AssertOpsCount(graph, {{"fc", 4}, {"elementwise_add", 1}}));
}
TEST(FCElementwiseAddMKLDNNFusePass, pass_op_version_check) {
ASSERT_TRUE(
paddle::framework::compatible::PassVersionCheckerRegistrar::GetInstance()
.IsPassCompatible("fc_elementwise_add_mkldnn_fuse_pass"));
}
} // namespace ir
} // namespace framework
} // namespace paddle
USE_PASS(fc_elementwise_add_mkldnn_fuse_pass);
@@ -29,6 +29,11 @@ using string::PrettyLogDetail;
void SoftplusActivationOneDNNPass::ApplyImpl(Graph *graph) const {
auto act_types = paddle::platform::GetSupportedActivations();
// Currently softplus can't be fused with hard_sigmoid
act_types.erase(
std::remove(act_types.begin(), act_types.end(), "hard_sigmoid"),
act_types.end());
for (const auto &act_type : act_types) {
FuseSoftplusActivation(graph, act_type);
}
@@ -113,7 +118,6 @@ REGISTER_PASS_CAPABILITY(softplus_activation_mkldnn_fuse_pass)
.EQ("abs", 0)
.LE("clip", 1)
.EQ("gelu", 0)
.EQ("hard_sigmoid", 0)
.LE("hard_swish", 0)
.LE("leaky_relu", 1)
.LE("mish", 1)
......
// Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <gtest/gtest.h>
#include <vector>
#include "paddle/fluid/framework/ir/mkldnn/softplus_activation_mkldnn_fuse_pass.h"
#include "paddle/fluid/framework/op_proto_maker.h"
namespace paddle {
namespace framework {
namespace ir {
void MainTest(const std::string& activation_type) {
auto prog =
test::BuildProgramDesc({"softplus_x", "softplus_out", "activation_out"});
test::CreateOp(
&prog, "softplus", {{"X", "softplus_x"}}, {{"Out", "softplus_out"}});
test::CreateOp(&prog,
activation_type,
{{"X", "softplus_out"}},
{{"Out", "activation_out"}},
false);
Graph graph(prog);
constexpr int removed_nodes_count = 2;
EXPECT_TRUE(test::RunPassAndAssert(&graph,
"softplus_activation_mkldnn_fuse_pass",
"softplus_x",
"activation_out",
removed_nodes_count));
EXPECT_TRUE(
test::AssertOpsCount(graph, {{"softplus", 1}, {activation_type, 0}}));
for (const auto* node : graph.Nodes()) {
if (node->IsOp() && node->Op()->Type() == "softplus") {
const auto* op = node->Op();
ASSERT_TRUE(op->HasAttr("use_mkldnn"));
EXPECT_TRUE(PADDLE_GET_CONST(bool, op->GetAttr("use_mkldnn")));
ASSERT_TRUE(op->HasAttr("fuse_activation"));
auto fused_activation_type =
PADDLE_GET_CONST(std::string, op->GetAttr("fuse_activation"));
EXPECT_EQ(fused_activation_type.compare(activation_type), 0);
}
}
}
// clang-format off
TEST(FuseSoftplusActivationOneDNNPass, FuseSoftplusWithTanh) {MainTest("tanh");}
TEST(FuseSoftplusActivationOneDNNPass, FuseSoftplusWithRelu) {MainTest("relu");}
TEST(FuseSoftplusActivationOneDNNPass,
FuseSoftplusWithLeakyRelu) {MainTest("leaky_relu");}
TEST(FuseSoftplusActivationOneDNNPass,
FuseSoftplusWithSwish) {MainTest("swish");}
TEST(FuseSoftplusActivationOneDNNPass,
FuseSoftplusWithHardswish) {MainTest("hardswish");}
TEST(FuseSoftplusActivationOneDNNPass, FuseSoftplusWithSqrt) {MainTest("sqrt");}
TEST(FuseSoftplusActivationOneDNNPass, FuseSoftplusWithAbs) {MainTest("abs");}
TEST(FuseSoftplusActivationOneDNNPass, FuseSoftplusWithClip) {MainTest("clip");}
TEST(FuseSoftplusActivationOneDNNPass, FuseSoftplusWithGelu) {MainTest("gelu");}
TEST(FuseSoftplusActivationOneDNNPass,
FuseSoftplusWithRelu6) {MainTest("relu6");}
TEST(FuseSoftplusActivationOneDNNPass, FuseSoftplusWithSigmoid) {
MainTest("sigmoid");
}
// clang-format on
} // namespace ir
} // namespace framework
} // namespace paddle
USE_PASS(softplus_activation_mkldnn_fuse_pass);
@@ -232,7 +232,7 @@ if(WITH_GPU AND TENSORRT_FOUND)
set_tests_properties(test_mkldnn_conv_mish_fuse_pass PROPERTIES TIMEOUT 300)
set_tests_properties(test_onednn_fc_activation_fuse_pass PROPERTIES TIMEOUT
300)
set_tests_properties(test_mkldnn_fc_elementwise_add_fuse_pass
set_tests_properties(test_onednn_fc_elementwise_add_fuse_pass
PROPERTIES TIMEOUT 120)
set_tests_properties(test_mkldnn_conv_affine_channel_fuse_pass
PROPERTIES TIMEOUT 60)
......
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for fusion of fc and activation."""
import unittest
import numpy as np
import paddle.fluid as fluid
from inference_pass_test import InferencePassTest
from paddle import enable_static
from paddle.fluid.core import PassVersionChecker
enable_static()
class FCGeluTanhOneDnnFusePassTest(InferencePassTest):
def setUp(self):
self.set_params()
with fluid.program_guard(self.main_program, self.startup_program):
data = fluid.data(
name="data", shape=[-1, 128, 768], dtype="float32"
)
fc_out = fluid.layers.fc(input=data, size=3072, num_flatten_dims=2)
gelu_out = fluid.layers.gelu(fc_out, approximate=True)
self.feeds = {"data": np.random.random((1, 128, 768)).astype("float32")}
self.fetch_list = [gelu_out]
self.enable_mkldnn = True
def set_params(self):
self.pass_name = "fc_act_mkldnn_fuse_pass"
def test_check_output(self):
self.check_output()
class FCGeluErfOneDnnFusePassTest(InferencePassTest):
def setUp(self):
self.set_params()
with fluid.program_guard(self.main_program, self.startup_program):
data = fluid.data(
name="data", shape=[-1, 128, 768], dtype="float32"
)
fc_out = fluid.layers.fc(input=data, size=3072, num_flatten_dims=2)
gelu_out = fluid.layers.gelu(fc_out, approximate=False)
self.feeds = {"data": np.random.random((1, 128, 768)).astype("float32")}
self.fetch_list = [gelu_out]
self.enable_mkldnn = True
def set_params(self):
self.pass_name = "fc_act_mkldnn_fuse_pass"
def test_check_output(self):
self.check_output()
self.assertTrue(PassVersionChecker.IsCompatible(self.pass_name))
class FCTanhOneDnnFusePassTest(InferencePassTest):
def setUp(self):
self.set_params()
with fluid.program_guard(self.main_program, self.startup_program):
data = fluid.data(
name="data", shape=[-1, 128, 768], dtype="float32"
)
fc_out = fluid.layers.fc(input=data, size=3072, num_flatten_dims=2)
tanh_out = fluid.layers.tanh(fc_out)
self.feeds = {"data": np.random.random((1, 128, 768)).astype("float32")}
self.fetch_list = [tanh_out]
self.enable_mkldnn = True
def set_params(self):
self.pass_name = "fc_act_mkldnn_fuse_pass"
def test_check_output(self):
self.check_output()
self.assertTrue(PassVersionChecker.IsCompatible(self.pass_name))
class FCSigmoidOneDnnFusePassTest(InferencePassTest):
def setUp(self):
self.set_params()
with fluid.program_guard(self.main_program, self.startup_program):
data = fluid.data(
name="data", shape=[-1, 128, 768], dtype="float32"
)
fc_out = fluid.layers.fc(input=data, size=3072, num_flatten_dims=2)
sigmoid_out = fluid.layers.sigmoid(fc_out)
self.feeds = {"data": np.random.random((1, 128, 768)).astype("float32")}
self.fetch_list = [sigmoid_out]
self.enable_mkldnn = True
def set_params(self):
self.pass_name = "fc_act_mkldnn_fuse_pass"
def test_check_output(self):
self.check_output()
self.assertTrue(PassVersionChecker.IsCompatible(self.pass_name))
class FCHardSwishOneDnnFusePassTest(InferencePassTest):
def setUp(self):
self.set_params()
with fluid.program_guard(self.main_program, self.startup_program):
data = fluid.data(
name="data", shape=[-1, 128, 768], dtype="float32"
)
fc_out = fluid.layers.fc(input=data, size=3072, num_flatten_dims=2)
hardswish_out = fluid.layers.hard_swish(fc_out)
self.feeds = {"data": np.random.random((1, 128, 768)).astype("float32")}
self.fetch_list = [hardswish_out]
self.enable_mkldnn = True
def set_params(self):
self.pass_name = "fc_act_mkldnn_fuse_pass"
def test_check_output(self):
self.check_output()
self.assertTrue(PassVersionChecker.IsCompatible(self.pass_name))
class FCMishOneDnnFusePassTest(InferencePassTest):
def setUp(self):
self.set_params()
with fluid.program_guard(self.main_program, self.startup_program):
data = fluid.data(
name="data", shape=[-1, 128, 768], dtype="float32"
)
fc_out = fluid.layers.fc(input=data, size=3072, num_flatten_dims=2)
mish_out = fluid.layers.mish(fc_out)
self.feeds = {"data": np.random.random((1, 128, 768)).astype("float32")}
self.fetch_list = [mish_out]
self.enable_mkldnn = True
def set_params(self):
self.pass_name = "fc_act_mkldnn_fuse_pass"
def test_check_output(self):
self.check_output()
self.assertTrue(PassVersionChecker.IsCompatible(self.pass_name))
if __name__ == "__main__":
unittest.main()
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
from inference_pass_test import InferencePassTest
import paddle
import paddle.fluid as fluid
from paddle.fluid.core import PassVersionChecker
class SoftplusActivationReluOneDNNFusePassTest(InferencePassTest):
fuse_alpha = None
fuse_beta = None
pass_name = 'softplus_activation_mkldnn_fuse_pass'
def setUp(self):
self.set_params()
with fluid.program_guard(self.main_program, self.startup_program):
data = fluid.data(
name="data", shape=[-1, 3, 100, 100], dtype="float32"
)
softplus_out = fluid.layers.softplus(data)
if self.fuse_beta is not None:
activation_out = self.fuse_activation(
softplus_out, self.fuse_alpha, self.fuse_beta
)
elif self.fuse_alpha is not None:
activation_out = self.fuse_activation(
softplus_out, self.fuse_alpha
)
else:
activation_out = self.fuse_activation(softplus_out)
self.feeds = {
"data": np.random.random((1, 3, 100, 100)).astype("float32"),
}
self.fetch_list = [activation_out]
self.enable_mkldnn = True
def set_params(self):
self.fuse_activation = fluid.layers.relu
def test_check_output(self):
use_gpu = False
self.check_output_with_option(use_gpu)
def test_pass_compatible(self):
self.assertTrue(PassVersionChecker.IsCompatible(self.pass_name))
class SoftplusActivationTanhOneDNNFusePassTest(
SoftplusActivationReluOneDNNFusePassTest
):
def set_params(self):
self.fuse_activation = fluid.layers.tanh
class SoftplusActivationLeakyReluOneDNNFusePassTest(
SoftplusActivationReluOneDNNFusePassTest
):
def set_params(self):
self.fuse_activation = fluid.layers.leaky_relu
self.fuse_alpha = 0.3
class SoftplusActivationSwishOneDNNFusePassTest(
SoftplusActivationReluOneDNNFusePassTest
):
def set_params(self):
self.fuse_activation = fluid.layers.swish
self.fuse_alpha = 3
class SoftplusActivationHardSwishOneDNNFusePassTest(
SoftplusActivationReluOneDNNFusePassTest
):
def set_params(self):
self.fuse_activation = fluid.layers.hard_swish
class SoftplusActivationSqrtOneDNNFusePassTest(
SoftplusActivationReluOneDNNFusePassTest
):
def set_params(self):
self.fuse_activation = fluid.layers.sqrt
class SoftplusActivationAbsOneDNNFusePassTest(
SoftplusActivationReluOneDNNFusePassTest
):
def set_params(self):
self.fuse_activation = fluid.layers.abs
class SoftplusActivationClipOneDNNFusePassTest(
SoftplusActivationReluOneDNNFusePassTest
):
def set_params(self):
self.fuse_activation = fluid.layers.clip
self.fuse_alpha = 1.1
self.fuse_beta = 5.2
class SoftplusActivationGeluErfOneDNNFusePassTest(
SoftplusActivationReluOneDNNFusePassTest
):
def set_params(self):
self.fuse_activation = fluid.layers.gelu
class SoftplusActivationGeluTanhOneDNNFusePassTest(
SoftplusActivationReluOneDNNFusePassTest
):
def set_params(self):
self.fuse_activation = fluid.layers.gelu
self.fuse_alpha = True # simulated "Approximate" attr
class SoftplusActivationRelu6OneDNNFusePassTest(
SoftplusActivationReluOneDNNFusePassTest
):
def set_params(self):
self.fuse_activation = fluid.layers.relu6
class SoftplusActivationSigmoidOneDNNFusePassTest(
SoftplusActivationReluOneDNNFusePassTest
):
def set_params(self):
self.fuse_activation = fluid.layers.sigmoid
if __name__ == "__main__":
paddle.enable_static()
unittest.main()
@@ -12,64 +12,57 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from auto_scan_test import PassAutoScanTest
from program_config import TensorConfig, ProgramConfig, OpConfig
import numpy as np
from functools import partial
import unittest
import hypothesis.strategies as st
from auto_scan_test import PassAutoScanTest
from functools import partial
from program_config import TensorConfig, ProgramConfig, OpConfig
class TestFCElementwiseAddMkldnnFusePass(PassAutoScanTest):
class TestFCElementwiseAddOneDNNFusePass(PassAutoScanTest):
def sample_program_config(self, draw):
axis = draw(st.sampled_from([-1, 0, 1]))
fc_as_x = draw(st.sampled_from([True, False]))
fc_as_x = draw(st.booleans())
fc_in = draw(st.sampled_from([32, 64]))
fc_wei = draw(st.sampled_from([32, 64]))
def generate_input():
return np.random.random([fc_in, fc_wei]).astype(np.float32)
def generate_fc_weight():
return np.random.random([fc_wei, fc_wei]).astype(np.float32)
def generate_fc_bias():
return np.random.random([fc_wei]).astype(np.float32)
def generate_data(shape):
return np.random.random(shape).astype(np.float32)
relu_op = OpConfig(
type="relu",
inputs={"X": ["input_data"]},
outputs={"Out": ["relu_out"]},
type='relu',
inputs={'X': ['input_data']},
outputs={'Out': ['relu_out']},
attrs={},
)
fc_op = OpConfig(
type="fc",
type='fc',
inputs={
"Input": ["relu_out"],
"W": ["fc_weight"],
"Bias": ["fc_bias"],
'Input': ['relu_out'],
'W': ['fc_weight'],
'Bias': ['fc_bias'],
},
outputs={"Out": ["fc_output"]},
outputs={'Out': ['fc_output']},
attrs={
"use_mkldnn": True,
"padding_weights": False,
"activation_type": "",
"in_num_col_dims": 1,
'use_mkldnn': True,
'padding_weights': False,
'activation_type': '',
'in_num_col_dims': 1,
},
)
if fc_as_x:
inputs = {"X": ["fc_output"], "Y": ["input_data"]}
inputs = {'X': ['fc_output'], 'Y': ['input_data']}
else:
inputs = {"X": ["input_data"], "Y": ["fc_output"]}
inputs = {'X': ['input_data'], 'Y': ['fc_output']}
elt_add_op = OpConfig(
type="elementwise_add",
type='elementwise_add',
inputs=inputs,
outputs={"Out": ["elementwise_output"]},
attrs={'axis': axis},
outputs={'Out': ['elementwise_output']},
attrs={'axis': axis, 'use_mkldnn': True},
)
model_net = [relu_op, fc_op, elt_add_op]
@@ -77,26 +70,34 @@ class TestFCElementwiseAddMkldnnFusePass(PassAutoScanTest):
program_config = ProgramConfig(
ops=model_net,
weights={
"fc_weight": TensorConfig(data_gen=partial(generate_fc_weight)),
"fc_bias": TensorConfig(data_gen=partial(generate_fc_bias)),
'fc_weight': TensorConfig(
data_gen=partial(generate_data, [fc_wei, fc_wei])
),
'fc_bias': TensorConfig(
data_gen=partial(generate_data, [fc_wei])
),
},
inputs={
"input_data": TensorConfig(data_gen=partial(generate_input))
'input_data': TensorConfig(
data_gen=partial(generate_data, [fc_in, fc_wei])
)
},
outputs=["elementwise_output"],
outputs=['elementwise_output'],
)
return program_config
def sample_predictor_configs(self, program_config):
config = self.create_inference_config(use_mkldnn=True)
yield config, ["relu", "fc"], (1e-5, 1e-5)
config = self.create_inference_config(
use_mkldnn=True, passes=['fc_elementwise_add_mkldnn_fuse_pass']
)
yield config, ['relu', 'fc'], (1e-5, 1e-5)
def test(self):
self.run_and_statis(
quant=False, passes=["fc_elementwise_add_mkldnn_fuse_pass"]
quant=False, passes=['fc_elementwise_add_mkldnn_fuse_pass']
)
if __name__ == "__main__":
if __name__ == '__main__':
unittest.main()
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hypothesis.strategies as st
import numpy as np
import unittest
from auto_scan_test import PassAutoScanTest
from functools import partial
from program_config import TensorConfig, ProgramConfig, OpConfig
class TestSoftplusActivationOneDNNFusePass(PassAutoScanTest):
def sample_program_config(self, draw):
activation_type = draw(
st.sampled_from(
[
'relu',
'gelu',
'tanh',
'sigmoid',
'swish',
'mish',
'sqrt',
'hard_swish',
'abs',
'relu6',
'clip',
'leaky_relu',
]
)
)
def generate_input():
return np.random.random([4, 3, 100, 100]).astype(np.float32)
softplus_op = OpConfig(
type='softplus',
inputs={
'X': ['activation_X'],
},
outputs={'Out': ['softplus_out']},
attrs={
'beta': draw(st.floats(min_value=0.5, max_value=2)),
'threshold': draw(st.floats(min_value=15, max_value=30)),
},
)
if activation_type == 'clip':
activation_op = OpConfig(
activation_type,
inputs={'X': ['softplus_out']},
outputs={'Out': ['activation_output']},
min=draw(st.floats(min_value=0.1, max_value=0.49)),
max=draw(st.floats(min_value=0.5, max_value=1.0)),
)
elif activation_type == "gelu":
activation_op = OpConfig(
activation_type,
inputs={"X": ["softplus_out"]},
outputs={"Out": ["activation_output"]},
approximate=draw(st.booleans()),
)
elif activation_type == 'leaky_relu':
activation_op = OpConfig(
activation_type,
inputs={'X': ['softplus_out']},
outputs={'Out': ['activation_output']},
alpha=draw(st.floats(min_value=0.1, max_value=1.0)),
)
elif activation_type == 'relu6':
activation_op = OpConfig(
activation_type,
inputs={'X': ['softplus_out']},
outputs={'Out': ['activation_output']},
threshold=draw(st.floats(min_value=1.0, max_value=10.0)),
)
elif activation_type == 'swish':
activation_op = OpConfig(
activation_type,
inputs={'X': ['softplus_out']},
outputs={'Out': ['activation_output']},
beta=draw(st.floats(min_value=0.1, max_value=10.0)),
)
else:
activation_op = OpConfig(
activation_type,
inputs={'X': ['softplus_out']},
outputs={'Out': ['activation_output']},
)
model_net = [softplus_op, activation_op]
program_config = ProgramConfig(
ops=model_net,
weights={},
inputs={
'activation_X': TensorConfig(data_gen=partial(generate_input))
},
outputs=['activation_output'],
)
return program_config
def sample_predictor_configs(self, program_config):
config = self.create_inference_config(use_mkldnn=True)
yield config, ['softplus'], (1e-5, 1e-5)
def test(self):
self.run_and_statis(
quant=False,
max_examples=40,
passes=['softplus_activation_mkldnn_fuse_pass'],
)
if __name__ == '__main__':
unittest.main()