From 3d7e737cc1c020f9b125862fd452e9fcb3b8815a Mon Sep 17 00:00:00 2001
From: baoachun <962571062@qq.com>
Date: Wed, 22 Dec 2021 15:06:35 +0800
Subject: [PATCH] update mkldnn batch_norm_activation fuse pass ut (#37402)

* update mkldnn batch_norm_activation fuse pass ut

* update ut

* update mkldnn batch_norm_act_fuse_pass ut

* update batch_norm_act_fuse_pass ut

* update ut
---
 .../ir/mkldnn/batch_norm_act_fuse_pass.cc     |  23 ++-
 .../mkldnn/batch_norm_act_fuse_pass_tester.cc |  24 +--
 .../unittests/ir/inference/CMakeLists.txt     |   1 +
 .../test_mkldnn_batch_norm_act_fuse_pass.py   | 162 +++++++++++-------
 4 files changed, 126 insertions(+), 84 deletions(-)

diff --git a/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc
index c5bb4bf0b2f..fe758a6b3a7 100644
--- a/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc
@@ -67,6 +67,12 @@ FuseBatchNormActOneDNNPass::FuseBatchNormActOneDNNPass() {
       .AddAttr("epsilon")
       .IsNumGE(0.0f)
       .IsNumLE(0.001f)
+      .End()
+      .AddAttr("trainable_statistics")
+      .IsBoolEQ(false)
+      .End()
+      .AddAttr("is_test")
+      .IsBoolEQ(true)
       .End();
 
   AddOpCompat(OpCompat("relu"))
@@ -108,7 +114,6 @@ void FuseBatchNormActOneDNNPass::FuseBatchNormAct(
   GET_IR_NODE_FROM_SUBGRAPH(act, act, bn_act_pattern);
 
   auto *bn_op = batch_norm->Op();
-
   if (bn_op->HasAttr("use_mkldnn")) {
     PADDLE_ENFORCE(
         BOOST_GET_CONST(bool, bn_op->GetAttr("use_mkldnn")),
@@ -117,19 +122,13 @@ void FuseBatchNormActOneDNNPass::FuseBatchNormAct(
         "is used."));
   }
 
-  if (bn_op->HasAttr("trainable_statistics")) {
-    PADDLE_ENFORCE(
-        !BOOST_GET_CONST(bool, bn_op->GetAttr("trainable_statistics")),
-        platform::errors::PreconditionNotMet(
-            "The BatchNorm+Act fusion may happen only when mean and variance "
-            "are not calculated by current batch statistics."));
-  }
-
-  if (bn_op->HasAttr("is_test")) {
+  auto *act_op = act->Op();
+  if (act_op->HasAttr("use_mkldnn")) {
     PADDLE_ENFORCE(
-        BOOST_GET_CONST(bool, bn_op->GetAttr("is_test")),
+        BOOST_GET_CONST(bool, act_op->GetAttr("use_mkldnn")),
         platform::errors::PreconditionNotMet(
-            "The BatchNorm+Act fusion may happen only during inference."));
+            "The BatchNorm+Act fusion may happen only when oneDNN library "
+            "is used."));
   }
 
   bn_op->SetAttr("use_mkldnn", true);
diff --git a/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass_tester.cc b/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass_tester.cc
index e13d44ac232..26828fdd94b 100644
--- a/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass_tester.cc
+++ b/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass_tester.cc
@@ -65,9 +65,9 @@ TEST(FuseBatchNormActOneDNNPass, ThrowIsTestTrainableStats) {
 
   // No fusion in this attribute configuration
   constexpr int removed_nodes_count = 0;
-  EXPECT_THROW(test::RunPassAndAssert(&graph, "batch_norm_act_fuse_pass", "x",
-                                      "act_y", removed_nodes_count),
-               paddle::platform::EnforceNotMet);
+  EXPECT_TRUE(test::RunPassAndAssert(&graph, "batch_norm_act_fuse_pass", "x",
+                                     "act_y", removed_nodes_count));
+  EXPECT_TRUE(test::AssertOpsCount(graph, {{"batch_norm", 1}, {"relu", 1}}));
 }
 
 TEST(FuseBatchNormActOneDNNPass, FuseIsTest) {
@@ -123,9 +123,9 @@ TEST(FuseBatchNormActOneDNNPass, ThrowTrainableStats) {
 
   // No fusion in this attribute configuration
   constexpr int removed_nodes_count = 0;
-  EXPECT_THROW(test::RunPassAndAssert(&graph, "batch_norm_act_fuse_pass", "x",
-                                      "act_y", removed_nodes_count),
-               paddle::platform::EnforceNotMet);
+  EXPECT_TRUE(test::RunPassAndAssert(&graph, "batch_norm_act_fuse_pass", "x",
+                                     "act_y", removed_nodes_count));
+  EXPECT_TRUE(test::AssertOpsCount(graph, {{"batch_norm", 1}, {"relu", 1}}));
 }
 
 TEST(FuseBatchNormActOneDNNPass, AllAttrsFalse) {
@@ -149,9 +149,9 @@ TEST(FuseBatchNormActOneDNNPass, AllAttrsFalse) {
 
   // No fusion in this attribute configuration
   constexpr int removed_nodes_count = 0;
-  EXPECT_THROW(test::RunPassAndAssert(&graph, "batch_norm_act_fuse_pass", "x",
-                                      "act_y", removed_nodes_count),
-               paddle::platform::EnforceNotMet);
+  EXPECT_TRUE(test::RunPassAndAssert(&graph, "batch_norm_act_fuse_pass", "x",
+                                     "act_y", removed_nodes_count));
+  EXPECT_TRUE(test::AssertOpsCount(graph, {{"batch_norm", 1}, {"relu", 1}}));
 }
 
 TEST(FuseBatchNormActOneDNNPass, ThrowUseMkldnn) {
@@ -176,9 +176,9 @@ TEST(FuseBatchNormActOneDNNPass, ThrowUseMkldnn) {
 
   // No fusion in this attribute configuration
   constexpr int removed_nodes_count = 0;
-  EXPECT_THROW(test::RunPassAndAssert(&graph, "batch_norm_act_fuse_pass", "x",
-                                      "act_y", removed_nodes_count),
-               paddle::platform::EnforceNotMet);
+  EXPECT_TRUE(test::RunPassAndAssert(&graph, "batch_norm_act_fuse_pass", "x",
+                                     "act_y", removed_nodes_count));
+  EXPECT_TRUE(test::AssertOpsCount(graph, {{"batch_norm", 1}, {"relu", 1}}));
 }
 
 TEST(FuseBatchNormActOneDNNPass, pass_op_version_check) {
diff --git a/python/paddle/fluid/tests/unittests/ir/inference/CMakeLists.txt b/python/paddle/fluid/tests/unittests/ir/inference/CMakeLists.txt
index 7c40d0e49f3..d7f72cc2461 100755
--- a/python/paddle/fluid/tests/unittests/ir/inference/CMakeLists.txt
+++ b/python/paddle/fluid/tests/unittests/ir/inference/CMakeLists.txt
@@ -94,6 +94,7 @@ if (WITH_MKLDNN)
     set_tests_properties(test_conv_act_mkldnn_fuse_pass PROPERTIES TIMEOUT 120)
     set_tests_properties(test_conv_transpose_eltwiseadd_bn_fuse_pass PROPERTIES TIMEOUT 250)
     set_tests_properties(test_conv_transpose_bn_fuse_pass PROPERTIES TIMEOUT 300)
+    set_tests_properties(test_mkldnn_batch_norm_act_fuse_pass PROPERTIES TIMEOUT 100)
     set_tests_properties(test_mkldnn_conv_transpose_bias_fuse_pass PROPERTIES TIMEOUT 100)
     set_tests_properties(test_conv_eltwiseadd_bn_fuse_pass PROPERTIES TIMEOUT 300)
 endif()
diff --git a/python/paddle/fluid/tests/unittests/ir/inference/test_mkldnn_batch_norm_act_fuse_pass.py b/python/paddle/fluid/tests/unittests/ir/inference/test_mkldnn_batch_norm_act_fuse_pass.py
index c119cbec884..0012ebb05b1 100644
--- a/python/paddle/fluid/tests/unittests/ir/inference/test_mkldnn_batch_norm_act_fuse_pass.py
+++ b/python/paddle/fluid/tests/unittests/ir/inference/test_mkldnn_batch_norm_act_fuse_pass.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -11,68 +11,110 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-"""Test for fusion of batch norm and activation."""
-from __future__ import print_function
 
-import unittest
+from auto_scan_test import PassAutoScanTest, SkipReasons
+from program_config import TensorConfig, ProgramConfig, OpConfig
 import numpy as np
+import paddle.inference as paddle_infer
+from functools import partial
+from typing import Optional, List, Callable, Dict, Any, Set
+import unittest
+
+import hypothesis
+from hypothesis import given, settings, seed, example, assume
+import hypothesis.strategies as st
+
+
+class TestMkldnnBatchNormActFusePass(PassAutoScanTest):
+    def is_program_valid(self, program_config: ProgramConfig) -> bool:
+        return True
+
+    def sample_program_config(self, draw):
+        data_layout = draw(st.sampled_from(["NCHW", "NHWC"]))
+        epsilon = draw(st.floats(min_value=0.0, max_value=0.001))
+        fuse_with_relu = draw(st.booleans())
+        is_test = draw(st.sampled_from([True]))
+        momentum = draw(st.floats(min_value=0.0, max_value=5))
+        trainable_statistics = False
+        use_global_stats = draw(st.booleans())
+        use_mkldnn1 = draw(st.sampled_from([True]))
+        use_cudnn = draw(st.booleans())
+        use_mkldnn2 = draw(st.sampled_from([True]))
+        batch_size = draw(st.integers(min_value=1, max_value=4))
+        channel = draw(st.integers(min_value=1, max_value=64))
+        input_dim1 = draw(st.integers(min_value=1, max_value=512))
+        input_dim2 = draw(st.integers(min_value=1, max_value=512))
+
+        def generate_input():
+            shape = [input_dim1, input_dim2]
+            if data_layout == "NCHW":
+                shape.insert(0, channel)
+                shape.insert(0, batch_size)
+            else:
+                shape.append(channel)
+                shape.insert(0, batch_size)
+            return np.random.random(shape).astype(np.float32)
+
+        def generate_weight():
+            return np.random.random(channel).astype(np.float32)
+
+        batch_norm_op = OpConfig(
+            type="batch_norm",
+            inputs={
+                "X": ["input_data"],
+                "Bias": ["Bias"],
+                "Mean": ["Mean"],
+                "Scale": ["Scale"],
+                "Variance": ["Variance"]
+            },
+            outputs={
+                "Y": ["norm_output"],
+                "MeanOut": ["Mean"],
+                "VarianceOut": ["Variance"],
+                "SavedMean": ["SavedMean"],
+                "SavedVariance": ["SavedVariance"]
+            },
+            attrs={
+                "data_layout": data_layout,
+                "epsilon": epsilon,
+                "fuse_with_relu": fuse_with_relu,
+                "is_test": is_test,
+                "momentum": momentum,
+                "trainable_statistics": trainable_statistics,
+                "use_global_stats": use_global_stats,
+                "use_mkldnn": use_mkldnn1
+            })
+
+        relu_op = OpConfig(
+            type="relu",
+            inputs={"X": ["norm_output"]},
+            outputs={"Out": ["relu_output"]},
+            attrs={"use_cudnn": use_cudnn,
+                   "use_mkldnn": use_mkldnn2})
+
+        model_net = [batch_norm_op, relu_op]
+
+        program_config = ProgramConfig(
+            ops=model_net,
+            weights={
+                "Bias": TensorConfig(data_gen=partial(generate_weight)),
+                "Mean": TensorConfig(data_gen=partial(generate_weight)),
+                "Scale": TensorConfig(data_gen=partial(generate_weight)),
+                "Variance": TensorConfig(data_gen=partial(generate_weight))
+            },
+            inputs={
+                "input_data": TensorConfig(data_gen=partial(generate_input))
+            },
+            outputs=["relu_output"])
+
+        return program_config
+
+    def sample_predictor_configs(self, program_config):
+        config = self.create_inference_config(use_mkldnn=True)
+        yield config, ["batch_norm"], (1e-5, 1e-5)
 
-import paddle.fluid as fluid
-from inference_pass_test import InferencePassTest
-from paddle import enable_static
-from paddle.fluid.core import PassVersionChecker
-
-enable_static()
-
-
-class BnReluOneDnnFusePassTest(InferencePassTest):
-    def setUp(self):
-        self.set_params()
-        with fluid.program_guard(self.main_program, self.startup_program):
-            data = fluid.data(
-                name="data", shape=[-1, 3, 100, 100], dtype="float32")
-            bn_out = fluid.layers.batch_norm(
-                input=data, is_test=True, use_global_stats=self.global_stats)
-            relu_out = fluid.layers.relu(bn_out)
-
-        self.feeds = {
-            "data": np.random.random((1, 3, 100, 100)).astype("float32")
-        }
-        self.fetch_list = [relu_out]
-        self.enable_mkldnn = True
-
-    def set_params(self):
-        self.global_stats = False
-        self.pass_name = "batch_norm_act_fuse_pass"
-
-    def test_check_output(self):
-        self.check_output()
-        self.assertTrue(PassVersionChecker.IsCompatible(self.pass_name))
-
-
-class BnReluGlobalStatsOneDnnFusePassTest(InferencePassTest):
-    def setUp(self):
-        self.set_params()
-        with fluid.program_guard(self.main_program, self.startup_program):
-            data = fluid.data(
-                name="data", shape=[-1, 3, 100, 100], dtype="float32")
-            bn_out = fluid.layers.batch_norm(
-                input=data, is_test=True, use_global_stats=self.global_stats)
-            relu_out = fluid.layers.relu(bn_out)
-
-        self.feeds = {
-            "data": np.random.random((1, 3, 100, 100)).astype("float32")
-        }
-        self.fetch_list = [relu_out]
-        self.enable_mkldnn = True
-
-    def set_params(self):
-        self.global_stats = True
-        self.pass_name = "batch_norm_act_fuse_pass"
-
-    def test_check_output(self):
-        self.check_output()
-        self.assertTrue(PassVersionChecker.IsCompatible(self.pass_name))
+    def test(self):
+        self.run_and_statis(quant=False, passes=["batch_norm_act_fuse_pass"])
 
 
 if __name__ == "__main__":
     unittest.main()
-- 
GitLab
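
Reviewer note (not part of the patch): the new compat checks gate the fusion on batch_norm running with frozen statistics (is_test = true, trainable_statistics = false). Only in that mode does batch_norm reduce to a fixed per-channel affine map, which is what lets oneDNN fold the following relu into the same kernel. Below is a minimal NumPy sketch of that algebra for sanity-checking outside Paddle; the shapes, epsilon value, and tolerance are illustrative assumptions, not values taken from the patch.

import numpy as np

rng = np.random.default_rng(0)
x = rng.standard_normal((1, 3, 8, 8)).astype(np.float32)  # NCHW activations
scale = rng.standard_normal(3).astype(np.float32)
bias = rng.standard_normal(3).astype(np.float32)
mean = rng.standard_normal(3).astype(np.float32)  # frozen running mean
var = rng.random(3).astype(np.float32)            # frozen running variance, >= 0
eps = 1e-3  # upper bound accepted by the pass's epsilon check ([0, 0.001])

c = (None, slice(None), None, None)  # broadcast per-channel params over NCHW

# Two-op reference: inference-mode batch_norm followed by relu.
bn = (x - mean[c]) / np.sqrt(var[c] + eps) * scale[c] + bias[c]
two_op = np.maximum(bn, 0.0)

# Fused form: fold the frozen statistics into one affine map, then clamp.
a = scale / np.sqrt(var + eps)  # per-channel multiplier
b = bias - mean * a             # per-channel shift
fused = np.maximum(x * a[c] + b[c], 0.0)

assert np.allclose(two_op, fused, atol=1e-5)

If the statistics were still being updated per batch, a and b would differ between evaluations and the folding would be invalid, which is why the pass refuses to fire in training configurations and why the new Python UT compares fused and unfused outputs within the (1e-5, 1e-5) tolerances of sample_predictor_configs.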