Unverified commit 0ce6d7fa, authored by joanna.wozna.intel, committed by GitHub

Fix bf16 activations test for softmax and gelu (#29502)

* Fix bf16 activations test for softmax and gelu

* Resolve conflict
Parent: 60bfd308
@@ -447,7 +447,6 @@ test_imperative_static_runner_while^|^
 test_optimizer_in_control_flow^|^
 test_fuse_bn_act_pass^|^
 test_fuse_bn_add_act_pass^|^
-test_activation_mkldnn_op^|^
 test_tsm^|^
 test_gru_rnn_op^|^
 test_rnn_op^|^
@@ -79,6 +79,8 @@ class TestMKLDNNGeluDim2Approx(TestActivation):
         self.attrs = {"use_mkldnn": True, "approximate": True}
 
 
+@unittest.skipIf(not core.supports_bfloat16(),
+                 "place does not support BF16 evaluation")
 class TestMKLDNNGeluBf16Dim2(TestActivation):
     def setUp(self):
         self.op_type = "gelu"
@@ -98,6 +100,8 @@ class TestMKLDNNGeluBf16Dim2(TestActivation):
         pass
 
 
+@unittest.skipIf(not core.supports_bfloat16(),
+                 "place does not support BF16 evaluation")
 class TestMKLDNNGeluBf16Dim2Approx(TestActivation):
     def setUp(self):
         self.op_type = "gelu"
@@ -225,6 +229,8 @@ class TestMKLDNNGeluDim4Approx(TestActivation):
         self.attrs = {"use_mkldnn": True, "approximate": True}
 
 
+@unittest.skipIf(not core.supports_bfloat16(),
+                 "place does not support BF16 evaluation")
 class TestMKLDNNGeluBf16Dim4(TestActivation):
     def setUp(self):
         self.op_type = "gelu"
@@ -244,6 +250,8 @@ class TestMKLDNNGeluBf16Dim4(TestActivation):
         pass
 
 
+@unittest.skipIf(not core.supports_bfloat16(),
+                 "place does not support BF16 evaluation")
 class TestMKLDNNGeluBf16Dim4Approx(TestActivation):
     def setUp(self):
         self.op_type = "gelu"