Commit f5221ac1 authored by joanna.wozna.intel, committed by Tao Luo

Disable conv requant squash (#20041)

* Fix conv2d+dequantize squash for residual fusion

test=develop

* Disable conv-requant squash

test=develop
Parent dd8fc981
...@@ -202,7 +202,6 @@ void CPUQuantizeSquashPass::ApplyImpl(ir::Graph* graph) const { ...@@ -202,7 +202,6 @@ void CPUQuantizeSquashPass::ApplyImpl(ir::Graph* graph) const {
std::unordered_map<const Node*, int> nodes_keep_counter; std::unordered_map<const Node*, int> nodes_keep_counter;
FindNodesToKeep(graph, &nodes_keep_counter); FindNodesToKeep(graph, &nodes_keep_counter);
DequantQuantSquash(graph, &nodes_keep_counter); DequantQuantSquash(graph, &nodes_keep_counter);
ConvRequantSquash(graph);
ConvDequantSquash(graph); ConvDequantSquash(graph);
} }
......
...@@ -277,22 +277,18 @@ TEST(CpuQuantizeSquashPass, equal_scales) { ...@@ -277,22 +277,18 @@ TEST(CpuQuantizeSquashPass, equal_scales) {
// From Conv1->d->Dequant->e->Quant->f->Conv2
// First change to Conv1->d->Requant->f->Conv2
TEST(CpuQuantizeSquashPass, unequal_scales) {
  auto scale_out = 1.0f;
  auto scale1 = 1.2345f;
  auto scale2 = 21.0f;
  auto use_mkldnn = true;
  // The dequantize->quantize pair (unequal scales) is fused into a single
  // requantize op:
  // Remove 3 nodes: Dequant, Quant, e
  // Insert 1 node: Requant
  auto remove_nodes = 2;
  CountNodeTest(
      BuildConvRequantProgramDesc(use_mkldnn, scale_out, scale1, scale2),
      remove_nodes);
}
// from // from
...@@ -322,29 +318,6 @@ TEST(CpuQuantizeSquashPass, branch_to_equal_unequal_and_fp32) { ...@@ -322,29 +318,6 @@ TEST(CpuQuantizeSquashPass, branch_to_equal_unequal_and_fp32) {
scale, scale2); scale, scale2);
} }
// a->Conv1->b->Requant->c
// d->Conv2->e->Requant->f
// {c,f}->Concat
TEST(CpuQuantizeSquashPass, equal_scales_squash_requantize) {
  // Both requantize ops should be squashed into their preceding convs.
  const auto scale_out = 1.0f;
  const auto scale = 1.2345f;
  const auto use_mkldnn = true;
  // Nodes removed by the pass: b, Requant1, e, Requant2.
  const auto remove_nodes = 4;
  // Helper: builds a fresh conv+requant+concat program for each check.
  const auto build = [&] {
    return BuildConvsRequantConcatProgramDesc(use_mkldnn, scale_out, scale,
                                              scale);
  };
  CountNodeTest(build(), remove_nodes);
  // After squashing, each conv's output scale must equal the requantize
  // output scale.
  EqualScaleOutTest(build(), "Conv1", scale);
  EqualScaleOutTest(build(), "Conv2", scale);
}
// a->Concat->b->Dequant->c->Quant->d->Conv->e // a->Concat->b->Dequant->c->Quant->d->Conv->e
// to a->Concat->b->Requant->d->Conv->e // to a->Concat->b->Requant->d->Conv->e
TEST(CpuQuantizeSquashPass, TEST(CpuQuantizeSquashPass,
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
To comment, please register