From f2ad30c4dd74bd9f75ce1f416981d3ab12c74c7b Mon Sep 17 00:00:00 2001
From: baojun <32073718+baojun-nervana@users.noreply.github.com>
Date: Mon, 2 Sep 2019 22:49:00 -0700
Subject: [PATCH] Some ngraph op and unittest fix (#19515)

* update ngraph ops test=develop

* update unittest test=develop

* increase coverage test=develop
---
 .../fluid/operators/ngraph/ngraph_bridge.cc   |  3 +--
 paddle/fluid/operators/ngraph/ops/concat_op.h |  5 ++++-
 paddle/fluid/operators/ngraph/ops/conv2d_op.h |  4 ++--
 .../operators/ngraph/ops/cross_entropy_op.h   |  3 ++-
 .../fluid/operators/ngraph/ops/dropout_op.h   |  4 +++-
 .../operators/ngraph/ops/lookup_table_op.h    | 22 ++++++++++++-------
 paddle/fluid/operators/ngraph/ops/slice_op.h  | 12 +++++++++-
 .../unittests/ngraph/test_assign_ngraph_op.py |  2 +-
 .../unittests/ngraph/test_concat_ngraph_op.py |  2 +-
 .../ngraph/test_lookup_table_ngraph_op.py     |  2 +-
 .../ngraph/test_reshape_ngraph_op.py          |  2 +-
 .../unittests/ngraph/test_slice_ngraph_op.py  |  2 +-
 .../paddle/fluid/tests/unittests/op_test.py   |  6 +++++
 13 files changed, 48 insertions(+), 21 deletions(-)

diff --git a/paddle/fluid/operators/ngraph/ngraph_bridge.cc b/paddle/fluid/operators/ngraph/ngraph_bridge.cc
index db8a7ca94a..9ea7db2a67 100644
--- a/paddle/fluid/operators/ngraph/ngraph_bridge.cc
+++ b/paddle/fluid/operators/ngraph/ngraph_bridge.cc
@@ -44,8 +44,7 @@ bool NgraphBridge::isSupported(
   if (!isRegister(op_type)) {
     if (skip_op_list.count(op_type)) {
       if (op_type == "lookup_table" || op_type == "lookup_table_grad") {
-        if (op_attrs.Get<bool>("is_sparse") ||
-            (op_attrs.Get<int64_t>("padding_idx") != kNoPadding)) {
+        if (op_attrs.Get<bool>("is_sparse")) {
           result = false;
         }
       } else if ((op_type == "reshape") || (op_type == "reshape2")) {
diff --git a/paddle/fluid/operators/ngraph/ops/concat_op.h b/paddle/fluid/operators/ngraph/ops/concat_op.h
index 27d7968515..f34e161177 100644
--- a/paddle/fluid/operators/ngraph/ops/concat_op.h
+++ b/paddle/fluid/operators/ngraph/ops/concat_op.h
@@ -39,7 +39,10 @@ void BuildConcatNode(
     }
   }
   auto op_attrs = framework::AttrReader(op->Attrs());
-  const size_t axis = op_attrs.Get<int>("axis");
+  int axis = op_attrs.Get<int>("axis");
+  if (axis < 0) {
+    axis = axis + args[0]->get_shape().size();
+  }
   auto out = std::make_shared<ngraph::op::Concat>(args, axis);
   platform::SetOutputNode(op, "Out", out, ngb_node_map);
 }
diff --git a/paddle/fluid/operators/ngraph/ops/conv2d_op.h b/paddle/fluid/operators/ngraph/ops/conv2d_op.h
index b8ad7491d5..ab88d870c4 100644
--- a/paddle/fluid/operators/ngraph/ops/conv2d_op.h
+++ b/paddle/fluid/operators/ngraph/ops/conv2d_op.h
@@ -80,7 +80,7 @@ std::shared_ptr<ngraph::Node> GroupedGradConvolutionFilter(
     auto data_slice = std::make_shared<ngraph::op::Slice>(
         data_batch, lower_bound, upper_bound);
 
-    size_t filter_step = data_shape.at(0);
+    size_t filter_step = filter_shape.at(0) / groups;
 
     const std::vector<size_t> filter_lower_bound{i * filter_step, 0, 0, 0};
     const std::vector<size_t> filter_upper_bound{
@@ -127,7 +127,7 @@ std::shared_ptr<ngraph::Node> GroupedGradConvolutionData(
     auto data_slice = std::make_shared<ngraph::op::Slice>(
         data_batch, lower_bound, upper_bound);
 
-    size_t filter_step = data_shape.at(0);
+    size_t filter_step = filter_shape.at(0) / groups;
 
     const std::vector<size_t> filter_lower_bound{i * filter_step, 0, 0, 0};
     const std::vector<size_t> filter_upper_bound{
diff --git a/paddle/fluid/operators/ngraph/ops/cross_entropy_op.h b/paddle/fluid/operators/ngraph/ops/cross_entropy_op.h
index bc91be4532..e06446aca9 100644
--- a/paddle/fluid/operators/ngraph/ops/cross_entropy_op.h
+++ b/paddle/fluid/operators/ngraph/ops/cross_entropy_op.h
@@ -29,7 +29,7 @@ namespace ngraphs {
 std::shared_ptr<ngraph::Node> remove_trailing_one(
     const std::shared_ptr<ngraph::Node>& input) {
   auto shape = input->get_shape();
-  if (shape.back() == 1) {
+  if (shape.back() == 1 && shape.size() > 1) {
     shape.pop_back();
     return platform::NgReshaper(input, shape);
   } else {
@@ -73,6 +73,7 @@ std::shared_ptr<ngraph::Node> create_xe(
   shape.back() = 1;
   return platform::NgReshaper(-node_sum, shape);
 }
+
 std::shared_ptr<ngraph::Node> create_mask(
     const std::shared_ptr<ngraph::Node>& label, int ignore_index) {
   auto ignore_node = paddle::platform::CreateConstant(
diff --git a/paddle/fluid/operators/ngraph/ops/dropout_op.h b/paddle/fluid/operators/ngraph/ops/dropout_op.h
index cf19a58573..3fb55980d7 100644
--- a/paddle/fluid/operators/ngraph/ops/dropout_op.h
+++ b/paddle/fluid/operators/ngraph/ops/dropout_op.h
@@ -41,6 +41,7 @@ static void BuildDropoutNode(
       op_attrs.Get<std::string>("dropout_implementation");
   auto is_test = op_attrs.Get<bool>("is_test");
   auto seed = op_attrs.Get<int>("seed");
+  auto fix_seed = op_attrs.Get<bool>("fix_seed");
   float value = 1.0f - dropout_prob;
   bool upscale_in_train = (dropout_implementation == "upscale_in_train");
 
@@ -58,7 +59,8 @@ static void BuildDropoutNode(
         ngraph::Shape{}, {1});
 
     auto gen_mask = std::make_shared<ngraph::op::GenerateMask>(
-        one, input->get_shape(), input->get_element_type(), seed, value);
+        one, input->get_shape(), input->get_element_type(), seed, value,
+        fix_seed);
 
     if (upscale_in_train) {
       auto mask_val = paddle::platform::CreateConstant(
diff --git a/paddle/fluid/operators/ngraph/ops/lookup_table_op.h b/paddle/fluid/operators/ngraph/ops/lookup_table_op.h
index 5126854dc2..45bb31599b 100644
--- a/paddle/fluid/operators/ngraph/ops/lookup_table_op.h
+++ b/paddle/fluid/operators/ngraph/ops/lookup_table_op.h
@@ -47,16 +47,27 @@ void BuildLookupTableNode(
   if (is_sparse) {
     PADDLE_THROW("Sparsity is not yet supported in nGraph lookup_table op.");
   }
-
+  auto ng_w_mask = ng_w;
   if (padding_idx != kNoPadding) {
-    PADDLE_THROW("Padding is not yet supported in nGraph lookup_table op.");
+    auto w_shape = ng_w->get_shape();
+
+    std::vector<int> maskV(w_shape[0], 1);
+    maskV[padding_idx] = 0;
+    auto maskV_node = std::make_shared<ngraph::op::Constant>(
+        ng_w->get_element_type(), ngraph::Shape{w_shape[0]}, maskV);
+    ngraph::AxisSet axis_set;
+    for (unsigned int i = 1; i < w_shape.size(); ++i) axis_set.insert(i);
+    auto maskV_bd =
+        std::make_shared<ngraph::op::Broadcast>(maskV_node, w_shape, axis_set);
+    ng_w_mask = std::make_shared<ngraph::op::Multiply>(ng_w, maskV_bd);
   }
   auto shape = ng_ids->get_shape();
   if (shape.back() == 1) {
     shape.pop_back();
     ng_ids = platform::NgReshaper(ng_ids, shape);
   }
-  auto ng_lookup = std::make_shared<ngraph::op::Gather>(ng_w, ng_ids);
+
+  auto ng_lookup = std::make_shared<ngraph::op::Gather>(ng_w_mask, ng_ids);
   platform::SetOutputNode(op, "Out", ng_lookup, ngb_node_map);
 }
 
@@ -67,8 +78,6 @@ void BuildLookupTableGradNode(
         ngb_node_map) {
   auto op_attrs = paddle::framework::AttrReader(op->Attrs());
   const bool is_sparse = op_attrs.Get<bool>("is_sparse");
-  const int64_t padding_idx = op_attrs.Get<int64_t>("padding_idx");
-
   auto ng_ids = paddle::platform::GetInputNode(op, "Ids", ngb_node_map);
   PADDLE_ENFORCE_NOT_NULL(ng_ids);
 
@@ -81,9 +90,6 @@ void BuildLookupTableGradNode(
     PADDLE_THROW("Sparsity is not yet supported in nGraph lookup_table op.");
   }
 
-  if (padding_idx != kNoPadding) {
-    PADDLE_THROW("Padding is not yet supported in nGraph lookup_table op.");
-  }
   auto shape = ng_ids->get_shape();
   if (shape.back() == 1) {
     shape.pop_back();
diff --git a/paddle/fluid/operators/ngraph/ops/slice_op.h b/paddle/fluid/operators/ngraph/ops/slice_op.h
index 1ae4d198c2..f5ab413540 100644
--- a/paddle/fluid/operators/ngraph/ops/slice_op.h
+++ b/paddle/fluid/operators/ngraph/ops/slice_op.h
@@ -57,8 +57,18 @@ void BuildSliceNode(
     ng_end[axes[i]] = end;
   }
   auto out = std::make_shared<ngraph::op::Slice>(input, ng_start, ng_end);
-  platform::SetOutputNode(op, "Out", out, ngb_node_map);
+  auto out_shape = out->get_shape();
+
+  std::vector<size_t> out_axis_vec(out_shape.size());
+  std::iota(out_axis_vec.begin(), out_axis_vec.end(), 0);
+
+  paddle::platform::TrimTrailingSingularDims(&out_shape);
+  auto out_dim = std::make_shared<ngraph::op::Reshape>(
+      out, ngraph::AxisVector(out_axis_vec), ngraph::Shape(out_shape));
+
+  platform::SetOutputNode(op, "Out", out_dim, ngb_node_map);
 }
+
 void BuildSliceGradNode(
     const std::shared_ptr<framework::OperatorBase>& op,
     std::shared_ptr<
diff --git a/python/paddle/fluid/tests/unittests/ngraph/test_assign_ngraph_op.py b/python/paddle/fluid/tests/unittests/ngraph/test_assign_ngraph_op.py
index ccb30504d0..2c3e7ee6cc 100644
--- a/python/paddle/fluid/tests/unittests/ngraph/test_assign_ngraph_op.py
+++ b/python/paddle/fluid/tests/unittests/ngraph/test_assign_ngraph_op.py
@@ -16,7 +16,7 @@ from __future__ import print_function
 
 import unittest, sys
 sys.path.append("../")
-from test_assign_op import *
+from test_assign_op import TestAssignOp
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/python/paddle/fluid/tests/unittests/ngraph/test_concat_ngraph_op.py b/python/paddle/fluid/tests/unittests/ngraph/test_concat_ngraph_op.py
index a223d73a74..8517f7cc87 100644
--- a/python/paddle/fluid/tests/unittests/ngraph/test_concat_ngraph_op.py
+++ b/python/paddle/fluid/tests/unittests/ngraph/test_concat_ngraph_op.py
@@ -15,7 +15,7 @@
 from __future__ import print_function
 
 import unittest
-from paddle.fluid.tests.unittests.test_concat_op import TestConcatOp, TestConcatOp2, TestConcatOp3
+from paddle.fluid.tests.unittests.test_concat_op import TestConcatOp, TestConcatOp2, TestConcatOp3, TestConcatOp4, TestConcatOp5
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/python/paddle/fluid/tests/unittests/ngraph/test_lookup_table_ngraph_op.py b/python/paddle/fluid/tests/unittests/ngraph/test_lookup_table_ngraph_op.py
index c9111c2210..d6ec4b2232 100644
--- a/python/paddle/fluid/tests/unittests/ngraph/test_lookup_table_ngraph_op.py
+++ b/python/paddle/fluid/tests/unittests/ngraph/test_lookup_table_ngraph_op.py
@@ -15,7 +15,7 @@ from __future__ import print_function
 
 import unittest, sys
 sys.path.append("../")
-from test_lookup_table_op import *
+from test_lookup_table_op import TestLookupTableOp, TestLookupTableOpWithTensorIds, TestLookupTableOpWithPadding, TestLookupTableOpWithTensorIdsAndPadding, TestLookupTableWIsSelectedRows, TestLookupTableWithTensorIdsWIsSelectedRows
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/python/paddle/fluid/tests/unittests/ngraph/test_reshape_ngraph_op.py b/python/paddle/fluid/tests/unittests/ngraph/test_reshape_ngraph_op.py
index cffa283271..928e1cb4de 100644
--- a/python/paddle/fluid/tests/unittests/ngraph/test_reshape_ngraph_op.py
+++ b/python/paddle/fluid/tests/unittests/ngraph/test_reshape_ngraph_op.py
@@ -17,7 +17,7 @@ from __future__ import print_function
 
 import unittest, sys
 sys.path.append("../")
-from test_reshape_op import TestReshapeOp, TestReshapeOpDimInfer1, TestReshapeOpDimInfer2, TestReshapeOpWithInputShape
+from test_reshape_op import TestReshapeOp, TestReshapeOpDimInfer1, TestReshapeOpDimInfer2
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/python/paddle/fluid/tests/unittests/ngraph/test_slice_ngraph_op.py b/python/paddle/fluid/tests/unittests/ngraph/test_slice_ngraph_op.py
index dc41e8a98a..b6f1f4e0dc 100644
--- a/python/paddle/fluid/tests/unittests/ngraph/test_slice_ngraph_op.py
+++ b/python/paddle/fluid/tests/unittests/ngraph/test_slice_ngraph_op.py
@@ -16,7 +16,7 @@ from __future__ import print_function
 
 import unittest, sys
 sys.path.append("../")
-from test_slice_op import TestSliceOp, TestCase1, TestCase2
+from test_slice_op import TestSliceOp, TestSliceOp_decs_dim, TestSliceOp_decs_dim_2, TestSliceOp_decs_dim_3, TestSliceOp_decs_dim_5, TestSliceOp_decs_dim_6, TestCase1, TestCase2
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/python/paddle/fluid/tests/unittests/op_test.py b/python/paddle/fluid/tests/unittests/op_test.py
index da3d9cb1b9..2af2f259e2 100644
--- a/python/paddle/fluid/tests/unittests/op_test.py
+++ b/python/paddle/fluid/tests/unittests/op_test.py
@@ -664,6 +664,12 @@ class OpTest(unittest.TestCase):
             warnings.warn(
                 "check inplace_grad for ops using mkldnn is not supported")
             return
+        use_ngraph = fluid.core.is_compiled_with_ngraph(
+        ) and fluid.core.get_flags_use_ngraph()
+        if use_ngraph:
+            warnings.warn(
+                "check inplace_grad for ops using ngraph is not supported")
+            return
         self.check_inplace_grad_output_with_place(
             place, no_check_set=no_check_set, inplace_atol=inplace_atol)
 
-- 
GitLab
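
Note on the lookup_table change: rather than rejecting padding_idx outright,
BuildLookupTableNode now multiplies the weight table by a 0/1 row mask
(broadcast across the embedding dimension) before the Gather, so any id equal
to padding_idx fetches an all-zero vector. Below is a minimal NumPy sketch of
that semantics; the helper name and sample data are illustrative, not from
the patch:

    import numpy as np

    def lookup_table_with_padding(w, ids, padding_idx):
        mask = np.ones(w.shape[0])      # one 0/1 flag per table row
        mask[padding_idx] = 0           # zero out the padding row
        w_masked = w * mask[:, None]    # broadcast over the embedding dim
        return w_masked[ids]            # gather rows by id

    w = np.arange(12, dtype=np.float32).reshape(4, 3)
    ids = np.array([0, 2, 2, 1])
    print(lookup_table_with_padding(w, ids, padding_idx=2))
    # rows looked up for id 2 come back as zeros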
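
Note on the concat change: nGraph's Concat expects a non-negative axis, while
Paddle allows negative ones, so BuildConcatNode now shifts a negative axis by
the input rank before building the node. A tiny sketch of the same
normalization (the Python function name is assumed for illustration):

    import numpy as np

    def normalize_axis(axis, rank):
        # A negative axis counts from the end, exactly as in NumPy.
        return axis + rank if axis < 0 else axis

    x = np.zeros((2, 3, 4))
    assert normalize_axis(-1, x.ndim) == 2  # -1 means the last axis
    assert normalize_axis(1, x.ndim) == 1   # non-negative axes pass through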
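
Note on the slice change: the added Reshape squeezes trailing size-1
dimensions out of the slice result, which is what the newly imported
TestSliceOp_decs_dim* cases exercise. A NumPy illustration of the assumed
TrimTrailingSingularDims behavior (this Python helper is hypothetical; only
the C++ call in the patch is real):

    import numpy as np

    def trim_trailing_singular_dims(shape):
        # Drop trailing 1s but keep at least one dimension.
        shape = list(shape)
        while len(shape) > 1 and shape[-1] == 1:
            shape.pop()
        return tuple(shape)

    out = np.zeros((3, 4, 1, 1))
    print(trim_trailing_singular_dims(out.shape))  # (3, 4)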