diff --git a/paddle/fluid/operators/ngraph/ngraph_engine.cc b/paddle/fluid/operators/ngraph/ngraph_engine.cc
index b37ccf3f0d9b9c4b0812df349d7aeb31a1a75257..41f845e26e7d166e0361585c961a5209fae3fc7a 100644
--- a/paddle/fluid/operators/ngraph/ngraph_engine.cc
+++ b/paddle/fluid/operators/ngraph/ngraph_engine.cc
@@ -177,36 +177,6 @@ std::string SerializedBlock(const framework::BlockDesc& bdesc) {
   return block_desc.Proto()->SerializeAsString();
 }
 
-std::string GenerateEngineKey(const framework::BlockDesc& bdesc) {
-  framework::proto::BlockDesc block_proto;
-  framework::BlockDesc block_desc(nullptr, &block_proto);
-  block_desc.Proto()->set_parent_idx(-1);
-  block_desc.Proto()->set_idx(0);
-
-  for (auto& op_desc : bdesc.AllOps()) {
-    auto* op = block_desc.AppendOp();
-    *op->Proto() = *op_desc->Proto();
-  }
-  auto engine_key = std::to_string(
-      std::hash<std::string>()(block_desc.Proto()->SerializeAsString()));
-  return engine_key;
-}
-
-std::string GenerateEngineKey(const std::vector<std::string>& engine_inputs,
-                              const std::vector<std::string>& engine_outputs,
-                              int size) {
-  std::string engine_hash_key = "";
-  for (auto name : engine_inputs) {
-    engine_hash_key += name;
-  }
-  for (auto name : engine_outputs) {
-    engine_hash_key += name;
-  }
-  engine_hash_key += std::to_string(size);
-  auto engine_key = std::to_string(std::hash<std::string>()(engine_hash_key));
-  return engine_key;
-}
-
 void NgraphEngine::FuseNgraphOps(
     const framework::BlockDesc& block_desc,
     std::vector<std::unique_ptr<framework::OperatorBase>>* ops) {
diff --git a/paddle/fluid/operators/ngraph/ops/cast_op.h b/paddle/fluid/operators/ngraph/ops/cast_op.h
index 8e385f61bee10b8d4dfb2fdcc723637a6f3c2a07..ae26e7fde6a8d9d930f5ee93250a00f24c58851c 100644
--- a/paddle/fluid/operators/ngraph/ops/cast_op.h
+++ b/paddle/fluid/operators/ngraph/ops/cast_op.h
@@ -40,23 +40,8 @@ static void BuildCastNode(
   auto out = std::make_shared<ngraph::op::Convert>(input, ng_dtype);
   paddle::platform::SetOutputNode(op, "Out", out, ngb_node_map);
 }
-
-static void BuildCastGradNode(
-    const std::shared_ptr<framework::OperatorBase>& op,
-    std::shared_ptr<
-        std::unordered_map<std::string, std::shared_ptr<ngraph::Node>>>
-        ngb_node_map) {
-  auto input = platform::GetInputNode(op, "Out@GRAD", ngb_node_map);
-  auto op_attrs = framework::AttrReader(op->Attrs());
-  auto ng_dtype =
-      platform::GetNgType(static_cast<framework::proto::VarType::Type>(
-          op_attrs.Get<int>("out_dtype")));
-  auto out = std::make_shared<ngraph::op::Convert>(input, ng_dtype);
-  platform::SetOutputNode(op, "X@GRAD", out, ngb_node_map);
-}
 }  // namespace ngraphs
 }  // namespace operators
 }  // namespace paddle
 
 REGISTER_NG_OP(cast, BuildCastNode);
-REGISTER_NG_OP(cast_grad, BuildCastGradNode);
diff --git a/paddle/fluid/operators/ngraph/ops/elementwise_node.h b/paddle/fluid/operators/ngraph/ops/elementwise_node.h
index 2b10af4588c350e8581e304cdfdd075f56be53fd..a555f57f999d27fd1c49b485d5967b3f71888a5a 100644
--- a/paddle/fluid/operators/ngraph/ops/elementwise_node.h
+++ b/paddle/fluid/operators/ngraph/ops/elementwise_node.h
@@ -37,9 +37,7 @@ void BuildElementwiseBinaryNode(
   std::shared_ptr<ngraph::Node>& x = nodes.at(0);
   std::shared_ptr<ngraph::Node>& y = nodes.at(1);
 
-  if (x->get_element_type() != y->get_element_type()) {
-    y = std::make_shared<ngraph::op::Convert>(y, x->get_element_type());
-  }
+  y = std::make_shared<ngraph::op::Convert>(y, x->get_element_type());
   auto out = std::make_shared<T>(x, y);
   paddle::platform::SetOutputNode(op, "Out", out, ngb_node_map);
 }
diff --git a/paddle/fluid/operators/ngraph/ops/reshape_op.h b/paddle/fluid/operators/ngraph/ops/reshape_op.h
index 89ad04f06f61ba0b91c06965ed985c84842ee634..a0fe441e476df9105ca42644053a09ab37b643c5 100644
--- a/paddle/fluid/operators/ngraph/ops/reshape_op.h
+++ b/paddle/fluid/operators/ngraph/ops/reshape_op.h
@@ -23,6 +23,7 @@ limitations under the License. */
 
 #include "ngraph/ngraph.hpp"
 #include "paddle/fluid/operators/ngraph/ops/op_bridge.h"
+#include "paddle/fluid/platform/enforce.h"
 #include "paddle/fluid/platform/ngraph_helper.h"
 
 namespace paddle {
@@ -60,20 +61,16 @@ static void BuildReshapeNode(
 
   std::shared_ptr<ngraph::Node> shape =
       platform::GetInputNode(op, "Shape", ngb_node_map);
+  PADDLE_ENFORCE_EQ(shape, nullptr,
+                    platform::errors::Unimplemented(
+                        "Support for Shape input is not implemented"));
 
   auto op_attrs = framework::AttrReader(op->Attrs());
   std::vector<int> v_shape = op_attrs.Get<std::vector<int>>("shape");
-  auto out = input;
-  if (shape != nullptr) {
-    ngraph::Shape new_shape;
-    for (auto& it : shape->get_shape()) {
-      new_shape.push_back(it);
-    }
-    out = platform::NgReshaper(input, shape->get_shape());
-  } else {
-    auto out_shape = calc_output_shape(input_shape, v_shape);
-    out = platform::NgReshaper(input, out_shape);
-  }
+
+  auto out_shape = calc_output_shape(input_shape, v_shape);
+  auto out = platform::NgReshaper(input, out_shape);
+  platform::SetOutputNode(op, "Out", out, ngb_node_map);
 
   if (is_v2) {
     ngraph::Shape input_xshape(input_shape.size() + 1);
@@ -83,7 +80,6 @@ static void BuildReshapeNode(
         input->get_element_type(), input_xshape, std::vector<std::string>{});
     platform::SetOutputNode(op, "XShape", xshape_node, ngb_node_map);
   }
-  platform::SetOutputNode(op, "Out", out, ngb_node_map);
 }
 
 template <bool is_v2>
diff --git a/paddle/fluid/operators/ngraph/ops/sum_op.h b/paddle/fluid/operators/ngraph/ops/sum_op.h
index ab8cdb8f4d847c0acb60b39d07dc83f085b60bbd..804f932d24460e0715e804e4c347538a68664d07 100644
--- a/paddle/fluid/operators/ngraph/ops/sum_op.h
+++ b/paddle/fluid/operators/ngraph/ops/sum_op.h
@@ -14,7 +14,9 @@ limitations under the License. */
 
 #pragma once
 
+#include <memory>
 #include <string>
+#include <unordered_map>
 #include <vector>
 
 #include "ngraph/ngraph.hpp"
@@ -34,19 +36,18 @@ void BuildSumNode(
   for (auto& var_name_item : op->Inputs()) {
     for (auto& var_name : var_name_item.second) {
       op_inputs.push_back(var_name);
-      if (ngb_node_map->find(var_name) == ngb_node_map->end()) {
-        PADDLE_THROW("op % input varname %s is not found in var_node_map",
-                     op->Type(), var_name);
-      }
+      PADDLE_ENFORCE_NE(
+          ngb_node_map->find(var_name), ngb_node_map->end(),
+          platform::errors::NotFound(
+              "op %s input varname %s is not found in var_node_map", op->Type(),
+              var_name));
     }
   }
   std::shared_ptr<ngraph::Node>& sum = ngb_node_map->at(op_inputs[0]);
   for (size_t k = 1; k < op_inputs.size(); ++k) {
     std::shared_ptr<ngraph::Node>& nodek = ngb_node_map->at(op_inputs[k]);
-    if (nodek->get_element_type() != sum->get_element_type()) {
-      nodek =
-          std::make_shared<ngraph::op::Convert>(nodek, sum->get_element_type());
-    }
+    nodek =
+        std::make_shared<ngraph::op::Convert>(nodek, sum->get_element_type());
     sum = sum + nodek;
   }
   platform::SetOutputNode(op, "Out", sum, ngb_node_map);
diff --git a/python/paddle/fluid/tests/unittests/ngraph/test_compare_ngraph_op.py b/python/paddle/fluid/tests/unittests/ngraph/test_compare_ngraph_op.py
index 2f731a7693bbcee9540a40b3996e9aaef59d13ea..54dab6c475510acd895c88adab43d2feb7c3a037 100644
--- a/python/paddle/fluid/tests/unittests/ngraph/test_compare_ngraph_op.py
+++ b/python/paddle/fluid/tests/unittests/ngraph/test_compare_ngraph_op.py
@@ -17,7 +17,7 @@ from __future__ import print_function
 import unittest
 import sys
 sys.path.append("../")
-import test_compare_op
+from test_compare_op import *
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/python/paddle/fluid/tests/unittests/ngraph/test_logical_ngraph_op.py b/python/paddle/fluid/tests/unittests/ngraph/test_logical_ngraph_op.py
index 2f227ce87ca483ccd9af78ce02262d8f9effd39c..01f6008dba0e5a4f34ba3c1bb6d999b9a7b7e670 100644
--- a/python/paddle/fluid/tests/unittests/ngraph/test_logical_ngraph_op.py
+++ b/python/paddle/fluid/tests/unittests/ngraph/test_logical_ngraph_op.py
@@ -18,11 +18,7 @@ import unittest, sys
 sys.path.append("../")
 import numpy as np
 
-from test_logical_op import create_test_class
-
-create_test_class('logical_and', lambda _a, _b: np.logical_and(_a, _b))
-create_test_class('logical_or', lambda _a, _b: np.logical_or(_a, _b))
-create_test_class('logical_not', lambda _a: np.logical_not(_a), False)
+from test_logical_op import *
 
 if __name__ == '__main__':
     unittest.main()
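Note on the elementwise_node.h and sum_op.h hunks: both drop the element-type guard and insert an ngraph::op::Convert unconditionally, which only makes sense if a Convert to the node's existing element type is legal and the backend folds the resulting no-op cast away. A minimal sketch of the pattern under that assumption; AlignElementType is a hypothetical helper named here for illustration, not code from this patch:

#include <memory>

#include "ngraph/ngraph.hpp"

// Illustrative sketch: always insert a Convert instead of branching on
// get_element_type(), assuming nGraph treats a same-type Convert as a no-op.
static std::shared_ptr<ngraph::Node> AlignElementType(
    const std::shared_ptr<ngraph::Node>& node,
    const ngraph::element::Type& target_type) {
  return std::make_shared<ngraph::op::Convert>(node, target_type);
}

With such a helper, the sum loop would read nodek = AlignElementType(nodek, sum->get_element_type()); the diff inlines the same expression instead.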
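Note on the reshape_op.h hunks: the runtime Shape-input path is removed and rejected early with PADDLE_ENFORCE_EQ(shape, nullptr, ...), so the output shape is now always computed from the "shape" attribute by calc_output_shape, and the "Out" output is registered right after it is built instead of at the end of the function. For reference, the usual Paddle reshape attribute rules (a 0 copies the input dimension at that axis, at most one -1 is inferred so the element count is preserved) can be sketched as below; InferReshape is a hypothetical stand-in, not the calc_output_shape defined elsewhere in that file:

#include <cstddef>
#include <vector>

#include "ngraph/ngraph.hpp"

// Hypothetical illustration of reshape attribute handling, assuming the
// standard Paddle rules: 0 keeps the input dim, a single -1 is inferred.
static ngraph::Shape InferReshape(const ngraph::Shape& in,
                                  const std::vector<int>& attr) {
  size_t numel = 1;
  for (auto d : in) numel *= d;
  ngraph::Shape out(attr.size());
  size_t known = 1;
  int infer_axis = -1;
  for (size_t i = 0; i < attr.size(); ++i) {
    if (attr[i] == 0) {
      out[i] = in[i];  // copy the corresponding input dimension
    } else if (attr[i] == -1) {
      infer_axis = static_cast<int>(i);  // fill in after the loop
      out[i] = 1;
    } else {
      out[i] = static_cast<size_t>(attr[i]);
    }
    known *= out[i];
  }
  if (infer_axis >= 0) out[infer_axis] = numel / known;
  return out;
}

For example, InferReshape({4, 6}, {0, 2, -1}) would yield {4, 2, 3}.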
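Note on the two test wrappers: a bare "import test_compare_op" only binds the module name, so unittest.main() in the wrapper module finds no TestCase subclasses in its own namespace and the nGraph variants never actually run. "from test_compare_op import *" (and likewise "from test_logical_op import *", which also replaces the manual create_test_class calls) pulls the generated test classes into the wrapper's namespace, where the default test loader can discover them.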