Commit 162637b6 authored by: Yu Yang

Fix ngraph compile

test=develop
Parent: 194e66f7
...
@@ -471,27 +471,23 @@ void NgraphEngine::Run(const Scope& scope, const platform::Place& place) const {
     auto* tensor_pd = GetLoDTensorOrSelectedRowsValueFromVar(*var);
     PADDLE_ENFORCE(sp == Ddim2Shape(tensor_pd->dims()),
                    "Ensure ngraph tensor layout align with paddle tensor");
-    if (tensor_pd->type().hash_code() ==
-        typeid(float).hash_code()) {  // NOLINT
+    if (tensor_pd->type() == proto::VarType::FP32) {
       const float* arr = tensor_pd->data<float>();
       ti = backend_->create_tensor(ngraph::element::f32, sp,
                                    const_cast<float*>(arr));
-    } else if (tensor_pd->type().hash_code() ==
-               typeid(int).hash_code()) {  // NOLINT
+    } else if (tensor_pd->type() == proto::VarType::INT32) {
       const int* arr = tensor_pd->data<int>();
       ti = backend_->create_tensor(ngraph::element::i32, sp,
                                    const_cast<int*>(arr));
-    } else if (tensor_pd->type().hash_code() == typeid(int64_t).hash_code()) {
+    } else if (tensor_pd->type() == proto::VarType::INT64) {
       const int64_t* arr = tensor_pd->data<int64_t>();
       ti = backend_->create_tensor(ngraph::element::i64, sp,
                                    const_cast<int64_t*>(arr));
-    } else if (tensor_pd->type().hash_code() ==
-               typeid(double).hash_code()) {  // NOLINT
+    } else if (tensor_pd->type() == proto::VarType::FP64) {
       const double* arr = tensor_pd->data<double>();
       ti = backend_->create_tensor(ngraph::element::f64, sp,
                                    const_cast<double*>(arr));
-    } else if (tensor_pd->type().hash_code() ==
-               typeid(bool).hash_code()) {  // NOLINT
+    } else if (tensor_pd->type() == proto::VarType::BOOL) {
       const bool* arr = tensor_pd->data<bool>();
       ti = backend_->create_tensor(ngraph::element::boolean, sp,
                                    const_cast<bool*>(arr));
...
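Context: the diff replaces RTTI-based type dispatch (`type().hash_code() == typeid(T).hash_code()`) with comparisons against the framework's `proto::VarType` dtype enum, presumably because `Tensor::type()` no longer returns a `std::type_index` after the data-type refactor this commit builds on, which is what broke the nGraph engine build. Below is a minimal standalone sketch (not Paddle code) contrasting the two dispatch styles; `VarType` here is a simplified stand-in for `paddle::framework::proto::VarType`.

// sketch.cc -- illustrative only; "VarType" is a hypothetical stand-in enum.
#include <iostream>
#include <typeindex>
#include <typeinfo>

namespace VarType {
enum Type { FP32, FP64, INT32, INT64, BOOL };
}  // namespace VarType

// Old style: compare runtime type_info hash codes. This only works while
// the tensor exposes a std::type_index and stops compiling once it does not.
bool IsFloatOld(const std::type_index& t) {
  return t.hash_code() == typeid(float).hash_code();
}

// New style (what the commit switches to): compare the framework's own
// dtype enum value directly.
bool IsFloatNew(VarType::Type t) { return t == VarType::FP32; }

int main() {
  std::cout << IsFloatOld(typeid(float)) << "\n";  // prints 1
  std::cout << IsFloatNew(VarType::FP32) << "\n";  // prints 1
  return 0;
}

The enum comparison is also cheaper and clearer: each branch maps one dtype value to the matching `ngraph::element` type, rather than hashing `type_info` objects and suppressing lint warnings with `// NOLINT`.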