提交 71267a90 编写于 作者: Z zhupengyang 提交者: GitHub

[NPU] add check for reshape when "Shape" comes from Tensors (#2483)

fix errors for conv and conv_transpose
test=develop
上级 c15e5e05
......@@ -59,8 +59,8 @@ node_map_type ConvConverter(const std::shared_ptr<lite::OpLite> conv_op,
bool pad_equal =
((paddings[0] == paddings[1]) && (paddings[2] == paddings[3]));
if (!pad_equal) {
LOG(FATA) << "This pad not support ! " << paddings[0] << ", " << paddings[1]
<< ", " << paddings[2] << ", " << paddings[3];
LOG(FATAL) << "This pad not support ! " << paddings[0] << ", "
<< paddings[1] << ", " << paddings[2] << ", " << paddings[3];
}
// check depthwise mode, and decide whether use ConvolutionDepthwise Op
......
......@@ -54,8 +54,8 @@ node_map_type ConvTransposeConverter(
bool pad_equal =
((paddings[0] == paddings[1]) && (paddings[2] == paddings[3]));
if (!pad_equal) {
LOG(FATA) << "This pad not support ! " << paddings[0] << ", " << paddings[1]
<< ", " << paddings[2] << ", " << paddings[3];
LOG(FATAL) << "This pad not support ! " << paddings[0] << ", "
<< paddings[1] << ", " << paddings[2] << ", " << paddings[3];
}
// create input sizes node to describe the dimensions of input tensor
std::vector<int32_t> output_shape;
......
......@@ -41,8 +41,10 @@ node_map_type ReshapeConverter(const std::shared_ptr<lite::OpLite> reshape_op,
reshape_node->set_input_tensor(*inputs_map.at(x_var_name));
lite::npu::OpList::Global().add(inputs_map.at(x_var_name));
// read shape from actual shape tensor as input "w" if 'Shape' is found
if (lite::npu::HasInputArg(op_info, scope, "Shape")) {
// read shape from "ShapeTensor"(input), or "Shape"(input), or "shape"(attr)
if (lite::npu::HasInputArg(op_info, scope, "ShapeTensor")) {
LOG(FATAL) << "[NPU] not support \"Shape\" from more than one Tensor.";
} else if (lite::npu::HasInputArg(op_info, scope, "Shape")) {
auto actual_shape_var_name = op_info->Input("Shape").front();
if (!inputs_map.count(actual_shape_var_name)) {
auto actual_shape =
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册