Unverified · Commit 72afff36 authored by hong19860320, committed by GitHub

[NPU] Fix build error caused by flatbuffer if the target is tiny_publish (#4340)

Parent 571f6651
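Why replacing `auto` with an explicit `std::vector<int>` fixes the tiny_publish build: with the flatbuffers-backed op description, `GetAttr<std::vector<int>>()` may hand back a lightweight view type that converts to `std::vector<int>` but is not one, so `auto` deduces the view and later vector-specific code fails to compile. The sketch below only illustrates that mechanism; `IntVectorView` and `GetPaddingsAttr` are hypothetical names, not Paddle Lite's actual types.

```cpp
#include <cstdio>
#include <vector>

// Hypothetical stand-in for the lightweight view that a flatbuffers-backed
// attribute getter might return in tiny_publish builds (the real type in
// Paddle Lite differs; this is only a sketch of the failure mode).
struct IntVectorView {
  const int* ptr;
  size_t len;
  // The view converts to std::vector<int>, but does not itself expose
  // std::vector's interface (size(), operator[], empty(), ...).
  operator std::vector<int>() const {
    return std::vector<int>(ptr, ptr + len);
  }
};

// Hypothetical getter mimicking op_info->GetAttr<std::vector<int>>("paddings").
IntVectorView GetPaddingsAttr() {
  static const int kPaddings[4] = {1, 1, 1, 1};
  return IntVectorView{kPaddings, 4};
}

int main() {
  // With `auto`, the variable would be deduced as IntVectorView, so code
  // written against std::vector<int> no longer compiles:
  //   auto paddings = GetPaddingsAttr();
  //   paddings.size();  // error: IntVectorView has no member 'size'
  //
  // Spelling out the target type forces the user-defined conversion, which is
  // the pattern applied to paddings/dilations/dim throughout this commit.
  std::vector<int> paddings = GetPaddingsAttr();
  std::printf("paddings.size() = %zu\n", paddings.size());
  return 0;
}
```

Declaring the variable with its target type triggers the conversion once at the assignment, so the rest of each converter can keep treating `paddings`, `dilations`, and `dim` as ordinary `std::vector<int>` objects.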
@@ -60,9 +60,9 @@ int ConvConverter(void* ctx, OpLite* op, KernelBase* kernel) {
   CHECK_EQ(output_dims[0], bs);
   CHECK_EQ(output_dims[1], oc);
   auto strides = op_info->GetAttr<std::vector<int>>("strides");
-  auto paddings = op_info->GetAttr<std::vector<int>>("paddings");
+  std::vector<int> paddings = op_info->GetAttr<std::vector<int>>("paddings");
   auto groups = op_info->GetAttr<int>("groups");
-  auto dilations = op_info->GetAttr<std::vector<int>>("dilations");
+  std::vector<int> dilations = op_info->GetAttr<std::vector<int>>("dilations");
   bool with_act =
       op_info->HasAttr("with_act") && op_info->GetAttr<bool>("with_act");
   std::string act_type =

@@ -45,7 +45,7 @@ int PoolConverter(void* ctx, OpLite* op, KernelBase* kernel) {
   auto pooling_type = op_info->GetAttr<std::string>("pooling_type");
   auto global_pooling = op_info->GetAttr<bool>("global_pooling");
   auto ksize = op_info->GetAttr<std::vector<int>>("ksize");
-  auto paddings = op_info->GetAttr<std::vector<int>>("paddings");
+  std::vector<int> paddings = op_info->GetAttr<std::vector<int>>("paddings");
   // pool mode
   if ((pooling_type == "max") || (pooling_type == "avg")) {

@@ -53,9 +53,9 @@ int ConvConverter(void* ctx, OpLite* op, KernelBase* kernel) {
   CHECK_EQ(output_dims[0], bs);
   CHECK_EQ(output_dims[1], oc);
   auto strides = op_info->GetAttr<std::vector<int>>("strides");
-  auto paddings = op_info->GetAttr<std::vector<int>>("paddings");
+  std::vector<int> paddings = op_info->GetAttr<std::vector<int>>("paddings");
   auto groups = op_info->GetAttr<int>("groups");
-  auto dilations = op_info->GetAttr<std::vector<int>>("dilations");
+  std::vector<int> dilations = op_info->GetAttr<std::vector<int>>("dilations");
   bool with_act =
       op_info->HasAttr("with_act") && op_info->GetAttr<bool>("with_act");
   std::string act_type =

@@ -59,8 +59,8 @@ int ConvTransposeConverter(void* ctx, OpLite* op, KernelBase* kernel) {
     output_size = op_info->GetAttr<std::vector<int>>("output_size");
   }
-  auto paddings = op_info->GetAttr<std::vector<int>>("paddings");
-  auto dilations = op_info->GetAttr<std::vector<int>>("dilations");
+  std::vector<int> paddings = op_info->GetAttr<std::vector<int>>("paddings");
+  std::vector<int> dilations = op_info->GetAttr<std::vector<int>>("dilations");
   CHECK_EQ(dilations.size(), 2L);
   std::string padding_algorithm =
       op_info->HasAttr("padding_algorithm")

@@ -35,7 +35,7 @@ int Pad2dConverter(void* ctx, OpLite* op, KernelBase* kernel) {
   auto x = scope->FindMutableTensor(x_name);
   auto x_dims = x->dims();
   auto out_name = op_info->Output("Out").front();
-  auto padding = op_info->GetAttr<std::vector<int>>("paddings");
+  std::vector<int> padding = op_info->GetAttr<std::vector<int>>("paddings");
   CHECK_EQ(padding.size(), 4);
   // X node

@@ -39,7 +39,7 @@ int PoolConverter(void* ctx, OpLite* op, KernelBase* kernel) {
   auto pooling_type = op_info->GetAttr<std::string>("pooling_type");
   auto global_pooling = op_info->GetAttr<bool>("global_pooling");
   auto ksize = op_info->GetAttr<std::vector<int>>("ksize");
-  auto paddings = op_info->GetAttr<std::vector<int>>("paddings");
+  std::vector<int> paddings = op_info->GetAttr<std::vector<int>>("paddings");
   // X node
   std::shared_ptr<Node> x_node = nullptr;

@@ -36,7 +36,7 @@ int ReduceMeanConverter(void* ctx, OpLite* op, KernelBase* kernel) {
   auto x_dims = x->dims();
   auto out_name = op_info->Input("Out").front();
   auto keep_dim = op_info->GetAttr<bool>("keep_dim");
-  auto dim = op_info->GetAttr<std::vector<int>>("dim");
+  std::vector<int> dim = op_info->GetAttr<std::vector<int>>("dim");
   CHECK(!dim.empty()) << "[NPU] \"dim\" of reduce_mean should not be empty.";
   for (size_t i = 0; i < dim.size(); i++) {
     if (dim[i] < 0) {

@@ -51,9 +51,9 @@ int ConvConverter(void* ctx, OpLite* op, KernelBase* kernel) {
   CHECK_EQ(output_dims[0], bs);
   CHECK_EQ(output_dims[1], oc);
   auto strides = op_info->GetAttr<std::vector<int>>("strides");
-  auto paddings = op_info->GetAttr<std::vector<int>>("paddings");
+  std::vector<int> paddings = op_info->GetAttr<std::vector<int>>("paddings");
   auto groups = op_info->GetAttr<int>("groups");
-  auto dilations = op_info->GetAttr<std::vector<int>>("dilations");
+  std::vector<int> dilations = op_info->GetAttr<std::vector<int>>("dilations");
   auto fuse_relu = op_info->GetAttr<bool>("fuse_relu");
   CHECK_EQ(strides.size(), 2L);
   CHECK_EQ(dilations.size(), 2L);

@@ -42,7 +42,7 @@ int PoolConverter(void* ctx, OpLite* op, KernelBase* kernel) {
   auto pooling_type = op_info->GetAttr<std::string>("pooling_type");
   auto global_pooling = op_info->GetAttr<bool>("global_pooling");
   auto ksize = op_info->GetAttr<std::vector<int>>("ksize");
-  auto paddings = op_info->GetAttr<std::vector<int>>("paddings");
+  std::vector<int> paddings = op_info->GetAttr<std::vector<int>>("paddings");
   // for quantization
   bool enable_int8 = false;

@@ -44,9 +44,9 @@ int ConvConverter(void* ctx, OpLite* op, KernelBase* kernel) {
   CHECK_EQ(input_dims.size(), 4);
   CHECK_EQ(filter_dims.size(), 4);
   auto strides = op_info->GetAttr<std::vector<int>>("strides");
-  auto paddings = op_info->GetAttr<std::vector<int>>("paddings");
+  std::vector<int> paddings = op_info->GetAttr<std::vector<int>>("paddings");
   auto groups = op_info->GetAttr<int>("groups");
-  auto dilations = op_info->GetAttr<std::vector<int>>("dilations");
+  std::vector<int> dilations = op_info->GetAttr<std::vector<int>>("dilations");
   auto fuse_relu = op_info->GetAttr<bool>("fuse_relu");
   CHECK_EQ(strides.size(), 2L);
   CHECK_EQ(dilations.size(), 2L);

@@ -37,7 +37,7 @@ int PoolConverter(void* ctx, OpLite* op, KernelBase* kernel) {
   auto out_name = op_info->Output("Out").front();
   auto pooling_type = op_info->GetAttr<std::string>("pooling_type");
   auto ceil_mode = op_info->GetAttr<bool>("ceil_mode");
-  auto paddings = op_info->GetAttr<std::vector<int>>("paddings");
+  std::vector<int> paddings = op_info->GetAttr<std::vector<int>>("paddings");
   auto global_pooling = op_info->GetAttr<bool>("global_pooling");
   auto ksize = op_info->GetAttr<std::vector<int>>("ksize");
   auto strides = op_info->GetAttr<std::vector<int>>("strides");