diff --git a/lite/backends/npu/builder.cc b/lite/backends/npu/builder.cc
index 954fad8c916e152c5de06ce285b4ac17ecf22a01..367363e44b6747e4383fbe962bcbe2a0692dbd64 100644
--- a/lite/backends/npu/builder.cc
+++ b/lite/backends/npu/builder.cc
@@ -148,7 +148,7 @@ int CvtActMode(std::string act_type) {
     act_mode = 1;
   } else if (act_type == "tanh") {
     act_mode = 2;
-  } else if (act_type == "relu_clipped") {
+  } else if (act_type == "relu_clipped" || act_type == "relu6") {
     act_mode = 3;
   } else if (act_type == "elu") {
     act_mode = 4;
diff --git a/lite/kernels/npu/bridges/act_op.cc b/lite/kernels/npu/bridges/act_op.cc
index ac62891113b1899036c35ffd3058f1d409b00a36..623d36eac07e1893b8a1f341e9ff5ea2c28fd643 100644
--- a/lite/kernels/npu/bridges/act_op.cc
+++ b/lite/kernels/npu/bridges/act_op.cc
@@ -44,6 +44,9 @@ node_map_type ActConverter(const std::shared_ptr<lite::OpLite> act_op,
   if (op_type == "relu_clipped") {
     auto Relu_clipped_coef = op_info->GetAttr<float>("Relu_clipped_coef");
     act_node->set_attr_coef(Relu_clipped_coef);
+  } else if (op_type == "relu6") {
+    float Relu_clipped_coef = 6.f;
+    act_node->set_attr_coef(Relu_clipped_coef);
   } else if (op_type == "leaky_relu") {
     auto alpha = op_info->GetAttr<float>("alpha");
     act_node->set_attr_negative_slope(alpha);
@@ -70,6 +73,7 @@ REGISTER_NPU_BRIDGE(relu, paddle::lite::kernels::npu::bridges::ActConverter);
 REGISTER_NPU_BRIDGE(tanh, paddle::lite::kernels::npu::bridges::ActConverter);
 REGISTER_NPU_BRIDGE(relu_clipped,
                     paddle::lite::kernels::npu::bridges::ActConverter);
+REGISTER_NPU_BRIDGE(relu6, paddle::lite::kernels::npu::bridges::ActConverter);
 // REGISTER_NPU_BRIDGE(elu, paddle::lite::kernels::npu::bridges::ActConverter);
 REGISTER_NPU_BRIDGE(leaky_relu,
                     paddle::lite::kernels::npu::bridges::ActConverter);
diff --git a/lite/kernels/npu/bridges/act_op_test.cc b/lite/kernels/npu/bridges/act_op_test.cc
index d50b1968b14cc33efd7ab9bcd0c4427d8ca2e508..e2670a008b11fcd7e65971ba2beac707b839896d 100644
--- a/lite/kernels/npu/bridges/act_op_test.cc
+++ b/lite/kernels/npu/bridges/act_op_test.cc
@@ -55,6 +55,10 @@ void act_ref(const std::shared_ptr<operators::ActivationOp> op) {
     for (size_t i = 0; i < out->numel(); i++) {
       out_data[i] = std::min(std::max(0.f, x_data[i]), relu_clipped_coef);
     }
+  } else if (op_type == "relu6") {
+    for (size_t i = 0; i < out->numel(); i++) {
+      out_data[i] = std::min(std::max(0.f, x_data[i]), 6.f);
+    }
   } else if (op_type == "leaky_relu") {
     auto alpha = op_info->GetAttr<float>("alpha");
     for (size_t i = 0; i < out->numel(); i++) {
@@ -96,6 +100,8 @@ void test_act(std::vector<int64_t> x_shape, std::string op_type) {
   opdesc.SetInput("X", {x_var_name});
   opdesc.SetOutput("Out", {out_var_name});
   if (op_type == "relu_clipped") {
+    opdesc.SetAttr("Relu_clipped_coef", 3.f);
+  } else if (op_type == "relu6") {
     opdesc.SetAttr("Relu_clipped_coef", 6.f);
   } else if (op_type == "leaky_relu") {
     opdesc.SetAttr("alpha", 0.02f);
@@ -125,6 +131,7 @@ TEST(NPUBridges, activation) {
                                     "relu",
                                     "tanh",
                                     "relu_clipped",
+                                    "relu6",
                                     "leaky_relu",
                                     "softsign",
                                     "hard_sigmoid"};
@@ -149,6 +156,8 @@ USE_LITE_OP(tanh);
 USE_NPU_BRIDGE(tanh);
 USE_LITE_OP(relu_clipped);
 USE_NPU_BRIDGE(relu_clipped);
+USE_LITE_OP(relu6);
+USE_NPU_BRIDGE(relu6);
 USE_LITE_OP(leaky_relu);
 USE_NPU_BRIDGE(leaky_relu);
diff --git a/lite/kernels/npu/bridges/paddle_use_npu_bridges.h b/lite/kernels/npu/bridges/paddle_use_npu_bridges.h
index 40b1a5e31f080712de854a07eec0fb1e3d80e6a2..ce5088aaf63afdeaefecac2d1c5eab70a1470d6d 100644
--- a/lite/kernels/npu/bridges/paddle_use_npu_bridges.h
+++ b/lite/kernels/npu/bridges/paddle_use_npu_bridges.h
@@ -20,6 +20,7 @@ USE_NPU_BRIDGE(sigmoid);
 USE_NPU_BRIDGE(relu);
 USE_NPU_BRIDGE(tanh);
 USE_NPU_BRIDGE(relu_clipped);
+USE_NPU_BRIDGE(relu6);
 USE_NPU_BRIDGE(leaky_relu);
 USE_NPU_BRIDGE(softsign);
 USE_NPU_BRIDGE(hard_sigmoid);
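
Note (not part of the patch): the relu6 bridge above reuses the existing clipped-ReLU activation mode (act_mode 3 in CvtActMode) and simply hard-codes set_attr_coef(6.f) in the converter, so no new NPU activation mode is needed. The reference semantics the new test branch checks is the usual clamp out = min(max(0, x), 6). Below is a minimal standalone C++ sketch of that reference, independent of the Paddle-Lite headers; the relu6_ref helper name is illustrative only.

#include <algorithm>
#include <cstdio>
#include <vector>

// Reference relu6: out = min(max(0, x), 6), i.e. clipped ReLU with the
// coefficient fixed at 6.f, mirroring the branch added to act_ref() above.
std::vector<float> relu6_ref(const std::vector<float>& x) {
  std::vector<float> out(x.size());
  for (size_t i = 0; i < x.size(); ++i) {
    out[i] = std::min(std::max(0.f, x[i]), 6.f);
  }
  return out;
}

int main() {
  std::vector<float> x{-2.f, 0.5f, 3.f, 7.f};
  for (float v : relu6_ref(x)) {
    std::printf("%f\n", v);  // expected: 0, 0.5, 3, 6
  }
  return 0;
}

Any C++11 compiler builds this sketch; the expected output (0, 0.5, 3, 6) matches what act_ref() now computes for relu6 and what the HiAI clipped-ReLU node produces with coef = 6.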