// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <gtest/gtest.h>
#include <algorithm>
#include <cmath>
#include <random>
#include "lite/core/op_registry.h"
#include "lite/kernels/npu/bridges/registry.h"
#include "lite/kernels/npu/bridges/test_helper.h"
#include "lite/operators/activation_ops.h"

namespace paddle {
namespace lite {
namespace kernels {
namespace npu {
namespace bridges {

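// CPU reference implementation: reads the input tensor "x" from the op's
// scope and writes the expected activation output to "out_ref".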
void act_ref(const std::shared_ptr<operators::ActivationOp> op) {
  Scope* scope = op->scope();
  const OpInfo* op_info = op->op_info();
  auto op_type = op_info->Type();
  auto x = scope->FindTensor("x");
  auto out = scope->FindMutableTensor("out_ref");
  out->Resize(x->dims());
  auto x_data = x->data<float>();
  auto out_data = out->mutable_data<float>();
  CHECK_EQ(x->numel(), out->numel());

  // "sigmoid","relu","tanh","relu_clipped","leaky_relu","softsign","hard_sigmoid"
  if (op_type == "sigmoid") {
    for (size_t i = 0; i < out->numel(); i++) {
      out_data[i] = 1.f / (1.f + std::exp(-x_data[i]));
    }
  } else if (op_type == "relu") {
    for (size_t i = 0; i < out->numel(); i++) {
      out_data[i] = std::max(0.f, x_data[i]);
    }
  } else if (op_type == "tanh") {
    for (size_t i = 0; i < out->numel(); i++) {
      out_data[i] = (std::exp(x_data[i]) - std::exp(-x_data[i])) /
                    (std::exp(x_data[i]) + std::exp(-x_data[i]));
    }
  } else if (op_type == "relu_clipped") {
    auto relu_clipped_coef = op_info->GetAttr<float>("Relu_clipped_coef");
    for (size_t i = 0; i < out->numel(); i++) {
      out_data[i] = std::min(std::max(0.f, x_data[i]), relu_clipped_coef);
    }
  } else if (op_type == "relu6") {
    for (size_t i = 0; i < out->numel(); i++) {
      out_data[i] = std::min(std::max(0.f, x_data[i]), 6.f);
    }
  } else if (op_type == "leaky_relu") {
    auto alpha = op_info->GetAttr<float>("alpha");
    for (size_t i = 0; i < out->numel(); i++) {
      out_data[i] = std::max(x_data[i], x_data[i] * alpha);
    }
  } else if (op_type == "softsign") {
    for (size_t i = 0; i < out->numel(); i++) {
      out_data[i] = x_data[i] / (1 + std::abs(x_data[i]));
    }
  } else if (op_type == "hard_sigmoid") {
    auto slope = op_info->GetAttr<float>("slope");
    auto offset = op_info->GetAttr<float>("offset");
    for (size_t i = 0; i < out->numel(); i++) {
      out_data[i] = std::min(1.f, slope * x_data[i] + offset);
      out_data[i] = std::max(0.f, out_data[i]);
    }
  } else {
    LOG(FATAL) << "unsupported activation type: " << op_type;
  }
}

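// Builds an activation op from an OpDesc, runs it through the NPU bridge via
// LauchOp, computes the CPU reference with act_ref, and compares the results.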
void test_act(std::vector<int64_t> x_shape, std::string op_type) {
  // prepare input&output variables
  Scope scope;
  std::string x_var_name("x");
  std::string out_var_name("out");
  std::string out_ref_var_name("out_ref");
  auto* x = scope.NewTensor(x_var_name);
  auto* out = scope.NewTensor(out_var_name);
  auto* out_ref = scope.NewTensor(out_ref_var_name);
  x->Resize(x_shape);

  // initialize input&output data
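  // random values in [-8, 8] exercise both the negative and positive
  // regions of each activation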
  FillTensor<float>(x, -8, 8);

  // initialize op desc
  cpp::OpDesc opdesc;
  opdesc.SetType(op_type);
  opdesc.SetInput("X", {x_var_name});
  opdesc.SetOutput("Out", {out_var_name});
  if (op_type == "relu_clipped") {
    opdesc.SetAttr("Relu_clipped_coef", 3.f);
  } else if (op_type == "relu6") {
    opdesc.SetAttr("Relu_clipped_coef", 6.f);
  } else if (op_type == "leaky_relu") {
    opdesc.SetAttr("alpha", 0.02f);
  } else if (op_type == "hard_sigmoid") {
    opdesc.SetAttr("slope", 0.2f);
    opdesc.SetAttr("offset", 0.5f);
  }

  // create and convert op to NPU model, then run it on NPU
  auto op = CreateOp<operators::ActivationOp>(opdesc, &scope);
  LauchOp(op, {x_var_name}, {out_var_name});

  // execute reference implementation and save to output tensor
  act_ref(op);

  // compare results with the CPU reference, using a loose tolerance to
  // absorb precision differences on the NPU
  auto* out_data = out->mutable_data<float>();
  auto* out_ref_data = out_ref->mutable_data<float>();
  for (int i = 0; i < out->dims().production(); i++) {
    EXPECT_NEAR(out_data[i], out_ref_data[i], 1e-2);
  }
}

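// Sweep a few representative tensor shapes across every supported
// activation type.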
TEST(NPUBridges, activation) {
  std::vector<std::vector<int64_t>> shapes{{1}, {2, 3}, {1, 2, 3, 4}};
  std::vector<std::string> types{"sigmoid",
                                 "relu",
                                 "tanh",
                                 "relu_clipped",
                                 "relu6",
                                 "leaky_relu",
                                 "softsign",
                                 "hard_sigmoid"};
  for (auto x_shape : shapes) {
    for (auto op_type : types) {
      test_act(x_shape, op_type);
    }
  }
}

}  // namespace bridges
}  // namespace npu
}  // namespace kernels
}  // namespace lite
}  // namespace paddle

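// pull in the op implementations and NPU bridge registrations used above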
USE_LITE_OP(sigmoid);
USE_NPU_BRIDGE(sigmoid);
USE_LITE_OP(relu);
USE_NPU_BRIDGE(relu);
USE_LITE_OP(tanh);
USE_NPU_BRIDGE(tanh);
USE_LITE_OP(relu_clipped);
USE_NPU_BRIDGE(relu_clipped);
USE_LITE_OP(relu6);
USE_NPU_BRIDGE(relu6);
USE_LITE_OP(leaky_relu);
USE_NPU_BRIDGE(leaky_relu);
USE_LITE_OP(softsign);
USE_NPU_BRIDGE(softsign);
USE_LITE_OP(hard_sigmoid);
USE_NPU_BRIDGE(hard_sigmoid);