// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "lite/backends/npu/builder.h"
#include "lite/kernels/npu/bridges/registry.h"

namespace paddle {
namespace lite {
namespace kernels {
namespace npu {
namespace bridges {

// Converts a Paddle elementwise op (elementwise_add, or the fused
// fusion_elementwise_add_activation) into an HiAI IR Eltwise node,
// optionally followed by an Activation node.
//
// @param elementwise_op  the Paddle op to convert; its scope is used to
//                        look up persistent input tensors.
// @param inputs_map      var name -> already-converted HiAI IR nodes
//                        produced by upstream bridges.
// @return a map from the op's output var name to the tail IR node
//         (the Activation node when an activation is fused, else the
//         Eltwise node itself).
node_map_type ElementwiseConverter(
    const std::shared_ptr<lite::OpLite> elementwise_op,
    const node_map_type& inputs_map) {
  auto scope = elementwise_op->scope();
  auto op_info = elementwise_op->op_info();
  auto op_type = op_info->Type();
  // Unique name so multiple elementwise ops don't collide in the graph.
  auto unique_op_type = lite::npu::UniqueName(op_type);
  LOG(INFO) << "[NPU] Converting " + op_type + "...";

  std::shared_ptr<ge::op::Eltwise> elementwise_node =
      std::make_shared<ge::op::Eltwise>(unique_op_type);

  auto x_var_name = op_info->Input("X").front();
  auto y_var_name = op_info->Input("Y").front();

  // Only axis == -1 (inputs with identical shape, no broadcasting) is
  // supported by this bridge.
  CHECK_EQ(op_info->GetAttr<int>("axis"), -1)
      << "[NPU] elementwise only support inputs with same size";

  // X must already have been converted by an upstream bridge.
  CHECK(inputs_map.find(x_var_name) != inputs_map.end());
  elementwise_node->set_input_x1(*inputs_map.at(x_var_name));
  lite::npu::OpList::Global().add(inputs_map.at(x_var_name));

  if (inputs_map.find(y_var_name) != inputs_map.end()) {
    // Y was produced by another converted op: wire it in directly.
    elementwise_node->set_input_x2(*inputs_map.at(y_var_name));
    lite::npu::OpList::Global().add(inputs_map.at(y_var_name));
  } else {
    // Y is not in the graph yet (e.g. a persistent weight tensor):
    // materialize it as a Const node from the scope.
    auto y_const_node = std::make_shared<ge::op::Const>(y_var_name);
    auto* y = scope->FindVar(y_var_name)->GetMutable<Tensor>();
    y_const_node->set_attr_value(lite::npu::CvtTensor(y));
    elementwise_node->set_input_x2(*y_const_node);
    lite::npu::OpList::Global().add(y_const_node);
  }

  lite::npu::OpList::Global().add(elementwise_node);

  // paddlelite has sum only (mode 1 presumably selects element-wise sum
  // in the HiAI Eltwise op — confirm against the DDK headers).
  elementwise_node->set_attr_mode(1);

  node_map_type outputs_map;
  if (op_type == "fusion_elementwise_add_activation") {
    // Append the fused activation after the eltwise node.
    auto act_type = op_info->GetAttr<std::string>("act_type");
    auto act_node =
        std::make_shared<ge::op::Activation>(unique_op_type + "/act");
    act_node->set_input_x(*elementwise_node);
    // TODO(hong19860320) set the coef value for act Ops, such as leaky_relu,
    // clipped_relu etc.
    act_node->set_attr_mode(lite::npu::CvtActMode(act_type));
    lite::npu::OpList::Global().add(act_node);
    outputs_map[op_info->Output("Out").front()] = act_node;
  } else {
    outputs_map[op_info->Output("Out").front()] = elementwise_node;
  }
  return outputs_map;
}

}  // namespace bridges
}  // namespace npu
}  // namespace kernels
}  // namespace lite
}  // namespace paddle

// Register the converter for both the plain and the activation-fused
// elementwise-add ops; the converter dispatches on op_info->Type().
REGISTER_NPU_BRIDGE(elementwise_add,
                    paddle::lite::kernels::npu::bridges::ElementwiseConverter);
REGISTER_NPU_BRIDGE(fusion_elementwise_add_activation,
                    paddle::lite::kernels::npu::bridges::ElementwiseConverter);