// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "lite/kernels/npu/bridges/graph.h"
#include "lite/kernels/npu/bridges/registry.h"
#include "lite/kernels/npu/bridges/utility.h"

namespace paddle {
namespace lite {
namespace subgraph {
namespace npu {

int SoftmaxConverter(void* ctx, OpLite* op, KernelBase* kernel) {
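  // ctx carries the NPU subgraph Graph under construction; op is the softmax
  // op to be bridged. Both must be valid.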
  CHECK(ctx != nullptr);
  CHECK(op != nullptr);
  auto graph = static_cast<Graph*>(ctx);
  auto op_info = op->op_info();
  auto op_type = op_info->Type();
  auto scope = op->scope();
  VLOG(3) << "[NPU] Converting " + op_type + "...";

  // Get input and output vars and op attributes
  auto x_name = op_info->Input("X").front();
  auto x_type = kernel->GetInputDeclType("X");
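  // This bridge only handles float tensors in NCHW layout; the CHECKs below
  // enforce that against the kernel's declared types.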
  CHECK(x_type->precision() == PRECISION(kFloat));
  CHECK(x_type->layout() == DATALAYOUT(kNCHW));
  auto x = scope->FindMutableTensor(x_name);
  auto x_dims = x->dims();
  auto x_rank = x_dims.size();
  auto out_name = op_info->Output("Out").front();
  auto out_type = kernel->GetOutputDeclType("Out");
  CHECK(out_type->precision() == PRECISION(kFloat));
  CHECK(out_type->layout() == DATALAYOUT(kNCHW));
  int axis = op_info->HasAttr("axis") ? op_info->GetAttr<int>("axis") : -1;
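  // Paddle allows a negative axis (counting from the last dimension);
  // normalize it to a non-negative index into the input dims.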
  if (axis < 0) {
    axis += x_rank;
  }
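  // Softmax along axis 2 of a tensor with rank > 3 is unsupported on the NPU
  // unless the width (x_dims[3]) is 1; returning FAILED rejects the op so it
  // stays outside the NPU subgraph.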
  if (axis == 2 && x_rank > 3 && x_dims[3] != 1) {
    LOG(WARNING) << "[NPU] Unsupported softmax params: axis = " << axis
                 << ", x_w = " << x_dims[3];
    return FAILED;
  }

  // X node
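  // Reuse the graph node if the input has already been converted (e.g. it is
  // the output of a preceding op); otherwise add a new node from the tensor.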
  std::shared_ptr<Node> x_node = nullptr;
  if (graph->Has(x_name)) {
    x_node = graph->Get(x_name);
  } else {
    x_node = graph->Add(x_name, *x);
  }

  // Softmax node
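  // Add a ge::op::Softmax node bound to the output var, feed it the input
  // node, and pass the normalized axis through as an attribute.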
  auto softmax_node = graph->Add<ge::op::Softmax>(out_name);
  auto softmax_op = softmax_node->data<ge::op::Softmax>();
  softmax_op->set_input_x(*x_node->data());
  softmax_op->set_attr_axis(axis);
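  // The axis check above depends on the input dims, so request that this
  // bridge be re-run whenever the input shape changes.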
  return REBUILD_WHEN_SHAPE_CHANGED;
}

}  // namespace npu
}  // namespace subgraph
}  // namespace lite
}  // namespace paddle

REGISTER_SUBGRAPH_BRIDGE(softmax,
                         kNPU,
                         paddle::lite::subgraph::npu::SoftmaxConverter);