// op_lite.cc — implementation of OpLite and OpInfo.
// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/lite/core/op_lite.h"
#include "op_lite.h"
#include "paddle/fluid/lite/core/op_registry.h"

namespace paddle {
namespace lite {

std::vector<std::unique_ptr<KernelBase>> OpLite::CreateKernels(
S
superjomn 已提交
23
    const std::vector<Place> &places, const std::string &kernel_type) {
S
update  
superjomn 已提交
24 25 26
  std::vector<std::unique_ptr<KernelBase>> kernels;
  CHECK(!op_type_.empty()) << "op_type_ should be set first";

27
  auto pick_kernel = [&](const Place &place) {
28
    auto ks = KernelRegistry::Global().Create(
S
superjomn 已提交
29
        (kernel_type.empty() ? op_type_ : kernel_type), place.target,
S
superjomn 已提交
30
        place.precision, place.layout);
31
    for (auto &&it : ks) {
32
      AttachKernel(it.get());
33 34
      kernels.emplace_back(std::move(it));
    }
35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50
  };

  std::set<Place> place_set;
  for (auto place : places) {
    place_set.insert(place);
    // Pick kernels those support any Precision and any DataLayout
    place.precision = PRECISION(kAny);
    place_set.insert(place);
    place.layout = DATALAYOUT(kAny);
    place_set.insert(place);
  }

  std::set<TargetType> targets;
  for (auto place : place_set) {
    pick_kernel(place);
    targets.insert(place.target);
S
update  
superjomn 已提交
51 52
  }

S
superjomn 已提交
53
  CHECK(!kernels.empty()) << "No kernel found for Op " << op_type_;
S
Superjomn 已提交
54
  VLOG(2) << "op " << op_type_ << " get " << kernels.size() << " kernels";
S
update  
superjomn 已提交
55 56 57
  return kernels;
}

void OpLite::PickKernel(const std::vector<Place> &valid_places,
S
update  
superjomn 已提交
59 60 61 62 63 64 65 66 67 68
                        OpLite::KernelStrategy kernel_strategy) {
  switch (kernel_strategy) {
    case KernelStrategy::kStatic:
      StaticPickKernel(valid_places);
      break;
    default:
      LOG(FATAL) << "unsupported kernel strategy";
  }
}

bool OpLite::Run() {
  CHECK(kernel_);
  SyncInputEvents();

  kernel_->Run();

  RecordOutputEvents();
  return true;
}

bool OpLite::Attach(const framework::OpDesc &opdesc, lite::Scope *scope) {
S
superjomn 已提交
80 81 82 83
  CHECK(scope);
  scope_ = scope;
  op_info_.reset(new OpInfo);  // Force clean the out-of-date infomation.
  op_info_->Build(opdesc.ReadonlyProto());
84 85 86
  return AttachImpl(opdesc, scope);
}

const Tensor *OpLite::GetTensor(lite::Scope *scope,
                                const std::string &name) const {
  auto *var = scope->FindVar(name);
  CHECK(var) << "no variable called " << name << " found";
  return &var->Get<lite::Tensor>();
}

Tensor *OpLite::GetMutableTensor(lite::Scope *scope,
                                 const std::string &name) const {
  auto *var = scope->FindVar(name);
  CHECK(var) << "no variable called " << name << " found";
  return var->GetMutable<lite::Tensor>();
}

bool OpInfo::GetInputArgname(const std::string &value_name,
                             std::string *out) const {
S
Superjomn 已提交
103 104 105 106 107 108 109 110 111
  for (auto &item : input_argument_) {
    auto it = std::find(item.second.begin(), item.second.end(), value_name);
    if (it != item.second.end()) {
      *out = item.first;
      return true;
    }
  }
  return false;
}
bool OpInfo::GetOutputArgname(const std::string &value_name,
                              std::string *out) const {
S
Superjomn 已提交
114 115 116 117 118 119 120 121 122
  for (auto &item : output_argument_) {
    auto it = std::find(item.second.begin(), item.second.end(), value_name);
    if (it != item.second.end()) {
      *out = item.first;
      return true;
    }
  }
  return false;
}
}  // namespace lite
}  // namespace paddle