// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/lite/core/op_lite.h"

#include "paddle/fluid/lite/core/op_registry.h"

namespace paddle {
namespace lite {

std::vector<std::unique_ptr<KernelBase>> OpLite::CreateKernels(
S
superjomn 已提交
23
    const std::vector<Place> &places, const std::string &kernel_type) {
S
update  
superjomn 已提交
24 25 26 27
  std::vector<std::unique_ptr<KernelBase>> kernels;
  CHECK(!op_type_.empty()) << "op_type_ should be set first";

  for (auto place : places) {
28
    auto ks = KernelRegistry::Global().Create(
S
superjomn 已提交
29
        (kernel_type.empty() ? op_type_ : kernel_type), place.target,
30 31 32 33
        place.precision);
    for (auto &&it : ks) {
      kernels.emplace_back(std::move(it));
    }
S
update  
superjomn 已提交
34 35 36 37 38
  }

  return kernels;
}

void OpLite::PickKernel(const std::vector<Place> &valid_places,
S
update  
superjomn 已提交
40 41 42 43 44 45 46 47 48 49
                        OpLite::KernelStrategy kernel_strategy) {
  switch (kernel_strategy) {
    case KernelStrategy::kStatic:
      StaticPickKernel(valid_places);
      break;
    default:
      LOG(FATAL) << "unsupported kernel strategy";
  }
}

bool OpLite::Run() {
  // A kernel must have been picked (see PickKernel) before execution.
  CHECK(kernel_);

  // Wait on input dependencies, execute the kernel, then publish output
  // events so downstream consumers can proceed.
  SyncInputEvents();
  kernel_->Run();
  RecordOutputEvents();

  return true;
}

bool OpLite::Attach(const framework::OpDesc &opdesc, lite::Scope *scope) {
  // Attaching twice would clobber previously built op info; treat as a bug.
  CHECK(!op_info_) << "op_info duplicate build found";

  // Populate the op metadata from the descriptor, then delegate the
  // op-specific wiring to the subclass.
  auto info = std::make_shared<OpInfo>();
  info->Build(opdesc);
  op_info_ = std::move(info);
  return AttachImpl(opdesc, scope);
}

const Tensor *OpLite::GetTensor(lite::Scope *scope,
                                const std::string &name) const {
  // Read-only tensor lookup; the variable must already exist in `scope`.
  auto *variable = scope->FindVar(name);
  CHECK(variable) << "no variable called " << name << " found";
  const auto &tensor = variable->Get<lite::Tensor>();
  return &tensor;
}

Tensor *OpLite::GetMutableTensor(lite::Scope *scope,
                                 const std::string &name) const {
  // Writable tensor lookup; the variable must already exist in `scope`.
  auto *variable = scope->FindVar(name);
  CHECK(variable) << "no variable called " << name << " found";
  return variable->GetMutable<lite::Tensor>();
}

bool OpInfo::GetInputArgname(const std::string &value_name, std::string *out) {
  // Reverse lookup: find which input argument slot holds `value_name`.
  // Returns false (leaving *out untouched) when no slot contains it.
  for (auto &entry : input_argument_) {
    const auto &values = entry.second;
    if (std::find(values.begin(), values.end(), value_name) != values.end()) {
      *out = entry.first;
      return true;
    }
  }
  return false;
}
bool OpInfo::GetOutputArgname(const std::string &value_name, std::string *out) {
  // Reverse lookup: find which output argument slot holds `value_name`.
  // Returns false (leaving *out untouched) when no slot contains it.
  for (auto &entry : output_argument_) {
    const auto &values = entry.second;
    if (std::find(values.begin(), values.end(), value_name) != values.end()) {
      *out = entry.first;
      return true;
    }
  }
  return false;
}
}  // namespace lite
}  // namespace paddle