// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "lite/core/arena/framework.h"
#include "lite/core/context.h"

namespace paddle {
namespace lite {
namespace arena {

void TestCase::CreateInstruction() {
  auto op = LiteOpRegistry::Global().Create(op_desc().Type());
  CHECK(op) << "no op for " << op_desc().Type();
  op->Attach(*op_desc_, inst_scope_);
  auto kernels = op->CreateKernels({place_});
  CHECK(!kernels.empty()) << "No kernel found for place "
                          << place_.DebugString();
  // pick out the kernel that matches the requested alias
  auto it = std::find_if(
      kernels.begin(), kernels.end(), [&](std::unique_ptr<KernelBase>& k) {
        return k->alias() == alias_;
      });
  CHECK(it != kernels.end()) << "failed to find a kernel in "
                             << place_.DebugString()
                             << " with alias: " << alias_;
  // prepare context
  (*it)->SetContext(std::move(ctx_));
  instruction_.reset(new Instruction(op, std::move(*it)));
}

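// Copies each op input from the base scope into the instruction's scope when
// the kernel expects the data on a target that is not compatible with the
// host tensor prepared by the test.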
void TestCase::PrepareInputsForInstruction() {
  for (auto& arg : op_desc().InputArgumentNames()) {
    for (auto& var : op_desc().Input(arg)) {
      std::string kernel_key = instruction_->kernel()->key_with_alias();
      const auto* param_type = ParamTypeRegistry::Global().RetrieveInArgument(
          place_, kernel_key, arg);
      CHECK(param_type) << "no input param type registered for kernel "
                        << kernel_key << ", argument " << arg;

      const auto* inst_type = Type::GetTensorTy(TARGET(kHost));
      CHECK(scope_->FindVar(var));
      const auto* shared_tensor = scope_->FindTensor(var);
      if (!TargetCompatibleTo(*inst_type, *param_type->type)) {
        /// Create a tensor in the instruction's scope, allocate memory on the
        /// kernel's target and then copy the host data there.
        auto* target_tensor = inst_scope_->NewTensor(var);
        CHECK(!shared_tensor->dims().empty())
            << "shared_tensor has not been initialized yet";
        target_tensor->Resize(shared_tensor->dims());
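        // TargetCopy(target, dst, src, size): copy the host bytes into the
        // freshly allocated buffer on the kernel's input target.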
        TargetCopy(param_type->type->target(),
                   target_tensor->mutable_data(param_type->type->target(),
                                               shared_tensor->memory_size()),
                   shared_tensor->raw_data(),
                   shared_tensor->memory_size());
      }
    }
  }
}

}  // namespace arena
}  // namespace lite
}  // namespace paddle