// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "lite/core/arena/framework.h"
#include "lite/core/context.h"
17
#include "lite/operators/subgraph_op.h"

namespace paddle {
namespace lite {
namespace arena {

void TestCase::CreateInstruction() {
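  // Create the op under test and bind it to the kernel identified by place_
  // and alias_.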
  std::shared_ptr<lite::OpLite> op = nullptr;
  if (place_.target == TARGET(kNPU) || place_.target == TARGET(kXPU)) {
    // Create a new block desc to wrap the original op desc
    int sub_block_idx = 0;
    auto sub_block_desc = new cpp::BlockDesc();
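    // NOTE: sub_block_desc is released in TestCase::~TestCase()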
    sub_block_desc->ClearOps();
    sub_block_desc->ClearVars();
    auto sub_block_op_desc = sub_block_desc->AddOp<cpp::OpDesc>();
    *sub_block_op_desc = *op_desc_;
    // Replace the original op desc with a subgraph op desc that holds the
    // new block desc
    op_desc_.reset(new cpp::OpDesc());
    op_desc_->SetType("subgraph");
    op_desc_->SetAttr<int32_t>("sub_block", sub_block_idx);
    auto in_names = sub_block_op_desc->input_vars();
    auto out_names = sub_block_op_desc->output_vars();
    op_desc_->SetInput("Inputs", in_names);
    op_desc_->SetOutput("Outputs", out_names);
    op_desc_->SetAttr<std::vector<std::string>>("input_data_names", in_names);
    op_desc_->SetAttr<std::vector<std::string>>("output_data_names", out_names);
    op = LiteOpRegistry::Global().Create(op_desc().Type());
    static_cast<operators::SubgraphOp*>(op.get())->SetSubBlock(sub_block_desc);
  } else {
    op = LiteOpRegistry::Global().Create(op_desc().Type());
  }
  CHECK(op) << "no op for " << op_desc().Type();
  op->Attach(*op_desc_, inst_scope_);
  auto kernels = op->CreateKernels({place_});
  // pick out the target kernel whose alias matches; std::find_if keeps the
  // matched element intact, unlike std::remove_if, which would leave it in a
  // moved-from state
  auto it = std::find_if(
      kernels.begin(), kernels.end(), [&](std::unique_ptr<KernelBase>& k) {
        return k->alias() == alias_;
      });
  CHECK(it != kernels.end()) << "failed to create the kernel in "
                             << place_.DebugString()
                             << " with alias: " << alias_;
  // prepare context
  (*it)->SetContext(std::move(ctx_));
  instruction_.reset(new Instruction(op, std::move(*it)));
#ifdef LITE_WITH_PROFILE
  instruction_->set_profiler(new profile::Profiler());
#endif
}

void TestCase::PrepareInputsForInstruction() {
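  // For each input variable, look up the type the kernel expects. If that
  // type is not host-compatible, copy the host-side reference data into the
  // instruction's scope on the kernel's target.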
  for (auto& arg : op_desc().InputArgumentNames()) {
    for (auto& var : op_desc().Input(arg)) {
      std::string kernel_key = instruction_->kernel()->key_with_alias();
      const auto* param_type = ParamTypeRegistry::Global().RetrieveInArgument(
          place_, kernel_key, arg);
      CHECK(param_type) << "no param type found for kernel " << kernel_key
                        << " with input argument: " << arg;

      const Type* inst_type = nullptr;
      if (param_type->type->IsTensor()) {
        inst_type = Type::GetTensorTy(TARGET(kHost));
      } else if (param_type->type->IsTensorList()) {
        inst_type = Type::GetTensorListTy(TARGET(kHost));
      } else {
        LOG(FATAL) << "unsupported param_type";
      }

      CHECK(scope_->FindVar(var));
      if (!TargetCompatibleTo(*inst_type, *param_type->type)) {
        // Create a tensor or tensor array in the instruction's scope,
        // allocate memory on the target, and copy the data there.
        if (param_type->type->IsTensor()) {
          const auto* shared_tensor = scope_->FindTensor(var);
          auto* target_tensor = inst_scope_->NewTensor(var);
          CHECK(!shared_tensor->dims().empty()) << "shared_tensor is empty yet";
          target_tensor->Resize(shared_tensor->dims());
          TargetCopy(param_type->type->target(),
                     target_tensor->mutable_data(param_type->type->target(),
                                                 shared_tensor->memory_size()),
                     shared_tensor->raw_data(),
                     shared_tensor->memory_size());
        } else if (param_type->type->IsTensorList()) {
          const auto* shared_tensor_array =
              scope_->FindVar(var)->GetMutable<std::vector<Tensor>>();
          auto* target_tensor_array =
              inst_scope_->Var(var)->GetMutable<std::vector<Tensor>>();
          CHECK(!shared_tensor_array->empty())
              << "shared_tensor_array is still empty";
          target_tensor_array->resize(shared_tensor_array->size());
          for (size_t i = 0; i < shared_tensor_array->size(); i++) {
            target_tensor_array->at(i).Resize(
                shared_tensor_array->at(i).dims());
            TargetCopy(param_type->type->target(),
                       target_tensor_array->at(i).mutable_data(
                           param_type->type->target(),
                           shared_tensor_array->at(i).memory_size()),
                       shared_tensor_array->at(i).raw_data(),
                       shared_tensor_array->at(i).memory_size());
          }
        } else {
          LOG(FATAL) << "unsupported param_type";
        }
      }
    }
  }
}

TestCase::~TestCase() {
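  // The sub-block desc allocated in CreateInstruction() is owned by this
  // test case, so it must be freed here.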
  if (op_desc_->Type() == "subgraph") {
    // Release the subblock desc of Subgraph op
    auto subgraph_op = const_cast<operators::SubgraphOp*>(
        static_cast<const operators::SubgraphOp*>(instruction_->op()));
    CHECK(subgraph_op);
    auto sub_block_desc = subgraph_op->GetSubBlock();
    if (sub_block_desc) {
      delete sub_block_desc;
    }
  }
}

}  // namespace arena
}  // namespace lite
}  // namespace paddle