// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once
#include <list>
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "lite/core/kernel.h"
#include "lite/core/op_lite.h"
#include "lite/core/op_registry.h"
#include "lite/model_parser/cpp/program_desc.h"

namespace paddle {
namespace lite {

static const char kKernelTypeAttr[] = "__@kernel_type_attr@__";

// A Program represents a model program. In Paddle, a program contains:
// - a main block, which is a list of OpLite ops
// - a scope, which holds all the weights
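//
// A minimal construction sketch (illustrative only; `desc` stands for a
// cpp::ProgramDesc already loaded from a model, and the ARM float place is
// just one example of a valid Place):
//
//   auto root_scope = std::make_shared<lite::Scope>();
//   std::vector<Place> places{Place{TARGET(kARM), PRECISION(kFloat)}};
//   Program program(desc, root_scope, places);  // PrepareWorkspace + Build
//   auto copy = program.Clone();                // rebuilt from the same desc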
struct Program {
 public:
  explicit Program(const std::shared_ptr<Scope>& root) { scope_ = root; }
  Program(const cpp::ProgramDesc& desc,
          const std::shared_ptr<Scope>& root,
          const std::vector<Place>& valid_places)
      : scope_(root), valid_places_(valid_places), desc_(desc) {
    CHECK(scope_) << "scope should be initialized first";
    VLOG(4) << "prepare work";
    PrepareWorkspace(desc);
    VLOG(4) << "build desc";
    Build(desc);
    VLOG(4) << "build desc finished";
  }

  std::unique_ptr<Program> Clone() const {
    std::unique_ptr<Program> res(new Program(desc_, scope_, valid_places_));
    return res;
  }

  const std::list<std::string>& weights() const { return weights_; }
  const std::list<std::string>& tmp_vars() const { return tmp_vars_; }
  std::list<std::string>* mutable_weights() { return &weights_; }
  std::list<std::string>* mutable_tmp_vars() { return &tmp_vars_; }

  const std::list<std::shared_ptr<OpLite>>& ops() const { return ops_; }
  std::list<std::shared_ptr<OpLite>>* mutable_ops() { return &ops_; }

  lite::Scope* exec_scope() { return exec_scope_; }
  lite::Scope* scope() { return scope_.get(); }

 private:
  // Build from a program and scope.
  void Build(const cpp::ProgramDesc& program);
  // Create temporary variables.
  void PrepareWorkspace(const cpp::ProgramDesc& program);

 private:
  std::list<std::string> tmp_vars_;
  std::list<std::string> weights_;
  std::list<std::shared_ptr<OpLite>> ops_;
  // The root scope passed in by the caller; it holds the weights. NOTE the
  // kernels actually run in exec_scope_ below.
  std::shared_ptr<lite::Scope> scope_;
  std::vector<Place> valid_places_;
  // Runtime scope.
  lite::Scope* exec_scope_{};
  cpp::ProgramDesc desc_;
};

struct Instruction {
  Instruction(const std::shared_ptr<OpLite>& op,
              std::unique_ptr<KernelBase>&& kernel)
      : op_(op), kernel_(std::move(kernel)) {}

  // Run the instruction.
  void Run();

  friend STL::ostream& operator<<(STL::ostream& os, const Instruction& other);

  const OpLite* op() const { return op_.get(); }
  const KernelBase* kernel() const { return kernel_.get(); }
  KernelBase* mutable_kernel() { return kernel_.get(); }

#ifdef LITE_WITH_PROFILE
  void set_profiler(profile::Profiler* profiler) {
    profiler_ = profiler;
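    // Register a timer for every op except feed/fetch, and pass the profiler
    // handle (with the op's timer id) down to the kernel.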
    if (op_->Type() != "feed" && op_->Type() != "fetch") {
      profile::OpCharacter ch;
      ch.target = kernel()->target();
      ch.op_type = op_->Type();
      ch.kernel_name = kernel()->name();
      profile_id_ = profiler->NewTimer(ch);
      kernel_->SetProfiler(profiler_, profile_id_);
    }
  }
#endif

 private:
  std::shared_ptr<OpLite> op_;
  std::unique_ptr<KernelBase> kernel_;
  bool first_epoch_{true};
  bool has_run_{false};

#ifdef LITE_WITH_PROFILE
  profile::Profiler* profiler_;
  int profile_id_{-1};
#endif  // LITE_WITH_PROFILE
};
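// A minimal usage sketch for Instruction (illustrative only; `op` and `kernel`
// stand for an op taken from a built Program and the kernel picked for it,
// e.g. by the optimizer):
//
//   Instruction inst(op, std::move(kernel));
//   inst.Run();  // executes the op with the picked kernel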

/*
 * A RuntimeProgram contains the instructions (each an op paired with its
 * picked kernel) to execute at runtime.
 */
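//
// A minimal usage sketch (illustrative only; the Instruction vector is
// normally produced by the optimizer from a Program, and `program` stands for
// such a Program):
//
//   std::vector<Instruction> insts;
//   insts.emplace_back(op, std::move(kernel));  // one entry per op, in order
//   RuntimeProgram runtime(std::move(insts));
//   runtime.set_exec_scope(program.exec_scope());
//   runtime.Run();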
class LITE_API RuntimeProgram {
 public:
  explicit RuntimeProgram(std::vector<Instruction>&& insts)
      : instructions_(std::move(insts)) {
    if (instructions_.empty()) {
      LOG(FATAL) << "no instructions";
    }
#ifdef LITE_WITH_PROFILE
    set_profiler();
#endif
  }

  void Run();

  void set_exec_scope(lite::Scope* x) { exec_scope_ = x; }
  lite::Scope* exec_scope() { return exec_scope_; }

  size_t num_instructions() const { return instructions_.size(); }

  const std::vector<Instruction>& instructions() const { return instructions_; }

  // `SaveOpInfosToProgram` updates the op list (ops_) of block 0 in the given
  // ProgramDesc.
  void SaveOpInfosToProgram(cpp::ProgramDesc* desc);

  // `UpdateVarsOfProgram` updates the var list (vars_) of block 0 in the given
  // ProgramDesc. Namely, if a pass creates a new var, its var_desc is added to
  // vars_.
  void UpdateVarsOfProgram(cpp::ProgramDesc* desc);
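  // A save-back sketch (illustrative only; `optimized_desc` is a hypothetical
  // cpp::ProgramDesc that the optimized program is written into):
  //
  //   runtime.SaveOpInfosToProgram(&optimized_desc);
  //   runtime.UpdateVarsOfProgram(&optimized_desc);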

 private:
  RuntimeProgram(const RuntimeProgram&) = delete;
  std::vector<Instruction> instructions_;
  lite::Scope* exec_scope_{};

#ifdef LITE_WITH_PROFILE
  profile::Profiler profiler_;
  void set_profiler() {
    for (auto i = instructions_.begin(); i != instructions_.end(); ++i) {
      i->set_profiler(&profiler_);
    }
  }
#endif
};

}  // namespace lite
}  // namespace paddle