// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <glog/logging.h>
#include <boost/variant.hpp>
#include <list>
#include <map>
#include <memory>
#include <string>
#include <vector>
#include "paddle/fluid/framework/variable.h"
#include "paddle/fluid/lite/core/context.h"
#include "paddle/fluid/lite/core/kernel.h"
#include "paddle/fluid/lite/core/scope.h"
#include "paddle/fluid/lite/model_parser/compatible_pb.h"

namespace paddle {
namespace lite {

using any_t = boost::variant<int, float, framework::Variable *>;
using anys_t = std::map<std::string, any_t>;

// For registry factory.
struct Registry {
  void Touch() {}
};

namespace mir {
class Node;
class SSAGraph;
}

class OpInfo;

/**
 * The base class of light-weight operators, currently used only in inference
 * to eliminate the overhead of some operations in the current framework.
 *
 * The operator is designed as follows:
 * - it can have some members to hold the arguments and some other computation
 *   resources,
 * - it should act like a function call, with no extra logic included.
 */
class OpLite : public Registry {
 public:
  OpLite() = default;
  OpLite(const std::string &type) : op_type_(type) {}
  OpLite(const std::vector<Place> &valid_places)
      : valid_places_(valid_places) {}

  void SetValidPlaces(const std::vector<Place> &places) {
    valid_places_ = places;
  }
  const std::vector<Place> &valid_places() const { return valid_places_; }
  // Check the shape.
  virtual bool CheckShape() const { return true; }
  // Infer the outputs' shapes.
  virtual bool InferShape() const { return true; }
  // Run this operator.
  virtual bool Run();

  // Link the external execution environment to the internal context.
  bool Attach(const OpDesc &opdesc, lite::Scope *scope);

  const OpInfo *op_info() const { return op_info_.get(); }
  OpInfo *mutable_op_info() { return op_info_.get(); }

  // Human-readable information.
  virtual std::string DebugString() const = 0;

  const Place &kernel_place() const { return kernel_place_; }

  // Create all the kernels for the valid targets.
  std::vector<std::unique_ptr<KernelBase>> CreateKernels(
      const std::vector<Place> &places, const std::string &kernel_type = "");

  lite::Scope *scope() { return scope_; }

  // Assign op param to kernel.
  virtual void AttachKernel(KernelBase *kernel) = 0;

  virtual ~OpLite() = default;

 protected:
  // Attach the op to the runtime environment.
  virtual bool AttachImpl(const OpDesc &opdesc, lite::Scope *scope) = 0;

  // Specify the kernel to run by default. This will specify the value of
  // `kernel_place_`.
  virtual void StaticPickKernel(const std::vector<Place> &valid_targets) {
    auto kernels = CreateKernels(valid_targets);
    kernel_ = std::move(kernels.front());
  }

  // Wait until all the inputs' events are ready.
  void SyncInputEvents() {}

  // Record the output events, and that will tell all the dependent operators
  // some inputs are ready.
  void RecordOutputEvents() {}

  const Tensor *GetTensor(lite::Scope *scope, const std::string &name) const;
  Tensor *GetMutableTensor(lite::Scope *scope, const std::string &name) const;

  friend class mir::Node;
  friend class mir::SSAGraph;

 protected:
  lite::Scope *scope_{};
  std::unique_ptr<KernelBase> kernel_;
  std::string op_type_;
  std::vector<Place> valid_places_;
  Place kernel_place_{TARGET(kHost), PRECISION(kFloat)};
  std::unique_ptr<OpInfo> op_info_;
};
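
// A minimal sketch (illustration only, not part of the original header) of
// how a concrete operator is expected to subclass OpLite: hold its argument
// members, override the shape hooks, and hand its param to the kernel in
// AttachKernel. The names `ReluOpLite`, `ReluParam`, and the `SetParam` call
// on KernelBase are assumptions for the example.
//
//   class ReluOpLite : public OpLite {
//    public:
//     explicit ReluOpLite(const std::string &type) : OpLite(type) {}
//
//     bool CheckShape() const override { return param_.x != nullptr; }
//     bool InferShape() const override {
//       param_.output->Resize(param_.x->dims());  // output mirrors the input shape
//       return true;
//     }
//     void AttachKernel(KernelBase *kernel) override { kernel->SetParam(param_); }
//     std::string DebugString() const override { return "relu"; }
//
//    protected:
//     bool AttachImpl(const OpDesc &opdesc, lite::Scope *scope) override {
//       // Resolve the input/output variables declared in `opdesc` from `scope`.
//       return true;
//     }
//
//    private:
//     ReluParam param_;  // hypothetical argument struct holding x and output
//   };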

/*
 * Operator information, such as the operator's description. It will be shared
 * by all the kernels of the same operator.
 */
class OpInfo {
 public:
  // To avoid the bugs in the legacy framework::OpDesc, we use the ProtoBuf
  // message instead.
  void Build(const framework::proto::OpDesc &desc);

  const framework::proto::OpDesc &desc() const;
  framework::proto::OpDesc *mutable_desc() { return desc_.get(); }
  const std::list<std::string> &input_names() const { return input_names_; }
  const std::list<std::string> &output_names() const { return output_names_; }
  const std::map<std::string, std::list<std::string>> &input_argument() const;
  const std::map<std::string, std::list<std::string>> &output_argument() const;
  bool GetInputArgname(const std::string &value_name, std::string *out) const;
  bool GetOutputArgname(const std::string &value_name, std::string *out) const;

  const std::list<std::string> &input_argnames() const;
  const std::list<std::string> &output_argnames() const;

 private:
  void ExtractInputsAndOutputs(const framework::proto::OpDesc &opdesc);

  void CollectInputAndOutputArgnames(const framework::proto::OpDesc &opdesc);

  void CollectArguments(const framework::proto::OpDesc &opdesc);

 private:
  std::list<std::string> input_names_;
  std::list<std::string> output_names_;
  std::list<std::string> input_argnames_;
  std::list<std::string> output_argnames_;
  std::map<std::string, std::list<std::string>> input_argument_;
  std::map<std::string, std::list<std::string>> output_argument_;
  // NOTE: holding the whole proto::OpDesc here is too heavy.
  std::unique_ptr<framework::proto::OpDesc> desc_;
};
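
// A short usage sketch (illustration only; `proto_desc` and the value name
// "X0" are assumptions): build an OpInfo from a ProtoBuf op description, then
// map a value name back to the input argument slot it belongs to.
//
//   OpInfo info;
//   info.Build(proto_desc);  // a framework::proto::OpDesc parsed elsewhere
//   std::string argname;
//   if (info.GetInputArgname("X0", &argname)) {
//     LOG(INFO) << "value X0 feeds input argument " << argname;
//   }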

}  // namespace lite
}  // namespace paddle