/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <algorithm>
#include <functional>
#include <memory>
#include <string>
#include <unordered_map>
#include <vector>

#include <boost/variant.hpp>

#include "paddle/framework/attr_checker.h"
#include "paddle/framework/op_desc.pb.h"
#include "paddle/framework/op_proto.pb.h"
#include "paddle/framework/scope.h"
#include "paddle/framework/tensor.h"
#include "paddle/platform/device_context.h"
#include "paddle/platform/place.h"
#include "paddle/utils/Error.h"

namespace paddle {
namespace framework {

class OperatorBase;
class InferShapeContext;
class ExecutionContext;
/**
 * OperatorBase has the basic element that Net will call to do computation.
 * Only CreateOperator from OpRegistry will new Operator directly. User
 * should always construct a proto message OpDesc and call
 * OpRegistry::CreateOp(op_desc) to get an Operator instance.
 */
class OperatorBase {
 public:
45 46 47 48 49 50 51
  /// If a variable is a empty variable, that name will be used.
  static std::string EMPTY_VAR_NAME() { return "@EMPTY@"; }

  /// If a variable is a temporary variable, that name will be set in Python,
  /// but it will be convert to a unique name in scope after OpCreator.
  static std::string TMP_VAR_NAME() { return "@TEMP@"; }

F
fengjiayi 已提交
52 53 54 55 56
  /// If a variable's name has a certain suffix, it means that the
  /// variable is the gradient of another varibale.
  /// e.g. Variable "x@GRAD" is the gradient of varibale "x".
  static std::string GRAD_VAR_SUFFIX() { return "@GRAD"; }

Q
Qiao Longfei 已提交
57 58 59 60 61 62 63 64 65
  virtual ~OperatorBase() {}

  template <typename T>
  inline const T& GetAttr(const std::string& name) const {
    PADDLE_ENFORCE(attrs_.count(name) != 0, "%s should be in AttributeMap",
                   name);
    return boost::get<T>(attrs_.at(name));
  }

66
  virtual std::string DebugString() const;
Q
Qiao Longfei 已提交
67

Q
Qiao Longfei 已提交
68 69 70 71
  /// Init will be called after CreateOperator, you can put some initialization
  /// logic here.
  virtual void Init() {}

Q
Qiao Longfei 已提交
72 73
  /// InferShape infer the size of Variables used by this Operator with
  /// information inside scope
Y
Yu Yang 已提交
74
  virtual void InferShape(const std::shared_ptr<Scope>& scope) const = 0;
Q
Qiao Longfei 已提交
75 76

  /// Net will call this function to Run an op.
Y
Yu Yang 已提交
77
  virtual void Run(const std::shared_ptr<Scope>& scope,
Y
Yu Yang 已提交
78 79
                   const platform::DeviceContext& dev_ctx) const = 0;

Y
Yu Yang 已提交
80 81 82
  virtual bool IsNetOp() const { return false; }

  //! Get a input with argument's name described in `op_proto`
Y
Yan Chunwei 已提交
83
  const std::string& Input(const std::string& name) const;
Y
Yu Yang 已提交
84 85
  //! Get a input which has multiple variables.
  //! TODO add a vector_view to prevent memory copy.
Y
Yan Chunwei 已提交
86
  std::vector<std::string> Inputs(const std::string& name) const;
Y
Yu Yang 已提交
87
  //! Get a output with argument's name described in `op_proto`
Y
Yan Chunwei 已提交
88
  const std::string& Output(const std::string& name) const;
Y
Yu Yang 已提交
89 90
  //! Get an output which has multiple variables.
  //! TODO add a vector_view to prevent memory copy.
Y
Yan Chunwei 已提交
91 92
  std::vector<std::string> Outputs(const std::string& name) const;

Q
Qiao Longfei 已提交
93
 public:
Q
Qiao Longfei 已提交
94
  std::string type_;
Q
Qiao Longfei 已提交
95 96 97
  std::vector<std::string> inputs_;
  std::vector<std::string> outputs_;
  AttributeMap attrs_;
Y
Yan Chunwei 已提交
98
  // store the arguments' offset described in op_desc.
Y
Yu Yang 已提交
99
  std::shared_ptr<std::unordered_map<std::string, int>> in_out_idxs_;
Y
Yan Chunwei 已提交
100 101
};

class OperatorContext {
Y
Yan Chunwei 已提交
103
 public:
104 105 106 107
  OperatorContext(const OperatorBase* op, const std::shared_ptr<Scope>& scope)
      : op_(*op), scope_(scope) {}

  size_t InputSize() const { return op_.inputs_.size(); }
Y
Yan Chunwei 已提交
108

109 110 111 112
  size_t OutputSize() const { return op_.outputs_.size(); }

  const Variable* InputVar(const size_t& index) const {
    return scope_->GetVariable(op_.inputs_.at(index));
Y
Yan Chunwei 已提交
113 114
  }

115 116
  Variable* OutputVar(const size_t& index) const {
    return scope_->GetVariable(op_.outputs_.at(index));
Y
Yan Chunwei 已提交
117 118
  }

119
  const Variable* InputVar(const std::string& name) const {
Y
Yan Chunwei 已提交
120 121 122
    return scope_->GetVariable(op_.Input(name));
  }

123
  Variable* OutputVar(const std::string& name) const {
Y
Yan Chunwei 已提交
124 125 126
    return scope_->GetVariable(op_.Output(name));
  }

127 128
  const std::vector<const Variable*> MultiInputVar(
      const std::string& name) const {
Y
Yan Chunwei 已提交
129 130
    auto names = op_.Inputs(name);
    std::vector<const Variable*> res;
131
    res.reserve(names.size());
Y
Yan Chunwei 已提交
132
    std::transform(
133
        names.begin(), names.end(), std::back_inserter(res),
Y
Yan Chunwei 已提交
134 135 136 137
        [this](const std::string& name) { return scope_->GetVariable(name); });
    return res;
  }

138
  std::vector<const Variable*> MultiOutputVar(const std::string& name) const {
Y
Yan Chunwei 已提交
139 140
    auto names = op_.Outputs(name);
    std::vector<const Variable*> res;
141
    res.reserve(names.size());
Y
Yan Chunwei 已提交
142
    std::transform(
143
        names.begin(), names.end(), std::back_inserter(res),
Y
Yan Chunwei 已提交
144 145 146 147
        [this](const std::string& name) { return scope_->GetVariable(name); });
    return res;
  }

148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222
  template <typename T>
  const T* Input(const size_t& index) const {
    return &(InputVar(index)->Get<T>());
  }

  template <typename T>
  T* Output(const size_t& index) const {
    return OutputVar(index)->GetMutable<T>();
  }

  template <typename T>
  const T* Input(const std::string& name) const {
    return &(InputVar(name)->Get<T>());
  }

  template <typename T>
  T* Output(const std::string& name) const {
    return OutputVar(name)->GetMutable<T>();
  }

  template <typename T>
  const std::vector<const T*> MultiInput(const std::string& name) const {
    auto names = op_.Inputs(name);
    std::vector<const T*> res;
    res.reserve(names.size());
    std::transform(names.begin(), names.end(), std::back_inserter(res),
                   [this](const std::string& name) {
                     return &scope_->GetVariable(name)->Get<T>();
                   });
    return res;
  }

  template <typename T>
  std::vector<const T*> MultiOutput(const std::string& name) const {
    auto names = op_.Outputs(name);
    std::vector<const T*> res;
    res.reserve(names.size());
    std::transform(names.begin(), names.end(), std::back_inserter(res),
                   [this](const std::string& name) {
                     return scope_->GetVariable(name)->GetMutable<T>();
                   });
    return res;
  }

  const OperatorBase& op_;
  const std::shared_ptr<Scope>& scope_;
};

class InferShapeContext : public OperatorContext {
 public:
  InferShapeContext(const OperatorBase* op, const std::shared_ptr<Scope>& scope)
      : OperatorContext(op, scope) {}
};

template <typename T>
struct EigenDeviceConverter;

template <>
struct EigenDeviceConverter<platform::CPUPlace> {
  using EigenDeviceType = Eigen::DefaultDevice;
};

#ifndef PADDLE_ONLY_CPU
template <>
struct EigenDeviceConverter<platform::GPUPlace> {
  using EigenDeviceType = Eigen::GpuDevice;
};
#endif

class ExecutionContext : public OperatorContext {
 public:
  ExecutionContext(const OperatorBase* op, const std::shared_ptr<Scope>& scope,
                   const platform::DeviceContext& device_context)
      : OperatorContext(op, scope), device_context_(device_context) {}

Q
qijun 已提交
223 224 225 226 227 228 229
  template <typename PlaceType,
            typename DeviceType =
                typename EigenDeviceConverter<PlaceType>::EigenDeviceType>
  DeviceType* GetEigenDevice() const;

  platform::Place GetPlace() const { return device_context_.GetPlace(); }

Y
Yan Chunwei 已提交
230
  const platform::DeviceContext& device_context_;
Q
Qiao Longfei 已提交
231 232
};

class OpKernel {
 public:
Q
qijun 已提交
235
  /**
236
   * ExecutionContext is the only parameter of Kernel Run function.
Q
qijun 已提交
237 238
   * Run will get input/output variables, state such as momentum and
   * device resource such as CUDA stream, cublas handle, etc. from
239
   * ExecutionContext. User should construct it before run the Operator.
Q
qijun 已提交
240 241
   */

242
  virtual void Compute(const ExecutionContext& context) const = 0;
Y
Yu Yang 已提交
243 244 245 246

  virtual ~OpKernel() {}
};

class OperatorWithKernel : public OperatorBase {
 public:
Y
Yu Yang 已提交
249 250
  struct OpKernelKey {
    platform::Place place_;
Q
Qiao Longfei 已提交
251

Y
Yu Yang 已提交
252 253 254 255 256
    OpKernelKey() = default;
    OpKernelKey(const platform::DeviceContext& dev_ctx) {
      place_ = dev_ctx.GetPlace();
    }

Q
qijun 已提交
257 258 259
    bool operator==(const OpKernelKey& o) const {
      return platform::places_are_same_class(place_, o.place_);
    }
Y
Yu Yang 已提交
260 261 262 263 264 265 266 267 268 269 270
  };

  struct OpKernelHash {
    std::hash<bool> hash_;
    size_t operator()(const OpKernelKey& key) const {
      return hash_(platform::is_gpu_place(key.place_));
    }
  };

  using OpKernelMap =
      std::unordered_map<OpKernelKey, std::unique_ptr<OpKernel>, OpKernelHash>;
Q
Qiao Longfei 已提交
271

272 273 274 275
  void InferShape(const std::shared_ptr<Scope>& scope) const {
    InferShape(InferShapeContext(this, scope));
  }

Y
Yu Yang 已提交
276
  void Run(const std::shared_ptr<Scope>& scope,
Y
Yu Yang 已提交
277
           const platform::DeviceContext& dev_ctx) const final {
Q
Qiao Longfei 已提交
278
    auto& opKernel = AllOpKernels().at(type_).at(OpKernelKey(dev_ctx));
279
    opKernel->Compute(ExecutionContext(this, scope, dev_ctx));
Q
Qiao Longfei 已提交
280 281
  }

Y
Yu Yang 已提交
282 283 284 285
  static std::unordered_map<std::string /* op_type */, OpKernelMap>&
  AllOpKernels() {
    static std::unordered_map<std::string, OpKernelMap> g_all_op_kernels;
    return g_all_op_kernels;
Y
Yu Yang 已提交
286
  }
Y
Yan Chunwei 已提交
287

Y
Yu Yang 已提交
288
 protected:
289
  virtual void InferShape(const InferShapeContext& ctx) const = 0;
Q
Qiao Longfei 已提交
290 291 292 293
};

}  // namespace framework
}  // namespace paddle