/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/framework/op_desc.h"
#include <functional>
#include <mutex>
#include <unordered_map>
#include "glog/logging.h"
#include "paddle/framework/block_desc.h"
#include "paddle/framework/operator.h"
#include "paddle/framework/program_desc.h"
#include "paddle/framework/shape_inference.h"

namespace paddle {
namespace framework {

class OpDesc;
class BlockDesc;
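// Compile-time implementation of InferShapeContext: shape and type queries
// are answered from the VarDesc entries of a BlockDesc rather than from
// runtime tensors.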
class CompileTimeInferShapeContext : public InferShapeContext {
 public:
  CompileTimeInferShapeContext(const OpDesc &op, const BlockDesc &block);

  bool HasInput(const std::string &name) const override;

  bool HasOutput(const std::string &name) const override;

  bool HasInputs(const std::string &name) const override;

  bool HasOutputs(const std::string &name) const override;

  AttrReader Attrs() const override;

  const std::vector<std::string> &Inputs(
      const std::string &name) const override;

  const std::vector<std::string> &Outputs(
      const std::string &name) const override;

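  // Copies the LoD level of the i-th argument of input `in` to the j-th
  // argument of output `out`; inputs that are not LoDTensors are skipped.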
  void ShareLoD(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size());
    PADDLE_ENFORCE_LT(j, Outputs(out).size());
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    if (in_var->GetType() != proto::VarDesc::LOD_TENSOR) {
      VLOG(3) << "input " << in << " is not LoDTensor";
      return;
    }
    PADDLE_ENFORCE_EQ(out_var->GetType(), proto::VarDesc::LOD_TENSOR,
                      "The %d-th output of Output(%s) must be LoDTensor.", j,
                      out);
    out_var->SetLoDLevel(in_var->GetLoDLevel());
  }

  bool IsRuntime() const override;

 protected:
  proto::VarDesc::VarType GetVarType(const std::string &name) const override;

  DDim GetDim(const std::string &name) const override;

  void SetDim(const std::string &name, const DDim &dim) override;

  std::vector<DDim> GetRepeatedDim(const std::string &name) const override;

  const OpDesc &op_;
  const BlockDesc &block_;
};

OpDesc::OpDesc(const std::string &type, const VariableNameMap &inputs,
               const VariableNameMap &outputs, const AttributeMap &attrs) {
  desc_.set_type(type);
  inputs_ = inputs;
  outputs_ = outputs;
  attrs_ = attrs;
  need_update_ = true;
}

void OpDesc::CopyFrom(const OpDesc &op_desc) {
  desc_.set_type(op_desc.Type());
  inputs_ = op_desc.inputs_;
  outputs_ = op_desc.outputs_;
  attrs_ = op_desc.attrs_;
  need_update_ = true;
}

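// Rebuilds the in-memory inputs_, outputs_, and attrs_ maps from a
// serialized proto::OpDesc; BLOCK attributes are resolved through `prog`.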
OpDesc::OpDesc(const proto::OpDesc &desc, ProgramDesc *prog, BlockDesc *block)
    : desc_(desc), need_update_(false) {
  // restore inputs_
  int input_size = desc_.inputs_size();
  for (int i = 0; i < input_size; ++i) {
    const proto::OpDesc::Var &var = desc_.inputs(i);
    std::vector<std::string> &args = inputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore outputs_
  int output_size = desc_.outputs_size();
  for (int i = 0; i < output_size; ++i) {
    const proto::OpDesc::Var &var = desc_.outputs(i);
    std::vector<std::string> &args = outputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore attrs_
  for (const proto::OpDesc::Attr &attr : desc_.attrs()) {
    std::string attr_name = attr.name();
    if (attr.type() != proto::AttrType::BLOCK) {
      attrs_[attr_name] = GetAttrValue(attr);
    } else {
      auto bid = attr.block_idx();
      attrs_[attr_name] = prog->MutableBlock(bid);
    }
  }
  this->block_ = block;
}

proto::OpDesc *OpDesc::Proto() {
  Flush();
  return &desc_;
}

const std::vector<std::string> &OpDesc::Input(const std::string &name) const {
  auto it = inputs_.find(name);
  PADDLE_ENFORCE(it != inputs_.end(), "Input %s cannot be found in Op %s", name,
                 Type());
  return it->second;
}

std::vector<std::string> OpDesc::InputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->inputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetInput(const std::string &param_name,
                      const std::vector<std::string> &args) {
  need_update_ = true;
  inputs_[param_name] = args;
}

const std::vector<std::string> &OpDesc::Output(const std::string &name) const {
  auto it = outputs_.find(name);
  PADDLE_ENFORCE(it != outputs_.end(), "Output %s cannot be found in Op %s",
                 name, Type());
  return it->second;
}

std::vector<std::string> OpDesc::OutputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->outputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetOutput(const std::string &param_name,
                       const std::vector<std::string> &args) {
  need_update_ = true;
  this->outputs_[param_name] = args;
}

proto::AttrType OpDesc::GetAttrType(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
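  // The -1 maps the variant index onto proto::AttrType: the variant's first
  // alternative (boost::blank) has no counterpart in the proto enum.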
  return static_cast<proto::AttrType>(it->second.which() - 1);
}

std::vector<std::string> OpDesc::AttrNames() const {
  std::vector<std::string> retv;
  retv.reserve(attrs_.size());
  for (auto &attr : attrs_) {
    retv.push_back(attr.first);
  }
  return retv;
}

Y
Yu Yang 已提交
197
void OpDesc::SetAttr(const std::string &name, const Attribute &v) {
  this->attrs_[name] = v;
  need_update_ = true;
}

Y
Yu Yang 已提交
202
void OpDesc::SetBlockAttr(const std::string &name, BlockDesc &block) {
  this->attrs_[name] = &block;
  need_update_ = true;
}

void OpDesc::SetAttrMap(
    const std::unordered_map<std::string, Attribute> &attr_map) {
  attrs_ = attr_map;
  need_update_ = true;
}

Y
Yu Yang 已提交
213
Attribute OpDesc::GetAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  return it->second;
}

Y
Yu Yang 已提交
219
int OpDesc::GetBlockAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  return boost::get<BlockDesc *>(it->second)->ID();
}

const std::unordered_map<std::string, Attribute> &OpDesc::GetAttrMap() const {
  return attrs_;
}

Y
Yu Yang 已提交
229
void OpDesc::Rename(const std::string &old_name, const std::string &new_name) {
  for (auto &input : inputs_) {
    std::replace(input.second.begin(), input.second.end(), old_name, new_name);
  }
  for (auto &output : outputs_) {
    std::replace(output.second.begin(), output.second.end(), old_name,
                 new_name);
  }
  need_update_ = true;
}

void OpDesc::RenameOutput(const std::string &old_name,
                          const std::string &new_name) {
  for (auto &output : outputs_) {
    std::replace(output.second.begin(), output.second.end(), old_name,
                 new_name);
  }
  need_update_ = true;
}

Y
Yu Yang 已提交
249 250
void OpDesc::RenameInput(const std::string &old_name,
                         const std::string &new_name) {
  for (auto &input : inputs_) {
    std::replace(input.second.begin(), input.second.end(), old_name, new_name);
  }
  need_update_ = true;
}

Y
Yu Yang 已提交
257
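// boost::static_visitor that writes each alternative of the Attribute
// variant into the matching field of a proto::OpDesc::Attr message.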
struct SetAttrDescVisitor : public boost::static_visitor<void> {
  explicit SetAttrDescVisitor(proto::OpDesc::Attr *attr) : attr_(attr) {}
  mutable proto::OpDesc::Attr *attr_;
  void operator()(int v) const { attr_->set_i(v); }
  void operator()(float v) const { attr_->set_f(v); }
  void operator()(const std::string &v) const { attr_->set_s(v); }

  // Please refer to https://github.com/PaddlePaddle/Paddle/issues/7162
  template <class T,
            class = typename std::enable_if<std::is_same<bool, T>::value>::type>
  void operator()(T b) const {
    attr_->set_b(b);
  }

  void operator()(const std::vector<int> &v) const {
    VectorToRepeated(v, attr_->mutable_ints());
  }
  void operator()(const std::vector<float> &v) const {
    VectorToRepeated(v, attr_->mutable_floats());
  }
  void operator()(const std::vector<std::string> &v) const {
    VectorToRepeated(v, attr_->mutable_strings());
  }
  void operator()(const std::vector<bool> &v) const {
    VectorToRepeated(v, attr_->mutable_bools());
  }
  void operator()(BlockDesc *desc) const { attr_->set_block_idx(desc->ID()); }
  void operator()(int64_t v) const { attr_->set_l(v); }
  void operator()(boost::blank) const { PADDLE_THROW("Unexpected branch"); }
};

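// Writes the cached inputs_, outputs_, and attrs_ maps back into the
// underlying protobuf message whenever they have been modified.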
void OpDesc::Flush() {
  if (need_update_) {
    this->desc_.mutable_inputs()->Clear();
    for (auto &ipt : inputs_) {
      auto *input = desc_.add_inputs();
      input->set_parameter(ipt.first);
      VectorToRepeated(ipt.second, input->mutable_arguments());
    }

    this->desc_.mutable_outputs()->Clear();
    for (auto &opt : outputs_) {
      auto *output = desc_.add_outputs();
      output->set_parameter(opt.first);
      VectorToRepeated(opt.second, output->mutable_arguments());
    }

    this->desc_.mutable_attrs()->Clear();
    for (auto &attr : attrs_) {
      auto *attr_desc = desc_.add_attrs();
      attr_desc->set_name(attr.first);
      attr_desc->set_type(
          static_cast<proto::AttrType>(attr.second.which() - 1));
      SetAttrDescVisitor visitor(attr_desc);
      boost::apply_visitor(visitor, attr.second);
    }

    need_update_ = false;
  }
}

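// Installs, at most once per process, a default infer_shape_ function for
// every operator that has kernels registered but no shape-inference
// function of its own, delegating to OperatorWithKernel::InferShape.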
static std::once_flag init_infer_shape_funcs;

static void InitInferShapeFuncs() {
  std::call_once(init_infer_shape_funcs, [] {
    auto &map = OpInfoMap::Instance();
    auto &info_map = *map.mutable_map();

    for (auto &kern_pair : OperatorWithKernel::AllOpKernels()) {
      auto op_type = kern_pair.first;
      auto &op_info = info_map.at(op_type);
      if (op_info.infer_shape_) {  // infer_shape has been registered.
        continue;
      }
      // Construct the throwaway operator instance only when a default is
      // actually needed, so nothing is created (and leaked) for operators
      // that already registered an infer_shape_ function.
      auto op = static_cast<OperatorWithKernel *>(op_info.Creator()(
          "", VariableNameMap{}, VariableNameMap{}, AttributeMap{}));
      op_info.infer_shape_ = [op](InferShapeContext *ctx) {
        op->InferShape(ctx);
      };
    }
  });
}

void OpDesc::CheckAttrs() {
  PADDLE_ENFORCE(!Type().empty(),
                 "CheckAttrs() cannot be called before the type is set.");
  auto *checker = OpInfoMap::Instance().Get(Type()).Checker();
  if (checker == nullptr) {
    // The checker is not configured; such an operator may have been
    // generated by Paddle itself rather than created by users.
    return;
  }
  checker->Check(attrs_);
}

void OpDesc::InferShape(const BlockDesc &block) const {
  VLOG(3) << "CompileTime infer shape on " << Type();
  InitInferShapeFuncs();
  auto &infer_shape = OpInfoMap::Instance().Get(this->Type()).infer_shape_;
  PADDLE_ENFORCE(static_cast<bool>(infer_shape),
                 "%s's infer_shape has not been registered", this->Type());
  CompileTimeInferShapeContext ctx(*this, block);
  if (VLOG_IS_ON(10)) {
    std::ostringstream sout;
    auto inames = this->InputArgumentNames();
    sout << " From [";
    std::copy(inames.begin(), inames.end(),
              std::ostream_iterator<std::string>(sout, ", "));
    sout << "] to [";
    auto onames = this->OutputArgumentNames();
    std::copy(onames.begin(), onames.end(),
              std::ostream_iterator<std::string>(sout, ", "));
    sout << "]";
    VLOG(10) << sout.str();
  }
  infer_shape(&ctx);
}

void OpDesc::InferVarType(BlockDesc *block) const {
  auto &info = OpInfoMap::Instance().Get(this->Type());
  if (info.infer_var_type_) {
    info.infer_var_type_(*this, block);
  } else {
    // all output type is LoDTensor by default
    VLOG(10) << this->Type()
             << " has not registered InferVarType. Set output variables to "
                "LOD_TENSOR";
    for (auto &out_pair : this->outputs_) {
      for (auto &out_var_name : out_pair.second) {
        block->FindRecursiveOrCreateVar(out_var_name)
            .SetType(proto::VarDesc::LOD_TENSOR);
      }
    }
  }
}

CompileTimeInferShapeContext::CompileTimeInferShapeContext(
    const OpDesc &op, const BlockDesc &block)
    : op_(op), block_(block) {}

bool CompileTimeInferShapeContext::HasInput(const std::string &name) const {
  const std::vector<std::string> &input_names = op_.Input(name);
  auto length = input_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL,
                    "Input(%s) should have only one value, "
                    "but it have %d now",
                    name, length);
  return block_.HasVarRecursive(input_names[0]);
}

bool CompileTimeInferShapeContext::HasOutput(const std::string &name) const {
  const std::vector<std::string> &output_names = op_.Output(name);
  auto length = output_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL,
                    "Output(%s) should have only one value, "
                    "but it have %d now",
                    name, length);
  return block_.HasVarRecursive(output_names[0]);
}

bool CompileTimeInferShapeContext::HasInputs(const std::string &name) const {
  const std::vector<std::string> &input_names = op_.Input(name);
  if (input_names.empty()) {
    return false;
  }
  for (auto &input : input_names) {
    if (!block_.HasVarRecursive(input)) return false;
  }
  return true;
}

bool CompileTimeInferShapeContext::HasOutputs(const std::string &name) const {
  const std::vector<std::string> &output_names = op_.Output(name);
  if (output_names.empty()) {
    return false;
  }
  for (auto &output : output_names) {
    if (!block_.HasVarRecursive(output)) return false;
  }
  return true;
}

AttrReader CompileTimeInferShapeContext::Attrs() const {
  return AttrReader(op_.GetAttrMap());
}

const std::vector<std::string> &CompileTimeInferShapeContext::Inputs(
    const std::string &name) const {
  return op_.Input(name);
}

const std::vector<std::string> &CompileTimeInferShapeContext::Outputs(
    const std::string &name) const {
  return op_.Output(name);
}

DDim CompileTimeInferShapeContext::GetDim(const std::string &name) const {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE(var != nullptr, "Cannot find variable %s", name);
  DDim res;
  try {
    auto shape = var->GetShape();
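    // An empty shape stored in the VarDesc is mapped to the 1-D dim {0}.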
    res = shape.empty() ? make_ddim({0UL}) : make_ddim(shape);
  } catch (...) {
    VLOG(5) << "GetDim of variable " << name << " error";
    std::rethrow_exception(std::current_exception());
  }
  return res;
}

std::vector<DDim> CompileTimeInferShapeContext::GetRepeatedDim(
    const std::string &name) const {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE(var != nullptr, "Cannot find variable %s", name);
  std::vector<DDim> res;
  try {
    auto shapes = var->GetShapes();
    for (const auto &s : shapes) {
      res.push_back(s.empty() ? make_ddim({0UL}) : make_ddim(s));
    }
  } catch (...) {
    VLOG(5) << "GetRepeatedDim of variable " << name << " error.";
    std::rethrow_exception(std::current_exception());
  }
  return res;
}

void CompileTimeInferShapeContext::SetDim(const std::string &name,
                                          const DDim &dim) {
  block_.FindVarRecursive(name)->SetShape(vectorize(dim));
}
bool CompileTimeInferShapeContext::IsRuntime() const { return false; }

proto::VarDesc::VarType CompileTimeInferShapeContext::GetVarType(
    const std::string &name) const {
  return block_.FindVarRecursive(name)->GetType();
}

}  // namespace framework
}  // namespace paddle