/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/framework/op_desc.h"
#include <functional>
#include <mutex>
#include <unordered_map>
#include "glog/logging.h"
#include "paddle/framework/block_desc.h"
#include "paddle/framework/operator.h"
#include "paddle/framework/program_desc.h"
#include "paddle/framework/shape_inference.h"

namespace paddle {
namespace framework {

class OpDesc;
class BlockDesc;
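
// Compile-time counterpart of the runtime InferShapeContext: shapes, types,
// and LoD levels are read from and written to the VarDescs of a BlockDesc
// instead of real tensors in a Scope.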
class CompileTimeInferShapeContext : public InferShapeContext {
 public:
  CompileTimeInferShapeContext(const OpDesc &op, const BlockDesc &block);

  bool HasInput(const std::string &name) const override;

  bool HasOutput(const std::string &name) const override;

  bool HasInputs(const std::string &name) const override;

  bool HasOutputs(const std::string &name) const override;

  DDim GetInputDim(const std::string &name) const override;

  void SetOutputDim(const std::string &name, const DDim &dim) override;

  AttrReader Attrs() const override;

  const std::vector<std::string> &Inputs(
      const std::string &name) const override;

  const std::vector<std::string> &Outputs(
      const std::string &name) const override;

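  // Propagates the LoD level of the i-th argument of Input(in) to the j-th
  // argument of Output(out); a no-op when the input is not a LoDTensor.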
  void ShareLoD(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size());
    PADDLE_ENFORCE_LT(j, Outputs(out).size());
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    if (in_var->GetType() != proto::VarDesc::LOD_TENSOR) {
      VLOG(3) << "input " << in << " is not LoDTensor";
      return;
    }
    PADDLE_ENFORCE_EQ(out_var->GetType(), proto::VarDesc::LOD_TENSOR,
                      "The %d-th output of Output(%s) must be LoDTensor.", j,
                      out);
    out_var->SetLoDLevel(in_var->GetLodLevel());
  }
  bool IsRuntime() const override;

 protected:
  proto::VarDesc::VarType GetVarType(const std::string &name) const override;

  DDim GetDim(const std::string &name) const override;

  void SetDim(const std::string &name, const DDim &dim) override;

  const OpDesc &op_;
  const BlockDesc &block_;
};

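// Builds an OpDesc in memory; nothing touches the underlying protobuf until
// Flush() (or Proto()) is called. A hypothetical call site, for illustration:
//
//   OpDesc op("mul", /*inputs=*/{{"X", {"x"}}, {"Y", {"y"}}},
//             /*outputs=*/{{"Out", {"out"}}}, /*attrs=*/{});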
OpDesc::OpDesc(const std::string &type, const VariableNameMap &inputs,
               const VariableNameMap &outputs, const AttributeMap &attrs) {
  desc_.set_type(type);
  inputs_ = inputs;
  outputs_ = outputs;
  attrs_ = attrs;
  need_update_ = true;
}

void OpDesc::CopyFrom(const OpDesc &op_desc) {
  desc_.set_type(op_desc.Type());
  inputs_ = op_desc.inputs_;
  outputs_ = op_desc.outputs_;
  attrs_ = op_desc.attrs_;
  need_update_ = true;
}

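// Rebuilds the in-memory maps from a serialized proto::OpDesc. BLOCK
// attributes travel as block indices on the wire, so `prog` is needed to
// resolve them back into BlockDesc pointers.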
OpDesc::OpDesc(const proto::OpDesc &desc, ProgramDesc *prog)
    : desc_(desc), need_update_(false) {
  // restore inputs_
  int input_size = desc_.inputs_size();
  for (int i = 0; i < input_size; ++i) {
    const proto::OpDesc::Var &var = desc_.inputs(i);
    std::vector<std::string> &args = inputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore outputs_
  int output_size = desc_.outputs_size();
  for (int i = 0; i < output_size; ++i) {
    const proto::OpDesc::Var &var = desc_.outputs(i);
    std::vector<std::string> &args = outputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore attrs_
  for (const proto::OpDesc::Attr &attr : desc_.attrs()) {
    std::string attr_name = attr.name();
    if (attr.type() != proto::AttrType::BLOCK) {
      attrs_[attr_name] = GetAttrValue(attr);
    } else {
      auto bid = attr.block_idx();
      attrs_[attr_name] = prog->MutableBlock(bid);
    }
  }
}

proto::OpDesc *OpDesc::Proto() {
  Flush();
  return &desc_;
}

const std::vector<std::string> &OpDesc::Input(const std::string &name) const {
  auto it = inputs_.find(name);
  PADDLE_ENFORCE(it != inputs_.end(), "Input %s cannot be found in Op %s", name,
                 Type());
  return it->second;
}

std::vector<std::string> OpDesc::InputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->inputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetInput(const std::string &param_name,
                      const std::vector<std::string> &args) {
  need_update_ = true;
  inputs_[param_name] = args;
}

const std::vector<std::string> &OpDesc::Output(const std::string &name) const {
  auto it = outputs_.find(name);
  PADDLE_ENFORCE(it != outputs_.end(), "Output %s cannot be found in Op %s",
                 name, Type());
  return it->second;
}

std::vector<std::string> OpDesc::OutputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->outputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetOutput(const std::string &param_name,
                       const std::vector<std::string> &args) {
  need_update_ = true;
  this->outputs_[param_name] = args;
}

proto::AttrType OpDesc::GetAttrType(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
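  // Attribute is a boost::variant whose first alternative is boost::blank;
  // the remaining alternatives mirror the proto::AttrType order, so
  // which() - 1 recovers the enum value.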
  return static_cast<proto::AttrType>(it->second.which() - 1);
}

std::vector<std::string> OpDesc::AttrNames() const {
  std::vector<std::string> retv;
  retv.reserve(attrs_.size());
  for (auto &attr : attrs_) {
    retv.push_back(attr.first);
  }
  return retv;
}

void OpDesc::SetAttr(const std::string &name, const Attribute &v) {
  this->attrs_[name] = v;
  need_update_ = true;
}

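// Block attributes are held as raw BlockDesc pointers owned by the enclosing
// ProgramDesc and are serialized by block index (see SetAttrDescVisitor).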
void OpDesc::SetBlockAttr(const std::string &name, BlockDesc &block) {
  this->attrs_[name] = &block;
  need_update_ = true;
}

void OpDesc::SetAttrMap(
    const std::unordered_map<std::string, Attribute> &attr_map) {
  attrs_ = attr_map;
  need_update_ = true;
}

Attribute OpDesc::GetAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  return it->second;
}

int OpDesc::GetBlockAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  return boost::get<BlockDesc *>(it->second)->ID();
}

const std::unordered_map<std::string, Attribute> &OpDesc::GetAttrMap() const {
  return attrs_;
}

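// Replaces old_name with new_name wherever it appears as an argument, on both
// the input and output side; RenameInput/RenameOutput below restrict the
// replacement to one side.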
void OpDesc::Rename(const std::string &old_name, const std::string &new_name) {
  for (auto &input : inputs_) {
    std::replace(input.second.begin(), input.second.end(), old_name, new_name);
  }
  for (auto &output : outputs_) {
    std::replace(output.second.begin(), output.second.end(), old_name,
                 new_name);
  }
  need_update_ = true;
}

void OpDesc::RenameOutput(const std::string &old_name,
                          const std::string &new_name) {
  for (auto &output : outputs_) {
    std::replace(output.second.begin(), output.second.end(), old_name,
                 new_name);
  }
  need_update_ = true;
}

void OpDesc::RenameInput(const std::string &old_name,
                         const std::string &new_name) {
  for (auto &input : inputs_) {
    std::replace(input.second.begin(), input.second.end(), old_name, new_name);
  }
  need_update_ = true;
}

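// Writes one Attribute alternative into the matching field of a
// proto::OpDesc::Attr message; boost::apply_visitor dispatches on the
// alternative actually stored, so Flush() needs no type switch of its own.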
struct SetAttrDescVisitor : public boost::static_visitor<void> {
  explicit SetAttrDescVisitor(proto::OpDesc::Attr *attr) : attr_(attr) {}
  mutable proto::OpDesc::Attr *attr_;
  void operator()(int v) const { attr_->set_i(v); }
  void operator()(float v) const { attr_->set_f(v); }
  void operator()(const std::string &v) const { attr_->set_s(v); }
  void operator()(bool b) const { attr_->set_b(b); }

  void operator()(const std::vector<int> &v) const {
    VectorToRepeated(v, attr_->mutable_ints());
  }
  void operator()(const std::vector<float> &v) const {
    VectorToRepeated(v, attr_->mutable_floats());
  }
  void operator()(const std::vector<std::string> &v) const {
    VectorToRepeated(v, attr_->mutable_strings());
  }
  void operator()(const std::vector<bool> &v) const {
    VectorToRepeated(v, attr_->mutable_bools());
  }
  void operator()(proto::BlockDesc *desc) const {
    attr_->set_block_idx(desc->idx());
  }
  void operator()(boost::blank) const { PADDLE_THROW("Unexpected branch"); }
};

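// Serializes the cached inputs_/outputs_/attrs_ maps into desc_. Mutators only
// set need_update_, so repeated Flush() calls are cheap no-ops.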
void OpDesc::Flush() {
  if (need_update_) {
    this->desc_.mutable_inputs()->Clear();
    for (auto &ipt : inputs_) {
      auto *input = desc_.add_inputs();
      input->set_parameter(ipt.first);
      VectorToRepeated(ipt.second, input->mutable_arguments());
    }

    this->desc_.mutable_outputs()->Clear();
    for (auto &opt : outputs_) {
      auto *output = desc_.add_outputs();
      output->set_parameter(opt.first);
      VectorToRepeated(opt.second, output->mutable_arguments());
    }

    this->desc_.mutable_attrs()->Clear();
    for (auto &attr : attrs_) {
      auto *attr_desc = desc_.add_attrs();
      attr_desc->set_name(attr.first);
      attr_desc->set_type(
          static_cast<proto::AttrType>(attr.second.which() - 1));
      SetAttrDescVisitor visitor(attr_desc);
      boost::apply_visitor(visitor, attr.second);
    }

    need_update_ = false;
  }
}

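// Derives a default infer-shape function for every operator type that
// registered a kernel but no explicit infer_shape_: a prototype operator is
// constructed once per type and captured by the lambda. std::call_once keeps
// the scan over AllOpKernels() to the first InferShape() invocation.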
static std::once_flag init_infer_shape_funcs;

static void InitInferShapeFuncs() {
  std::call_once(init_infer_shape_funcs, [] {
    auto &map = OpInfoMap::Instance();
    auto &info_map = *map.mutable_map();

    for (auto &kern_pair : OperatorWithKernel::AllOpKernels()) {
      auto op_type = kern_pair.first;
      auto &op_info = info_map.at(op_type);
      if (op_info.infer_shape_) {  // infer_shape has been registered.
        continue;
      }
      // Construct the prototype only when it is actually needed, and let the
      // lambda own it; this avoids leaking an instance for ops that already
      // registered an infer-shape function.
      auto op = static_cast<OperatorWithKernel *>(op_info.Creator()(
          "", VariableNameMap{}, VariableNameMap{}, AttributeMap{}));
      op_info.infer_shape_ = [op](InferShapeContext *ctx) {
        op->InferShape(ctx);
      };
    }
  });
}

void OpDesc::CheckAttrs() {
  PADDLE_ENFORCE(!Type().empty(),
                 "CheckAttrs() cannot be called before the op type is set.");
  auto *checker = OpInfoMap::Instance().Get(Type()).Checker();
  if (checker == nullptr) {
    // No checker is configured; the operator may have been generated by
    // Paddle itself rather than by end users.
    return;
  }
  checker->Check(attrs_);
}

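// Compile-time shape inference: looks up the infer-shape function registered
// for this op type and runs it against the VarDescs of `block`.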
void OpDesc::InferShape(const BlockDesc &block) const {
  VLOG(3) << "CompileTime infer shape on " << Type();
  InitInferShapeFuncs();
  auto &infer_shape = OpInfoMap::Instance().Get(this->Type()).infer_shape_;
  PADDLE_ENFORCE(static_cast<bool>(infer_shape),
                 "%s's infer_shape has not been registered", this->Type());
  CompileTimeInferShapeContext ctx(*this, block);
  if (VLOG_IS_ON(10)) {
    std::ostringstream sout;
    auto inames = this->InputArgumentNames();
    sout << " From [";
    std::copy(inames.begin(), inames.end(),
              std::ostream_iterator<std::string>(sout, ", "));
    sout << "] to [";
    auto onames = this->OutputArgumentNames();
    std::copy(onames.begin(), onames.end(),
              std::ostream_iterator<std::string>(sout, ", "));
    sout << "]";
    VLOG(10) << sout.str();
  }
  infer_shape(&ctx);
}

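// Compile-time variable-type inference; falls back to marking every output
// variable LOD_TENSOR when the op has no registered InferVarType function.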
void OpDesc::InferVarType(BlockDesc *block) const {
  auto &info = OpInfoMap::Instance().Get(this->Type());
  if (info.infer_var_type_) {
    info.infer_var_type_(*this, block);
  } else {
    // All output types default to LoDTensor.
    VLOG(10) << this->Type()
             << " has not registered InferVarType. Set output variables to "
                "LOD_TENSOR";
    for (auto &out_pair : this->outputs_) {
      for (auto &out_var_name : out_pair.second) {
        block->FindRecursiveOrCreateVar(out_var_name)
            ->SetType(proto::VarDesc::LOD_TENSOR);
      }
    }
  }
}

CompileTimeInferShapeContext::CompileTimeInferShapeContext(
    const OpDesc &op, const BlockDesc &block)
    : op_(op), block_(block) {}

bool CompileTimeInferShapeContext::HasInput(const std::string &name) const {
  const std::vector<std::string> &input_names = op_.Input(name);
  auto length = input_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL,
                    "Input(%s) should have only one value, "
                    "but it have %d now",
                    name, length);
  return block_.HasVarRecursive(input_names[0]);
}

bool CompileTimeInferShapeContext::HasOutput(const std::string &name) const {
  const std::vector<std::string> &output_names = op_.Output(name);
  auto length = output_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL,
                    "Output(%s) should have only one value, "
                    "but it have %d now",
                    name, length);
  return block_.HasVarRecursive(output_names[0]);
}

bool CompileTimeInferShapeContext::HasInputs(const std::string &name) const {
  const std::vector<std::string> &input_names = op_.Input(name);
  if (input_names.empty()) {
    return false;
  }
  for (auto &input : input_names) {
    if (!block_.HasVarRecursive(input)) return false;
  }
  return true;
}

bool CompileTimeInferShapeContext::HasOutputs(const std::string &name) const {
  const std::vector<std::string> &output_names = op_.Output(name);
  if (output_names.empty()) {
    return false;
  }
  for (auto &output : output_names) {
    if (!block_.HasVarRecursive(output)) return false;
  }
  return true;
}

DDim CompileTimeInferShapeContext::GetInputDim(const std::string &name) const {
  std::vector<DDim> ddims = GetInputsDim(name);
  auto length = ddims.size();
  PADDLE_ENFORCE_EQ(length, 1UL,
                    "Input(%s) should have 1 value, "
                    "but it has %d now",
                    name, length);
  return ddims[0];
}

void CompileTimeInferShapeContext::SetOutputDim(const std::string &name,
                                                const DDim &dim) {
  SetOutputsDim(name, {dim});
}

AttrReader CompileTimeInferShapeContext::Attrs() const {
  return AttrReader(op_.GetAttrMap());
}

const std::vector<std::string> &CompileTimeInferShapeContext::Inputs(
    const std::string &name) const {
  return op_.Input(name);
}

const std::vector<std::string> &CompileTimeInferShapeContext::Outputs(
    const std::string &name) const {
  return op_.Output(name);
}

DDim CompileTimeInferShapeContext::GetDim(const std::string &name) const {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE(var != nullptr, "Cannot find variable %s", name);
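  // An empty compile-time shape maps to the placeholder ddim {0}. Note that
  // var->Shape() may throw for variable types that carry no tensor
  // description; the offending name is logged before rethrowing.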
  try {
    auto shape = var->Shape();
    if (shape.empty()) {
      return framework::make_ddim({0UL});
    } else {
      return framework::make_ddim(shape);
    }
  } catch (...) {
    VLOG(5) << "GetDim of variable " << name << " error";
    std::rethrow_exception(std::current_exception());
  }
}

void CompileTimeInferShapeContext::SetDim(const std::string &name,
                                          const DDim &dim) {
  block_.FindVarRecursive(name)->SetShape(framework::vectorize(dim));
}
bool CompileTimeInferShapeContext::IsRuntime() const { return false; }

proto::VarDesc::VarType CompileTimeInferShapeContext::GetVarType(
    const std::string &name) const {
  return block_.FindVarRecursive(name)->GetType();
}

}  // namespace framework
}  // namespace paddle