/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/framework/op_desc.h"
#include <functional>
#include <mutex>
#include <unordered_map>
#include "glog/logging.h"
#include "paddle/framework/block_desc.h"
#include "paddle/framework/operator.h"
#include "paddle/framework/program_desc.h"
#include "paddle/framework/shape_inference.h"

namespace paddle {
namespace framework {

class OpDesc;
class BlockDesc;
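// CompileTimeInferShapeContext adapts an OpDesc and its enclosing BlockDesc
// to the InferShapeContext interface, so the same InferShape routines can run
// at compile time against variable descriptions instead of allocated tensors.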
class CompileTimeInferShapeContext : public InferShapeContext {
 public:
  CompileTimeInferShapeContext(const OpDesc &op, const BlockDesc &block);

  bool HasInput(const std::string &name) const override;

  bool HasOutput(const std::string &name) const override;

  bool HasInputs(const std::string &name) const override;

  bool HasOutputs(const std::string &name) const override;

  DDim GetInputDim(const std::string &name) const override;

  void SetOutputDim(const std::string &name, const DDim &dim) override;

  AttrReader Attrs() const override;

  const std::vector<std::string> &Inputs(
      const std::string &name) const override;

  const std::vector<std::string> &Outputs(
      const std::string &name) const override;

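  // ShareLoD copies the LoD level of the i-th argument of Input(in) to the
  // j-th argument of Output(out); inputs that are not LoDTensor are skipped.
  // A minimal sketch of a typical call from an InferShape function (the slot
  // names "X" and "Out" are illustrative, not from this file):
  //   ctx->ShareLoD("X", "Out");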
  void ShareLoD(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size());
    PADDLE_ENFORCE_LT(j, Outputs(out).size());
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    if (in_var->GetType() != proto::VarDesc::LOD_TENSOR) {
      VLOG(3) << "input " << in << " is not LodTensor";
      return;
    }
    PADDLE_ENFORCE_EQ(out_var->GetType(), proto::VarDesc::LOD_TENSOR,
                      "The %d-th output of Output(%s) must be LoDTensor.", j,
                      out);
    out_var->SetLoDLevel(in_var->GetLoDLevel());
  }

  bool IsRuntime() const override;

 protected:
  proto::VarDesc::VarType GetVarType(const std::string &name) const override;

  DDim GetDim(const std::string &name) const override;

  void SetDim(const std::string &name, const DDim &dim) override;

  const OpDesc &op_;
  const BlockDesc &block_;
};

OpDesc::OpDesc(const std::string &type, const VariableNameMap &inputs,
               const VariableNameMap &outputs, const AttributeMap &attrs) {
  desc_.set_type(type);
  inputs_ = inputs;
  outputs_ = outputs;
  attrs_ = attrs;
  need_update_ = true;
}

void OpDesc::CopyFrom(const OpDesc &op_desc) {
  desc_.set_type(op_desc.Type());
  inputs_ = op_desc.inputs_;
  outputs_ = op_desc.outputs_;
  attrs_ = op_desc.attrs_;
  need_update_ = true;
}

OpDesc::OpDesc(const proto::OpDesc &desc, ProgramDesc *prog, BlockDesc *block)
    : desc_(desc), need_update_(false) {
  // restore inputs_
  int input_size = desc_.inputs_size();
  for (int i = 0; i < input_size; ++i) {
    const proto::OpDesc::Var &var = desc_.inputs(i);
    std::vector<std::string> &args = inputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore outputs_
  int output_size = desc_.outputs_size();
  for (int i = 0; i < output_size; ++i) {
    const proto::OpDesc::Var &var = desc_.outputs(i);
    std::vector<std::string> &args = outputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore attrs_
  for (const proto::OpDesc::Attr &attr : desc_.attrs()) {
    std::string attr_name = attr.name();
    if (attr.type() != proto::AttrType::BLOCK) {
      attrs_[attr_name] = GetAttrValue(attr);
    } else {
      auto bid = attr.block_idx();
      attrs_[attr_name] = prog->MutableBlock(bid);
    }
  }
  this->block_ = block;
}

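// Proto() flushes any pending in-memory changes into desc_ before returning,
// so callers always see a serialized view consistent with the current
// inputs_/outputs_/attrs_ maps.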
proto::OpDesc *OpDesc::Proto() {
  Flush();
  return &desc_;
}

const std::vector<std::string> &OpDesc::Input(const std::string &name) const {
  auto it = inputs_.find(name);
  PADDLE_ENFORCE(it != inputs_.end(), "Input %s cannot be found in Op %s", name,
                 Type());
  return it->second;
}

std::vector<std::string> OpDesc::InputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->inputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

Y
Yu Yang 已提交
157 158
void OpDesc::SetInput(const std::string &param_name,
                      const std::vector<std::string> &args) {
  need_update_ = true;
  inputs_[param_name] = args;
}

const std::vector<std::string> &OpDesc::Output(const std::string &name) const {
  auto it = outputs_.find(name);
  PADDLE_ENFORCE(it != outputs_.end(), "Output %s cannot be found in Op %s",
                 name, Type());
  return it->second;
}

std::vector<std::string> OpDesc::OutputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->outputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetOutput(const std::string &param_name,
                       const std::vector<std::string> &args) {
  need_update_ = true;
  this->outputs_[param_name] = args;
}

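// Attribute is a boost::variant whose zeroth alternative is boost::blank (see
// SetAttrDescVisitor below), so which() - 1 maps the variant index onto the
// corresponding proto::AttrType enum value.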
proto::AttrType OpDesc::GetAttrType(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  return static_cast<proto::AttrType>(it->second.which() - 1);
}

std::vector<std::string> OpDesc::AttrNames() const {
  std::vector<std::string> retv;
  retv.reserve(attrs_.size());
  for (auto &attr : attrs_) {
    retv.push_back(attr.first);
  }
  return retv;
}

void OpDesc::SetAttr(const std::string &name, const Attribute &v) {
  this->attrs_[name] = v;
  need_update_ = true;
}

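// Block attributes are held as BlockDesc pointers in memory and serialized as
// block indices. A hedged round-trip sketch (names are illustrative, not from
// this file):
//   op->SetBlockAttr("sub_block", *block);
//   int idx = op->GetBlockAttr("sub_block");  // equals block->ID()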
void OpDesc::SetBlockAttr(const std::string &name, BlockDesc &block) {
  this->attrs_[name] = &block;
  need_update_ = true;
}

void OpDesc::SetAttrMap(
    const std::unordered_map<std::string, Attribute> &attr_map) {
  attrs_ = attr_map;
  need_update_ = true;
}

Attribute OpDesc::GetAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  return it->second;
}

int OpDesc::GetBlockAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  return boost::get<BlockDesc *>(it->second)->ID();
}

const std::unordered_map<std::string, Attribute> &OpDesc::GetAttrMap() const {
  return attrs_;
}

void OpDesc::Rename(const std::string &old_name, const std::string &new_name) {
  for (auto &input : inputs_) {
    std::replace(input.second.begin(), input.second.end(), old_name, new_name);
  }
  for (auto &output : outputs_) {
    std::replace(output.second.begin(), output.second.end(), old_name,
                 new_name);
  }
  need_update_ = true;
}

void OpDesc::RenameOutput(const std::string &old_name,
                          const std::string &new_name) {
  for (auto &output : outputs_) {
    std::replace(output.second.begin(), output.second.end(), old_name,
                 new_name);
  }
  need_update_ = true;
}

void OpDesc::RenameInput(const std::string &old_name,
                         const std::string &new_name) {
  for (auto &input : inputs_) {
    std::replace(input.second.begin(), input.second.end(), old_name, new_name);
  }
  need_update_ = true;
}

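// SetAttrDescVisitor writes each alternative of the Attribute variant into
// the matching field of a proto::OpDesc::Attr message; it is applied through
// boost::apply_visitor in OpDesc::Flush().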
struct SetAttrDescVisitor : public boost::static_visitor<void> {
  explicit SetAttrDescVisitor(proto::OpDesc::Attr *attr) : attr_(attr) {}
  mutable proto::OpDesc::Attr *attr_;
  void operator()(int v) const { attr_->set_i(v); }
  void operator()(float v) const { attr_->set_f(v); }
  void operator()(const std::string &v) const { attr_->set_s(v); }

  // Please refer to https://github.com/PaddlePaddle/Paddle/issues/7162
  template <class T,
            class = typename std::enable_if<std::is_same<bool, T>::value>::type>
  void operator()(T b) const {
    attr_->set_b(b);
  }

  void operator()(const std::vector<int> &v) const {
    VectorToRepeated(v, attr_->mutable_ints());
  }
  void operator()(const std::vector<float> &v) const {
    VectorToRepeated(v, attr_->mutable_floats());
  }
  void operator()(const std::vector<std::string> &v) const {
    VectorToRepeated(v, attr_->mutable_strings());
  }
  void operator()(const std::vector<bool> &v) const {
    VectorToRepeated(v, attr_->mutable_bools());
  }
  void operator()(BlockDesc *desc) const { attr_->set_block_idx(desc->ID()); }
  void operator()(boost::blank) const { PADDLE_THROW("Unexpected branch"); }
};

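// Flush() rebuilds the inputs, outputs, and attrs of the underlying proto
// message from the in-memory maps. The need_update_ flag makes the rebuild
// lazy: any number of mutations triggers at most one rebuild at the next
// Flush()/Proto() call.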
void OpDesc::Flush() {
  if (need_update_) {
    this->desc_.mutable_inputs()->Clear();
    for (auto &ipt : inputs_) {
      auto *input = desc_.add_inputs();
      input->set_parameter(ipt.first);
      VectorToRepeated(ipt.second, input->mutable_arguments());
    }

    this->desc_.mutable_outputs()->Clear();
    for (auto &opt : outputs_) {
      auto *output = desc_.add_outputs();
      output->set_parameter(opt.first);
      VectorToRepeated(opt.second, output->mutable_arguments());
    }

    this->desc_.mutable_attrs()->Clear();
    for (auto &attr : attrs_) {
      auto *attr_desc = desc_.add_attrs();
      attr_desc->set_name(attr.first);
      attr_desc->set_type(
          static_cast<proto::AttrType>(attr.second.which() - 1));
      SetAttrDescVisitor visitor(attr_desc);
      boost::apply_visitor(visitor, attr.second);
    }

    need_update_ = false;
  }
}

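// Lazily install a default infer_shape_ for every operator that has kernels
// registered but no explicit InferShape function, by creating one instance of
// the operator and delegating to OperatorWithKernel::InferShape.
// std::call_once keeps this registration thread-safe.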
static std::once_flag init_infer_shape_funcs;

static void InitInferShapeFuncs() {
  std::call_once(init_infer_shape_funcs, [] {
    auto &map = OpInfoMap::Instance();
    auto &info_map = *map.mutable_map();

    for (auto &kern_pair : OperatorWithKernel::AllOpKernels()) {
      auto op_type = kern_pair.first;
      auto &op_info = info_map.at(op_type);
      if (op_info.infer_shape_) {  // infer_shape has been registered.
        continue;
      }
      auto op = static_cast<OperatorWithKernel *>(op_info.Creator()(
          "", VariableNameMap{}, VariableNameMap{}, AttributeMap{}));
      op_info.infer_shape_ = [op](InferShapeContext *ctx) {
        op->InferShape(ctx);
      };
    }
  });
}

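// A hedged usage sketch (the operator type and attribute are illustrative,
// not from this file):
//   OpDesc op("scale", inputs, outputs, {{"scale", 2.0f}});
//   op.CheckAttrs();  // validates attrs_ against the registered checker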
void OpDesc::CheckAttrs() {
  PADDLE_ENFORCE(!Type().empty(),
                 "CheckAttrs() cannot be called before type is set.");
  auto *checker = OpInfoMap::Instance().Get(Type()).Checker();
  if (checker == nullptr) {
    // checker is not configured. That operator could be generated by Paddle,
    // not by users.
    return;
  }
  checker->Check(attrs_);
}

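// Compile-time shape inference: look up the operator's infer_shape_ function
// (installing defaults first via InitInferShapeFuncs) and run it over a
// CompileTimeInferShapeContext built from this OpDesc and the given block.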
void OpDesc::InferShape(const BlockDesc &block) const {
  VLOG(3) << "CompileTime infer shape on " << Type();
  InitInferShapeFuncs();
  auto &infer_shape = OpInfoMap::Instance().Get(this->Type()).infer_shape_;
  PADDLE_ENFORCE(static_cast<bool>(infer_shape),
                 "%s's infer_shape has not been registered", this->Type());
  CompileTimeInferShapeContext ctx(*this, block);
  if (VLOG_IS_ON(10)) {
    std::ostringstream sout;
    auto inames = this->InputArgumentNames();
    sout << " From [";
    std::copy(inames.begin(), inames.end(),
              std::ostream_iterator<std::string>(sout, ", "));
    sout << "] to [";
    auto onames = this->OutputArgumentNames();
    std::copy(onames.begin(), onames.end(),
              std::ostream_iterator<std::string>(sout, ", "));
    sout << "]";
    VLOG(10) << sout.str();
  }
  infer_shape(&ctx);
}

void OpDesc::InferVarType(BlockDesc *block) const {
  auto &info = OpInfoMap::Instance().Get(this->Type());
  if (info.infer_var_type_) {
    info.infer_var_type_(*this, block);
  } else {
    // all output type is LoDTensor by default
    VLOG(10) << this->Type()
             << " has not registered InferVarType. Set output variables to "
                "LOD_TENSOR";
    for (auto &out_pair : this->outputs_) {
      for (auto &out_var_name : out_pair.second) {
        block->FindRecursiveOrCreateVar(out_var_name)
            .SetType(proto::VarDesc::LOD_TENSOR);
      }
    }
  }
}

CompileTimeInferShapeContext::CompileTimeInferShapeContext(
    const OpDesc &op, const BlockDesc &block)
    : op_(op), block_(block) {}

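// HasInput/HasOutput require the slot to hold exactly one argument, whereas
// HasInputs/HasOutputs accept multi-argument slots and require every named
// variable to exist somewhere in the block hierarchy.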
bool CompileTimeInferShapeContext::HasInput(const std::string &name) const {
  const std::vector<std::string> &input_names = op_.Input(name);
  auto length = input_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL,
                    "Input(%s) should have only one value, "
                    "but it have %d now",
                    name, length);
  return block_.HasVarRecursive(input_names[0]);
}

bool CompileTimeInferShapeContext::HasOutput(const std::string &name) const {
  const std::vector<std::string> &output_names = op_.Output(name);
  auto length = output_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL,
                    "Output(%s) should have only one value, "
                    "but it have %d now",
                    name, length);
  return block_.HasVarRecursive(output_names[0]);
}

bool CompileTimeInferShapeContext::HasInputs(const std::string &name) const {
  const std::vector<std::string> &input_names = op_.Input(name);
  if (input_names.empty()) {
    return false;
  }
  for (auto &input : input_names) {
    if (!block_.HasVarRecursive(input)) return false;
  }
  return true;
}

bool CompileTimeInferShapeContext::HasOutputs(const std::string &name) const {
  const std::vector<std::string> &output_names = op_.Output(name);
  if (output_names.empty()) {
    return false;
  }
  for (auto &output : output_names) {
    if (!block_.HasVarRecursive(output)) return false;
  }
  return true;
}

DDim CompileTimeInferShapeContext::GetInputDim(const std::string &name) const {
  std::vector<DDim> ddims = GetInputsDim(name);
  auto length = ddims.size();
  PADDLE_ENFORCE_EQ(length, 1UL,
                    "Input(%s) should have 1 value, "
                    "but it has %d now",
                    name, length);
  return ddims[0];
}

void CompileTimeInferShapeContext::SetOutputDim(const std::string &name,
                                                const DDim &dim) {
  SetOutputsDim(name, {dim});
}

AttrReader CompileTimeInferShapeContext::Attrs() const {
  return AttrReader(op_.GetAttrMap());
}

const std::vector<std::string> &CompileTimeInferShapeContext::Inputs(
    const std::string &name) const {
  return op_.Input(name);
}

const std::vector<std::string> &CompileTimeInferShapeContext::Outputs(
    const std::string &name) const {
  return op_.Output(name);
}

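// An empty shape in the VarDesc is mapped to the 1-D dim {0}; errors while
// reading the shape are logged at VLOG(5) and rethrown to the caller.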
DDim CompileTimeInferShapeContext::GetDim(const std::string &name) const {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE(var != nullptr, "Cannot find variable %s", name);
  try {
    auto shape = var->Shape();
    if (shape.empty()) {
      return framework::make_ddim({0UL});
    } else {
      return framework::make_ddim(shape);
    }
  } catch (...) {
    VLOG(5) << "GetDim of variable " << name << " error";
    std::rethrow_exception(std::current_exception());
  }
}

void CompileTimeInferShapeContext::SetDim(const std::string &name,
                                          const DDim &dim) {
  block_.FindVarRecursive(name)->SetShape(framework::vectorize(dim));
}
bool CompileTimeInferShapeContext::IsRuntime() const { return false; }

proto::VarDesc::VarType CompileTimeInferShapeContext::GetVarType(
    const std::string &name) const {
  return block_.FindVarRecursive(name)->GetType();
}

}  // namespace framework
}  // namespace paddle