/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/framework/op_desc.h"
#include <functional>
#include <mutex>
#include <unordered_map>
#include "glog/logging.h"
#include "paddle/framework/block_desc.h"
#include "paddle/framework/operator.h"
#include "paddle/framework/program_desc.h"
#include "paddle/framework/shape_inference.h"

namespace paddle {
namespace framework {

class OpDesc;
class BlockDesc;
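// InferShapeContext implementation used at compile time: dims and var types
// are read from and written back to the VarDescs in `block`, rather than to
// runtime Variables (IsRuntime() returns false).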
class CompileTimeInferShapeContext : public InferShapeContext {
 public:
  CompileTimeInferShapeContext(const OpDesc &op, const BlockDesc &block);

  bool HasInput(const std::string &name) const override;

  bool HasOutput(const std::string &name) const override;

  bool HasInputs(const std::string &name) const override;

  bool HasOutputs(const std::string &name) const override;

  DDim GetInputDim(const std::string &name) const override;

  void SetOutputDim(const std::string &name, const DDim &dim) override;

  AttrReader Attrs() const override;

  const std::vector<std::string> &Inputs(
      const std::string &name) const override;

  const std::vector<std::string> &Outputs(
      const std::string &name) const override;

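  // Copies the LoD level of the i-th argument of Input(in) to the j-th
  // argument of Output(out); inputs that are not LoDTensors are skipped.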
  void ShareLoD(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size());
    PADDLE_ENFORCE_LT(j, Outputs(out).size());
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    if (in_var->GetType() != proto::VarDesc::LOD_TENSOR) {
      VLOG(3) << "input " << in << " is not LoDTensor";
      return;
    }
    PADDLE_ENFORCE_EQ(out_var->GetType(), proto::VarDesc::LOD_TENSOR,
                      "The %d-th output of Output(%s) must be LoDTensor.", j,
                      out);
    out_var->SetLoDLevel(in_var->GetLoDLevel());
  }

  bool IsRuntime() const override;

 protected:
  proto::VarDesc::VarType GetVarType(const std::string &name) const override;

  DDim GetDim(const std::string &name) const override;

  void SetDim(const std::string &name, const DDim &dim) override;

  const OpDesc &op_;
  const BlockDesc &block_;
};

OpDesc::OpDesc(const std::string &type, const VariableNameMap &inputs,
               const VariableNameMap &outputs, const AttributeMap &attrs) {
  desc_.set_type(type);
  inputs_ = inputs;
  outputs_ = outputs;
  attrs_ = attrs;
  need_update_ = true;
}

void OpDesc::CopyFrom(const OpDesc &op_desc) {
  desc_.set_type(op_desc.Type());
  inputs_ = op_desc.inputs_;
  outputs_ = op_desc.outputs_;
  attrs_ = op_desc.attrs_;
  need_update_ = true;
}

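// Rebuilds an OpDesc from its serialized proto::OpDesc: inputs_, outputs_
// and attrs_ are restored from the message, and BLOCK attributes are
// resolved to BlockDesc pointers through the owning ProgramDesc.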
OpDesc::OpDesc(const proto::OpDesc &desc, ProgramDesc *prog)
    : desc_(desc), need_update_(false) {
  // restore inputs_
  int input_size = desc_.inputs_size();
  for (int i = 0; i < input_size; ++i) {
    const proto::OpDesc::Var &var = desc_.inputs(i);
    std::vector<std::string> &args = inputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore outputs_
  int output_size = desc_.outputs_size();
  for (int i = 0; i < output_size; ++i) {
    const proto::OpDesc::Var &var = desc_.outputs(i);
    std::vector<std::string> &args = outputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore attrs_
  for (const proto::OpDesc::Attr &attr : desc_.attrs()) {
    std::string attr_name = attr.name();
    if (attr.type() != proto::AttrType::BLOCK) {
      attrs_[attr_name] = GetAttrValue(attr);
    } else {
      auto bid = attr.block_idx();
      attrs_[attr_name] = prog->MutableBlock(bid);
    }
  }
}

proto::OpDesc *OpDesc::Proto() {
  Flush();
  return &desc_;
}

const std::vector<std::string> &OpDesc::Input(const std::string &name) const {
  auto it = inputs_.find(name);
  PADDLE_ENFORCE(it != inputs_.end(), "Input %s cannot be found in Op %s", name,
                 Type());
  return it->second;
}

std::vector<std::string> OpDesc::InputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->inputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetInput(const std::string &param_name,
                      const std::vector<std::string> &args) {
  need_update_ = true;
  inputs_[param_name] = args;
}

const std::vector<std::string> &OpDesc::Output(const std::string &name) const {
  auto it = outputs_.find(name);
  PADDLE_ENFORCE(it != outputs_.end(), "Output %s cannot be found in Op %s",
                 name, Type());
  return it->second;
}

std::vector<std::string> OpDesc::OutputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->outputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetOutput(const std::string &param_name,
                       const std::vector<std::string> &args) {
  need_update_ = true;
  this->outputs_[param_name] = args;
}

proto::AttrType OpDesc::GetAttrType(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
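  // The Attribute variant reserves index 0 for boost::blank (handled in
  // SetAttrDescVisitor below), so the proto::AttrType value is which() - 1.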
  return static_cast<proto::AttrType>(it->second.which() - 1);
}

std::vector<std::string> OpDesc::AttrNames() const {
  std::vector<std::string> retv;
  retv.reserve(attrs_.size());
  for (auto &attr : attrs_) {
    retv.push_back(attr.first);
  }
  return retv;
}

void OpDesc::SetAttr(const std::string &name, const Attribute &v) {
  this->attrs_[name] = v;
  need_update_ = true;
}

void OpDesc::SetBlockAttr(const std::string &name, BlockDesc &block) {
  this->attrs_[name] = &block;
  need_update_ = true;
}

void OpDesc::SetAttrMap(
    const std::unordered_map<std::string, Attribute> &attr_map) {
  attrs_ = attr_map;
  need_update_ = true;
}

Attribute OpDesc::GetAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  return it->second;
}

int OpDesc::GetBlockAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  return boost::get<BlockDesc *>(it->second)->ID();
}

const std::unordered_map<std::string, Attribute> &OpDesc::GetAttrMap() const {
  return attrs_;
}

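// Replaces every occurrence of old_name with new_name in both the input and
// the output argument lists.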
void OpDesc::Rename(const std::string &old_name, const std::string &new_name) {
  for (auto &input : inputs_) {
    std::replace(input.second.begin(), input.second.end(), old_name, new_name);
  }
  for (auto &output : outputs_) {
    std::replace(output.second.begin(), output.second.end(), old_name,
                 new_name);
  }
  need_update_ = true;
}

void OpDesc::RenameOutput(const std::string &old_name,
                          const std::string &new_name) {
  for (auto &output : outputs_) {
    std::replace(output.second.begin(), output.second.end(), old_name,
                 new_name);
  }
  need_update_ = true;
}

void OpDesc::RenameInput(const std::string &old_name,
                         const std::string &new_name) {
  for (auto &input : inputs_) {
    std::replace(input.second.begin(), input.second.end(), old_name, new_name);
  }
  need_update_ = true;
}

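// Visitor applied via boost::apply_visitor in Flush(): writes each Attribute
// alternative into the matching field of a proto::OpDesc::Attr. The bool
// overload is a template constrained to exactly `bool` so that no other type
// is implicitly converted to it.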
struct SetAttrDescVisitor : public boost::static_visitor<void> {
  explicit SetAttrDescVisitor(proto::OpDesc::Attr *attr) : attr_(attr) {}
  mutable proto::OpDesc::Attr *attr_;
  void operator()(int v) const { attr_->set_i(v); }
  void operator()(float v) const { attr_->set_f(v); }
  void operator()(const std::string &v) const { attr_->set_s(v); }

  // Please refer to https://github.com/PaddlePaddle/Paddle/issues/7162
  template <class T,
            class = typename std::enable_if<std::is_same<bool, T>::value>::type>
  void operator()(T b) const {
    attr_->set_b(b);
  }

  void operator()(const std::vector<int> &v) const {
    VectorToRepeated(v, attr_->mutable_ints());
  }
  void operator()(const std::vector<float> &v) const {
    VectorToRepeated(v, attr_->mutable_floats());
  }
  void operator()(const std::vector<std::string> &v) const {
    VectorToRepeated(v, attr_->mutable_strings());
  }
  void operator()(const std::vector<bool> &v) const {
    VectorToRepeated(v, attr_->mutable_bools());
  }
  void operator()(BlockDesc *desc) const { attr_->set_block_idx(desc->ID()); }
  void operator()(boost::blank) const { PADDLE_THROW("Unexpected branch"); }
};

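// Synchronizes the in-memory inputs_/outputs_/attrs_ maps into the
// underlying protobuf desc_; a no-op unless one of the mutators above has
// set need_update_.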
void OpDesc::Flush() {
  if (need_update_) {
    this->desc_.mutable_inputs()->Clear();
    for (auto &ipt : inputs_) {
      auto *input = desc_.add_inputs();
      input->set_parameter(ipt.first);
      VectorToRepeated(ipt.second, input->mutable_arguments());
    }

    this->desc_.mutable_outputs()->Clear();
    for (auto &opt : outputs_) {
      auto *output = desc_.add_outputs();
      output->set_parameter(opt.first);
      VectorToRepeated(opt.second, output->mutable_arguments());
    }

    this->desc_.mutable_attrs()->Clear();
    for (auto &attr : attrs_) {
      auto *attr_desc = desc_.add_attrs();
      attr_desc->set_name(attr.first);
      attr_desc->set_type(
          static_cast<proto::AttrType>(attr.second.which() - 1));
      SetAttrDescVisitor visitor(attr_desc);
      boost::apply_visitor(visitor, attr.second);
    }

    need_update_ = false;
  }
}

static std::once_flag init_infer_shape_funcs;

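// Lazily, and only once per process, installs a default infer_shape_ functor
// for every operator that has kernels registered but no InferShape function;
// the functor forwards to OperatorWithKernel::InferShape on a dummy instance
// of that operator.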
static void InitInferShapeFuncs() {
  std::call_once(init_infer_shape_funcs, [] {
    auto &map = OpInfoMap::Instance();
    auto &info_map = *map.mutable_map();

    for (auto &kern_pair : OperatorWithKernel::AllOpKernels()) {
      auto op_type = kern_pair.first;
      auto &op_info = info_map.at(op_type);
      if (op_info.infer_shape_) {  // infer_shape has been registered.
        continue;
      }
      auto op = static_cast<OperatorWithKernel *>(op_info.Creator()(
          "", VariableNameMap{}, VariableNameMap{}, AttributeMap{}));
      op_info.infer_shape_ = [op](InferShapeContext *ctx) {
        op->InferShape(ctx);
      };
    }
  });
}

void OpDesc::CheckAttrs() {
  PADDLE_ENFORCE(!Type().empty(),
                 "CheckAttrs() cannot be called before type is set.");
  auto *checker = OpInfoMap::Instance().Get(Type()).Checker();
  if (checker == nullptr) {
    // checker is not configured. That operator could be generated by Paddle,
    // not by users.
    return;
  }
  checker->Check(attrs_);
}

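// Compile-time shape inference: looks up this op type's infer_shape_ functor
// and runs it on a CompileTimeInferShapeContext built from *this and block.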
void OpDesc::InferShape(const BlockDesc &block) const {
  VLOG(3) << "CompileTime infer shape on " << Type();
  InitInferShapeFuncs();
  auto &infer_shape = OpInfoMap::Instance().Get(this->Type()).infer_shape_;
  PADDLE_ENFORCE(static_cast<bool>(infer_shape),
                 "%s's infer_shape has not been registered", this->Type());
  CompileTimeInferShapeContext ctx(*this, block);
  if (VLOG_IS_ON(10)) {
    std::ostringstream sout;
    auto inames = this->InputArgumentNames();
    sout << " From [";
    std::copy(inames.begin(), inames.end(),
              std::ostream_iterator<std::string>(sout, ", "));
    sout << "] to [";
    auto onames = this->OutputArgumentNames();
    std::copy(onames.begin(), onames.end(),
              std::ostream_iterator<std::string>(sout, ", "));
    sout << "]";
    VLOG(10) << sout.str();
  }
  infer_shape(&ctx);
}

void OpDesc::InferVarType(BlockDesc *block) const {
  auto &info = OpInfoMap::Instance().Get(this->Type());
  if (info.infer_var_type_) {
    info.infer_var_type_(*this, block);
  } else {
    // all output type is LoDTensor by default
    VLOG(10) << this->Type()
             << " has not registered InferVarType. Set output variables to "
                "LOD_TENSOR";
    for (auto &out_pair : this->outputs_) {
      for (auto &out_var_name : out_pair.second) {
        block->FindRecursiveOrCreateVar(out_var_name)
            ->SetType(proto::VarDesc::LOD_TENSOR);
      }
    }
  }
}

CompileTimeInferShapeContext::CompileTimeInferShapeContext(
    const OpDesc &op, const BlockDesc &block)
    : op_(op), block_(block) {}

bool CompileTimeInferShapeContext::HasInput(const std::string &name) const {
  const std::vector<std::string> &input_names = op_.Input(name);
  auto length = input_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL,
                    "Input(%s) should have only one value, "
                    "but it has %d now",
                    name, length);
  return block_.HasVarRecursive(input_names[0]);
}

bool CompileTimeInferShapeContext::HasOutput(const std::string &name) const {
  const std::vector<std::string> &output_names = op_.Output(name);
  auto length = output_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL,
                    "Output(%s) should have only one value, "
                    "but it has %d now",
                    name, length);
  return block_.HasVarRecursive(output_names[0]);
}

bool CompileTimeInferShapeContext::HasInputs(const std::string &name) const {
  const std::vector<std::string> &input_names = op_.Input(name);
  if (input_names.empty()) {
    return false;
  }
  for (auto &input : input_names) {
    if (!block_.HasVarRecursive(input)) return false;
  }
  return true;
}

bool CompileTimeInferShapeContext::HasOutputs(const std::string &name) const {
  const std::vector<std::string> &output_names = op_.Output(name);
  if (output_names.empty()) {
    return false;
  }
  for (auto &output : output_names) {
    if (!block_.HasVarRecursive(output)) return false;
  }
  return true;
}

DDim CompileTimeInferShapeContext::GetInputDim(const std::string &name) const {
  std::vector<DDim> ddims = GetInputsDim(name);
  auto length = ddims.size();
  PADDLE_ENFORCE_EQ(length, 1UL,
                    "Input(%s) should have 1 value, "
                    "but it has %d now",
                    name, length);
  return ddims[0];
}

void CompileTimeInferShapeContext::SetOutputDim(const std::string &name,
                                                const DDim &dim) {
  SetOutputsDim(name, {dim});
}

AttrReader CompileTimeInferShapeContext::Attrs() const {
  return AttrReader(op_.GetAttrMap());
}

const std::vector<std::string> &CompileTimeInferShapeContext::Inputs(
    const std::string &name) const {
  return op_.Input(name);
}

const std::vector<std::string> &CompileTimeInferShapeContext::Outputs(
    const std::string &name) const {
  return op_.Output(name);
}

DDim CompileTimeInferShapeContext::GetDim(const std::string &name) const {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE(var != nullptr, "Cannot find variable %s", name);
  try {
    auto shape = var->Shape();
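    // A variable whose shape has not been set yet is reported as a 1-D
    // dimension of size 0.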
    if (shape.empty()) {
      return framework::make_ddim({0UL});
    } else {
      return framework::make_ddim(var->Shape());
    }
  } catch (...) {
    VLOG(5) << "GetDim of variable " << name << " error";
    std::rethrow_exception(std::current_exception());
  }
}

void CompileTimeInferShapeContext::SetDim(const std::string &name,
                                          const DDim &dim) {
  block_.FindVarRecursive(name)->SetShape(framework::vectorize(dim));
}
bool CompileTimeInferShapeContext::IsRuntime() const { return false; }

proto::VarDesc::VarType CompileTimeInferShapeContext::GetVarType(
    const std::string &name) const {
  return block_.FindVarRecursive(name)->GetType();
}

}  // namespace framework
}  // namespace paddle