/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/op_desc.h"

#include <string>

#include "glog/logging.h"
#include "paddle/fluid/framework/block_desc.h"
#include "paddle/fluid/framework/op_call_stack.h"
#include "paddle/fluid/framework/op_proto_maker.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/framework/shape_inference.h"
#include "paddle/fluid/framework/var_type_inference.h"

namespace paddle {
namespace framework {

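// Compile-time implementation of InferShapeContext: shape inference runs
// against the static program description (OpDesc/BlockDesc) rather than
// runtime variables, so all dims are read from and written to VarDesc.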
class CompileTimeInferShapeContext : public InferShapeContext {
 public:
  CompileTimeInferShapeContext(const OpDesc &op, const BlockDesc &block);

  bool HasInput(const std::string &name) const override;

  bool HasOutput(const std::string &name) const override;

  bool HasInputs(const std::string &name) const override;

  bool HasOutputs(const std::string &name) const override;

  AttrReader Attrs() const override;

  std::vector<std::string> Inputs(const std::string &name) const override;

  std::vector<std::string> Outputs(const std::string &name) const override;

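  // Map a positional index to the parameter name declared in this op's
  // OpProto, so callers can address inputs/outputs by position.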
  std::string GetInputNameByIdx(size_t idx) const override {
    auto &op_proto =
        paddle::framework::OpInfoMap::Instance().Get(op_.Type()).proto_;
    PADDLE_ENFORCE_LT(idx, op_proto->inputs().size(),
                      platform::errors::OutOfRange(
                          "The index should be less than the size of inputs of "
                          "operator %s, but got index is %d and size is %d",
                          op_.Type(), idx, op_proto->inputs().size()));
    return op_proto->inputs()[idx].name();
  }

  std::string GetOutputNameByIdx(size_t idx) const override {
    auto &op_proto =
        paddle::framework::OpInfoMap::Instance().Get(op_.Type()).proto_;
    PADDLE_ENFORCE_LT(
        idx, op_proto->outputs().size(),
        platform::errors::OutOfRange(
            "The index should be less than the size of outputs of "
            "operator %s, but got index is %d and size is %d",
            op_.Type(), idx, op_proto->outputs().size()));
    return op_proto->outputs()[idx].name();
  }

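  // Share the dims of input in[i] with output out[j]. Both variables must
  // already exist in the block and have the same variable type.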
  void ShareDim(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size(),
                      platform::errors::InvalidArgument(
                          "The input variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Inputs(in).size(), i));
    PADDLE_ENFORCE_LT(j, Outputs(out).size(),
                      platform::errors::InvalidArgument(
                          "The output variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Outputs(out).size(), j));

    std::string input_n = Inputs(in)[i];
    std::string output_n = Outputs(out)[j];

    PADDLE_ENFORCE_NE(input_n, framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The input variable %s[%d] is empty.", in, i));
    PADDLE_ENFORCE_NE(output_n, framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The output variable %s[%d] is empty.", out, j));

    auto *in_var = block_.FindVarRecursive(input_n);
    auto *out_var = block_.FindVarRecursive(output_n);

    PADDLE_ENFORCE_EQ(
        in_var->GetType(), out_var->GetType(),
        platform::errors::InvalidArgument(
            "The type of input %s and output %s do not match. The input type "
            "is %s, output type is %s.",
            input_n, output_n, DataTypeToString(in_var->GetType()),
            DataTypeToString(out_var->GetType())));

    SetDim(output_n, GetDim(input_n));
  }

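  // Share the LoD level of every input variable of parameter `in` with the
  // corresponding output variable of parameter `out`, skipping empty names.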
  void ShareAllLoD(const std::string &in,
                   const std::string &out) const override {
    auto &in_var_names = op_.Input(in);
    auto &out_var_names = op_.Output(out);

    PADDLE_ENFORCE_EQ(
        in_var_names.size(), out_var_names.size(),
        platform::errors::PreconditionNotMet(
            "Op [%s]: Input var number should be equal to output var number",
            op_.Type()));

    for (size_t i = 0; i < in_var_names.size(); ++i) {
      if (out_var_names[i] == framework::kEmptyVarName) {
        continue;
      }

      auto *in_var = block_.FindVarRecursive(in_var_names[i]);
      auto *out_var = block_.FindVarRecursive(out_var_names[i]);
      if (in_var->GetType() != proto::VarType::LOD_TENSOR &&
          in_var->GetType() != proto::VarType::LOD_TENSOR_ARRAY) {
        VLOG(3) << "input " << in << " is not LoDTensor or LoDTensorArray.";
        return;
      }
      out_var->SetLoDLevel(in_var->GetLoDLevel());
    }
  }

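  // Share the LoD level of input in[i] with output out[j]; a no-op for
  // variables that are neither LoDTensor nor LoDTensorArray.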
  void ShareLoD(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size(),
                      platform::errors::InvalidArgument(
                          "The input variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Inputs(in).size(), i));
    PADDLE_ENFORCE_LT(j, Outputs(out).size(),
                      platform::errors::InvalidArgument(
                          "The output variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Outputs(out).size(), j));
    PADDLE_ENFORCE_NE(Inputs(in)[i], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The input variable %s[%d] is empty.", in, i));
    PADDLE_ENFORCE_NE(Outputs(out)[j], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The output variable %s[%d] is empty.", out, j));
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    if (in_var->GetType() != proto::VarType::LOD_TENSOR &&
        in_var->GetType() != proto::VarType::LOD_TENSOR_ARRAY) {
      VLOG(3) << "input " << in << " is not LoDTensor or LoDTensorArray.";
      return;
    }
    out_var->SetLoDLevel(in_var->GetLoDLevel());
  }

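  // Compile-time LoD level accessors: both read and write the LoD level
  // recorded on the VarDesc found recursively in the enclosing blocks.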
  int32_t GetLoDLevel(const std::string &in, size_t i = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size(),
                      platform::errors::InvalidArgument(
                          "The input variable index is out of range, input "
                          "variable %s of operator %s only has %d elements.",
                          in, op_.Type(), Inputs(in).size()));
    PADDLE_ENFORCE_NE(Inputs(in)[i], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The input variable %s[%d] of operator %s is empty.",
                          in, i, op_.Type()));
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    PADDLE_ENFORCE_NOT_NULL(
        in_var, platform::errors::NotFound(
                    "The input variable %s[%d] of operator %s is not found.",
                    in, i, op_.Type()));
    return in_var->GetLoDLevel();
  }

  void SetLoDLevel(const std::string &out, int32_t lod_level,
                   size_t j = 0) const override {
    PADDLE_ENFORCE_LT(j, Outputs(out).size(),
                      platform::errors::InvalidArgument(
                          "The output variable index is out of range, output "
                          "variable %s of operator %s only has %d elements.",
                          out, op_.Type(), Outputs(out).size()));
    PADDLE_ENFORCE_NE(Outputs(out)[j], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The output variable %s[%d] of operator %s is empty.",
                          out, j, op_.Type()));
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    PADDLE_ENFORCE_NOT_NULL(
        out_var, platform::errors::NotFound(
                     "The output variable %s[%d] of operator %s is not found.",
                     out, j, op_.Type()));
    if (lod_level >= 0) {
      out_var->SetLoDLevel(lod_level);
    }
  }

  std::vector<InferShapeVarPtr> GetInputVarPtrs(
      const std::string &name) const override {
    const std::vector<std::string> arg_names = Inputs(name);
    std::vector<InferShapeVarPtr> res;
    res.reserve(arg_names.size());
    std::transform(arg_names.begin(), arg_names.end(), std::back_inserter(res),
                   [this](const std::string &name) {
                     return block_.FindVarRecursive(name);
                   });
    return res;
  }

  std::vector<InferShapeVarPtr> GetOutputVarPtrs(
      const std::string &name) const override {
    const std::vector<std::string> arg_names = Outputs(name);
    std::vector<InferShapeVarPtr> res;
    res.reserve(arg_names.size());
    std::transform(arg_names.begin(), arg_names.end(), std::back_inserter(res),
                   [this](const std::string &name) {
                     return block_.FindVarRecursive(name);
                   });
    return res;
  }

  DDim GetInputDim(const std::string &name) const override {
    const std::vector<std::string> &arg_names = Inputs(name);
    PADDLE_ENFORCE_EQ(arg_names.size(), 1UL,
                      platform::errors::InvalidArgument(
                          "The input(%s) should hold only one element, but now "
                          "it holds %d elements.",
                          name, arg_names.size()));
    return this->GetDim(arg_names[0]);
  }

  std::vector<DDim> GetInputsDim(const std::string &name) const override {
    const std::vector<std::string> &arg_names = Inputs(name);
    return GetDims(arg_names);
  }

  bool IsRuntime() const override;

  std::vector<proto::VarType::Type> GetInputsVarType(
      const std::string &name) const override {
    return GetVarTypes(Inputs(name));
  }

  std::vector<proto::VarType::Type> GetOutputsVarType(
      const std::string &name) const override {
    return GetVarTypes(Outputs(name));
  }

  void SetOutputDim(const std::string &name, const DDim &dim) override {
    auto arg_names = Outputs(name);
    PADDLE_ENFORCE_EQ(arg_names.size(), 1UL,
                      platform::errors::InvalidArgument(
                          "The output(%s) should hold only one element, but "
                          "now it holds %d elements.",
                          name, arg_names.size()));
    SetDim(arg_names[0], dim);
  }

  void SetOutputsDim(const std::string &name,
                     const std::vector<DDim> &dims) override {
    auto names = Outputs(name);
    SetDims(names, dims);
  }

 protected:
  std::vector<proto::VarType::Type> GetVarTypes(
      const std::vector<std::string> &names) const {
    std::vector<proto::VarType::Type> retv;
    retv.resize(names.size());
    std::transform(
        names.begin(), names.end(), retv.begin(),
        std::bind(std::mem_fn(&CompileTimeInferShapeContext::GetVarType), this,
                  std::placeholders::_1));
    return retv;
  }

  proto::VarType::Type GetVarType(const std::string &name) const;

  DDim GetDim(const std::string &name) const {
    auto var = block_.FindVarRecursive(name);
    PADDLE_ENFORCE_NOT_NULL(
        var, platform::errors::NotFound("Variable %s is not found.", name));
    DDim res;
    try {
      auto shape = var->GetShape();
      res = shape.empty() ? make_ddim({0UL}) : make_ddim(shape);
    } catch (...) {
      VLOG(5) << "GetDim of variable " << name << " error";
      std::rethrow_exception(std::current_exception());
    }
    return res;
  }

  std::vector<DDim> GetDims(const std::vector<std::string> &names) const {
    std::vector<DDim> ret;
    ret.reserve(names.size());
    std::transform(
        names.begin(), names.end(), std::back_inserter(ret),
        [this](const std::string &name) { return this->GetDim(name); });
    return ret;
  }
  void SetDim(const std::string &name, const DDim &dim);

  void SetDims(const std::vector<std::string> &names,
               const std::vector<DDim> &dims) {
    size_t length = names.size();
    PADDLE_ENFORCE_EQ(length, dims.size(),
                      platform::errors::InvalidArgument(
                          "The input variables number(%d) and input dimensions "
                          "number(%d) do not match.",
                          length, dims.size()));
    for (size_t i = 0; i < length; ++i) {
      if (names[i] == framework::kEmptyVarName) {
        continue;
      }
      SetDim(names[i], dims[i]);
    }
  }

  std::vector<DDim> GetRepeatedDims(const std::string &name) const override;

  void SetRepeatedDims(const std::string &name,
                       const std::vector<DDim> &dims) override;

  const OpDesc &op_;
  const BlockDesc &block_;
};

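// A minimal construction sketch (op type and variable names below are
// illustrative, not taken from this file):
//
//   OpDesc op("mul", /*inputs=*/{{"X", {"x"}}, {"Y", {"y"}}},
//             /*outputs=*/{{"Out", {"out"}}}, /*attrs=*/{});
//   op.CheckAttrs();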
OpDesc::OpDesc(const std::string &type, const VariableNameMap &inputs,
               const VariableNameMap &outputs, const AttributeMap &attrs) {
  desc_.set_type(type);
  inputs_ = inputs;
  outputs_ = outputs;
  attrs_ = attrs;
  need_update_ = true;
  block_ = nullptr;
}

OpDesc::OpDesc(const OpDesc &other, BlockDesc *block) {
  CopyFrom(other);
  block_ = block;
  need_update_ = true;
}

void OpDesc::CopyFrom(const OpDesc &op_desc) {
  desc_.set_type(op_desc.Type());
  inputs_ = op_desc.inputs_;
  outputs_ = op_desc.outputs_;
  attrs_ = op_desc.attrs_;
  // The record of original_id_ is only for auto parallel.
  original_id_ = op_desc.original_id_;
  need_update_ = true;
}

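// Deserializing constructor: rebuilds the in-memory inputs_/outputs_/attrs_
// maps from a serialized proto::OpDesc.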
OpDesc::OpDesc(const proto::OpDesc &desc, BlockDesc *block)
    : desc_(desc), need_update_(false) {
  // restore inputs_
  int input_size = desc_.inputs_size();
  for (int i = 0; i < input_size; ++i) {
    const proto::OpDesc::Var &var = desc_.inputs(i);
    std::vector<std::string> &args = inputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore outputs_
  int output_size = desc_.outputs_size();
  for (int i = 0; i < output_size; ++i) {
    const proto::OpDesc::Var &var = desc_.outputs(i);
    std::vector<std::string> &args = outputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore attrs_
  for (const proto::OpDesc::Attr &attr : desc_.attrs()) {
    std::string attr_name = attr.name();
    // The sub_block referred to by the BLOCK attr hasn't been added
    // to ProgramDesc class yet, we skip setting BLOCK/BLOCKS attr here.
    if (attr.type() != proto::AttrType::BLOCK &&
        attr.type() != proto::AttrType::BLOCKS) {
      attrs_[attr_name] = GetAttrValue(attr);
    }
  }
  this->block_ = block;
}

proto::OpDesc *OpDesc::Proto() {
  Flush();
  return &desc_;
}

const std::vector<std::string> &OpDesc::Input(const std::string &name) const {
  auto it = inputs_.find(name);
  PADDLE_ENFORCE_NE(
      it, inputs_.end(),
      platform::errors::NotFound("Input %s cannot be found in operator %s.",
                                 name, Type()));
  return it->second;
}

std::vector<std::string> OpDesc::InputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->inputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetInput(const std::string &param_name,
                      const std::vector<std::string> &args) {
  need_update_ = true;
  inputs_[param_name] = args;
}

const std::vector<std::string> &OpDesc::Output(const std::string &name) const {
  auto it = outputs_.find(name);
  PADDLE_ENFORCE_NE(
      it, outputs_.end(),
      platform::errors::NotFound("Output %s cannot be found in operator %s.",
                                 name, Type()));
  return it->second;
}

bool OpDesc::HasOutput(const std::string &name) const {
  return outputs_.find(name) != outputs_.end();
}

std::vector<std::string> OpDesc::OutputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->outputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetOutput(const std::string &param_name,
                       const std::vector<std::string> &args) {
  need_update_ = true;
  this->outputs_[param_name] = args;
}

void OpDesc::RemoveOutput(const std::string &name) {
  outputs_.erase(name);
  need_update_ = true;
}

void OpDesc::RemoveInput(const std::string &name) {
  inputs_.erase(name);
  need_update_ = true;
}

bool OpDesc::HasProtoAttr(const std::string &name) const {
  auto &op_info = OpInfoMap::Instance();
  if (op_info.Has(desc_.type())) {
    auto op_info_ptr = op_info.Get(desc_.type());
    if (op_info_ptr.HasOpProtoAndChecker()) {
      const proto::OpProto &proto = op_info_ptr.Proto();
      for (int i = 0; i != proto.attrs_size(); ++i) {
        const proto::OpProto::Attr &attr = proto.attrs(i);
        if (attr.name() == name) {
          return true;
        }
      }
    }
  }
  return false;
}

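// Note: Attribute is a variant whose first alternative is boost::blank, so
// the proto::AttrType value corresponds to the variant index minus one.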
proto::AttrType OpDesc::GetAttrType(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(it, attrs_.end(), platform::errors::NotFound(
                                          "Attribute %s is not found.", name));
  return static_cast<proto::AttrType>(it->second.which() - 1);
}

std::vector<std::string> OpDesc::AttrNames() const {
  std::vector<std::string> retv;
  retv.reserve(attrs_.size());
  for (auto &attr : attrs_) {
    retv.push_back(attr.first);
  }
  return retv;
}

void OpDesc::RemoveAttr(const std::string &name) {
  attrs_.erase(name);
  need_update_ = true;
}

void OpDesc::SetAttr(const std::string &name, const Attribute &v) {
  // NOTICE(minqiyang): pybind11 will take the empty list in python as
  // the std::vector<int> type in C++; so we have to change the attr's type
  // here if we meet this issue
  proto::AttrType attr_type = static_cast<proto::AttrType>(v.which() - 1);
  if (attr_type == proto::AttrType::INTS &&
      BOOST_GET_CONST(std::vector<int>, v).size() == 0u) {
    // Find current attr via attr name and set the correct attribute value
    const proto::OpProto::Attr &attr = GetProtoAttr(name);
    switch (attr.type()) {
      case proto::AttrType::BOOLEANS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to BOOLEANS";
        this->attrs_[name] = std::vector<bool>();
        break;
      }
      case proto::AttrType::INTS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to INTS";
        this->attrs_[name] = std::vector<int>();
        break;
      }
      case proto::AttrType::LONGS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from LONGS to LONGS";
        this->attrs_[name] = std::vector<int64_t>();
        break;
      }
      case proto::AttrType::FLOATS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to FLOATS";
        this->attrs_[name] = std::vector<float>();
        break;
      }
      case proto::AttrType::STRINGS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to STRINGS";
        this->attrs_[name] = std::vector<std::string>();
        break;
      }
      case proto::AttrType::BLOCKS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to BLOCKS";
        this->SetBlocksAttr(name, std::vector<BlockDesc *>());
        return;
      }
      default:
        PADDLE_THROW(platform::errors::Unimplemented(
            "Unsupported attribute type (code %d).", attr.type()));
    }
    need_update_ = true;
    return;
  }

  // In order to set bool attr properly
  if (attr_type == proto::AttrType::INT && HasProtoAttr(name) &&
      GetProtoAttr(name).type() == proto::AttrType::BOOLEAN) {
    this->attrs_[name] = static_cast<bool>(BOOST_GET_CONST(int, v));
    need_update_ = true;
    return;
  }

  this->attrs_[name] = v;
  need_update_ = true;
}

void OpDesc::SetBlockAttr(const std::string &name, BlockDesc *block) {
  this->attrs_[name] = block;
  need_update_ = true;
}

void OpDesc::SetBlocksAttr(const std::string &name,
                           std::vector<BlockDesc *> blocks) {
  this->attrs_[name] = blocks;
  need_update_ = true;
}

void OpDesc::SetAttrMap(
    const std::unordered_map<std::string, Attribute> &attr_map) {
  attrs_ = attr_map;
  need_update_ = true;
}

Attribute OpDesc::GetAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(it, attrs_.end(), platform::errors::NotFound(
                                          "Attribute %s is not found.", name));
  return it->second;
}

const proto::OpProto::Attr &OpDesc::GetProtoAttr(
    const std::string &name) const {
  const proto::OpProto &proto = OpInfoMap::Instance().Get(Type()).Proto();
  for (int i = 0; i != proto.attrs_size(); ++i) {
    const proto::OpProto::Attr &attr = proto.attrs(i);
    if (attr.name() == name) {
      return attr;
    }
  }

  PADDLE_THROW(platform::errors::NotFound(
      "Attribute %s is not found in proto %s.", name, proto.type()));
}

Attribute OpDesc::GetNullableAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  if (it != attrs_.end()) {
    return it->second;
  } else {
    return Attribute();
  }
}

std::vector<int> OpDesc::GetBlocksAttrIds(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(
      it, attrs_.end(),
      platform::errors::NotFound(
          "Attribute `%s` is not found in operator `%s`.", name, desc_.type()));
  auto blocks = BOOST_GET_CONST(std::vector<BlockDesc *>, it->second);

  std::vector<int> ids;
  for (auto n : blocks) {
    ids.push_back(n->ID());
  }

  return ids;
}

int OpDesc::GetBlockAttrId(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(
      it, attrs_.end(),
      platform::errors::NotFound(
          "Attribute `%s` is not found in operator `%s`.", name, desc_.type()));
  return BOOST_GET_CONST(BlockDesc *, it->second)->ID();
}

const std::unordered_map<std::string, Attribute> &OpDesc::GetAttrMap() const {
  return attrs_;
}

void OpDesc::Rename(const std::string &old_name, const std::string &new_name) {
  RenameInput(old_name, new_name);
  RenameOutput(old_name, new_name);
  need_update_ = true;
}

void OpDesc::RenameOutput(const std::string &old_name,
                          const std::string &new_name) {
  for (auto &output : outputs_) {
    std::replace(output.second.begin(), output.second.end(), old_name,
                 new_name);
  }

  auto it = attrs_.find(framework::OpProtoAndCheckerMaker::OpRoleVarAttrName());
  if (it != attrs_.end()) {
    auto &op_vars = BOOST_GET(std::vector<std::string>, it->second);
    std::replace(op_vars.begin(), op_vars.end(), old_name, new_name);
  }

  need_update_ = true;
}

void OpDesc::RenameInput(const std::string &old_name,
                         const std::string &new_name) {
  for (auto &input : inputs_) {
    std::replace(input.second.begin(), input.second.end(), old_name, new_name);
  }

  auto it = attrs_.find(framework::OpProtoAndCheckerMaker::OpRoleVarAttrName());
  if (it != attrs_.end()) {
    auto &op_vars = BOOST_GET(std::vector<std::string>, it->second);
    std::replace(op_vars.begin(), op_vars.end(), old_name, new_name);
  }

  need_update_ = true;
}

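// Visitor that serializes one Attribute variant alternative into the
// matching field of a proto::OpDesc::Attr message.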
struct SetAttrDescVisitor : public boost::static_visitor<void> {
  explicit SetAttrDescVisitor(proto::OpDesc::Attr *attr) : attr_(attr) {}
  mutable proto::OpDesc::Attr *attr_;
  void operator()(int v) const { attr_->set_i(v); }
  void operator()(float v) const { attr_->set_f(v); }
  void operator()(const std::string &v) const { attr_->set_s(v); }

  // Please refer to https://github.com/PaddlePaddle/Paddle/issues/7162
  template <class T,
            class = typename std::enable_if<std::is_same<bool, T>::value>::type>
  void operator()(T b) const {
    attr_->set_b(b);
  }

  void operator()(const std::vector<int> &v) const {
    VectorToRepeated(v, attr_->mutable_ints());
  }
  void operator()(const std::vector<float> &v) const {
    VectorToRepeated(v, attr_->mutable_floats());
  }
  void operator()(const std::vector<std::string> &v) const {
    VectorToRepeated(v, attr_->mutable_strings());
  }
  void operator()(const std::vector<bool> &v) const {
    VectorToRepeated(v, attr_->mutable_bools());
  }
  void operator()(const std::vector<BlockDesc *> &v) const {
    std::vector<int> blocks_idx;
    for (auto blk : v) {
      blocks_idx.push_back(blk->ID());
    }
    VectorToRepeated(blocks_idx, attr_->mutable_blocks_idx());
  }

  void operator()(BlockDesc *desc) const { attr_->set_block_idx(desc->ID()); }

  void operator()(int64_t v) const { attr_->set_l(v); }

  void operator()(const std::vector<int64_t> &v) const {
    VectorToRepeated(v, attr_->mutable_longs());
  }

  void operator()(const std::vector<double> &v) const {
    VectorToRepeated(v, attr_->mutable_float64s());
  }

  void operator()(boost::blank) const {
    PADDLE_THROW(platform::errors::Unavailable(
        "Unsupported calling method of SetAttrDescVisitor object for "
        "`boost::blank` type."));
  }
};

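// Flush() synchronizes the cached inputs_/outputs_/attrs_ maps back into the
// underlying protobuf desc_; it does nothing unless need_update_ is set.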
void OpDesc::Flush() {
  if (need_update_) {
    this->desc_.mutable_inputs()->Clear();
    for (auto &ipt : inputs_) {
      auto *input = desc_.add_inputs();
      input->set_parameter(ipt.first);
      VectorToRepeated(ipt.second, input->mutable_arguments());
    }

    this->desc_.mutable_outputs()->Clear();
    for (auto &opt : outputs_) {
      auto *output = desc_.add_outputs();
      output->set_parameter(opt.first);
      VectorToRepeated(opt.second, output->mutable_arguments());
    }

    this->desc_.mutable_attrs()->Clear();
    for (auto &attr : attrs_) {
      auto *attr_desc = desc_.add_attrs();
      attr_desc->set_name(attr.first);
      attr_desc->set_type(
          static_cast<proto::AttrType>(attr.second.which() - 1));
      SetAttrDescVisitor visitor(attr_desc);
      boost::apply_visitor(visitor, attr.second);
    }

    need_update_ = false;
  }
}

void OpDesc::CheckAttrs() {
  PADDLE_ENFORCE_EQ(Type().empty(), false,
                    platform::errors::PreconditionNotMet(
                        "CheckAttrs() can not be called before type is set."));
  auto *checker = OpInfoMap::Instance().Get(Type()).Checker();
  if (checker == nullptr) {
    // checker is not configured. That operator could be generated by Paddle,
    // not by users.
    return;
  }
  VLOG(10) << "begin to check attribute of " << Type();
  checker->Check(&attrs_);
}

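// Run the compile-time infer-shape function registered for this op type,
// appending an op hint to any EnforceNotMet error that escapes it.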
void OpDesc::InferShape(const BlockDesc &block) const {
  try {
    VLOG(3) << "CompileTime infer shape on " << Type();
    auto &infer_shape = OpInfoMap::Instance().Get(this->Type()).infer_shape_;
    PADDLE_ENFORCE_EQ(
        static_cast<bool>(infer_shape), true,
        platform::errors::NotFound(
            "Operator %s's infer_shape is not registered.", this->Type()));
    CompileTimeInferShapeContext ctx(*this, block);
    if (VLOG_IS_ON(10)) {
      std::ostringstream sout;
      auto inames = this->InputArgumentNames();
      sout << " From [";
      std::copy(inames.begin(), inames.end(),
                std::ostream_iterator<std::string>(sout, ", "));
      sout << "] to [";
      auto onames = this->OutputArgumentNames();
      std::copy(onames.begin(), onames.end(),
                std::ostream_iterator<std::string>(sout, ", "));
      sout << "]";
      VLOG(10) << sout.str();
    }
    infer_shape(&ctx);
  } catch (platform::EnforceNotMet &exception) {
    framework::AppendErrorOpHint(Type(), &exception);
    throw std::move(exception);
  } catch (...) {
    std::rethrow_exception(std::current_exception());
  }
}

void OpDesc::InferVarType(BlockDesc *block) const {
  // There are a few places that var type can be set.
  // When VarDesc is created, default set to LOD_TENSOR.
  // When an output variable is created, it is default set to LOD_TENSOR.
  // We limit here to be the only place that operator defines its customized
  // var type inference. Hence, we don't do any "default" setting here.
  auto &info = OpInfoMap::Instance().Get(this->Type());
  if (info.infer_var_type_) {
    InferVarTypeContext context(this, block);
    info.infer_var_type_(&context);
  }
}

CompileTimeInferShapeContext::CompileTimeInferShapeContext(
    const OpDesc &op, const BlockDesc &block)
    : op_(op), block_(block) {}

bool CompileTimeInferShapeContext::HasInput(const std::string &name) const {
  if (op_.Inputs().find(name) == op_.Inputs().end()) {
    return false;
  }
  const std::vector<std::string> &input_names = op_.Input(name);
  auto length = input_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL, platform::errors::InvalidArgument(
                                     "Input(%s) should have only one value, "
                                     "but it has %d values now.",
                                     name, length));
  return block_.HasVarRecursive(input_names[0]);
}

bool CompileTimeInferShapeContext::HasOutput(const std::string &name) const {
  if (op_.Outputs().find(name) == op_.Outputs().end()) {
    return false;
  }
  const std::vector<std::string> &output_names = op_.Output(name);
  auto length = output_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL, platform::errors::InvalidArgument(
                                     "Output(%s) should have only one value, "
                                     "but it has %d values now.",
                                     name, length));
  return block_.HasVarRecursive(output_names[0]);
}

bool CompileTimeInferShapeContext::HasInputs(const std::string &name) const {
  if (op_.Inputs().find(name) == op_.Inputs().end()) {
    return false;
  }
  const std::vector<std::string> &input_names = op_.Input(name);
  if (input_names.empty()) {
    return false;
  }
  for (auto &input : input_names) {
    if (!block_.HasVarRecursive(input)) return false;
  }
  return true;
}

bool CompileTimeInferShapeContext::HasOutputs(const std::string &name) const {
  if (op_.Outputs().find(name) == op_.Outputs().end()) {
    return false;
  }
  const std::vector<std::string> &output_names = op_.Output(name);
  if (output_names.empty()) {
    return false;
  }
  for (auto &output : output_names) {
    if (!block_.HasVarRecursive(output)) return false;
  }
  return true;
}

AttrReader CompileTimeInferShapeContext::Attrs() const {
  return AttrReader(op_.GetAttrMap());
}

std::vector<std::string> CompileTimeInferShapeContext::Inputs(
    const std::string &name) const {
  return op_.Input(name);
}

std::vector<std::string> CompileTimeInferShapeContext::Outputs(
    const std::string &name) const {
  return op_.Output(name);
}

std::vector<DDim> CompileTimeInferShapeContext::GetRepeatedDims(
    const std::string &name) const {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE_NOT_NULL(
      var, platform::errors::NotFound("Variable %s is not found.", name));
  std::vector<DDim> res;
  try {
    auto shapes = var->GetShapes();
    for (const auto &s : shapes) {
      res.push_back(s.empty() ? make_ddim({0UL}) : make_ddim(s));
    }
  } catch (...) {
    VLOG(5) << "GetRepeatedDim of variable " << name << " error.";
    std::rethrow_exception(std::current_exception());
  }
  return res;
}

void CompileTimeInferShapeContext::SetDim(const std::string &name,
                                          const DDim &dim) {
  block_.FindVarRecursive(name)->SetShape(vectorize(dim));
}

void CompileTimeInferShapeContext::SetRepeatedDims(
    const std::string &name, const std::vector<DDim> &dims) {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE_NOT_NULL(
      var, platform::errors::NotFound("Variable %s is not found.", name));
  std::vector<std::vector<int64_t>> dim_vec(dims.size());
  std::transform(dims.begin(), dims.end(), dim_vec.begin(), vectorize<>);
  var->SetShapes(dim_vec);
}
bool CompileTimeInferShapeContext::IsRuntime() const { return false; }

proto::VarType::Type CompileTimeInferShapeContext::GetVarType(
    const std::string &name) const {
  return block_.FindVarRecursive(name)->GetType();
}

}  // namespace framework
}  // namespace paddle