/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/op_desc.h"

#include <string>

#include "glog/logging.h"
#include "paddle/fluid/framework/block_desc.h"
#include "paddle/fluid/framework/op_call_stack.h"
#include "paddle/fluid/framework/op_proto_maker.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/framework/shape_inference.h"
#include "paddle/fluid/framework/var_type_inference.h"

namespace paddle {
namespace framework {

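// Compile-time counterpart of the runtime InferShapeContext: it resolves
// variable names through an OpDesc and its enclosing BlockDesc, so shapes
// and LoD levels can be inferred before any runtime scope exists.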
class CompileTimeInferShapeContext : public InferShapeContext {
 public:
  CompileTimeInferShapeContext(const OpDesc &op, const BlockDesc &block);

  bool HasInput(const std::string &name) const override;

  bool HasOutput(const std::string &name) const override;

  bool HasInputs(const std::string &name) const override;

  bool HasOutputs(const std::string &name) const override;

  AttrReader Attrs() const override;

  std::vector<std::string> Inputs(const std::string &name) const override;

  std::vector<std::string> Outputs(const std::string &name) const override;

  std::string GetInputNameByIdx(size_t idx) const override {
    auto &op_proto =
        paddle::framework::OpInfoMap::Instance().Get(op_.Type()).proto_;
    PADDLE_ENFORCE_LT(idx, op_proto->inputs().size(),
                      platform::errors::OutOfRange(
                          "The index should be less than the size of inputs "
                          "of operator %s, but the given index is %d and the "
                          "size is %d.",
                          op_.Type(), idx, op_proto->inputs().size()));
    return op_proto->inputs()[idx].name();
  }

  std::string GetOutputNameByIdx(size_t idx) const override {
    auto &op_proto =
        paddle::framework::OpInfoMap::Instance().Get(op_.Type()).proto_;
    PADDLE_ENFORCE_LT(
        idx, op_proto->outputs().size(),
        platform::errors::OutOfRange(
            "The index should be less than the size of outputs "
            "of operator %s, but the given index is %d and the "
            "size is %d.",
            op_.Type(), idx, op_proto->outputs().size()));
    return op_proto->outputs()[idx].name();
  }
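  // Usage sketch for the two helpers above (operator and slot names are
  // hypothetical): if the proto of op_.Type() declares inputs {"X", "Y"},
  // GetInputNameByIdx(0) returns "X" and GetInputNameByIdx(2) trips the
  // OutOfRange check; GetOutputNameByIdx behaves symmetrically for outputs.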

  void ShareDim(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size(),
                      platform::errors::InvalidArgument(
                          "The input variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Inputs(in).size(), i));
    PADDLE_ENFORCE_LT(j, Outputs(out).size(),
                      platform::errors::InvalidArgument(
                          "The output variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Outputs(out).size(), j));

    std::string input_n = Inputs(in)[i];
    std::string output_n = Outputs(out)[j];

    PADDLE_ENFORCE_NE(input_n, framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The input variable %s[%d] is empty.", in, i));
    PADDLE_ENFORCE_NE(output_n, framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The output variable %s[%d] is empty.", out, j));

    auto *in_var = block_.FindVarRecursive(input_n);
    auto *out_var = block_.FindVarRecursive(output_n);

    PADDLE_ENFORCE_EQ(
        in_var->GetType(), out_var->GetType(),
        platform::errors::InvalidArgument(
            "The type of input %s and output %s do not match. The input type "
            "is %s, output type is %s.",
            input_n, output_n, DataTypeToString(in_var->GetType()),
            DataTypeToString(out_var->GetType())));

    SetDim(output_n, GetDim(input_n));
  }

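  // Copies the LoD level of each input variable in slot `in` to the paired
  // output variable in slot `out`. Pairs whose output is kEmptyVarName are
  // skipped, and propagation stops at the first input that is neither
  // LoDTensor nor LoDTensorArray.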
  void ShareAllLoD(const std::string &in,
                   const std::string &out) const override {
    auto &in_var_names = op_.Input(in);
    auto &out_var_names = op_.Output(out);

    PADDLE_ENFORCE_EQ(
        in_var_names.size(), out_var_names.size(),
        platform::errors::PreconditionNotMet(
            "Op [%s]: the number of input variables should be equal to the "
            "number of output variables.",
            op_.Type()));

    for (size_t i = 0; i < in_var_names.size(); ++i) {
      if (out_var_names[i] == framework::kEmptyVarName) {
        continue;
      }

      auto *in_var = block_.FindVarRecursive(in_var_names[i]);
      auto *out_var = block_.FindVarRecursive(out_var_names[i]);
      if (in_var->GetType() != proto::VarType::LOD_TENSOR &&
          in_var->GetType() != proto::VarType::LOD_TENSOR_ARRAY) {
        VLOG(3) << "input " << in << " is not LoDTensor or LoDTensorArray.";
        return;
      }
      out_var->SetLoDLevel(in_var->GetLoDLevel());
    }
  }

  void ShareLoD(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size(),
                      platform::errors::InvalidArgument(
                          "The input variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Inputs(in).size(), i));
    PADDLE_ENFORCE_LT(j, Outputs(out).size(),
                      platform::errors::InvalidArgument(
                          "The output variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Outputs(out).size(), j));
    PADDLE_ENFORCE_NE(Inputs(in)[i], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The input variable %s[%d] is empty.", in, i));
    PADDLE_ENFORCE_NE(Outputs(out)[j], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The output variable %s[%d] is empty.", out, j));
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    if (in_var->GetType() != proto::VarType::LOD_TENSOR &&
        in_var->GetType() != proto::VarType::LOD_TENSOR_ARRAY) {
      VLOG(3) << "input " << in << " is not LoDTensor or LoDTensorArray.";
      return;
    }
    out_var->SetLoDLevel(in_var->GetLoDLevel());
  }

  int32_t GetLoDLevel(const std::string &in, size_t i = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size(),
                      platform::errors::InvalidArgument(
                          "The input variable index is out of range, input "
                          "variable %s of operator %s only has %d elements.",
                          in, op_.Type(), Inputs(in).size()));
    PADDLE_ENFORCE_NE(Inputs(in)[i], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The input variable %s[%d] of operator %s is empty.",
                          in, i, op_.Type()));
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    PADDLE_ENFORCE_NOT_NULL(
        in_var, platform::errors::NotFound(
                    "The input variable %s[%d] of operator %s is not found.",
                    in, i, op_.Type()));
    return in_var->GetLoDLevel();
  }

  void SetLoDLevel(const std::string &out, int32_t lod_level,
                   size_t j = 0) const override {
    PADDLE_ENFORCE_LT(j, Outputs(out).size(),
                      platform::errors::InvalidArgument(
                          "The output variable index is out of range, output "
                          "variable %s of operator %s only has %d elements.",
                          out, op_.Type(), Outputs(out).size()));
    PADDLE_ENFORCE_NE(Outputs(out)[j], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The output variable %s[%d] of operator %s is empty.",
                          out, j, op_.Type()));
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    PADDLE_ENFORCE_NOT_NULL(
        out_var, platform::errors::NotFound(
                     "The output variable %s[%d] of operator %s is not found.",
                     out, j, op_.Type()));
    if (lod_level >= 0) {
      out_var->SetLoDLevel(lod_level);
    }
  }

  std::vector<InferShapeVarPtr> GetInputVarPtrs(
      const std::string &name) const override {
    const std::vector<std::string> arg_names = Inputs(name);
    std::vector<InferShapeVarPtr> res;
    res.reserve(arg_names.size());
    std::transform(arg_names.begin(), arg_names.end(), std::back_inserter(res),
                   [this](const std::string &name) {
                     return block_.FindVarRecursive(name);
                   });
    return res;
  }

  std::vector<InferShapeVarPtr> GetOutputVarPtrs(
      const std::string &name) const override {
    const std::vector<std::string> arg_names = Outputs(name);
    std::vector<InferShapeVarPtr> res;
    res.reserve(arg_names.size());
    std::transform(arg_names.begin(), arg_names.end(), std::back_inserter(res),
                   [this](const std::string &name) {
                     return block_.FindVarRecursive(name);
                   });
    return res;
  }

  DDim GetInputDim(const std::string &name) const override {
    const std::vector<std::string> &arg_names = Inputs(name);
    PADDLE_ENFORCE_EQ(arg_names.size(), 1UL,
                      platform::errors::InvalidArgument(
                          "The input(%s) should hold only one element, but now "
                          "it holds %d elements.",
                          name, arg_names.size()));
    return this->GetDim(arg_names[0]);
  }

  std::vector<DDim> GetInputsDim(const std::string &name) const override {
    const std::vector<std::string> &arg_names = Inputs(name);
    return GetDims(arg_names);
  }

  bool IsRuntime() const override;

  bool IsRunMKLDNNKernel() const override;

  std::vector<proto::VarType::Type> GetInputsVarType(
      const std::string &name) const override {
    return GetVarTypes(Inputs(name));
  }

  std::vector<proto::VarType::Type> GetOutputsVarType(
      const std::string &name) const override {
    return GetVarTypes(Outputs(name));
  }

  void SetOutputDim(const std::string &name, const DDim &dim) override {
    auto arg_names = Outputs(name);
    PADDLE_ENFORCE_EQ(arg_names.size(), 1UL,
                      platform::errors::InvalidArgument(
                          "The output(%s) should hold only one element, but "
                          "now it holds %d elements.",
                          name, arg_names.size()));
    SetDim(arg_names[0], dim);
  }

  void SetOutputsDim(const std::string &name,
                     const std::vector<DDim> &dims) override {
    auto names = Outputs(name);
    SetDims(names, dims);
  }

 protected:
  std::vector<proto::VarType::Type> GetVarTypes(
      const std::vector<std::string> &names) const {
    std::vector<proto::VarType::Type> retv;
    retv.resize(names.size());
    std::transform(
        names.begin(), names.end(), retv.begin(),
        std::bind(std::mem_fn(&CompileTimeInferShapeContext::GetVarType), this,
                  std::placeholders::_1));
    return retv;
  }

  proto::VarType::Type GetVarType(const std::string &name) const;

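  // GetDim reads the shape recorded on the VarDesc at compile time; an empty
  // shape list is mapped to the placeholder make_ddim({0UL}).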
  DDim GetDim(const std::string &name) const {
    auto var = block_.FindVarRecursive(name);
    PADDLE_ENFORCE_NOT_NULL(
        var, platform::errors::NotFound("Variable %s is not found.", name));
    DDim res;
    try {
      auto shape = var->GetShape();
      res = shape.empty() ? make_ddim({0UL}) : make_ddim(shape);
    } catch (...) {
      VLOG(5) << "GetDim of variable " << name << " error";
      std::rethrow_exception(std::current_exception());
    }
    return res;
  }

  std::vector<DDim> GetDims(const std::vector<std::string> &names) const {
    std::vector<DDim> ret;
    ret.reserve(names.size());
    std::transform(
        names.begin(), names.end(), std::back_inserter(ret),
        [this](const std::string &name) { return this->GetDim(name); });
    return ret;
  }

  void SetDim(const std::string &name, const DDim &dim);

  void SetDims(const std::vector<std::string> &names,
               const std::vector<DDim> &dims) {
    size_t length = names.size();
    PADDLE_ENFORCE_EQ(length, dims.size(),
                      platform::errors::InvalidArgument(
                          "The input variables number(%d) and input dimensions "
                          "number(%d) do not match.",
                          length, dims.size()));
    for (size_t i = 0; i < length; ++i) {
      if (names[i] == framework::kEmptyVarName) {
        continue;
      }
      SetDim(names[i], dims[i]);
    }
  }

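  // Repeated dims serve variables that record several shapes at once (e.g.
  // reader variables); the definitions are near the end of this file.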
  std::vector<DDim> GetRepeatedDims(const std::string &name) const override;

  void SetRepeatedDims(const std::string &name,
                       const std::vector<DDim> &dims) override;

  const OpDesc &op_;
  const BlockDesc &block_;
};

OpDesc::OpDesc(const std::string &type, const VariableNameMap &inputs,
               const VariableNameMap &outputs, const AttributeMap &attrs) {
  desc_.set_type(type);
  inputs_ = inputs;
  outputs_ = outputs;
  attrs_ = attrs;
  need_update_ = true;
  block_ = nullptr;
}

OpDesc::OpDesc(const OpDesc &other, BlockDesc *block) {
  CopyFrom(other);
  block_ = block;
  need_update_ = true;
}

void OpDesc::CopyFrom(const OpDesc &op_desc) {
  desc_.set_type(op_desc.Type());
  inputs_ = op_desc.inputs_;
  outputs_ = op_desc.outputs_;
  attrs_ = op_desc.attrs_;
  // The record of original_id_ is only for auto parallel.
  original_id_ = op_desc.original_id_;
  need_update_ = true;
}

OpDesc::OpDesc(const proto::OpDesc &desc, BlockDesc *block)
    : desc_(desc), need_update_(false) {
  // restore inputs_
  int input_size = desc_.inputs_size();
  for (int i = 0; i < input_size; ++i) {
    const proto::OpDesc::Var &var = desc_.inputs(i);
    std::vector<std::string> &args = inputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore outputs_
  int output_size = desc_.outputs_size();
  for (int i = 0; i < output_size; ++i) {
    const proto::OpDesc::Var &var = desc_.outputs(i);
    std::vector<std::string> &args = outputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore attrs_
  for (const proto::OpDesc::Attr &attr : desc_.attrs()) {
    std::string attr_name = attr.name();
    // The sub_block referred to by the BLOCK attr hasn't been added to the
    // ProgramDesc class yet, so we skip setting BLOCK/BLOCKS attrs here.
    if (attr.type() != proto::AttrType::BLOCK &&
        attr.type() != proto::AttrType::BLOCKS) {
      attrs_[attr_name] = GetAttrValue(attr);
    }
  }
  this->block_ = block;
}

proto::OpDesc *OpDesc::Proto() {
  Flush();
  return &desc_;
}

const std::vector<std::string> &OpDesc::Input(const std::string &name) const {
  auto it = inputs_.find(name);
  PADDLE_ENFORCE_NE(
      it, inputs_.end(),
      platform::errors::NotFound("Input %s cannot be found in operator %s.",
                                 name, Type()));
  return it->second;
}

std::vector<std::string> OpDesc::InputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->inputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetInput(const std::string &param_name,
                      const std::vector<std::string> &args) {
  need_update_ = true;
  inputs_[param_name] = args;
}

const std::vector<std::string> &OpDesc::Output(const std::string &name) const {
  auto it = outputs_.find(name);
  PADDLE_ENFORCE_NE(
      it, outputs_.end(),
      platform::errors::NotFound("Output %s cannot be found in operator %s.",
                                 name, Type()));
  return it->second;
}

bool OpDesc::HasOutput(const std::string &name) const {
  return outputs_.find(name) != outputs_.end();
}

std::vector<std::string> OpDesc::OutputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->outputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetOutput(const std::string &param_name,
                       const std::vector<std::string> &args) {
  need_update_ = true;
  this->outputs_[param_name] = args;
}

void OpDesc::RemoveOutput(const std::string &name) {
  outputs_.erase(name);
  need_update_ = true;
}

void OpDesc::RemoveInput(const std::string &name) {
  inputs_.erase(name);
  need_update_ = true;
}

bool OpDesc::HasProtoAttr(const std::string &name) const {
  auto &op_info = OpInfoMap::Instance();
  if (op_info.Has(desc_.type())) {
    auto op_info_ptr = op_info.Get(desc_.type());
    if (op_info_ptr.HasOpProtoAndChecker()) {
      const proto::OpProto &proto = op_info_ptr.Proto();
      for (int i = 0; i != proto.attrs_size(); ++i) {
        const proto::OpProto::Attr &attr = proto.attrs(i);
        if (attr.name() == name) {
          return true;
        }
      }
    }
  }
  return false;
}

proto::AttrType OpDesc::GetAttrType(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(it, attrs_.end(), platform::errors::NotFound(
                                          "Attribute %s is not found.", name));
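  // The variant's first alternative is boost::blank, so the proto enum is
  // offset by one from boost::variant::which().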
  return static_cast<proto::AttrType>(it->second.which() - 1);
}

std::vector<std::string> OpDesc::AttrNames() const {
  std::vector<std::string> retv;
  retv.reserve(attrs_.size());
  for (auto &attr : attrs_) {
    retv.push_back(attr.first);
  }
  return retv;
}

void OpDesc::RemoveAttr(const std::string &name) {
  attrs_.erase(name);
  need_update_ = true;
}

void OpDesc::SetAttr(const std::string &name, const Attribute &v) {
  // NOTICE(minqiyang): pybind11 will take the empty list in python as
  // the std::vector<int> type in C++; so we have to change the attr's type
  // here if we meet this issue
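  // For example, an empty Python list bound to a BOOLEANS attribute arrives
  // here typed as std::vector<int>{}; the switch below re-types it according
  // to the attribute's declared proto type.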
  proto::AttrType attr_type = static_cast<proto::AttrType>(v.which() - 1);
  if (attr_type == proto::AttrType::INTS &&
      BOOST_GET_CONST(std::vector<int>, v).size() == 0u) {
    // Find current attr via attr name and set the correct attribute value
    const proto::OpProto::Attr &attr = GetProtoAttr(name);
    switch (attr.type()) {
      case proto::AttrType::BOOLEANS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to BOOLEANS";
        this->attrs_[name] = std::vector<bool>();
        break;
      }
      case proto::AttrType::INTS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to INTS";
        this->attrs_[name] = std::vector<int>();
        break;
      }
      case proto::AttrType::LONGS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from LONGS to LONGS";
        this->attrs_[name] = std::vector<int64_t>();
        break;
      }
      case proto::AttrType::FLOATS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to FLOATS";
        this->attrs_[name] = std::vector<float>();
        break;
      }
      case proto::AttrType::STRINGS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to STRINGS";
        this->attrs_[name] = std::vector<std::string>();
        break;
      }
      case proto::AttrType::BLOCKS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to BLOCKS";
        this->SetBlocksAttr(name, std::vector<BlockDesc *>());
        return;
      }
      default:
        PADDLE_THROW(platform::errors::Unimplemented(
            "Unsupported attribute type (code %d).", attr.type()));
    }
    need_update_ = true;
    return;
  }

  // In order to set bool attr properly
  if (attr_type == proto::AttrType::INT && HasProtoAttr(name) &&
      GetProtoAttr(name).type() == proto::AttrType::BOOLEAN) {
    this->attrs_[name] = static_cast<bool>(BOOST_GET_CONST(int, v));
    need_update_ = true;
    return;
  }

  this->attrs_[name] = v;
  need_update_ = true;
}

void OpDesc::SetBlockAttr(const std::string &name, BlockDesc *block) {
  this->attrs_[name] = block;
  need_update_ = true;
}

void OpDesc::SetBlocksAttr(const std::string &name,
                           std::vector<BlockDesc *> blocks) {
  this->attrs_[name] = blocks;
  need_update_ = true;
}

void OpDesc::SetAttrMap(
    const std::unordered_map<std::string, Attribute> &attr_map) {
  attrs_ = attr_map;
  need_update_ = true;
}

Attribute OpDesc::GetAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(it, attrs_.end(), platform::errors::NotFound(
                                          "Attribute %s is not found.", name));
  return it->second;
}

const proto::OpProto::Attr &OpDesc::GetProtoAttr(
    const std::string &name) const {
  const proto::OpProto &proto = OpInfoMap::Instance().Get(Type()).Proto();
  for (int i = 0; i != proto.attrs_size(); ++i) {
    const proto::OpProto::Attr &attr = proto.attrs(i);
    if (attr.name() == name) {
      return attr;
    }
  }

  PADDLE_THROW(platform::errors::NotFound(
      "Attribute %s is not found in proto %s.", name, proto.type()));
}

Attribute OpDesc::GetNullableAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  if (it != attrs_.end()) {
    return it->second;
  } else {
    return Attribute();
  }
}

std::vector<int> OpDesc::GetBlocksAttrIds(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(
      it, attrs_.end(),
      platform::errors::NotFound(
          "Attribute `%s` is not found in operator `%s`.", name, desc_.type()));
  auto blocks = BOOST_GET_CONST(std::vector<BlockDesc *>, it->second);

  std::vector<int> ids;
  for (auto n : blocks) {
    ids.push_back(n->ID());
  }

  return ids;
}

int OpDesc::GetBlockAttrId(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(
      it, attrs_.end(),
      platform::errors::NotFound(
          "Attribute `%s` is not found in operator `%s`.", name, desc_.type()));
  return BOOST_GET_CONST(BlockDesc *, it->second)->ID();
}

const std::unordered_map<std::string, Attribute> &OpDesc::GetAttrMap() const {
  return attrs_;
}

void OpDesc::Rename(const std::string &old_name, const std::string &new_name) {
  RenameInput(old_name, new_name);
  RenameOutput(old_name, new_name);
  need_update_ = true;
}

void OpDesc::RenameOutput(const std::string &old_name,
                          const std::string &new_name) {
  for (auto &output : outputs_) {
    std::replace(output.second.begin(), output.second.end(), old_name,
                 new_name);
  }

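  // The op-role-var attribute records variable names as well, so keep it in
  // sync with the rename.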
  auto it = attrs_.find(framework::OpProtoAndCheckerMaker::OpRoleVarAttrName());
  if (it != attrs_.end()) {
    auto &op_vars = BOOST_GET(std::vector<std::string>, it->second);
    std::replace(op_vars.begin(), op_vars.end(), old_name, new_name);
  }

  need_update_ = true;
}

void OpDesc::RenameInput(const std::string &old_name,
                         const std::string &new_name) {
  for (auto &input : inputs_) {
    std::replace(input.second.begin(), input.second.end(), old_name, new_name);
  }

  auto it = attrs_.find(framework::OpProtoAndCheckerMaker::OpRoleVarAttrName());
  if (it != attrs_.end()) {
    auto &op_vars = BOOST_GET(std::vector<std::string>, it->second);
    std::replace(op_vars.begin(), op_vars.end(), old_name, new_name);
  }

  need_update_ = true;
}

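// Serializes one boost::variant Attribute into the matching field of a
// proto::OpDesc::Attr message, with one operator() overload per alternative.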
struct SetAttrDescVisitor : public boost::static_visitor<void> {
  explicit SetAttrDescVisitor(proto::OpDesc::Attr *attr) : attr_(attr) {}
  mutable proto::OpDesc::Attr *attr_;
  void operator()(int v) const { attr_->set_i(v); }
  void operator()(float v) const { attr_->set_f(v); }
  void operator()(const std::string &v) const { attr_->set_s(v); }

  // Please refer to https://github.com/PaddlePaddle/Paddle/issues/7162
  template <class T,
            class = typename std::enable_if<std::is_same<bool, T>::value>::type>
  void operator()(T b) const {
    attr_->set_b(b);
  }

  void operator()(const std::vector<int> &v) const {
    VectorToRepeated(v, attr_->mutable_ints());
  }
  void operator()(const std::vector<float> &v) const {
    VectorToRepeated(v, attr_->mutable_floats());
  }
  void operator()(const std::vector<std::string> &v) const {
    VectorToRepeated(v, attr_->mutable_strings());
  }
  void operator()(const std::vector<bool> &v) const {
    VectorToRepeated(v, attr_->mutable_bools());
  }
  void operator()(const std::vector<BlockDesc *> &v) const {
    std::vector<int> blocks_idx;
    for (auto blk : v) {
      blocks_idx.push_back(blk->ID());
    }
    VectorToRepeated(blocks_idx, attr_->mutable_blocks_idx());
  }

  void operator()(BlockDesc *desc) const { attr_->set_block_idx(desc->ID()); }

  void operator()(int64_t v) const { attr_->set_l(v); }

  void operator()(const std::vector<int64_t> &v) const {
    VectorToRepeated(v, attr_->mutable_longs());
  }

  void operator()(const std::vector<double> &v) const {
    VectorToRepeated(v, attr_->mutable_float64s());
  }

  void operator()(boost::blank) const {
    PADDLE_THROW(platform::errors::Unavailable(
        "Unsupported calling method of SetAttrDescVisitor object for "
        "`boost::blank` type."));
  }
};

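// Flush() synchronizes the in-memory inputs_/outputs_/attrs_ maps back into
// the protobuf desc_; it is a no-op unless need_update_ has been set.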
void OpDesc::Flush() {
  if (need_update_) {
    this->desc_.mutable_inputs()->Clear();
    for (auto &ipt : inputs_) {
      auto *input = desc_.add_inputs();
      input->set_parameter(ipt.first);
      VectorToRepeated(ipt.second, input->mutable_arguments());
    }

    this->desc_.mutable_outputs()->Clear();
    for (auto &opt : outputs_) {
      auto *output = desc_.add_outputs();
      output->set_parameter(opt.first);
      VectorToRepeated(opt.second, output->mutable_arguments());
    }

    this->desc_.mutable_attrs()->Clear();
    for (auto &attr : attrs_) {
      auto *attr_desc = desc_.add_attrs();
      attr_desc->set_name(attr.first);
      attr_desc->set_type(
          static_cast<proto::AttrType>(attr.second.which() - 1));
      SetAttrDescVisitor visitor(attr_desc);
      boost::apply_visitor(visitor, attr.second);
    }

    need_update_ = false;
  }
}

void OpDesc::CheckAttrs() {
  PADDLE_ENFORCE_EQ(Type().empty(), false,
                    platform::errors::PreconditionNotMet(
                        "CheckAttrs() cannot be called before type is set."));
  auto *checker = OpInfoMap::Instance().Get(Type()).Checker();
  if (checker == nullptr) {
    // checker is not configured. That operator could be generated by Paddle,
    // not by users.
    return;
  }
  VLOG(10) << "begin to check attribute of " << Type();
  checker->Check(&attrs_);
}

void OpDesc::InferShape(const BlockDesc &block) const {
  try {
    VLOG(3) << "CompileTime infer shape on " << Type();
    auto &infer_shape = OpInfoMap::Instance().Get(this->Type()).infer_shape_;
    PADDLE_ENFORCE_EQ(
        static_cast<bool>(infer_shape), true,
        platform::errors::NotFound(
            "Operator %s's infer_shape is not registered.", this->Type()));
    CompileTimeInferShapeContext ctx(*this, block);
    if (VLOG_IS_ON(10)) {
      std::ostringstream sout;
      auto inames = this->InputArgumentNames();
      sout << " From [";
      std::copy(inames.begin(), inames.end(),
                std::ostream_iterator<std::string>(sout, ", "));
      sout << "] to [";
      auto onames = this->OutputArgumentNames();
      std::copy(onames.begin(), onames.end(),
                std::ostream_iterator<std::string>(sout, ", "));
      sout << "]";
      VLOG(10) << sout.str();
    }
    infer_shape(&ctx);
  } catch (platform::EnforceNotMet &exception) {
    framework::AppendErrorOpHint(Type(), &exception);
    throw std::move(exception);
  } catch (...) {
    std::rethrow_exception(std::current_exception());
  }
}

void OpDesc::InferVarType(BlockDesc *block) const {
  // There are a few places that var type can be set.
  // When a VarDesc is created, it defaults to LOD_TENSOR.
  // When an output variable is created, it also defaults to LOD_TENSOR.
  // We restrict this to be the only place where an operator defines its
  // customized var type inference. Hence, we don't do any "default" setting
  // here.
  auto &info = OpInfoMap::Instance().Get(this->Type());
  if (info.infer_var_type_) {
    InferVarTypeContext context(this, block);
    info.infer_var_type_(&context);
  }
}

CompileTimeInferShapeContext::CompileTimeInferShapeContext(
    const OpDesc &op, const BlockDesc &block)
    : op_(op), block_(block) {}

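// HasInput/HasOutput require the slot to hold exactly one argument, while the
// plural HasInputs/HasOutputs accept any arity but require every named
// variable to exist in the block.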
bool CompileTimeInferShapeContext::HasInput(const std::string &name) const {
  if (op_.Inputs().find(name) == op_.Inputs().end()) {
    return false;
  }
  const std::vector<std::string> &input_names = op_.Input(name);
  auto length = input_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL, platform::errors::InvalidArgument(
                                     "Input(%s) should have only one value, "
                                     "but it has %d values now.",
                                     name, length));
  return block_.HasVarRecursive(input_names[0]);
}

bool CompileTimeInferShapeContext::HasOutput(const std::string &name) const {
  if (op_.Outputs().find(name) == op_.Outputs().end()) {
    return false;
  }
  const std::vector<std::string> &output_names = op_.Output(name);
  auto length = output_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL, platform::errors::InvalidArgument(
                                     "Output(%s) should have only one value, "
                                     "but it has %d values now.",
                                     name, length));
  return block_.HasVarRecursive(output_names[0]);
}

bool CompileTimeInferShapeContext::HasInputs(const std::string &name) const {
  if (op_.Inputs().find(name) == op_.Inputs().end()) {
    return false;
  }
  const std::vector<std::string> &input_names = op_.Input(name);
  if (input_names.empty()) {
    return false;
  }
  for (auto &input : input_names) {
    if (!block_.HasVarRecursive(input)) return false;
  }
  return true;
}

bool CompileTimeInferShapeContext::HasOutputs(const std::string &name) const {
  if (op_.Outputs().find(name) == op_.Outputs().end()) {
    return false;
  }
  const std::vector<std::string> &output_names = op_.Output(name);
  if (output_names.empty()) {
    return false;
  }
  for (auto &output : output_names) {
    if (!block_.HasVarRecursive(output)) return false;
  }
  return true;
}

AttrReader CompileTimeInferShapeContext::Attrs() const {
  return AttrReader(op_.GetAttrMap());
}

std::vector<std::string> CompileTimeInferShapeContext::Inputs(
    const std::string &name) const {
  return op_.Input(name);
}

std::vector<std::string> CompileTimeInferShapeContext::Outputs(
    const std::string &name) const {
  return op_.Output(name);
}

std::vector<DDim> CompileTimeInferShapeContext::GetRepeatedDims(
    const std::string &name) const {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE_NOT_NULL(
      var, platform::errors::NotFound("Variable %s is not found.", name));
  std::vector<DDim> res;
  try {
    auto shapes = var->GetShapes();
    for (const auto &s : shapes) {
      res.push_back(s.empty() ? make_ddim({0UL}) : make_ddim(s));
    }
  } catch (...) {
    VLOG(5) << "GetRepeatedDim of variable " << name << " error.";
    std::rethrow_exception(std::current_exception());
  }
  return res;
}

void CompileTimeInferShapeContext::SetDim(const std::string &name,
                                          const DDim &dim) {
  block_.FindVarRecursive(name)->SetShape(vectorize(dim));
}

void CompileTimeInferShapeContext::SetRepeatedDims(
    const std::string &name, const std::vector<DDim> &dims) {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE_NOT_NULL(
      var, platform::errors::NotFound("Variable %s is not found.", name));
  std::vector<std::vector<int64_t>> dim_vec(dims.size());
  std::transform(dims.begin(), dims.end(), dim_vec.begin(), vectorize<>);
  var->SetShapes(dim_vec);
}

bool CompileTimeInferShapeContext::IsRuntime() const { return false; }

bool CompileTimeInferShapeContext::IsRunMKLDNNKernel() const { return false; }

proto::VarType::Type CompileTimeInferShapeContext::GetVarType(
    const std::string &name) const {
  return block_.FindVarRecursive(name)->GetType();
}

}  // namespace framework
}  // namespace paddle