// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <string>
#include <vector>

#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/framework/shape_inference.h"
#include "paddle/fluid/framework/type_defs.h"
#include "paddle/fluid/imperative/type_defs.h"
#include "paddle/fluid/imperative/var_helper.h"
#include "paddle/fluid/imperative/variable_wrapper.h"
#include "paddle/phi/core/ddim.h"

namespace paddle {
namespace imperative {

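// DygraphInferShapeContext adapts the imperative (dygraph) input/output
// variable maps to the static-graph InferShapeContext interface, so that an
// operator's InferShape routine can run eagerly over in-memory variables.
//
// A minimal, hypothetical usage sketch from an eager trace (the surrounding
// names are illustrative, not part of this header):
//
//   imperative::DygraphInferShapeContext<imperative::VarBase> infer_shape_ctx(
//       &ins, &outs, &attrs, &default_attrs, op.Type());
//   op.Info().infer_shape_(&infer_shape_ctx);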
template <typename VarType>
class DygraphInferShapeContext : public framework::InferShapeContext {
  using DDim = framework::DDim;

 public:
  DygraphInferShapeContext(
      const NameVarMap<VarType>* in, const NameVarMap<VarType>* out,
      const framework::AttributeMap* attr,
      const framework::AttributeMap* default_attr, const std::string& op_type,
      const framework::OpKernelType* op_kernel_type = nullptr,
      const phi::ArgumentMappingFn* arg_map_fn = nullptr,
      const phi::KernelSignature* default_kernel_signature = nullptr)
      : var_map_in_(in),
        var_map_out_(out),
        attrs_(attr),
        default_attrs_(default_attr),
        op_type_(op_type),
        op_kernel_type_(op_kernel_type),
        arg_map_fn_(arg_map_fn),
        default_kernel_signature_(default_kernel_signature) {}

  bool HasInput(const std::string& name) const override {
    // the input slot must hold exactly one (possibly null) variable
    auto it = var_map_in_->find(name);
    if (it == var_map_in_->end()) {
      return false;
    }
    const auto& in = it->second;
    if (in.size() == 0) return false;
    PADDLE_ENFORCE_EQ(
        in.size(), 1UL,
        platform::errors::PreconditionNotMet(
            "Input %s should not hold more than one variable", name));
    return in[0] != nullptr;
  }

  bool HasOutput(const std::string& name) const override {
    // the output slot must hold exactly one (possibly null) variable
    auto it = var_map_out_->find(name);
    if (it == var_map_out_->end()) {
      return false;
    }
    const auto& out = it->second;
    if (out.size() == 0) {
      return false;
    }
    PADDLE_ENFORCE_EQ(
        out.size(), 1UL,
        platform::errors::PreconditionNotMet(
            "Output %s should not hold more than one variable", name));
    return out[0] != nullptr;
  }

  bool HasAttr(const std::string& name) const override {
    return attrs_->count(name) > 0 || default_attrs_->count(name) > 0;
  }

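  // Unlike HasInput, the slot may hold any number of variables, but every
  // entry must be non-null.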
  bool HasInputs(const std::string& name) const override {
    auto it = var_map_in_->find(name);
    if (it == var_map_in_->end() || it->second.empty()) {
      return false;
    }
    for (auto& input : it->second) {
      if (input == nullptr) {
        return false;
      }
    }
    return true;
  }

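  // With allow_null set, one non-null output in the slot is enough; otherwise
  // every entry must be non-null.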
  bool HasOutputs(const std::string& name,
                  bool allow_null = false) const override {
    auto it = var_map_out_->find(name);
    if (it == var_map_out_->end() || it->second.empty()) {
      return false;
    }
    if (allow_null) {
      for (auto& output : it->second) {
        if (output != nullptr) {
          return true;
        }
      }
      return false;
    } else {
      for (auto& output : it->second) {
        if (output == nullptr) {
          return false;
        }
      }
      return true;
    }
  }

  framework::AttrReader Attrs() const override {
    return framework::AttrReader(*attrs_, *default_attrs_);
  }

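  // Inputs()/Outputs() return the names bound to a slot; null entries are
  // reported as framework::kEmptyVarName.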
  std::vector<std::string> Inputs(const std::string& name) const override {
    std::vector<std::string> vec_res;
    auto it = var_map_in_->find(name);
    PADDLE_ENFORCE_NE(
        it, var_map_in_->end(),
        platform::errors::NotFound("cannot find [%s] in input", name));

    vec_res.reserve(it->second.size());
    for (auto& var : it->second) {
      if (var) {
        vec_res.push_back(GetNameFromVar(var));
      } else {
        vec_res.push_back(framework::kEmptyVarName);
      }
    }

    return vec_res;
  }

  std::vector<std::string> Outputs(const std::string& name) const override {
    std::vector<std::string> vec_res;
    auto it = var_map_out_->find(name);
    PADDLE_ENFORCE_NE(
        it, var_map_out_->end(),
        platform::errors::NotFound("cannot find [%s] in output", name));

    vec_res.reserve(it->second.size());
    for (auto& var : it->second) {
      if (var) {
        vec_res.push_back(GetNameFromVar(var));
      } else {
        vec_res.push_back(framework::kEmptyVarName);
      }
    }

    return vec_res;
  }
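
  // Map a positional index to the slot name declared in the op's proto.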
  std::string GetInputNameByIdx(size_t idx) const override {
    auto& op_proto =
        paddle::framework::OpInfoMap::Instance().Get(op_type_).proto_;
    PADDLE_ENFORCE_LT(idx, op_proto->inputs().size(),
                      platform::errors::OutOfRange(
                          "The index should be less than the size of inputs "
                          "of operator %s, but got index %d while the size "
                          "is %d",
                          op_type_, idx, op_proto->inputs().size()));
    return op_proto->inputs()[idx].name();
  }

  std::string GetOutputNameByIdx(size_t idx) const override {
    auto& op_proto =
        paddle::framework::OpInfoMap::Instance().Get(op_type_).proto_;
    PADDLE_ENFORCE_LT(
        idx, op_proto->outputs().size(),
        platform::errors::OutOfRange(
            "The index should be less than the size of outputs of "
            "operator %s, but got index %d while the size is %d",
            op_type_, idx, op_proto->outputs().size()));
    return op_proto->outputs()[idx].name();
  }

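  // Copy the shape of input slot i to output slot j without sharing buffers.
  // For LoDTensor only the dims are resized; for SelectedRows the rows and
  // height are copied as well.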
  void ShareDim(const std::string& in, const std::string& out, size_t i = 0,
                size_t j = 0) override {
    auto in_it = var_map_in_->find(in);
    auto out_it = var_map_out_->find(out);
    PADDLE_ENFORCE_NE(
        in_it, var_map_in_->end(),
        platform::errors::NotFound("cannot find [%s] in input", in));
    PADDLE_ENFORCE_GT(in_it->second.size(), i,
                      platform::errors::PreconditionNotMet(
                          "Input %s should hold more than %llu elements", in,
                          i));
    PADDLE_ENFORCE_NE(
        out_it, var_map_out_->end(),
        platform::errors::NotFound("cannot find [%s] in output", out));
    PADDLE_ENFORCE_GT(out_it->second.size(), j,
                      platform::errors::PreconditionNotMet(
                          "Output %s should hold more than %llu elements", out,
                          j));

    framework::Variable* in_var = in_it->second[i]->MutableVar();
    framework::Variable* out_var = out_it->second[j]->MutableVar();

    PADDLE_ENFORCE_EQ(in_var->Type(), out_var->Type(),
                      platform::errors::PreconditionNotMet(
                          "The type of %s and %s is not the same.", in, out));

    if (in_var->IsType<framework::LoDTensor>()) {
      auto& in_lod_tensor = in_var->Get<framework::LoDTensor>();
      auto* out_lod_tensor = out_var->GetMutable<framework::LoDTensor>();
      out_lod_tensor->Resize(in_lod_tensor.dims());
    } else {
      auto& in_sele_rows = in_var->Get<phi::SelectedRows>();
      auto* out_sele_rows = out_var->GetMutable<phi::SelectedRows>();
      out_sele_rows->mutable_value()->Resize(in_sele_rows.value().dims());
      out_sele_rows->set_rows(in_sele_rows.rows());
      out_sele_rows->set_height(in_sele_rows.height());
    }
  }

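  // LoD is not tracked when running eagerly, so LoD sharing is a no-op.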
  void ShareAllLoD(const std::string& in,
                   const std::string& out) const override {
    // do nothing
  }
  void ShareLoD(const std::string& in, const std::string& out, size_t i = 0,
                size_t j = 0) const override {
    // do nothing
  }

  bool IsRuntime() const override { return true; }

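  // True only when a kernel type has been chosen and it uses the MKL-DNN
  // (oneDNN) data layout.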
  bool IsRunMKLDNNKernel() const override {
    return (op_kernel_type_ &&
            (op_kernel_type_->data_layout_ == framework::DataLayout::kMKLDNN));
  }

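  // Expose the raw framework::Variable pointers of a slot, e.g. for the phi
  // infer-meta path.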
  paddle::SmallVector<framework::InferShapeVarPtr, phi::kInputSmallVectorSize>
  GetInputVarPtrs(const std::string& name) const override {
    paddle::SmallVector<framework::InferShapeVarPtr, phi::kInputSmallVectorSize>
        res;
    auto it = var_map_in_->find(name);
    PADDLE_ENFORCE_NE(
        it, var_map_in_->end(),
        platform::errors::NotFound("Cannot find [%s] in inputs.", name));
    for (auto& var : it->second) {
      res.emplace_back(var->MutableVar());
    }
    return res;
  }

  paddle::SmallVector<framework::InferShapeVarPtr, phi::kOutputSmallVectorSize>
  GetOutputVarPtrs(const std::string& name) const override {
    paddle::SmallVector<framework::InferShapeVarPtr,
                        phi::kOutputSmallVectorSize>
        res;
    auto it = var_map_out_->find(name);
    PADDLE_ENFORCE_NE(
        it, var_map_out_->end(),
        platform::errors::NotFound("Cannot find [%s] in outputs.", name));
    for (auto& var : it->second) {
      res.emplace_back(var->MutableVar());
    }
    return res;
  }

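  // GetInputDim requires the slot to hold exactly one variable; GetInputsDim
  // tolerates null entries and reports empty dims for them.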
  DDim GetInputDim(const std::string& name) const override {
    auto it = var_map_in_->find(name);
    PADDLE_ENFORCE_NE(
        it, var_map_in_->end(),
        platform::errors::NotFound("cannot find [%s] in input", name));
    PADDLE_ENFORCE_EQ(
        it->second.size(), 1UL,
        platform::errors::PreconditionNotMet(
            "Input(%s) should hold one element, but it currently holds %d",
            name, it->second.size()));
    return this->GetDim(it->second[0]->MutableVar());
  }

  std::vector<DDim> GetInputsDim(const std::string& name) const override {
    std::vector<DDim> vec_res;
    auto it = var_map_in_->find(name);
    PADDLE_ENFORCE_NE(
        it, var_map_in_->end(),
        platform::errors::NotFound("cannot find [%s] in input", name));
    vec_res.reserve(it->second.size());
    for (size_t i = 0; i < it->second.size(); ++i) {
      if (it->second[i]) {
        vec_res.emplace_back(GetDim(it->second[i]->MutableVar()));
      } else {
        vec_res.emplace_back();
      }
    }

    return vec_res;
  }

  framework::proto::VarType::Type GetInputVarType(
      const std::string& name) const override {
    auto it = var_map_in_->find(name);
    PADDLE_ENFORCE_NE(
        it, var_map_in_->end(),
        platform::errors::NotFound("cannot find [%s] in input", name));
    return framework::ToVarType(it->second[0]->Var().Type());
  }

  std::vector<framework::proto::VarType::Type> GetInputsVarType(
      const std::string& name) const override {
    std::vector<framework::proto::VarType::Type> vec_res;
    auto it = var_map_in_->find(name);
    PADDLE_ENFORCE_NE(
        it, var_map_in_->end(),
        platform::errors::NotFound("cannot find [%s] in input", name));
    vec_res.reserve(it->second.size());
    for (size_t i = 0; i < it->second.size(); ++i) {
      if (it->second[i]) {
        vec_res.emplace_back(
            framework::ToVarType(it->second[i]->MutableVar()->Type()));
      } else {
        vec_res.emplace_back();
      }
    }
    return vec_res;
  }

  std::vector<framework::proto::VarType::Type> GetOutputsVarType(
      const std::string& name) const override {
    std::vector<framework::proto::VarType::Type> vec_res;
    auto it = var_map_out_->find(name);
    PADDLE_ENFORCE_NE(
        it, var_map_out_->end(),
        platform::errors::NotFound("cannot find [%s] in output", name));
    vec_res.reserve(it->second.size());
    for (size_t i = 0; i < it->second.size(); ++i) {
      if (it->second[i]) {
        vec_res.emplace_back(
            framework::ToVarType(it->second[i]->MutableVar()->Type()));
      } else {
        vec_res.emplace_back(static_cast<framework::proto::VarType::Type>(-1));
      }
    }
    return vec_res;
  }

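  // Output slots holding null entries are skipped when writing dims back.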
  void SetOutputDim(const std::string& name, const DDim& dim) override {
    auto it = var_map_out_->find(name);
    PADDLE_ENFORCE_NE(
        it, var_map_out_->end(),
        platform::errors::NotFound("cannot find [%s] in output", name));

    if (it->second[0]) {
      SetDim(it->second[0]->MutableVar(), dim);
    }
  }

  void SetOutputsDim(const std::string& name,
                     const std::vector<DDim>& dims) override {
    auto it = var_map_out_->find(name);
    PADDLE_ENFORCE_NE(
        it, var_map_out_->end(),
        platform::errors::NotFound("cannot find [%s] in output", name));

    PADDLE_ENFORCE_EQ(dims.size(), it->second.size(),
                      platform::errors::InvalidArgument(
                          "The number of dims is expected to be equal to the "
                          "number of Outputs(%s). But received: the number of "
                          "dims = %d, the number of Outputs(%s) = %d.",
                          name, dims.size(), name, it->second.size()));

    for (size_t i = 0; i < dims.size(); ++i) {
      if (it->second[i]) {
        SetDim(it->second[i]->MutableVar(), dims[i]);
      }
    }
  }

  int32_t GetLoDLevel(const std::string& in, size_t i = 0) const override {
    PADDLE_THROW(platform::errors::PermissionDenied(
        "GetLoDLevel function is not supported in dygraph mode"));
  }

  void SetLoDLevel(const std::string& out, int32_t lod_level,
                   size_t j = 0) const override {
    PADDLE_THROW(platform::errors::PermissionDenied(
        "SetLoDLevel function is not supported in dygraph mode"));
  }

  const phi::ArgumentMappingFn* GetPhiArgumentMappingFn() const override {
    return arg_map_fn_;
  }

  const phi::KernelSignature* GetPhiDefaultKernelSignature() const override {
    return default_kernel_signature_;
  }

 protected:
  DDim GetDim(framework::Variable* var) const {
    PADDLE_ENFORCE_NOT_NULL(var, platform::errors::PreconditionNotMet(
                                     "Input variable should not be null"));
    if (var->IsType<framework::LoDTensor>()) {
      return var->Get<framework::LoDTensor>().dims();
    } else if (var->IsType<phi::SelectedRows>()) {
      return var->Get<phi::SelectedRows>().GetCompleteDims();
    } else {
      PADDLE_THROW(platform::errors::PermissionDenied(
          "Only LoDTensor/SelectedRows support 'GetDim', but the Variable's "
          "type_id is: %s.",
          framework::ToTypeName(var->Type())));
    }
  }

  std::vector<DDim> GetRepeatedDims(const std::string& name) const override {
    PADDLE_THROW(platform::errors::PermissionDenied(
        "GetRepeatedDims is not supported in dygraph runtime"));
  }

  void SetDim(framework::Variable* var, const DDim& dim) {
    if (var->IsType<framework::LoDTensor>()) {
      var->GetMutable<framework::LoDTensor>()->Resize(dim);
    } else if (var->IsType<phi::SelectedRows>()) {
      // for SelectedRows only the height (dim[0]) is meaningful here
      var->GetMutable<phi::SelectedRows>()->set_height(dim[0]);
    } else {
      PADDLE_THROW(platform::errors::PermissionDenied(
          "Variable type_id %s, expect LoDTensor/SelectedRows.",
          framework::ToTypeName(var->Type())));
    }
  }

  void SetDims(const std::vector<framework::Variable*>& vars,
               const std::vector<DDim>& dims) {
    size_t length = vars.size();
    PADDLE_ENFORCE_EQ(
        length, dims.size(),
        platform::errors::PreconditionNotMet(
            "The number of vars [%d] should equal the number of dims [%d]",
            length, dims.size()));
    for (size_t i = 0; i < length; ++i) {
      if (vars[i] == nullptr) {
        continue;
      }
      SetDim(vars[i], dims[i]);
    }
  }

  void SetRepeatedDims(const std::string& name,
                       const std::vector<DDim>& dims) override {
    PADDLE_THROW(platform::errors::PermissionDenied(
        "SetRepeatedDims is not supported in dygraph runtime"));
  }

 private:
  const NameVarMap<VarType>* var_map_in_;
  const NameVarMap<VarType>* var_map_out_;
  const framework::AttributeMap* attrs_;
  const framework::AttributeMap* default_attrs_;
  const std::string op_type_;
  const framework::OpKernelType* op_kernel_type_;
  // arg_map_fn_ and default_kernel_signature_ may be nullptr
  const phi::ArgumentMappingFn* arg_map_fn_;
  const phi::KernelSignature* default_kernel_signature_;
};

}  // namespace imperative
}  // namespace paddle