/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/var_desc.h"

#include "glog/logging.h"
#include "paddle/fluid/framework/data_type.h"
#include "paddle/fluid/platform/enforce.h"

namespace paddle {
namespace framework {

24
proto::VarType::Type VarDesc::GetType() const { return desc_.type().type(); }
Q
QI JUN 已提交
25

26 27 28
void VarDesc::SetType(proto::VarType::Type type) {
  desc_.mutable_type()->set_type(type);
}
Q
QI JUN 已提交
29

Y
Yu Yang 已提交
30
void VarDesc::SetShape(const std::vector<int64_t> &dims) {
Y
Yu Yang 已提交
31
  VectorToRepeated(dims, mutable_tensor_desc()->mutable_dims());
F
fengjiayi 已提交
32 33
}

F
fengjiayi 已提交
34
void VarDesc::SetTensorDescNum(size_t num) {
35 36 37 38
  switch (desc_.type().type()) {
    case proto::VarType::READER: {
      auto *lod_tensors_ptr =
          desc_.mutable_type()->mutable_reader()->mutable_lod_tensor();
F
fengjiayi 已提交
39 40 41 42 43 44 45 46
      lod_tensors_ptr->Clear();
      for (size_t i = 0; i < num; ++i) {
        lod_tensors_ptr->Add();
      }
      return;
    } break;
    default:
      PADDLE_THROW(
47 48 49
          platform::errors::Unavailable("Setting 'sub_tensor_number' is not "
                                        "supported by the %s type variable.",
                                        this->Name()));
F
fengjiayi 已提交
50 51 52 53
  }
}

size_t VarDesc::GetTensorDescNum() const {
54 55 56
  switch (desc_.type().type()) {
    case proto::VarType::READER:
      return desc_.type().reader().lod_tensor_size();
F
fengjiayi 已提交
57 58 59
      break;
    default:
      PADDLE_THROW(
60 61 62
          platform::errors::Unavailable("Getting 'sub_tensor_number' is not "
                                        "supported by the %s type variable.",
                                        this->Name()));
F
fengjiayi 已提交
63 64 65 66
  }
}

void VarDesc::SetShapes(
F
fengjiayi 已提交
67
    const std::vector<std::vector<int64_t>> &multiple_dims) {
F
fengjiayi 已提交
68
  if (multiple_dims.size() != GetTensorDescNum()) {
M
minqiyang 已提交
69 70 71 72
    VLOG(3) << "WARNING: The number of given shapes(" << multiple_dims.size()
            << ") doesn't match the existing tensor number("
            << GetTensorDescNum()
            << "). The Reader is going to be reinitialized.";
F
fengjiayi 已提交
73 74
    SetTensorDescNum(multiple_dims.size());
  }
75
  std::vector<proto::VarType::TensorDesc *> tensors = mutable_tensor_descs();
F
fengjiayi 已提交
76 77 78 79 80 81 82 83 84 85
  for (size_t i = 0; i < multiple_dims.size(); ++i) {
    VectorToRepeated(multiple_dims[i], tensors[i]->mutable_dims());
  }
}

std::vector<int64_t> VarDesc::GetShape() const {
  return RepeatedToVector(tensor_desc().dims());
}

std::vector<std::vector<int64_t>> VarDesc::GetShapes() const {
86
  std::vector<proto::VarType::TensorDesc> descs = tensor_descs();
F
fengjiayi 已提交
87 88 89 90 91 92 93 94
  std::vector<std::vector<int64_t>> res;
  res.reserve(descs.size());
  for (const auto &tensor_desc : descs) {
    res.push_back(RepeatedToVector(tensor_desc.dims()));
  }
  return res;
}

95
void VarDesc::SetDataType(proto::VarType::Type data_type) {
X
Xin Pan 已提交
96
  mutable_tensor_desc()->set_data_type(data_type);
F
fengjiayi 已提交
97 98
}

F
fengjiayi 已提交
99
void VarDesc::SetDataTypes(
100
    const std::vector<proto::VarType::Type> &multiple_data_type) {
F
fengjiayi 已提交
101
  if (multiple_data_type.size() != GetTensorDescNum()) {
M
minqiyang 已提交
102 103 104 105 106
    VLOG(3) << "WARNING: The number of given data types("
            << multiple_data_type.size()
            << ") doesn't match the existing tensor number("
            << GetTensorDescNum()
            << "). The Reader is going to be reinitialized.";
F
fengjiayi 已提交
107 108
    SetTensorDescNum(multiple_data_type.size());
  }
109 110
  std::vector<proto::VarType::TensorDesc *> tensor_descs =
      mutable_tensor_descs();
F
fengjiayi 已提交
111 112 113
  for (size_t i = 0; i < multiple_data_type.size(); ++i) {
    tensor_descs[i]->set_data_type(multiple_data_type[i]);
  }
F
fengjiayi 已提交
114 115
}

116
proto::VarType::Type VarDesc::GetDataType() const {
X
Xin Pan 已提交
117
  return tensor_desc().data_type();
118
}
Y
Stash  
Yu Yang 已提交
119

120 121 122 123
size_t VarDesc::ElementSize() const {
  return framework::SizeOfType(GetDataType());
}

124
std::vector<proto::VarType::Type> VarDesc::GetDataTypes() const {
125
  std::vector<proto::VarType::TensorDesc> descs = tensor_descs();
126
  std::vector<proto::VarType::Type> res;
F
fengjiayi 已提交
127 128 129 130 131 132 133
  res.reserve(descs.size());
  for (const auto &tensor_desc : descs) {
    res.push_back(tensor_desc.data_type());
  }
  return res;
}

Y
Yu Yang 已提交
134
void VarDesc::SetLoDLevel(int32_t lod_level) {
135 136 137
  switch (desc_.type().type()) {
    case proto::VarType::LOD_TENSOR:
      desc_.mutable_type()->mutable_lod_tensor()->set_lod_level(lod_level);
Y
Yu Yang 已提交
138
      break;
139 140
    case proto::VarType::LOD_TENSOR_ARRAY:
      desc_.mutable_type()->mutable_tensor_array()->set_lod_level(lod_level);
Y
Yu Yang 已提交
141 142
      break;
    default:
143 144 145
      PADDLE_THROW(platform::errors::Unavailable(
          "Setting 'lod_level' is not supported by the %s type variable.",
          this->Name()));
F
fengjiayi 已提交
146 147 148 149
  }
}

void VarDesc::SetLoDLevels(const std::vector<int32_t> &multiple_lod_level) {
F
fengjiayi 已提交
150
  if (multiple_lod_level.size() != GetTensorDescNum()) {
M
minqiyang 已提交
151 152 153 154 155
    VLOG(3) << "WARNING: The number of given lod_levels("
            << multiple_lod_level.size()
            << ") doesn't match the existing tensor number("
            << GetTensorDescNum()
            << "). The Reader is going to be reinitialized.";
F
fengjiayi 已提交
156 157
    SetTensorDescNum(multiple_lod_level.size());
  }
158 159
  switch (desc_.type().type()) {
    case proto::VarType::READER: {
F
fengjiayi 已提交
160
      size_t i = 0;
161 162
      for (auto &lod_tensor :
           *desc_.mutable_type()->mutable_reader()->mutable_lod_tensor()) {
F
fengjiayi 已提交
163 164 165 166
        lod_tensor.set_lod_level(multiple_lod_level[i++]);
      }
    } break;
    default:
167 168 169
      PADDLE_THROW(platform::errors::Unavailable(
          "Setting 'lod_levels' is not supported by the %s type variable",
          this->Name()));
Y
Yu Yang 已提交
170
  }
Y
Stash  
Yu Yang 已提交
171 172
}

173
int32_t VarDesc::GetLoDLevel() const {
174 175 176 177 178
  switch (desc_.type().type()) {
    case proto::VarType::LOD_TENSOR:
      return desc_.type().lod_tensor().lod_level();
    case proto::VarType::LOD_TENSOR_ARRAY:
      return desc_.type().tensor_array().lod_level();
Y
Yu Yang 已提交
179
    default:
180 181 182
      PADDLE_THROW(platform::errors::Unavailable(
          "Getting 'lod_level' is not supported by the %s type variable.",
          this->Name()));
F
fengjiayi 已提交
183 184 185 186 187
  }
}

std::vector<int32_t> VarDesc::GetLoDLevels() const {
  std::vector<int32_t> res;
188 189 190 191
  switch (desc_.type().type()) {
    case proto::VarType::READER:
      res.reserve(desc_.type().reader().lod_tensor_size());
      for (auto &lod_tensor : desc_.type().reader().lod_tensor()) {
F
fengjiayi 已提交
192 193 194 195 196
        res.push_back(lod_tensor.lod_level());
      }
      return res;
      break;
    default:
197 198 199
      PADDLE_THROW(platform::errors::Unavailable(
          "Getting 'lod_levels' is not supported by the %s type variable.",
          this->Name()));
Y
Yu Yang 已提交
200
  }
Y
Stash  
Yu Yang 已提交
201
}
Y
Yu Yang 已提交
202

203
const proto::VarType::TensorDesc &VarDesc::tensor_desc() const {
204 205 206 207 208 209
  PADDLE_ENFORCE_EQ(
      desc_.has_type(), true,
      platform::errors::NotFound("The variable's type was not be set."));
  PADDLE_ENFORCE_EQ(
      desc_.type().has_type(), true,
      platform::errors::NotFound("The variable's type was not be set."));
210 211 212 213 214 215 216
  switch (desc_.type().type()) {
    case proto::VarType::SELECTED_ROWS:
      return desc_.type().selected_rows();
    case proto::VarType::LOD_TENSOR:
      return desc_.type().lod_tensor().tensor();
    case proto::VarType::LOD_TENSOR_ARRAY:
      return desc_.type().tensor_array().tensor();
S
Steffy-zxf 已提交
217 218 219 220
    case proto::VarType::STRINGS:
      return desc_.type().strings();
    case proto::VarType::VOCAB:
      return desc_.type().vocab();
Y
Yu Yang 已提交
221
    default:
222 223 224
      PADDLE_THROW(platform::errors::Unavailable(
          "Getting 'tensor_desc' is not supported by the %s type variable.",
          this->Name()));
F
fengjiayi 已提交
225 226 227
  }
}

228
std::vector<proto::VarType::TensorDesc> VarDesc::tensor_descs() const {
229 230 231
  PADDLE_ENFORCE_EQ(
      desc_.has_type(), true,
      platform::errors::NotFound("The variable's type was not be set."));
232
  std::vector<proto::VarType::TensorDesc> res;
F
fengjiayi 已提交
233
  res.reserve(GetTensorDescNum());
234 235 236
  switch (desc_.type().type()) {
    case proto::VarType::READER:
      for (const auto &lod_tensor : desc_.type().reader().lod_tensor()) {
F
fengjiayi 已提交
237 238 239 240
        res.push_back(lod_tensor.tensor());
      }
      return res;
    default:
241 242 243
      PADDLE_THROW(platform::errors::Unavailable(
          "Getting 'tensor_descs' is not supported by the %s type variable.",
          this->Name()));
Y
Yu Yang 已提交
244 245 246
  }
}

247
proto::VarType::TensorDesc *VarDesc::mutable_tensor_desc() {
248 249 250 251 252 253
  PADDLE_ENFORCE_EQ(
      desc_.has_type(), true,
      platform::errors::NotFound("The variable's type was not be set."));
  PADDLE_ENFORCE_EQ(
      desc_.type().has_type(), true,
      platform::errors::NotFound("The variable's type was not be set."));
254 255 256 257 258 259 260
  switch (desc_.type().type()) {
    case proto::VarType::SELECTED_ROWS:
      return desc_.mutable_type()->mutable_selected_rows();
    case proto::VarType::LOD_TENSOR:
      return desc_.mutable_type()->mutable_lod_tensor()->mutable_tensor();
    case proto::VarType::LOD_TENSOR_ARRAY:
      return desc_.mutable_type()->mutable_tensor_array()->mutable_tensor();
S
Steffy-zxf 已提交
261 262 263 264
    case proto::VarType::STRINGS:
      return desc_.mutable_type()->mutable_strings();
    case proto::VarType::VOCAB:
      return desc_.mutable_type()->mutable_vocab();
Y
Yu Yang 已提交
265
    default:
F
fengjiayi 已提交
266
      PADDLE_THROW(
267 268 269
          platform::errors::Unavailable("Getting 'mutable_tensor_desc' is not "
                                        "supported by the %s type variable.",
                                        this->Name()));
Y
Yu Yang 已提交
270 271
  }
}
F
fengjiayi 已提交
272

273
std::vector<proto::VarType::TensorDesc *> VarDesc::mutable_tensor_descs() {
274 275 276 277 278 279
  PADDLE_ENFORCE_EQ(
      desc_.has_type(), true,
      platform::errors::NotFound("The variable's type was not be set."));
  PADDLE_ENFORCE_EQ(
      desc_.type().has_type(), true,
      platform::errors::NotFound("The variable's type was not be set."));
280
  std::vector<proto::VarType::TensorDesc *> res;
F
fengjiayi 已提交
281
  res.reserve(GetTensorDescNum());
282 283 284 285
  switch (desc_.type().type()) {
    case proto::VarType::READER:
      for (auto &lod_tensor :
           *desc_.mutable_type()->mutable_reader()->mutable_lod_tensor()) {
F
fengjiayi 已提交
286 287 288 289
        res.push_back(lod_tensor.mutable_tensor());
      }
      return res;
    default:
290 291 292
      PADDLE_THROW(platform::errors::Unavailable(
          "Getting 'tensor_descs' is not supported by the %s type variable.",
          this->Name()));
F
fengjiayi 已提交
293 294 295
  }
}

296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335
std::vector<std::string> VarDesc::AttrNames() const {
  std::vector<std::string> retv;
  retv.reserve(attrs_.size());
  for (auto &attr : attrs_) {
    retv.push_back(attr.first);
  }
  return retv;
}

// Removes attribute `name`; erasing a non-existent attribute is a no-op.
void VarDesc::RemoveAttr(const std::string &name) {
  attrs_.erase(name);
}

// Sets attribute `name` to value `v`. Only INT, STRING, and INTS attribute
// types are accepted; any other type raises InvalidArgument.
// NOTE(review): the `which() - 1` cast assumes the Attribute variant's
// alternative order maps directly onto proto::AttrType — verify if the
// Attribute definition ever changes.
void VarDesc::SetAttr(const std::string &name, const Attribute &v) {
  // NOTICE(sandyhouse): pybind11 will take the empty list in python as
  // the std::vector<int> type in C++; so we have to change the attr's type
  // here if we meet this issue
  proto::AttrType attr_type = static_cast<proto::AttrType>(v.which() - 1);
  if (attr_type == proto::AttrType::INTS &&
      BOOST_GET_CONST(std::vector<int>, v).size() == 0u) {
    // Find current attr via attr name and set the correct attribute value
    this->attrs_[name] = std::vector<int>();
    return;
  }
  bool valid = attr_type == proto::AttrType::INT ||
               attr_type == proto::AttrType::STRING ||
               attr_type == proto::AttrType::INTS;
  PADDLE_ENFORCE_EQ(valid, true, platform::errors::InvalidArgument(
                                     "The value for attr (%s) must be "
                                     "one of list or int or string.",
                                     name));

  this->attrs_[name] = v;
}

// Returns attribute `name`; a missing attribute is a hard error.
Attribute VarDesc::GetAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(it, attrs_.end(), platform::errors::NotFound(
                                          "Attribute %s is not found.", name));
  return it->second;
}

336 337 338 339 340
bool operator==(const VarDesc &left, const VarDesc &right) {
  return left.Proto()->SerializeAsString() ==
         right.Proto()->SerializeAsString();
}

}  // namespace framework
}  // namespace paddle