/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <algorithm>
#include <string>
#include <vector>

#include "glog/logging.h"
#include "paddle/fluid/framework/framework.pb.h"

namespace paddle {
namespace framework {

// Helpers for converting between std::vector and protobuf repeated fields.
template <typename T>
inline std::vector<T> RepeatedToVector(
    const google::protobuf::RepeatedField<T> &repeated_field) {
  std::vector<T> ret;
  ret.reserve(repeated_field.size());
  std::copy(repeated_field.begin(), repeated_field.end(),
            std::back_inserter(ret));
  return ret;
}

template <typename T, typename RepeatedField>
inline void VectorToRepeated(const std::vector<T> &vec,
                             RepeatedField *repeated_field) {
  repeated_field->Clear();
  repeated_field->Reserve(vec.size());
  for (const auto &elem : vec) {
    *repeated_field->Add() = elem;
  }
}

// Specialization for std::vector<bool>: its elements are bit proxies rather
// than real references, so they are copied by value.
template <typename RepeatedField>
inline void VectorToRepeated(const std::vector<bool> &vec,
                             RepeatedField *repeated_field) {
  repeated_field->Clear();
  repeated_field->Reserve(vec.size());
  for (auto elem : vec) {
    *repeated_field->Add() = elem;
  }
}

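// VarDesc wraps the proto::VarDesc message and provides typed accessors for
// a variable's name, type, shape(s), data type(s) and LoD level(s), plus
// flags such as persistable, is_parameter, stop_gradient and need_check_feed.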
class VarDesc {
 public:
  explicit VarDesc(const std::string &name) {
    desc_.set_name(name);
    // TODO(paddle-dev): Why default to lodtensor.
    desc_.mutable_type()->set_type(proto::VarType::LOD_TENSOR);
  }

  explicit VarDesc(const proto::VarDesc &desc) : desc_(desc) {}

  proto::VarDesc *Proto() { return &desc_; }

  const proto::VarDesc *Proto() const { return &desc_; }

  std::string Name() const { return desc_.name(); }

  void SetName(std::string name) { desc_.set_name(name); }

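  // A VarDesc may describe more than one tensor (see tensor_descs() in the
  // private section). SetTensorDescNum fixes how many, and the plural
  // Set*/Get* methods below operate on all of them.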
  void SetTensorDescNum(size_t num);

  size_t GetTensorDescNum() const;

  void SetShape(const std::vector<int64_t> &dims);

  void SetShapes(const std::vector<std::vector<int64_t>> &multiple_dims);

  std::vector<int64_t> GetShape() const;

  std::vector<std::vector<int64_t>> GetShapes() const;

  void SetDataType(proto::VarType::Type data_type);

  void SetDataTypes(
      const std::vector<proto::VarType::Type> &multiple_data_type);

  proto::VarType::Type GetDataType() const;

  std::vector<proto::VarType::Type> GetDataTypes() const;

  void SetLoDLevel(int32_t lod_level);

  void SetLoDLevels(const std::vector<int32_t> &multiple_lod_level);

  int32_t GetLoDLevel() const;

  std::vector<int32_t> GetLoDLevels() const;

  proto::VarType::Type GetType() const;

  void SetType(proto::VarType::Type type);

  bool Persistable() const { return desc_.persistable(); }

  void SetPersistable(bool persistable) { desc_.set_persistable(persistable); }

  bool IsParameter() const { return desc_.is_parameter(); }

  void SetIsParameter(bool is_parameter) {
    desc_.set_is_parameter(is_parameter);
  }

  void ClearIsParameter() { desc_.clear_is_parameter(); }

  bool HasIsParameter() const { return desc_.has_is_parameter(); }

  bool StopGradient() const { return desc_.stop_gradient(); }

  void SetStopGradient(bool stop_gradient) {
    desc_.set_stop_gradient(stop_gradient);
  }

  void ClearStopGradient() { desc_.clear_stop_gradient(); }

  bool HasStopGradient() const { return desc_.has_stop_gradient(); }

  bool NeedCheckFeed() const { return desc_.need_check_feed(); }

  void SetNeedCheckFeed(bool need_check_feed) {
    desc_.set_need_check_feed(need_check_feed);
  }

 private:
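  // Accessors for the underlying proto::VarType::TensorDesc message(s)
  // stored in desc_.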
  const proto::VarType::TensorDesc &tensor_desc() const;
  std::vector<proto::VarType::TensorDesc> tensor_descs() const;
  proto::VarType::TensorDesc *mutable_tensor_desc();
  std::vector<proto::VarType::TensorDesc *> mutable_tensor_descs();

  proto::VarDesc desc_;
};

bool operator==(const VarDesc &left, const VarDesc &right);
}  // namespace framework
}  // namespace paddle