// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <ThreadPool.h>
#include <functional>
#include <future>  // NOLINT
#include <memory>
#include <string>
#include <thread>  // NOLINT
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <vector>
#include "gflags/gflags.h"

#include "paddle/fluid/distributed/common/utils.h"
#include "paddle/fluid/distributed/table/depends/initializers.h"
#include "paddle/fluid/framework/generator.h"
#include "paddle/fluid/framework/lod_tensor.h"
#include "paddle/fluid/framework/rw_lock.h"
#include "paddle/fluid/framework/selected_rows.h"
#include "paddle/fluid/framework/tensor.h"
#include "paddle/fluid/framework/threadpool.h"
#include "paddle/fluid/framework/variable.h"
#include "paddle/fluid/platform/device_context.h"
#include "paddle/fluid/platform/enforce.h"
#include "paddle/fluid/platform/place.h"
#include "paddle/fluid/platform/port.h"
#include "paddle/fluid/string/printf.h"
#include "paddle/fluid/string/string_helper.h"

namespace paddle {
namespace distributed {

// Run mode of the sparse table.  NOTE(review): not referenced inside this
// header — presumably consumed by the table implementations; confirm.
enum Mode { training, infer };

// Per-id payload of the large-scale KV table: one flat float buffer holding
// all value slots back to back, plus the bookkeeping fields used by the
// entry (knock-in) and shrink (knock-out) policies.
struct VALUE {
  // Allocates `length` floats, all zero.
  // NOTE: the fill constructor already value-initializes the buffer, so the
  // original resize() + memset() pair was redundant.
  explicit VALUE(size_t length)
      : length_(length),
        data_(length, 0.0f),
        count_(0),
        unseen_days_(0),
        need_save_(false),
        is_entry_(false) {}

  size_t length_;
  std::vector<float> data_;
  int count_;        // accumulated pull count (fed to the entry policy)
  int unseen_days_;  // use to check knock-out
  bool need_save_;   // whether need to save
  bool is_entry_;    // whether knock-in
};

69 70 71 72 73
inline bool count_entry(std::shared_ptr<VALUE> value, int threshold) {
  return value->count_ >= threshold;
}

inline bool probility_entry(std::shared_ptr<VALUE> value, float threshold) {
T
tangwei12 已提交
74
  UniformInitializer uniform = UniformInitializer({"uniform", "0", "0", "1"});
75 76 77
  return uniform.GetValue() >= threshold;
}

class ValueBlock {
 public:
T
tangwei12 已提交
80 81 82 83 84 85 86 87 88 89
  explicit ValueBlock(const std::vector<std::string> &value_names,
                      const std::vector<int> &value_dims,
                      const std::vector<int> &value_offsets,
                      const std::unordered_map<std::string, int> &value_idx,
                      const std::vector<std::string> &init_attrs,
                      const std::string &entry_attr)
      : value_names_(value_names),
        value_dims_(value_dims),
        value_offsets_(value_offsets),
        value_idx_(value_idx) {
T
Thunderbrook 已提交
90
    for (size_t x = 0; x < value_dims.size(); ++x) {
T
tangwei12 已提交
91
      value_length_ += value_dims[x];
T
tangwei12 已提交
92 93
    }

T
tangwei12 已提交
94 95
    // for Entry
    {
T
tangwei12 已提交
96
      auto slices = string::split_string<std::string>(entry_attr, ":");
97 98
      if (slices[0] == "none") {
        entry_func_ = std::bind(&count_entry, std::placeholders::_1, 0);
T
Thunderbrook 已提交
99
        threshold_ = 0;
T
tangwei12 已提交
100
      } else if (slices[0] == "count_filter_entry") {
T
Thunderbrook 已提交
101 102 103
        threshold_ = std::stoi(slices[1]);
        entry_func_ =
            std::bind(&count_entry, std::placeholders::_1, threshold_);
T
tangwei12 已提交
104
      } else if (slices[0] == "probability_entry") {
T
Thunderbrook 已提交
105
        threshold_ = std::stof(slices[1]);
T
tangwei12 已提交
106
        entry_func_ =
T
Thunderbrook 已提交
107
            std::bind(&probility_entry, std::placeholders::_1, threshold_);
T
tangwei12 已提交
108
      } else {
109
        PADDLE_THROW(platform::errors::InvalidArgument(
T
tangwei12 已提交
110 111
            "Not supported Entry Type : %s, Only support [CountFilterEntry, "
            "ProbabilityEntry]",
112
            slices[0]));
T
tangwei12 已提交
113 114
      }
    }
T
tangwei12 已提交
115 116 117 118 119 120 121 122 123 124 125 126 127 128 129

    // for Initializer
    {
      for (auto &attr : init_attrs) {
        auto slices = string::split_string<std::string>(attr, "&");

        if (slices[0] == "gaussian_random") {
          initializers_.emplace_back(
              std::make_shared<GaussianInitializer>(slices));
        } else if (slices[0] == "fill_constant") {
          initializers_.emplace_back(
              std::make_shared<FillConstantInitializer>(slices));
        } else if (slices[0] == "uniform_random") {
          initializers_.emplace_back(
              std::make_shared<UniformInitializer>(slices));
C
Chengmo 已提交
130 131 132
        } else if (slices[0] == "truncated_gaussian_random") {
          initializers_.emplace_back(
              std::make_shared<TruncatedGaussianInitializer>(slices));
T
tangwei12 已提交
133 134 135 136 137 138
        } else {
          PADDLE_THROW(platform::errors::InvalidArgument(
              "%s can not be supported", attr));
        }
      }
    }
T
tangwei12 已提交
139 140 141 142
  }

  ~ValueBlock() {}

T
tangwei12 已提交
143
  std::vector<float *> Get(const uint64_t &id,
144 145
                           const std::vector<std::string> &value_names,
                           const std::vector<int> &value_dims) {
T
tangwei12 已提交
146 147 148 149
    auto pts = std::vector<float *>();
    pts.reserve(value_names.size());
    auto &values = values_.at(id);
    for (int i = 0; i < static_cast<int>(value_names.size()); i++) {
150 151 152
      PADDLE_ENFORCE_EQ(
          value_dims[i], value_dims_[i],
          platform::errors::InvalidArgument("value dims is not match"));
T
tangwei12 已提交
153 154
      pts.push_back(values->data_.data() +
                    value_offsets_.at(value_idx_.at(value_names[i])));
T
tangwei12 已提交
155
    }
T
tangwei12 已提交
156
    return pts;
T
tangwei12 已提交
157 158
  }

159
  // pull
160 161
  float *Init(const uint64_t &id, const bool with_update = true,
              const int counter = 1) {
162 163 164 165 166
    if (!Has(id)) {
      values_[id] = std::make_shared<VALUE>(value_length_);
    }

    auto &value = values_.at(id);
T
tangwei12 已提交
167

168
    if (with_update) {
169
      AttrUpdate(value, counter);
170 171 172
    }

    return value->data_.data();
T
tangwei12 已提交
173 174
  }

T
Thunderbrook 已提交
175 176 177 178 179 180 181 182 183 184 185 186 187 188 189
  VALUE *InitGet(const uint64_t &id, const bool with_update = true,
                 const int counter = 1) {
    if (!Has(id)) {
      values_[id] = std::make_shared<VALUE>(value_length_);
    }

    auto &value = values_.at(id);

    if (with_update) {
      AttrUpdate(value, counter);
    }

    return value.get();
  }

190
  void AttrUpdate(std::shared_ptr<VALUE> value, const int counter) {
191 192
    // update state
    value->unseen_days_ = 0;
193
    value->count_ += counter;
194 195 196 197 198

    if (!value->is_entry_) {
      value->is_entry_ = entry_func_(value);
      if (value->is_entry_) {
        // initialize
T
Thunderbrook 已提交
199
        for (size_t x = 0; x < value_names_.size(); ++x) {
200 201 202
          initializers_[x]->GetValue(value->data_.data() + value_offsets_[x],
                                     value_dims_[x]);
        }
T
tangwei12 已提交
203
        value->need_save_ = true;
T
tangwei12 已提交
204
      }
T
tangwei12 已提交
205 206
    } else {
      value->need_save_ = true;
T
tangwei12 已提交
207
    }
208 209

    return;
T
tangwei12 已提交
210 211
  }

212 213 214 215 216 217 218 219 220
  // dont jude if (has(id))
  float *Get(const uint64_t &id) {
    auto &value = values_.at(id);
    return value->data_.data();
  }

  // for load, to reset count, unseen_days
  std::shared_ptr<VALUE> GetValue(const uint64_t &id) { return values_.at(id); }

T
tangwei12 已提交
221
  bool GetEntry(const uint64_t &id) {
222
    auto &value = values_.at(id);
T
tangwei12 已提交
223
    return value->is_entry_;
T
tangwei12 已提交
224 225
  }

226 227 228 229
  void SetEntry(const uint64_t &id, const bool state) {
    auto &value = values_.at(id);
    value->is_entry_ = state;
  }
T
tangwei12 已提交
230

231 232 233 234 235 236 237 238 239
  void Shrink(const int threshold) {
    for (auto iter = values_.begin(); iter != values_.end();) {
      auto &value = iter->second;
      value->unseen_days_++;
      if (value->unseen_days_ >= threshold) {
        iter = values_.erase(iter);
      } else {
        ++iter;
      }
T
tangwei12 已提交
240
    }
241
    return;
T
tangwei12 已提交
242 243
  }

T
Thunderbrook 已提交
244 245
  float GetThreshold() { return threshold_; }

T
tangwei12 已提交
246 247 248 249 250 251 252 253 254 255 256
 private:
  bool Has(const uint64_t id) {
    auto got = values_.find(id);
    if (got == values_.end()) {
      return false;
    } else {
      return true;
    }
  }

 public:
T
tangwei12 已提交
257 258
  std::unordered_map<uint64_t, std::shared_ptr<VALUE>> values_;
  size_t value_length_ = 0;
T
tangwei12 已提交
259 260

 private:
T
tangwei12 已提交
261 262 263 264 265
  const std::vector<std::string> &value_names_;
  const std::vector<int> &value_dims_;
  const std::vector<int> &value_offsets_;
  const std::unordered_map<std::string, int> &value_idx_;

266
  std::function<bool(std::shared_ptr<VALUE>)> entry_func_;
T
tangwei12 已提交
267
  std::vector<std::shared_ptr<Initializer>> initializers_;
T
Thunderbrook 已提交
268
  float threshold_;
T
tangwei12 已提交
269 270 271 272
};

}  // namespace distributed
}  // namespace paddle