// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <ThreadPool.h>
#include <assert.h>
#include <pthread.h>
#include <memory>
#include <string>
#include <unordered_map>
#include <vector>
#include "Eigen/Dense"
#include "paddle/fluid/distributed/ps/table/accessor.h"
#include "paddle/fluid/distributed/ps/table/common_table.h"
#include "paddle/fluid/distributed/ps/table/depends/dense.h"
#include "paddle/fluid/distributed/ps/table/depends/initializers.h"
#include "paddle/fluid/string/string_helper.h"

namespace paddle {
namespace distributed {

class DenseOptimizer;

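// CommonDenseTable is a parameter-server table that keeps its parameter and
// optimizer state as contiguous dense float vectors (one column per named
// value, e.g. the parameter plus momentum / learning-rate columns). Trainers
// read the full parameter with PullDense and send gradients with PushDense;
// a DenseOptimizer applies the updates. In sync mode, pushed gradients are
// pooled in pull_reservoir_ and applied when Pour() is called.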
class CommonDenseTable : public Table {
 public:
  CommonDenseTable() {}
  virtual ~CommonDenseTable() {}
  int32_t Initialize() override;
  int32_t InitializeShard() override { return 0; }
  void CreateInitializer(const std::string& attr, const std::string& name);
  int32_t InitializeValue();
  int32_t InitializeOptimizer();

  // Service-facing entry points; dispatch to PullDense/PushDense based on the
  // TableContext.
  int32_t Pull(TableContext& context) override;
  int32_t Push(TableContext& context) override;

  int32_t PullDense(float* pull_values, size_t num);
  int32_t PushDenseParam(const float* values, size_t num);
  int32_t PushDense(const float* values, size_t num);
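  // Sync mode: applies the gradients pooled by PushDense since the last call.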
  int32_t Pour() override;
  int32_t SetGlobalLR(float* lr) override;

  int32_t Load(const std::string& path, const std::string& param) override;
  int32_t Save(const std::string& path, const std::string& param) override;

  int32_t Flush() override { return 0; }
  int32_t Shrink(const std::string& param) override { return 0; }
  void Clear() override { return; }
  void* GetShard(size_t shard_idx) override { return nullptr; }

 protected:
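  // Applies a gradient buffer to values_ through optimizer_; used by the
  // async push path directly and by Pour() in sync mode.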
  int32_t _PushDense(const float* values, size_t num);

 private:
  const int task_pool_size_ = 10;  // dense values are split across this many update tasks
  bool sync = true;  // sync mode: pushed gradients are pooled and applied in Pour()
  std::vector<std::shared_ptr<::ThreadPool>> _shards_task_pool;  // workers for parallel updates
  int param_dim_ = 0;  // dimension of the trainable parameter
  int param_idx_ = 0;  // index of the parameter column in values_
  std::shared_ptr<DenseOptimizer> optimizer_;
  std::vector<std::vector<float>> values_;  // one dense float vector per named column
  ReservoirValue<float> pull_reservoir_;    // pools pushed gradients in sync mode
  std::unordered_map<std::string, Initializer*> initializers_;  // column name -> initializer
  std::unordered_map<std::string, int> names_index_;            // column name -> column index
  int total_dim_ = 0;
  int fixed_len_params_dim_ = 0;    // used for save/load
  std::vector<int> param_col_ids_;  // used for save/load
};
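
// Illustrative usage sketch (not part of the API): in practice the PS framework
// constructs the table from its TableParameter config, which defines the value
// columns and the optimizer before Initialize() runs. `dim` below stands for
// the configured parameter dimension.
//
//   CommonDenseTable table;
//   table.Initialize();
//   std::vector<float> params(dim), grads(dim);
//   table.PullDense(params.data(), dim);   // fetch current parameters
//   table.PushDense(grads.data(), dim);    // push gradients; the optimizer applies them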

}  // namespace distributed
}  // namespace paddle