/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License. */

#include "paddle/fluid/framework/data_set.h"
#include <algorithm>  // std::random_shuffle
#include <random>
#include <thread>  // std::thread
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/message.h"
#include "google/protobuf/text_format.h"
#include "paddle/fluid/framework/data_feed_factory.h"
#include "paddle/fluid/framework/fleet/fleet_wrapper.h"  // FleetWrapper::GetInstance(); may already be pulled in via data_set.h

namespace paddle {
namespace framework {

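// Default to a single worker thread; callers can change this with SetThreadNum().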
template <typename T>
DatasetImpl<T>::DatasetImpl() {
  thread_num_ = 1;
}

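// Records the list of input files. The list is handed to the readers in
// CreateReaders(); only readers_[0] receives the full file list.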
template <typename T>
void DatasetImpl<T>::SetFileList(const std::vector<std::string>& filelist) {
  VLOG(3) << "filelist size: " << filelist.size();
  filelist_ = filelist;
  /*
  int file_cnt = filelist_.size();
  if (thread_num_ > file_cnt) {
    VLOG(1) << "DataSet thread num = " << thread_num_
            << ", file num = " << file_cnt
            << ". Changing DataSet thread num = " << file_cnt;
    thread_num_ = file_cnt;
  }*/
}

// Note: SetFileList() must be called before SetThreadNum(); otherwise
// file_cnt below is 0 and the thread-count capping is skipped.
// This ordering requirement is not user friendly.
template <typename T>
void DatasetImpl<T>::SetThreadNum(int thread_num) {
  int file_cnt = filelist_.size();
  if (file_cnt != 0 && thread_num > file_cnt) {
    VLOG(1) << "DataSet thread num = " << thread_num
            << ", file num = " << file_cnt
            << ". Changing DataSet thread num = " << file_cnt;
    thread_num = file_cnt;
  }
  thread_num_ = thread_num;
}

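// Records how many trainers participate; the value is forwarded to every
// reader in CreateReaders() and is used during global shuffle.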
template <typename T>
void DatasetImpl<T>::SetTrainerNum(int trainer_num) {
  trainer_num_ = trainer_num;
}

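// Parses the DataFeedDesc protobuf from its text-format string; the parsed
// descriptor drives reader construction in CreateReaders().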
template <typename T>
void DatasetImpl<T>::SetDataFeedDesc(const std::string& data_feed_desc_str) {
  google::protobuf::TextFormat::ParseFromString(data_feed_desc_str,
                                                &data_feed_desc_);
}

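// Returns the readers created by CreateReaders(), one per thread.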
template <typename T>
std::vector<std::shared_ptr<paddle::framework::DataFeed>>&
DatasetImpl<T>::GetReaders() {
  return readers_;
}

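// Loads the dataset into memory, running DataFeed::LoadIntoMemory() in one
// thread per reader and joining all threads before returning.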
template <typename T>
void DatasetImpl<T>::LoadIntoMemory() {
  VLOG(3) << "DatasetImpl<T>::LoadIntoMemory() begin";
  if (readers_.size() == 0) {
    CreateReaders();
  }
  std::vector<std::thread> load_threads;
  for (int64_t i = 0; i < thread_num_; ++i) {
    load_threads.push_back(std::thread(
        &paddle::framework::DataFeed::LoadIntoMemory, readers_[i].get()));
  }
  for (std::thread& t : load_threads) {
    t.join();
  }
  VLOG(3) << "DatasetImpl<T>::LoadIntoMemory() end";
}

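// Shuffles the data on this trainer only: memory_data_ is shuffled first, then
// every reader runs DataFeed::LocalShuffle() in its own thread, and finally
// memory_data_ is released.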
template <typename T>
void DatasetImpl<T>::LocalShuffle() {
  VLOG(3) << "DatasetImpl<T>::LocalShuffle() begin";
  if (readers_.size() == 0) {
    CreateReaders();
  }
  // If the dataset is not loaded in memory, memory_data_ is empty and this
  // shuffle is a no-op.
  std::random_shuffle(memory_data_.begin(), memory_data_.end());

  std::vector<std::thread> local_shuffle_threads;
  for (int64_t i = 0; i < thread_num_; ++i) {
    local_shuffle_threads.push_back(std::thread(
        &paddle::framework::DataFeed::LocalShuffle, readers_[i].get()));
  }
  for (std::thread& t : local_shuffle_threads) {
    t.join();
  }
  std::vector<T>().swap(memory_data_);
  VLOG(3) << "DatasetImpl<T>::LocalShuffle() end";
}

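// Shuffles data across trainers: registers a client-to-client message handler
// (so incoming data reaches ReceiveFromClient()), then runs
// DataFeed::GlobalShuffle() in one thread per reader.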
template <typename T>
void DatasetImpl<T>::GlobalShuffle() {
  VLOG(3) << "DatasetImpl<T>::GlobalShuffle() begin";
  if (readers_.size() == 0) {
    CreateReaders();
  }
  // If the dataset is not loaded in memory, memory_data_ is empty and this
  // shuffle is a no-op.
  std::random_shuffle(memory_data_.begin(), memory_data_.end());
  auto fleet_ptr = FleetWrapper::GetInstance();
  VLOG(3) << "RegisterClientToClientMsgHandler";
  fleet_ptr->RegisterClientToClientMsgHandler(
      0, [this](int msg_type, int client_id, const std::string& msg) -> int {
        return this->ReceiveFromClient(msg_type, client_id, msg);
      });
  VLOG(3) << "start global shuffle threads";
  std::vector<std::thread> global_shuffle_threads;
  for (int i = 0; i < thread_num_; ++i) {
    global_shuffle_threads.push_back(std::thread(
        &paddle::framework::DataFeed::GlobalShuffle, readers_[i].get()));
  }
  for (std::thread& t : global_shuffle_threads) {
    t.join();
  }
  VLOG(3) << "DatasetImpl<T>::GlobalShuffle() end";
}

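// Creates thread_num_ readers from data_feed_desc_, wiring each one to the
// shared memory_data_ and its mutex; the file list goes to readers_[0] only.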
template <typename T>
void DatasetImpl<T>::CreateReaders() {
  VLOG(3) << "Calling CreateReaders()";
  CHECK(thread_num_ > 0) << "thread_num should be greater than 0";
  VLOG(3) << "thread_num in Readers: " << thread_num_;
  VLOG(3) << "readers size: " << readers_.size();
  if (readers_.size() != 0) {
    return;
  }
  VLOG(3) << "data feed class name: " << data_feed_desc_.name();
  for (int i = 0; i < thread_num_; ++i) {
    readers_.push_back(DataFeedFactory::CreateDataFeed(data_feed_desc_.name()));
    readers_.back()->Init(data_feed_desc_);
    readers_.back()->SetMemoryData(&memory_data_);
    readers_.back()->SetMemoryDataMutex(&mutex_for_update_memory_data_);
    readers_.back()->SetThreadId(i);
    readers_.back()->SetThreadNum(thread_num_);
    readers_.back()->SetTrainerNum(trainer_num_);
  }
  VLOG(3) << "Filelist size in readers: " << filelist_.size();
  readers_[0]->SetFileList(filelist_);
}

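// Drains any data left in the readers' channels back into memory_data_ (one
// fill thread per reader), then drops all reader instances.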
template <typename T>
void DatasetImpl<T>::DestroyReaders() {
  VLOG(3) << "Calling DestroyReaders()";
  // Clear memory_data_ before refilling it: if LoadIntoMemory() was called but
  // no shuffle happened, memory_data_ holds empty elements whose contents were
  // std::move'd into the channel.
  if (memory_data_.size() != 0) {
    std::vector<T>().swap(memory_data_);
  }
  std::vector<std::thread> fill_threads;
  for (int i = 0; i < thread_num_; ++i) {
    fill_threads.push_back(
        std::thread(&paddle::framework::DataFeed::FillChannelToMemoryData,
                    readers_[i].get()));
  }
  for (std::thread& t : fill_threads) {
    t.join();
  }
  std::vector<std::shared_ptr<paddle::framework::DataFeed>>().swap(readers_);
  LOG(WARNING) << "readers size: " << readers_.size();
}
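
// Callback used by the GlobalShuffle() message handler: pushes a serialized
// batch of instances received from another client into one reader's channel.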

template <typename T>
int DatasetImpl<T>::ReceiveFromClient(int msg_type, int client_id,
                                      const std::string& msg) {
  // TODO: pick the reader index at random (see the commented-out line below).
  // int64_t index = paddle::ps::local_random_engine()() % thread_num_;
  int64_t index = 0;
  readers_[index]->PutInsToChannel(msg);
  return 0;
}

// explicit instantiation
template class DatasetImpl<std::vector<MultiSlotType>>;

}  // end namespace framework
}  // end namespace paddle