/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License. */

#include "paddle/fluid/framework/data_set.h"

#include <algorithm>
#include <random>

#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/message.h"
#include "google/protobuf/text_format.h"

#include "paddle/fluid/framework/data_feed_factory.h"
#include "paddle/fluid/framework/io/fs.h"
#include "paddle/fluid/platform/timer.h"

namespace paddle {
namespace framework {

// constructor
28
template <typename T>
D
dongdaxiang 已提交
29 30
DatasetImpl<T>::DatasetImpl() {
  thread_num_ = 1;
31 32
  trainer_num_ = 1;
  file_idx_ = 0;
D
dongdaxiang 已提交
33
}
// set filelist, file_idx_ will reset to zero.
36 37
template <typename T>
void DatasetImpl<T>::SetFileList(const std::vector<std::string>& filelist) {
38
  VLOG(3) << "filelist size: " << filelist.size();
39
  filelist_ = filelist;
40
  file_idx_ = 0;
41 42
}

// set expect thread num. actually it may change
44 45
template <typename T>
void DatasetImpl<T>::SetThreadNum(int thread_num) {
46
  VLOG(3) << "SetThreadNum thread_num=" << thread_num;
47 48 49
  thread_num_ = thread_num;
}

// if you run distributed, and want to do global shuffle,
// set this before global shuffle.
// be sure you call CreateReaders before SetTrainerNum
53
template <typename T>
X
xujiaqi01 已提交
54 55
void DatasetImpl<T>::SetTrainerNum(int trainer_num) {
  trainer_num_ = trainer_num;
56 57 58 59 60 61 62 63 64 65 66 67 68
  // should inform reader of trainer_num directly
  for (auto reader : readers_) {
    reader->SetTrainerNum(trainer_num);
  }
}

template <typename T>
void DatasetImpl<T>::SetHdfsConfig(const std::string& fs_name,
                                   const std::string& fs_ugi) {
  std::string cmd = std::string("hadoop fs");
  cmd += " -D fs.default.name=" + fs_name;
  cmd += " -D hadoop.job.ugi=" + fs_ugi;
  paddle::framework::hdfs_set_command(cmd);
X
xujiaqi01 已提交
69
}

template <typename T>
void DatasetImpl<T>::SetDataFeedDesc(const std::string& data_feed_desc_str) {
73 74
  google::protobuf::TextFormat::ParseFromString(data_feed_desc_str,
                                                &data_feed_desc_);
75 76
}

// readers_.size() may not be equal to thread_num_,
// it changes when filelist_.size() < thread_num_
79 80
template <typename T>
std::vector<std::shared_ptr<paddle::framework::DataFeed>>&
D
dongdaxiang 已提交
81
DatasetImpl<T>::GetReaders() {
82 83 84
  return readers_;
}

// load data into memory, Dataset hold this memory,
// which will later be fed into readers' channel
87 88 89
template <typename T>
void DatasetImpl<T>::LoadIntoMemory() {
  VLOG(3) << "DatasetImpl<T>::LoadIntoMemory() begin";
90 91
  platform::Timer timeline;
  timeline.Start();
92 93 94 95 96
  if (readers_.size() == 0) {
    CreateReaders();
  }
  std::vector<std::thread> load_threads;
  for (int64_t i = 0; i < thread_num_; ++i) {
D
dongdaxiang 已提交
97 98
    load_threads.push_back(std::thread(
        &paddle::framework::DataFeed::LoadIntoMemory, readers_[i].get()));
99 100 101 102
  }
  for (std::thread& t : load_threads) {
    t.join();
  }
103 104 105 106
  timeline.Pause();
  VLOG(3) << "DatasetImpl<T>::LoadIntoMemory() end"
          << ", memory data size=" << memory_data_.size()
          << ", cost time=" << timeline.ElapsedSec() << " seconds";
107 108
}

// do local shuffle
110 111 112
template <typename T>
void DatasetImpl<T>::LocalShuffle() {
  VLOG(3) << "DatasetImpl<T>::LocalShuffle() begin";
113 114
  platform::Timer timeline;
  timeline.Start();
115 116 117
  if (readers_.size() == 0) {
    CreateReaders();
  }
118 119 120
  // if it is not InMemory, memory_data_ is empty
  std::random_shuffle(memory_data_.begin(), memory_data_.end());

121 122
  std::vector<std::thread> local_shuffle_threads;
  for (int64_t i = 0; i < thread_num_; ++i) {
D
dongdaxiang 已提交
123 124
    local_shuffle_threads.push_back(std::thread(
        &paddle::framework::DataFeed::LocalShuffle, readers_[i].get()));
125 126 127 128
  }
  for (std::thread& t : local_shuffle_threads) {
    t.join();
  }
129
  std::vector<T>().swap(memory_data_);
130 131 132
  timeline.Pause();
  VLOG(3) << "DatasetImpl<T>::LocalShuffle() end, cost time="
          << timeline.ElapsedSec() << " seconds";
133 134
}

template <typename T>
void DatasetImpl<T>::GlobalShuffle() {
  VLOG(3) << "DatasetImpl<T>::GlobalShuffle() begin";
138 139
  platform::Timer timeline;
  timeline.Start();
140
  auto fleet_ptr = FleetWrapper::GetInstance();
141
  VLOG(3) << "RegisterClientToClientMsgHandler";
D
dongdaxiang 已提交
142 143 144 145
  fleet_ptr->RegisterClientToClientMsgHandler(
      0, [this](int msg_type, int client_id, const std::string& msg) -> int {
        return this->ReceiveFromClient(msg_type, client_id, msg);
      });
146 147 148 149 150
  if (readers_.size() == 0) {
    CreateReaders();
  }
  // if it is not InMemory, memory_data_ is empty
  std::random_shuffle(memory_data_.begin(), memory_data_.end());
X
xujiaqi01 已提交
151
  VLOG(3) << "start global shuffle threads";
152
  std::vector<std::thread> global_shuffle_threads;
153
  for (int i = 0; i < thread_num_; ++i) {
D
dongdaxiang 已提交
154 155
    global_shuffle_threads.push_back(std::thread(
        &paddle::framework::DataFeed::GlobalShuffle, readers_[i].get()));
156 157 158
  }
  for (std::thread& t : global_shuffle_threads) {
    t.join();
159
  }
160 161 162 163
  std::vector<T>().swap(memory_data_);
  timeline.Pause();
  VLOG(3) << "DatasetImpl<T>::GlobalShuffle() end, cost time="
          << timeline.ElapsedSec() << " seconds";
164 165
}

template <typename T>
void DatasetImpl<T>::CreateReaders() {
168
  VLOG(3) << "Calling CreateReaders()";
169
  CHECK(thread_num_ > 0) << "thread_num should > 0";
170 171 172 173 174 175 176 177 178 179 180 181 182
  int file_cnt = filelist_.size();
  int memory_data_size = memory_data_.size();
  if (memory_data_size != 0 && thread_num_ > memory_data_size) {
    VLOG(3) << "Dataset thread num = " << thread_num_
            << ", memory data size = " << memory_data_size
            << ". Changing Dataset thread num = " << memory_data_size;
    thread_num_ = memory_data_size;
  } else if (file_cnt != 0 && thread_num_ > file_cnt) {
    VLOG(3) << "Dataset thread num = " << thread_num_
            << ", file num = " << file_cnt
            << ". Changing Dataset thread num = " << file_cnt;
    thread_num_ = file_cnt;
  }
183 184
  VLOG(3) << "thread_num in Readers: " << thread_num_;
  VLOG(3) << "readers size: " << readers_.size();
185
  VLOG(3) << "Filelist size in readers: " << filelist_.size();
186 187 188
  if (readers_.size() != 0) {
    return;
  }
189
  VLOG(3) << "data feed class name: " << data_feed_desc_.name();
190
  for (int i = 0; i < thread_num_; ++i) {
191 192
    readers_.push_back(DataFeedFactory::CreateDataFeed(data_feed_desc_.name()));
    readers_.back()->Init(data_feed_desc_);
193 194 195 196 197
    readers_.back()->SetMemoryData(&memory_data_);
    readers_.back()->SetMemoryDataMutex(&mutex_for_update_memory_data_);
    readers_.back()->SetThreadId(i);
    readers_.back()->SetThreadNum(thread_num_);
    readers_.back()->SetTrainerNum(trainer_num_);
198 199 200
    readers_.back()->SetFileListMutex(&mutex_for_pick_file_);
    readers_.back()->SetFileListIndex(&file_idx_);
    readers_.back()->SetFileList(filelist_);
201 202 203
  }
}

template <typename T>
void DatasetImpl<T>::DestroyReaders() {
  VLOG(3) << "Calling DestroyReaders()";
  // clear memory_data_ before fill it
  // because if LoadIntoMemory but no Shuffle,
  // memory_data_ has empty data which has been std::move to channel
  if (memory_data_.size() != 0) {
    std::vector<T>().swap(memory_data_);
  }
  std::vector<std::thread> fill_threads;
  for (int i = 0; i < thread_num_; ++i) {
D
dongdaxiang 已提交
215 216 217
    fill_threads.push_back(
        std::thread(&paddle::framework::DataFeed::FillChannelToMemoryData,
                    readers_[i].get()));
218 219 220 221 222
  }
  for (std::thread& t : fill_threads) {
    t.join();
  }
  std::vector<std::shared_ptr<paddle::framework::DataFeed>>().swap(readers_);
223
  VLOG(3) << "readers size: " << readers_.size();
224 225 226 227
}

template <typename T>
int DatasetImpl<T>::ReceiveFromClient(int msg_type, int client_id,
D
dongdaxiang 已提交
228
                                      const std::string& msg) {
229 230 231 232 233 234
  VLOG(3) << "ReceiveFromClient msg_type=" << msg_type
          << ", client_id=" << client_id << ", msg length="
          << msg.length();
  auto fleet_ptr = FleetWrapper::GetInstance();
  int64_t index = fleet_ptr->LocalRandomEngine()() % thread_num_;
  VLOG(3) << "ramdom index=" << index;
235 236 237 238
  readers_[index]->PutInsToChannel(msg);
  return 0;
}

// explicit instantiation
template class DatasetImpl<std::vector<MultiSlotType>>;

}  // end namespace framework
}  // end namespace paddle