/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License. */

#include "paddle/fluid/framework/data_set.h"
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/message.h"
#include "google/protobuf/text_format.h"
#include "paddle/fluid/framework/data_feed_factory.h"

namespace paddle {
namespace framework {

Dataset::Dataset() { thread_num_ = 1; }

void Dataset::SetFileList(const std::vector<std::string>& filelist) {
  VLOG(3) << "filelist size: " << filelist.size();
  filelist_ = filelist;
  int file_cnt = filelist_.size();
  if (thread_num_ > file_cnt) {
    VLOG(1) << "DataSet thread num = " << thread_num_
            << ", file num = " << file_cnt
            << ". Changing DataSet thread num = " << file_cnt;
    thread_num_ = file_cnt;
  }
}

// NOTE: SetFileList() must be called before SetThreadNum(); otherwise
// filelist_ is still empty here, the file-count cap below is skipped, and the
// requested thread number is taken as-is. This ordering requirement is not
// user friendly (see the usage sketch after this function).
void Dataset::SetThreadNum(int thread_num) {
  int file_cnt = filelist_.size();
  if (file_cnt != 0 && thread_num > file_cnt) {
    VLOG(1) << "DataSet thread num = " << thread_num
            << ", file num = " << file_cnt
            << ". Changing DataSet thread num = " << file_cnt;
    thread_num = file_cnt;
  }
  thread_num_ = thread_num;
}
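
// A minimal usage sketch (illustrative only, not taken from an actual
// caller): the file list has to be registered before the thread number so
// that the cap in SetThreadNum() can take effect. desc_str stands for a
// DataFeedDesc in protobuf text format (see the example further below).
//
//   Dataset dataset;
//   dataset.SetFileList({"part-000", "part-001"});
//   dataset.SetThreadNum(4);            // capped to 2, the number of files
//   dataset.SetTrainerNum(1);
//   dataset.SetDataFeedDesc(desc_str);  // protobuf text format
//   dataset.LoadIntoMemory();           // one loading thread per reader
//   dataset.LocalShuffle();             // shuffle inside every reader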

void Dataset::SetTrainerNum(int trainer_num) { trainer_num_ = trainer_num; }

void Dataset::SetDataFeedDesc(const std::string& data_feed_desc_str) {
  google::protobuf::TextFormat::ParseFromString(data_feed_desc_str,
                                                &data_feed_desc_);
}
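// For reference: data_feed_desc_str is expected in protobuf text format for
// DataFeedDesc. An illustrative example (field names assumed from
// data_feed.proto and the feed name from DataFeedFactory; both may differ
// between versions):
//
//   name: "MultiSlotInMemoryDataFeed"
//   batch_size: 32
//   multi_slot_desc {
//     slots { name: "words" type: "uint64" is_dense: false is_used: true }
//     slots { name: "label" type: "uint64" is_dense: false is_used: true }
//   }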

const std::vector<std::shared_ptr<paddle::framework::DataFeed>>&
Dataset::GetReaders() {
  return readers_;
}

void Dataset::LoadIntoMemory() {
  if (readers_.size() == 0) {
    CreateReaders();
  }
  std::vector<std::thread> load_threads;
  for (int64_t i = 0; i < thread_num_; ++i) {
    load_threads.push_back(std::thread(
        &paddle::framework::DataFeed::LoadIntoMemory, readers_[i].get()));
  }
  for (std::thread& t : load_threads) {
    t.join();
  }
}

void Dataset::LocalShuffle() {
  if (readers_.size() == 0) {
    CreateReaders();
  }
  std::vector<std::thread> local_shuffle_threads;
  for (int64_t i = 0; i < thread_num_; ++i) {
    local_shuffle_threads.push_back(std::thread(
        &paddle::framework::DataFeed::LocalShuffle, readers_[i].get()));
  }
  for (std::thread& t : local_shuffle_threads) {
    t.join();
  }
}

// TODO: implement global shuffle; a commented-out draft is kept below.
void Dataset::GlobalShuffle() {
  /*
  auto fleet_ptr = FleetWrapper::GetInstance();
  fleet_ptr->registe_client2client_msg_handler(0,
    [this](int msg_type, int client_id, const std::string& msg) -> int {
    return this->ReceiveFromClient(msg_type, client_id, msg);
  });
  if (readers_.size() == 0) {
    CreateReaders();
  }
  std::vector<std::thread> global_shuffle_threads;
  for (int64_t i = 0; i < thread_num_; ++i) {
    global_shuffle_threads.push_back(std::thread(&paddle::framework::DataFeed::GlobalShuffle,
                                     readers_[i].get(), trainer_num_));
  }
  for (std::thread& t : global_shuffle_threads) {
    t.join();
  }*/
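  // The draft above would register ReceiveFromClient() as the FleetWrapper
  // client-to-client message handler and then let every reader run
  // DataFeed::GlobalShuffle() so instances get redistributed across
  // trainer_num_ trainers; instances arriving from other trainers are pushed
  // into a local reader's channel by ReceiveFromClient() below.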
}

void Dataset::CreateReaders() {
  VLOG(3) << "Calling CreateReaders()";
  CHECK(thread_num_ > 0) << "thread_num should be larger than 0";
  VLOG(3) << "thread_num in Readers: " << thread_num_;
  VLOG(3) << "readers size: " << readers_.size();
  if (readers_.size() != 0) {
    return;
  }
  VLOG(3) << "data feed class name: " << data_feed_desc_.name();
  for (int64_t i = 0; i < thread_num_; ++i) {
    readers_.push_back(DataFeedFactory::CreateDataFeed(data_feed_desc_.name()));
    readers_.back()->Init(data_feed_desc_);
  }
  VLOG(3) << "Filelist size in readers: " << filelist_.size();
  // Only the first reader is handed the file list; the DataFeed
  // implementation is assumed to share a single file list among all readers.
  readers_[0]->SetFileList(filelist_);
}

int Dataset::ReceiveFromClient(int msg_type, int client_id,
                               const std::string& msg) {
  // Every instance received from another client is currently handed to reader
  // 0; the target reader could instead be chosen at random, e.g.:
  //   int64_t index = paddle::ps::local_random_engine()() % thread_num_;
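  // A hash-based alternative (illustrative sketch, not enabled; would need
  // <functional>) keeps identical messages on the same reader:
  //   int64_t index =
  //       static_cast<int64_t>(std::hash<std::string>{}(msg) % thread_num_);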
  int64_t index = 0;
  readers_[index]->PutInsToChannel(msg);
  return 0;
}

}  // end namespace framework
}  // end namespace paddle