Commit d25389fe authored by xujiaqi01, committed by dongdaxiang

add some log && fix error

Parent fd3adf58
......@@ -177,6 +177,9 @@ int InMemoryDataFeed<T>::Next() {
}
CHECK(in_channel != nullptr);
CHECK(out_channel != nullptr);
VLOG(3) << "in_channel size=" << in_channel->Size()
<< ", out_channel size=" << out_channel->Size()
<< ", thread_id=" << thread_id_;
int index = 0;
T instance;
T ins_vec;
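Note for anyone looking for the new messages: VLOG(3) output is compiled in but suppressed by default, and with Paddle's usual glog setup it only shows up once the verbosity level is raised (typically by exporting GLOG_v=3 before running). A minimal standalone sketch of that gating, not Paddle code:

```cpp
// Minimal sketch: VLOG(3) is gated by glog's verbosity level (FLAGS_v),
// so the new channel-size logs are silent unless verbosity >= 3.
#include <glog/logging.h>

int main(int argc, char* argv[]) {
  google::InitGoogleLogging(argv[0]);
  FLAGS_logtostderr = true;  // print to stderr instead of log files
  FLAGS_v = 0;               // default verbosity: the VLOG(3) below is silent
  VLOG(3) << "hidden while verbosity < 3";

  FLAGS_v = 3;               // roughly what exporting GLOG_v=3 does
  VLOG(3) << "now visible, like the new in_channel/out_channel size logs";
  return 0;
}
```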
......@@ -259,14 +262,19 @@ void InMemoryDataFeed<T>::FillChannelToMemoryData() {
channel = shuffled_ins_out_;
}
CHECK(channel != nullptr);
-  local_vec.reserve(channel->Size());
+  local_vec.resize(channel->Size());
for (int64_t i = 0; i < channel->Size(); ++i) {
channel->Pop(local_vec[i]);
}
-  std::unique_lock<std::mutex> lock(*mutex_for_update_memory_data_);
-  lock.lock();
+  VLOG(3) << "local_vec size=" << local_vec.size() <<", thread_id=" << thread_id_;
+  {
+    std::lock_guard<std::mutex> g(*mutex_for_update_memory_data_);
+    VLOG(3) << "before insert, memory_data_ size=" << memory_data_->size()
+            << ", thread_id=" << thread_id_;
    memory_data_->insert(memory_data_->end(), local_vec.begin(), local_vec.end());
+    VLOG(3) << "after insert memory_data_ size=" << memory_data_->size()
+            << ", thread_id=" << thread_id_;
+  }
std::vector<T>().swap(local_vec);
}
......
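The "fix error" half of the commit is in the hunk above and is worth spelling out: `reserve()` does not change a vector's size, so the subsequent `channel->Pop(local_vec[i])` indexed elements that did not exist (undefined behavior), and `std::unique_lock` already acquires the mutex in its constructor, so the old explicit `lock.lock()` locked twice and would throw at runtime. A standalone sketch of both pitfalls, using only the STL and no Paddle types:

```cpp
#include <cstddef>
#include <iostream>
#include <mutex>
#include <system_error>
#include <vector>

int main() {
  // 1) reserve() only allocates capacity; size() stays 0, so indexing v[i]
  //    afterwards touches elements that do not exist (undefined behavior).
  //    resize() actually creates the elements, making v[i] safe to write.
  std::vector<int> v;
  v.reserve(4);  // capacity >= 4, size == 0  ->  v[0] = ... would be UB
  v.resize(4);   // size == 4, default-initialized  ->  v[i] is valid
  for (std::size_t i = 0; i < v.size(); ++i) {
    v[i] = static_cast<int>(i);
  }

  // 2) std::unique_lock's constructor already acquires the mutex, so an
  //    explicit lock.lock() afterwards throws std::system_error
  //    (resource_deadlock_would_occur). The new code instead uses a scoped
  //    std::lock_guard block, which needs no manual lock()/unlock() pair.
  std::mutex m;
  try {
    std::unique_lock<std::mutex> lock(m);  // mutex is locked here
    lock.lock();                           // second lock -> throws
  } catch (const std::system_error& e) {
    std::cout << "double lock rejected: " << e.what() << "\n";
  }
  {
    std::lock_guard<std::mutex> g(m);  // locks on entry, unlocks at scope exit
  }
  return 0;
}
```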
......@@ -176,7 +176,6 @@ void DatasetImpl<T>::DestroyReaders() {
for (std::thread& t : fill_threads) {
t.join();
}
-  std::vector<std::string>().swap(filelist_);
std::vector<std::shared_ptr<paddle::framework::DataFeed>>().swap(readers_);
}
......
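With the removal above, DestroyReaders() now keeps `filelist_` intact and only releases `readers_`, which suggests the file list is meant to outlive the readers so they can be recreated later. The `std::vector<T>().swap(v)` idiom that remains is the standard way to actually release a vector's storage, which plain `clear()` does not guarantee. A small illustration of the difference (standard C++ behavior, nothing Paddle-specific; the file names are dummies):

```cpp
#include <iostream>
#include <string>
#include <vector>

int main() {
  std::vector<std::string> files(1000, "dummy_file_name");

  files.clear();  // size drops to 0, but capacity (and its memory) is
                  // typically retained (implementation-defined)
  std::cout << "after clear: size=" << files.size()
            << " capacity=" << files.capacity() << "\n";

  files.assign(1000, "dummy_file_name");
  std::vector<std::string>().swap(files);  // swap with an empty temporary;
                                           // the old storage is released when
                                           // the temporary is destroyed
  std::cout << "after swap:  size=" << files.size()
            << " capacity=" << files.capacity() << "\n";
  return 0;
}
```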
......@@ -83,10 +83,6 @@ class DatasetImpl : public Dataset {
std::vector<std::shared_ptr<paddle::framework::DataFeed>> readers_;
std::vector<T> memory_data_;
std::mutex mutex_for_update_memory_data_;
-  std::vector<std::shared_ptr<paddle::framework::BlockingQueue<T>>>
-      shuffled_ins_vec_;
-  std::vector<std::shared_ptr<paddle::framework::BlockingQueue<T>>>
-      shuffled_ins_out_vec_;
int thread_num_;
paddle::framework::DataFeedDesc data_feed_desc_;
std::vector<std::string> filelist_;
......
......@@ -118,7 +118,7 @@ void Executor::CreateVariables(const ProgramDesc& pdesc, Scope* scope,
}
void Executor::RunFromDataset(const ProgramDesc& main_program, Scope* scope,
-                              MultiSlotDataset* dataset,
+                              Dataset* dataset,
const std::string& trainer_desc_str) {
VLOG(3) << "Start to RunFromDataset in executor";
TrainerDesc trainer_desc;
......
......@@ -113,7 +113,7 @@ class Executor {
void EnableMKLDNN(const ProgramDesc& program);
void RunFromDataset(const ProgramDesc& main_program, Scope* scope,
-                       MultiSlotDataset* dataset,
+                       Dataset* dataset,
const std::string& trainer_desc_str);
private:
......
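The signature change in the last two hunks (MultiSlotDataset* to Dataset*) widens RunFromDataset to the abstract base class, so the executor no longer needs to know which concrete dataset it is given. A minimal illustration of the idea; the interface below is a made-up simplification for this note, not Paddle's actual Dataset declaration:

```cpp
#include <iostream>
#include <memory>

class Dataset {  // abstract interface the executor depends on
 public:
  virtual ~Dataset() = default;
  virtual void LoadIntoMemory() = 0;
};

class MultiSlotDataset : public Dataset {  // one concrete implementation
 public:
  void LoadIntoMemory() override { std::cout << "load multi-slot data\n"; }
};

// Accepting Dataset* means any future Dataset subclass can be passed in
// without touching the executor's signature again.
void RunFromDataset(Dataset* dataset) { dataset->LoadIntoMemory(); }

int main() {
  std::unique_ptr<Dataset> ds = std::make_unique<MultiSlotDataset>();
  RunFromDataset(ds.get());
  return 0;
}
```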
......@@ -297,6 +297,9 @@ void FleetWrapper::PushSparseVarsWithLabelAsync(
int FleetWrapper::RegisterClientToClientMsgHandler(
int msg_type, MsgHandlerFunc handler) {
#ifdef PADDLE_WITH_PSLIB
VLOG(3) << "calling FleetWrapper::RegisterClientToClientMsgHandler";
VLOG(3) << "pslib_ptr_=" << pslib_ptr_;
VLOG(3) << "_worker_ptr=" << pslib_ptr_->_worker_ptr;
pslib_ptr_->_worker_ptr->registe_client2client_msg_handler(
msg_type, handler);
#else
......
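The three VLOG lines above print the PSLib client pointers right before they are dereferenced, a cheap way to confirm from the logs that fleet initialization ran before handler registration. A hedged sketch of the same "log the pointer, then fail fast" pattern with glog; `Worker` and `Register` below are hypothetical stand-ins, not PSLib's real API:

```cpp
#include <glog/logging.h>

#include <memory>

struct Worker {
  int RegisterHandler(int msg_type) { return msg_type; }
};

int Register(const std::shared_ptr<Worker>& client, int msg_type) {
  // Log the raw address first: if a crash happens below, the log already
  // tells us whether the client was ever initialized.
  VLOG(3) << "client=" << client.get();
  // Abort with a readable message instead of dereferencing a null pointer.
  CHECK(client != nullptr) << "initialize the fleet client before "
                              "registering message handlers";
  return client->RegisterHandler(msg_type);
}

int main(int argc, char* argv[]) {
  google::InitGoogleLogging(argv[0]);
  auto client = std::make_shared<Worker>();
  Register(client, /*msg_type=*/0);
  return 0;
}
```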