diff --git a/paddle/fluid/framework/device_worker.cc b/paddle/fluid/framework/device_worker.cc index e39ebf8a7d49e7537da723cf946419bca0c1dd9a..5cfe664203ff244cfd3955d28e13af99b31de4f0 100644 --- a/paddle/fluid/framework/device_worker.cc +++ b/paddle/fluid/framework/device_worker.cc @@ -25,7 +25,7 @@ void DeviceWorker::SetDataFeed(DataFeed* data_feed) { } template <typename T> -std::string PrintLodTensorType(LoDTensor* tensor, int64_t start, int64_t end) { +std::string PrintLodTensorType(Tensor* tensor, int64_t start, int64_t end) { auto count = tensor->numel(); if (start < 0 || end > count) { VLOG(3) << "access violation"; @@ -38,8 +38,7 @@ std::string PrintLodTensorType(LoDTensor* tensor, int64_t start, int64_t end) { return os.str(); } -std::string PrintLodTensorIntType(LoDTensor* tensor, int64_t start, - int64_t end) { +std::string PrintLodTensorIntType(Tensor* tensor, int64_t start, int64_t end) { auto count = tensor->numel(); if (start < 0 || end > count) { VLOG(3) << "access violation"; @@ -52,7 +51,7 @@ std::string PrintLodTensorIntType(LoDTensor* tensor, int64_t start, return os.str(); } -std::string PrintLodTensor(LoDTensor* tensor, int64_t start, int64_t end) { +std::string PrintLodTensor(Tensor* tensor, int64_t start, int64_t end) { std::string out_val; if (tensor->type() == proto::VarType::FP32) { out_val = PrintLodTensorType<float>(tensor, start, end); diff --git a/paddle/fluid/framework/device_worker.h b/paddle/fluid/framework/device_worker.h index 8d50f476eaeee410a608dfdd2ee05b836c10c8a0..2b4751691bbdd33d204c2b41a4c37a24b6aef37c 100644 --- a/paddle/fluid/framework/device_worker.h +++ b/paddle/fluid/framework/device_worker.h @@ -45,7 +45,7 @@ limitations under the License. 
*/ namespace paddle { namespace framework { -std::string PrintLodTensor(LoDTensor* tensor, int64_t start, int64_t end); +std::string PrintLodTensor(Tensor* tensor, int64_t start, int64_t end); std::pair<int64_t, int64_t> GetTensorBound(LoDTensor* tensor, int index); bool CheckValidOutput(LoDTensor* tensor, size_t batch_size); @@ -171,6 +171,7 @@ class DeviceWorker { bool need_dump_field_; const std::vector<std::string>* dump_param_; const std::vector<std::string>* dump_fields_; + std::vector<std::string> all_param_; int dump_mode_ = 0; int dump_interval_ = 10000; diff --git a/paddle/fluid/framework/downpour_worker.cc b/paddle/fluid/framework/downpour_worker.cc index 243e7b97c2a75a46c37ad6e72c8de34838680b03..cbdfa00652abdedeb71b7961dc3ef1cabeca2f97 100644 --- a/paddle/fluid/framework/downpour_worker.cc +++ b/paddle/fluid/framework/downpour_worker.cc @@ -771,7 +771,50 @@ void DownpourWorker::TrainFiles() { } } if (!need_skip) { +#ifdef PADDLE_WITH_PSLIB + try { + op->Run(*thread_scope_, place_); + } catch (std::exception& e) { + fprintf(stderr, "error message: %s\n", e.what()); + auto& ins_id_vec = device_reader_->GetInsIdVec(); + size_t batch_size = device_reader_->GetCurBatchSize(); + std::string s = ""; + for (auto& ins_id : ins_id_vec) { + if (s != "") s += ","; + s += ins_id; + } + fprintf(stderr, "batch_size: %zu, ins_ids_vec: %s\n", batch_size, + s.c_str()); + s = ""; + for (auto& param : all_param_) { + Variable* var = thread_scope_->FindVar(param); + if (var == nullptr) { + continue; + } + Tensor* tensor = nullptr; + int64_t len = 0; + if (var->IsType<LoDTensor>()) { + tensor = var->GetMutable<LoDTensor>(); + len = tensor->numel(); + } else if (var->IsType<SelectedRows>()) { + auto selected_rows = var->GetMutable<SelectedRows>(); + tensor = selected_rows->mutable_value(); + len = tensor->numel(); + } + if (!tensor->IsInitialized()) { + continue; + } + s += param + ":" + std::to_string(len) + ":"; + s += PrintLodTensor(tensor, 0, len); + fprintf(stderr, "%s\n", s.c_str()); + fflush(stderr); + s = ""; + } + throw e; + } +#else op->Run(*thread_scope_, place_); 
+#endif } } diff --git a/paddle/fluid/framework/hogwild_worker.cc b/paddle/fluid/framework/hogwild_worker.cc index 4d930337e845db9cea7aa8af0c5d3acbd15b11f0..c51f091c54a98924a239f0e1ae717278863f7d6d 100644 --- a/paddle/fluid/framework/hogwild_worker.cc +++ b/paddle/fluid/framework/hogwild_worker.cc @@ -58,6 +58,7 @@ void HogwildWorker::CreateThreadScope(const ProgramDesc &program) { thread_scope_ = &root_scope_->NewScope(); for (auto &var : block.AllVars()) { + all_param_.push_back(var->Name()); if (var->Persistable()) { auto *ptr = root_scope_->Var(var->Name()); InitializeVariable(ptr, var->GetType());