diff --git a/paddle/fluid/operators/distributed/proto_encoder_helper.h b/paddle/fluid/operators/distributed/proto_encoder_helper.h
index 2fab02e32fe18ee04f86a69bb5bae1cbe7c6762c..d2b0eb6ca6de1984dc7cfc2a662c88d5e56e1e05 100644
--- a/paddle/fluid/operators/distributed/proto_encoder_helper.h
+++ b/paddle/fluid/operators/distributed/proto_encoder_helper.h
@@ -82,8 +82,10 @@ class ProtoEncodeHelper {
       : base_(buf), p_(buf), limit_(base_ + max_size) {}

   ~ProtoEncodeHelper() {
+#define REPLACE_ENFORCE_GLOG 1
     // Make sure callers didn't do operations that went over max_size promised
-    PADDLE_ENFORCE_LE(p_, limit_);
+    paddle::platform::throw_on_error(p_ <= limit_);
+#undef REPLACE_ENFORCE_GLOG
   }

   const char* data() const { return base_; }
diff --git a/paddle/fluid/operators/listen_and_serv_op.cc b/paddle/fluid/operators/listen_and_serv_op.cc
index 966d78b84130c172c41e8049bf6bb1dc659d7d48..dc008d16971bc762b401ddece56f9ec56f7a47d6 100644
--- a/paddle/fluid/operators/listen_and_serv_op.cc
+++ b/paddle/fluid/operators/listen_and_serv_op.cc
@@ -59,17 +59,16 @@ static void ParallelExecuteBlocks(
     framework::ProgramDesc *program, framework::Scope *scope) {
   std::vector<std::future<void>> fs;
   for (size_t idx : parallel_blkids) {
-    fs.push_back(
-        framework::Async([&executor, &prepared, &program, &scope, idx]() {
-          int run_block = idx;  // thread local
-          try {
-            VLOG(3) << "running server block: " << run_block
-                    << "pointer: " << prepared[run_block].get();
-            executor->RunPreparedContext(prepared[run_block].get(), scope);
-          } catch (const std::exception &e) {
-            LOG(ERROR) << "run sub program error " << e.what();
-          }
-        }));
+    fs.push_back(framework::Async([&executor, &prepared, &scope, idx]() {
+      int run_block = idx;  // thread local
+      try {
+        VLOG(3) << "running server block: " << run_block
+                << "pointer: " << prepared[run_block].get();
+        executor->RunPreparedContext(prepared[run_block].get(), scope);
+      } catch (const std::exception &e) {
+        LOG(ERROR) << "run sub program error " << e.what();
+      }
+    }));
   }
   for (size_t i = 0; i < fs.size(); ++i) fs[i].wait();
 }
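
The second hunk keeps the same fan-out/join pattern while dropping the unused `&program` capture from the lambda: launch one task per block id, collect the `std::future<void>` handles, then wait on all of them. Below is a minimal, self-contained sketch of that pattern, not Paddle code: `std::async` stands in for `framework::Async`, and the hypothetical `RunBlock` stands in for `Executor::RunPreparedContext`.

```cpp
#include <future>
#include <iostream>
#include <vector>

// Placeholder for executor->RunPreparedContext(prepared[block_id].get(), scope).
static void RunBlock(int block_id) {
  std::cout << "running block " << block_id << "\n";
}

// Sketch of ParallelExecuteBlocks: launch every block asynchronously,
// then block until all launched tasks have finished.
static void ParallelExecuteBlocksSketch(const std::vector<int>& parallel_blkids) {
  std::vector<std::future<void>> fs;
  for (int idx : parallel_blkids) {
    // Capture only what the body needs; the diff removes the unused &program capture.
    fs.push_back(std::async(std::launch::async, [idx]() {
      try {
        RunBlock(idx);
      } catch (const std::exception& e) {
        std::cerr << "run sub program error " << e.what() << "\n";
      }
    }));
  }
  for (auto& f : fs) f.wait();  // join: wait for every block to complete
}

int main() {
  ParallelExecuteBlocksSketch({0, 1, 2});
  return 0;
}
```

Catching exceptions inside each task, as the original code does, keeps one failing block from tearing down the others; the error is logged and the remaining futures are still waited on.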