From e9a0fbfff2bac1ec4c22b9dc713492cc19859401 Mon Sep 17 00:00:00 2001 From: Yi Liu Date: Thu, 17 Sep 2020 17:50:46 +0800 Subject: [PATCH] =?UTF-8?q?OP=E6=8A=A5=E9=94=99=E4=BF=A1=E6=81=AF=E4=BC=98?= =?UTF-8?q?=E5=8C=96=20(#27301)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit paddle/fluid/operators/distributed_ops OP报错信息优化 --- .../operators/distributed_ops/fake_init_op.cc | 4 +- .../distributed_ops/listen_and_serv_op.cc | 45 ++++++++++++++----- 2 files changed, 37 insertions(+), 12 deletions(-) diff --git a/paddle/fluid/operators/distributed_ops/fake_init_op.cc b/paddle/fluid/operators/distributed_ops/fake_init_op.cc index 1da164175e..cb27dc75eb 100644 --- a/paddle/fluid/operators/distributed_ops/fake_init_op.cc +++ b/paddle/fluid/operators/distributed_ops/fake_init_op.cc @@ -43,9 +43,9 @@ class FakeInitOp : public framework::OperatorBase { tensor = out_var.GetMutable()->mutable_value(); tensor->Resize(framework::make_ddim(Attr>("shape"))); } else { - PADDLE_THROW( + PADDLE_THROW(platform::errors::InvalidArgument( "fake init op's output only" - "supports SelectedRows and LoDTensor"); + "supports SelectedRows and LoDTensor")); } } }; diff --git a/paddle/fluid/operators/distributed_ops/listen_and_serv_op.cc b/paddle/fluid/operators/distributed_ops/listen_and_serv_op.cc index 5e1e408eb2..43de8488a0 100644 --- a/paddle/fluid/operators/distributed_ops/listen_and_serv_op.cc +++ b/paddle/fluid/operators/distributed_ops/listen_and_serv_op.cc @@ -134,7 +134,10 @@ void ListenAndServOp::RunSyncLoop( auto optimize_blocks = Attr>(kOptimizeBlocks); PADDLE_ENFORCE_GE(num_blocks, 2, - "server program should have at least 2 blocks"); + platform::errors::PreconditionNotMet( + "Invalid number of blocks in server program. Expected " + "equal or greater than 2. Received %zu", + num_blocks)); // Prepare all the server block std::vector optimize_blocks_list; @@ -218,7 +221,8 @@ void ListenAndServOp::ResetReceivedVars(framework::Scope *recv_scope, VLOG(3) << "reset sparse var: " << varname; var->GetMutable()->mutable_rows()->clear(); } else { - PADDLE_THROW("The type of sparse var should be SelectedRows"); + PADDLE_THROW(platform::errors::PreconditionNotMet( + "The type of sparse var should be SelectedRows")); } } if (UNLIKELY(reset_all)) { @@ -235,7 +239,8 @@ void ListenAndServOp::ResetReceivedVars(framework::Scope *recv_scope, math::set_constant(*dev_ctx, var->GetMutable(), static_cast(0)); } else { - PADDLE_THROW("The type of dense var should be in [LoDTensor, Tensor]"); + PADDLE_THROW(platform::errors::PreconditionNotMet( + "The type of dense var should be in [LoDTensor, Tensor]")); } } } @@ -254,8 +259,15 @@ void ListenAndServOp::RunAsyncLoop(framework::Executor *executor, std::vector pieces; split(grad_and_id, ':', &pieces); VLOG(3) << "after split, key = " << pieces[0] << ", id=" << pieces[1]; - PADDLE_ENFORCE_EQ(pieces.size(), 2); - PADDLE_ENFORCE_EQ(out_map->count(pieces[0]), 0); + PADDLE_ENFORCE_EQ(pieces.size(), 2, + platform::errors::PreconditionNotMet( + "Invalid format of grad_and_id argument. " + "Expected \"grad:block_id\". Received %s", + grad_and_id.c_str())); + PADDLE_ENFORCE_EQ(out_map->count(pieces[0]), 0, + platform::errors::AlreadyExists( + "The gradient name %s already exists in out_map", + pieces[0].c_str())); int block_id = std::stoi(pieces[1]); (*out_map)[pieces[0]] = block_id; @@ -267,7 +279,10 @@ void ListenAndServOp::RunAsyncLoop(framework::Executor *executor, size_t num_blocks = program->Size(); PADDLE_ENFORCE_GE(num_blocks, 2, - "server program should have at least 2 blocks"); + platform::errors::PreconditionNotMet( + "Invalid number of blocks in server program. Expected " + "equal or greater than 2. Received %zu", + num_blocks)); std::vector block_list; for (size_t blkid = 1; blkid < num_blocks; ++blkid) { block_list.push_back(blkid); } @@ -342,9 +357,9 @@ void ListenAndServOp::CacheVarsType(const std::vector &varnames, var->IsType()) { dense_vars_.push_back(varname); } else { - PADDLE_THROW( + PADDLE_THROW(platform::errors::PreconditionNotMet( "The type of received var should be in [SelectedRows, LoDTensor, " - "Tensor]."); + "Tensor].")); } } } @@ -450,7 +465,12 @@ void ListenAndServOp::RunImpl(const framework::Scope &scope, split(prefetch_var_name_and_id, ':', &pieces); VLOG(3) << "after split, prefetch_var = " << pieces[0] << ", id=" << pieces[1]; - PADDLE_ENFORCE_EQ(pieces.size(), 2); + PADDLE_ENFORCE_EQ( + pieces.size(), 2, + platform::errors::PreconditionNotMet( + "Invalid format of prefetch_var_name_and_id argument. " + "Expected \"xxx:xxx\". Received %s", + prefetch_var_name_and_id.c_str())); int block_id = std::stoi(pieces[1]); prefetch_block_id_list.push_back(block_id); @@ -476,7 +496,12 @@ void ListenAndServOp::RunImpl(const framework::Scope &scope, sparse_grad_name_to_param_name_str) { std::vector pieces; split(sparse_grad_name_and_param_name, ':', &pieces); - PADDLE_ENFORCE_EQ(pieces.size(), 2); + PADDLE_ENFORCE_EQ( + pieces.size(), 2, + platform::errors::PreconditionNotMet( + "Invalid format of sparse_grad_name_and_param_name argument. " + "Expected \"xxx:xxx\". Received %s", + sparse_grad_name_and_param_name.c_str())); VLOG(3) << "after split, sparse_grad_name = " << pieces[0] << ", param_name = " << pieces[1]; sparse_grad_name_to_param_name[pieces[0]] = pieces[1]; -- GitLab