Unverified commit 8414575b, authored by Chen Weihang, committed by GitHub

Add examples for error message writing specification - PreconditionNotMet, Unimplemented, Unavailable (#21137)

* add examples for error spec, test=develop

* change ENFORCE to ENFORCE_**, test=develop
Parent 7e5f74b8
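The specification applied in the hunks below maps each failure category to a typed error object: PreconditionNotMet when a required setup step was skipped, Unimplemented when a feature or branch is not supported yet, and Unavailable when an external resource (a file, for example) cannot be reached. The following is a minimal sketch of the before/after pattern, not code from this commit; it assumes the PADDLE_ENFORCE_* macros and paddle::platform::errors helpers from paddle/fluid/platform/enforce.h at this revision, and the function, mutex_ptr, op_type, and path names are hypothetical placeholders.

#include <fstream>
#include <string>

#include "paddle/fluid/platform/enforce.h"

// Sketch only: one example per error category used in this commit.
// Old style for comparison:
//   PADDLE_ENFORCE(mutex_ptr != nullptr,
//                  "should call SetFileListMutex before PickOneFile");
void CheckedSetup(const std::string &path, void *mutex_ptr,
                  const std::string &op_type) {
  // PreconditionNotMet: a required setup call was skipped by the caller.
  PADDLE_ENFORCE_NOT_NULL(
      mutex_ptr, paddle::platform::errors::PreconditionNotMet(
                     "You should call SetFileListMutex before PickOneFile."));

  // Unimplemented: the requested operation is not supported yet.
  PADDLE_ENFORCE_NE(op_type, "unsupported_op",
                    paddle::platform::errors::Unimplemented(
                        "Operation %s is not supported yet.", op_type));

  // Unavailable: an external resource (here, a file) cannot be opened.
  std::ifstream fin(path);
  PADDLE_ENFORCE_EQ(fin.good(), true,
                    paddle::platform::errors::Unavailable(
                        "Cannot open file %s.", path));
}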
@@ -111,10 +111,13 @@ void DataFeed::SetBatchSize(int batch_size) {
 }
 bool DataFeed::PickOneFile(std::string* filename) {
-  PADDLE_ENFORCE(mutex_for_pick_file_ != nullptr,
-                 "should call SetFileListMutex before PickOneFile");
-  PADDLE_ENFORCE(file_idx_ != nullptr,
-                 "should call SetFileListIndex before PickOneFile");
+  PADDLE_ENFORCE_NOT_NULL(
+      mutex_for_pick_file_,
+      platform::errors::PreconditionNotMet(
+          "You should call SetFileListMutex before PickOneFile"));
+  PADDLE_ENFORCE_NOT_NULL(
+      file_idx_, platform::errors::PreconditionNotMet(
+                     "You should call SetFileListIndex before PickOneFile"));
   std::unique_lock<std::mutex> lock(*mutex_for_pick_file_);
   if (*file_idx_ == filelist_.size()) {
     VLOG(3) << "DataFeed::PickOneFile no more file to pick";
@@ -134,7 +137,9 @@ void DataFeed::CheckSetFileList() {
 }
 void DataFeed::CheckStart() {
-  PADDLE_ENFORCE(finish_start_, "Datafeed has not started running yet.");
+  PADDLE_ENFORCE_EQ(finish_start_, true,
+                    platform::errors::PreconditionNotMet(
+                        "Datafeed has not started running yet."));
 }
 void DataFeed::AssignFeedVar(const Scope& scope) {
...
@@ -34,7 +34,8 @@ paddle::framework::DataFeedDesc load_datafeed_param_from_file(
     const char* filename) {
   paddle::framework::DataFeedDesc data_feed_desc;
   int file_descriptor = open(filename, O_RDONLY);
-  PADDLE_ENFORCE(file_descriptor != -1, "Can not open %s.", filename);
+  PADDLE_ENFORCE_NE(file_descriptor, -1, platform::errors::Unavailable(
+                                              "Cannot open file %s.", filename));
   google::protobuf::io::FileInputStream fileInput(file_descriptor);
   google::protobuf::TextFormat::Parse(&fileInput, &data_feed_desc);
   close(file_descriptor);
@@ -44,7 +45,8 @@ paddle::framework::DataFeedDesc load_datafeed_param_from_file(
 const std::vector<std::string> load_filelist_from_file(const char* filename) {
   std::vector<std::string> filelist;
   std::ifstream fin(filename);
-  PADDLE_ENFORCE(fin.good(), "Can not open %s.", filename);
+  PADDLE_ENFORCE_EQ(fin.good(), true, platform::errors::Unavailable(
+                                          "Cannot open file %s.", filename));
   std::string line;
   while (getline(fin, line)) {
     filelist.push_back(line);
...
@@ -272,7 +272,8 @@ class ParallelExecutorPassBuilder : public ir::PassBuilder {
   }
 #else
   PADDLE_ENFORCE_NE(FLAGS_use_ngraph, true,
-                    "Please compile with NGRAPH first to use NGRAPH");
+                    platform::errors::PreconditionNotMet(
+                        "Please compile with NGRAPH first to use NGRAPH"));
 #endif
 }
...
@@ -139,10 +139,12 @@ void FastThreadedSSAGraphExecutor::InsertFetchOps(
   for (size_t i = 0; i < fetch_tensors.size(); ++i) {
     auto &var_name = fetch_tensors.at(i);
     auto fetched_var_it = fetched_vars->find(var_name);
-    PADDLE_ENFORCE(fetched_var_it != fetched_vars->end(),
-                   "Cannot find fetched variable(%s).(Perhaps the main_program "
-                   "is not set to ParallelExecutor)",
-                   var_name);
+    PADDLE_ENFORCE_NE(
+        fetched_var_it, fetched_vars->end(),
+        platform::errors::PreconditionNotMet(
+            "Cannot find fetched variable(%s). Perhaps the main_program "
+            "is not set to ParallelExecutor.",
+            var_name));
     auto &vars = fetched_var_it->second;
...
@@ -143,7 +143,9 @@ void ExecutorThreadWorker::CreateThreadScope(const ProgramDesc& program) {
   auto& block = program.Block(0);
   PADDLE_ENFORCE_NOT_NULL(
-      root_scope_, "root_scope should be set before creating thread scope");
+      root_scope_,
+      platform::errors::PreconditionNotMet(
+          "root_scope should be set before creating thread scope."));
   thread_scope_ = &root_scope_->NewScope();
   for (auto& var : block.AllVars()) {
...
@@ -76,7 +76,8 @@ class OperationMap {
   Operation& Get(std::string op_type) {
     auto iter = operations_.find(op_type);
     PADDLE_ENFORCE_NE(iter, operations_.end(),
-                      "Operation %s is not supported yet.", op_type);
+                      platform::errors::Unimplemented(
+                          "Operation %s is not supported yet.", op_type));
     return iter->second;
   }
...
@@ -61,10 +61,12 @@ class FuseAllReduceOpPass : public ir::Pass {
       return;
     }
-    PADDLE_ENFORCE_EQ(all_reduce_ops.size(), grads.size(),
-                      "The number of all_reduce OpHandle is not equal to the "
-                      "number of grads. Maybe some gradients are sparse type, "
-                      "it is not supported currently.");
+    PADDLE_ENFORCE_EQ(
+        all_reduce_ops.size(), grads.size(),
+        platform::errors::Unimplemented(
+            "The number of all_reduce OpHandle is not equal to the "
+            "number of grads. Maybe some gradients are sparse type, "
+            "it is not supported currently."));
     auto &group_params_grads = graph->Get<details::GroupParamsAndGrads>(
         details::kGroupParamsAndDenseGrads);
...
@@ -39,7 +39,9 @@ std::string Benchmark::SerializeToString() const {
 }
 void Benchmark::PersistToFile(const std::string &path) const {
   std::ofstream file(path, std::ios::app);
-  PADDLE_ENFORCE(file.is_open(), "Can not open %s to add benchmark", path);
+  PADDLE_ENFORCE_EQ(
+      file.is_open(), true,
+      platform::errors::Unavailable("Can not open %s to add benchmark.", path));
   file << SerializeToString();
   file.flush();
   file.close();
...
@@ -51,7 +51,7 @@ class MulOp : public framework::OperatorWithKernel {
   PADDLE_ENFORCE_NE(framework::product(y_dims), 0,
                     platform::errors::PreconditionNotMet(
-                        "Maybe the Input variable Y(%s) has not "
+                        "The Input variable Y(%s) has not "
                         "been initialized. You may need to confirm "
                         "if you put exe.run(startup_program) "
                         "after optimizer.minimize function.",
...