diff --git a/paddle/fluid/framework/array.h b/paddle/fluid/framework/array.h index b53082986882c80a85826f10d5766525f72c0a97..7424bae1ab865e7c82b676e5aca02a438dedc448 100644 --- a/paddle/fluid/framework/array.h +++ b/paddle/fluid/framework/array.h @@ -55,7 +55,8 @@ class Array { HOSTDEVICE inline T &at(size_t i) { #ifndef __CUDA_ARCH__ - PADDLE_ENFORCE_LT(i, N, "Array index out of bounds"); + PADDLE_ENFORCE_LT( + i, N, platform::errors::OutOfRange("Array index out of bounds.")); #endif return (*this)[i]; } diff --git a/paddle/fluid/framework/async_executor.cc b/paddle/fluid/framework/async_executor.cc index 3c5943daa6e4461fdd05504fc1b90599789e4f1a..9f8f17cd1ac68c0549e0927c30df2481d8ee2280 100644 --- a/paddle/fluid/framework/async_executor.cc +++ b/paddle/fluid/framework/async_executor.cc @@ -76,7 +76,8 @@ void AsyncExecutor::RunFromFile(const ProgramDesc& main_program, auto& block = main_program.Block(0); for (auto var_name : fetch_var_names) { auto var_desc = block.FindVar(var_name); - PADDLE_ENFORCE_NOT_NULL(var_desc, "%s is not found.", var_name); + PADDLE_ENFORCE_NOT_NULL( + var_desc, platform::errors::NotFound("%s is not found.", var_name)); auto shapes = var_desc->GetShape(); PADDLE_ENFORCE(shapes[shapes.size() - 1] == 1, "var %s: Fetched var has wrong shape, " @@ -93,7 +94,8 @@ void AsyncExecutor::RunFromFile(const ProgramDesc& main_program, actual_thread_num_ = thread_num; int file_cnt = filelist.size(); - PADDLE_ENFORCE(file_cnt > 0, "File list cannot be empty"); + PADDLE_ENFORCE_GT(file_cnt, 0, + platform::errors::NotFound("Input file list is empty")); if (actual_thread_num_ > file_cnt) { VLOG(1) << "Thread num = " << thread_num << ", file num = " << file_cnt diff --git a/paddle/fluid/framework/attribute.h b/paddle/fluid/framework/attribute.h index aa452ac220ea63bbf7a79c09b90aadfd2764856b..86dd7a68b532c1f0f0f3c5bdde3dcbc9a21f4cb0 100644 --- a/paddle/fluid/framework/attribute.h +++ b/paddle/fluid/framework/attribute.h @@ -192,7 +192,8 @@ class 
GreaterThanChecker { public: explicit GreaterThanChecker(T lower_bound) : lower_bound_(lower_bound) {} void operator()(const T& value) const { - PADDLE_ENFORCE(value > lower_bound_, "larger_than check fails."); + PADDLE_ENFORCE_GT(value, lower_bound_, + platform::errors::OutOfRange("larger_than check fails.")); } private: diff --git a/paddle/fluid/framework/ir/fuse_optimizer_ops_pass/fuse_optimizer_op_pass.cc b/paddle/fluid/framework/ir/fuse_optimizer_ops_pass/fuse_optimizer_op_pass.cc index f75ba947e902bb219f3081d8e41b9b9ad81a837f..d637269f86c2d5a4822822588c64f6cecced44e1 100644 --- a/paddle/fluid/framework/ir/fuse_optimizer_ops_pass/fuse_optimizer_op_pass.cc +++ b/paddle/fluid/framework/ir/fuse_optimizer_ops_pass/fuse_optimizer_op_pass.cc @@ -94,8 +94,9 @@ void FuseOptimizerOpPass::ApplyImpl(ir::Graph *graph) const { auto fused_var_name = prefix + "_" + fuse_op_type + "_" + var_name + "_" + aux_var_map[var_name][0]; VLOG(6) << var_name << ": " << fused_var_name; - PADDLE_ENFORCE_EQ(fused_var_set.count(fused_var_name), 0, - "The fused variable already existed."); + PADDLE_ENFORCE_EQ( + fused_var_set.count(fused_var_name), 0, + platform::errors::AlreadyExists("The fused variable already exists.")); fused_var_set.insert(fused_var_name); fused_vars_name.emplace(var_name, fused_var_name); } diff --git a/paddle/fluid/framework/ir/graph.h b/paddle/fluid/framework/ir/graph.h index 23030905bbadbbbb69f24a852b3cdd09b73db089..c671e191e5292c54bcd52809c1e0af30a4fdf53a 100644 --- a/paddle/fluid/framework/ir/graph.h +++ b/paddle/fluid/framework/ir/graph.h @@ -109,8 +109,10 @@ class Graph { template void Set(const std::string &attr_name, AttrType *attr) { - PADDLE_ENFORCE_EQ(attrs_.count(attr_name), 0, "%s already set in the graph", - attr_name); + PADDLE_ENFORCE_EQ( + attrs_.count(attr_name), 0, + platform::errors::AlreadyExists( + "The attribute %s has been set in the graph.", attr_name)); attrs_[attr_name] = attr; attr_dels_[attr_name] = [attr, attr_name]() { VLOG(3) << 
"deleting " << attr_name; @@ -120,15 +122,19 @@ class Graph { template void SetNotOwned(const std::string &attr_name, AttrType *attr) { - PADDLE_ENFORCE_EQ(attrs_.count(attr_name), 0, "%s already set in the graph", - attr_name); + PADDLE_ENFORCE_EQ( + attrs_.count(attr_name), 0, + platform::errors::AlreadyExists( + "The attribute %s has been set in the graph.", attr_name)); attrs_[attr_name] = attr; attr_dels_[attr_name] = []() {}; } void Erase(const std::string &attr_name) { - PADDLE_ENFORCE_NE(attrs_.count(attr_name), 0, "%s not set in the graph", - attr_name); + PADDLE_ENFORCE_NE( + attrs_.count(attr_name), 0, + platform::errors::NotFound( + "The attribute %s has not been set in the graph.", attr_name)); attr_dels_[attr_name](); attrs_.erase(attr_name); attr_dels_.erase(attr_name); diff --git a/paddle/fluid/framework/ir/graph_pattern_detector.cc b/paddle/fluid/framework/ir/graph_pattern_detector.cc index bc0284e3fcd4f6e18e1f4c6e57b9a4675949fef6..4598cd8c5f7d3ca6ce2d634db125e851b1c95450 100644 --- a/paddle/fluid/framework/ir/graph_pattern_detector.cc +++ b/paddle/fluid/framework/ir/graph_pattern_detector.cc @@ -77,7 +77,8 @@ PDNode *PDPattern::RetrieveNode(const std::string &id) const { void PDPattern::AddEdge(PDNode *a, PDNode *b) { PADDLE_ENFORCE(a); PADDLE_ENFORCE(b); - PADDLE_ENFORCE(a != b, "can't connect to the same nodes."); + PADDLE_ENFORCE_NE(a, b, platform::errors::PermissionDenied( + "Cannot connect the same node in the graph.")); edges_.emplace_back(a, b); } diff --git a/paddle/fluid/framework/ir/pass.h b/paddle/fluid/framework/ir/pass.h index cf6b8d1338e20a67d332c2ddec562f662d8ff0a9..0f452d616c19d05a535f45fd8b704c4d0a26f885 100644 --- a/paddle/fluid/framework/ir/pass.h +++ b/paddle/fluid/framework/ir/pass.h @@ -159,7 +159,9 @@ class PassRegistry { } void Insert(const std::string &pass_type, const PassCreator &pass_creator) { - PADDLE_ENFORCE(!Has(pass_type), "Pass %s has been registered", pass_type); + PADDLE_ENFORCE_NE(Has(pass_type), true, + 
platform::errors::AlreadyExists( + "Pass %s has been registered.", pass_type)); map_.insert({pass_type, pass_creator}); } diff --git a/paddle/fluid/framework/ir/repeated_fc_relu_fuse_pass.cc b/paddle/fluid/framework/ir/repeated_fc_relu_fuse_pass.cc index 45157ca18be7217dca014ccb78161474df81709d..655ee65134e7e1db8fb8a5fa8ed3913702f779e0 100644 --- a/paddle/fluid/framework/ir/repeated_fc_relu_fuse_pass.cc +++ b/paddle/fluid/framework/ir/repeated_fc_relu_fuse_pass.cc @@ -281,7 +281,8 @@ static int BuildFusion(Graph* graph, const std::string& name_scope, PADDLE_ENFORCE(subgraph.count(pat.RetrieveNode(name)), "pattern has no Node called %s", name.c_str()); Node* p = subgraph.at(pat.RetrieveNode(name)); - PADDLE_ENFORCE_NOT_NULL(p, "subgraph has no node %s", name.c_str()); + PADDLE_ENFORCE_NOT_NULL( + p, platform::errors::NotFound("subgraph has no node %s", name.c_str())); return p; }; diff --git a/paddle/fluid/framework/ir/seqpool_concat_fuse_pass.cc b/paddle/fluid/framework/ir/seqpool_concat_fuse_pass.cc index 4ac379eb0471ea1a8a72c393dad405be90b2fa33..5e81322ff2901d74c9272f92372f4d58a7149fcb 100644 --- a/paddle/fluid/framework/ir/seqpool_concat_fuse_pass.cc +++ b/paddle/fluid/framework/ir/seqpool_concat_fuse_pass.cc @@ -142,7 +142,8 @@ static int BuildFusion(Graph* graph, const std::string& name_scope, PADDLE_ENFORCE(subgraph.count(pat.RetrieveNode(name)), "pattern has no Node called %s", name.c_str()); Node* p = subgraph.at(pat.RetrieveNode(name)); - PADDLE_ENFORCE_NOT_NULL(p, "subgraph has no node %s", name.c_str()); + PADDLE_ENFORCE_NOT_NULL( + p, platform::errors::NotFound("subgraph has no node %s", name.c_str())); return p; }; diff --git a/paddle/fluid/framework/naive_executor.cc b/paddle/fluid/framework/naive_executor.cc index a37bb6f4da1fc3baffad36c13c690c6410ac4270..465d685804cac517faa5b73ed19b5b94dcab901e 100644 --- a/paddle/fluid/framework/naive_executor.cc +++ b/paddle/fluid/framework/naive_executor.cc @@ -12,7 +12,9 @@ // See the License for the specific 
language governing permissions and // limitations under the License. +#include <memory> #include <string> +#include <unordered_map> #include <vector> #include "paddle/fluid/framework/feed_fetch_method.h" diff --git a/paddle/fluid/framework/op_info.h b/paddle/fluid/framework/op_info.h index 778739419c06031b8fc5938fa630a2d2e3ac11d2..6db3ec353af3369a7466a259da2e5ea2bcad7a96 100644 --- a/paddle/fluid/framework/op_info.h +++ b/paddle/fluid/framework/op_info.h @@ -53,7 +53,9 @@ struct OpInfo { } const proto::OpProto& Proto() const { - PADDLE_ENFORCE_NOT_NULL(proto_, "Operator's Proto has not been registered"); + PADDLE_ENFORCE_NOT_NULL( + proto_, + platform::errors::NotFound("Operator's Proto has not been registered")); PADDLE_ENFORCE_EQ(proto_->IsInitialized(), true, platform::errors::InvalidArgument( "Operator's Proto in op info is not initialized.")); diff --git a/paddle/fluid/framework/operator.cc b/paddle/fluid/framework/operator.cc index 2bb27ee7e35bd5b2f7d89a1a7d708e81f443fd89..22a62d806fa60aaffdf9b5a541552edfc13a02d8 100644 --- a/paddle/fluid/framework/operator.cc +++ b/paddle/fluid/framework/operator.cc @@ -191,9 +191,11 @@ bool OperatorBase::HasInputs(const std::string& name) const { std::string OperatorBase::Input(const std::string& name) const { auto& ins = Inputs(name); - PADDLE_ENFORCE_LE(ins.size(), 1UL, - "Operator %s's input %s should contain only one variable.", - type_, name); + PADDLE_ENFORCE_LE( + ins.size(), 1UL, + platform::errors::AlreadyExists( + "Operator %s's input %s should contain only one variable.", type_, + name)); return ins.empty() ?
kEmptyVarName : ins[0]; } @@ -429,9 +431,11 @@ const Variable* ExecutionContext::InputVar(const std::string& name) const { auto it = ctx_.inputs.find(name); if (it == ctx_.inputs.end()) return nullptr; - PADDLE_ENFORCE_LE(it->second.size(), 1UL, - "Operator %s's input %s should contain only one variable.", - op_.Type(), name); + PADDLE_ENFORCE_LE( + it->second.size(), 1UL, + platform::errors::AlreadyExists( + "Operator %s's input %s should contain only one variable.", + op_.Type(), name)); return it->second.empty() ? nullptr : it->second[0]; } diff --git a/paddle/fluid/operators/mul_op.cc b/paddle/fluid/operators/mul_op.cc index 8d0898d7fe42c3e09cac1afced8385245cbc4300..aabe8ae9693e35b388f895216c877d78f85564ea 100644 --- a/paddle/fluid/operators/mul_op.cc +++ b/paddle/fluid/operators/mul_op.cc @@ -32,12 +32,15 @@ class MulOp : public framework::OperatorWithKernel { using framework::OperatorWithKernel::OperatorWithKernel; void InferShape(framework::InferShapeContext* ctx) const override { - PADDLE_ENFORCE_EQ(ctx->HasInput("X"), true, - "Input(X) of MulOp should not be null."); - PADDLE_ENFORCE_EQ(ctx->HasInput("Y"), true, - "Input(Y) of MulOp should not be null."); - PADDLE_ENFORCE_EQ(ctx->HasOutput("Out"), true, - "Output(Out) of MulOp should not be null."); + PADDLE_ENFORCE_EQ( + ctx->HasInput("X"), true, + platform::errors::NotFound("Input(X) of MulOp should not be null.")); + PADDLE_ENFORCE_EQ( + ctx->HasInput("Y"), true, + platform::errors::NotFound("Input(Y) of MulOp should not be null.")); + PADDLE_ENFORCE_EQ( + ctx->HasOutput("Out"), true, + platform::errors::NotFound("Output(Out) of MulOp should not be null.")); auto x_dims = ctx->GetInputDim("X"); auto y_dims = ctx->GetInputDim("Y");