diff --git a/paddle/fluid/framework/reader.h b/paddle/fluid/framework/reader.h
index 4d9ac5374b1cafa64992c74824b890f78e8157b3..a4207deb7e8113ab07b8b7f9b227e121f3e0f1bc 100644
--- a/paddle/fluid/framework/reader.h
+++ b/paddle/fluid/framework/reader.h
@@ -35,12 +35,16 @@ class ReaderBase {
       : shapes_(shapes),
         var_types_(var_types),
         need_check_feed_(need_check_feed) {
-    PADDLE_ENFORCE_EQ(shapes_.size(), need_check_feed_.size(),
-                      "Construct ReaderBase with mismatched sizes of shapes "
-                      "and need_check_feed");
-    PADDLE_ENFORCE_EQ(var_types_.size(), need_check_feed_.size(),
-                      "Construct ReaderBase with mismatched sizes of var_types "
-                      "and need_check_feed");
+    PADDLE_ENFORCE_EQ(
+        shapes_.size(), need_check_feed_.size(),
+        platform::errors::InvalidArgument(
+            "Construct ReaderBase with mismatched sizes of shapes "
+            "and need_check_feed"));
+    PADDLE_ENFORCE_EQ(
+        var_types_.size(), need_check_feed_.size(),
+        platform::errors::InvalidArgument(
+            "Construct ReaderBase with mismatched sizes of var_types "
+            "and need_check_feed"));
   }
 
   virtual void ReadNext(std::vector<LoDTensor>* out);
@@ -108,7 +112,10 @@ class DecoratedReader : public ReaderBase,
       : ReaderBase(reader->Shapes(), reader->VarTypes(),
                    reader->NeedCheckFeed()),
         reader_(reader) {
-    PADDLE_ENFORCE_NOT_NULL(reader_);
+    PADDLE_ENFORCE_NOT_NULL(
+        reader_,
+        platform::errors::InvalidArgument(
+            "The underlying reader of DecoratedReader should not be null"));
   }
 
   void RegisterDecorateChain() {
@@ -148,7 +155,10 @@ class ReaderHolder {
   template <typename T>
   void Reset(const std::shared_ptr<T>& reader) {
     auto reader_base = std::dynamic_pointer_cast<ReaderBase>(reader);
-    PADDLE_ENFORCE_NOT_NULL(reader_base);
+    PADDLE_ENFORCE_NOT_NULL(
+        reader_base,
+        platform::errors::InvalidArgument(
+            "The underlying reader of ReaderHolder should not be null"));
     reader_ = reader_base;
   }
 
@@ -157,7 +167,10 @@ class ReaderHolder {
   const std::shared_ptr<ReaderBase>& Get() const { return reader_; }
 
   void ReadNext(std::vector<LoDTensor>* out) {
-    PADDLE_ENFORCE_NOT_NULL(reader_);
+    PADDLE_ENFORCE_NOT_NULL(
+        reader_,
+        platform::errors::InvalidArgument(
+            "The underlying reader of ReaderHolder should not be null"));
     reader_->ReadNext(out);
   }
 
@@ -174,13 +187,19 @@ class ReaderHolder {
 
   void Shutdown() {
     VLOG(1) << "Shutdown";
-    PADDLE_ENFORCE_NOT_NULL(reader_);
+    PADDLE_ENFORCE_NOT_NULL(
+        reader_,
+        platform::errors::InvalidArgument(
+            "The underlying reader of ReaderHolder should not be null"));
     reader_->Shutdown();
   }
 
   void Start() {
     VLOG(1) << "start";
-    PADDLE_ENFORCE_NOT_NULL(reader_);
+    PADDLE_ENFORCE_NOT_NULL(
+        reader_,
+        platform::errors::InvalidArgument(
+            "The underlying reader of ReaderHolder should not be null"));
     reader_->Start();
   }
 
diff --git a/paddle/fluid/operators/reader/create_double_buffer_reader_op.cc b/paddle/fluid/operators/reader/create_double_buffer_reader_op.cc
index e39919947c271e1be91151e95b1d5686a8ea755d..15971af58c6841050e008b486a1487ea8df891ec 100644
--- a/paddle/fluid/operators/reader/create_double_buffer_reader_op.cc
+++ b/paddle/fluid/operators/reader/create_double_buffer_reader_op.cc
@@ -35,7 +35,9 @@ class CreateDoubleBufferReaderOp : public framework::OperatorBase {
         dynamic_cast<framework::DecoratedReader*>(out->Get().get());
     PADDLE_ENFORCE_NOT_NULL(
         decorated_reader,
-        platform::errors::NotFound("Not inited with DecoratedReader"));
+        platform::errors::NotFound("The inited reader should be a "
+                                   "DecoratedReader when running "
+                                   "create_double_buffer_reader op."));
     if (decorated_reader->UnderlyingReader() == underlying_reader.Get()) {
       return;
     }
diff --git a/paddle/fluid/operators/reader/create_py_reader_op.cc b/paddle/fluid/operators/reader/create_py_reader_op.cc
index 14370c55a4fa9ea9db73beb3f9dc641f1d7e5bde..c04bdb2f10930e2bfbe28e4fee75b7bc51676bcb 100644
--- a/paddle/fluid/operators/reader/create_py_reader_op.cc
+++ b/paddle/fluid/operators/reader/create_py_reader_op.cc
@@ -36,8 +36,11 @@ class CreatePyReaderOp : public framework::OperatorBase {
     auto* queue_holder_var = scope.FindVar(queue_name);
     PADDLE_ENFORCE_NOT_NULL(
         queue_holder_var,
-        "No LoDTensorBlockingQueueHolder variable with name %s found",
-        queue_name);
+        platform::errors::NotFound(
+            "No LoDTensorBlockingQueueHolder variable with name %s found. This "
+            "may be because the DataLoader is defined in another Scope, "
+            "which is different from the Scope when calling Executor.run.",
+            queue_name));
     std::shared_ptr<LoDTensorBlockingQueue> queue;
     std::shared_ptr<OrderedMultiDeviceLoDTensorBlockingQueue> ordered_queue;
     int dev_idx = -1;
diff --git a/paddle/fluid/operators/reader/read_op.cc b/paddle/fluid/operators/reader/read_op.cc
index f23c858bb637d6f78a9ae9ca135c4ab50c3bfb86..9a5ef25b5b6cf96c05ac1cbe8353fa0134a15adf 100644
--- a/paddle/fluid/operators/reader/read_op.cc
+++ b/paddle/fluid/operators/reader/read_op.cc
@@ -43,24 +43,25 @@ bool DimensionIsCompatibleWith(const framework::DDim& first,
 class ReadInferShape : public framework::InferShapeBase {
  public:
   void operator()(framework::InferShapeContext* ctx) const override {
-    PADDLE_ENFORCE(ctx->HasInput("Reader"),
-                   "The ReadOp must take a reader as input.");
-    PADDLE_ENFORCE(ctx->HasOutputs("Out"),
-                   "The ReadOp should be assigned with output.");
+    OP_INOUT_CHECK(ctx->HasInput("Reader"), "Input", "Reader", "read");
+    OP_INOUT_CHECK(ctx->HasOutputs("Out"), "Output", "Out", "read");
     if (!ctx->IsRuntime() && ctx->Attrs().Get<bool>("infer_out")) {
       std::vector<framework::DDim> reader_dims = ctx->GetReaderDims("Reader");
       std::vector<std::string> out_names = ctx->Outputs("Out");
       PADDLE_ENFORCE_EQ(
           reader_dims.size(), out_names.size(),
-          "The reader's dim number doesn't match the output number.");
+          platform::errors::InvalidArgument(
+              "The reader's dim number doesn't match the output number."));
       ctx->SetOutputsDim("Out", reader_dims);
       auto in_desc =
           boost::get<framework::VarDesc*>(ctx->GetInputVarPtrs("Reader")[0]);
       auto in_lod_levels = in_desc->GetLoDLevels();
       auto out_var_ptrs = ctx->GetOutputVarPtrs("Out");
-      PADDLE_ENFORCE_EQ(in_lod_levels.size(), out_var_ptrs.size(),
-                        "LoDLevels of Input(Reader) must be the same as the "
-                        "number of Outputs(Out).");
+      PADDLE_ENFORCE_EQ(
+          in_lod_levels.size(), out_var_ptrs.size(),
+          platform::errors::InvalidArgument(
+              "LoDLevels of Input(Reader) must be the same as the "
+              "number of Outputs(Out)."));
       for (size_t i = 0; i < out_var_ptrs.size(); ++i) {
         auto* out_desc = boost::get<framework::VarDesc*>(out_var_ptrs[i]);
         out_desc->SetLoDLevel(in_lod_levels[i]);
@@ -109,31 +110,36 @@ class ReadOp : public framework::OperatorBase {
       VLOG(3) << "throw_eof_exp";
       PADDLE_THROW_EOF();
     }
-    PADDLE_ENFORCE_EQ(ins.size(), out_arg_names.size(),
-                      "input size and output size of read_op do not match");
+    PADDLE_ENFORCE_EQ(
+        ins.size(), out_arg_names.size(),
+        platform::errors::InvalidArgument("input data number and output data "
+                                          "number of read_op do not match"));
 
     const std::vector<framework::DDim>& shapes = reader->Shapes();
     const std::vector<framework::proto::VarType::Type>& var_types =
         reader->VarTypes();
     const std::vector<bool>& need_check_feed = reader->NeedCheckFeed();
     PADDLE_ENFORCE_EQ(out_arg_names.size(), need_check_feed.size(),
-                      "output size of read_op and the number of fed "
-                      "variables of reader do not match");
+                      platform::errors::InvalidArgument(
+                          "output size of read_op and the number of fed "
+                          "variables of reader do not match"));
     for (size_t i = 0; i < out_arg_names.size(); ++i) {
       auto* out =
           scope.FindVar(out_arg_names[i])->GetMutable<framework::LoDTensor>();
       if (need_check_feed[i]) {
         auto in_dims = ins[i].dims();
-        PADDLE_ENFORCE_EQ(DimensionIsCompatibleWith(shapes[i], in_dims), true,
-                          "The fed Variable %s should have dimensions = %d, "
-                          "shape = [%s], but received fed shape [%s]",
-                          out_arg_names[i], shapes[i].size(), shapes[i],
-                          in_dims);
+        PADDLE_ENFORCE_EQ(
+            DimensionIsCompatibleWith(shapes[i], in_dims), true,
+            platform::errors::InvalidArgument(
+                "The fed Variable %s should have dimensions = %d, "
+                "shape = [%s], but received fed shape [%s]",
+                out_arg_names[i], shapes[i].size(), shapes[i], in_dims));
         PADDLE_ENFORCE_EQ(
             ins[i].type(), var_types[i],
-            "The data type of fed Variable %s must be %s, but received %s",
-            out_arg_names[i], var_types[i], ins[i].type());
+            platform::errors::InvalidArgument(
+                "The data type of fed Variable %s must be %s, but received %s",
+                out_arg_names[i], var_types[i], ins[i].type()));
       }
       out->ShareDataWith(ins[i]);
       out->set_lod(ins[i].lod());
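Note: the common pattern across every hunk in this diff is to stop passing a bare format
string to PADDLE_ENFORCE_* and instead pass a typed error object from platform::errors
(InvalidArgument, NotFound, ...). The following is a minimal illustrative sketch only, not
part of the patch; the header path and the CheckFeedCount helper are assumptions used to
show the shape of a new-style check.

// Sketch of the new-style enforcement, assuming the usual enforce header is available.
#include "paddle/fluid/platform/enforce.h"

namespace paddle {
namespace operators {

// Hypothetical helper: verifies that the number of fed tensors matches the
// number of output variable names, reporting a typed InvalidArgument error.
static void CheckFeedCount(size_t num_fed, size_t num_outputs) {
  PADDLE_ENFORCE_EQ(
      num_fed, num_outputs,
      platform::errors::InvalidArgument(
          "The number of fed tensors (%d) must equal the number of "
          "read_op outputs (%d).",
          num_fed, num_outputs));
}

}  // namespace operators
}  // namespace paddle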