Unverified commit 7f3e0eaa, authored by Zeng Jinle, committed by GitHub

refine error msg, test=develop (#23589)

Parent ea22515a
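
The diff below applies one recurring pattern: bare printf-style message strings passed to the `PADDLE_ENFORCE*` macros are wrapped in typed error builders from `platform::errors` (`InvalidArgument`, `NotFound`, ...), and the messages themselves are made more descriptive. A minimal before/after sketch of that pattern, assuming the usual PaddlePaddle enforce header; the `CheckSizes` helper and its arguments are hypothetical and only illustrate the macro call shape, they do not appear in this commit:

```cpp
// Sketch of the error-message refinement pattern applied throughout this
// commit. The helper below is hypothetical and only shows the macro usage.
#include "paddle/fluid/platform/enforce.h"

namespace paddle {

void CheckSizes(size_t lhs, size_t rhs) {
  // Old style: a raw message string (plus printf-style args) follows the
  // compared values directly.
  // PADDLE_ENFORCE_EQ(lhs, rhs, "sizes do not match, %d vs %d", lhs, rhs);

  // New style: the message is wrapped in a typed error from
  // platform::errors, which also records the error category.
  PADDLE_ENFORCE_EQ(lhs, rhs,
                    platform::errors::InvalidArgument(
                        "sizes do not match, %d vs %d.", lhs, rhs));
}

}  // namespace paddle
```
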
......@@ -35,12 +35,16 @@ class ReaderBase {
       : shapes_(shapes),
         var_types_(var_types),
         need_check_feed_(need_check_feed) {
-    PADDLE_ENFORCE_EQ(shapes_.size(), need_check_feed_.size(),
-                      "Construct ReaderBase with mismatched sizes of shapes "
-                      "and need_check_feed");
-    PADDLE_ENFORCE_EQ(var_types_.size(), need_check_feed_.size(),
-                      "Construct ReaderBase with mismatched sizes of var_types "
-                      "and need_check_feed");
+    PADDLE_ENFORCE_EQ(
+        shapes_.size(), need_check_feed_.size(),
+        platform::errors::InvalidArgument(
+            "Construct ReaderBase with mismatched sizes of shapes "
+            "and need_check_feed"));
+    PADDLE_ENFORCE_EQ(
+        var_types_.size(), need_check_feed_.size(),
+        platform::errors::InvalidArgument(
+            "Construct ReaderBase with mismatched sizes of var_types "
+            "and need_check_feed"));
   }
 
   virtual void ReadNext(std::vector<LoDTensor>* out);
......@@ -108,7 +112,10 @@ class DecoratedReader : public ReaderBase,
       : ReaderBase(reader->Shapes(), reader->VarTypes(),
                    reader->NeedCheckFeed()),
         reader_(reader) {
-    PADDLE_ENFORCE_NOT_NULL(reader_);
+    PADDLE_ENFORCE_NOT_NULL(
+        reader_,
+        platform::errors::InvalidArgument(
+            "The underlying reader of DecoratedReader should not be null"));
   }
 
   void RegisterDecorateChain() {
......@@ -148,7 +155,10 @@ class ReaderHolder {
   template <typename T>
   void Reset(const std::shared_ptr<T>& reader) {
     auto reader_base = std::dynamic_pointer_cast<ReaderBase>(reader);
-    PADDLE_ENFORCE_NOT_NULL(reader_base);
+    PADDLE_ENFORCE_NOT_NULL(
+        reader_base,
+        platform::errors::InvalidArgument(
+            "The underlying reader of ReaderHolder should not be null"));
     reader_ = reader_base;
   }
......@@ -157,7 +167,10 @@ class ReaderHolder {
   const std::shared_ptr<ReaderBase>& Get() const { return reader_; }
 
   void ReadNext(std::vector<LoDTensor>* out) {
-    PADDLE_ENFORCE_NOT_NULL(reader_);
+    PADDLE_ENFORCE_NOT_NULL(
+        reader_,
+        platform::errors::InvalidArgument(
+            "The underlying reader of ReaderHolder should not be null"));
     reader_->ReadNext(out);
   }
......@@ -174,13 +187,19 @@ class ReaderHolder {
   void Shutdown() {
     VLOG(1) << "Shutdown";
-    PADDLE_ENFORCE_NOT_NULL(reader_);
+    PADDLE_ENFORCE_NOT_NULL(
+        reader_,
+        platform::errors::InvalidArgument(
+            "The underlying reader of ReaderHolder should not be null"));
     reader_->Shutdown();
   }
 
   void Start() {
     VLOG(1) << "start";
-    PADDLE_ENFORCE_NOT_NULL(reader_);
+    PADDLE_ENFORCE_NOT_NULL(
+        reader_,
+        platform::errors::InvalidArgument(
+            "The underlying reader of ReaderHolder should not be null"));
     reader_->Start();
   }
......
......@@ -35,7 +35,9 @@ class CreateDoubleBufferReaderOp : public framework::OperatorBase {
         dynamic_cast<framework::DecoratedReader*>(out->Get().get());
     PADDLE_ENFORCE_NOT_NULL(
         decorated_reader,
-        platform::errors::NotFound("Not inited with DecoratedReader"));
+        platform::errors::NotFound("The inited reader should be a "
+                                   "DecoratedReader when running "
+                                   "create_double_buffer_reader op."));
     if (decorated_reader->UnderlyingReader() == underlying_reader.Get()) {
       return;
     }
......
......@@ -36,8 +36,11 @@ class CreatePyReaderOp : public framework::OperatorBase {
     auto* queue_holder_var = scope.FindVar(queue_name);
     PADDLE_ENFORCE_NOT_NULL(
         queue_holder_var,
-        "No LoDTensorBlockingQueueHolder variable with name %s found",
-        queue_name);
+        platform::errors::NotFound(
+            "No LoDTensorBlockingQueueHolder variable with name %s found. This "
+            "may be because the DataLoader is defined in another Scope, "
+            "which is different from the Scope when calling Executor.run.",
+            queue_name));
     std::shared_ptr<LoDTensorBlockingQueue> queue;
     std::shared_ptr<OrderedMultiDeviceLoDTensorBlockingQueue> ordered_queue;
     int dev_idx = -1;
......
......@@ -43,24 +43,25 @@ bool DimensionIsCompatibleWith(const framework::DDim& first,
 class ReadInferShape : public framework::InferShapeBase {
  public:
   void operator()(framework::InferShapeContext* ctx) const override {
-    PADDLE_ENFORCE(ctx->HasInput("Reader"),
-                   "The ReadOp must take a reader as input.");
-    PADDLE_ENFORCE(ctx->HasOutputs("Out"),
-                   "The ReadOp should be assigned with output.");
+    OP_INOUT_CHECK(ctx->HasInput("Reader"), "Input", "Reader", "read");
+    OP_INOUT_CHECK(ctx->HasOutputs("Out"), "Output", "Out", "read");
     if (!ctx->IsRuntime() && ctx->Attrs().Get<bool>("infer_out")) {
       std::vector<framework::DDim> reader_dims = ctx->GetReaderDims("Reader");
       std::vector<std::string> out_names = ctx->Outputs("Out");
       PADDLE_ENFORCE_EQ(
           reader_dims.size(), out_names.size(),
-          "The reader's dim number doesn't match the output number.");
+          platform::errors::InvalidArgument(
+              "The reader's dim number doesn't match the output number."));
       ctx->SetOutputsDim("Out", reader_dims);
       auto in_desc =
           boost::get<framework::VarDesc*>(ctx->GetInputVarPtrs("Reader")[0]);
       auto in_lod_levels = in_desc->GetLoDLevels();
       auto out_var_ptrs = ctx->GetOutputVarPtrs("Out");
-      PADDLE_ENFORCE_EQ(in_lod_levels.size(), out_var_ptrs.size(),
-                        "LoDLevels of Input(Reader) must be the same as the "
-                        "number of Outputs(Out).");
+      PADDLE_ENFORCE_EQ(
+          in_lod_levels.size(), out_var_ptrs.size(),
+          platform::errors::InvalidArgument(
+              "LoDLevels of Input(Reader) must be the same as the "
+              "number of Outputs(Out)."));
       for (size_t i = 0; i < out_var_ptrs.size(); ++i) {
         auto* out_desc = boost::get<framework::VarDesc*>(out_var_ptrs[i]);
         out_desc->SetLoDLevel(in_lod_levels[i]);
......@@ -109,31 +110,36 @@ class ReadOp : public framework::OperatorBase {
VLOG(3) << "throw_eof_exp";
PADDLE_THROW_EOF();
}
PADDLE_ENFORCE_EQ(ins.size(), out_arg_names.size(),
"input size and output size of read_op do not match");
PADDLE_ENFORCE_EQ(
ins.size(), out_arg_names.size(),
platform::errors::InvalidArgument("input data number and output data "
"number of read_op do not match"));
const std::vector<framework::DDim>& shapes = reader->Shapes();
const std::vector<framework::proto::VarType::Type>& var_types =
reader->VarTypes();
const std::vector<bool>& need_check_feed = reader->NeedCheckFeed();
PADDLE_ENFORCE_EQ(out_arg_names.size(), need_check_feed.size(),
"output size of read_op and the number of fed "
"variables of reader do not match");
platform::errors::InvalidArgument(
"output size of read_op and the number of fed "
"variables of reader do not match"));
for (size_t i = 0; i < out_arg_names.size(); ++i) {
auto* out =
scope.FindVar(out_arg_names[i])->GetMutable<framework::LoDTensor>();
if (need_check_feed[i]) {
auto in_dims = ins[i].dims();
PADDLE_ENFORCE_EQ(DimensionIsCompatibleWith(shapes[i], in_dims), true,
"The fed Variable %s should have dimensions = %d, "
"shape = [%s], but received fed shape [%s]",
out_arg_names[i], shapes[i].size(), shapes[i],
in_dims);
PADDLE_ENFORCE_EQ(
DimensionIsCompatibleWith(shapes[i], in_dims), true,
platform::errors::InvalidArgument(
"The fed Variable %s should have dimensions = %d, "
"shape = [%s], but received fed shape [%s]",
out_arg_names[i], shapes[i].size(), shapes[i], in_dims));
PADDLE_ENFORCE_EQ(
ins[i].type(), var_types[i],
"The data type of fed Variable %s must be %s, but received %s",
out_arg_names[i], var_types[i], ins[i].type());
platform::errors::InvalidArgument(
"The data type of fed Variable %s must be %s, but received %s",
out_arg_names[i], var_types[i], ins[i].type()));
}
out->ShareDataWith(ins[i]);
out->set_lod(ins[i].lod());
......