Unverified commit 7fa9f16c, authored by Chen Weihang, committed by GitHub

Polish reader folder error message (#24698)

* polish reader error message, test=develop

* fix detail error, test=develop

* reset activation dcudnn change, test=develop
Parent dbfe5333
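The hunks below replace generic PADDLE_ENFORCE(cond, msg) checks in the reader operators with typed comparison macros (PADDLE_ENFORCE_EQ/NE/GT/LT/NOT_NULL) and an explicit error category from platform::errors. A minimal sketch of that pattern, assuming the paddle/fluid/platform/enforce.h header that defines these macros (path assumed); the helper CheckQueueCapacity is hypothetical and only contrasts the old and new styles:

#include "paddle/fluid/platform/enforce.h"  // assumed location of PADDLE_ENFORCE_* and platform::errors

namespace paddle {
namespace operators {
namespace reader {

void CheckQueueCapacity(size_t capacity) {
  // Old style: a generic condition plus a bare format string.
  //   PADDLE_ENFORCE(capacity > 0,
  //                  "The capacity of a reader::BlockingQueue must be greater than 0.");
  // New style: a typed comparison macro plus an explicit error category.
  PADDLE_ENFORCE_GT(capacity, static_cast<size_t>(0),
                    platform::errors::InvalidArgument(
                        "The capacity of a reader::BlockingQueue must be "
                        "greater than 0, but received capacity is %d.",
                        capacity));
}

}  // namespace reader
}  // namespace operators
}  // namespace paddle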
......@@ -43,7 +43,8 @@ class OpVariant {
const AttrType &Attr(const std::string &name) const {
auto &attrs = Attrs();
auto it = attrs.find(name);
PADDLE_ENFORCE(it != attrs.end(), "Cannot find attribute %s", name);
PADDLE_ENFORCE_NE(it, attrs.end(), platform::errors::NotFound(
"Cannot find attribute %s.", name));
return BOOST_GET_CONST(AttrType, it->second);
}
......
......@@ -51,7 +51,7 @@ void Communicator::InitAll(const std::vector<int>& gpus) {
for (size_t i = 0; i < gpus.size(); ++i) {
(*comm_id_map)[gpus[i]] = i;
}
PADDLE_ENFORCE(
PADDLE_ENFORCE_CUDA_SUCCESS(
dynload::ncclCommInitAll(global_comms->data(), gpus.size(), gpus.data()));
inited = true;
}
......
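For calls that return a CUDA/NCCL status code, the check above switches to PADDLE_ENFORCE_CUDA_SUCCESS, which throws if the wrapped call does not report success. A minimal sketch, assuming the same enforce header and the CUDA runtime; the helper SetDeviceOrThrow is hypothetical:

#include <cuda_runtime.h>

#include "paddle/fluid/platform/enforce.h"  // assumed header providing PADDLE_ENFORCE_CUDA_SUCCESS

void SetDeviceOrThrow(int device_id) {
  // Throws (carrying the decoded CUDA error) when the wrapped call
  // does not return cudaSuccess.
  PADDLE_ENFORCE_CUDA_SUCCESS(cudaSetDevice(device_id));
}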
......@@ -34,9 +34,11 @@ class BlockingQueue {
public:
explicit BlockingQueue(size_t capacity, bool speed_test_mode = false)
: capacity_(capacity), speed_test_mode_(speed_test_mode) {
PADDLE_ENFORCE_GT(
capacity_, static_cast<size_t>(0),
"The capacity of a reader::BlockingQueue must be greater than 0.");
PADDLE_ENFORCE_GT(capacity_, static_cast<size_t>(0),
platform::errors::InvalidArgument(
"The capacity of a reader::BlockingQueue must be "
"greater than 0, but received capacity is %d.",
capacity_));
}
bool Send(const T& elem) {
......@@ -49,7 +51,10 @@ class BlockingQueue {
<< "WARNING: Sending an element to a closed reader::BlokcingQueue.";
return false;
}
PADDLE_ENFORCE_LT(queue_.size(), capacity_);
PADDLE_ENFORCE_LT(
queue_.size(), capacity_,
platform::errors::PermissionDenied(
"The queue size cannot exceed the set queue capacity."));
queue_.push_back(elem);
receive_cv_.notify_one();
return true;
......@@ -65,7 +70,10 @@ class BlockingQueue {
<< "WARNING: Sending an element to a closed reader::BlokcingQueue.";
return false;
}
PADDLE_ENFORCE_LT(queue_.size(), capacity_);
PADDLE_ENFORCE_LT(
queue_.size(), capacity_,
platform::errors::PermissionDenied(
"The queue size cannot exceed the set queue capacity."));
queue_.emplace_back(std::move(elem));
receive_cv_.notify_one();
return true;
......@@ -77,7 +85,9 @@ class BlockingQueue {
[&] { return !queue_.empty() || closed_ || killed_; });
EnforceNotKilled();
if (!queue_.empty()) {
PADDLE_ENFORCE_NOT_NULL(elem);
PADDLE_ENFORCE_NOT_NULL(
elem, platform::errors::InvalidArgument(
"The holder to receive queue data is null pointer."));
*elem = queue_.front();
if (LIKELY(!speed_test_mode_)) {
queue_.pop_front();
......@@ -85,7 +95,10 @@ class BlockingQueue {
send_cv_.notify_one();
return true;
} else {
PADDLE_ENFORCE(closed_);
PADDLE_ENFORCE_EQ(closed_, true,
platform::errors::PermissionDenied(
"Blocking queue status error, if queue is empty "
"when pop data, it should be closed."));
VLOG(3) << "queue is closed! return nothing.";
return false;
}
......@@ -136,9 +149,9 @@ class BlockingQueue {
private:
inline void EnforceNotKilled() {
PADDLE_ENFORCE_NE(
killed_, true,
"Blocking queue is killed because the data reader raises an exception");
PADDLE_ENFORCE_NE(killed_, true, platform::errors::Fatal(
"Blocking queue is killed because the "
"data reader raises an exception."));
}
private:
......
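The checks in blocking_queue.h guard the reader::BlockingQueue invariants: a positive capacity at construction, a non-null holder passed to Receive, and a closed queue whenever an empty pop returns false. A minimal usage sketch, assuming the interface shown in the hunks above; the header path and the surrounding function are assumptions:

#include "paddle/fluid/operators/reader/blocking_queue.h"  // assumed path

void BlockingQueueUsageSketch() {
  using paddle::operators::reader::BlockingQueue;

  // A zero capacity would raise the InvalidArgument error added above.
  BlockingQueue<int> queue(/*capacity=*/2);
  queue.Send(1);  // returns false (with a warning) once the queue is closed

  int value = 0;
  // Passing a null holder would raise InvalidArgument; a false return here
  // means the queue was empty and already closed.
  bool received = queue.Receive(&value);
  (void)received;
}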
......@@ -62,7 +62,6 @@ BufferedReader::BufferedReader(
}
void BufferedReader::ReadTillBufferFullAsync() {
PADDLE_ENFORCE_EQ(position_.size(), 0U);
for (size_t i = 0; i < buffer_size_; ++i) {
ReadAsync(i);
}
......@@ -87,8 +86,10 @@ void BufferedReader::ReadAsync(size_t i) {
if (gpu.empty()) {
gpu.resize(cpu.size());
} else {
PADDLE_ENFORCE_EQ(gpu.size(), cpu.size(),
"Input tensor number not matched");
PADDLE_ENFORCE_EQ(
gpu.size(), cpu.size(),
platform::errors::InvalidArgument(
"Input tensor number on GPU and CPU devices are not matched."));
}
std::vector<void *> gpu_ptrs;
......
......@@ -36,8 +36,9 @@ class CreateCTRReaderOp : public framework::OperatorBase {
auto* queue_holder_var = scope.FindVar(queue_name);
PADDLE_ENFORCE_NOT_NULL(
queue_holder_var,
"No LoDTensorBlockingQueueHolder variable with name %s found",
queue_name);
platform::errors::PreconditionNotMet(
"No LoDTensorBlockingQueueHolder variable with name %s found",
queue_name));
auto* queue_holder =
queue_holder_var->template GetMutable<LoDTensorBlockingQueueHolder>();
......
......@@ -96,11 +96,14 @@ class CreateCustomReaderOpMaker : public DecoratedReaderMakerBase {
class CustomReaderInferShape : public framework::InferShapeBase {
public:
void operator()(framework::InferShapeContext* ctx) const override {
PADDLE_ENFORCE(!ctx->IsRuntime(),
"'CustomReaderInferShape' should only be invoked during "
"compile time.");
PADDLE_ENFORCE(ctx->HasOutput("Out"),
"The output decorated reader should not be null.");
PADDLE_ENFORCE_NE(
ctx->IsRuntime(), true,
platform::errors::PreconditionNotMet(
"'CustomReaderInferShape' should only be invoked during "
"compile time."));
PADDLE_ENFORCE_EQ(ctx->HasOutput("Out"), true,
platform::errors::NotFound(
"The output decorated reader should not be null."));
const auto* sub_block =
ctx->Attrs().Get<framework::BlockDesc*>("sub_block");
const auto sink_var_names =
......@@ -109,7 +112,9 @@ class CustomReaderInferShape : public framework::InferShapeBase {
std::vector<int32_t> res_lod_levels;
for (const std::string& var_name : sink_var_names) {
auto* sink_var = sub_block->FindVar(var_name);
PADDLE_ENFORCE_NOT_NULL(sink_var);
PADDLE_ENFORCE_NOT_NULL(
sink_var, platform::errors::NotFound(
"The sink variable is not found in CustomReader."));
res_dims.emplace_back(sink_var->GetShape());
res_lod_levels.push_back(sink_var->GetLoDLevel());
}
......@@ -124,7 +129,9 @@ class CustomReaderInferVarType : public framework::VarTypeInference {
public:
void operator()(framework::InferVarTypeContext* ctx) const override {
auto& out_var_name = ctx->Output("Out")[0];
PADDLE_ENFORCE(ctx->HasVar(out_var_name));
PADDLE_ENFORCE_EQ(ctx->HasVar(out_var_name), true,
platform::errors::NotFound(
"The output reader variable should not be null."));
ctx->SetType(out_var_name, framework::proto::VarType::READER);
auto sink_var_names = BOOST_GET_CONST(std::vector<std::string>,
......@@ -134,7 +141,9 @@ class CustomReaderInferVarType : public framework::VarTypeInference {
std::vector<framework::proto::VarType::Type> res_data_types;
for (const std::string& var_name : sink_var_names) {
framework::VarDesc* var = sub_block->FindVar(var_name);
PADDLE_ENFORCE_NOT_NULL(var);
PADDLE_ENFORCE_NOT_NULL(
var, platform::errors::NotFound(
"The sink variable is not found in CustomReader."));
res_data_types.emplace_back(var->GetDataType());
}
ctx->SetDataTypes(out_var_name, res_data_types);
......@@ -149,11 +158,13 @@ void CustomReader::ReadNextImpl(std::vector<framework::LoDTensor>* out) {
// There is not next data.
return;
}
PADDLE_ENFORCE(source_var_names_.size() == underlying_outs.size(),
"The size of source_var_names(%d) and the size of "
"underlying_outs(%d) are not consistent. Each feeding element "
"must have its own source variable.",
source_var_names_.size(), underlying_outs.size());
PADDLE_ENFORCE_EQ(
source_var_names_.size(), underlying_outs.size(),
platform::errors::InvalidArgument(
"The size of source_var_names(%d) and the size of "
"underlying_outs(%d) are not consistent. Each feeding element "
"must have its own source variable.",
source_var_names_.size(), underlying_outs.size()));
// The scope for CustomReader's sub-block should be independent and shouldn't
// be any other computation scope's child. Otherwise, data preprocessing and
// compution cannot be concurrent.
......
......@@ -201,9 +201,10 @@ class OrderedMultiDeviceLoDTensorBlockingQueue {
class LoDTensorBlockingQueueHolder {
public:
void InitOnce(size_t capacity, bool speed_test_mode = false) {
PADDLE_ENFORCE(
queue_ == nullptr,
"LoDTensorBlockingQueueHolder::InitOnce() can only be called once");
PADDLE_ENFORCE_EQ(
queue_, nullptr,
platform::errors::AlreadyExists("LoDTensorBlockingQueueHolder::"
"InitOnce() can only be called once"));
queue_.reset(new LoDTensorBlockingQueue(capacity, speed_test_mode));
}
......
......@@ -25,7 +25,9 @@ PyReader::PyReader(
const std::vector<framework::proto::VarType::Type>& var_types,
const std::vector<bool>& need_check_feed)
: framework::FileReader(dims, var_types, need_check_feed) {
PADDLE_ENFORCE(queue != nullptr, "LoDTensorBlockingQueue must not be null");
PADDLE_ENFORCE_NOT_NULL(queue,
platform::errors::PreconditionNotMet(
"LoDTensorBlockingQueue must not be null."));
queue_ = queue;
}
......
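Null-pointer checks such as the PyReader constructor above use PADDLE_ENFORCE_NOT_NULL with an error category instead of a bare message. A minimal sketch, assuming the enforce header; CheckQueueNotNull and its QueueType parameter are hypothetical:

#include <memory>

#include "paddle/fluid/platform/enforce.h"  // assumed header path

// Hypothetical helper mirroring the constructor check above; works for any
// pointer-like argument that compares against nullptr.
template <typename QueueType>
void CheckQueueNotNull(const std::shared_ptr<QueueType>& queue) {
  PADDLE_ENFORCE_NOT_NULL(queue,
                          paddle::platform::errors::PreconditionNotMet(
                              "LoDTensorBlockingQueue must not be null."));
}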
......@@ -78,7 +78,10 @@ class ReadInferVarType : public framework::StaticGraphVarTypeInference {
std::string reader_name = Input(ctx, "Reader")[0];
auto& out_names = Output(ctx, "Out");
auto dtypes = GetDataTypes(ctx, reader_name);
PADDLE_ENFORCE_EQ(dtypes.size(), out_names.size());
PADDLE_ENFORCE_EQ(dtypes.size(), out_names.size(),
platform::errors::InvalidArgument(
"The number of input reader's dtypes do not match "
"the output variable number."));
for (size_t i = 0; i < dtypes.size(); ++i) {
SetType(ctx, out_names[i], framework::proto::VarType::LOD_TENSOR);
SetDataType(ctx, out_names[i], dtypes[i]);
......
......@@ -62,12 +62,14 @@ void FileReaderMakerBase::Make() {
}
void FileReaderInferShape::operator()(framework::InferShapeContext* ctx) const {
PADDLE_ENFORCE(
!ctx->IsRuntime(),
"'FileReaderInferShape' should only be invoked during compile time.");
PADDLE_ENFORCE(ctx->HasOutput("Out"),
"The output file reader should not be null.");
PADDLE_ENFORCE_NE(
ctx->IsRuntime(), true,
platform::errors::PreconditionNotMet("'FileReaderInferShape' should only "
"be invoked during compile time."));
PADDLE_ENFORCE_EQ(
ctx->HasOutput("Out"), true,
platform::errors::NotFound("The output file reader should not be null."));
bool use_data_config = ctx->Attrs().Get<bool>("use_data_config");
if (use_data_config) {
const auto shape_concat =
......@@ -77,21 +79,26 @@ void FileReaderInferShape::operator()(framework::InferShapeContext* ctx) const {
ctx->SetReaderDims("Out", shapes);
const auto lod_levels = ctx->Attrs().Get<std::vector<int>>("lod_levels");
PADDLE_ENFORCE_EQ(lod_levels.size(), shapes.size(),
"The number of 'lod_levels'(%d) doesn't match the number "
"of 'shapes'(%d).",
lod_levels.size(), shapes.size());
PADDLE_ENFORCE_EQ(
lod_levels.size(), shapes.size(),
platform::errors::InvalidArgument(
"The number of 'lod_levels'(%d) doesn't match the number "
"of 'shapes'(%d).",
lod_levels.size(), shapes.size()));
const auto dtypes = ctx->Attrs().Get<std::vector<int>>("dtypes");
PADDLE_ENFORCE_EQ(
dtypes.size(), shapes.size(),
"The number of 'dtypes'(%d) doesn't match the number of 'shapes'(%d).",
dtypes.size(), shapes.size());
platform::errors::InvalidArgument("The number of 'dtypes'(%d) doesn't "
"match the number of 'shapes'(%d).",
dtypes.size(), shapes.size()));
const auto need_check_feed =
ctx->Attrs().Get<std::vector<int>>("need_check_feed");
PADDLE_ENFORCE_EQ(need_check_feed.size(), shapes.size(),
"The number of 'need_check_feed'(%d) doesn't match the "
"number of 'shapes'(%d).",
need_check_feed.size(), shapes.size());
PADDLE_ENFORCE_EQ(
need_check_feed.size(), shapes.size(),
platform::errors::InvalidArgument(
"The number of 'need_check_feed'(%d) doesn't match the "
"number of 'shapes'(%d).",
need_check_feed.size(), shapes.size()));
framework::VarDesc* reader =
BOOST_GET(framework::VarDesc*, ctx->GetOutputVarPtrs("Out")[0]);
reader->SetLoDLevels(lod_levels);
......@@ -105,14 +112,18 @@ void FileReaderInferVarType::operator()(
void DecoratedReaderInferShape::operator()(
framework::InferShapeContext* ctx) const {
PADDLE_ENFORCE(!ctx->IsRuntime(),
"'DecoratedReaderInferShape' should only be invoked during "
"compile time.");
PADDLE_ENFORCE(ctx->HasInput("UnderlyingReader"),
"Input(UnderlyingReader) should not be null.");
PADDLE_ENFORCE(ctx->HasOutput("Out"),
"The output decorated reader should not be null.");
PADDLE_ENFORCE_NE(
ctx->IsRuntime(), true,
platform::errors::PreconditionNotMet(
"'DecoratedReaderInferShape' should only be invoked during "
"compile time."));
PADDLE_ENFORCE_EQ(ctx->HasInput("UnderlyingReader"), true,
platform::errors::NotFound(
"Input(UnderlyingReader) should not be null."));
PADDLE_ENFORCE_EQ(ctx->HasOutput("Out"), true,
platform::errors::NotFound(
"The output decorated reader should not be null."));
ctx->SetReaderDims("Out", ctx->GetReaderDims("UnderlyingReader"));
framework::VarDesc* in_reader = BOOST_GET(
......
......@@ -45,7 +45,7 @@ function walk_dir(){
if [ $level -le 1 ]; then
enforce_scan $1"/"$file total_check_cnt valid_check_cnt
dir_name=$1
echo "${dir_name#../}"/"$file - total: ${total_check_cnt}, valid: ${valid_check_cnt}, invalid: $(($total_check_cnt-$valid_check_cnt))"
echo "${dir_name#../}/"$file" | ${total_check_cnt} | ${valid_check_cnt} | $(($total_check_cnt-$valid_check_cnt))"
ALL_PADDLE_CHECK_CNT=$(($ALL_PADDLE_CHECK_CNT+$total_check_cnt))
VALID_PADDLE_CHECK_CNT=$(($VALID_PADDLE_CHECK_CNT+$valid_check_cnt))
walk_dir $1"/"$file $level
......
......@@ -29,6 +29,15 @@
ROOT_DIR=../paddle/fluid/operators
white_list_str = "\
layer_norm_op.cc \
box_clip_op.cc \
box_clip_op.h \
random_crop_op.h \
elementwise_op_function.cu.h \
fused_elemwise_activation_op.cc \
auc_op.cu"
function enforce_scan(){
paddle_check=`grep -r -zoE "(PADDLE_ENFORCE[A-Z_]{0,9}|PADDLE_THROW)\(.[^,\);]*.[^;]*\);\s" $1 || true`
total_check_cnt=`echo "$paddle_check" | grep -cE "(PADDLE_ENFORCE|PADDLE_THROW)" || true`
......@@ -45,14 +54,16 @@ function walk_dir(){
for file in `ls $1`
do
if [ -f $1"/"$file ];then
enforce_scan $1"/"$file file_total_check_cnt file_valid_check_cnt
file_invalid_check_cnt=$(($total_check_cnt-$valid_check_cnt))
if [ $file_invalid_check_cnt -gt 0 ];then
echo "- $file | ${file_total_check_cnt} | ${file_valid_check_cnt} | ${file_invalid_check_cnt}"
in_white_list=$(echo $white_list_str | grep "${file}")
if [[ "$in_white_list" == "" ]];then
enforce_scan $1"/"$file file_total_check_cnt file_valid_check_cnt
file_invalid_check_cnt=$(($total_check_cnt-$valid_check_cnt))
if [ $file_invalid_check_cnt -gt 0 ];then
echo "- $file | ${file_total_check_cnt} | ${file_valid_check_cnt} | ${file_invalid_check_cnt}"
fi
fi
fi
if [ -d $1"/"$file ];then
dir_array[$i]=$1"/"$file
((i++))
fi
......