Unverified commit a82ce2b1, authored by Huihuang Zheng, committed by GitHub

API/OP (ConditionalBlock) error message enhancement (#23480)

Parent: 4489f0d3
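Every hunk in this commit applies the same pattern: a bare PADDLE_ENFORCE(cond, "msg") or PADDLE_THROW("msg") becomes a typed check (PADDLE_ENFORCE_NOT_NULL, PADDLE_ENFORCE_EQ, PADDLE_ENFORCE_GE, ...) whose message is wrapped in a platform::errors::* category such as PreconditionNotMet or InvalidArgument. For illustration only, here is a minimal standalone sketch of that style of check; ENFORCE_NOT_NULL and the errors:: helper below are simplified stand-ins, not Paddle's actual implementation.

// Minimal standalone sketch of the typed-error enforce pattern.
// ENFORCE_NOT_NULL and errors::PreconditionNotMet are simplified
// stand-ins for PADDLE_ENFORCE_NOT_NULL / platform::errors::*.
#include <cstdio>
#include <stdexcept>
#include <string>

namespace errors {
// Tag the message with an error category, as platform::errors::* does.
inline std::string PreconditionNotMet(const std::string &msg) {
  return "PreconditionNotMetError: " + msg;
}
}  // namespace errors

// Fail with the typed message plus the failing check's location.
#define ENFORCE_NOT_NULL(ptr, typed_msg)                                   \
  do {                                                                     \
    if ((ptr) == nullptr) {                                                \
      throw std::runtime_error(std::string(typed_msg) + " at " + __FILE__ + \
                               ":" + std::to_string(__LINE__));            \
    }                                                                      \
  } while (0)

int main() {
  int *scope_var = nullptr;  // stands in for scope.FindVar(...) returning null
  try {
    ENFORCE_NOT_NULL(scope_var,
                     errors::PreconditionNotMet(
                         "Scope must be set in conditional_block_op."));
  } catch (const std::exception &e) {
    std::printf("%s\n", e.what());  // category + message + file:line
  }
}

The payoff, visible in every hunk below, is that a failure now reports an error category and a self-describing message (e.g. "Scope must be set in conditional_block_op.") instead of a bare "Must set scope".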
@@ -51,7 +51,9 @@ class ConditionalBlockInferOp : public ConditionalOp {
     if (need_run) {
       auto *scope_var = scope.FindVar(Output("Scope"));
-      PADDLE_ENFORCE(scope_var != nullptr, "Must set scope");
+      PADDLE_ENFORCE_NOT_NULL(
+          scope_var, platform::errors::PreconditionNotMet(
+                         "Scope must be set in ConditionalBlockInferOp."));
       auto *scopes = scope_var->GetMutable<std::vector<framework::Scope *>>();
       scopes->resize(1);
       scopes->front() = &scope.NewScope();
......
@@ -56,7 +56,9 @@ class ConditionalBlockOp : public ConditionalOp {
     if (need_run) {
       auto *scope_var = scope.FindVar(Output(ConditionalOp::kScope));
-      PADDLE_ENFORCE(scope_var != nullptr, "Must set scope");
+      PADDLE_ENFORCE_NOT_NULL(
+          scope_var, platform::errors::PreconditionNotMet(
+                         "Scope must be set in conditional_block_op."));
       auto *scopes = scope_var->GetMutable<std::vector<framework::Scope *>>();
       scopes->resize(1);
       scopes->front() = &scope.NewScope();
@@ -79,7 +81,7 @@ class ConditionalBlockInferShape : public framework::InferShapeBase {
   void operator()(framework::InferShapeContext *context) const override {
     PADDLE_ENFORCE_EQ(context->HasInputs(ConditionalOp::kCondition), true,
                       platform::errors::InvalidArgument(
-                          "conditional_block_op must have condition input"));
+                          "conditional_block_op must have condition input."));
   }
 };
@@ -116,13 +118,13 @@ class ConditionalBlockGradOp : public ConditionalOp {
     }
     auto *scope_var = scope.FindVar(Input(ConditionalOp::kScope));
-    PADDLE_ENFORCE_NE(scope_var, nullptr,
-                      platform::errors::InvalidArgument(
-                          "Scope must be set in conditional block op"));
+    PADDLE_ENFORCE_NOT_NULL(
+        scope_var, platform::errors::PreconditionNotMet(
+                       "Scope must be set in conditional block op."));
     auto &scopes = scope_var->Get<std::vector<framework::Scope *>>();
     PADDLE_ENFORCE_GT(scopes.size(), 0,
                       platform::errors::InvalidArgument(
-                          "Scope must be set in conditional block op"));
+                          "Scope must be set in conditional block op."));
     framework::Scope &cur_scope = *scopes[0];
     framework::Executor exec(dev_place);
@@ -192,7 +194,7 @@ class ConditionalBlockGradOp : public ConditionalOp {
           PADDLE_ENFORCE_EQ(outside_var->IsType<framework::LoDTensor>(), true,
                             platform::errors::InvalidArgument(
                                 "Type of outside_var %s is NOT LoDTensor, which "
-                                "doesn't match input_var %s",
+                                "doesn't match input_var %s.",
                                 outside_grad_name, input_name));
           AssignZeroToOutsideTensor(
               place, scope, input_var->Get<framework::LoDTensor>(),
@@ -202,7 +204,7 @@ class ConditionalBlockGradOp : public ConditionalOp {
               true,
               platform::errors::InvalidArgument(
                   "Type of outside_var %s is NOT LoDTensorArray, "
-                  "which doesn't match input_var %s",
+                  "which doesn't match input_var %s.",
                   outside_grad_name, input_name));
           const auto &input_tensors = input_var->Get<framework::LoDTensorArray>();
           auto *outside_tensors =
@@ -210,7 +212,7 @@ class ConditionalBlockGradOp : public ConditionalOp {
           PADDLE_ENFORCE_EQ(input_tensors.size(), outside_tensors->size(),
                             platform::errors::InvalidArgument(
-                                "LoDTensorArray outside_var %s doen't have same "
-                                "size as input_var %s",
+                                "LoDTensorArray outside_var %s doesn't have the "
+                                "same size as input_var %s.",
                                 outside_grad_name, input_name));
           for (size_t j = 0; j < input_tensors.size(); ++j) {
             AssignZeroToOutsideTensor(place, scope, input_tensors[j],
@@ -220,7 +222,7 @@ class ConditionalBlockGradOp : public ConditionalOp {
           // TODO(huihuangzheng): add support for SelectedRows
           PADDLE_THROW(platform::errors::InvalidArgument(
               "Conditional block grad op doesn't support non-LoDTensor output "
-              "now"));
+              "now."));
         }
       }
     }
@@ -245,7 +247,10 @@ class ConditionalBlockGradOp : public ConditionalOp {
 class ConditionalBlockGradInferShape : public framework::InferShapeBase {
  public:
   void operator()(framework::InferShapeContext *context) const override {
-    PADDLE_ENFORCE(context->HasInputs(ConditionalOp::kCondition));
+    PADDLE_ENFORCE_EQ(
+        context->HasInputs(ConditionalOp::kCondition), true,
+        platform::errors::InvalidArgument(
+            "Condition must be set in conditional_block_grad_op."));
     if (context->HasInputs(ConditionalOp::kInputs) &&
         context->HasOutputs(framework::GradVarName(ConditionalOp::kInputs))) {
       context->SetOutputsDim(framework::GradVarName(ConditionalOp::kInputs),
......
@@ -49,7 +49,9 @@ class ConditionalOp : public framework::OperatorBase {
         xs.begin(), xs.end(), retv.begin(),
         [&scope](const std::string &var_name) -> const framework::LoDTensor * {
           auto *var = scope.FindVar(var_name);
-          PADDLE_ENFORCE(var != nullptr, "Cannot find variable %s", var_name);
+          PADDLE_ENFORCE_NOT_NULL(
+              var, platform::errors::InvalidArgument("Cannot find variable %s",
+                                                     var_name));
           return &var->Get<framework::LoDTensor>();
         });
     return retv;
@@ -57,15 +59,17 @@ class ConditionalOp : public framework::OperatorBase {
   bool ScalarCondition(
       const std::vector<const framework::LoDTensor *> &ips) const {
-    if (!(ips.size() == 1UL && ips[0]->IsInitialized())) {
-      PADDLE_THROW("should have one initialized input as condition");
-    }
+    PADDLE_ENFORCE_EQ(
+        ips.size() == 1UL && ips[0]->IsInitialized(), true,
+        platform::errors::InvalidArgument(
+            "condition should have one initialized input as condition"));
 
-    PADDLE_ENFORCE(ips[0]->type() == framework::proto::VarType::BOOL &&
-                       ips[0]->numel() == 1,
-                   "condition input's data type should be bool, "
-                   "numel should be 1, actual numel is %d",
-                   ips[0]->numel());
+    PADDLE_ENFORCE_EQ(ips[0]->type() == framework::proto::VarType::BOOL &&
+                          ips[0]->numel() == 1,
+                      true, platform::errors::InvalidArgument(
+                                "condition input's data type should be bool, "
+                                "numel should be 1, actual numel is %d",
+                                ips[0]->numel()));
     bool res = false;
     if (platform::is_gpu_place(ips[0]->place())) {
 #ifdef PADDLE_WITH_CUDA
......
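The ScalarCondition hunk above keeps printf-style arguments inside the typed error, so the message reports the offending value (the actual numel). Below is a minimal standalone sketch of that formatting idea; InvalidArgument and ENFORCE_EQ here are hypothetical stand-ins for platform::errors::InvalidArgument and PADDLE_ENFORCE_EQ, not Paddle's real implementations.

// Sketch of a typed error message with printf-style arguments, so the
// failure reports the offending value instead of a bare assertion.
#include <cstdio>
#include <stdexcept>
#include <string>

// Hypothetical stand-in for platform::errors::InvalidArgument.
template <typename... Args>
std::string InvalidArgument(const char *fmt, Args... args) {
  char buf[256];
  std::snprintf(buf, sizeof(buf), fmt, args...);
  return std::string("InvalidArgumentError: ") + buf;
}

// Compare two values and throw the prepared typed message on mismatch.
#define ENFORCE_EQ(a, b, typed_msg)                      \
  do {                                                   \
    if ((a) != (b)) throw std::runtime_error(typed_msg); \
  } while (0)

int main() {
  int numel = 3;  // pretend the condition tensor holds 3 elements, not 1
  try {
    ENFORCE_EQ(numel == 1, true,
               InvalidArgument("condition input's numel should be 1, "
                               "actual numel is %d",
                               numel));
  } catch (const std::exception &e) {
    std::printf("%s\n", e.what());  // prints the category and the actual numel
  }
}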
@@ -31,7 +31,12 @@ static bool IsMatchedConditionalBlockOpAndConditionalBlockGradOp(
 static void FindAllConditionalBlockAndConditionalBlockGradOp(
     const framework::ProgramDesc &program, std::vector<OpVariant> *fwd_ops,
     std::vector<OpVariant> *bwd_ops) {
-  PADDLE_ENFORCE_GE(fwd_ops->size(), bwd_ops->size());
+  PADDLE_ENFORCE_GE(
+      fwd_ops->size(), bwd_ops->size(),
+      platform::errors::InvalidArgument(
+          "Size of forward ops must be greater than or equal to backward ops. "
+          "The number of forward ops is %d and the number of backward ops is %d.",
+          fwd_ops->size(), bwd_ops->size()));
 
   for (size_t i = 1; i < program.Size(); ++i) {
     auto &block = program.Block(i);
@@ -47,7 +52,11 @@ static void FindAllConditionalBlockAndConditionalBlockGradOp(
   PADDLE_ENFORCE_GE(
       fwd_ops->size(), bwd_ops->size(),
-      "There are extra conditional_block_grad ops in the graph or program");
+      platform::errors::InvalidArgument(
+          "There are more conditional_block_grad ops than "
+          "conditional_block ops in the graph or program. The number of "
+          "forward ops is %d and the number of backward ops is %d.",
+          fwd_ops->size(), bwd_ops->size()));
 }
 
 static void SetSkipVarsForConditionalBlockOp(OpVariant *fwd_op,
@@ -102,14 +111,17 @@ static void PrepareSafeEagerDeletionOnConditionalOpAndConditionalGradOpImpl(
     for (auto &fwd_op : ifelse_op_set) {
       if (IsMatchedConditionalBlockOpAndConditionalBlockGradOp(fwd_op,
                                                                bwd_op)) {
-        PADDLE_ENFORCE(matched_fwd_op == nullptr,
-                       "Found multiple matched conditional_block ops");
+        PADDLE_ENFORCE_EQ(matched_fwd_op, nullptr,
+                          platform::errors::PreconditionNotMet(
+                              "Found multiple matched conditional_block ops."));
         matched_fwd_op = &fwd_op;
       }
     }
-    PADDLE_ENFORCE_NOT_NULL(matched_fwd_op,
-                            "Cannot find matched forward conditional_block op");
+    PADDLE_ENFORCE_NOT_NULL(
+        matched_fwd_op,
+        platform::errors::PreconditionNotMet(
+            "Cannot find matched forward conditional_block op."));
     SetSkipVarsForConditionalBlockOp(const_cast<OpVariant *>(matched_fwd_op),
                                      &bwd_op);
......
@@ -1812,8 +1812,7 @@ class ConditionalBlockGuard(BlockGuard):
     """
 
     def __init__(self, block):
-        if not isinstance(block, ConditionalBlock):
-            raise TypeError("block should be conditional block")
+        check_type(block, "block", ConditionalBlock, "ConditionalBlockGuard")
         super(ConditionalBlockGuard, self).__init__(block.helper.main_program)
         self.block = block
@@ -1855,8 +1854,7 @@ class ConditionalBlock(object):
     def __init__(self, inputs, is_scalar_condition=False, name=None):
         for each_input in inputs:
-            if not isinstance(each_input, Variable):
-                raise TypeError("Each input should be variable")
+            check_type(each_input, "input", Variable, "ConditionalBlock")
         self.inputs = inputs
         self.is_scalar_condition = is_scalar_condition
         self.helper = LayerHelper('conditional_block', name=name)
......