Created by: willthefrog
Cherry-pick of #24409.
Test the error messages with the following code snippet:
```python
import paddle
from paddle import fluid
import paddle.fluid.core as core
import numpy as np


def test_error_message(skip_grad_var=[], check_sub_result=False):
    x = np.random.uniform(0.1, 0.6, (2, 3, 4)).astype("float32")
    y = np.random.uniform(0.1, 0.6, (2, 3, 4)).astype("float32")
    sub_res = x - y
    sub_res = sub_res.reshape((2, 3 * 4))
    output = sub_res * sub_res
    out_grad = np.random.random_sample((2, 1)).astype(np.float32)
    sub_result = np.random.uniform(0.1, 0.6, (1, 3 * 4)).astype("float32")
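    # Feed data for the grad-only program built below. With
    # check_sub_result=True, 'sub_result' is fed with first dimension 1,
    # which triggers the InvalidArgumentError about gradient vs. target
    # first dimensions.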
    var_dict = {
        'x': x,
        'y': y,
        'sub_result': sub_result if check_sub_result else sub_res,
        'out': output,
        'out@GRAD': out_grad,
    }
    program = fluid.Program()
    with fluid.program_guard(program):
        block = program.global_block()
        for name in var_dict:
            block.create_var(
                name=name, dtype='float32', shape=var_dict[name].shape)
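        # Append the forward squared_l2_distance op directly at the block
        # level (no layer wrapper).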
        l2dist_op = block.append_op(
            type="squared_l2_distance",
            inputs={
                "X": block.var('x'),
                "Y": block.var('y'),
            },
            outputs={
                "Out": block.var('out'),
                "sub_result": block.var('sub_result'),
            })
        # generate backward op_desc
        grad_op_desc_list, op_grad_to_var = core.get_grad_op_desc(
            l2dist_op.desc, set(), [])
        grad_op_desc = grad_op_desc_list[0]
        new_op_desc = block.desc.append_op()
        new_op_desc.copy_from(grad_op_desc)
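        # Create var descs for the grad outputs, skipping any listed in
        # skip_grad_var; a skipped variable (e.g. 'x@GRAD') is then missing
        # from scope, which should raise the NotFoundError tested below.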
        grad_vars = [
            name for name in grad_op_desc.output_arg_names()
            if name not in skip_grad_var
        ]
        for var_name in grad_vars:
            block.desc.var(var_name.encode("ascii"))
        grad_op_desc.infer_var_type(block.desc)
        grad_op_desc.infer_shape(block.desc)
        for arg in grad_vars:
            grad_var = block.desc.find_var(arg.encode("ascii"))
            grad_var.set_dtype(core.VarDesc.VarType.FP32)
        # remove forward op, only grad is left
        block.ops = []
        block.desc._remove_op(0, 1)
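        # Run only the remaining grad op, feeding every input it reads.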
        exe = fluid.Executor(fluid.CPUPlace())
        exe.run(program,
                feed={name: var_dict[name]
                      for name in ['x', 'y', 'sub_result', 'out@GRAD']})


# NOTE: run these one at a time and comment out the others.

# InvalidArgumentError: First dimension of gradient must be greater or equal
# than first dimension of target. But received gradient dimension = 1 and
# target dimension is 2.
test_error_message(check_sub_result=True)

# NotFoundError: variable(X@GRAD) cannot be found in scope for operator
# 'squared_l2_distance_grad'.
test_error_message(skip_grad_var=['x@GRAD'])

# NotFoundError: variable(Y@GRAD) cannot be found in scope for operator
# 'squared_l2_distance_grad'.
test_error_message(skip_grad_var=['y@GRAD'])
```
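Each call should abort with the error message quoted in the comment above it.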