diff --git a/paddle/fluid/framework/op_info.h b/paddle/fluid/framework/op_info.h
index 5ce39d0c3a334c7fb61576040c6f920c0a7e884b..e1bc5be8c64f9ec31d2b585be461a2a0b9bef826 100644
--- a/paddle/fluid/framework/op_info.h
+++ b/paddle/fluid/framework/op_info.h
@@ -97,6 +97,8 @@ class OpInfo {
     return grad_op_maker_ != nullptr && !use_empty_grad_op_desc_maker_;
   }
 
+  bool HasEmptyGradOpMaker() const { return use_empty_grad_op_desc_maker_; }
+
   const DygraphGradOpMakerFN& DygraphGradOpMaker() const {
     // Normally, proto_ should not be null, except some special operators, such
     // as LeaklyReluDoubleGrad op.
diff --git a/paddle/fluid/operators/generator/templates/operator_utils.c.j2 b/paddle/fluid/operators/generator/templates/operator_utils.c.j2
index 16f0ecaa642f3a77667a176bfcff4408e5b7e99a..f762eedf3bffb0906804b03ac6880e1da937c58c 100644
--- a/paddle/fluid/operators/generator/templates/operator_utils.c.j2
+++ b/paddle/fluid/operators/generator/templates/operator_utils.c.j2
@@ -477,6 +477,7 @@ REGISTER_OPERATOR({{name}}, ops::{{name | to_pascal_case}}Op,
 {% set backward_name = op["backward"] %}
   ops::{{backward_name | to_pascal_case}}OpMaker<paddle::framework::OpDesc>,
   ops::{{backward_name | to_pascal_case}}OpMaker<paddle::imperative::OpBase>,
+{% elif "forward" in op %}
 {% else %}
   paddle::framework::EmptyGradOpMaker<paddle::framework::OpDesc>,
   paddle::framework::EmptyGradOpMaker<paddle::imperative::OpBase>,
diff --git a/paddle/fluid/pybind/pybind.cc b/paddle/fluid/pybind/pybind.cc
index a9b1bf398f84e6a08b278a3b1a33875cf0363fbe..34bb746c48ed6bc2ad990e0d6f7164c08949e950 100644
--- a/paddle/fluid/pybind/pybind.cc
+++ b/paddle/fluid/pybind/pybind.cc
@@ -1327,8 +1327,7 @@ All parameter, weight, gradient are variables in Paddle.
           if ((grad_op_maker == nullptr) && (grad_comp_op_maker == nullptr)) {
             // Normally, proto_ should not be null, except some special
             // operators, such as LeaklyReluDoubleGrad op.
-            std::string type =
-                op_info.proto_ ? op_info.proto_->type() : "unknown";
+            std::string type = op_desc.Type();
             PADDLE_THROW(platform::errors::NotFound(
                 "Neither operator %s's GradOpMaker nor CompGradOpMaker has "
                 "been registered.\nPlease check whether (%s) operator has "
@@ -1350,7 +1349,8 @@ All parameter, weight, gradient are variables in Paddle.
           VLOG(3) << "need skip: " << need_skip << std::endl;
           if (paddle::prim::PrimCommonUtils::IsBwdPrimEnabled()) {
             if ((grad_comp_op_maker != nullptr) && (!need_skip)) {
-              VLOG(3) << "Runing composite fun for " << op_desc.Type();
+              VLOG(3) << "Prim Flag Open: Running composite grad fun for "
+                      << op_desc.Type();
               grad_op_descs = grad_comp_op_maker(op_desc,
                                                  no_grad_set,
                                                  &grad_to_var,
@@ -1362,9 +1362,13 @@ All parameter, weight, gradient are variables in Paddle.
             }
           } else {
             if (grad_op_maker != nullptr) {
+              VLOG(3) << "Prim Flag Close: Running origin grad fun for "
+                      << op_desc.Type();
               grad_op_descs = grad_op_maker(
                   op_desc, no_grad_set, &grad_to_var, grad_sub_block);
             } else {
+              VLOG(3) << "Prim Flag Close: Running composite grad fun for "
+                      << op_desc.Type();
               grad_op_descs = grad_comp_op_maker(op_desc,
                                                  no_grad_set,
                                                  &grad_to_var,
@@ -1392,6 +1396,9 @@ All parameter, weight, gradient are variables in Paddle.
         .Get(op_type)
         .HasNonEmptyGradOpMaker();
   });
+  m.def("has_empty_grad_op_maker", [](const std::string op_type) {
+    return framework::OpInfoMap::Instance().Get(op_type).HasEmptyGradOpMaker();
+  });
   m.def("has_infer_inplace", [](const std::string op_type) {
     return framework::OpInfoMap::Instance().Get(op_type).HasInferInplace();
   });
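Note: the new `has_empty_grad_op_maker` binding is not simply the negation of the existing `has_non_empty_grad_op_maker`. `HasNonEmptyGradOpMaker()` additionally requires `grad_op_maker_ != nullptr`, so an operator whose gradient is produced only by a composite (prim) grad maker returns false from both predicates; only ops explicitly registered with `EmptyGradOpMaker` report true from `HasEmptyGradOpMaker()`. A minimal sketch of the distinction, assuming the bindings are reachable via `paddle.fluid.core` and using a hypothetical op name:

    from paddle.fluid import core

    op_type = "my_op"  # hypothetical operator name
    # True only when a classic GradOpMaker is registered and it is not
    # the EmptyGradOpMaker placeholder.
    has_classic_grad = core.has_non_empty_grad_op_maker(op_type)
    # True only when the op was explicitly registered with
    # EmptyGradOpMaker; composite-only ops return False from both.
    explicitly_no_grad = core.has_empty_grad_op_maker(op_type)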
diff --git a/python/paddle/fluid/backward.py b/python/paddle/fluid/backward.py
index 51fe6294a5077abfe495ea00f956100e948a1fed..f7b8531aee4d94b1fdadc9d0564251c684a7fd01 100755
--- a/python/paddle/fluid/backward.py
+++ b/python/paddle/fluid/backward.py
@@ -2348,7 +2348,7 @@ def _find_op_path_(
     for i, op in enumerate(block.ops):
         if _some_in_set_(
             op.desc.input_arg_names(), input_names
-        ) and core.has_non_empty_grad_op_maker(op.type):
+        ) and not core.has_empty_grad_op_maker(op.type):
             for name in op.desc.output_arg_names():
                 if name not in no_grad_set:
                     input_names.add(name)
@@ -2367,7 +2367,7 @@ def _find_op_path_(
 
         if _some_in_set_(
             op.desc.output_arg_names(), output_names
-        ) and core.has_non_empty_grad_op_maker(op.type):
+        ) and not core.has_empty_grad_op_maker(op.type):
             for name in op.desc.input_arg_names():
                 if name not in no_grad_set:
                     output_names.add(name)
@@ -2382,7 +2382,7 @@
                 op.desc.output_arg_names(), output_names
             ):
                 relevant_op_flags[i] = True
-                if core.has_non_empty_grad_op_maker(op.type):
+                if not core.has_empty_grad_op_maker(op.type):
                     for name in op.desc.input_arg_names():
                         if name not in no_grad_set:
                             output_names.add(name)
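Note: the three `_find_op_path_` hunks above flip the pruning condition from "op has a non-empty grad op maker" to "op does not have an explicitly empty grad op maker". Operators whose backward is defined only through a composite grad maker were previously dropped from the op path; now only ops registered with `EmptyGradOpMaker` are skipped. An illustrative helper capturing the new predicate (the function name is hypothetical):

    from paddle.fluid import core

    def contributes_to_grad(op_type):
        # Old condition: core.has_non_empty_grad_op_maker(op_type),
        # which also excluded composite-only (prim) backward ops.
        # New condition: keep every op except those explicitly
        # registered with EmptyGradOpMaker.
        return not core.has_empty_grad_op_maker(op_type)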
diff --git a/test/autograd/test_autograd_functional_static.py b/test/autograd/test_autograd_functional_static.py
index 38c27c5e2384e7a8cbd05005c04cc7400e36993f..8786515d2699a126ec547586fed262f38df35cd1 100644
--- a/test/autograd/test_autograd_functional_static.py
+++ b/test/autograd/test_autograd_functional_static.py
@@ -466,7 +466,7 @@ class TestHessianFloat32(unittest.TestCase):
     def test_square(self):
         def pd_f(x):
             """Input is a square matrix."""
-            return paddle.matmul(x, x.T).flatten().sum()
+            return paddle.matmul(x, x.T).sum()
 
         def np_hess(x):
             dim = x.shape[0]
diff --git a/tools/check_file_diff_approvals.sh b/tools/check_file_diff_approvals.sh
index 3215f9d5072225046a8b599423a8edddb1055df0..f2fb9d86cdfdafd302cd023cdb5f2207467380a9 100644
--- a/tools/check_file_diff_approvals.sh
+++ b/tools/check_file_diff_approvals.sh
@@ -81,6 +81,7 @@ API_FILES=("CMakeLists.txt"
            "paddle/phi/core/kernel_context.h"
            "paddle/phi/core/infermeta_utils.h"
            "paddle/fluid/prim/api/composite_backward/composite_backward_api.h"
+           "paddle/fluid/prim/api/composite_backward/composite_double_backward_api.h"
            "paddle/fluid/prim/api/manual_prim/prim_manual_api.h"
            "paddle/fluid/prim/api/api.yaml"
            "python/paddle/incubate/autograd/composite_rules.py"
@@ -207,7 +208,7 @@ for API_FILE in ${API_FILES[*]}; do
   elif [ "${API_FILE}" == "paddle/phi/api/include/tensor.h" ] || [ "${API_FILE}" == "paddle/phi/core/tensor_base.h" ] || [ "${API_FILE}" == "paddle/phi/core/dense_tensor.h" ] || [ "${API_FILE}" == "paddle/phi/core/meta_tensor.h" ] || [ "${API_FILE}" == "paddle/phi/core/tensor_meta.h" ] || [ "${API_FILE}" == "paddle/phi/core/attribute.h" ] || [ "${API_FILE}" == "paddle/phi/core/device_context.h" ] || [ "${API_FILE}" == "paddle/phi/core/kernel_utils.h" ] || [ "${API_FILE}" == "paddle/phi/core/kernel_registry.h" ] || [ "${API_FILE}" == "paddle/phi/core/kernel_factory.h" ] || [ "${API_FILE}" == "paddle/phi/core/kernel_context.h" ] || [ "${API_FILE}" == "paddle/phi/core/infermeta_utils.h" ]; then
     echo_line="You must have one RD (chenwhql, phlrain, zyfncg, YuanRisheng) approval for changing ${API_FILE} , which manages the underlying code for PaddlePaddle PHI Library.\n"
     check_approval chenwhql phlrain zyfncg YuanRisheng
-  elif [ "${API_FILE}" == "paddle/fluid/prim/api/composite_backward/composite_backward_api.h" ] || [ "${API_FILE}" == "paddle/fluid/prim/api/manual_prim/prim_manual_api.h" ] || [ "${API_FILE}" == "paddle/fluid/prim/api/api.yaml" ]; then
+  elif [ "${API_FILE}" == "paddle/fluid/prim/api/composite_backward/composite_backward_api.h" ] || [ "${API_FILE}" == "paddle/fluid/prim/api/manual_prim/prim_manual_api.h" ] || [ "${API_FILE}" == "paddle/fluid/prim/api/api.yaml" ] || [ "${API_FILE}" == "paddle/fluid/prim/api/composite_backward/composite_double_backward_api.h" ]; then
     echo_line="You must have one RD (JiabinYang, cxxly(chenxiaoxu) , xiaoguoguo626807(wangruting)) approval for changing ${API_FILE} , which manages the code for PaddlePaddle Composite Bacward Prim API.\n"
     check_approval 1 JiabinYang cxxly xiaoguoguo626807
   elif [ "${API_FILE}" == "python/paddle/incubate/autograd/primitives.py" ] || [ "${API_FILE}" == "python/paddle/incubate/autograd/composite_rules.py" ]; then
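Note on the test_autograd_functional_static.py hunk above: `Tensor.sum()` with no axis argument reduces over all elements, so dropping the `flatten()` call does not change the scalar objective the Hessian is taken of. A quick sanity check under that assumption:

    import paddle

    x = paddle.rand([3, 3])
    y = paddle.matmul(x, x.T)
    # sum() without an axis already reduces every element, so the
    # explicit flatten() was redundant.
    assert paddle.allclose(y.flatten().sum(), y.sum()).item()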