diff --git a/paddle/fluid/ir/dialect/paddle_dialect/ir/pd_op_vjp_manual.cc b/paddle/fluid/ir/dialect/paddle_dialect/ir/pd_op_vjp_manual.cc
index f1ea245d562deee6ed3ad223e1774af878a71b14..9f9060e4cf4ca90968fc13efffbc5bfb466fc210 100644
--- a/paddle/fluid/ir/dialect/paddle_dialect/ir/pd_op_vjp_manual.cc
+++ b/paddle/fluid/ir/dialect/paddle_dialect/ir/pd_op_vjp_manual.cc
@@ -78,7 +78,7 @@ std::vector<std::vector<ir::OpResult>> SumOp::Vjp(
   bool reduce_all = false;
   std::vector<std::vector<paddle::Tensor>> tensor_res = primitive::sum_vjp(
       x, out_grad, axis, keepdim, reduce_all, stop_gradients);
-  std::vector<std::vector<ir::OpResult>> res(1, std::vector<ir::OpResult>(1));
+  std::vector<std::vector<ir::OpResult>> res(2, std::vector<ir::OpResult>(1));
   if (tensor_res[0][0].defined()) {
     res[0][0] =
         std::static_pointer_cast<primitive::LazyTensor>(tensor_res[0][0].impl())
diff --git a/paddle/fluid/primitive/backend/CMakeLists.txt b/paddle/fluid/primitive/backend/CMakeLists.txt
index 62dcaebf121d22ee67e9d158123e1be866487f0f..deabc1f19d9b502222fdd6491ebdfacaf22505a0 100644
--- a/paddle/fluid/primitive/backend/CMakeLists.txt
+++ b/paddle/fluid/primitive/backend/CMakeLists.txt
@@ -1,6 +1,5 @@
 set(eager_backend_files
-    ${PADDLE_SOURCE_DIR}/paddle/fluid/primitive/backend/generated/generated_eager_backend.cc
-)
+    ${CMAKE_CURRENT_SOURCE_DIR}/generated/generated_eager_backend.cc)
 if(WITH_PYTHON OR NOT ON_INFER)
   cc_library(
     primitive_backend_eager_experimental
@@ -8,9 +7,8 @@ if(WITH_PYTHON OR NOT ON_INFER)
     DEPS final_dygraph_function eager_utils phi)
 endif()
 set(static_backend_files
-    ${PADDLE_SOURCE_DIR}/paddle/fluid/primitive/backend/generated/generated_static_backend.cc
-    ${PADDLE_SOURCE_DIR}/paddle/fluid/primitive/backend/manual/manual_static_backend.cc
-)
+    ${CMAKE_CURRENT_SOURCE_DIR}/generated/generated_static_backend.cc
+    ${CMAKE_CURRENT_SOURCE_DIR}/manual/manual_static_backend.cc)
 cc_library(
   primitive_backend_static_experimental
   SRCS ${static_backend_files}
diff --git a/paddle/fluid/primitive/codegen/CMakeLists.txt b/paddle/fluid/primitive/codegen/CMakeLists.txt
index 5cb3681535f594188dd55830b2ed395bd7597710..d582616896253981feaf5150074d82dea8c93562 100644
--- a/paddle/fluid/primitive/codegen/CMakeLists.txt
+++ b/paddle/fluid/primitive/codegen/CMakeLists.txt
@@ -1,15 +1,9 @@
-set(fwd_path
-    "${PADDLE_SOURCE_DIR}/paddle/fluid/operators/generator/parsed_ops/ops.parsed.yaml"
-)
-set(fwd_legacy_path
-    "${PADDLE_SOURCE_DIR}/paddle/fluid/operators/generator/parsed_ops/legacy_ops.parsed.yaml"
-)
-set(rev_path
-    "${PADDLE_SOURCE_DIR}/paddle/fluid/operators/generator/parsed_ops/backward_ops.parsed.yaml"
-)
-set(rev_legacy_path
-    "${PADDLE_SOURCE_DIR}/paddle/fluid/operators/generator/parsed_ops/legacy_backward_ops.parsed.yaml"
-)
+set(parsed_yaml_path
+    "${PADDLE_SOURCE_DIR}/paddle/fluid/operators/generator/parsed_ops")
+set(fwd_path ${parsed_yaml_path}/ops.parsed.yaml)
+set(fwd_legacy_path ${parsed_yaml_path}/legacy_ops.parsed.yaml)
+set(rev_path ${parsed_yaml_path}/backward_ops.parsed.yaml)
+set(rev_legacy_path ${parsed_yaml_path}/legacy_backward_ops.parsed.yaml)
 set(prim_path "${PADDLE_SOURCE_DIR}/paddle/fluid/primitive/primitive.yaml")
 set(templates_dir
     "${PADDLE_SOURCE_DIR}/paddle/fluid/primitive/codegen/templates/")
diff --git a/test/prim/new_ir_prim/test_vjp_prim.py b/test/prim/new_ir_prim/test_vjp_prim.py
index 8c2fd4ebd76dfa7bc5fb4a95f1e273a0e4e50843..600a14129782f15db81eb0fe338a36215c78f09a 100644
--- a/test/prim/new_ir_prim/test_vjp_prim.py
+++ b/test/prim/new_ir_prim/test_vjp_prim.py
@@ -126,14 +126,15 @@ class TestVjpPrim(unittest.TestCase):
         paddle.fluid.core._set_prim_backward_enabled(True)
         dout = newir_program.block().ops[-2].result(0)
         out_grads = [[dout]]
-        stop_gradients = [[False]]
+        stop_gradients = [[False], [True]]
         sum_op = newir_program.block().ops[-1]
         with paddle.ir.core.program_guard(newir_program):
             grad_outs = call_vjp(sum_op, out_grads, stop_gradients)
         expand_op = newir_program.block().ops[-1]
-        self.assertEqual(len(grad_outs), 1)
+        self.assertEqual(len(grad_outs), 2)
         self.assertEqual(len(newir_program.block().ops), 8)
         self.assertEqual(expand_op.result(0), grad_outs[0][0])
+        self.assertEqual(grad_outs[1][0], None)
         all_op_names = [
             "pd.full",
             "pd.full",
@@ -152,14 +153,15 @@ class TestVjpPrim(unittest.TestCase):
         paddle.fluid.core._set_prim_backward_enabled(False)
         dout = newir_program.block().ops[-2].result(0)
         out_grads = [[dout]]
-        stop_gradients = [[False]]
+        stop_gradients = [[False], [True]]
        sum_op = newir_program.block().ops[-1]
         with paddle.ir.core.program_guard(newir_program):
             grad_outs = call_vjp(sum_op, out_grads, stop_gradients)
-        self.assertEqual(len(grad_outs), 1)
+        self.assertEqual(len(grad_outs), 2)
         self.assertEqual(
             grad_outs[0][0].get_defining_op().name(), "pd.sum_grad"
         )
+        self.assertEqual(grad_outs[1][0], None)
         self.assertEqual(len(newir_program.block().ops), 6)