From 2242136afe66a3ea781232ef5fafdd41417ae1d5 Mon Sep 17 00:00:00 2001
From: Jiabin Yang <360788950@qq.com>
Date: Tue, 17 Jan 2023 21:10:40 +0800
Subject: [PATCH] Add more dy2st ut2 (#49881)

* add test for composite with dy2st
* add more log
---
 paddle/fluid/operators/elementwise/elementwise_mul_op.cc | 1 +
 paddle/fluid/operators/expand_v2_op.cc                   | 1 +
 paddle/fluid/operators/reduce_ops/reduce_sum_op.cc       | 2 +-
 3 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/paddle/fluid/operators/elementwise/elementwise_mul_op.cc b/paddle/fluid/operators/elementwise/elementwise_mul_op.cc
index 457ea83c0d..4052f3e09e 100644
--- a/paddle/fluid/operators/elementwise/elementwise_mul_op.cc
+++ b/paddle/fluid/operators/elementwise/elementwise_mul_op.cc
@@ -88,6 +88,7 @@ class ElementwiseMulGradCompositeOpMaker
         static_cast<int>(this->Attr<int>("axis")),
         x_grad_p,
         y_grad_p);
+    VLOG(3) << "Running mul_grad composite func";
     this->RecoverOutputName(x_grad, x_grad_name);
     this->RecoverOutputName(y_grad, y_grad_name);
   }
diff --git a/paddle/fluid/operators/expand_v2_op.cc b/paddle/fluid/operators/expand_v2_op.cc
index 7b24c31ff2..9a867c040f 100644
--- a/paddle/fluid/operators/expand_v2_op.cc
+++ b/paddle/fluid/operators/expand_v2_op.cc
@@ -206,6 +206,7 @@ class ExpandV2GradCompositeOpMaker : public prim::GradCompositeOpMakerBase {
     auto shape = this->Attr<std::vector<int>>("shape");
     prim::expand_grad<prim::DescTensor>(
         x, out_grad, paddle::experimental::IntArray(shape), x_grad_p);
+    VLOG(3) << "Running expand_v2 composite func";
     this->RecoverOutputName(x_grad, x_grad_name);
   }
 };
diff --git a/paddle/fluid/operators/reduce_ops/reduce_sum_op.cc b/paddle/fluid/operators/reduce_ops/reduce_sum_op.cc
index afef765c6f..2b337887fa 100644
--- a/paddle/fluid/operators/reduce_ops/reduce_sum_op.cc
+++ b/paddle/fluid/operators/reduce_ops/reduce_sum_op.cc
@@ -84,7 +84,7 @@ class ReduceSumCompositeGradOpMaker : public prim::GradCompositeOpMakerBase {
 
     // get output original name
     std::string x_grad_name = this->GetOutputName(x_grad_t);
-
+    VLOG(3) << "Running sum_grad composite func";
     // call composite backward func
     prim::sum_grad<prim::DescTensor>(
         x, out_grad, axis, keep_dim, reduce_all, x_grad);
--
GitLab
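
Note on the added logging: the three new lines use VLOG(3), which in Paddle comes
from glog. Such messages are compiled in but silent by default; glog only emits
them when the verbosity level is raised to 3 or higher, for example by exporting
GLOG_v=3 before running. Below is a minimal standalone sketch of the same
pattern, using glog directly rather than Paddle itself; the file name, build
line, and message text are illustrative only:

    // vlog_demo.cc; build with e.g.: g++ vlog_demo.cc -lglog
    #include <glog/logging.h>

    int main(int argc, char* argv[]) {
      google::InitGoogleLogging(argv[0]);
      FLAGS_logtostderr = true;  // print to stderr instead of per-process log files
      // Emitted only when verbosity >= 3, e.g. run as: GLOG_v=3 ./a.out
      VLOG(3) << "Running mul_grad composite func";
      return 0;
    }

With GLOG_v unset or below 3 the program prints nothing, which is why log lines
like these are cheap to leave in hot paths such as the composite grad makers
patched above.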