diff --git a/paddle/fluid/operators/elementwise/elementwise_add_op.cc b/paddle/fluid/operators/elementwise/elementwise_add_op.cc
index 26fcd53621a2b12ee51a983d410a401ad45e0915..c122a07c9b1d4999694889b4aa94c51700dbd762 100644
--- a/paddle/fluid/operators/elementwise/elementwise_add_op.cc
+++ b/paddle/fluid/operators/elementwise/elementwise_add_op.cc
@@ -67,7 +67,7 @@ class ElementwiseAddCompositeGradOpMaker
     auto dy_ptr = this->GetOutputPtr(&dy);
     std::string dy_name = this->GetOutputName(dy);
     int axis = static_cast<int>(this->Attr<int>("axis"));
-    VLOG(3) << "Runing add_grad composite func";
+    VLOG(6) << "Runing add_grad composite func";
     prim::add_grad<prim::DescTensor>(x, y, out_grad, axis, dx_ptr, dy_ptr);
     this->RecoverOutputName(dx, dx_name);
     this->RecoverOutputName(dy, dy_name);
diff --git a/paddle/fluid/operators/elementwise/elementwise_div_op.cc b/paddle/fluid/operators/elementwise/elementwise_div_op.cc
index 7d96c3106584ad2cdc761b53f516b128c1a3f4b4..97941aa82f3954c34c871f49f9175e639fdd47da 100644
--- a/paddle/fluid/operators/elementwise/elementwise_div_op.cc
+++ b/paddle/fluid/operators/elementwise/elementwise_div_op.cc
@@ -84,7 +84,7 @@ class ElementwiseDivCompositeGradOpMaker
     auto dy_ptr = this->GetOutputPtr(&dy);
     std::string dy_name = this->GetOutputName(dy);
     int axis = static_cast<int>(this->Attr<int>("axis"));
-    VLOG(3) << "Runing div_grad composite func";
+    VLOG(6) << "Runing div_grad composite func";
     prim::divide_grad<prim::DescTensor>(
         x, y, out, out_grad, axis, dx_ptr, dy_ptr);
     this->RecoverOutputName(dx, dx_name);
diff --git a/paddle/fluid/operators/elementwise/elementwise_mul_op.cc b/paddle/fluid/operators/elementwise/elementwise_mul_op.cc
index 61467be4c9bd59057a905b3b45133b9c547c20a1..9821cc226128323d48254f020f3470e919469b80 100644
--- a/paddle/fluid/operators/elementwise/elementwise_mul_op.cc
+++ b/paddle/fluid/operators/elementwise/elementwise_mul_op.cc
@@ -88,7 +88,7 @@ class ElementwiseMulCompositeGradOpMaker
         static_cast<int>(this->Attr<int>("axis")),
         x_grad_p,
         y_grad_p);
-    VLOG(3) << "Runing mul_grad composite func";
+    VLOG(6) << "Runing mul_grad composite func";
     this->RecoverOutputName(x_grad, x_grad_name);
     this->RecoverOutputName(y_grad, y_grad_name);
   }
diff --git a/paddle/fluid/operators/elementwise/elementwise_sub_op.cc b/paddle/fluid/operators/elementwise/elementwise_sub_op.cc
index d19f557bfe3c59b6672eb3e89d6e967e729c67d9..a7244062632699992533f851277563edce450998 100644
--- a/paddle/fluid/operators/elementwise/elementwise_sub_op.cc
+++ b/paddle/fluid/operators/elementwise/elementwise_sub_op.cc
@@ -70,7 +70,7 @@ class ElementwiseSubCompositeGradOpMaker
     auto dy_ptr = this->GetOutputPtr(&dy);
     std::string dy_name = this->GetOutputName(dy);
     int axis = static_cast<int>(this->Attr<int>("axis"));
-    VLOG(3) << "Runing sub_grad composite func";
+    VLOG(6) << "Runing sub_grad composite func";
     prim::subtract_grad<prim::DescTensor>(x, y, out_grad, axis, dx_ptr, dy_ptr);
     this->RecoverOutputName(dx, dx_name);
     this->RecoverOutputName(dy, dy_name);
diff --git a/paddle/fluid/operators/expand_v2_op.cc b/paddle/fluid/operators/expand_v2_op.cc
index 253c2856063ec7042b79298c5902029c27c99a64..6df6422f7173c9cf0fcde2624d402484de85b322 100644
--- a/paddle/fluid/operators/expand_v2_op.cc
+++ b/paddle/fluid/operators/expand_v2_op.cc
@@ -206,7 +206,7 @@ class ExpandV2CompositeGradOpMaker : public prim::CompositeGradOpMakerBase {
     auto shape = this->Attr<std::vector<int>>("shape");
     prim::expand_grad<prim::DescTensor>(
         x, out_grad, paddle::experimental::IntArray(shape), x_grad_p);
-    VLOG(3) << "Runing expand_v2 composite func";
+    VLOG(6) << "Runing expand_v2 composite func";
     this->RecoverOutputName(x_grad, x_grad_name);
   }
 };
diff --git a/paddle/fluid/operators/generator/templates/operator_utils.c.j2 b/paddle/fluid/operators/generator/templates/operator_utils.c.j2
index a471efaa562b4cb579ae9ddd1f38193567f7d392..63392bb786f0c90befbfe2312cdec9b6d6d59660 100644
--- a/paddle/fluid/operators/generator/templates/operator_utils.c.j2
+++ b/paddle/fluid/operators/generator/templates/operator_utils.c.j2
@@ -665,7 +665,7 @@ class {{op_name | to_composite_grad_opmaker_name}} : public prim::CompositeGradO
 {%- endmacro %}

 {% macro call_composite_backward_api(composite_func_info) %}
-  VLOG(3) << "Runing {{composite_func_info["func_name"]}} composite func";
+  VLOG(6) << "Runing {{composite_func_info["func_name"]}} composite func";
   prim::{{composite_func_info["func_name"]}}({{composite_func_info["func_args"]}});
 {%- endmacro %}

diff --git a/paddle/fluid/operators/reduce_ops/reduce_sum_op.cc b/paddle/fluid/operators/reduce_ops/reduce_sum_op.cc
index 9af1770a41de69f2973442ff410f7e4c479f5db8..5d14a0911fb2da0ddf209ad0bae792081832a801 100644
--- a/paddle/fluid/operators/reduce_ops/reduce_sum_op.cc
+++ b/paddle/fluid/operators/reduce_ops/reduce_sum_op.cc
@@ -84,7 +84,7 @@ class ReduceSumCompositeGradOpMaker : public prim::CompositeGradOpMakerBase {

     // get output orginal name
     std::string x_grad_name = this->GetOutputName(x_grad_t);
-    VLOG(3) << "Runing sum_grad composite func";
+    VLOG(6) << "Runing sum_grad composite func";
     // call composite backward func
     prim::sum_grad<prim::DescTensor>(
         x, out_grad, axis, keep_dim, reduce_all, x_grad);
diff --git a/paddle/fluid/prim/utils/static/composite_grad_desc_maker.h b/paddle/fluid/prim/utils/static/composite_grad_desc_maker.h
index a79693826858266ff8f7af8b5a7f820d6ec33098..efb2479e4051444fb51a3ecf10f9a9b83598498a 100644
--- a/paddle/fluid/prim/utils/static/composite_grad_desc_maker.h
+++ b/paddle/fluid/prim/utils/static/composite_grad_desc_maker.h
@@ -57,6 +57,8 @@ class CompositeGradOpMakerBase {
         acting_program_(framework::ProgramDesc()),
         grad_block_(grad_block) {
     // TODO(jiabin): This should always execute by one thread...
+    VLOG(6) << "Constructing Composite Grad func for " << fwd_op_.Type()
+            << "_grad ";
     StaticCompositeContext::Instance().SetBlock(
         acting_program_.MutableBlock(0));
   }
@@ -64,6 +66,7 @@ class CompositeGradOpMakerBase {
   virtual ~CompositeGradOpMakerBase() = default;

   virtual std::vector<std::unique_ptr<framework::OpDesc>> operator()() {
+    VLOG(3) << "Runing Composite Grad func for " << fwd_op_.Type() << "_grad ";
     this->Apply();
     std::vector<std::unique_ptr<framework::OpDesc>> ops;
     // TODO(jiabin): Support multiple blocks later
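For context (not part of the diff itself): Paddle's `VLOG` macro comes from glog, where a `VLOG(n)` message is emitted only when the runtime verbosity (the `--v` flag or the `GLOG_v` environment variable) is at least `n`. The minimal sketch below, which assumes nothing beyond stock glog, illustrates why the per-op "Runing ... composite func" lines stop appearing at the usual `GLOG_v=3` once they move from level 3 to level 6, while the new level-3 message in `operator()` still shows.

```cpp
// Minimal sketch, assuming only stock glog (the library behind Paddle's VLOG).
#include <glog/logging.h>

int main(int argc, char* argv[]) {
  google::InitGoogleLogging(argv[0]);
  FLAGS_logtostderr = true;

  FLAGS_v = 3;  // same effect as running with GLOG_v=3 (or --v=3)
  VLOG(3) << "printed: level 3 <= verbosity 3";
  VLOG(6) << "suppressed: level 6 > verbosity 3";

  FLAGS_v = 6;  // same effect as GLOG_v=6
  VLOG(6) << "printed: matches the VLOG(6) calls introduced in this diff";
  return 0;
}
```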