Unverified commit 80310541, authored by: Jiabin Yang, committed by: GitHub

【Prim】optimize log (#50160)

* optimize log

* fix type error

* fix type error2
Parent: cc8a7858
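The patch is a log-noise cleanup: the routine per-op "Runing ... composite func" messages are demoted from verbosity level 3 to level 6, and a single level-3 summary per grad-op construction is kept in the base class. Under glog (which Paddle uses for VLOG), VLOG(n) only prints when n is at or below the verbosity flag, so the demoted messages disappear at typical GLOG_v settings. A minimal standalone sketch of that gating, reusing message strings from this diff and a hypothetical tanh_grad op:

// sketch_vlog_gating.cc -- minimal glog sketch, not Paddle code.
#include <glog/logging.h>

int main(int argc, char* argv[]) {
  google::InitGoogleLogging(argv[0]);
  FLAGS_logtostderr = true;
  FLAGS_v = 3;  // same effect as launching the process with GLOG_v=3

  // Printed: level 3 <= verbosity 3 (the per-invocation summary kept by this patch).
  VLOG(3) << "Runing Composite Grad func for tanh_grad";
  // Suppressed: level 6 > verbosity 3 (the per-op messages demoted by this patch).
  VLOG(6) << "Runing add_grad composite func";
  return 0;
}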
@@ -67,7 +67,7 @@ class ElementwiseAddCompositeGradOpMaker
     auto dy_ptr = this->GetOutputPtr(&dy);
     std::string dy_name = this->GetOutputName(dy);
     int axis = static_cast<int>(this->Attr<int>("axis"));
-    VLOG(3) << "Runing add_grad composite func";
+    VLOG(6) << "Runing add_grad composite func";
     prim::add_grad<prim::DescTensor>(x, y, out_grad, axis, dx_ptr, dy_ptr);
     this->RecoverOutputName(dx, dx_name);
     this->RecoverOutputName(dy, dy_name);
......
@@ -84,7 +84,7 @@ class ElementwiseDivCompositeGradOpMaker
     auto dy_ptr = this->GetOutputPtr(&dy);
     std::string dy_name = this->GetOutputName(dy);
     int axis = static_cast<int>(this->Attr<int>("axis"));
-    VLOG(3) << "Runing div_grad composite func";
+    VLOG(6) << "Runing div_grad composite func";
     prim::divide_grad<prim::DescTensor>(
         x, y, out, out_grad, axis, dx_ptr, dy_ptr);
     this->RecoverOutputName(dx, dx_name);
......
@@ -88,7 +88,7 @@ class ElementwiseMulCompositeGradOpMaker
         static_cast<int>(this->Attr<int>("axis")),
         x_grad_p,
         y_grad_p);
-    VLOG(3) << "Runing mul_grad composite func";
+    VLOG(6) << "Runing mul_grad composite func";
     this->RecoverOutputName(x_grad, x_grad_name);
     this->RecoverOutputName(y_grad, y_grad_name);
   }
......
@@ -70,7 +70,7 @@ class ElementwiseSubCompositeGradOpMaker
     auto dy_ptr = this->GetOutputPtr(&dy);
     std::string dy_name = this->GetOutputName(dy);
     int axis = static_cast<int>(this->Attr<int>("axis"));
-    VLOG(3) << "Runing sub_grad composite func";
+    VLOG(6) << "Runing sub_grad composite func";
     prim::subtract_grad<prim::DescTensor>(x, y, out_grad, axis, dx_ptr, dy_ptr);
     this->RecoverOutputName(dx, dx_name);
     this->RecoverOutputName(dy, dy_name);
......
@@ -206,7 +206,7 @@ class ExpandV2CompositeGradOpMaker : public prim::CompositeGradOpMakerBase {
     auto shape = this->Attr<std::vector<int>>("shape");
     prim::expand_grad<prim::DescTensor>(
         x, out_grad, paddle::experimental::IntArray(shape), x_grad_p);
-    VLOG(3) << "Runing expand_v2 composite func";
+    VLOG(6) << "Runing expand_v2 composite func";
     this->RecoverOutputName(x_grad, x_grad_name);
   }
 };
......
@@ -665,7 +665,7 @@ class {{op_name | to_composite_grad_opmaker_name}} : public prim::CompositeGradOpMakerBase {
 {%- endmacro %}
 {% macro call_composite_backward_api(composite_func_info) %}
-  VLOG(3) << "Runing {{composite_func_info["func_name"]}} composite func";
+  VLOG(6) << "Runing {{composite_func_info["func_name"]}} composite func";
   prim::{{composite_func_info["func_name"]}}<prim::DescTensor>({{composite_func_info["func_args"]}});
 {%- endmacro %}
......
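The macro above is stamped out once per generated op maker, so every autogenerated composite backward call picks up the same demotion. As an illustration only, with hypothetical values func_name = "tanh_grad" and func_args = "out, out_grad, x_grad" (the real values come from the parsed op definitions, not this sketch), the macro now renders roughly as:

// Hypothetical rendering of call_composite_backward_api; the names are
// placeholders, not taken from the actual generated file.
VLOG(6) << "Runing tanh_grad composite func";
prim::tanh_grad<prim::DescTensor>(out, out_grad, x_grad);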
@@ -84,7 +84,7 @@ class ReduceSumCompositeGradOpMaker : public prim::CompositeGradOpMakerBase {
     // get output orginal name
     std::string x_grad_name = this->GetOutputName(x_grad_t);
-    VLOG(3) << "Runing sum_grad composite func";
+    VLOG(6) << "Runing sum_grad composite func";
     // call composite backward func
     prim::sum_grad<prim::DescTensor>(
         x, out_grad, axis, keep_dim, reduce_all, x_grad);
......
@@ -57,6 +57,8 @@ class CompositeGradOpMakerBase {
         acting_program_(framework::ProgramDesc()),
         grad_block_(grad_block) {
     // TODO(jiabin): This should always execute by one thread...
+    VLOG(6) << "Constructing Composite Grad func for " << fwd_op_.Type()
+            << "_grad ";
     StaticCompositeContext::Instance().SetBlock(
         acting_program_.MutableBlock(0));
   }
@@ -64,6 +66,7 @@ class CompositeGradOpMakerBase {
   virtual ~CompositeGradOpMakerBase() = default;
   virtual std::vector<std::unique_ptr<framework::OpDesc>> operator()() {
+    VLOG(3) << "Runing Composite Grad func for " << fwd_op_.Type() << "_grad ";
     this->Apply();
     std::vector<std::unique_ptr<framework::OpDesc>> ops;
     // TODO(jiabin): Support multiple blocks later
......
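After this change the base class logs on two tiers: construction details at level 6, and one summary per invocation at level 3 inside operator()(), which then dispatches to the subclass's Apply(). A rough sketch of how a concrete maker plugs into that flow, with hypothetical names (the real subclasses are the ones shown in the hunks above):

// Hypothetical subclass following the pattern in this diff: operator()()
// on the base logs the level-3 summary, then calls the Apply() override,
// which records the composite backward ops.
class TanhCompositeGradOpMaker : public prim::CompositeGradOpMakerBase {
 public:
  using prim::CompositeGradOpMakerBase::CompositeGradOpMakerBase;

  void Apply() override {
    // Build the composite backward graph here, e.g.
    // prim::tanh_grad<prim::DescTensor>(out, out_grad, x_grad_ptr);
    // (placeholder call; each real maker wires up its own inputs/outputs).
  }
};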