Unverified commit 80310541
Author:    Jiabin Yang
Authored:  Feb 03, 2023
Committer: GitHub
Committed: Feb 03, 2023
【Prim】optimize log (#50160)
* optimize log
* fix type error
* fix type error2
Parent: cc8a7858
Showing 8 changed files with 10 additions and 7 deletions (+10 −7).
paddle/fluid/operators/elementwise/elementwise_add_op.cc         +1 −1
paddle/fluid/operators/elementwise/elementwise_div_op.cc         +1 −1
paddle/fluid/operators/elementwise/elementwise_mul_op.cc         +1 −1
paddle/fluid/operators/elementwise/elementwise_sub_op.cc         +1 −1
paddle/fluid/operators/expand_v2_op.cc                           +1 −1
paddle/fluid/operators/generator/templates/operator_utils.c.j2   +1 −1
paddle/fluid/operators/reduce_ops/reduce_sum_op.cc               +1 −1
paddle/fluid/prim/utils/static/composite_grad_desc_maker.h       +3 −0
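A note for context (hedged, not part of the commit): Paddle's VLOG macro follows glog semantics, where a VLOG(n) message is emitted only when the active verbosity threshold (the --v flag, or the GLOG_v environment variable in Paddle) is at least n. Raising the per-operator traces below from VLOG(3) to VLOG(6) therefore keeps them out of routine logs and reserves them for high-verbosity debugging. A minimal standalone sketch of that filtering behaviour, assuming glog is available:

    #include <glog/logging.h>

    int main(int argc, char* argv[]) {
      google::InitGoogleLogging(argv[0]);

      FLAGS_v = 3;  // same effect as running with --v=3 (GLOG_v=3 in Paddle)
      VLOG(3) << "emitted: level 3 <= threshold 3";
      VLOG(6) << "suppressed: level 6 > threshold 3";

      FLAGS_v = 6;  // only now does the quieter trace appear
      VLOG(6) << "emitted: level 6 <= threshold 6";
      return 0;
    }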
paddle/fluid/operators/elementwise/elementwise_add_op.cc
@@ -67,7 +67,7 @@ class ElementwiseAddCompositeGradOpMaker
     auto dy_ptr = this->GetOutputPtr(&dy);
     std::string dy_name = this->GetOutputName(dy);
     int axis = static_cast<int>(this->Attr<int>("axis"));
-    VLOG(3) << "Runing add_grad composite func";
+    VLOG(6) << "Runing add_grad composite func";
     prim::add_grad<prim::DescTensor>(x, y, out_grad, axis, dx_ptr, dy_ptr);
     this->RecoverOutputName(dx, dx_name);
     this->RecoverOutputName(dy, dy_name);
paddle/fluid/operators/elementwise/elementwise_div_op.cc
@@ -84,7 +84,7 @@ class ElementwiseDivCompositeGradOpMaker
     auto dy_ptr = this->GetOutputPtr(&dy);
     std::string dy_name = this->GetOutputName(dy);
     int axis = static_cast<int>(this->Attr<int>("axis"));
-    VLOG(3) << "Runing div_grad composite func";
+    VLOG(6) << "Runing div_grad composite func";
     prim::divide_grad<prim::DescTensor>(
         x, y, out, out_grad, axis, dx_ptr, dy_ptr);
     this->RecoverOutputName(dx, dx_name);
paddle/fluid/operators/elementwise/elementwise_mul_op.cc
@@ -88,7 +88,7 @@ class ElementwiseMulCompositeGradOpMaker
         static_cast<int>(this->Attr<int>("axis")),
         x_grad_p,
         y_grad_p);
-    VLOG(3) << "Runing mul_grad composite func";
+    VLOG(6) << "Runing mul_grad composite func";
     this->RecoverOutputName(x_grad, x_grad_name);
     this->RecoverOutputName(y_grad, y_grad_name);
   }
paddle/fluid/operators/elementwise/elementwise_sub_op.cc
@@ -70,7 +70,7 @@ class ElementwiseSubCompositeGradOpMaker
     auto dy_ptr = this->GetOutputPtr(&dy);
     std::string dy_name = this->GetOutputName(dy);
     int axis = static_cast<int>(this->Attr<int>("axis"));
-    VLOG(3) << "Runing sub_grad composite func";
+    VLOG(6) << "Runing sub_grad composite func";
     prim::subtract_grad<prim::DescTensor>(x, y, out_grad, axis, dx_ptr, dy_ptr);
     this->RecoverOutputName(dx, dx_name);
     this->RecoverOutputName(dy, dy_name);
paddle/fluid/operators/expand_v2_op.cc
@@ -206,7 +206,7 @@ class ExpandV2CompositeGradOpMaker : public prim::CompositeGradOpMakerBase {
     auto shape = this->Attr<std::vector<int>>("shape");
     prim::expand_grad<prim::DescTensor>(
         x, out_grad, paddle::experimental::IntArray(shape), x_grad_p);
-    VLOG(3) << "Runing expand_v2 composite func";
+    VLOG(6) << "Runing expand_v2 composite func";
     this->RecoverOutputName(x_grad, x_grad_name);
   }
 };
paddle/fluid/operators/generator/templates/operator_utils.c.j2
@@ -665,7 +665,7 @@ class {{op_name | to_composite_grad_opmaker_name}} : public prim::CompositeGradO
 {%- endmacro %}
 {% macro call_composite_backward_api(composite_func_info) %}
-  VLOG(3) << "Runing {{composite_func_info["func_name"]}} composite func";
+  VLOG(6) << "Runing {{composite_func_info["func_name"]}} composite func";
   prim::{{composite_func_info["func_name"]}}<prim::DescTensor>({{composite_func_info["func_args"]}});
 {%- endmacro %}
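This Jinja2 macro stamps the composite-backward call into each generated C++ grad-op maker, so the one-line change here lowers the trace verbosity for every generated operator at once. As a hedged illustration only, a rendering for a hypothetical entry whose func_name is "tanh_grad" (the operator and its argument list are invented for the example) might look like:

    VLOG(6) << "Runing tanh_grad composite func";
    prim::tanh_grad<prim::DescTensor>(out, out_grad, x_grad);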
paddle/fluid/operators/reduce_ops/reduce_sum_op.cc
@@ -84,7 +84,7 @@ class ReduceSumCompositeGradOpMaker : public prim::CompositeGradOpMakerBase {
     // get output orginal name
     std::string x_grad_name = this->GetOutputName(x_grad_t);
-    VLOG(3) << "Runing sum_grad composite func";
+    VLOG(6) << "Runing sum_grad composite func";
     // call composite backward func
     prim::sum_grad<prim::DescTensor>(
         x, out_grad, axis, keep_dim, reduce_all, x_grad);
paddle/fluid/prim/utils/static/composite_grad_desc_maker.h
@@ -57,6 +57,8 @@ class CompositeGradOpMakerBase {
         acting_program_(framework::ProgramDesc()),
         grad_block_(grad_block) {
     // TODO(jiabin): This should always execute by one thread...
+    VLOG(6) << "Constructing Composite Grad func for " << fwd_op_.Type()
+            << "_grad ";
     StaticCompositeContext::Instance().SetBlock(
         acting_program_.MutableBlock(0));
   }
@@ -64,6 +66,7 @@ class CompositeGradOpMakerBase {
   virtual ~CompositeGradOpMakerBase() = default;

   virtual std::vector<std::unique_ptr<framework::OpDesc>> operator()() {
+    VLOG(3) << "Runing Composite Grad func for " << fwd_op_.Type() << "_grad ";
     this->Apply();
     std::vector<std::unique_ptr<framework::OpDesc>> ops;
     // TODO(jiabin): Support multiple blocks later
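The two added lines split the tracing by noise level: constructing a composite grad maker is logged at the quiet level 6, while actually running one is logged at level 3 so it still surfaces under moderate verbosity. A minimal sketch of that pattern (not Paddle code; plain glog with invented names) for anyone adopting the same convention:

    #include <glog/logging.h>
    #include <string>

    // Sketch only: mirrors the convention above, construction traced at
    // VLOG(6), invocation traced at VLOG(3).
    class GradMakerSketch {
     public:
      explicit GradMakerSketch(const std::string& fwd_op_type)
          : fwd_op_type_(fwd_op_type) {
        VLOG(6) << "Constructing Composite Grad func for " << fwd_op_type_
                << "_grad";
      }

      void operator()() {
        VLOG(3) << "Running Composite Grad func for " << fwd_op_type_ << "_grad";
        // ... build and return the grad op descriptions here ...
      }

     private:
      std::string fwd_op_type_;
    };

    int main(int argc, char* argv[]) {
      google::InitGoogleLogging(argv[0]);
      FLAGS_v = 3;              // the constructor trace stays silent at this level
      GradMakerSketch maker("add");
      maker();                  // only this invocation trace is emitted
      return 0;
    }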