Unverified commit 6cb53e91, authored by xiaoguoguo626807, committed by GitHub

【prim】merge branch for GradOpMaker codeGen to clear code (#53874)

* review

* modify opcompat bug

* modify pybind
Parent 14e0ce71
......@@ -472,15 +472,14 @@ REGISTER_OPERATOR({{name}}, ops::{{name | to_pascal_case}}Op,
{% if not "forward" in op %}{# it is a forward op #}
ops::{{name | to_pascal_case}}OpMaker,
{% endif %}
{% if "only_backward_composite" in op and op["only_backward_composite"] is true %}{# backward #}
{% elif "backward" in op and op["backward"] is not none %}
{% if "backward" in op and op["backward"] is not none and op["only_backward_composite"] is false%}{# backward #}
{% set backward_name = op["backward"] %}
ops::{{backward_name | to_pascal_case}}OpMaker<paddle::framework::OpDesc>,
ops::{{backward_name | to_pascal_case}}OpMaker<paddle::imperative::OpBase>,
{% elif "forward" in op %}
{% else %}
{% elif "forward" not in op %}
paddle::framework::EmptyGradOpMaker<paddle::framework::OpDesc>,
paddle::framework::EmptyGradOpMaker<paddle::imperative::OpBase>,
{% else %}
{% endif %}
{% if op is supports_inplace %}{# inplace#}
ops::{{name | to_pascal_case}}InplaceInferer,
......
......@@ -54,10 +54,15 @@ def is_base_op(op):
return "kernel" in op and "infer_meta" in op
# Describes an op that has ONLY a composite implementation and no kernel
# implementation. A kernel implementation means either delegating to
# another op ("invoke") or a C++ kernel ("kernel" + "infer_meta").
def is_only_composite_op(op):
    has_kernel_impl = "kernel" in op or "invoke" in op
    return "composite" in op and not has_kernel_impl
# Describes an op that has a composite implementation; it may also have a
# kernel implementation alongside it.
def is_composite_op(op):
    return "composite" in op.keys()
......
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
// Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
......
......@@ -1363,12 +1363,12 @@ All parameter, weight, gradient are variables in Paddle.
}
} else {
if (grad_op_maker != nullptr) {
VLOG(3) << "Prim Flag Close: Runing origin grad fun for "
VLOG(6) << "Prim Flag Close: Runing origin grad fun for "
<< op_desc.Type();
grad_op_descs = grad_op_maker(
op_desc, no_grad_set, &grad_to_var, grad_sub_block);
} else {
VLOG(3) << "Prim Flag Close: Runing composite grad fun for "
VLOG(6) << "Prim Flag Close: Runing composite grad fun for "
<< op_desc.Type();
grad_op_descs = grad_comp_op_maker(op_desc,
no_grad_set,
......
......@@ -1525,12 +1525,6 @@
attrs : [bool use_mkldnn = false, str mkldnn_data_type = "float32",
bool use_quantizer = false, float Scale_x = 1.0f, float Scale_y = 1.0f, float Scale_out = 1.0f]
- op : maximum (elementwise_min)
backward : maximum_grad (elementwise_min_grad)
extra :
attrs : [bool use_mkldnn = false, str mkldnn_data_type = "float32",
bool use_quantizer = false, float Scale_x = 1.0f, float Scale_y = 1.0f, float Scale_out = 1.0f]
- op : maxout
inputs :
x : X
......@@ -1591,6 +1585,12 @@
out : Out
drop_empty_grad : [inputs_grad]
- op : minimum (elementwise_min)
backward : minimum_grad (elementwise_min_grad)
extra :
attrs : [bool use_mkldnn = false, str x_data_format = "", str y_data_format = "", str mkldnn_data_type = "float32",
bool use_quantizer = false, float Scale_x = 1.0f, float Scale_y = 1.0f, float Scale_out = 1.0f]
- op : mish
backward : mish_grad
extra :
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Please finish editing this message first!
To comment, please register