Unverified commit be3908a3, authored by xiongkun, committed by GitHub

[Dy2Static] Remove GradTransformer (#47063)

* [Dy2Static] Remove GradTransformer
1. Fix einsum infershape bugs.
2. Remove grad_transformer and unify paddle.grad with paddle.static.gradients (usage sketch below).
3. Add a dygraph-and-dy2static-only decorator (named non_static_only) for dy2static.

* fix bugs

* rename
Parent: 36ab58f8
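With the AST pass removed, the dispatch happens inside paddle.grad itself at runtime. A minimal sketch of the unified behavior; the function name and tensor values are illustrative assumptions, not taken from this PR:

    import paddle

    @paddle.jit.to_static
    def first_order_grad(x):
        y = x * x
        # Under @to_static this call now routes to paddle.static.gradients
        # at runtime instead of being rewritten by GradTransformer.
        return paddle.grad([y], [x])[0]

    x = paddle.to_tensor([3.0], stop_gradient=False)
    print(first_order_grad(x))  # dy/dx = 2*x -> [6.]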
@@ -241,7 +241,7 @@ inline static void InferLabelShape(const std::vector<std::string>& op_labels,
   } else if (labelshape->is_default(c) || (*labelshape)[c] == -1) {
     (*labelshape)[c] = op_dim[dim_ptr];
     dim_ptr++;
-  } else {
+  } else if (op_dim[dim_ptr] != -1) {
     PADDLE_ENFORCE_EQ(
         (*labelshape)[c],
         op_dim[dim_ptr],
...
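What the new guard changes: a dynamic dimension (-1) coming from an operand no longer has to equal a previously recorded concrete dimension for the same einsum label. A hedged Python paraphrase of the fixed branch; the helper name is hypothetical, the real logic is the C++ above:

    def record_label_dim(label, op_dim, labelshape):
        if labelshape.get(label, -1) == -1:
            # Label unseen or still unknown: take the operand's dim (may be -1).
            labelshape[label] = op_dim
        elif op_dim != -1:
            # Both dims are concrete, so they must agree.
            assert labelshape[label] == op_dim, (
                f"label '{label}': {labelshape[label]} vs {op_dim}")
        # op_dim == -1: dynamic dim; the fixed guard skips the equality check.

    shapes = {}
    record_label_dim('i', 4, shapes)
    record_label_dim('i', -1, shapes)  # used to enforce 4 == -1 and fail; now a no-op
    print(shapes)                      # {'i': 4}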
@@ -27,6 +27,7 @@ from ..data_feeder import convert_dtype
 import warnings
 from ..framework import _get_paddle_place, _in_legacy_dygraph, _in_eager_without_dygraph_check
 import paddle
+import warnings

 __all__ = [
     'no_grad', 'no_grad_', 'grad', 'guard', 'enable_dygraph', 'disable_dygraph',
@@ -45,6 +46,20 @@ def in_declarative_mode():
     return _in_declarative_mode_


+def declarative_unsupport_argument_warning(func_name, input_names, inputs,
+                                           support_values):
+    """
+    Warn if the given inputs are not elementwise equal to support_values.
+    This is a utility for dy2static, used when a dygraph interface has
+    more inputs than its static counterpart, such as paddle.grad.
+    """
+    for name, inp, sup in zip(input_names, inputs, support_values):
+        if inp != sup:
+            warnings.warn(f"{func_name} has an unsupported parameter in jit: " +
+                          f"{name}, jit will discard it")
+
+
 def _switch_to_static_graph_(func):

     def __impl__(*args, **kwargs):
@@ -290,6 +305,10 @@ def no_grad(func=None):
             test_layer()

     """
+    if in_declarative_mode():
+        warnings.warn(
+            "paddle.no_grad is only supported for inference models; it is not supported for training under @to_static."
+        )
     if func is None:
         return _switch_tracer_mode_guard_(is_train=False)
     else:
@@ -428,7 +447,7 @@ def guard(place=None):
         yield


-@framework.dygraph_only
+@framework.non_static_only
 def grad(outputs,
          inputs,
          grad_outputs=None,
@@ -563,6 +582,16 @@ def grad(outputs,
             grad_y1 = paddle.to_tensor(3.0)
             print(test_dygraph_grad([grad_y1, grad_value])) # [24.]
     '''
+    if in_declarative_mode():
+        # In a dy2static context, call the static interface `gradients`
+        # to compute grads.
+        from paddle.static import gradients
+        declarative_unsupport_argument_warning(
+            "paddle.grad",
+            ["retain_graph", "create_graph", "only_inputs", "allow_unused"],
+            [retain_graph, create_graph, only_inputs, allow_unused],
+            [None, False, True, False])
+        return gradients(outputs, inputs, grad_outputs, no_grad_vars)

     def check_in_out(in_out_list, name):
         assert in_out_list is not None, "{} should not be None".format(name)
...
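The user-visible effect of the warning path above: arguments that the static gradients interface cannot honor are reported and discarded rather than raising. A sketch of triggering and capturing the warning; the function name and values are illustrative, and the exact message depends on the Paddle version:

    import warnings
    import paddle

    @paddle.jit.to_static
    def grad_with_retain(x):
        y = x * x
        # retain_graph has no meaning on the static path; per the warning
        # utility above, jit warns and discards it.
        return paddle.grad([y], [x], retain_graph=True)[0]

    x = paddle.to_tensor([2.0], stop_gradient=False)
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        grad_with_retain(x)
    print([str(w.message) for w in caught])
    # expect something like:
    # ["paddle.grad has an unsupported parameter in jit: retain_graph, ..."]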
@@ -102,7 +102,7 @@ class DygraphToStaticAst(BaseTransformer):
             PrintTransformer,  # print statement
             CallTransformer,  # transform call recursively
             CastTransformer,  # type casting statement
-            GradTransformer,  # transform paddle.grad to paddle.gradients
+            # GradTransformer,  # transform paddle.grad to paddle.gradients
             DecoratorTransformer,  # transform decorators to function call
         ]
...
@@ -513,6 +513,17 @@ def _dygraph_only_(func):
     return __impl__


+def _non_static_only_(func):
+
+    def __impl__(*args, **kwargs):
+        from .dygraph.base import in_declarative_mode
+        assert _non_static_mode() or in_declarative_mode(
+        ), "We only support '%s()' in dynamic graph mode, please call 'paddle.disable_static()' to enter dynamic graph mode." % func.__name__
+        return func(*args, **kwargs)
+
+    return __impl__
+
+
 def _static_only_(func):

     def __impl__(*args, **kwargs):
@@ -572,6 +583,7 @@ dygraph_not_support = wrap_decorator(_dygraph_not_support_)
 dygraph_only = wrap_decorator(_dygraph_only_)
 static_only = wrap_decorator(_static_only_)
 fake_interface_only = wrap_decorator(_fake_interface_only_)
+non_static_only = wrap_decorator(_non_static_only_)

 def _dygraph_tracer():
...
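How the decorator behaves at the boundary: dygraph code and @to_static (declarative) code may call a non_static_only API, while plain static-graph code trips the assertion. A sketch based on the decorator above, assuming a Paddle build that includes this commit:

    import paddle

    paddle.enable_static()
    x = paddle.static.data(name='x', shape=[2], dtype='float32')
    x.stop_gradient = False
    y = x * x
    try:
        # paddle.grad is now decorated with @framework.non_static_only:
        # plain static-graph code (outside @to_static) fails the assert.
        paddle.grad([y], [x])
    except AssertionError as e:
        print(e)  # "We only support 'grad()' in dynamic graph mode, ..."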