diff --git a/paddle/phi/kernels/impl/einsum_impl.h b/paddle/phi/kernels/impl/einsum_impl.h
index 80529c8b669aac9aa04f4883d4465a6fd720bcb1..dafb967ae8ed5c9d8fc227928219fc4c76305f36 100644
--- a/paddle/phi/kernels/impl/einsum_impl.h
+++ b/paddle/phi/kernels/impl/einsum_impl.h
@@ -241,7 +241,7 @@ inline static void InferLabelShape(const std::vector& op_labels,
     } else if (labelshape->is_default(c) || (*labelshape)[c] == -1) {
       (*labelshape)[c] = op_dim[dim_ptr];
       dim_ptr++;
-    } else {
+    } else if (op_dim[dim_ptr] != -1) {
       PADDLE_ENFORCE_EQ(
           (*labelshape)[c],
           op_dim[dim_ptr],
diff --git a/python/paddle/fluid/dygraph/base.py b/python/paddle/fluid/dygraph/base.py
index 5101483858d0a21965bc777d8ed95b459e1953a5..82ef806583743e908a2fe5b05d98fe3b9e3ca5ba 100644
--- a/python/paddle/fluid/dygraph/base.py
+++ b/python/paddle/fluid/dygraph/base.py
@@ -27,6 +27,7 @@ from ..data_feeder import convert_dtype
 import warnings
 from ..framework import _get_paddle_place, _in_legacy_dygraph, _in_eager_without_dygraph_check
 import paddle
+import warnings
 
 __all__ = [
     'no_grad', 'no_grad_', 'grad', 'guard', 'enable_dygraph', 'disable_dygraph',
@@ -45,6 +46,20 @@ def in_declarative_mode():
     return _in_declarative_mode_
 
 
+def declarative_unsupport_argument_warning(func_name, input_names, inputs,
+                                           support_values):
+    """
+    Warning if inputs do not elementwisely equals to support_values.
+    It's a utility function for dy2static when dygraph interface have
+    more inputs than static interface such as paddle.grad.
+
+    """
+    for name, inp, sup in zip(input_names, inputs, support_values):
+        if inp != sup:
+            warnings.warn(f"{func_name} has unsupported parameter in jit: " +
+                          f"{name}, jit will discard it")
+
+
 def _switch_to_static_graph_(func):
 
     def __impl__(*args, **kwargs):
@@ -290,6 +305,10 @@ def no_grad(func=None):
         test_layer()
 
     """
+    if in_declarative_mode():
+        warnings.warn(
+            "paddle.no_grad is only supported for inference model, and not supported for training under @to_static."
+        )
     if func is None:
         return _switch_tracer_mode_guard_(is_train=False)
     else:
@@ -428,7 +447,7 @@ def guard(place=None):
         yield
 
 
-@framework.dygraph_only
+@framework.non_static_only
 def grad(outputs,
          inputs,
          grad_outputs=None,
@@ -563,6 +582,16 @@ def grad(outputs,
         grad_y1 = paddle.to_tensor(3.0)
         print(test_dygraph_grad([grad_y1, grad_value])) # [24.]
     '''
+    if in_declarative_mode():
+        # In dy2static context, we call static interface `gradients`
+        # to calculate grads.
+        from paddle.static import gradients
+        declarative_unsupport_argument_warning(
+            "paddle.grad",
+            ["retain_graph", "create_graph", "only_inputs", "allow_unused"],
+            [retain_graph, create_graph, only_inputs, allow_unused],
+            [None, False, True, False])
+        return gradients(outputs, inputs, grad_outputs, no_grad_vars)
 
     def check_in_out(in_out_list, name):
         assert in_out_list is not None, "{} should not be None".format(name)
diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/ast_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/ast_transformer.py
index 0853b15e6830c4ba5c4d4c14ff5b51d0a14ae696..5ec1dbea504cc6f536d13c34260b1c77e3681366 100644
--- a/python/paddle/fluid/dygraph/dygraph_to_static/ast_transformer.py
+++ b/python/paddle/fluid/dygraph/dygraph_to_static/ast_transformer.py
@@ -102,7 +102,7 @@ class DygraphToStaticAst(BaseTransformer):
             PrintTransformer,  # print statement
             CallTransformer,  # transform call recursively
             CastTransformer,  # type casting statement
-            GradTransformer,  # transform paddle.grad to paddle.gradients
+            #GradTransformer,  # transform paddle.grad to paddle.gradients
             DecoratorTransformer,  # transform decorators to function call
         ]
 
diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index 40223d4bcc2a9d8a33a1b7093f28e6f5a081165f..85d525ab5f75cb3d2f2124e72979580cc4a67e8a 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -513,6 +513,17 @@ def _dygraph_only_(func):
     return __impl__
 
 
+def _non_static_only_(func):
+
+    def __impl__(*args, **kwargs):
+        from .dygraph.base import in_declarative_mode
+        assert _non_static_mode() or in_declarative_mode(
+        ), "We only support '%s()' in dynamic graph mode, please call 'paddle.disable_static()' to enter dynamic graph mode." % func.__name__
+        return func(*args, **kwargs)
+
+    return __impl__
+
+
 def _static_only_(func):
 
     def __impl__(*args, **kwargs):
@@ -572,6 +583,7 @@ dygraph_not_support = wrap_decorator(_dygraph_not_support_)
 dygraph_only = wrap_decorator(_dygraph_only_)
 static_only = wrap_decorator(_static_only_)
 fake_interface_only = wrap_decorator(_fake_interface_only_)
+non_static_only = wrap_decorator(_non_static_only_)
 
 
 def _dygraph_tracer():