From 2be69d052f244af7069bd9a09cb6930e5ba07ead Mon Sep 17 00:00:00 2001
From: YuanRisheng
Date: Fri, 24 Feb 2023 11:21:53 +0800
Subject: [PATCH] [Save/Load] Fix backward op's error when using jit.load (#50744)

* perfect translated layer

* perfect code according to comments
---
 python/paddle/fluid/framework.py      | 3 +++
 python/paddle/jit/translated_layer.py | 5 +++++
 2 files changed, 8 insertions(+)

diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index c207c5282f3..1409acd5ca0 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -2773,6 +2773,9 @@ class OpProtoHolder:
 
         return custom_op_names
 
+    def has_op_proto(self, type):
+        return type in self.op_proto_map
+
     @staticmethod
     def generated_op_attr_names():
         return {
diff --git a/python/paddle/jit/translated_layer.py b/python/paddle/jit/translated_layer.py
index 45563584f16..be9e1bee325 100644
--- a/python/paddle/jit/translated_layer.py
+++ b/python/paddle/jit/translated_layer.py
@@ -563,6 +563,11 @@ class _ProgramHolder:
                     op.desc.set_output("ReserveSpace", [reserve_space.name])
                     continue
 
+                # Users sometimes add backward ops in the forward function of
+                # a Layer. Backward ops have no proto, so skip such ops here
+                # instead of looking up a proto that does not exist.
+                if not OpProtoHolder.instance().has_op_proto(op.type):
+                    continue
                 proto = OpProtoHolder.instance().get_op_proto(op.type)
                 has_create_intermediate_out = False
                 for output_proto in proto.outputs:
-- 
GitLab
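
For context, the sketch below (not part of the patch) illustrates the guard pattern this change introduces: check has_op_proto() before calling get_op_proto(), so op types without a registered proto, such as backward ops that a user placed in a Layer's forward function, are skipped instead of triggering a failed lookup. The _OpProtoRegistry class and the sample op names are hypothetical stand-ins for illustration only, not Paddle APIs.

# Hypothetical, simplified stand-in for OpProtoHolder: shows why a
# has_op_proto() membership check is useful before get_op_proto().
class _OpProtoRegistry:
    def __init__(self, protos):
        # Maps op type name -> proto-like object (here just a dict).
        self.op_proto_map = dict(protos)

    def has_op_proto(self, op_type):
        # Cheap membership check, mirroring the method added in the patch.
        return op_type in self.op_proto_map

    def get_op_proto(self, op_type):
        # Raises for unknown types, as a real proto lookup would.
        if op_type not in self.op_proto_map:
            raise ValueError(f"Operator '{op_type}' has no registered proto")
        return self.op_proto_map[op_type]


if __name__ == "__main__":
    # Only the forward op has a proto here; 'matmul_grad' plays the role of a
    # backward op added inside a Layer's forward function.
    registry = _OpProtoRegistry({"matmul": {"outputs": ["Out"]}})
    for op_type in ["matmul", "matmul_grad"]:
        if not registry.has_op_proto(op_type):
            # Skip ops without a proto instead of letting get_op_proto fail.
            print(f"skip {op_type}: no proto registered")
            continue
        proto = registry.get_op_proto(op_type)
        print(f"{op_type}: proto outputs = {proto['outputs']}")

Running the sketch prints the proto outputs for 'matmul' and a skip message for 'matmul_grad', mirroring how _ProgramHolder now tolerates ops that have no proto.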