From 893d37e5c64ec2022aa8e404bd0286dc6d4685ec Mon Sep 17 00:00:00 2001
From: ShenLiang
Date: Thu, 31 Dec 2020 13:22:50 +0800
Subject: [PATCH] Fix rank_attention op_version, test=op_version (#30006)

* fix rank_attention, test=op_version
---
 paddle/fluid/operators/rank_attention_op.cc | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/paddle/fluid/operators/rank_attention_op.cc b/paddle/fluid/operators/rank_attention_op.cc
index 460df0333f8..d7490220da0 100644
--- a/paddle/fluid/operators/rank_attention_op.cc
+++ b/paddle/fluid/operators/rank_attention_op.cc
@@ -13,6 +13,7 @@ limitations under the License. */
 #include
 #include
 #include
+#include "paddle/fluid/framework/op_version_registry.h"
 
 namespace paddle {
 namespace operators {
@@ -176,3 +177,18 @@ REGISTER_OP_CPU_KERNEL(
     rank_attention,
     ops::RankAttentionKernel,
     ops::RankAttentionKernel);
+
+REGISTER_OP_VERSION(rank_attention)
+    .AddCheckpoint(
+        R"ROC(
+        Upgrade rank_attention, add 1 output [InputHelp] and 1 attribute
+        [MaxSize].
+      )ROC",
+        paddle::framework::compatible::OpVersionDesc()
+            .NewOutput("InputHelp",
+                       "Output tensor of rank_attention_Op operator "
+                       "in order to assist calculation in the reverse process.")
+            .NewAttr(
+                "MaxSize",
+                "Forward calculation to set the pre-applied video memory size",
+                0));
--
GitLab
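
For context, the pattern this patch follows is the op-version checkpoint registration shown in the diff. Below is a minimal sketch of that same pattern applied to a hypothetical operator name ("my_op") with hypothetical output/attribute names; it only reuses the calls visible in the patch (REGISTER_OP_VERSION, AddCheckpoint, OpVersionDesc, NewOutput, NewAttr) and is an illustration, not code from this change.

    // Sketch only: "my_op", "Helper" and "buffer_size" are hypothetical names
    // chosen for illustration; the call chain mirrors the rank_attention
    // checkpoint added in this patch.
    #include "paddle/fluid/framework/op_version_registry.h"

    REGISTER_OP_VERSION(my_op)
        .AddCheckpoint(
            R"ROC(Upgrade my_op, add 1 output [Helper] and 1 attribute [buffer_size].)ROC",
            paddle::framework::compatible::OpVersionDesc()
                .NewOutput("Helper",
                           "Auxiliary tensor kept for the backward pass.")
                .NewAttr("buffer_size",
                         "Buffer size pre-allocated in the forward pass.",
                         0 /* default used by programs saved before the upgrade */));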