From 81aba737d39ebbfa3cfb15e4bd79e7b154f19d58 Mon Sep 17 00:00:00 2001
From: xiaoting <31891223+tink2123@users.noreply.github.com>
Date: Fri, 7 May 2021 10:50:12 +0800
Subject: [PATCH] [Cherry pick] fix srn for sub_layers (#2694)

* fix srn for sublayer

* update for paddle2.1
---
 ppocr/modeling/heads/self_attention.py | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/ppocr/modeling/heads/self_attention.py b/ppocr/modeling/heads/self_attention.py
index 51d5198f..6c27fdbe 100644
--- a/ppocr/modeling/heads/self_attention.py
+++ b/ppocr/modeling/heads/self_attention.py
@@ -285,8 +285,7 @@ class PrePostProcessLayer(nn.Layer):
             elif cmd == "n":  # add layer normalization
                 self.functors.append(
                     self.add_sublayer(
-                        "layer_norm_%d" % len(
-                            self.sublayers(include_sublayers=False)),
+                        "layer_norm_%d" % len(self.sublayers()),
                         paddle.nn.LayerNorm(
                             normalized_shape=d_model,
                             weight_attr=fluid.ParamAttr(
@@ -320,9 +319,7 @@ class PrepareEncoder(nn.Layer):
         self.src_emb_dim = src_emb_dim
         self.src_max_len = src_max_len
         self.emb = paddle.nn.Embedding(
-            num_embeddings=self.src_max_len,
-            embedding_dim=self.src_emb_dim,
-            sparse=True)
+            num_embeddings=self.src_max_len, embedding_dim=self.src_emb_dim)
         self.dropout_rate = dropout_rate
 
     def forward(self, src_word, src_pos):
--
GitLab
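
Both hunks track Paddle 2.1 API changes: Layer.sublayers() no longer accepts
the include_sublayers keyword the old code passed, and sparse=True is dropped
from paddle.nn.Embedding so the SRN encoder's embedding produces ordinary
dense gradients, presumably to sidestep sparse-gradient handling that changed
in 2.1. A minimal sketch of the patched sublayer-naming pattern follows,
assuming Paddle 2.1+; the NormStack class, its sizes, and the demo input are
illustrative, not part of this PR.

    import paddle
    import paddle.nn as nn

    class NormStack(nn.Layer):
        """Registers numbered LayerNorms like the patched PrePostProcessLayer."""

        def __init__(self, d_model=64, num_norms=3):
            super().__init__()
            self.functors = []
            for _ in range(num_norms):
                # sublayers() grows by one with each add_sublayer call, so
                # its length yields unique names: layer_norm_0, layer_norm_1, ...
                self.functors.append(
                    self.add_sublayer(
                        "layer_norm_%d" % len(self.sublayers()),
                        nn.LayerNorm(normalized_shape=d_model)))

        def forward(self, x):
            for functor in self.functors:
                x = functor(x)
            return x

    # Dense embedding, matching the patched PrepareEncoder (sparse removed).
    emb = nn.Embedding(num_embeddings=256, embedding_dim=64)

    model = NormStack()
    out = model(paddle.randn([2, 8, 64]))
    print(out.shape)  # [2, 8, 64]

Because add_sublayer registers each LayerNorm before the next loop iteration
queries len(self.sublayers()), the generated names stay unique without the
removed include_sublayers argument.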