diff --git a/ppocr/modeling/heads/self_attention.py b/ppocr/modeling/heads/self_attention.py
index 51d5198f558dcb7e0351f04b3a884b71707104d4..4e96e4a455c49246f711e4b7888d38ac88c3678e 100644
--- a/ppocr/modeling/heads/self_attention.py
+++ b/ppocr/modeling/heads/self_attention.py
@@ -285,8 +285,7 @@ class PrePostProcessLayer(nn.Layer):
             elif cmd == "n":  # add layer normalization
                 self.functors.append(
                     self.add_sublayer(
-                        "layer_norm_%d" % len(
-                            self.sublayers(include_sublayers=False)),
+                        "layer_norm_%d" % len(self.sublayers()),
                         paddle.nn.LayerNorm(
                             normalized_shape=d_model,
                             weight_attr=fluid.ParamAttr(