From 477eb586515ef9eaf94abff7a8084386fadace21 Mon Sep 17 00:00:00 2001
From: tink2123
Date: Mon, 22 Nov 2021 12:14:06 +0800
Subject: [PATCH] fix attn loss for ce

---
 ppocr/data/imaug/label_ops.py        | 2 +-
 ppocr/modeling/heads/rec_att_head.py | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/ppocr/data/imaug/label_ops.py b/ppocr/data/imaug/label_ops.py
index 8c260a92..aebd97fb 100644
--- a/ppocr/data/imaug/label_ops.py
+++ b/ppocr/data/imaug/label_ops.py
@@ -318,7 +318,7 @@ class AttnLabelEncode(BaseRecLabelEncode):
         text = self.encode(text)
         if text is None:
             return None
-        if len(text) >= self.max_text_len:
+        if len(text) >= self.max_text_len - 1:
             return None
         data['length'] = np.array(len(text))
         text = [0] + text + [len(self.character) - 1] + [0] * (self.max_text_len
diff --git a/ppocr/modeling/heads/rec_att_head.py b/ppocr/modeling/heads/rec_att_head.py
index 6d77e42e..3c572af8 100644
--- a/ppocr/modeling/heads/rec_att_head.py
+++ b/ppocr/modeling/heads/rec_att_head.py
@@ -75,6 +75,7 @@ class AttentionHead(nn.Layer):
                         probs_step, axis=1)], axis=1)
                 next_input = probs_step.argmax(axis=1)
                 targets = next_input
+        if not self.training:
             probs = paddle.nn.functional.softmax(probs, axis=2)
         return probs
 
--
GitLab
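
Why the bound in AttnLabelEncode moves from max_text_len to max_text_len - 1:
the attention label is laid out as [SOS] + text + [EOS] + zero padding inside a
fixed buffer of max_text_len slots, so at most max_text_len - 2 characters fit.
With the old check, a text of exactly max_text_len - 1 characters slipped
through, the padding term (max_text_len - len(text) - 2) went negative, and the
resulting label no longer matched the shape the cross-entropy loss expects.
A minimal standalone sketch of that layout (build_attn_label, max_text_len, and
num_classes are illustrative names, not PaddleOCR APIs):

    max_text_len = 25
    num_classes = 38  # illustrative charset size, including SOS/EOS ids

    def build_attn_label(text_ids):
        # Fixed layout: [SOS] + text + [EOS] + zero padding, max_text_len slots.
        if len(text_ids) >= max_text_len - 1:  # the patched check
            return None  # too long: SOS and EOS would not both fit
        sos, eos = 0, num_classes - 1
        label = [sos] + text_ids + [eos]
        label += [0] * (max_text_len - len(label))
        return label

    print(len(build_attn_label(list(range(1, 24)))))  # 23 chars -> 25-slot label
    print(build_attn_label(list(range(1, 25))))       # 24 chars -> None, rejected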
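
Why the softmax in AttentionHead is now gated on `not self.training` (the "ce"
in the subject): paddle.nn.CrossEntropyLoss applies softmax to its input
internally by default, so the head must return raw logits during training and
normalize only at inference; applying softmax twice flattens the distribution
and corrupts the loss. A minimal sketch of that output convention (TinyHead is
a stand-in, not the real AttentionHead):

    import paddle

    class TinyHead(paddle.nn.Layer):
        def __init__(self, in_dim=32, num_classes=38):
            super().__init__()
            self.generator = paddle.nn.Linear(in_dim, num_classes)

        def forward(self, feats):
            probs = self.generator(feats)  # [B, T, C] raw logits
            if not self.training:          # mirrors the patched branch
                probs = paddle.nn.functional.softmax(probs, axis=2)
            return probs

    head = TinyHead()
    x = paddle.randn([2, 25, 32])
    head.train()
    logits = head(x)  # raw logits, as paddle.nn.CrossEntropyLoss expects
    head.eval()
    probs = head(x)   # per-step probabilities for decoding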