From e9c3ff84a94a0826806b5462a622bf6efe9e4081 Mon Sep 17 00:00:00 2001
From: tink2123
Date: Mon, 22 Nov 2021 12:08:33 +0800
Subject: [PATCH] fix attention loss for ce

---
 ppocr/modeling/heads/rec_att_head.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/ppocr/modeling/heads/rec_att_head.py b/ppocr/modeling/heads/rec_att_head.py
index 7926f074..ab8b119f 100644
--- a/ppocr/modeling/heads/rec_att_head.py
+++ b/ppocr/modeling/heads/rec_att_head.py
@@ -45,7 +45,6 @@ class AttentionHead(nn.Layer):
 
         output_hiddens = []
         if targets is not None:
-            print("target is not None")
             for i in range(num_steps):
                 char_onehots = self._char_to_onehot(
                     targets[:, i], onehot_dim=self.num_classes)
@@ -55,7 +54,6 @@ class AttentionHead(nn.Layer):
             output = paddle.concat(output_hiddens, axis=1)
             probs = self.generator(output)
         else:
-            print("target is None")
             targets = paddle.zeros(shape=[batch_size], dtype="int32")
             probs = None
             char_onehots = None
--
GitLab
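
Context: the forward pass shown in the hunks has two branches. With targets present
(training), the head runs teacher-forced decoding, feeding the ground-truth character
of each step into the attention cell as a one-hot vector; without targets (inference),
it starts from a zero token and feeds back its own prediction at each step. The sketch
below reconstructs that pattern in runnable form. Only the names visible in the hunk
context (attention_cell, generator, _char_to_onehot, output_hiddens) come from the
patch itself; the attention-cell internals, the GRUCell, and the greedy feedback loop
are plausible assumptions, not a verbatim copy of ppocr/modeling/heads/rec_att_head.py.

import paddle
import paddle.nn as nn
import paddle.nn.functional as F


class AttentionHeadSketch(nn.Layer):
    """Minimal sketch of an attention decoding head; not the PaddleOCR original."""

    def __init__(self, in_channels, hidden_size, num_classes):
        super().__init__()
        self.num_classes = num_classes
        self.hidden_size = hidden_size
        # Assumed attention cell: score encoder states against the decoder
        # hidden state, then feed [context; prev-char one-hot] to a GRU cell.
        self.i2h = nn.Linear(in_channels, hidden_size, bias_attr=False)
        self.h2h = nn.Linear(hidden_size, hidden_size)
        self.score = nn.Linear(hidden_size, 1, bias_attr=False)
        self.rnn = nn.GRUCell(in_channels + num_classes, hidden_size)
        self.generator = nn.Linear(hidden_size, num_classes)

    def _char_to_onehot(self, input_char, onehot_dim):
        # Character indices -> one-hot vectors, as in the hunk context.
        return F.one_hot(input_char, onehot_dim)

    def attention_cell(self, hidden, inputs, char_onehots):
        # alpha = softmax(score(tanh(W_i x_t + W_h h))); context = sum(alpha * x)
        batch_h = self.i2h(inputs)                      # [B, T, H]
        prev_h = paddle.unsqueeze(self.h2h(hidden), 1)  # [B, 1, H]
        alpha = F.softmax(self.score(paddle.tanh(batch_h + prev_h)), axis=1)
        context = paddle.sum(alpha * inputs, axis=1)    # [B, C]
        concat = paddle.concat([context, char_onehots], axis=1)
        output, hidden = self.rnn(concat, hidden)
        return (output, hidden), alpha

    def forward(self, inputs, targets=None, num_steps=25):
        batch_size = inputs.shape[0]
        hidden = paddle.zeros([batch_size, self.hidden_size])
        output_hiddens = []
        if targets is not None:
            # Training: teacher forcing, ground-truth char fed at every step.
            for i in range(num_steps):
                char_onehots = self._char_to_onehot(
                    targets[:, i], onehot_dim=self.num_classes)
                (outputs, hidden), _ = self.attention_cell(hidden, inputs,
                                                           char_onehots)
                output_hiddens.append(paddle.unsqueeze(outputs, axis=1))
            output = paddle.concat(output_hiddens, axis=1)
            probs = self.generator(output)
        else:
            # Inference: greedy decode, feed back the argmax of each step.
            targets = paddle.zeros(shape=[batch_size], dtype="int32")
            probs = None
            for i in range(num_steps):
                char_onehots = self._char_to_onehot(
                    targets, onehot_dim=self.num_classes)
                (outputs, hidden), _ = self.attention_cell(hidden, inputs,
                                                           char_onehots)
                probs_step = self.generator(outputs)
                step = paddle.unsqueeze(probs_step, axis=1)
                probs = step if probs is None else paddle.concat(
                    [probs, step], axis=1)
                targets = paddle.argmax(probs_step, axis=1).astype("int32")
        return probs


# Usage (hypothetical shapes): encoder features [B, T, C]
# head = AttentionHeadSketch(in_channels=256, hidden_size=256, num_classes=38)
# feats = paddle.randn([2, 80, 256])
# probs = head(feats)  # greedy decode over 25 steps -> [2, 25, 38]

The removed print calls sat at the top of each branch, so they fired on every forward
pass. For a CE run (presumably PaddlePaddle's continuous-evaluation jobs, given the
commit subject) that decodes many batches, that floods the logs, which is likely why
the patch drops them.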