diff --git a/ppocr/modeling/heads/rec_robustscanner_head.py b/ppocr/modeling/heads/rec_robustscanner_head.py
index 7956059ecfe01f27db364d3d748d6af24dad0aac..550836bd401b0b8799e2afb9b185de8ed6b3d5b1 100644
--- a/ppocr/modeling/heads/rec_robustscanner_head.py
+++ b/ppocr/modeling/heads/rec_robustscanner_head.py
@@ -99,10 +99,11 @@ class DotProductAttentionLayer(nn.Layer):
         logits = paddle.reshape(logits, [n, c, h, w])
         if valid_ratios is not None:
             # cal mask of attention weight
-            for i, valid_ratio in enumerate(valid_ratios):
-                valid_width = min(w, int(w * valid_ratio + 0.5))
-                if valid_width < w:
-                    logits[i, :, :, valid_width:] = float('-inf')
+            with paddle.fluid.framework._stride_in_no_check_dy2st_diff():
+                for i, valid_ratio in enumerate(valid_ratios):
+                    valid_width = min(w, int(w * valid_ratio + 0.5))
+                    if valid_width < w:
+                        logits[i, :, :, valid_width:] = float('-inf')
 
         # reshape to (n, c, h, w)
         logits = paddle.reshape(logits, [n, c, t])
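
For context, the loop being wrapped masks attention logits beyond each sample's valid width via in-place slice assignment, which the `_stride_in_no_check_dy2st_diff()` guard exempts from the dynamic-to-static stride check. Below is a minimal out-of-place sketch of the same masking, not part of the patch; it assumes `valid_ratios` is a 1-D float tensor of shape (n,), and the helper name `mask_invalid_width` and the exact rounding are illustrative.

import paddle

def mask_invalid_width(logits, valid_ratios):
    # logits: (n, c, h, w); valid_ratios: (n,) floats in (0, 1] (assumed tensor)
    n, c, h, w = logits.shape
    # per-sample valid width, mirroring min(w, int(w * ratio + 0.5))
    valid_widths = paddle.clip(
        paddle.floor(valid_ratios * w + 0.5), max=float(w)).astype('int64')
    # column indices broadcast against per-sample widths -> bool mask of invalid columns
    col_idx = paddle.arange(w, dtype='int64').reshape([1, 1, 1, w])
    invalid = col_idx >= valid_widths.reshape([n, 1, 1, 1])
    invalid = paddle.broadcast_to(invalid, logits.shape)
    # replace masked positions with -inf without in-place assignment
    return paddle.where(invalid, paddle.full_like(logits, float('-inf')), logits)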