diff --git a/ppocr/postprocess/rec_postprocess.py b/ppocr/postprocess/rec_postprocess.py
index fbf8b93e3d11121c99ce5b2dcbf2149e15453d4a..5547f005759b6585252c62d1e8a9d468e9880a2f 100644
--- a/ppocr/postprocess/rec_postprocess.py
+++ b/ppocr/postprocess/rec_postprocess.py
@@ -891,7 +891,7 @@ class VLLabelDecode(BaseRecLabelDecode):
             ) + length[i])].topk(1)[0][:, 0]
             preds_prob = paddle.exp(
                 paddle.log(preds_prob).sum() / (preds_prob.shape[0] + 1e-6))
-            text.append((preds_text, preds_prob.numpy()[0]))
+            text.append((preds_text, float(preds_prob)))
         if label is None:
             return text
         label = self.decode(label)
diff --git a/test_tipc/prepare.sh b/test_tipc/prepare.sh
index 02ee8a24d241195d1330ea42fc05ed35dd7a87b7..e184b14ddc0718aa169229b3c95d2dc84d46a183 100644
--- a/test_tipc/prepare.sh
+++ b/test_tipc/prepare.sh
@@ -150,7 +150,9 @@ if [ ${MODE} = "lite_train_lite_infer" ];then
     # pretrain lite train data
     wget -nc -P ./pretrain_models/ https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/MobileNetV3_large_x0_5_pretrained.pdparams --no-check-certificate
     wget -nc -P ./pretrain_models/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/det_mv3_db_v2.0_train.tar --no-check-certificate
-    cd ./pretrain_models/ && tar xf det_mv3_db_v2.0_train.tar && cd ../
+    cd ./pretrain_models/
+    tar xf det_mv3_db_v2.0_train.tar
+    cd ../
     if [[ ${model_name} =~ "ch_PP-OCRv2_det" ]];then
         wget -nc -P ./pretrain_models/ https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_distill_train.tar --no-check-certificate
         cd ./pretrain_models/ && tar xf ch_PP-OCRv2_det_distill_train.tar && cd ../
diff --git a/test_tipc/supplementary/train.py b/test_tipc/supplementary/train.py
index e632d1d1803a85144bc750c3ff6ff51b1eb65973..a15a99ff85e6667700f7e57800e1feeb013da869 100644
--- a/test_tipc/supplementary/train.py
+++ b/test_tipc/supplementary/train.py
@@ -168,22 +168,22 @@ def train(config, scaler=None):
             if idx % 10 == 0:
                 et = time.time()
                 strs = f"epoch: [{epoch}/{EPOCH}], iter: [{idx}/{data_num}], "
-                strs += f"loss: {avg_loss.numpy()[0]}"
-                strs += f", acc_topk1: {acc['top1'].numpy()[0]}, acc_top5: {acc['top5'].numpy()[0]}"
+                strs += f"loss: {float(avg_loss)}"
+                strs += f", acc_topk1: {float(acc['top1'])}, acc_top5: {float(acc['top5'])}"
                 strs += f", batch_time: {round(et-st, 4)} s"
                 logger.info(strs)
                 st = time.time()
 
         if epoch % 10 == 0:
             acc = eval(config, model)
-            if len(best_acc) < 1 or acc['top5'].numpy()[0] > best_acc['top5']:
+            if len(best_acc) < 1 or float(acc['top5']) > best_acc['top5']:
                 best_acc = acc
                 best_acc['epoch'] = epoch
                 is_best = True
             else:
                 is_best = False
             logger.info(
-                f"The best acc: acc_topk1: {best_acc['top1'].numpy()[0]}, acc_top5: {best_acc['top5'].numpy()[0]}, best_epoch: {best_acc['epoch']}"
+                f"The best acc: acc_topk1: {float(best_acc['top1'])}, acc_top5: {float(best_acc['top5'])}, best_epoch: {best_acc['epoch']}"
             )
             save_model(
                 model,
@@ -276,22 +276,22 @@ def train_distill(config, scaler=None):
             if idx % 10 == 0:
                 et = time.time()
                 strs = f"epoch: [{epoch}/{EPOCH}], iter: [{idx}/{data_num}], "
-                strs += f"loss: {avg_loss.numpy()[0]}"
-                strs += f", acc_topk1: {acc['top1'].numpy()[0]}, acc_top5: {acc['top5'].numpy()[0]}"
+                strs += f"loss: {float(avg_loss)}"
+                strs += f", acc_topk1: {float(acc['top1'])}, acc_top5: {float(acc['top5'])}"
                 strs += f", batch_time: {round(et-st, 4)} s"
                 logger.info(strs)
                 st = time.time()
 
         if epoch % 10 == 0:
             acc = eval(config, model._layers.student)
-            if len(best_acc) < 1 or acc['top5'].numpy()[0] > best_acc['top5']:
+            if len(best_acc) < 1 or float(acc['top5']) > best_acc['top5']:
                 best_acc = acc
                 best_acc['epoch'] = epoch
                 is_best = True
             else:
                 is_best = False
             logger.info(
-                f"The best acc: acc_topk1: {best_acc['top1'].numpy()[0]}, acc_top5: {best_acc['top5'].numpy()[0]}, best_epoch: {best_acc['epoch']}"
+                f"The best acc: acc_topk1: {float(best_acc['top1'])}, acc_top5: {float(best_acc['top5'])}, best_epoch: {best_acc['epoch']}"
             )
 
             save_model(
@@ -401,22 +401,22 @@ def train_distill_multiopt(config, scaler=None):
             if idx % 10 == 0:
                 et = time.time()
                 strs = f"epoch: [{epoch}/{EPOCH}], iter: [{idx}/{data_num}], "
-                strs += f"loss: {avg_loss.numpy()[0]}, loss1: {avg_loss1.numpy()[0]}"
-                strs += f", acc_topk1: {acc['top1'].numpy()[0]}, acc_top5: {acc['top5'].numpy()[0]}"
+                strs += f"loss: {float(avg_loss)}, loss1: {float(avg_loss1)}"
+                strs += f", acc_topk1: {float(acc['top1'])}, acc_top5: {float(acc['top5'])}"
                 strs += f", batch_time: {round(et-st, 4)} s"
                 logger.info(strs)
                 st = time.time()
 
         if epoch % 10 == 0:
             acc = eval(config, model._layers.student)
-            if len(best_acc) < 1 or acc['top5'].numpy()[0] > best_acc['top5']:
+            if len(best_acc) < 1 or float(acc['top5']) > best_acc['top5']:
                 best_acc = acc
                 best_acc['epoch'] = epoch
                 is_best = True
             else:
                 is_best = False
             logger.info(
-                f"The best acc: acc_topk1: {best_acc['top1'].numpy()[0]}, acc_top5: {best_acc['top5'].numpy()[0]}, best_epoch: {best_acc['epoch']}"
+                f"The best acc: acc_topk1: {float(best_acc['top1'])}, acc_top5: {float(best_acc['top5'])}, best_epoch: {best_acc['epoch']}"
             )
             save_model(
                 model, [optimizer, optimizer1],
@@ -450,7 +450,7 @@ def eval(config, model):
     labels = paddle.concat(labels, axis=0)
     acc = metric_func(outs, labels)
 
-    strs = f"The metric are as follows: acc_topk1: {acc['top1'].numpy()[0]}, acc_top5: {acc['top5'].numpy()[0]}"
+    strs = f"The metric are as follows: acc_topk1: {float(acc['top1'])}, acc_top5: {float(acc['top5'])}"
     logger.info(strs)
 
     return acc
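Taken together, the `rec_postprocess.py` and `train.py` hunks replace a single idiom, `tensor.numpy()[0]`, with `float(tensor)` wherever a Python scalar is pulled out of a Paddle tensor. A likely motivation, not stated in the diff itself, is Paddle's move to true zero-dimensional tensors: a reduction such as `.sum()` or a scalar metric now comes back 0-D, its `.numpy()` result is a 0-D array, and indexing that array with `[0]` raises `IndexError`. `float()` accepts both the old shape-`[1]` tensors and the new 0-D ones. A minimal sketch of the difference, assuming Paddle's 0-D tensor semantics (the `loss` tensor here is illustrative, not taken from the repo):

```python
import paddle

# Under 0-D semantics, a reduction like sum() returns a 0-D tensor.
loss = paddle.to_tensor([0.25, 0.75]).sum()

# Old idiom: .numpy() on a 0-D tensor yields a 0-D ndarray, so [0]
# raises IndexError. (It only worked while reductions had shape [1].)
# scalar = loss.numpy()[0]

# New idiom: float() converts any single-element or 0-D tensor,
# so the logging code behaves the same on old and new Paddle.
scalar = float(loss)
print(scalar)  # 1.0
```

The `prepare.sh` hunk is an unrelated readability cleanup: the chained `cd ./pretrain_models/ && tar xf ... && cd ../` becomes one command per line. One subtle behavioral difference: in the unrolled form, `tar` and the final `cd ../` still run even if an earlier step fails, which the `&&` chain would have prevented.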