diff --git a/ppocr/metrics/eval_det_iou.py b/ppocr/metrics/eval_det_iou.py
index 0e32b2d19281de9a18a1fe0343bd7e8237825b7b..19b9008eff5abc619bad20b0a22beaab9a34cf7f 100644
--- a/ppocr/metrics/eval_det_iou.py
+++ b/ppocr/metrics/eval_det_iou.py
@@ -168,22 +168,27 @@ class DetectionIoUEvaluator(object):
         numGlobalCareGt += numGtCare
         numGlobalCareDet += numDetCare
 
+        # perSampleMetrics = {
+        #     'precision': precision,
+        #     'recall': recall,
+        #     'hmean': hmean,
+        #     'pairs': pairs,
+        #     'iouMat': [] if len(detPols) > 100 else iouMat.tolist(),
+        #     'gtPolPoints': gtPolPoints,
+        #     'detPolPoints': detPolPoints,
+        #     'gtCare': numGtCare,
+        #     'detCare': numDetCare,
+        #     'gtDontCare': gtDontCarePolsNum,
+        #     'detDontCare': detDontCarePolsNum,
+        #     'detMatched': detMatched,
+        #     'evaluationLog': evaluationLog
+        # }
+
         perSampleMetrics = {
-            'precision': precision,
-            'recall': recall,
-            'hmean': hmean,
-            'pairs': pairs,
-            'iouMat': [] if len(detPols) > 100 else iouMat.tolist(),
-            'gtPolPoints': gtPolPoints,
-            'detPolPoints': detPolPoints,
             'gtCare': numGtCare,
             'detCare': numDetCare,
-            'gtDontCare': gtDontCarePolsNum,
-            'detDontCare': detDontCarePolsNum,
             'detMatched': detMatched,
-            'evaluationLog': evaluationLog
         }
-
         return perSampleMetrics
 
     def combine_results(self, results):
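
Note: the trimmed perSampleMetrics keeps only the three per-image counters (gtCare, detCare, detMatched) that combine_results needs to aggregate; per-sample precision/recall/hmean, polygon points, the IoU matrix, and the evaluation log are no longer built for every image. A minimal sketch of how those retained keys are typically consumed downstream is shown below; the aggregation logic here is an assumption based on the usual IoU-evaluator pattern, not copied from this diff:

    def combine_results(results):
        # Sum the per-image counters kept in the trimmed perSampleMetrics dict.
        numGlobalCareGt = sum(r['gtCare'] for r in results)
        numGlobalCareDet = sum(r['detCare'] for r in results)
        matchedSum = sum(r['detMatched'] for r in results)

        # Global precision/recall/hmean are derived from the summed counters,
        # so the per-sample precision/recall/hmean fields are not required.
        recall = 0 if numGlobalCareGt == 0 else matchedSum / numGlobalCareGt
        precision = 0 if numGlobalCareDet == 0 else matchedSum / numGlobalCareDet
        hmean = 0 if recall + precision == 0 else \
            2 * recall * precision / (recall + precision)
        return {'precision': precision, 'recall': recall, 'hmean': hmean}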