From 4386b8418dea02ff644a40911282b3101cb4b2c4 Mon Sep 17 00:00:00 2001
From: gaotingquan
Date: Tue, 15 Mar 2022 07:41:42 +0000
Subject: [PATCH] fix: support bs>1

---
 .../ppshitu_v2/configs/test_cls_config.yaml  |  4 +-
 .../ppshitu_v2/configs/test_det_config.yaml  |  2 +
 .../ppshitu_v2/configs/test_rec_config.yaml  |  2 +-
 .../algo_mod/postprocessor/classification.py | 44 ++++++++++---------
 .../processor/algo_mod/postprocessor/det.py  | 36 +++++++--------
 .../algo_mod/predictor/paddle_predictor.py   | 19 +++-----
 6 files changed, 51 insertions(+), 56 deletions(-)

diff --git a/deploy/python/ppshitu_v2/configs/test_cls_config.yaml b/deploy/python/ppshitu_v2/configs/test_cls_config.yaml
index 7b18e9a0..9a9a167b 100644
--- a/deploy/python/ppshitu_v2/configs/test_cls_config.yaml
+++ b/deploy/python/ppshitu_v2/configs/test_cls_config.yaml
@@ -29,10 +29,10 @@ Modules:
     inference_model_dir: "./MobileNetV2_infer"
     to_model_names:
       image: inputs
-    from_model_names:
+    from_model_indexes:
       logits: 0
   - name: TopK
     type: postprocessor
     k: 10
-    class_id_map_file: "../ppcls/utils/imagenet1k_label_list.txt"
+    class_id_map_file: "../../../ppcls/utils/imagenet1k_label_list.txt"
     save_dir: None
\ No newline at end of file
diff --git a/deploy/python/ppshitu_v2/configs/test_det_config.yaml b/deploy/python/ppshitu_v2/configs/test_det_config.yaml
index 64a421fa..7a46dd41 100644
--- a/deploy/python/ppshitu_v2/configs/test_det_config.yaml
+++ b/deploy/python/ppshitu_v2/configs/test_det_config.yaml
@@ -25,6 +25,8 @@ Modules:
   - name: PaddlePredictor
     type: predictor
     inference_model_dir: ./models/ppyolov2_r50vd_dcn_mainbody_v1.0_infer/
+    from_model_indexes:
+      boxes: 0
   - name: DetPostPro
     type: postprocessor
     threshold: 0.2
diff --git a/deploy/python/ppshitu_v2/configs/test_rec_config.yaml b/deploy/python/ppshitu_v2/configs/test_rec_config.yaml
index 1c986bd8..ba826fae 100644
--- a/deploy/python/ppshitu_v2/configs/test_rec_config.yaml
+++ b/deploy/python/ppshitu_v2/configs/test_rec_config.yaml
@@ -28,7 +28,7 @@ Modules:
     inference_model_dir: models/product_ResNet50_vd_aliproduct_v1.0_infer
     to_model_names:
       image: x
-    from_model_names:
+    from_model_indexes:
       features: 0
   - name: FeatureNormalizer
     type: postprocessor
\ No newline at end of file
diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py
index 4c15b957..33365751 100644
--- a/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py
+++ b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py
@@ -39,26 +39,28 @@ class TopK(BaseProcessor):
         return class_id_map
 
     def process(self, data):
-        # TODO(gaotingquan): only support bs==1 when 'connector' is not implemented.
-        probs = data["pred"]["logits"][0]
-        index = probs.argsort(axis=0)[-self.topk:][::-1].astype(
-            "int32") if not self.multilabel else np.where(
-                probs >= 0.5)[0].astype("int32")
-        clas_id_list = []
-        score_list = []
-        label_name_list = []
-        for i in index:
-            clas_id_list.append(i.item())
-            score_list.append(probs[i].item())
-            if self.class_id_map is not None:
-                label_name_list.append(self.class_id_map[i.item()])
-        result = {
-            "class_ids": clas_id_list,
-            "scores": np.around(
-                score_list, decimals=5).tolist(),
-        }
-        if label_name_list is not None:
-            result["label_names"] = label_name_list
+        logits = data["pred"]["logits"]
+        all_results = []
+        for probs in logits:
+            index = probs.argsort(axis=0)[-self.topk:][::-1].astype(
+                "int32") if not self.multilabel else np.where(
+                    probs >= 0.5)[0].astype("int32")
+            clas_id_list = []
+            score_list = []
+            label_name_list = []
+            for i in index:
+                clas_id_list.append(i.item())
+                score_list.append(probs[i].item())
+                if self.class_id_map is not None:
+                    label_name_list.append(self.class_id_map[i.item()])
+            result = {
+                "class_ids": clas_id_list,
+                "scores": np.around(
+                    score_list, decimals=5).tolist(),
+            }
+            if label_name_list is not None:
+                result["label_names"] = label_name_list
+            all_results.append(result)
 
-        data["classification_res"] = result
+        data["classification_res"] = all_results
         return data
diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/det.py b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/det.py
index 58743063..5e7792b7 100644
--- a/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/det.py
+++ b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/det.py
@@ -12,33 +12,29 @@ class DetPostPro(BaseProcessor):
         self.max_det_results = config["max_det_results"]
 
     def process(self, data):
-        pred = data["pred"]
-        np_boxes = pred[list(pred.keys())[0]]
+        np_boxes = data["pred"]["boxes"]
         if reduce(lambda x, y: x * y, np_boxes.shape) >= 6:
             keep_indexes = np_boxes[:, 1].argsort()[::-1][:
                                                           self.max_det_results]
-            # TODO(gaotingquan): only support bs==1
-            single_res = np_boxes[0]
-            class_id = int(single_res[0])
-            score = single_res[1]
-            bbox = single_res[2:]
-            if score > self.threshold:
+
+            all_results = []
+            for idx in keep_indexes:
+                single_res = np_boxes[idx]
+                class_id = int(single_res[0])
+                score = single_res[1]
+                bbox = single_res[2:]
+                if score < self.threshold:
+                    continue
                 label_name = self.label_list[class_id]
-                results = {
+                all_results.append({
                     "class_id": class_id,
                     "score": score,
                     "bbox": bbox,
-                    "label_name": label_name,
-                }
-                data["detection_res"] = results
-                return data
+                    "label_name": label_name
+                })
+            data["detection_res"] = all_results
+            return data
 
         logger.warning('[Detector] No object detected.')
-        results = {
-            "class_id": None,
-            "score": None,
-            "bbox": None,
-            "label_name": None,
-        }
-        data["detection_res"] = results
+        data["detection_res"] = []
         return data
diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py b/deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py
index 0a10a443..ea303f63 100644
--- a/deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py
+++ b/deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py
@@ -55,10 +55,8 @@ class PaddlePredictor(BaseProcessor):
             }
         else:
            self.input_name_map = {}
-        if "from_model_names" in config and config["from_model_names"]:
-            self.output_name_map = config["from_model_names"]
-        else:
-            self.output_name_map = {}
+
+        self.output_name_map = config["from_model_indexes"]
 
     def process(self, data):
         input_names = self.predictor.get_input_names()
@@ -73,15 +71,12 @@ class PaddlePredictor(BaseProcessor):
         output_names = self.predictor.get_output_names()
         for output_name in output_names:
             output = self.predictor.get_output_handle(output_name)
-            model_output.append((output_name, output.copy_to_cpu()))
+            model_output.append(output.copy_to_cpu())
 
-        if self.output_name_map:
-            output_data = {}
-            for name in self.output_name_map:
-                idx = self.output_name_map[name]
-                output_data[name] = model_output[idx][1]
-        else:
-            output_data = dict(model_output)
+        output_data = {}
+        for name in self.output_name_map:
+            idx = self.output_name_map[name]
+            output_data[name] = model_output[idx]
 
         data["pred"] = output_data
         return data
--
GitLab
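For reference, below is a minimal, self-contained sketch of the two behaviors this commit introduces: resolving predictor outputs by position through a from_model_indexes-style mapping, and emitting one top-k result per sample so that bs>1 is handled. The helper names map_outputs_by_index and topk_batch are illustrative only and are not part of the repository; the real logic lives in PaddlePredictor.process and TopK.process in the diff above.

import numpy as np


def map_outputs_by_index(model_output, from_model_indexes):
    # model_output: list of arrays in the order the predictor returns them.
    # from_model_indexes: e.g. {"logits": 0}, as in the updated YAML configs.
    return {name: model_output[idx] for name, idx in from_model_indexes.items()}


def topk_batch(logits, k=5):
    # One result dict per sample, so a batch of N logits yields N results.
    results = []
    for probs in logits:
        index = probs.argsort()[-k:][::-1].astype("int32")
        results.append({
            "class_ids": [int(i) for i in index],
            "scores": np.around(probs[index], decimals=5).tolist(),
        })
    return results


if __name__ == "__main__":
    # Fake predictor output: one tensor holding logits for a batch of 2 samples.
    fake_model_output = [np.random.rand(2, 10)]
    pred = map_outputs_by_index(fake_model_output, {"logits": 0})
    print(topk_batch(pred["logits"], k=3))  # two result dicts, one per sample

As the diff shows, the predictor now stores raw outputs in order and resolves their names from the config, which is what the rename from from_model_names to from_model_indexes encodes.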