diff --git a/ppdet/engine/export_utils.py b/ppdet/engine/export_utils.py
index 93f8a929c528472f82dc2638293086a23997317b..d8d45860fe0fdfecfee66671128c759323d66a26 100644
--- a/ppdet/engine/export_utils.py
+++ b/ppdet/engine/export_utils.py
@@ -131,12 +131,15 @@ def _dump_infer_config(config, path, image_shape, model):
         'use_dynamic_shape': use_dynamic_shape
     })
     export_onnx = config.get('export_onnx', False)
+    export_eb = config.get('export_eb', False)
 
     infer_arch = config['architecture']
     if 'RCNN' in infer_arch and export_onnx:
         logger.warning(
             "Exporting RCNN model to ONNX only support batch_size = 1")
         infer_cfg['export_onnx'] = True
 
+    infer_cfg['export_eb'] = export_eb
+
     if infer_arch in MOT_ARCH:
         if infer_arch == 'DeepSORT':
diff --git a/ppdet/modeling/post_process.py b/ppdet/modeling/post_process.py
index 9096d124f04f99a598d433e793d3e6d258e3c86d..56a696c919a756b50fd00cdfb62ccc0728257c4f 100644
--- a/ppdet/modeling/post_process.py
+++ b/ppdet/modeling/post_process.py
@@ -34,16 +34,17 @@ __all__ = [
 
 @register
 class BBoxPostProcess(object):
-    __shared__ = ['num_classes', 'export_onnx']
+    __shared__ = ['num_classes', 'export_onnx', 'export_eb']
     __inject__ = ['decode', 'nms']
 
     def __init__(self, num_classes=80, decode=None, nms=None,
-                 export_onnx=False):
+                 export_onnx=False, export_eb=False):
         super(BBoxPostProcess, self).__init__()
         self.num_classes = num_classes
         self.decode = decode
         self.nms = nms
         self.export_onnx = export_onnx
+        self.export_eb = export_eb
 
     def __call__(self, head_out, rois, im_shape, scale_factor):
         """
@@ -100,6 +101,10 @@ class BBoxPostProcess(object):
             pred_result (Tensor): The final prediction results with shape [N, 6]
                 including labels, scores and bboxes.
         """
+        if self.export_eb:
+            # enable rcnn models for edgeboard hw to skip the following postprocess.
+            return bboxes, bboxes, bbox_num
+
        if not self.export_onnx:
             bboxes_list = []
             bbox_num_list = []
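
For context, a minimal standalone sketch of the control flow this patch adds to BBoxPostProcess.get_pred. This is not PaddleDetection code; the function and variable names below are illustrative only. It shows the intended behavior: when export_eb is set, the raw decoded boxes are returned unchanged so EdgeBoard-side tooling can run the rescale/clip postprocess itself, while the regular path is untouched.

import numpy as np

# Sketch only: mirrors the early return added to get_pred(); the real method
# also rescales boxes back to the original image space and handles the
# empty-detection case, which is elided here.
def get_pred_sketch(bboxes, bbox_num, export_eb=False):
    if export_eb:
        # Skip host-side postprocess; hand raw network outputs to the device.
        return bboxes, bboxes, bbox_num
    raise NotImplementedError("regular postprocess path elided in this sketch")

boxes = np.zeros((100, 6), dtype=np.float32)  # [label, score, x1, y1, x2, y2]
nums = np.array([100], dtype=np.int32)
pred, pred_raw, num = get_pred_sketch(boxes, nums, export_eb=True)
print(pred.shape, num)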