# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os

import cv2
import numpy as np

from paddleclas.deploy.utils import logger, config
from paddleclas.deploy.utils.predictor import Predictor
from paddleclas.deploy.utils.get_image_list import get_image_list
from paddleclas.deploy.python.preprocess import create_operators
from paddleclas.deploy.python.postprocess import build_postprocess


class RecPredictor(Predictor):
    """Recognition (feature-extraction) predictor built on the deploy Predictor base."""

    def __init__(self, config):
        super().__init__(config["Global"],
                         config["Global"]["rec_inference_model_dir"])
        self.preprocess_ops = create_operators(config["RecPreProcess"][
            "transform_ops"])
        self.postprocess = build_postprocess(config["RecPostProcess"])
        self.benchmark = config["Global"].get("benchmark", False)

        if self.benchmark:
            import auto_log
            pid = os.getpid()
            self.auto_logger = auto_log.AutoLogger(
                model_name=config["Global"].get("model_name", "rec"),
                model_precision='fp16'
                if config["Global"]["use_fp16"] else 'fp32',
                batch_size=config["Global"].get("batch_size", 1),
                data_shape=[3, 224, 224],
                save_path=config["Global"].get("save_log_path",
                                               "./auto_log.log"),
                inference_config=self.config,
                pids=pid,
                process_name=None,
                gpu_ids=None,
                time_keys=[
                    'preprocess_time', 'inference_time', 'postprocess_time'
                ],
                warmup=2)

    def predict(self, images, feature_normalize=True):
        use_onnx = self.args.get("use_onnx", False)
        if not use_onnx:
            # Paddle Inference predictor: fetch input/output handles by name.
            input_names = self.predictor.get_input_names()
            input_tensor = self.predictor.get_input_handle(input_names[0])

            output_names = self.predictor.get_output_names()
            output_tensor = self.predictor.get_output_handle(output_names[0])
        else:
            # ONNX Runtime session: only the tensor names are needed.
            input_names = self.predictor.get_inputs()[0].name
            output_names = self.predictor.get_outputs()[0].name

        if self.benchmark:
            self.auto_logger.times.start()

        # Apply the configured preprocess operators to each image, then batch them.
        if not isinstance(images, (list, )):
            images = [images]
        for idx in range(len(images)):
            for ops in self.preprocess_ops:
                images[idx] = ops(images[idx])
        image = np.array(images)
        if self.benchmark:
            self.auto_logger.times.stamp()

        if not use_onnx:
            input_tensor.copy_from_cpu(image)
            self.predictor.run()
            batch_output = output_tensor.copy_to_cpu()
        else:
            batch_output = self.predictor.run(
                output_names=[output_names],
                input_feed={input_names: image})[0]

        if self.benchmark:
            self.auto_logger.times.stamp()

        # L2-normalize each feature vector (row-wise) in the batch.
        if feature_normalize:
            feas_norm = np.sqrt(
                np.sum(np.square(batch_output), axis=1, keepdims=True))
            batch_output = np.divide(batch_output, feas_norm)

        if self.postprocess is not None:
            batch_output = self.postprocess(batch_output)

        if self.benchmark:
            self.auto_logger.times.end(stamp=True)
        return batch_output


def main(config):
    rec_predictor = RecPredictor(config)
    image_list = get_image_list(config["Global"]["infer_imgs"])

    batch_imgs = []
    batch_names = []
    cnt = 0
    for idx, img_path in enumerate(image_list):
        img = cv2.imread(img_path)
        if img is None:
            logger.warning(
                "Image file failed to read and has been skipped. The path: {}".
                format(img_path))
        else:
            # OpenCV reads BGR; convert to RGB before preprocessing.
            img = img[:, :, ::-1]
            batch_imgs.append(img)
            img_name = os.path.basename(img_path)
            batch_names.append(img_name)
            cnt += 1

        # Run inference whenever a full batch is collected or the list is exhausted.
        if cnt % config["Global"]["batch_size"] == 0 or (idx + 1
                                                         ) == len(image_list):
            if len(batch_imgs) == 0:
                continue
            batch_results = rec_predictor.predict(batch_imgs)
            for number, result_dict in enumerate(batch_results):
                filename = batch_names[number]
                print("{}:\t{}".format(filename, result_dict))
            batch_imgs = []
            batch_names = []

    if rec_predictor.benchmark:
        rec_predictor.auto_logger.report()
    return


if __name__ == "__main__":
    args = config.parse_args()
    config = config.get_config(args.config, overrides=args.override, show=True)
    main(config)
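# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the original script): how RecPredictor
# might be used programmatically instead of via the CLI entry point above. It
# assumes a YAML config providing "Global.rec_inference_model_dir",
# "RecPreProcess" and "RecPostProcess" sections; the config path
# "configs/inference_rec.yaml" and the image "demo.jpg" are placeholders.
#
#   cfg = config.get_config("configs/inference_rec.yaml", show=False)
#   predictor = RecPredictor(cfg)
#   img = cv2.imread("demo.jpg")[:, :, ::-1]   # BGR -> RGB, as in main()
#   features = predictor.predict([img])        # one feature vector per image
#   print(features.shape)
# ---------------------------------------------------------------------------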