diff --git a/doc/IMDB_GO_CLIENT.md b/doc/deprecated/IMDB_GO_CLIENT.md
similarity index 100%
rename from doc/IMDB_GO_CLIENT.md
rename to doc/deprecated/IMDB_GO_CLIENT.md
diff --git a/doc/IMDB_GO_CLIENT_CN.md b/doc/deprecated/IMDB_GO_CLIENT_CN.md
similarity index 100%
rename from doc/IMDB_GO_CLIENT_CN.md
rename to doc/deprecated/IMDB_GO_CLIENT_CN.md
diff --git a/doc/MODEL_ENSEMBLE_IN_PADDLE_SERVING.md b/doc/deprecated/MODEL_ENSEMBLE_IN_PADDLE_SERVING.md
similarity index 100%
rename from doc/MODEL_ENSEMBLE_IN_PADDLE_SERVING.md
rename to doc/deprecated/MODEL_ENSEMBLE_IN_PADDLE_SERVING.md
diff --git a/doc/MODEL_ENSEMBLE_IN_PADDLE_SERVING_CN.md b/doc/deprecated/MODEL_ENSEMBLE_IN_PADDLE_SERVING_CN.md
similarity index 100%
rename from doc/MODEL_ENSEMBLE_IN_PADDLE_SERVING_CN.md
rename to doc/deprecated/MODEL_ENSEMBLE_IN_PADDLE_SERVING_CN.md
diff --git a/doc/NEW_OPERATOR.md b/doc/deprecated/NEW_OPERATOR.md
similarity index 100%
rename from doc/NEW_OPERATOR.md
rename to doc/deprecated/NEW_OPERATOR.md
diff --git a/doc/NEW_OPERATOR_CN.md b/doc/deprecated/NEW_OPERATOR_CN.md
similarity index 100%
rename from doc/NEW_OPERATOR_CN.md
rename to doc/deprecated/NEW_OPERATOR_CN.md
diff --git a/python/examples/ocr/rec_web_server.py b/python/examples/ocr/rec_web_server.py
index 300c26be8f6b33c0cdd4a57e75648e444a25d763..a3de120aff910f72a224a61cdc67d1ff50e65ab2 100644
--- a/python/examples/ocr/rec_web_server.py
+++ b/python/examples/ocr/rec_web_server.py
@@ -43,25 +43,21 @@ class OCRService(WebService):
             data = np.fromstring(data, np.uint8)
             im = cv2.imdecode(data, cv2.IMREAD_COLOR)
             img_list.append(im)
-        feed_list = []
         max_wh_ratio = 0
         for i, boximg in enumerate(img_list):
             h, w = boximg.shape[0:2]
             wh_ratio = w * 1.0 / h
             max_wh_ratio = max(max_wh_ratio, wh_ratio)
-        for img in img_list:
+        _, w, h = self.ocr_reader.resize_norm_img(img_list[0],
+                                                  max_wh_ratio).shape
+        imgs = np.zeros((len(img_list), 3, w, h)).astype('float32')
+        for i, img in enumerate(img_list):
             norm_img = self.ocr_reader.resize_norm_img(img, max_wh_ratio)
-            #feed = {"image": norm_img}
-            feed_list.append(norm_img)
-        if len(feed_list) == 1:
-            feed_batch = {
-                "image": np.concatenate(
-                    feed_list, axis=0)[np.newaxis, :]
-            }
-        else:
-            feed_batch = {"image": np.concatenate(feed_list, axis=0)}
+            imgs[i] = norm_img
+
+        feed = {"image": imgs.copy()}
         fetch = ["ctc_greedy_decoder_0.tmp_0", "softmax_0.tmp_0"]
-        return feed_batch, fetch, True
+        return feed, fetch, True

     def postprocess(self, feed={}, fetch=[], fetch_map=None):
         rec_res = self.ocr_reader.postprocess(fetch_map, with_score=True)
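Note on the rec_web_server.py change above: instead of collecting normalized crops in a list and concatenating them, preprocess now probes the first crop to learn the common output shape, pre-allocates one float32 batch array, and copies every normalized crop into it, so single-image and multi-image requests take the same code path. Below is a standalone sketch of that batching pattern; build_batch is an illustrative name, and resize_norm_img stands in for the OCR reader helper used in the diff, assumed to return a CHW float32 array of identical shape for every crop once the widest aspect ratio is known.

import numpy as np


def build_batch(img_list, resize_norm_img):
    # The widest width/height ratio decides the padded width for every crop.
    max_wh_ratio = max(w * 1.0 / h
                       for h, w in (img.shape[0:2] for img in img_list))
    # Probe the first crop to learn the common (C, H, W) output shape.
    c, h, w = resize_norm_img(img_list[0], max_wh_ratio).shape
    # Pre-allocate the batch and fill it row by row.
    batch = np.zeros((len(img_list), c, h, w), dtype='float32')
    for i, img in enumerate(img_list):
        batch[i] = resize_norm_img(img, max_wh_ratio)
    return {"image": batch}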
diff --git a/python/pipeline/channel.py b/python/pipeline/channel.py
index 9f06c445d1366ed9151b42f2f6532de8ac986758..e4c38b9e93828eed692ec0bcdb46ef8216c8a4d4 100644
--- a/python/pipeline/channel.py
+++ b/python/pipeline/channel.py
@@ -326,7 +326,10 @@ class ProcessChannel(object):
             with self._cv:
                 while self._stop.value == 0:
                     try:
-                        self._que.put({op_name: channeldata}, timeout=0)
+                        self._que.put((channeldata.id, {
+                            op_name: channeldata
+                        }),
+                                      timeout=0)
                         break
                     except Queue.Full:
                         self._cv.wait()
@@ -378,7 +381,7 @@ class ProcessChannel(object):
             else:
                 while self._stop.value == 0:
                     try:
-                        self._que.put(put_data, timeout=0)
+                        self._que.put((data_id, put_data), timeout=0)
                         break
                     except Queue.Empty:
                         self._cv.wait()
@@ -414,7 +417,7 @@ class ProcessChannel(object):
             with self._cv:
                 while self._stop.value == 0 and resp is None:
                     try:
-                        resp = self._que.get(timeout=0)
+                        resp = self._que.get(timeout=0)[1]
                         break
                     except Queue.Empty:
                         if timeout is not None:
@@ -459,7 +462,7 @@ class ProcessChannel(object):
                 while self._stop.value == 0 and self._consumer_cursors[
                         op_name] - self._base_cursor.value >= len(self._output_buf):
                     try:
-                        channeldata = self._que.get(timeout=0)
+                        channeldata = self._que.get(timeout=0)[1]
                         self._output_buf.append(channeldata)
                         list_values = list(channeldata.values())
                         _LOGGER.debug(
@@ -633,7 +636,10 @@ class ThreadChannel(Queue.PriorityQueue):
             with self._cv:
                 while self._stop is False:
                     try:
-                        self.put({op_name: channeldata}, timeout=0)
+                        self.put((channeldata.id, {
+                            op_name: channeldata
+                        }),
+                                 timeout=0)
                         break
                     except Queue.Full:
                         self._cv.wait()
@@ -680,7 +686,7 @@ class ThreadChannel(Queue.PriorityQueue):
             else:
                 while self._stop is False:
                     try:
-                        self.put(put_data, timeout=0)
+                        self.put((data_id, put_data), timeout=0)
                         break
                     except Queue.Empty:
                         self._cv.wait()
@@ -716,7 +722,7 @@ class ThreadChannel(Queue.PriorityQueue):
            with self._cv:
                while self._stop is False and resp is None:
                    try:
-                       resp = self.get(timeout=0)
+                       resp = self.get(timeout=0)[1]
                        break
                    except Queue.Empty:
                        if timeout is not None:
diff --git a/python/pipeline/profiler.py b/python/pipeline/profiler.py
index b83bdd1dc8c5c948353c8ee95f51fe325e38dbfc..18845a7578b3e435f86dbe4c85e559b29cb65ddd 100644
--- a/python/pipeline/profiler.py
+++ b/python/pipeline/profiler.py
@@ -120,7 +120,7 @@ class PerformanceTracer(object):
                         tot_cost))

             if "DAG" in op_cost:
-                calls = op_cost["DAG"].values()
+                calls = list(op_cost["DAG"].values())
                 calls.sort()
                 tot = len(calls)
                 qps = 1.0 * tot / self._interval_s
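The profiler.py hunk above is a Python 3 compatibility fix: dict.values() returns a view object in Python 3, and views have no sort() method, so the values must be copied into a list before sorting. A minimal illustration, with a made-up op_cost payload:

op_cost = {"DAG": {"call_0": 0.012, "call_1": 0.008, "call_2": 0.020}}

calls = list(op_cost["DAG"].values())  # copy the view; works on Python 2 and 3
calls.sort()
print(calls)  # [0.008, 0.012, 0.02]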
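The channel.py changes earlier in the patch wrap every queued item as a (data_id, payload) tuple and unwrap it with [1] on the consumer side. Since ThreadChannel derives from Queue.PriorityQueue, putting the numeric request id first makes entries come out in ascending id order; the ProcessChannel side mirrors the same (data_id, payload) convention. A hedged sketch of that ordering with the standard library queue module, using made-up payloads:

import queue

q = queue.PriorityQueue()
q.put((3, {"op_a": "data for request 3"}))
q.put((1, {"op_a": "data for request 1"}))
q.put((2, {"op_a": "data for request 2"}))

while not q.empty():
    data_id, payload = q.get()  # smallest data_id first: 1, 2, 3
    print(data_id, payload)

One caveat: if two entries carried the same id, the tuple comparison would fall back to comparing the dict payloads and raise TypeError on Python 3, so ids are assumed unique per request here.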