diff --git a/python/examples/imagenet/benchmark.py b/python/examples/imagenet/benchmark.py
index 6b21719e7b665906e7abd02a7a3b8aef50136685..00e9fbe75cd36eabe19f9804a34d001537f6aa31 100644
--- a/python/examples/imagenet/benchmark.py
+++ b/python/examples/imagenet/benchmark.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+#
 # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,13 +15,17 @@
 # limitations under the License.
 # pylint: disable=doc-string-missing
 
+from __future__ import unicode_literals, absolute_import
+import os
 import sys
-from image_reader import ImageReader
+import time
 from paddle_serving_client import Client
 from paddle_serving_client.utils import MultiThreadRunner
 from paddle_serving_client.utils import benchmark_args
-import time
-import os
+import requests
+import json
+import base64
+from image_reader import ImageReader
 
 args = benchmark_args()
 
@@ -31,30 +37,61 @@ def single_func(idx, resource):
     img_list = []
     for i in range(1000):
         img_list.append(open("./image_data/n01440764/" + file_list[i]).read())
+    profile_flags = False
+    if "FLAGS_profile_client" in os.environ and os.environ[
+            "FLAGS_profile_client"]:
+        profile_flags = True
     if args.request == "rpc":
         reader = ImageReader()
         fetch = ["score"]
         client = Client()
         client.load_client_config(args.model)
         client.connect([resource["endpoint"][idx % len(resource["endpoint"])]])
+        start = time.time()
+        for i in range(1000):
+            if args.batch_size >= 1:
+                feed_batch = []
+                i_start = time.time()
+                for bi in range(args.batch_size):
+                    img = reader.process_image(img_list[i])
+                    feed_batch.append({"image": img})
+                i_end = time.time()
+                if profile_flags:
+                    print("PROFILE\tpid:{}\timage_pre_0:{} image_pre_1:{}".
+                          format(os.getpid(),
+                                 int(round(i_start * 1000000)),
+                                 int(round(i_end * 1000000))))
+
+                result = client.predict(feed=feed_batch, fetch=fetch)
+            else:
+                print("unsupport batch size {}".format(args.batch_size))
+    elif args.request == "http":
+        py_version = 2
+        server = "http://" + resource["endpoint"][idx % len(resource[
+            "endpoint"])] + "/image/prediction"
         start = time.time()
-        for i in range(100):
-            img = reader.process_image(img_list[i])
-            fetch_map = client.predict(feed={"image": img}, fetch=["score"])
-        end = time.time()
-        return [[end - start]]
+        for i in range(1000):
+            if py_version == 2:
+                image = base64.b64encode(
+                    open("./image_data/n01440764/" + file_list[i]).read())
+            else:
+                image = base64.b64encode(open(image_path, "rb").read()).decode(
+                    "utf-8")
+            req = json.dumps({"feed": [{"image": image}], "fetch": ["score"]})
+            r = requests.post(
+                server, data=req, headers={"Content-Type": "application/json"})
+    end = time.time()
     return [[end - start]]
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     multi_thread_runner = MultiThreadRunner()
-    endpoint_list = ["127.0.0.1:9292"]
-    #card_num = 4
-    #for i in range(args.thread):
-    #    endpoint_list.append("127.0.0.1:{}".format(9295 + i % card_num))
+    endpoint_list = ["127.0.0.1:9696"]
+    #endpoint_list = endpoint_list + endpoint_list + endpoint_list
     result = multi_thread_runner.run(single_func, args.thread,
                                      {"endpoint": endpoint_list})
+    #result = single_func(0, {"endpoint": endpoint_list})
     avg_cost = 0
     for i in range(args.thread):
         avg_cost += result[0][i]
diff --git a/python/examples/imagenet/benchmark.sh b/python/examples/imagenet/benchmark.sh
index 16fadbbac6cd7e616d11135653cfbcfeebe6d4f2..618a62c063c0bc4955baf8516bc5bc93e4832394 100644
--- a/python/examples/imagenet/benchmark.sh
+++ b/python/examples/imagenet/benchmark.sh
@@ -1,9 +1,12 @@
 rm profile_log
-for thread_num in 1 2 4 8 16
+for thread_num in 1 2 4 8
 do
-    $PYTHONROOT/bin/python benchmark.py --thread $thread_num --model ResNet101_vd_client_config/serving_client_conf.prototxt --request rpc > profile 2>&1
+for batch_size in 1 2 4 8 16 32 64 128
+do
+    $PYTHONROOT/bin/python benchmark.py --thread $thread_num --batch_size $batch_size --model ResNet50_vd_client_config/serving_client_conf.prototxt --request rpc > profile 2>&1
     echo "========================================"
     echo "batch size : $batch_size" >> profile_log
     $PYTHONROOT/bin/python ../util/show_profile.py profile $thread_num >> profile_log
     tail -n 1 profile >> profile_log
 done
+done
diff --git a/python/examples/imagenet/benchmark_batch.py b/python/examples/imagenet/benchmark_batch.py
deleted file mode 100644
index 1646fb9a94d6953f90f9f4907aa74940f13c2730..0000000000000000000000000000000000000000
--- a/python/examples/imagenet/benchmark_batch.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# pylint: disable=doc-string-missing
-
-from __future__ import unicode_literals, absolute_import
-import os
-import sys
-import time
-from paddle_serving_client import Client
-from paddle_serving_client.utils import MultiThreadRunner
-from paddle_serving_client.utils import benchmark_args
-import requests
-import json
-import base64
-from image_reader import ImageReader
-
-args = benchmark_args()
-
-
-def single_func(idx, resource):
-    file_list = []
-    for file_name in os.listdir("./image_data/n01440764"):
-        file_list.append(file_name)
-    img_list = []
-    for i in range(1000):
-        img_list.append(open("./image_data/n01440764/" + file_list[i]).read())
-    profile_flags = False
-    if "FLAGS_profile_client" in os.environ and os.environ[
-            "FLAGS_profile_client"]:
-        profile_flags = True
-    if args.request == "rpc":
-        reader = ImageReader()
-        fetch = ["score"]
-        client = Client()
-        client.load_client_config(args.model)
-        client.connect([resource["endpoint"][idx % len(resource["endpoint"])]])
-        start = time.time()
-        for i in range(1000):
-            if args.batch_size >= 1:
-                feed_batch = []
-                i_start = time.time()
-                for bi in range(args.batch_size):
-                    img = reader.process_image(img_list[i])
-                    feed_batch.append({"image": img})
-                i_end = time.time()
-                if profile_flags:
-                    print("PROFILE\tpid:{}\timage_pre_0:{} image_pre_1:{}".
-                          format(os.getpid(),
-                                 int(round(i_start * 1000000)),
-                                 int(round(i_end * 1000000))))
-
-                result = client.predict(feed=feed_batch, fetch=fetch)
-            else:
-                print("unsupport batch size {}".format(args.batch_size))
-
-    elif args.request == "http":
-        py_version = 2
-        server = "http://" + resource["endpoint"][idx % len(resource[
-            "endpoint"])] + "/image/prediction"
-        start = time.time()
-        for i in range(1000):
-            if py_version == 2:
-                image = base64.b64encode(
-                    open("./image_data/n01440764/" + file_list[i]).read())
-            else:
-                image = base64.b64encode(open(image_path, "rb").read()).decode(
-                    "utf-8")
-            req = json.dumps({"feed": [{"image": image}], "fetch": ["score"]})
-            r = requests.post(
-                server, data=req, headers={"Content-Type": "application/json"})
-    end = time.time()
-    return [[end - start]]
-
-
-if __name__ == '__main__':
-    multi_thread_runner = MultiThreadRunner()
-    endpoint_list = ["127.0.0.1:9292"]
-    #endpoint_list = endpoint_list + endpoint_list + endpoint_list
-    result = multi_thread_runner.run(single_func, args.thread,
-                                     {"endpoint": endpoint_list})
-    #result = single_func(0, {"endpoint": endpoint_list})
-    avg_cost = 0
-    for i in range(args.thread):
-        avg_cost += result[0][i]
-    avg_cost = avg_cost / args.thread
-    print("average total cost {} s.".format(avg_cost))
diff --git a/python/examples/imagenet/benchmark_batch.py.lprof b/python/examples/imagenet/benchmark_batch.py.lprof
new file mode 100644
index 0000000000000000000000000000000000000000..7ff4f1411ded79aba3390e606193ec4fedacf06f
Binary files /dev/null and b/python/examples/imagenet/benchmark_batch.py.lprof differ
diff --git a/python/examples/imagenet/benchmark_batch.sh b/python/examples/imagenet/benchmark_batch.sh
deleted file mode 100644
index 4118ffcc755e6d47c69924efbb1b7d5474db8b00..0000000000000000000000000000000000000000
--- a/python/examples/imagenet/benchmark_batch.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-rm profile_log
-for thread_num in 1 2 4 8 16
-do
-for batch_size in 1 2 4 8 16 32 64 128 256 512
-do
-    $PYTHONROOT/bin/python benchmark_batch.py --thread $thread_num --batch_size $batch_size --model ResNet101_vd_client_config/serving_client_conf.prototxt --request rpc > profile 2>&1
-    echo "========================================"
-    echo "batch size : $batch_size" >> profile_log
-    $PYTHONROOT/bin/python ../util/show_profile.py profile $thread_num >> profile_log
-    tail -n 1 profile >> profile_log
-done
-done
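
Usage sketch (not part of the patch): a minimal way to drive the merged benchmark.py by hand, assuming only the flags the diff already exercises (--thread, --batch_size, --model, --request) and the FLAGS_profile_client environment check added to single_func; the config path and numbers are illustrative.

    # Enable the client-side preprocessing timestamps; single_func only prints
    # the PROFILE lines when FLAGS_profile_client is set and non-empty.
    export FLAGS_profile_client=1
    # Run 4 client threads with batch size 8 over RPC; the script itself targets
    # the hard-coded endpoint 127.0.0.1:9696 from its endpoint_list.
    $PYTHONROOT/bin/python benchmark.py --thread 4 --batch_size 8 \
        --model ResNet50_vd_client_config/serving_client_conf.prototxt --request rpc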