Commit 5e571c7b authored by barriery

Merge branch 'develop' of https://github.com/PaddlePaddle/Serving into pipeline-auto-batch

...@@ -14,11 +14,17 @@ https://paddle-serving.bj.bcebos.com/whl/paddle_serving_server-0.3.2-py2-none-an
## GPU server
### Python 3
```
-https://paddle-serving.bj.bcebos.com/whl/paddle_serving_server_gpu-0.3.2-py3-none-any.whl
+#cuda 9.0
+https://paddle-serving.bj.bcebos.com/whl/paddle_serving_server_gpu-0.3.2.post9-py3-none-any.whl
+#cuda 10.0
+https://paddle-serving.bj.bcebos.com/whl/paddle_serving_server_gpu-0.3.2.post10-py3-none-any.whl
```
### Python 2
```
-https://paddle-serving.bj.bcebos.com/whl/paddle_serving_server_gpu-0.3.2-py2-none-any.whl
+#cuda 9.0
+https://paddle-serving.bj.bcebos.com/whl/paddle_serving_server_gpu-0.3.2.post9-py2-none-any.whl
+#cuda 10.0
+https://paddle-serving.bj.bcebos.com/whl/paddle_serving_server_gpu-0.3.2.post10-py2-none-any.whl
```
## Client
......
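The `.post` suffix on the GPU wheel names encodes the CUDA major version (post9 for CUDA 9.0, post10 for CUDA 10.0). As a quick illustration, the download URL can be assembled like this; `gpu_wheel_url` is a hypothetical helper used only for this sketch, not code from the commit:

```
# Minimal sketch (not part of the repo): how the .postN tag in the wheel name
# maps to the CUDA major version of the server binary.
BASE = "https://paddle-serving.bj.bcebos.com/whl"

def gpu_wheel_url(version, cuda_major, py="py3"):
    # hypothetical helper; mirrors the naming used in the links above
    return "{}/paddle_serving_server_gpu-{}.post{}-{}-none-any.whl".format(
        BASE, version, cuda_major, py)

print(gpu_wheel_url("0.3.2", 9))   # ...post9-py3-none-any.whl
print(gpu_wheel_url("0.3.2", 10))  # ...post10-py3-none-any.whl
```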
...@@ -83,6 +83,7 @@ if (SERVER)
      OUTPUT ${PADDLE_SERVING_BINARY_DIR}/.timestamp
      COMMAND cp -r
      ${CMAKE_CURRENT_SOURCE_DIR}/paddle_serving_server_gpu/ ${PADDLE_SERVING_BINARY_DIR}/python/
+     COMMAND env ${py_env} ${PYTHON_EXECUTABLE} paddle_serving_server_gpu/gen_cuda_version.py ${CUDA_VERSION_MAJOR}
      COMMAND env ${py_env} ${PYTHON_EXECUTABLE} setup.py bdist_wheel
      DEPENDS ${SERVING_SERVER_CORE} server_config_py_proto ${PY_FILES})
  add_custom_target(paddle_python ALL DEPENDS ${PADDLE_SERVING_BINARY_DIR}/.timestamp)
......
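The added COMMAND stamps the CUDA major version into version.py before the wheel is built, so the packaged version tag matches the CUDA toolkit the server was built against. A rough Python equivalent of the two build commands, assuming they run from the python/ build directory where the copied sources live:

```
# Illustration only; the actual build is driven by the CMake custom command above.
import subprocess
import sys

cuda_major = "10"  # CMake passes ${CUDA_VERSION_MAJOR} here

# 1. stamp the CUDA major version into paddle_serving_server_gpu/version.py
subprocess.check_call(
    [sys.executable, "paddle_serving_server_gpu/gen_cuda_version.py", cuda_major])
# 2. then build the wheel, which picks up the stamped value
subprocess.check_call([sys.executable, "setup.py", "bdist_wheel"])
```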
...@@ -363,7 +363,15 @@ class Server(object):
    def download_bin(self):
        os.chdir(self.module_path)
        need_download = False
-        device_version = "serving-gpu-"
+        #acquire lock
+        version_file = open("{}/version.py".format(self.module_path), "r")
+        import re
+        for line in version_file.readlines():
+            if re.match("cuda_version", line):
+                cuda_version = line.split("\"")[1]
+                device_version = "serving-gpu-cuda" + cuda_version + "-"
        folder_name = device_version + serving_server_version
        tar_name = folder_name + ".tar.gz"
        bin_url = "https://paddle-serving.bj.bcebos.com/bin/" + tar_name
...@@ -372,8 +380,6 @@ class Server(object):
        download_flag = "{}/{}.is_download".format(self.module_path,
                                                   folder_name)
-        #acquire lock
-        version_file = open("{}/version.py".format(self.module_path), "r")
        fcntl.flock(version_file, fcntl.LOCK_EX)
        if os.path.exists(download_flag):
...@@ -385,6 +391,7 @@ class Server(object):
            os.system("touch {}/{}.is_download".format(self.module_path,
                                                       folder_name))
            print('First time run, downloading PaddleServing components ...')
            r = os.system('wget ' + bin_url + ' --no-check-certificate')
            if r != 0:
                if os.path.exists(tar_name):
......
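The net effect of the download_bin() change is that the name of the downloaded binary tarball now carries the CUDA version read from version.py. A standalone sketch of that step (hypothetical helper name, simplified, without the file locking shown above):

```
import os
import re

def resolve_folder_name(module_path, serving_server_version):
    # Hypothetical standalone version of the logic above: read cuda_version
    # from version.py and build the GPU binary folder name from it.
    device_version = "serving-gpu-"
    with open(os.path.join(module_path, "version.py")) as version_file:
        for line in version_file:
            if re.match("cuda_version", line):
                cuda_version = line.split("\"")[1]
                device_version = "serving-gpu-cuda" + cuda_version + "-"
    return device_version + serving_server_version

# e.g. with cuda_version = "9" and serving_server_version = "0.3.2" this yields
# "serving-gpu-cuda9-0.3.2", so the tarball fetched from bcebos becomes
# "serving-gpu-cuda9-0.3.2.tar.gz".
```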
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import re
import os

# Rewrite the cuda_version field in version.py with the CUDA major version
# passed on the command line (invoked from CMake with ${CUDA_VERSION_MAJOR}).
new_str = ""
with open("paddle_serving_server_gpu/version.py", "r") as f:
    for line in f.readlines():
        if re.match("cuda_version", line):
            line = re.sub(r"\d+", sys.argv[1], line)
        new_str = new_str + line

with open("paddle_serving_server_gpu/version.py", "w") as f:
    f.write(new_str)
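The build invokes this as `python paddle_serving_server_gpu/gen_cuda_version.py ${CUDA_VERSION_MAJOR}` (see the CMake hunk above), so the relative path resolves against the directory the command runs from. The substitution it performs on the cuda_version line can be checked in isolation:

```
# Standalone check of the re.sub call used by gen_cuda_version.py.
import re

line = 'cuda_version = "9"\n'
print(re.sub(r"\d+", "10", line))  # cuda_version = "10"
```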
...@@ -15,3 +15,4 @@
serving_client_version = "0.3.2"
serving_server_version = "0.3.2"
module_proto_version = "0.3.2"
+cuda_version = "9"
...@@ -41,7 +41,6 @@ REQUIRED_PACKAGES = [
    'paddle_serving_client', 'flask >= 1.1.1', 'paddle_serving_app'
]
packages=['paddle_serving_server_gpu',
          'paddle_serving_server_gpu.proto',
          'paddle_serving_server_gpu.pipeline',
...@@ -58,7 +57,7 @@ package_dir={'paddle_serving_server_gpu':
setup(
    name='paddle-serving-server-gpu',
-    version=serving_server_version.replace('-', ''),
+    version=serving_server_version.replace('-', '') + '.post@CUDA_VERSION_MAJOR@',
    description=
    ('Paddle Serving Package for saved model with PaddlePaddle'),
    url='https://github.com/PaddlePaddle/Serving',
......
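Once the @CUDA_VERSION_MAJOR@ placeholder has been filled in by the build (how it is substituted is not shown in this diff), the package version carries the same .post tag as the wheel filenames listed at the top. For example, with a CUDA major of 9:

```
# Illustration of the version string setup.py produces after substitution.
serving_server_version = "0.3.2"
cuda_version_major = "9"  # value substituted for @CUDA_VERSION_MAJOR@ (assumed here)

version = serving_server_version.replace('-', '') + '.post' + cuda_version_major
print(version)  # 0.3.2.post9
```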