PaddlePaddle/Serving
Commit 637f710f
Authored Feb 02, 2020 by guru4elephant

add serving python api

Parent: 27993a90
Showing 7 changed files with 166 additions and 22 deletions (+166 −22)
CMakeLists.txt                                          +5  −5
cmake/paddlepaddle.cmake                                +8  −2
cmake/paddlepaddle.cmake_from_source                    +86 −0
examples/demo-serving/CMakeLists.txt                    +14 −8
paddle_inference/inferencer-fluid-cpu/CMakeLists.txt    +1  −1
python/paddle_serving/__init__.py                       +0  −1
python/paddle_serving/serving_server/__init__.py        +52 −5
CMakeLists.txt
@@ -46,11 +46,11 @@ set(THIRD_PARTY_PATH "${CMAKE_BINARY_DIR}/third_party" CACHE STRING
 set(THIRD_PARTY_BUILD_TYPE Release)
-option(WITH_AVX "Compile Paddle Serving with AVX intrinsics" ${AVX_FOUND})
-option(WITH_MKL "Compile Paddle Serving with MKL support." ${AVX_FOUND})
-option(WITH_GPU "Compile Paddle Serving with NVIDIA GPU" ${CUDA_FOUND})
-option(CLIENT_ONLY "Compile client libraries and demos only" FALSE)
-option(WITH_ELASTIC_CTR "Compile ELASITC-CTR solution" FALSE)
+option(WITH_AVX "Compile Paddle Serving with AVX intrinsics" OFF)
+option(WITH_MKL "Compile Paddle Serving with MKL support." OFF)
+option(WITH_GPU "Compile Paddle Serving with NVIDIA GPU" OFF)
+option(CLIENT_ONLY "Compile client libraries and demos only" OFF)
+option(WITH_ELASTIC_CTR "Compile ELASITC-CTR solution" OFF)
 set(WITH_MKLML ${WITH_MKL})
 if (NOT DEFINED WITH_MKLDNN)
cmake/paddlepaddle.cmake
@@ -36,7 +36,7 @@ SET(PADDLE_VERSION "latest")
 if (WITH_GPU)
     SET(PADDLE_LIB_VERSION "${PADDLE_VERSION}-gpu-cuda${CUDA_VERSION_MAJOR}-cudnn7-avx-mkl")
 else()
-    if (AVX_FOUND)
+    if (WITH_AVX)
         if (WITH_MKLML)
             SET(PADDLE_LIB_VERSION "${PADDLE_VERSION}-cpu-avx-mkl")
         else()
@@ -63,9 +63,12 @@ ExternalProject_Add(
         ${CMAKE_COMMAND} -E copy_directory ${PADDLE_DOWNLOAD_DIR}/paddle/include ${PADDLE_INSTALL_DIR}/include &&
         ${CMAKE_COMMAND} -E copy_directory ${PADDLE_DOWNLOAD_DIR}/paddle/lib ${PADDLE_INSTALL_DIR}/lib &&
         ${CMAKE_COMMAND} -E copy_directory ${PADDLE_DOWNLOAD_DIR}/third_party ${PADDLE_INSTALL_DIR}/third_party
-        #${CMAKE_COMMAND} -E copy ${PADDLE_INSTALL_DIR}/third_party/install/mkldnn/lib/libmkldnn.so.0 ${PADDLE_INSTALL_DIR}/third_party/install/mkldnn/lib/libmkldnn.so
 )
+if (WITH_MKLML)
+    file(COPY ${PADDLE_INSTALL_DIR}/third_party/install/mkldnn/lib/libmkldnn.so.0
+         DESTINATION ${PADDLE_INSTALL_DIR}/third_party/install/mkldnn/lib/libmkldnn.so FOLLOW_SYMLINK_CHAIN)
+endif()
 INCLUDE_DIRECTORIES(${PADDLE_INCLUDE_DIR})
 SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_RPATH}" "${PADDLE_INSTALL_DIR}/third_party/install/mklml/lib")
 LINK_DIRECTORIES(${PADDLE_INSTALL_DIR}/third_party/install/mklml/lib)

@@ -73,6 +76,9 @@ LINK_DIRECTORIES(${PADDLE_INSTALL_DIR}/third_party/install/mklml/lib)
 SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_RPATH}" "${PADDLE_INSTALL_DIR}/third_party/install/mkldnn/lib")
 LINK_DIRECTORIES(${PADDLE_INSTALL_DIR}/third_party/install/mkldnn/lib)
+ADD_LIBRARY(openblas STATIC IMPORTED GLOBAL)
+SET_PROPERTY(TARGET openblas PROPERTY IMPORTED_LOCATION ${PADDLE_INSTALL_DIR}/third_party/install/openblas/lib/libopenblas.a)
 ADD_LIBRARY(paddle_fluid STATIC IMPORTED GLOBAL)
 SET_PROPERTY(TARGET paddle_fluid PROPERTY IMPORTED_LOCATION ${PADDLE_INSTALL_DIR}/lib/libpaddle_fluid.a)
cmake/paddlepaddle.cmake_from_source
0 → 100644 (new file)
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
INCLUDE(ExternalProject)
SET(PADDLE_SOURCES_DIR ${THIRD_PARTY_PATH}/Paddle)
SET(PADDLE_INSTALL_DIR ${THIRD_PARTY_PATH}/install/Paddle/)
SET(PADDLE_INCLUDE_DIR "${PADDLE_INSTALL_DIR}/include" CACHE PATH "PaddlePaddle include directory." FORCE)
SET(PADDLE_LIBRARIES "${PADDLE_INSTALL_DIR}/lib/libpaddle_fluid.a" CACHE FILEPATH "Paddle library." FORCE)
INCLUDE_DIRECTORIES(${CMAKE_BINARY_DIR}/Paddle/fluid_install_dir)
# Reference https://stackoverflow.com/questions/45414507/pass-a-list-of-prefix-paths-to-externalproject-add-in-cmake-args
set(prefix_path "${THIRD_PARTY_PATH}/install/gflags|${THIRD_PARTY_PATH}/install/leveldb|${THIRD_PARTY_PATH}/install/snappy|${THIRD_PARTY_PATH}/install/gtest|${THIRD_PARTY_PATH}/install/protobuf|${THIRD_PARTY_PATH}/install/zlib|${THIRD_PARTY_PATH}/install/glog")
message( "WITH_GPU = ${WITH_GPU}")
# If minimal .a is need, you can set WITH_DEBUG_SYMBOLS=OFF
ExternalProject_Add(
extern_paddle
${EXTERNAL_PROJECT_LOG_ARGS}
# TODO(wangguibao): change to de newst repo when they changed.
GIT_REPOSITORY "https://github.com/PaddlePaddle/Paddle"
GIT_TAG "v1.5.1"
PREFIX ${PADDLE_SOURCES_DIR}
UPDATE_COMMAND ""
BINARY_DIR ${CMAKE_BINARY_DIR}/Paddle
CMAKE_ARGS -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
-DCMAKE_INSTALL_PREFIX=${PADDLE_INSTALL_DIR}
-DCMAKE_INSTALL_LIBDIR=${PADDLE_INSTALL_DIR}/lib
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
-DCMAKE_BUILD_TYPE=${THIRD_PARTY_BUILD_TYPE}
-DCMAKE_PREFIX_PATH=${prefix_path}
-DCMAKE_BINARY_DIR=${CMAKE_CURRENT_BINARY_DIR}
-DWITH_SWIG_PY=OFF
-DWITH_PYTHON=OFF
-DWITH_MKL=OFF
-DWITH_AVX=OFF
-DWITH_MKLDNN=OFF
-DWITH_GPU=OFF
-DWITH_FLUID_ONLY=ON
-DWITH_TESTING=OFF
-DWITH_DISTRIBUTE=OFF
-DON_INFER=ON
${EXTERNAL_OPTIONAL_ARGS}
LIST_SEPARATOR |
CMAKE_CACHE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=${PADDLE_INSTALL_DIR}
-DCMAKE_INSTALL_LIBDIR:PATH=${PADDLE_INSTALL_DIR}/lib
-DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=ON
-DCMAKE_BUILD_TYPE:STRING=${THIRD_PARTY_BUILD_TYPE}
BUILD_COMMAND $(MAKE)
INSTALL_COMMAND $(MAKE) fluid_lib_dist
)
ExternalProject_Get_Property(extern_paddle BINARY_DIR)
ADD_LIBRARY(paddle_fluid STATIC IMPORTED GLOBAL)
SET_PROPERTY(TARGET paddle_fluid PROPERTY IMPORTED_LOCATION ${BINARY_DIR}/fluid_install_dir/paddle/fluid/inference/libpaddle_fluid.a)
LIST(APPEND external_project_dependencies paddle)
ADD_LIBRARY(snappystream STATIC IMPORTED GLOBAL)
SET_PROPERTY(TARGET snappystream PROPERTY IMPORTED_LOCATION ${BINARY_DIR}/fluid_install_dir/third_party/install/snappystream/lib/libsnappystream.a)
ADD_LIBRARY(openblas STATIC IMPORTED GLOBAL)
SET_PROPERTY(TARGET openblas PROPERTY IMPORTED_LOCATION ${BINARY_DIR}/fluid_install_dir/third_party/install/openblas/lib/libopenblas.a)
ADD_LIBRARY(xxhash STATIC IMPORTED GLOBAL)
SET_PROPERTY(TARGET xxhash PROPERTY IMPORTED_LOCATION ${BINARY_DIR}/fluid_install_dir/third_party/install/xxhash/lib/libxxhash.a)
LIST(APPEND paddle_depend_libs
snappystream
snappy
xxhash
openblas)
examples/demo-serving/CMakeLists.txt
@@ -71,8 +71,14 @@ target_link_libraries(serving kvdb rocksdb)
 if(WITH_GPU)
     target_link_libraries(serving ${CUDA_LIBRARIES})
 endif()
-target_link_libraries(serving -liomp5 -lmklml_intel -lmkldnn -lpthread
-                      -lcrypto -lm -lrt -lssl -ldl -lz -lbz2)
+if (WITH_MKL)
+    message("lalalala: " ${WITH_MKL})
+    target_link_libraries(serving -liomp5 -lmklml_intel -lmkldnn -lpthread -lcrypto -lm -lrt -lssl -ldl -lz -lbz2)
+else()
+    message("hehehehe: " ${WITH_MKL})
+    target_link_libraries(serving openblas -lpthread -lcrypto -lm -lrt -lssl -ldl -lz -lbz2)
+endif()
 install(TARGETS serving
         RUNTIME DESTINATION
@@ -85,10 +91,10 @@ install(FILES ${inc}
         DESTINATION ${PADDLE_SERVING_INSTALL_DIR}/include/serving)
 if (${WITH_MKL})
     install(FILES
         ${CMAKE_BINARY_DIR}/third_party/install/Paddle/third_party/install/mklml/lib/libmklml_intel.so
         ${CMAKE_BINARY_DIR}/third_party/install/Paddle/third_party/install/mklml/lib/libiomp5.so
         ${CMAKE_BINARY_DIR}/third_party/install/Paddle/third_party/install/mkldnn/lib/libmkldnn.so.0
         DESTINATION ${PADDLE_SERVING_INSTALL_DIR}/demo/serving/bin)
 endif()
paddle_inference/inferencer-fluid-cpu/CMakeLists.txt
@@ -3,7 +3,7 @@ add_library(fluid_cpu_engine ${fluid_cpu_engine_srcs})
 target_include_directories(fluid_cpu_engine PUBLIC
         ${CMAKE_BINARY_DIR}/Paddle/fluid_install_dir/)
 add_dependencies(fluid_cpu_engine pdserving extern_paddle configure kvdb)
-target_link_libraries(fluid_cpu_engine pdserving paddle_fluid iomp5 mklml_intel -lpthread -lcrypto -lm -lrt -lssl -ldl -lz)
+target_link_libraries(fluid_cpu_engine pdserving paddle_fluid -lpthread -lcrypto -lm -lrt -lssl -ldl -lz)
 install(TARGETS fluid_cpu_engine
         ARCHIVE DESTINATION ${PADDLE_SERVING_INSTALL_DIR}/lib
python/paddle_serving/__init__.py
@@ -11,5 +11,4 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from .serving_client import Client
 from .io import save_model
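With the re-export removed, Client is no longer importable from the package root; a minimal sketch of the resulting import style, assuming the serving_client module continues to expose Client:

# Hypothetical imports after this commit (illustration only):
# save_model stays re-exported from the package root via .io,
# while Client must come from its own module.
from paddle_serving import save_model
from paddle_serving.serving_client import Client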
python/paddle_serving/serving_server/__init__.py
@@ -49,6 +49,27 @@ class Server(object):
         self.infer_service_conf = None
         self.model_toolkit_conf = None
         self.engine = None
+        self.workflow_fn = "workflow.prototxt"
+        self.resource_fn = "resource.prototxt"
+        self.infer_service_fn = "infer_service.prototxt"
+        self.model_toolkit_fn = "model_toolkit.prototxt"
+        self.workdir = ""
+        self.max_concurrency = 0
+        self.num_threads = 0
+        self.port = 8080
+        self.reload_interval_s = 10
+
+    def set_max_concurrency(self, concurrency):
+        self.max_concurrency = concurrency
+
+    def set_num_threads(self, threads):
+        self.num_threads = threads
+
+    def set_port(self, port):
+        self.port = port
+
+    def set_reload_interval(self, interval):
+        self.reload_interval_s = interval
+
     def set_op_sequence(self, op_seq):
         self.workflow_conf = op_seq
@@ -97,15 +118,18 @@ class Server(object):
         if workdir == None:
             workdir = "./tmp"
             os.system("mkdir {}".format(workdir))
+        else:
+            os.system("mkdir {}".format(workdir))
         self._prepare_resource(workdir)
         self._prepare_engine(self.config_file, device)
         self._prepare_infer_service(port)
         self.workdir = workdir
-        infer_service_fn = "{}/server_infer_service.prototxt".format(workdir)
-        workflow_fn = "{}/server_workflow.prototxt".format(workdir)
-        resource_fn = "{}/server_resource.prototxt".format(workdir)
-        model_toolkit_fn = "{}/server_model_toolkit.prototxt".format(workdir)
+        infer_service_fn = "{}/{}".format(workdir, self.infer_service_fn)
+        workflow_fn = "{}/{}".format(workdir, self.workflow_fn)
+        resource_fn = "{}/{}".format(workdir, self.resource_fn)
+        model_toolkit_fn = "{}/{}".format(workdir, self.model_toolkit_fn)
         self._write_pb_str(infer_service_fn, self.infer_service_conf)
         self._write_pb_str(workflow_fn, self.workflow_conf)
@@ -114,5 +138,28 @@ class Server(object):
     def run_server(self):
         # just run server with system command
-        os.system("./pdserving {}".format(self.workdir))
+        # currently we do not load cube
+        command = "./pdserving -enable_model_toolkit " \
+                  "-inferservice_path {} " \
+                  "-inferservice_file {} " \
+                  "-max_concurrency {} " \
+                  "-num_threads {} " \
+                  "-port {} " \
+                  "-reload_interval_s {} " \
+                  "-resource_path {} " \
+                  "-resource_file {} " \
+                  "-workflow_path {} " \
+                  "-workflow_file {} ".format(
+                      self.workdir, self.infer_service_fn,
+                      self.max_concurrency, self.num_threads,
+                      self.port, self.reload_interval_s,
+                      self.workdir, self.resource_fn,
+                      self.workdir, self.workflow_fn)
+        os.system(command)
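Taken together, the setters added in the first hunk and the command assembled in run_server suggest the following call pattern. This is a minimal usage sketch, not code from the commit: the prepare_server name and its keyword arguments are assumptions inferred from the second hunk, which calls _prepare_resource, _prepare_engine and _prepare_infer_service with workdir, device and port.

# Hypothetical usage of the new Server API (assumed names are noted in comments).
from paddle_serving.serving_server import Server

server = Server()
server.set_num_threads(4)         # worker threads inside the serving engine
server.set_max_concurrency(16)    # cap on concurrent requests
server.set_port(9292)             # overrides the default of 8080 set in __init__
server.set_reload_interval(10)    # seconds between model reload checks

# "prepare_server" is an assumed name for the method in the second hunk that
# writes the workflow/resource/infer_service/model_toolkit prototxt files into workdir.
server.prepare_server(workdir="./workdir", port=9292, device="cpu")
server.run_server()               # shells out to ./pdserving with the flags built above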