PaddlePaddle / Serving
Commit d457e5a5
Authored Feb 22, 2019 by wangguibao

Commit message:
    20190222
    Change-Id: Iadcfd4a3c92f2b91dd6fbcb5fa81f01f605c9ec2

Parent: 2ed3094a

Showing 15 changed files with 435 additions and 66 deletions (+435 -66)
CMakeLists.txt                                +1    -0
predictor/CMakeLists.txt                      +17   -7
predictor/common/constant.cpp                 +4    -10
predictor/common/inner_common.h               +3    -1
predictor/framework/infer.h                   +33   -31
predictor/framework/manager.h                 +5    -5
predictor/framework/resource.cpp              +7    -5
predictor/framework/server.cpp                +0    -5
predictor/framework/service.h                 +1    -1
predictor/framework/workflow.h                +1    -1
proto_configure/CMakeLists.txt                +18   -0
proto_configure/include/configure_parser.h    +17   -0
proto_configure/proto/configure.proto         +66   -0
proto_configure/src/configure_parser.cpp      +58   -0
proto_configure/tests/test_configure.cpp      +204  -0

CMakeLists.txt
@@ -101,6 +101,7 @@ add_subdirectory(bsl)
 add_subdirectory(ullib)
 add_subdirectory(spreg)
 add_subdirectory(configure)
+add_subdirectory(proto_configure)
 add_subdirectory(mempool)
 add_subdirectory(predictor)
 add_subdirectory(inferencer-fluid-cpu)

predictor/CMakeLists.txt
@@ -17,49 +17,59 @@ set_source_files_properties(
     PROPERTIES
     COMPILE_FLAGS "-Wno-strict-aliasing -Wno-unused-variable -Wno-non-virtual-dtor -Wno-error=non-virtual-dtor -Wno-error=delete-non-virtual-dtor")

 add_dependencies(pdserving protobuf boost brpc leveldb bsl pdcodegen configure
-        ullib spreg mempool)
+        proto_configure ullib spreg mempool)
 target_include_directories(pdserving PUBLIC
         ${CMAKE_CURRENT_LIST_DIR}/
         ${CMAKE_CURRENT_BINARY_DIR}/
+        ${CMAKE_CURRENT_BINARY_DIR}/../proto_configure
         ${CMAKE_CURRENT_LIST_DIR}/../configure
+        ${CMAKE_CURRENT_LIST_DIR}/../proto_configure/include
         ${CMAKE_CURRENT_LIST_DIR}/../mempool
         ${CMAKE_CURRENT_LIST_DIR}/../spreg
         ${CMAKE_CURRENT_LIST_DIR}/../ullib/include
         ${CMAKE_CURRENT_BINARY_DIR}/../bsl/include)
-target_link_libraries(pdserving brpc protobuf boost leveldb bsl
-        configure ullib spreg mempool -lpthread -lcrypto -lm -lrt -lssl -ldl -lz)
+target_link_libraries(pdserving
+        brpc protobuf boost leveldb bsl configure proto_configure ullib spreg
+        mempool -lpthread -lcrypto -lm -lrt -lssl -ldl -lz)

 add_executable(pdserving_exe ${pdserving_srcs})
 set_source_files_properties(${pdserving_srcs}
     PROPERTIES
     COMPILE_FLAGS "-Wno-strict-aliasing -Wno-unused-variable -Wno-non-virtual-dtor -Wno-error=non-virtual-dtor -Wno-error=delete-non-virtual-dtor")
-add_dependencies(pdserving_exe protobuf boost brpc leveldb bsl pdcodegen
-        configure ullib spreg mempool)
+add_dependencies(pdserving_exe
+        protobuf boost brpc leveldb bsl pdcodegen configure proto_configure
+        ullib spreg mempool)
 target_include_directories(pdserving_exe PUBLIC
         ${CMAKE_CURRENT_LIST_DIR}/
         ${CMAKE_CURRENT_BINARY_DIR}/
+        ${CMAKE_CURRENT_BINARY_DIR}/../proto_configure
         ${CMAKE_CURRENT_LIST_DIR}/../configure
+        ${CMAKE_CURRENT_LIST_DIR}/../proto_configure/include
         ${CMAKE_CURRENT_LIST_DIR}/../mempool
         ${CMAKE_CURRENT_LIST_DIR}/../spreg
         ${CMAKE_CURRENT_LIST_DIR}/../ullib/include
         ${CMAKE_CURRENT_BINARY_DIR}/../bsl/include)
-target_link_libraries(pdserving_exe brpc protobuf leveldb bsl configure ullib
-        spreg mempool -lpthread -lcrypto -lm -lrt -lssl -ldl -lz)
+target_link_libraries(pdserving_exe brpc protobuf leveldb bsl configure
+        proto_configure ullib spreg mempool -lpthread -lcrypto -lm -lrt -lssl
+        -ldl -lz)

 add_library(pdclient ${pdclient_srcs})
 set_source_files_properties(${pdclient_srcs}
     PROPERTIES
     COMPILE_FLAGS "-Wno-strict-aliasing -Wno-unused-variable -Wno-non-virtual-dtor -Wno-error=non-virtual-dtor -Wno-error=delete-non-virtual-dtor")
-add_dependencies(pdclient protobuf boost brpc pdcodegen)
+add_dependencies(pdclient protobuf boost brpc pdcodegen proto_configure)
 target_include_directories(pdclient PUBLIC
         ${CMAKE_CURRENT_LIST_DIR}/
         ${CMAKE_CURRENT_BINARY_DIR}/
+        ${CMAKE_CURRENT_BINARY_DIR}/../proto_configure
         ${CMAKE_CURRENT_LIST_DIR}/../configure
+        ${CMAKE_CURRENT_LIST_DIR}/../proto_configure/include
         ${CMAKE_CURRENT_LIST_DIR}/../mempool
         ${CMAKE_CURRENT_LIST_DIR}/../spreg
         ${CMAKE_CURRENT_LIST_DIR}/../ullib/include

predictor/common/constant.cpp
@@ -9,24 +9,18 @@ DEFINE_int32(el_log_level, 16, "");
 DEFINE_int32(idle_timeout_s, 16, "");
 DEFINE_int32(port, 8010, "");
 DEFINE_string(workflow_path, "./conf", "");
-DEFINE_string(workflow_file, "workflow.conf", "");
+DEFINE_string(workflow_file, "workflow.prototxt", "");
 DEFINE_string(inferservice_path, "./conf", "");
-DEFINE_string(inferservice_file, "service.conf", "");
+DEFINE_string(inferservice_file, "service.prototxt", "");
 DEFINE_string(logger_path, "./conf", "");
 DEFINE_string(logger_file, "log.conf", "");
 DEFINE_string(resource_path, "./conf", "");
-DEFINE_string(resource_file, "resource.conf", "");
-DEFINE_bool(enable_yacl, false, "enable yacl");
-DEFINE_string(yacl_module_name, "predictor", "yacl module name");
-DEFINE_string(yacl_param_dump_file, "./data/yacl_param_list.txt", "yacl param dump file path");
-DEFINE_bool(enable_mc_cache, false, "enable mc cache");
-DEFINE_bool(enable_nshead_protocol, false, "enable nshead protocol in server side");
-DEFINE_string(nshead_protocol, "itp", "type of nshead protocol, support itp, nova_pbrpc, public_pbrpc, nshead_mcpack");
+DEFINE_string(resource_file, "resource.prototxt", "");
 DEFINE_int32(max_concurrency, 0, "Limit of request processing in parallel, 0: unlimited");
 DEFINE_int32(num_threads, 0, "Number of pthreads that server runs on, not change if this value <= 0");
 DEFINE_int32(reload_interval_s, 10, "");
 DEFINE_bool(enable_model_toolkit, false, "enable model toolkit");
-DEFINE_string(enable_protocol_list, "baidu_std nshead", "set protocol list");
+DEFINE_string(enable_protocol_list, "baidu_std", "set protocol list");
 const char* START_OP_NAME = "startup_op";
 } // predictor

predictor/common/inner_common.h
@@ -26,7 +26,9 @@
 #include <error.h>
 #include "Configure.h"
+// #include <comlog/comlog.h>
+#include "configure.pb.h"
+#include "configure_parser.h"
 #include "common/utils.h"
 #include "common/types.h"

predictor/framework/infer.h
@@ -13,12 +13,14 @@ namespace baidu {
 namespace paddle_serving {
 namespace predictor {

+using configure::ModelToolkitConf;
+
 class InferEngine {
 public:
     virtual ~InferEngine() {}
-    virtual int proc_initialize(const comcfg::ConfigUnit& conf, bool version) {
+    virtual int proc_initialize(const configure::EngineDesc& conf, bool version) {
         return proc_initialize_impl(conf, version);
     }
     virtual int proc_finalize() {
@@ -43,7 +45,7 @@ public:
     // begin: framework inner call
     virtual int proc_initialize_impl(
-            const comcfg::ConfigUnit& conf, bool version) = 0;
+            const configure::EngineDesc& conf, bool version) = 0;
     virtual int thrd_initialize_impl() = 0;
     virtual int thrd_finalize_impl() = 0;
     virtual int thrd_clear_impl() = 0;
@@ -68,13 +70,13 @@ public:
     virtual int load(const std::string& data_path) = 0;

-    int proc_initialize_impl(const comcfg::ConfigUnit& conf, bool version) {
-        _reload_tag_file = conf["ReloadableMeta"].to_cstr();
-        _reload_mode_tag = conf["ReloadableType"].to_cstr();
-        _model_data_path = conf["ModelDataPath"].to_cstr();
-        _infer_thread_num = conf["RuntimeThreadNum"].to_uint32();
-        _infer_batch_size = conf["BatchInferSize"].to_uint32();
-        _infer_batch_align = conf["EnableBatchAlign"].to_uint32();
+    int proc_initialize_impl(const configure::EngineDesc& conf, bool version) {
+        _reload_tag_file = conf.reloadable_meta();
+        _reload_mode_tag = conf.reloadable_type();
+        _model_data_path = conf.model_data_path();
+        _infer_thread_num = conf.runtime_thread_num();
+        _infer_batch_size = conf.batch_infer_size();
+        _infer_batch_align = conf.enable_batch_align();
         if (!check_need_reload() || load(_model_data_path) != 0) {
             LOG(FATAL) << "Failed load model_data_path" << _model_data_path;
             return -1;
@@ -89,7 +91,7 @@ public:
         return 0;
     }

-    int proc_initialize(const comcfg::ConfigUnit& conf, bool version) {
+    int proc_initialize(const configure::EngineDesc& conf, bool version) {
         if (proc_initialize_impl(conf, version) != 0) {
             LOG(FATAL) << "Failed proc initialize impl";
             return -1;
@@ -178,10 +180,10 @@ public:
     }

 private:
-    int parse_version_info(const comcfg::ConfigUnit& config, bool version) {
+    int parse_version_info(const configure::EngineDesc& config, bool version) {
         try {
-            std::string version_file = config["VersionFile"].to_cstr();
-            std::string version_type = config["VersionType"].to_cstr();
+            std::string version_file = config.version_file();
+            std::string version_type = config.version_type();
             if (version_type == "abacus_version") {
                 if (parse_abacus_version(version_file) != 0) {
@@ -387,7 +389,7 @@ class DBReloadableInferEngine : public ReloadableInferEngine {
 public:
     virtual ~DBReloadableInferEngine() {}

-    int proc_initialize(const comcfg::ConfigUnit& conf, bool version) {
+    int proc_initialize(const configure::EngineDesc& conf, bool version) {
         THREAD_KEY_CREATE(&_skey, NULL);
         THREAD_MUTEX_INIT(&_mutex, NULL);
         return ReloadableInferEngine::proc_initialize(conf, version);
@@ -486,7 +488,7 @@ class CloneDBReloadableInferEngine : public DBReloadableInferEngine<EngineCore>
 public:
     virtual ~CloneDBReloadableInferEngine() {}

-    virtual int proc_initialize(const comcfg::ConfigUnit& conf, bool version) {
+    virtual int proc_initialize(const configure::EngineDesc& conf, bool version) {
         _pd = new (std::nothrow) ModelData<EngineCore>;
         if (!_pd) {
             LOG(FATAL) << "Failed to allocate for ProcData";
@@ -754,30 +756,30 @@ public:
     }
     ~VersionedInferEngine() {}

-    int proc_initialize(const comcfg::ConfigUnit& conf) {
-        size_t version_num = conf["Version"].size();
+    int proc_initialize(const configure::VersionedEngine& conf) {
+        size_t version_num = conf.versions_size();
         for (size_t vi = 0; vi < version_num; ++vi) {
-            if (proc_initialize(conf["Version"][vi], true) != 0) {
+            if (proc_initialize(conf.versions(vi), true) != 0) {
                 LOG(FATAL) << "Failed proc initialize version: "
-                    << vi << ", model: " << conf["Name"].to_cstr();
+                    << vi << ", model: " << conf.name().c_str();
                 return -1;
             }
         }

         if (version_num == 0) {
-            if (proc_initialize(conf, false) != 0) {
+            if (proc_initialize(conf.default_version(), false) != 0) {
                 LOG(FATAL) << "Failed proc intialize engine: "
-                    << conf["Name"].to_cstr();
+                    << conf.name().c_str();
                 return -1;
             }
         }

         LOG(WARNING)
-            << "Succ proc initialize engine: " << conf["Name"].to_cstr();
+            << "Succ proc initialize engine: " << conf.name().c_str();
         return 0;
     }

-    int proc_initialize(const comcfg::ConfigUnit& conf, bool version) {
-        std::string engine_type = conf["Type"].to_cstr();
+    int proc_initialize(const configure::EngineDesc& conf, bool version) {
+        std::string engine_type = conf.type();
         InferEngine* engine
             = StaticInferFactory::instance().generate_object(
                 engine_type);
@@ -938,7 +940,7 @@ public:
     }

     // --
-    int proc_initialize_impl(const comcfg::ConfigUnit& conf, bool) { return -1; }
+    int proc_initialize_impl(const configure::EngineDesc& conf, bool) { return -1; }
     int thrd_initialize_impl() { return -1; }
     int thrd_finalize_impl() { return -1; }
     int thrd_clear_impl() { return -1; }
@@ -958,23 +960,23 @@ public:
     }

     int proc_initialize(const char* path, const char* file) {
-        comcfg::Configure conf;
-        if (conf.load(path, file) != 0) {
-            LOG(FATAL) << "failed load infer config, path:"
+        ModelToolkitConf model_toolkit_conf;
+        if (configure::read_proto_conf(path, file, &model_toolkit_conf) != 0) {
+            LOG(FATAL) << "failed load infer config, path: "
                 << path << "/" << file;
             return -1;
         }

-        size_t engine_num = conf["Engine"].size();
+        size_t engine_num = model_toolkit_conf.engines_size();
         for (size_t ei = 0; ei < engine_num; ++ei) {
-            std::string engine_name = conf["Engine"][ei]["Name"].to_cstr();
+            std::string engine_name = model_toolkit_conf.engines(ei).name();
             VersionedInferEngine* engine = new (std::nothrow) VersionedInferEngine();
             if (!engine) {
                 LOG(FATAL) << "Failed generate versioned engine: " << engine_name;
                 return -1;
             }

-            if (engine->proc_initialize(conf["Engine"][ei]) != 0) {
+            if (engine->proc_initialize(model_toolkit_conf.engines(ei)) != 0) {
                 LOG(FATAL) << "Failed initialize version engine, name:"
                     << engine_name;
                 return -1;
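
With this change the infer framework reads its engine configuration through configure::read_proto_conf() instead of comcfg. For reference, a model_toolkit.prototxt in the new text format could look roughly like the sketch below; the field names come from proto_configure/proto/configure.proto and the values mirror those used in proto_configure/tests/test_configure.cpp, so treat it as an illustration rather than a file added by this commit:

    engines {
      name: "image_classification_resnet"
      default_version {
        type: "FLUID_CPU_NATIVE_V2"
        reloadable_meta: "./data/model/paddle/fluid_time_file"
        reloadable_type: "timestamp_ne"
        model_data_path: "./data/model/paddle/fluid/SE_ResNeXt50_32x4d"
        runtime_thread_num: 0
        batch_infer_size: 0
        enable_batch_align: 0
      }
    }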

predictor/framework/manager.h
@@ -10,6 +10,8 @@ namespace baidu {
 namespace paddle_serving {
 namespace predictor {

+using configure::WorkflowConf;
+
 class Workflow;
 //class InferService;
 //class ParallelInferService;
@@ -37,11 +39,9 @@ public:
     }

     int initialize(const std::string path, const std::string file) {
-        comcfg::Configure conf;
-        if (conf.load(path.c_str(), file.c_str()) != 0) {
+        WorkflowConf workflow_conf;
+        if (configure::read_proto_conf(path, file, &workflow_conf) != 0) {
             LOG(FATAL)
-                << "Failed load manager<" << typeid<T>.name()
+                << "Failed load manager<" << typeid(T).name()
                 << "> configure!";
             return -1;
         }
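
The manager now loads its configuration as a WorkflowConf message parsed from protobuf text format. A workflow.prototxt in the new format would look roughly like this sketch (message layout from configure.proto, node names borrowed from the test program below; illustrative only):

    workflow {
      name: "workflow1"
      workflow_type: "Sequence"
      nodes {
        name: "image_reader_op"
        type: "ReaderOp"
      }
      nodes {
        name: "image_classify_op"
        type: "ClassifyOp"
        dependencies {
          name: "image_reader_op"
          mode: "RO"
        }
      }
    }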

predictor/framework/resource.cpp
@@ -6,6 +6,8 @@ namespace baidu {
 namespace paddle_serving {
 namespace predictor {

+using configure::ResourceConf;
+
 // __thread bool p_thread_initialized = false;

 static void dynamic_resource_deleter(void* d) {
@@ -28,9 +30,9 @@ int DynamicResource::clear() {
 }

 int Resource::initialize(const std::string& path, const std::string& file) {
-    comcfg::Configure conf;
-    if (conf.load(path.c_str(), file.c_str()) != 0) {
+    ResourceConf resource_conf;
+    if (configure::read_proto_conf(path, file, &resource_conf) != 0) {
         LOG(ERROR) << "Failed initialize resource from: "
             << path << "/" << file;
         return -1;
     }
@@ -44,13 +46,13 @@ int Resource::initialize(const std::string& path, const std::string& file) {
     if (FLAGS_enable_model_toolkit) {
         int err = 0;
-        std::string model_toolkit_path = conf["model_toolkit_path"].to_cstr(&err);
+        std::string model_toolkit_path = resource_conf.model_toolkit_path();
         if (err != 0) {
             LOG(ERROR) << "read model_toolkit_path failed, path["
                 << path << "], file[" << file << "]";
             return -1;
         }
-        std::string model_toolkit_file = conf["model_toolkit_file"].to_cstr(&err);
+        std::string model_toolkit_file = resource_conf.model_toolkit_file();
         if (err != 0) {
             LOG(ERROR) << "read model_toolkit_file failed, path["
                 << path << "], file[" << file << "]";
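
Resource::initialize() now expects a resource.prototxt in protobuf text format. A minimal sketch of such a file, assuming the model toolkit config sits alongside it under ./conf/ (values illustrative, field names from configure.proto):

    model_toolkit_path: "./conf/"
    model_toolkit_file: "model_toolkit.prototxt"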

predictor/framework/server.cpp
@@ -24,11 +24,6 @@ bool ServerManager::_compare_string_piece_without_case(
 ServerManager::ServerManager() {
     _format_services.clear();
     _options.idle_timeout_sec = FLAGS_idle_timeout_s;
-    if (FLAGS_enable_nshead_protocol) {
-        LOG(INFO) << "FLAGS_enable_nshead_protocol on, try to set FLAGS_nshead_protocol["
-            << FLAGS_nshead_protocol << "] in server side";
-        _set_server_option_by_protocol(FLAGS_nshead_protocol);
-    }
     _options.max_concurrency = FLAGS_max_concurrency;
     _options.num_threads = FLAGS_num_threads;
 }

predictor/framework/service.h
@@ -14,7 +14,7 @@ public:
     typedef OpChannel<google::protobuf::Message> BuiltinChannel;

     static const char* tag() {
-        return "Service";
+        return "service";
     }

     InferService() :

predictor/framework/workflow.h
@@ -17,7 +17,7 @@ public:
     Workflow() {}

     static const char* tag() {
-        return "Workflow";
+        return "workflow";
     }

     // Each workflow object corresponds to an independent

proto_configure/CMakeLists.txt (new file, mode 100644)

LIST(APPEND protofiles
    ${CMAKE_CURRENT_LIST_DIR}/proto/configure.proto
)

PROTOBUF_GENERATE_CPP(configure_proto_srcs configure_proto_hdrs ${protofiles})
list(APPEND proto_configure_srcs ${configure_proto_srcs})

list(APPEND proto_configure_srcs ${CMAKE_CURRENT_LIST_DIR}/src/configure_parser.cpp)

add_library(proto_configure ${proto_configure_srcs})

add_executable(test_configure ${CMAKE_CURRENT_LIST_DIR}/tests/test_configure.cpp)
target_include_directories(test_configure PUBLIC
    ${CMAKE_CURRENT_BINARY_DIR}/
    ${CMAKE_CURRENT_LIST_DIR}/include
)
target_link_libraries(test_configure proto_configure protobuf)

proto_configure/include/configure_parser.h (new file, mode 100644)

#pragma once
#include <google/protobuf/message.h>

namespace baidu {
namespace paddle_serving {
namespace configure {

int read_proto_conf(const std::string &conf_path,
                    const std::string &conf_file,
                    google::protobuf::Message *conf);

int write_proto_conf(google::protobuf::Message *message,
                     const std::string &output_path,
                     const std::string &output_file);

} // configure
} // paddle_serving
} // baidu
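
A minimal usage sketch for these two helpers, assuming the generated configure.pb.h is on the include path (the function name, paths and checks below are illustrative, not part of the commit; error handling trimmed):

    #include "configure.pb.h"
    #include "configure_parser.h"

    int load_resource_conf() {
        baidu::paddle_serving::configure::ResourceConf resource_conf;
        // read_proto_conf() concatenates path and file, opens the file and
        // parses it as protobuf text format into the message.
        if (baidu::paddle_serving::configure::read_proto_conf(
                "./conf/", "resource.prototxt", &resource_conf) != 0) {
            return -1;
        }
        // Generated accessors replace the old comcfg-style string lookups.
        std::string path = resource_conf.model_toolkit_path();
        std::string file = resource_conf.model_toolkit_file();
        return (path.empty() || file.empty()) ? -1 : 0;
    }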

proto_configure/proto/configure.proto (new file, mode 100644)

syntax = "proto2";
package baidu.paddle_serving.configure;

message EngineDesc {
    required string type = 1;
    required string reloadable_meta = 2;
    required string reloadable_type = 3;
    required string model_data_path = 4;
    required uint32 runtime_thread_num = 5;
    required uint32 batch_infer_size = 6;
    required uint32 enable_batch_align = 7;
    optional string version_file = 8;
    optional string version_type = 9;
};

message VersionedEngine {
    required string name = 1;
    repeated EngineDesc versions = 2;
    optional EngineDesc default_version = 3;
};

// model_toolkit conf
message ModelToolkitConf {
    repeated VersionedEngine engines = 1;
};

// reource conf
message ResourceConf {
    required string model_toolkit_path = 1;
    required string model_toolkit_file = 2;
};

// DAG node depency info
message DAGNodeDependency {
    required string name = 1;
    required string mode = 2;
};

// DAG Node
message DAGNode {
    required string name = 1;
    required string type = 2;
    repeated DAGNodeDependency dependencies = 3;
};

// workflow entry
message Workflow {
    required string name = 1;
    required string workflow_type = 2;
    repeated DAGNode nodes = 3;
};

// Workflow conf
message WorkflowConf {
    repeated Workflow workflow = 1;
}

message InferService {
    required string name = 1;
    repeated string workflow = 2;
};

// InferService conf
message InferServiceConf {
    repeated InferService service = 1;
};
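
Serialized as protobuf text format, an InferServiceConf (the new service.prototxt) would look roughly like the sketch below, mirroring the services written by the test program further down; illustrative only:

    service {
      name: "ImageClassifyService"
      workflow: "workflow1"
      workflow: "workflow2"
    }
    service {
      name: "BuiltinDenseFormatService"
      workflow: "workflow2"
    }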

proto_configure/src/configure_parser.cpp (new file, mode 100644)

#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <fstream>
#include "butil/logging.h"
#include <google/protobuf/text_format.h>
#include <google/protobuf/io/zero_copy_stream_impl.h>

namespace baidu {
namespace paddle_serving {
namespace configure {

int read_proto_conf(const std::string &conf_path,
                    const std::string &conf_file,
                    google::protobuf::Message *conf)
{
    std::string file_str = conf_path + conf_file;
    int fd = open(file_str.c_str(), O_RDONLY);
    if (fd == -1) {
        LOG(WARNING) << "File not found: " << file_str.c_str();
        return -1;
    }

    google::protobuf::io::FileInputStream input(fd);
    bool success = google::protobuf::TextFormat::Parse(&input, conf);
    close(fd);
    if (!success) {
        return -1;
    }

    return 0;
}

int write_proto_conf(google::protobuf::Message *message,
                     const std::string &output_path,
                     const std::string &output_file)
{
    std::string binary_str;
    google::protobuf::TextFormat::PrintToString(*message, &binary_str);

    std::string file_str = output_path + output_file;
    std::ofstream fout_bin((file_str.c_str()));
    if (!fout_bin) {
        LOG(WARNING) << "Open file error: " << file_str.c_str();
        return -1;
    }

    fout_bin.write((char *)binary_str.c_str(), binary_str.size());
    fout_bin.close();

    return 0;
}

} // configure
} // paddle_serving
} // baidu

/* vim: set expandtab ts=4 sw=4 sts=4 tw=100: */

proto_configure/tests/test_configure.cpp (new file, mode 100644)

#include <sys/types.h>
#include <sys/stat.h>
#include <unistd.h>
#include <iostream>
#include "configure.pb.h"
#include "configure_parser.h"

using baidu::paddle_serving::configure::EngineDesc;
using baidu::paddle_serving::configure::VersionedEngine;
using baidu::paddle_serving::configure::ModelToolkitConf;
using baidu::paddle_serving::configure::ResourceConf;
using baidu::paddle_serving::configure::DAGNodeDependency;
using baidu::paddle_serving::configure::DAGNode;
using baidu::paddle_serving::configure::Workflow;
using baidu::paddle_serving::configure::WorkflowConf;
using baidu::paddle_serving::configure::InferService;
using baidu::paddle_serving::configure::InferServiceConf;

const std::string output_dir = "./conf/";
const std::string model_toolkit_conf_file = "model_toolkit.prototxt";
const std::string resource_conf_file = "resource.prototxt";
const std::string workflow_conf_file = "workflow.prototxt";
const std::string service_conf_file = "service.prototxt";

int test_write_conf()
{
    // model_toolkit conf
    ModelToolkitConf model_toolkit_conf;

    // This engine has a default version
    VersionedEngine *engine = model_toolkit_conf.add_engines();
    engine->set_name("image_classification_resnet");

    EngineDesc *engine_desc = engine->mutable_default_version();
    engine_desc->set_type("FLUID_CPU_NATIVE_V2");
    engine_desc->set_reloadable_meta("./data/model/paddle/fluid_time_file");
    engine_desc->set_reloadable_type("timestamp_ne");
    engine_desc->set_model_data_path("./data/model/paddle/fluid/SE_ResNeXt50_32x4d");
    engine_desc->set_runtime_thread_num(0);
    engine_desc->set_batch_infer_size(0);
    engine_desc->set_enable_batch_align(0);

    // This engine has two versioned branches
    engine = model_toolkit_conf.add_engines();
    engine->set_name("image_classification_resnet_versioned");
    // Version 1
    engine_desc = engine->add_versions();
    engine_desc->set_type("FLUID_CPU_NATIVE_DIR");
    engine_desc->set_reloadable_meta("./data/model/paddle/fluid_time_file");
    engine_desc->set_reloadable_type("timestamp_ne");
    engine_desc->set_model_data_path("./data/model/paddle/fluid/SE_ResNeXt50_32x4d");
    engine_desc->set_runtime_thread_num(0);
    engine_desc->set_batch_infer_size(0);
    engine_desc->set_enable_batch_align(0);
    // Version 2
    engine_desc = engine->add_versions();
    engine_desc->set_type("FLUID_CPU_NATIVE_DIR");
    engine_desc->set_reloadable_meta("./data/model/paddle/fluid_time_file_2");
    engine_desc->set_reloadable_type("timestamp_ne_2");
    engine_desc->set_model_data_path("./data/model/paddle/fluid/SE_ResNeXt50_32x4d_2");
    engine_desc->set_runtime_thread_num(0);
    engine_desc->set_batch_infer_size(0);
    engine_desc->set_enable_batch_align(0);

    int ret = baidu::paddle_serving::configure::write_proto_conf(
            &model_toolkit_conf, output_dir, model_toolkit_conf_file);
    if (ret != 0) {
        return ret;
    }

    // resource conf
    ResourceConf resource_conf;
    resource_conf.set_model_toolkit_path(output_dir);
    resource_conf.set_model_toolkit_file("resource.prototxt");
    ret = baidu::paddle_serving::configure::write_proto_conf(
            &resource_conf, output_dir, resource_conf_file);
    if (ret != 0) {
        return ret;
    }

    // workflow entries conf
    WorkflowConf workflow_conf;
    Workflow *workflow = workflow_conf.add_workflow();
    workflow->set_name("workflow1");
    workflow->set_workflow_type("Sequence");

    DAGNode *dag_node = workflow->add_nodes();
    dag_node->set_name("image_reader_op");
    dag_node->set_type("ReaderOp");

    dag_node = workflow->add_nodes();
    dag_node->set_name("imag_classify_op");
    dag_node->set_type("ClassifyOp");
    DAGNodeDependency *node_dependency = dag_node->add_dependencies();
    node_dependency->set_name("image_reader_op");
    node_dependency->set_mode("RO");

    dag_node = workflow->add_nodes();
    dag_node->set_name("write_json_op");
    dag_node->set_type("WriteOp");
    node_dependency = dag_node->add_dependencies();
    node_dependency->set_name("image_classify_op");
    node_dependency->set_mode("RO");

    workflow = workflow_conf.add_workflow();
    workflow->set_name("workflow2");
    workflow->set_workflow_type("Sequence");
    dag_node = workflow->add_nodes();
    dag_node->set_name("dense_op");
    dag_node->set_type("DenseOp");

    ret = baidu::paddle_serving::configure::write_proto_conf(
            &workflow_conf, output_dir, workflow_conf_file);
    if (ret != 0) {
        return ret;
    }

    InferServiceConf infer_service_conf;
    InferService *infer_service = infer_service_conf.add_service();
    infer_service->set_name("ImageClassifyService");
    infer_service->add_workflow("workflow1");
    infer_service->add_workflow("workflow2");

    infer_service = infer_service_conf.add_service();
    infer_service->set_name("BuiltinDenseFormatService");
    infer_service->add_workflow("workflow2");

    ret = baidu::paddle_serving::configure::write_proto_conf(
            &infer_service_conf, output_dir, service_conf_file);
    if (ret != 0) {
        return ret;
    }

    return 0;
}

int test_read_conf()
{
    int ret = 0;

    ModelToolkitConf model_toolkit_conf;
    ret = baidu::paddle_serving::configure::read_proto_conf(
            output_dir, model_toolkit_conf_file, &model_toolkit_conf);
    if (ret != 0) {
        std::cout << "Read conf fail: " << model_toolkit_conf_file << std::endl;
        return -1;
    }

    ResourceConf resource_conf;
    ret = baidu::paddle_serving::configure::read_proto_conf(
            output_dir, resource_conf_file, &resource_conf);
    if (ret != 0) {
        std::cout << "Read conf fail: " << resource_conf_file << std::endl;
        return -1;
    }

    WorkflowConf workflow_conf;
    ret = baidu::paddle_serving::configure::read_proto_conf(
            output_dir, workflow_conf_file, &workflow_conf);
    if (ret != 0) {
        std::cout << "Read conf fail: " << workflow_conf_file << std::endl;
        return -1;
    }

    InferServiceConf service_conf;
    ret = baidu::paddle_serving::configure::read_proto_conf(
            output_dir, service_conf_file, &service_conf);
    if (ret != 0) {
        std::cout << "Read conf fail: " << service_conf_file << std::endl;
        return -1;
    }

    return 0;
}

int main()
{
    int ret = 0;
    struct stat stat_buf;
    if (stat(output_dir.c_str(), &stat_buf) != 0) {
        int ret = mkdir("./conf", 0777);
        if (ret != 0) {
            std::cout << "mkdir ./conf fail" << std::endl;
            return -1;
        }
        if (stat("./conf", &stat_buf) != 0) {
            std::cout << "./conf not exist and creating it failed" << std::endl;
            return -1;
        }
    }

    ret = test_write_conf();
    if (ret != 0) {
        std::cout << "test_write_conf fail" << std::endl;
        return -1;
    }
    std::cout << "test_write_conf success" << std::endl;

    ret = test_read_conf();
    if (ret != 0) {
        std::cout << "test_read_conf fail" << std::endl;
        return -1;
    }
    std::cout << "test_read_conf success" << std::endl;

    return 0;
}

/* vim: set expandtab ts=4 sw=4 sts=4 tw=100: */