Commit feb9eec9

Authored by Sing_chan on Jun 28, 2022; committed via GitHub on Jun 28, 2022.
Parent: a97a8dd1

Revert "make inference_api_test compile with dynamic linking library (#41944)"

This reverts commit f8b9073f.
Showing 11 changed files with 39 additions and 75 deletions (+39 −75).
paddle/fluid/inference/CMakeLists.txt                                      +0  −2
paddle/fluid/inference/analysis/CMakeLists.txt                             +4  −4
paddle/fluid/inference/capi/CMakeLists.txt                                 +4  −0
paddle/fluid/inference/capi_exp/CMakeLists.txt                             +4  −0
paddle/fluid/inference/tests/api/CMakeLists.txt                            +18 −45
paddle/fluid/inference/tests/api/analyzer_image_classification_tester.cc   +5  −5
paddle/fluid/inference/tests/api/trt_fc_prelu_test.cc                      +0  −5
paddle/fluid/inference/tests/api/trt_mobilenet_test.cc                     +0  −5
paddle/fluid/inference/tests/api/trt_resnext_test.cc                       +0  −5
paddle/phi/common/place.h                                                  +3  −2
paddle/scripts/paddle_build.bat                                            +1  −2
paddle/fluid/inference/CMakeLists.txt

@@ -119,8 +119,6 @@ cc_library(
 get_property(os_dependency_modules GLOBAL PROPERTY OS_DEPENDENCY_MODULES)
 target_link_libraries(paddle_inference_shared ${os_dependency_modules})
 if(WIN32)
-  set_property(TARGET paddle_inference_shared
-               PROPERTY WINDOWS_EXPORT_ALL_SYMBOLS ON)
   target_link_libraries(paddle_inference_shared gflags)
 endif()
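For context on the two lines removed above: WINDOWS_EXPORT_ALL_SYMBOLS (CMake 3.4+) makes CMake generate a .def file that exports the DLL's functions automatically, so a library builds on MSVC without per-symbol __declspec(dllexport) annotations. With the property gone, symbols used across the DLL boundary need explicit export markers again, which is why paddle/phi/common/place.h regains PADDLE_API below. A minimal standalone sketch of the property (demo and demo.cc are placeholder names, not Paddle targets):

  # Minimal sketch, not Paddle's build: export a Windows DLL's functions
  # without __declspec(dllexport) annotations in the sources.
  cmake_minimum_required(VERSION 3.4)
  project(export_demo CXX)

  add_library(demo SHARED demo.cc)  # demo.cc: any C++ source
  if(WIN32)
    set_property(TARGET demo PROPERTY WINDOWS_EXPORT_ALL_SYMBOLS ON)
  endif()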
paddle/fluid/inference/analysis/CMakeLists.txt

@@ -49,10 +49,10 @@ function(inference_analysis_test_build TARGET)
       SRCS
       ${analysis_test_SRCS}
       DEPS
-      ${analysis_test_EXTRA_DEPS}
       analysis
       pass
-      ${GLOB_PASS_LIB})
+      ${GLOB_PASS_LIB}
+      ${analysis_test_EXTRA_DEPS})
   endif()
 endfunction()

@@ -80,10 +80,10 @@ function(inference_analysis_test TARGET)
       SRCS
       ${analysis_test_SRCS}
       DEPS
-      ${analysis_test_EXTRA_DEPS}
       analysis
       pass
-      ${GLOB_PASS_LIB})
+      ${GLOB_PASS_LIB}
+      ${analysis_test_EXTRA_DEPS})
     inference_base_test_run(${TARGET} COMMAND ${TARGET} ARGS
                             ${analysis_test_ARGS})
   endif()
paddle/fluid/inference/capi/CMakeLists.txt

@@ -20,6 +20,10 @@ cc_library(
   SRCS ${C_API_SRCS}
   DEPS paddle_inference)

+if(NOT ON_INFER)
+  return()
+endif()
+
 # Create inference capi shared library
 cc_library(
   paddle_inference_c_shared SHARED
paddle/fluid/inference/capi_exp/CMakeLists.txt

@@ -20,6 +20,10 @@ cc_library(
   SRCS ${C_API_SRCS}
   DEPS paddle_inference)

+if(NOT ON_INFER)
+  return()
+endif()
+
 # Create inference capi shared library
 cc_library(
   paddle_inference_c_shared SHARED
paddle/fluid/inference/tests/api/CMakeLists.txt

@@ ... @@
-set(INFERENCE_EXTRA_DEPS paddle_inference_shared)
+if(NOT APPLE AND NOT WIN32)
+  set(INFERENCE_EXTRA_DEPS paddle_inference_shared)
+else()
+  set(INFERENCE_EXTRA_DEPS paddle_inference_api paddle_inference_io
+                           ir_pass_manager analysis_predictor benchmark)
+endif()
+
+if(WITH_GPU AND TENSORRT_FOUND)
+  set(INFERENCE_EXTRA_DEPS ${INFERENCE_EXTRA_DEPS} analysis ${analysis_deps})
+endif()

 function(download_data install_dir data_file check_sum)
   string(REGEX MATCH "[^/\\]+$" file_name ${data_file})

@@ -939,26 +948,18 @@ if(WITH_GPU AND TENSORRT_FOUND)
     analyzer_capi_exp_gpu_tester.cc
     EXTRA_DEPS
     ${INFERENCE_EXTRA_DEPS}
+    paddle_inference_c
     ARGS
     --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
-  if(WIN32)
-    target_link_libraries(test_analyzer_capi_exp_gpu paddle_inference_c_shared)
-  else()
-    target_link_libraries(test_analyzer_capi_exp_gpu paddle_inference_c)
-  endif()

   inference_analysis_test(
     test_analyzer_capi_exp_xpu
     SRCS
     analyzer_capi_exp_xpu_tester.cc
     EXTRA_DEPS
     ${INFERENCE_EXTRA_DEPS}
+    paddle_inference_c
     ARGS
     --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
-  if(WIN32)
-    target_link_libraries(test_analyzer_capi_exp_xpu paddle_inference_c_shared)
-  else()
-    target_link_libraries(test_analyzer_capi_exp_xpu paddle_inference_c)
-  endif()

   set(TRT_MODEL_QUANT_RESNET_DIR
       "${INFERENCE_DEMO_INSTALL_DIR}/small_quant_model")

@@ -1106,13 +1107,9 @@ inference_analysis_test(
   analyzer_capi_exp_tester.cc
   EXTRA_DEPS
   ${INFERENCE_EXTRA_DEPS}
+  paddle_inference_c
   ARGS
   --infer_model=${RESNET50_MODEL_DIR}/model)
-if(WIN32)
-  target_link_libraries(test_analyzer_capi_exp paddle_inference_c_shared)
-else()
-  target_link_libraries(test_analyzer_capi_exp paddle_inference_c)
-endif()

 inference_analysis_test(
   test_analyzer_capi_exp_pd_config

@@ -1120,14 +1117,9 @@ inference_analysis_test(
   analyzer_capi_exp_pd_config_tester.cc
   EXTRA_DEPS
   ${INFERENCE_EXTRA_DEPS}
+  paddle_inference_c
   ARGS
   --infer_model=${MOBILENET_INSTALL_DIR}/model)
-if(WIN32)
-  target_link_libraries(test_analyzer_capi_exp_pd_config
-                        paddle_inference_c_shared)
-else()
-  target_link_libraries(test_analyzer_capi_exp_pd_config paddle_inference_c)
-endif()

 inference_analysis_test(
   test_analyzer_capi_exp_pd_tensor

@@ -1135,14 +1127,9 @@ inference_analysis_test(
   analyzer_capi_exp_pd_tensor_tester.cc
   EXTRA_DEPS
   ${INFERENCE_EXTRA_DEPS}
+  paddle_inference_c
   ARGS
   --infer_model=${MOBILENET_INSTALL_DIR}/model)
-if(WIN32)
-  target_link_libraries(test_analyzer_capi_exp_pd_tensor
-                        paddle_inference_c_shared)
-else()
-  target_link_libraries(test_analyzer_capi_exp_pd_tensor paddle_inference_c)
-endif()

 if(NOT APPLE AND NOT WIN32)
   inference_analysis_test(

@@ -1151,16 +1138,10 @@ if(NOT APPLE AND NOT WIN32)
     analyzer_capi_exp_pd_threads_tester.cc
     EXTRA_DEPS
     ${INFERENCE_EXTRA_DEPS}
+    paddle_inference_c
     ARGS
     --infer_model=${MOBILENET_INSTALL_DIR}/model)
-  if(WIN32)
-    target_link_libraries(test_analyzer_capi_exp_pd_threads
-                          paddle_inference_c_shared)
-  else()
-    target_link_libraries(test_analyzer_capi_exp_pd_threads paddle_inference_c)
-  endif()
 endif()

 inference_analysis_test(
   test_analyzer_zerocopytensor_tensor
   SRCS

@@ -1201,13 +1182,9 @@ if(WITH_MKLDNN)
     analyzer_capi_exp_int_tester.cc
     EXTRA_DEPS
     ${INFERENCE_EXTRA_DEPS}
+    paddle_inference_c
     ARGS
     --infer_model=${INT8_DATA_DIR}/resnet50/model)
-  if(WIN32)
-    target_link_libraries(test_analyzer_capi_exp_int paddle_inference_c_shared)
-  else()
-    target_link_libraries(test_analyzer_capi_exp_int paddle_inference_c)
-  endif()
 endif()

 inference_analysis_test(

@@ -1216,13 +1193,9 @@ inference_analysis_test(
   analyzer_capi_exp_ner_tester.cc
   EXTRA_DEPS
   ${INFERENCE_EXTRA_DEPS}
+  paddle_inference_c
   ARGS
   --infer_model=${CHINESE_NER_INSTALL_DIR}/model)
-if(WIN32)
-  target_link_libraries(test_analyzer_capi_exp_ner paddle_inference_c_shared)
-else()
-  target_link_libraries(test_analyzer_capi_exp_ner paddle_inference_c)
-endif()

 if(WITH_GPU)
   inference_analysis_test(
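Note on the pattern reverted above: #41944 linked each C-API test against paddle_inference_c_shared on Windows through a follow-up target_link_libraries() call, while this revert simply lists paddle_inference_c once in each test's EXTRA_DEPS. A minimal sketch of that per-platform linking style, using a hypothetical target my_capi_test rather than Paddle's inference_analysis_test() helper:

  # Sketch of the conditional-linking style removed by this revert;
  # my_capi_test and my_capi_test.cc are placeholders, not Paddle targets.
  add_executable(my_capi_test my_capi_test.cc)
  if(WIN32)
    # pre-revert style: pick the shared C API explicitly on Windows
    target_link_libraries(my_capi_test paddle_inference_c_shared)
  else()
    # the default (non-shared) C API target
    target_link_libraries(my_capi_test paddle_inference_c)
  endif()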
paddle/fluid/inference/tests/api/analyzer_image_classification_tester.cc

@@ -66,6 +66,11 @@ void profile(bool use_mkldnn = false) {
                  FLAGS_num_threads);
 }

+TEST(Analyzer_resnet50, profile) { profile(); }
+#ifdef PADDLE_WITH_MKLDNN
+TEST(Analyzer_resnet50, profile_mkldnn) { profile(true /* use_mkldnn */); }
+#endif
+
 // Check the fuse status
 TEST(Analyzer_resnet50, fuse_statis) {
   AnalysisConfig cfg;

@@ -77,11 +82,6 @@ TEST(Analyzer_resnet50, fuse_statis) {
   LOG(INFO) << "num_ops: " << num_ops;
 }

-TEST(Analyzer_resnet50, profile) { profile(); }
-#ifdef PADDLE_WITH_MKLDNN
-TEST(Analyzer_resnet50, profile_mkldnn) { profile(true /* use_mkldnn */); }
-#endif
-
 // Compare result of NativeConfig and AnalysisConfig
 void compare(bool use_mkldnn = false) {
   AnalysisConfig cfg;
paddle/fluid/inference/tests/api/trt_fc_prelu_test.cc

@@ -23,11 +23,6 @@ namespace inference {
 TEST(TensorRT_fc, compare) {
   std::string model_dir = FLAGS_infer_model + "/fc_uint8";
-  AnalysisConfig config;
-  config.EnableUseGpu(100, 0);
-  config.SetModel(model_dir);
-  config.DisableGlogInfo();
-  auto predictor = CreatePaddlePredictor(config);
   compare(model_dir, /* use_tensorrt */ true);
   // Open it when need.
   // profile(model_dir, /* use_analysis */ true, FLAGS_use_tensorrt);
 }
paddle/fluid/inference/tests/api/trt_mobilenet_test.cc

@@ -23,11 +23,6 @@ namespace inference {
 TEST(TensorRT_mobilenet, compare) {
   std::string model_dir = FLAGS_infer_model + "/mobilenet";
-  AnalysisConfig config;
-  config.EnableUseGpu(100, 0);
-  config.SetModel(model_dir);
-  config.DisableGlogInfo();
-  auto predictor = CreatePaddlePredictor(config);
   compare(model_dir, /* use_tensorrt */ true);
   // Open it when need.
   // profile(model_dir, /* use_analysis */ true, FLAGS_use_tensorrt);
 }
paddle/fluid/inference/tests/api/trt_resnext_test.cc

@@ -23,11 +23,6 @@ namespace inference {
 TEST(TensorRT_resnext50, compare) {
   std::string model_dir = FLAGS_infer_model + "/resnext50";
-  AnalysisConfig config;
-  config.EnableUseGpu(100, 0);
-  config.SetModel(model_dir);
-  config.DisableGlogInfo();
-  auto predictor = CreatePaddlePredictor(config);
   compare(model_dir, /* use_tensorrt */ true);
 }
paddle/phi/common/place.h

@@ -39,9 +39,10 @@ enum class AllocationType : int8_t {
 const char* AllocationTypeStr(AllocationType type);

-size_t GetOrRegisterGlobalDeviceTypeId(const std::string& device_type);
+PADDLE_API size_t GetOrRegisterGlobalDeviceTypeId(
+    const std::string& device_type);

-std::string GetGlobalDeviceType(size_t device_type_id_);
+PADDLE_API std::string GetGlobalDeviceType(size_t device_type_id_);

 /// \brief The place is used to specify where the data is stored.
 class PADDLE_API Place {
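PADDLE_API here is Paddle's export/import annotation; restoring it on these free functions compensates for dropping the blanket WINDOWS_EXPORT_ALL_SYMBOLS property in the first file. As a hedged sketch of how such a macro is commonly generated on the build side (Paddle defines PADDLE_API by hand; mylib and MYLIB_API below are hypothetical):

  # Sketch only: generating an export macro with CMake's stock module.
  # Paddle defines PADDLE_API by hand; mylib/MYLIB_API are placeholders.
  include(GenerateExportHeader)

  add_library(mylib SHARED mylib.cc)
  generate_export_header(mylib
    EXPORT_MACRO_NAME MYLIB_API       # plays the role PADDLE_API plays here
    EXPORT_FILE_NAME mylib_export.h)  # defines dllexport/dllimport per config
  target_include_directories(mylib PUBLIC ${CMAKE_CURRENT_BINARY_DIR})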
paddle/scripts/paddle_build.bat

@@ -685,8 +685,7 @@ set PATH=%THIRD_PARTY_PATH:/=\%\install\openblas\lib;%THIRD_PARTY_PATH:/=\%\inst
 %THIRD_PARTY_PATH:/=\%\install\zlib\bin;%THIRD_PARTY_PATH:/=\%\install\mklml\lib;^
 %THIRD_PARTY_PATH:/=\%\install\mkldnn\bin;%THIRD_PARTY_PATH:/=\%\install\warpctc\bin;^
 %THIRD_PARTY_PATH:/=\%\install\onnxruntime\lib;%THIRD_PARTY_PATH:/=\%\install\paddle2onnx\lib;^
-%work_dir%\%BUILD_DIR%\paddle\fluid\inference;%work_dir%\%BUILD_DIR%\paddle\fluid\inference\capi_exp;^
-%PATH%
+%work_dir%\%BUILD_DIR%\paddle\fluid\inference;%PATH%

 REM TODO: make ut find .dll in install\onnxruntime\lib
 xcopy %THIRD_PARTY_PATH:/=\%\install\onnxruntime\lib\onnxruntime.dll %work_dir%\%BUILD_DIR%\paddle\fluid\inference\tests\api\ /Y