Commit 5a634786 (unverified)
Authored by Tao Luo on Jul 27, 2018; committed via GitHub on Jul 27, 2018.
Merge pull request #12312 from luotao1/unify
unify libpaddle_inference_api and libpaddle_fluid
Parents: e12b1d17, 062556f9
11 changed files with 29 additions and 60 deletions (+29 -60)
Changed files:
  cmake/inference_lib.cmake                                 +2   -9
  paddle/fluid/inference/CMakeLists.txt                     +21  -5
  paddle/fluid/inference/api/CMakeLists.txt                 +1   -28
  paddle/fluid/inference/api/api.map                        +0   -6
  paddle/fluid/inference/api/api.sym                        +0   -1
  paddle/fluid/inference/api/demo_ci/CMakeLists.txt         +0   -2
  paddle/fluid/inference/check_symbol.sh                    +2   -2
  paddle/fluid/inference/tensorrt/convert/CMakeLists.txt    +1   -1
  paddle/fluid/inference/tensorrt/convert/mul_op.cc         +0   -1
  paddle/fluid/inference/tests/book/CMakeLists.txt          +2   -2
  paddle/fluid/operators/tensorrt_engine_op.cc              +0   -3
cmake/inference_lib.cmake
@@ -148,18 +148,11 @@ if (WITH_ANAKIN AND WITH_GPU)
     list(APPEND inference_deps anakin_inference_lib)
 endif()

-copy(inference_api_lib DEPS paddle_inference_api paddle_inference_api_shared
-  SRCS ${src_dir}/${module}/paddle_inference_api.h ${src_dir}/${module}/demo_ci
-       ${PADDLE_BINARY_DIR}/paddle/fluid/inference/api/libpaddle_inference_api*
-  DSTS ${dst_dir}/inference ${dst_dir}/inference ${dst_dir}/inference
-)
-list(APPEND inference_deps inference_api_lib)
-
 set(module "inference")
 copy(inference_lib DEPS ${inference_deps}
   SRCS ${src_dir}/${module}/*.h ${PADDLE_BINARY_DIR}/paddle/fluid/inference/libpaddle_fluid.*
-  DSTS ${dst_dir}/${module} ${dst_dir}/${module}
+       ${src_dir}/${module}/api/paddle_inference_api.h ${src_dir}/${module}/api/demo_ci
+  DSTS ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module}
 )

 set(module "platform")
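The copy() helper defined earlier in inference_lib.cmake appears to pair each SRCS entry with the DSTS entry at the same position, so the merged rule above ships the headers, the unified libpaddle_fluid library, the public API header, and the demo project through the single inference_lib target instead of a separate inference_api_lib. A rough sketch of the resulting install layout under that positional-pairing assumption (directory names follow the variables above):

  # Sketch only; assumes copy() maps SRCS[i] -> DSTS[i], as the four matching
  # DSTS entries above suggest (module is "inference" here).
  #   ${dst_dir}/inference/  <- ${src_dir}/inference/*.h
  #   ${dst_dir}/inference/  <- libpaddle_fluid.* (static and shared)
  #   ${dst_dir}/inference/  <- api/paddle_inference_api.h
  #   ${dst_dir}/inference/  <- api/demo_ci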
paddle/fluid/inference/CMakeLists.txt
@@ -14,8 +14,15 @@ cc_library(paddle_fluid_api
 get_property(fluid_modules GLOBAL PROPERTY FLUID_MODULES)

+# paddle_fluid_origin exclude inference api interface
+cc_library(paddle_fluid_origin DEPS ${fluid_modules} paddle_fluid_api)
+
+if(NOT APPLE)
+  add_subdirectory(api)
+endif()
+
 # Create static library
-cc_library(paddle_fluid DEPS ${fluid_modules} paddle_fluid_api)
+cc_library(paddle_fluid DEPS ${fluid_modules} paddle_fluid_api paddle_inference_api)
 if(NOT APPLE)
   # TODO(liuyiqu: Temporarily disable the link flag because it is not support on Mac.
   set(LINK_FLAGS "-Wl,--retain-symbols-file ${CMAKE_CURRENT_SOURCE_DIR}/paddle_fluid.sym")
@@ -24,7 +31,7 @@ endif()

 # Create shared library
 cc_library(paddle_fluid_shared SHARED
-    SRCS io.cc
+    SRCS io.cc ${CMAKE_CURRENT_SOURCE_DIR}/api/api.cc ${CMAKE_CURRENT_SOURCE_DIR}/api/api_impl.cc
     DEPS ${fluid_modules} paddle_fluid_api)

 set_target_properties(paddle_fluid_shared PROPERTIES OUTPUT_NAME paddle_fluid)
@@ -32,12 +39,21 @@ if(NOT APPLE)
   # TODO(liuyiqun): Temporarily disable the link flag because it is not support on Mac.
   set(LINK_FLAGS "-Wl,--version-script ${CMAKE_CURRENT_SOURCE_DIR}/paddle_fluid.map")
   set_target_properties(paddle_fluid_shared PROPERTIES LINK_FLAGS "${LINK_FLAGS}")
+  # check symbol hidden
+  FILE(WRITE ${CMAKE_CURRENT_BINARY_DIR}/check_symbol.cmake
+    "execute_process(COMMAND bash -c \"${CMAKE_CURRENT_SOURCE_DIR}/check_symbol.sh"
+    " ${CMAKE_CURRENT_BINARY_DIR}/libpaddle_fluid.so\" RESULT_VARIABLE symbol_res)\n"
+    "if(NOT \"\${symbol_res}\" STREQUAL \"0\")\n"
+    "  message(FATAL_ERROR \"Check symbol failed.\")\n"
+    "endif()\n")
+  add_custom_command(
+    OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/.check_symbol"
+    COMMAND ${CMAKE_COMMAND} -P "${CMAKE_CURRENT_BINARY_DIR}/check_symbol.cmake"
+    DEPENDS paddle_fluid_shared)
+  add_custom_target(check_symbol ALL DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/.check_symbol")
 endif()

 if(WITH_TESTING)
   # both tests/book and analysis depends the models that generated by python/paddle/fluid/tests/book
   add_subdirectory(tests/book)
 endif()
-
-if(NOT APPLE)
-  add_subdirectory(api)
-endif()
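For readability, once CMake resolves the escape sequences in the FILE(WRITE ...) call above, the generated check_symbol.cmake comes out roughly as follows (a sketch; the real file contains the expanded absolute paths rather than the variables shown here):

  # Approximate content of ${CMAKE_CURRENT_BINARY_DIR}/check_symbol.cmake
  execute_process(COMMAND bash -c "${CMAKE_CURRENT_SOURCE_DIR}/check_symbol.sh ${CMAKE_CURRENT_BINARY_DIR}/libpaddle_fluid.so"
                  RESULT_VARIABLE symbol_res)
  if(NOT "${symbol_res}" STREQUAL "0")
    message(FATAL_ERROR "Check symbol failed.")
  endif()

The check_symbol custom target runs this script whenever paddle_fluid_shared is rebuilt, so a unified shared library that exposes unexpected symbols fails the build.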
paddle/fluid/inference/api/CMakeLists.txt
@@ -42,35 +42,8 @@ function(inference_api_test TARGET_NAME)
   endif(WITH_TESTING)
 endfunction(inference_api_test)

-cc_library(paddle_inference_api
-    SRCS api.cc api_impl.cc
-    DEPS ${FLUID_CORE_MODULES} ${GLOB_OP_LIB})
-if(NOT APPLE)
-  set(LINK_FLAGS "-Wl,--retain-symbols-file ${CMAKE_CURRENT_SOURCE_DIR}/api.sym")
-  set_target_properties(paddle_inference_api PROPERTIES LINK_FLAGS "${LINK_FLAGS}")
-endif()
-
-# Here the shared library doesn't depend on other fluid libraries, or double free will occur.
-cc_library(paddle_inference_api_shared SHARED
-    SRCS api.cc api_impl.cc)
-add_dependencies(paddle_inference_api_shared ${FLUID_CORE_MODULES} ${GLOB_OP_LIB})
-set_target_properties(paddle_inference_api_shared PROPERTIES OUTPUT_NAME paddle_inference_api)
-
-if(NOT APPLE)
-  set(LINK_FLAGS "-Wl,--version-script ${CMAKE_CURRENT_SOURCE_DIR}/api.map")
-  set_target_properties(paddle_inference_api_shared PROPERTIES LINK_FLAGS "${LINK_FLAGS}")
-  FILE(WRITE ${CMAKE_CURRENT_BINARY_DIR}/check_symbol.cmake
-    "execute_process(COMMAND bash -c \"${CMAKE_CURRENT_SOURCE_DIR}/check_symbol.sh"
-    " ${CMAKE_CURRENT_BINARY_DIR}/libpaddle_inference_api.so\" RESULT_VARIABLE symbol_res)\n"
-    "if(NOT \"\${symbol_res}\" STREQUAL \"0\")\n"
-    "  message(FATAL_ERROR \"Check symbol failed.\")\n"
-    "endif()\n")
-  add_custom_command(
-    OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/.check_symbol"
-    COMMAND ${CMAKE_COMMAND} -P "${CMAKE_CURRENT_BINARY_DIR}/check_symbol.cmake"
-    DEPENDS paddle_inference_api_shared)
-  add_custom_target(check_symbol ALL DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/.check_symbol")
-endif()
+cc_library(paddle_inference_api SRCS api.cc api_impl.cc)

 cc_test(test_paddle_inference_api
     SRCS api_tester.cc
paddle/fluid/inference/api/api.map
deleted file mode 100644
-{
-  global:
-    *paddle*;
-  local:
-    *;
-};
paddle/fluid/inference/api/api.sym
deleted file mode 100644
-*paddle*
paddle/fluid/inference/api/demo_ci/CMakeLists.txt
@@ -55,11 +55,9 @@ endif()
 # Note: libpaddle_inference_api.so/a must put before libpaddle_fluid.so/a
 if(WITH_STATIC_LIB)
   set(DEPS
-      ${PADDLE_LIB}/paddle/fluid/inference/libpaddle_inference_api.a
       ${PADDLE_LIB}/paddle/fluid/inference/libpaddle_fluid.a)
 else()
   set(DEPS
-      ${PADDLE_LIB}/paddle/fluid/inference/libpaddle_inference_api.so
       ${PADDLE_LIB}/paddle/fluid/inference/libpaddle_fluid.so)
 endif()
 set(EXTERNAL_LIB "-lrt -ldl -lpthread")
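The practical effect for code built against the install tree is that only the unified library has to be linked. A minimal sketch of the change from a consumer's point of view, with a hypothetical target name (demo_app) and the PADDLE_LIB/EXTERNAL_LIB variables used above:

  # Sketch only; target and source names are illustrative, not from the repo.
  add_executable(demo_app demo.cc)
  # Previously both libraries were required, libpaddle_inference_api.a first:
  #   target_link_libraries(demo_app
  #       ${PADDLE_LIB}/paddle/fluid/inference/libpaddle_inference_api.a
  #       ${PADDLE_LIB}/paddle/fluid/inference/libpaddle_fluid.a ${EXTERNAL_LIB})
  # After this commit, the unified archive alone suffices:
  target_link_libraries(demo_app
      ${PADDLE_LIB}/paddle/fluid/inference/libpaddle_fluid.a ${EXTERNAL_LIB})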
paddle/fluid/inference/api/check_symbol.sh → paddle/fluid/inference/check_symbol.sh (renamed)
@@ -3,8 +3,8 @@
 lib=$1
 if [ $# -ne 1 ]; then echo "No input library"; exit -1 ; fi

-num_paddle_syms=$(nm -D --defined-only ${lib} | grep paddle | wc -l)
-num_google_syms=$(nm -D --defined-only ${lib} | grep google | wc -l)
+num_paddle_syms=$(nm -D ${lib} | grep paddle | wc -l)
+num_google_syms=$(nm -D ${lib} | grep google | grep -v paddle | grep T | wc -l)

 if [ $num_paddle_syms -le 0 ]; then echo "Have no paddle symbols"; exit -1 ; fi
 if [ $num_google_syms -ge 1 ]; then echo "Have some google symbols"; exit -1 ; fi
paddle/fluid/inference/tensorrt/convert/CMakeLists.txt
 # Add TRT tests
 nv_library(tensorrt_converter
   SRCS mul_op.cc conv2d_op.cc fc_op.cc pool2d_op.cc
-  DEPS tensorrt_engine mul_op)
+  DEPS tensorrt_engine operator scope framework_proto op_registry)
 nv_test(test_op_converter SRCS test_op_converter.cc DEPS
   ${FLUID_CORE_MODULES} tensorrt_engine tensorrt_converter)
paddle/fluid/inference/tensorrt/convert/mul_op.cc
@@ -49,5 +49,4 @@ class MulOpConverter : public OpConverter {
 }  // namespace inference
 }  // namespace paddle

-USE_OP(mul);
 REGISTER_TRT_OP_CONVERTER(mul, MulOpConverter);
paddle/fluid/inference/tests/book/CMakeLists.txt
@@ -17,7 +17,7 @@ function(inference_test TARGET_NAME)
     string(REGEX REPLACE "^_$" "" arg "${arg}")
     cc_test(test_inference_${TARGET_NAME}${arg}
             SRCS test_inference_${TARGET_NAME}.cc
-            DEPS paddle_fluid
+            DEPS paddle_fluid_origin
             ARGS --dirname=${PYTHON_TESTS_DIR}/book/${TARGET_NAME}${arg}.inference.model)
     set_tests_properties(test_inference_${TARGET_NAME}${arg}
         PROPERTIES DEPENDS test_${TARGET_NAME})
@@ -43,6 +43,6 @@ inference_test(word2vec)
 # TODO(TJ): clean me up
 cc_test(test_inference_nlp
     SRCS test_inference_nlp.cc
-    DEPS paddle_fluid
+    DEPS paddle_fluid_origin
     ARGS
         --model_path=${PADDLE_BINARY_DIR}/python/paddle/fluid/tests/book/recognize_digits_mlp.inference.model)
paddle/fluid/operators/tensorrt_engine_op.cc
@@ -163,7 +163,4 @@ REGISTER_OP_CPU_KERNEL(
     ops::TensorRTEngineKernel<paddle::platform::CPUDeviceContext, int>,
     ops::TensorRTEngineKernel<paddle::platform::CPUDeviceContext, int64_t>);

-// A trick to compile with the needed TensorRT op converter.
-USE_TRT_CONVERTER(mul)
-
 #endif  // PADDLE_WITH_CUDA