提交 bf19cd62 编写于 作者: S sjtubinlong

inference: add linux building

上级 5fabe443
...@@ -36,7 +36,7 @@ if (NOT DEFINED OPENCV_DIR OR ${OPENCV_DIR} STREQUAL "") ...@@ -36,7 +36,7 @@ if (NOT DEFINED OPENCV_DIR OR ${OPENCV_DIR} STREQUAL "")
endif() endif()
include_directories("${CMAKE_SOURCE_DIR}/") include_directories("${CMAKE_SOURCE_DIR}/")
include_directories("${CMAKE_CURRENT_BINARY_DIR}/ext/yaml-cpp/src/yaml-cpp/include") include_directories("${CMAKE_CURRENT_BINARY_DIR}/ext/yaml-cpp/src/ext-yaml-cpp/include")
include_directories("${PADDLE_DIR}/") include_directories("${PADDLE_DIR}/")
include_directories("${PADDLE_DIR}/third_party/install/protobuf/include") include_directories("${PADDLE_DIR}/third_party/install/protobuf/include")
include_directories("${PADDLE_DIR}/third_party/install/glog/include") include_directories("${PADDLE_DIR}/third_party/install/glog/include")
...@@ -82,7 +82,7 @@ if (WIN32) ...@@ -82,7 +82,7 @@ if (WIN32)
add_definitions(-DSTATIC_LIB) add_definitions(-DSTATIC_LIB)
endif() endif()
else() else()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++14") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O2 -std=c++11")
set(CMAKE_STATIC_LIBRARY_PREFIX "") set(CMAKE_STATIC_LIBRARY_PREFIX "")
endif() endif()
...@@ -160,14 +160,13 @@ if (NOT WIN32) ...@@ -160,14 +160,13 @@ if (NOT WIN32)
set(EXTERNAL_LIB "-lrt -ldl -lpthread") set(EXTERNAL_LIB "-lrt -ldl -lpthread")
set(DEPS ${DEPS} set(DEPS ${DEPS}
${MATH_LIB} ${MKLDNN_LIB} ${MATH_LIB} ${MKLDNN_LIB}
glog gflags protobuf snappystream snappy z xxhash glog gflags protobuf yaml-cpp snappystream snappy z xxhash
${EXTERNAL_LIB}) ${EXTERNAL_LIB})
else() else()
set(DEPS ${DEPS} set(DEPS ${DEPS}
${MATH_LIB} ${MKLDNN_LIB} ${MATH_LIB} ${MKLDNN_LIB}
opencv_world346 glog libyaml-cppmt gflags_static libprotobuf snappy zlibstatic xxhash snappystream ${EXTERNAL_LIB}) opencv_world346 glog libyaml-cppmt gflags_static libprotobuf snappy zlibstatic xxhash snappystream ${EXTERNAL_LIB})
set(DEPS ${DEPS} libcmt shlwapi) set(DEPS ${DEPS} libcmt shlwapi)
set(DEPS ${DEPS} ${YAML_CPP_LIBRARY})
endif(NOT WIN32) endif(NOT WIN32)
if(WITH_GPU) if(WITH_GPU)
...@@ -206,13 +205,17 @@ ADD_LIBRARY(libpaddleseg_inference STATIC ${PADDLESEG_INFERENCE_SRCS}) ...@@ -206,13 +205,17 @@ ADD_LIBRARY(libpaddleseg_inference STATIC ${PADDLESEG_INFERENCE_SRCS})
target_link_libraries(libpaddleseg_inference ${DEPS}) target_link_libraries(libpaddleseg_inference ${DEPS})
add_executable(demo demo.cpp) add_executable(demo demo.cpp)
ADD_DEPENDENCIES(libpaddleseg_inference yaml-cpp) ADD_DEPENDENCIES(libpaddleseg_inference ext-yaml-cpp)
ADD_DEPENDENCIES(demo yaml-cpp libpaddleseg_inference) ADD_DEPENDENCIES(demo ext-yaml-cpp libpaddleseg_inference)
target_link_libraries(demo ${DEPS} libpaddleseg_inference) target_link_libraries(demo ${DEPS} libpaddleseg_inference)
if (WIN32)
add_custom_command(TARGET demo POST_BUILD add_custom_command(TARGET demo POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./mklml.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./mklml.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./libiomp5md.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./libiomp5md.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mkldnn/bin/mkldnn.dll ./mkldnn.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ./mkldnn.dll
) COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./release/mklml.dll
\ No newline at end of file COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./release/libiomp5md.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ./release/mkldnn.dll
)
endif()
DEPLOY: DEPLOY:
USE_GPU: 1 USE_GPU: 1
MODEL_PATH: "C:\\PaddleDeploy\\models\\deeplabv3p_xception65_humanseg" MODEL_PATH: "/root/projects/models/deeplabv3p_xception65_humanseg"
MODEL_NAME: "unet" MODEL_NAME: "unet"
MODEL_FILENAME: "__model__" MODEL_FILENAME: "__model__"
PARAMS_FILENAME: "__params__" PARAMS_FILENAME: "__params__"
...@@ -11,5 +11,5 @@ DEPLOY: ...@@ -11,5 +11,5 @@ DEPLOY:
NUM_CLASSES: 2 NUM_CLASSES: 2
CHANNELS : 3 CHANNELS : 3
PRE_PROCESSOR: "SegPreProcessor" PRE_PROCESSOR: "SegPreProcessor"
PREDICTOR_MODE: "ANALYSIS" PREDICTOR_MODE: "NATIVE"
BATCH_SIZE : 3 BATCH_SIZE : 3
\ No newline at end of file
...@@ -21,7 +21,6 @@ int main(int argc, char** argv) { ...@@ -21,7 +21,6 @@ int main(int argc, char** argv) {
// 2. get all the images with extension '.jpeg' at input_dir // 2. get all the images with extension '.jpeg' at input_dir
auto imgs = PaddleSolution::utils::get_directory_images(FLAGS_input_dir, ".jpeg|.jpg"); auto imgs = PaddleSolution::utils::get_directory_images(FLAGS_input_dir, ".jpeg|.jpg");
// 3. predict // 3. predict
predictor.predict(imgs); predictor.predict(imgs);
return 0; return 0;
......
...@@ -6,7 +6,7 @@ include(ExternalProject) ...@@ -6,7 +6,7 @@ include(ExternalProject)
message("${CMAKE_BUILD_TYPE}") message("${CMAKE_BUILD_TYPE}")
ExternalProject_Add( ExternalProject_Add(
yaml-cpp ext-yaml-cpp
GIT_REPOSITORY https://github.com/jbeder/yaml-cpp.git GIT_REPOSITORY https://github.com/jbeder/yaml-cpp.git
GIT_TAG e0e01d53c27ffee6c86153fa41e7f5e57d3e5c90 GIT_TAG e0e01d53c27ffee6c86153fa41e7f5e57d3e5c90
CMAKE_ARGS CMAKE_ARGS
...@@ -26,4 +26,4 @@ ExternalProject_Add( ...@@ -26,4 +26,4 @@ ExternalProject_Add(
# Disable install step # Disable install step
INSTALL_COMMAND "" INSTALL_COMMAND ""
LOG_DOWNLOAD ON LOG_DOWNLOAD ON
) )
\ No newline at end of file
...@@ -57,7 +57,7 @@ namespace PaddleSolution { ...@@ -57,7 +57,7 @@ namespace PaddleSolution {
} }
bool load_config(const std::string& conf_file) { bool load_config(const std::string& conf_file) {
reset(); reset();
YAML::Node config = YAML::LoadFile(conf_file); YAML::Node config = YAML::LoadFile(conf_file);
...@@ -79,8 +79,6 @@ namespace PaddleSolution { ...@@ -79,8 +79,6 @@ namespace PaddleSolution {
_img_type = config["DEPLOY"]["IMAGE_TYPE"].as<std::string>(); _img_type = config["DEPLOY"]["IMAGE_TYPE"].as<std::string>();
// 5. get class number // 5. get class number
_class_num = config["DEPLOY"]["NUM_CLASSES"].as<int>(); _class_num = config["DEPLOY"]["NUM_CLASSES"].as<int>();
// 6. get model_name
_model_name = config["DEPLOY"]["MODEL_NAME"].as<std::string>();
// 7. set model path // 7. set model path
_model_path = config["DEPLOY"]["MODEL_PATH"].as<std::string>(); _model_path = config["DEPLOY"]["MODEL_PATH"].as<std::string>();
// 8. get model file_name // 8. get model file_name
...@@ -101,7 +99,7 @@ namespace PaddleSolution { ...@@ -101,7 +99,7 @@ namespace PaddleSolution {
} }
void debug() const { void debug() const {
std::cout << "EVAL_CROP_SIZE: (" << _resize[0] << ", " << _resize[1] << ")" << std::endl; std::cout << "EVAL_CROP_SIZE: (" << _resize[0] << ", " << _resize[1] << ")" << std::endl;
std::cout << "MEAN: ["; std::cout << "MEAN: [";
...@@ -129,7 +127,6 @@ namespace PaddleSolution { ...@@ -129,7 +127,6 @@ namespace PaddleSolution {
std::cout << "DEPLOY.NUM_CLASSES: " << _class_num << std::endl; std::cout << "DEPLOY.NUM_CLASSES: " << _class_num << std::endl;
std::cout << "DEPLOY.CHANNELS: " << _channels << std::endl; std::cout << "DEPLOY.CHANNELS: " << _channels << std::endl;
std::cout << "DEPLOY.MODEL_PATH: " << _model_path << std::endl; std::cout << "DEPLOY.MODEL_PATH: " << _model_path << std::endl;
std::cout << "DEPLOY.MODEL_NAME: " << _model_name << std::endl;
std::cout << "DEPLOY.MODEL_FILENAME: " << _model_file_name << std::endl; std::cout << "DEPLOY.MODEL_FILENAME: " << _model_file_name << std::endl;
std::cout << "DEPLOY.PARAMS_FILENAME: " << _param_file_name << std::endl; std::cout << "DEPLOY.PARAMS_FILENAME: " << _param_file_name << std::endl;
std::cout << "DEPLOY.PRE_PROCESSOR: " << _pre_processor << std::endl; std::cout << "DEPLOY.PRE_PROCESSOR: " << _pre_processor << std::endl;
...@@ -152,8 +149,6 @@ namespace PaddleSolution { ...@@ -152,8 +149,6 @@ namespace PaddleSolution {
int _channels; int _channels;
// DEPLOY.MODEL_PATH // DEPLOY.MODEL_PATH
std::string _model_path; std::string _model_path;
// DEPLOY.MODEL_NAME
std::string _model_name;
// DEPLOY.MODEL_FILENAME // DEPLOY.MODEL_FILENAME
std::string _model_file_name; std::string _model_file_name;
// DEPLOY.PARAMS_FILENAME // DEPLOY.PARAMS_FILENAME
......
...@@ -3,7 +3,13 @@ ...@@ -3,7 +3,13 @@
#include <iostream> #include <iostream>
#include <vector> #include <vector>
#include <string> #include <string>
#ifdef _WIN32
#include <filesystem> #include <filesystem>
#else
#include <dirent.h>
#include <sys/types.h>
#endif
namespace PaddleSolution { namespace PaddleSolution {
namespace utils { namespace utils {
...@@ -14,7 +20,33 @@ namespace PaddleSolution { ...@@ -14,7 +20,33 @@ namespace PaddleSolution {
#endif #endif
return dir + seperator + path; return dir + seperator + path;
} }
#ifndef _WIN32
// scan a directory and get all files with input extensions
inline std::vector<std::string> get_directory_images(const std::string& path, const std::string& exts)
{
std::vector<std::string> imgs;
struct dirent *entry;
DIR *dir = opendir(path.c_str());
if (dir == NULL) {
closedir(dir);
return imgs;
}
while ((entry = readdir(dir)) != NULL) {
printf("%s\n", entry->d_name);
std::string item = entry->d_name;
auto ext = strrchr(entry->d_name, '.');
if (!ext || std::string(ext) == "." || std::string(ext) == "..") {
continue;
}
printf("%s\n", entry->d_name);
if (exts.find(ext) != std::string::npos) {
imgs.push_back(path_join(path, entry->d_name));
}
}
return imgs;
}
#else
// scan a directory and get all files with input extensions // scan a directory and get all files with input extensions
inline std::vector<std::string> get_directory_images(const std::string& path, const std::string& exts) inline std::vector<std::string> get_directory_images(const std::string& path, const std::string& exts)
{ {
...@@ -28,5 +60,6 @@ namespace PaddleSolution { ...@@ -28,5 +60,6 @@ namespace PaddleSolution {
} }
return imgs; return imgs;
} }
#endif
} }
} }
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册