diff --git a/cmake/configure.cmake b/cmake/configure.cmake
index 323b1632e9fe8007eaf3a8da507fc188721796b5..51a267074dfc9d2a9d3c21b91cb9a65e2c1b1ada 100644
--- a/cmake/configure.cmake
+++ b/cmake/configure.cmake
@@ -48,6 +48,7 @@ if(WIN32)
     SET(CMAKE_C_RESPONSE_FILE_LINK_FLAG "@")
     SET(CMAKE_CXX_RESPONSE_FILE_LINK_FLAG "@")
+    add_definitions(-DPADDLE_DLL_INFERENCE) # set definition for the dll export
 
     if (NOT MSVC)
         message(FATAL "Windows build only support msvc. Which was binded by the nvcc compiler of NVIDIA.")
 
diff --git a/paddle/fluid/inference/CMakeLists.txt b/paddle/fluid/inference/CMakeLists.txt
index 5887a330687a524879bfa1a0743ee0a635fbb8f1..a78fe41552b7cb1a42ce924fc604db8e0dafc0e7 100644
--- a/paddle/fluid/inference/CMakeLists.txt
+++ b/paddle/fluid/inference/CMakeLists.txt
@@ -17,10 +17,6 @@ if(WITH_TESTING)
   include(tests/test.cmake) # some generic cmake function for inference
 endif()
 
-if(WIN32)
-  add_definitions(-DPADDLE_DLL_INFERENCE)
-endif()
-
 # TODO(panyx0718): Should this be called paddle_fluid_inference_api_internal?
 cc_library(paddle_fluid_api
     SRCS io.cc
diff --git a/paddle/fluid/inference/api/paddle_infer_declare.h b/paddle/fluid/inference/api/paddle_infer_declare.h
index e8525f440fe7f2d54d045eedb79aed228513e550..39c9653f16cefb71a9f2a0ddcc08723d189d411c 100644
--- a/paddle/fluid/inference/api/paddle_infer_declare.h
+++ b/paddle/fluid/inference/api/paddle_infer_declare.h
@@ -17,7 +17,11 @@
 #if defined(_WIN32)
 #ifndef PD_INFER_DECL
 #ifdef PADDLE_DLL_INFERENCE
+#ifndef PADDLE_ON_INFERENCE
+#define PD_INFER_DECL
+#else
 #define PD_INFER_DECL __declspec(dllexport)
+#endif // PADDLE_ON_INFERENCE
 #else
 #define PD_INFER_DECL __declspec(dllimport)
 #endif // PADDLE_DLL_INFERENCE