diff --git a/paddle/contrib/inference/demo_ci/run.sh b/paddle/contrib/inference/demo_ci/run.sh
index a14b72dcbcd534544cf3a188f40d239bb6f2da59..554590407ae993bf4486b00f1320d790b3903617 100755
--- a/paddle/contrib/inference/demo_ci/run.sh
+++ b/paddle/contrib/inference/demo_ci/run.sh
@@ -2,7 +2,12 @@ set -x
 PADDLE_ROOT=$1
 WITH_MKL=$2
 WITH_GPU=$3
-if [ $3 == "ON" ]; then
+if [ "$2" == "ON" ]; then
+  # You can export yourself if move the install path
+  MKL_LIB=${PADDLE_ROOT}/build/fluid_install_dir/third_party/install/mklml/lib
+  export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${MKL_LIB}
+fi
+if [ "$3" == "ON" ]; then
   use_gpu_list='true false'
 else
   use_gpu_list='false'
diff --git a/paddle/fluid/inference/CMakeLists.txt b/paddle/fluid/inference/CMakeLists.txt
index 6c80c69c100a9949be4e59b7e0b3da899d029b8a..86643b9aa111a9d73c184097e0c43bbec30c6b31 100644
--- a/paddle/fluid/inference/CMakeLists.txt
+++ b/paddle/fluid/inference/CMakeLists.txt
@@ -14,10 +14,6 @@ cc_library(paddle_fluid_api
 
 get_property(fluid_modules GLOBAL PROPERTY FLUID_MODULES)
 
-if(WITH_CONTRIB)
-  set(fluid_modules "${fluid_modules}" paddle_inference_api)
-endif()
-
 # Create static library
 cc_library(paddle_fluid DEPS ${fluid_modules} paddle_fluid_api)
 if(NOT APPLE)