diff --git a/core/general-server/CMakeLists.txt b/core/general-server/CMakeLists.txt
index 88a5455d8d0f56a0e9820c424e12d6c6e2b641f6..e9e12a33f54db33c30c1dfcf729c513977630e56 100644
--- a/core/general-server/CMakeLists.txt
+++ b/core/general-server/CMakeLists.txt
@@ -14,17 +14,8 @@ endif()
 target_include_directories(serving PUBLIC
     ${CMAKE_CURRENT_BINARY_DIR}/../../core/predictor
-    )
-    include_directories(${CUDNN_ROOT}/include/)
-if(WITH_GPU)
-  target_link_libraries(serving -Wl,--whole-archive paddle_infence_engine
-                        -Wl,--no-whole-archive)
-endif()
-
-if(WITH_LITE)
-  target_link_libraries(serving -Wl,--whole-archive paddle_inference_engine
-                        -Wl,--no-whole-archive)
-endif()
+)
+include_directories(${CUDNN_ROOT}/include/)
 
 target_link_libraries(serving -Wl,--whole-archive paddle_inference_engine
                       -Wl,--no-whole-archive)
 
diff --git a/paddle_inference/paddle/include/paddle_engine.h b/paddle_inference/paddle/include/paddle_engine.h
index c9233c0488b48d85a2bec7f687865090bd7a6507..ea96b67aa188a1b283a6f96c1671db7f9323bcc9 100644
--- a/paddle_inference/paddle/include/paddle_engine.h
+++ b/paddle_inference/paddle/include/paddle_engine.h
@@ -153,12 +153,12 @@ class PaddleInferenceEngine : public PaddleEngineBase {
       config.EnableMemoryOptim();
     }
 
-    if (engine_conf.has_encrypted_model() && engine_conf.encrypted_mode()) {
+    if (engine_conf.has_encrypted_model() && engine_conf.encrypted_model()) {
      // decrypt model
      std::string model_buffer, params_buffer, key_buffer;
-      ReadBinaryFile(model_path + "encrypt_model", &model_buffer);
-      ReadBinaryFile(model_path + "encrypt_params", &params_buffer);
-      ReadBinaryFile(model_path + "key", &key_buffer);
+      predictor::ReadBinaryFile(model_path + "encrypt_model", &model_buffer);
+      predictor::ReadBinaryFile(model_path + "encrypt_params", &params_buffer);
+      predictor::ReadBinaryFile(model_path + "key", &key_buffer);
      auto cipher = paddle::MakeCipher("");
      std::string real_model_buffer = cipher->Decrypt(model_buffer, key_buffer);