Compiler error when the TensorRT library is not available
Created by: luotao1
../../libpaddle_fluid.a(tensorrt_engine_op.cc.o): In function `paddle::operators::TensorRTEngineKernel<paddle::platform::CPUDeviceContext, long>::Prepare(paddle::framework::ExecutionContext const&) const':
tensorrt_engine_op.cc:(.text._ZNK6paddle9operators20TensorRTEngineKernelINS_8platform16CPUDeviceContextElE7PrepareERKNS_9framework16ExecutionContextE[_ZNK6paddle9operators20TensorRTEngineKernelINS_8platform16CPUDeviceContextElE7PrepareERKNS_9framework16ExecutionContextE]+0xf2): undefined reference to `vtable for paddle::inference::tensorrt::TensorRTEngine'
tensorrt_engine_op.cc:(.text._ZNK6paddle9operators20TensorRTEngineKernelINS_8platform16CPUDeviceContextElE7PrepareERKNS_9framework16ExecutionContextE[_ZNK6paddle9operators20TensorRTEngineKernelINS_8platform16CPUDeviceContextElE7PrepareERKNS_9framework16ExecutionContextE]+0x2be): undefined reference to `paddle::inference::tensorrt::TensorRTEngine::FreezeNetwork()'
../../libpaddle_fluid.a(tensorrt_engine_op.cc.o): In function `paddle::operators::TensorRTEngineKernel<paddle::platform::CPUDeviceContext, long>::Compute(paddle::framework::ExecutionContext const&) const':
tensorrt_engine_op.cc:(.text._ZNK6paddle9operators20TensorRTEngineKernelINS_8platform16CPUDeviceContextElE7ComputeERKNS_9framework16ExecutionContextE[_ZNK6paddle9operators20TensorRTEngineKernelINS_8platform16CPUDeviceContextElE7ComputeERKNS_9framework16ExecutionContextE]+0x181): undefined reference to `paddle::inference::tensorrt::TensorRTEngine::SetInputFromCPU(std::string const&, void const*, unsigned long)'
tensorrt_engine_op.cc:(.text._ZNK6paddle9operators20TensorRTEngineKernelINS_8platform16CPUDeviceContextElE7ComputeERKNS_9framework16ExecutionContextE[_ZNK6paddle9operators20TensorRTEngineKernelINS_8platform16CPUDeviceContextElE7ComputeERKNS_9framework16ExecutionContextE]+0x1fd): undefined reference to `paddle::inference::tensorrt::TensorRTEngine::SetInputFromGPU(std::string const&, void const*, unsigned long)'
tensorrt_engine_op.cc:(.text._ZNK6paddle9operators20TensorRTEngineKernelINS_8platform16CPUDeviceContextElE7ComputeERKNS_9framework16ExecutionContextE[_ZNK6paddle9operators20TensorRTEngineKernelINS_8platform16CPUDeviceContextElE7ComputeERKNS_9framework16ExecutionContextE]+0x2c0): undefined reference to `paddle::inference::tensorrt::TensorRTEngine::GetITensor(std::string const&)'
tensorrt_engine_op.cc:(.text._ZNK6paddle9operators20TensorRTEngineKernelINS_8platform16CPUDeviceContextElE7ComputeERKNS_9framework16ExecutionContextE[_ZNK6paddle9operators20TensorRTEngineKernelINS_8platform16CPUDeviceContextElE7ComputeERKNS_9framework16ExecutionContextE]+0x430): undefined reference to `paddle::inference::tensorrt::TensorRTEngine::GetOutputInCPU(std::string const&, void*, unsigned long)'
tensorrt_engine_op.cc:(.text._ZNK6paddle9operators20TensorRTEngineKernelINS_8platform16CPUDeviceContextElE7ComputeERKNS_9framework16ExecutionContextE[_ZNK6paddle9operators20TensorRTEngineKernelINS_8platform16CPUDeviceContextElE7ComputeERKNS_9framework16ExecutionContextE]+0x4d9): undefined reference to `paddle::inference::tensorrt::TensorRTEngine::GetOutputInGPU(std::string const&, void*, unsigned long)'
../../libpaddle_fluid.a(tensorrt_engine_op.cc.o): In function `paddle::operators::TensorRTEngineKernel<paddle::platform::CPUDeviceContext, int>::Prepare(paddle::framework::ExecutionContext const&) const':
tensorrt_engine_op.cc:(.text._ZNK6paddle9operators20TensorRTEngineKernelINS_8platform16CPUDeviceContextEiE7PrepareERKNS_9framework16ExecutionContextE[_ZNK6paddle9operators20TensorRTEngineKernelINS_8platform16CPUDeviceContextEiE7PrepareERKNS_9framework16ExecutionContextE]+0xf2): undefined reference to `vtable for paddle::inference::tensorrt::TensorRTEngine'
tensorrt_engine_op.cc:(.text._ZNK6paddle9operators20TensorRTEngineKernelINS_8platform16CPUDeviceContextEiE7PrepareERKNS_9framework16ExecutionContextE[_ZNK6paddle9operators20TensorRTEngineKernelINS_8platform16CPUDeviceContextEiE7PrepareERKNS_9framework16ExecutionContextE]+0x2be): undefined reference to `paddle::inference::tensorrt::TensorRTEngine::FreezeNetwork()'
../../libpaddle_fluid.a(tensorrt_engine_op.cc.o): In function `paddle::operators::TensorRTEngineKernel<paddle::platform::CPUDeviceContext, int>::Compute(paddle::framework::ExecutionContext const&) const':
...