Unverified commit 56e72b20, authored by JingZhuangzhuang, committed by GitHub

modify infer gpu memory strategy (#41427)

* modify infer gpu memory strategy

* modify infer gpu memory strategy
Parent 53409bcd
@@ -1061,13 +1061,6 @@ std::unique_ptr<PaddlePredictor> CreatePaddlePredictor<
     gflags.push_back("--cudnn_deterministic=True");
   }
-  // TODO(wilber): jetson tx2 may fail to run the model due to insufficient memory
-  // under the native_best_fit strategy. Modify the default allocation strategy to
-  // auto_growth. todo, find a more appropriate way to solve the problem.
-#ifdef WITH_NV_JETSON
-  gflags.push_back("--allocator_strategy=auto_growth");
-#endif
   // TODO(Shixiaowei02): Add a mandatory scheme to use the thread local
   // allocator when multi-stream is enabled.
   if (config.thread_local_stream_enabled()) {
......
@@ -364,11 +364,7 @@ PADDLE_DEFINE_EXPORTED_double(
  * Example:
  * Note: For selecting allocator policy of PaddlePaddle.
  */
-#ifdef PADDLE_ON_INFERENCE
-static constexpr char kDefaultAllocatorStrategy[] = "naive_best_fit";
-#else
 static constexpr char kDefaultAllocatorStrategy[] = "auto_growth";
-#endif
 PADDLE_DEFINE_EXPORTED_string(
     allocator_strategy, kDefaultAllocatorStrategy,
     "The allocation strategy, enum in [naive_best_fit, auto_growth]. "
......
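
Net effect of the two hunks: `auto_growth` becomes the default `allocator_strategy` for inference builds as well, so the Jetson-specific `WITH_NV_JETSON` override and the `PADDLE_ON_INFERENCE` special case are no longer needed. As the removed TODO notes, `auto_growth` grows the GPU memory pool on demand rather than reserving a large chunk up front, which is why it behaves better on memory-constrained devices such as Jetson TX2. Deployments that still prefer `naive_best_fit` can select it explicitly through the exported gflag. The following is a minimal sketch, not part of this commit, assuming a C++ application that links the Paddle inference library together with gflags; the include path, model directory, and GPU pool size are placeholders:

```cpp
// Minimal sketch (not part of this commit). Assumes the Paddle C++ inference
// library and gflags are linked in, so the exported allocator_strategy flag
// can be overridden from the command line; "./model_dir" is a placeholder.
#include <gflags/gflags.h>
#include "paddle_inference_api.h"  // include path depends on your install layout

int main(int argc, char* argv[]) {
  // e.g. ./infer_demo --allocator_strategy=naive_best_fit
  gflags::ParseCommandLineFlags(&argc, &argv, /*remove_flags=*/true);

  paddle_infer::Config config;
  config.SetModel("./model_dir");   // placeholder model directory
  config.EnableUseGpu(100, 0);      // 100 MB initial GPU memory pool on device 0
  auto predictor = paddle_infer::CreatePredictor(config);
  return predictor != nullptr ? 0 : 1;
}
```

Run as e.g. `./infer_demo --allocator_strategy=naive_best_fit`; with no flag, the new `auto_growth` default applies.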