diff --git a/paddle/fluid/inference/tests/book/test_inference_nlp.cc b/paddle/fluid/inference/tests/book/test_inference_nlp.cc
index 378e1620a090243e9429ffcf59c4a42d005830eb..f7788ccbf407fa9bf6ab4eed0d53b12f89ef3e34 100644
--- a/paddle/fluid/inference/tests/book/test_inference_nlp.cc
+++ b/paddle/fluid/inference/tests/book/test_inference_nlp.cc
@@ -91,6 +91,8 @@ void ThreadRunInfer(
     const std::vector<std::vector<const paddle::framework::LoDTensor*>>& jobs) {
   auto copy_program = std::unique_ptr<paddle::framework::ProgramDesc>(
       new paddle::framework::ProgramDesc(*inference_program));
+  auto& sub_scope = scope->NewScope();
+
   std::string feed_holder_name = "feed_" + paddle::string::to_string(tid);
   std::string fetch_holder_name = "fetch_" + paddle::string::to_string(tid);
   copy_program->SetFeedHolderName(feed_holder_name);
@@ -113,10 +115,11 @@ void ThreadRunInfer(
   auto start_ms = GetCurrentMs();
   for (size_t i = 0; i < inputs.size(); ++i) {
     feed_targets[feed_target_names[0]] = inputs[i];
-    executor->Run(*copy_program, scope, &feed_targets, &fetch_targets, true,
-                  true, feed_holder_name, fetch_holder_name);
+    executor->Run(*copy_program, &sub_scope, &feed_targets, &fetch_targets,
+                  true, true, feed_holder_name, fetch_holder_name);
   }
   auto stop_ms = GetCurrentMs();
+  scope->DeleteScope(&sub_scope);
   LOG(INFO) << "Tid: " << tid << ", process " << inputs.size()
             << " samples, avg time per sample: "
             << (stop_ms - start_ms) / inputs.size() << " ms";