Commit d42117e7 authored by Yu Yang

Set NumThreads

Parent ba227df9
@@ -498,14 +498,15 @@ All parameter, weight, gradient are variables in Paddle.
m.def("reset_profiler", platform::ResetProfiler);
py::class_<ParallelExecutor>(m, "ParallelExecutor")
.def(
"__init__",
[](ParallelExecutor &self, const std::vector<platform::Place> &places,
.def("__init__",
[](ParallelExecutor &self, size_t num_threads,
const std::vector<platform::Place> &places,
const std::unordered_set<std::string> &params,
const ProgramDesc &startup_program,
const ProgramDesc &main_program, const std::string &loss_var_name,
Scope *scope) {
new (&self) ParallelExecutor(places, params, startup_program,
new (&self)
ParallelExecutor(num_threads, places, params, startup_program,
main_program, loss_var_name, scope);
})
.def("run", &ParallelExecutor::Run);