diff --git a/paddle/fluid/inference/api/paddle_analysis_config.h b/paddle/fluid/inference/api/paddle_analysis_config.h
index 0ed5380e6755c3b4c0078896799e478a86dfaab0..f9587c099573d7f255b3edada0ab081ede864878 100644
--- a/paddle/fluid/inference/api/paddle_analysis_config.h
+++ b/paddle/fluid/inference/api/paddle_analysis_config.h
@@ -367,7 +367,7 @@ struct PD_INFER_DECL AnalysisConfig {
   ///
   /// \param device_id device_id the custom device to use (default is 0).
   ///
-  void EnableCustomDevice(const std::string& device_type, int device_id);
+  void EnableCustomDevice(const std::string& device_type, int device_id = 0);
   ///
   /// \brief Turn on ONNXRuntime.
   ///
diff --git a/paddle/fluid/pybind/inference_api.cc b/paddle/fluid/pybind/inference_api.cc
index 9b99cad869315743b7396f06705b2f376bc60057..60f1bfd9216d4629c0a17d66eb8bd295dcff156c 100644
--- a/paddle/fluid/pybind/inference_api.cc
+++ b/paddle/fluid/pybind/inference_api.cc
@@ -662,6 +662,10 @@ void BindAnalysisConfig(py::module *m) {
       .def("set_xpu_device_id",
           &AnalysisConfig::SetXpuDeviceId,
           py::arg("device_id") = 0)
+      .def("enable_custom_device",
+           &AnalysisConfig::EnableCustomDevice,
+           py::arg("device_type"),
+           py::arg("device_id") = 0)
       .def("enable_npu", &AnalysisConfig::EnableNpu, py::arg("device_id") = 0)
       .def("enable_ipu",
           &AnalysisConfig::EnableIpu,
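
Not part of the patch itself, but a minimal C++ usage sketch of the effect of the new default argument on `AnalysisConfig::EnableCustomDevice`. The device type string `"custom_cpu"` is only a placeholder (any registered custom device plugin name would work), and the include path assumes building against the fluid inference headers touched by this diff:

```cpp
// Sketch only: exercising the new default argument on
// AnalysisConfig::EnableCustomDevice introduced by this patch.
#include "paddle/fluid/inference/api/paddle_analysis_config.h"

int main() {
  paddle::AnalysisConfig config;

  // Before this patch the device id had to be passed explicitly:
  //   config.EnableCustomDevice("custom_cpu", 0);
  // With the default argument, omitting the id selects device 0.
  config.EnableCustomDevice("custom_cpu");  // "custom_cpu" is a placeholder

  return 0;
}
```

The second hunk exposes the same call to Python as `enable_custom_device(device_type, device_id=0)`, so the device id can be omitted from Python as well.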