提交 8e9fffa9 编写于 作者: Y yangfei963158659 提交者: GitHub

Merge pull request #1261 from yangfei963158659/develop

Fix the OpenCL kernel file path resolution problem (kernels were loaded from a hard-coded "./cl_kernel/" directory; the path is now configurable)
......@@ -114,6 +114,9 @@ class CLEngine {
// Returns the OpenCL device handle stored at `index`; defaults to the
// first discovered device.
cl_device_id DeviceID(int index = 0) {
  return devices_[index];
}
// Accessor for the directory from which OpenCL kernel sources are loaded.
std::string GetCLPath() {
  std::string path = cl_path_;
  return path;
}
// Records the directory later used to resolve "cl_kernel" source files.
void setClPath(std::string cl_path) {
  cl_path_.assign(cl_path);
}
private:
CLEngine() { initialized_ = false; }
......@@ -129,6 +132,7 @@ class CLEngine {
cl_int status_;
std::string cl_path_;
std::unique_ptr<_cl_program, CLProgramDeleter> program_;
// bool SetClContext();
......
......@@ -58,7 +58,8 @@ class CLScope {
}
auto program = CLEngine::Instance()->CreateProgramWith(
context_.get(), "./cl_kernel/" + file_name);
context_.get(),
CLEngine::Instance()->GetCLPath() + "/cl_kernel/" + file_name);
DLOG << " --- begin build program -> " << file_name << " --- ";
CLEngine::Instance()->BuildProgram(program.get());
......
......@@ -29,7 +29,9 @@ PaddleMobilePredictor<Dtype, P>::PaddleMobilePredictor(
template <typename Dtype, Precision P>
bool PaddleMobilePredictor<Dtype, P>::Init(const PaddleMobileConfig &config) {
paddle_mobile_.reset(new PaddleMobile<Dtype, P>());
#ifdef PADDLE_MOBILE_CL
paddle_mobile_->SetCLPath(config.cl_path);
#endif
if (config.memory_pack.from_memory) {
DLOG << "load from memory!";
paddle_mobile_->LoadCombinedMemory(config.memory_pack.model_size,
......
......@@ -132,6 +132,7 @@ struct PaddleMobileConfig : public PaddlePredictor::Config {
int thread_num = 1;
std::string prog_file;
std::string param_file;
std::string cl_path;
struct PaddleModelMemoryPack memory_pack;
};
......
......@@ -158,6 +158,13 @@ void PaddleMobile<Dtype, P>::Predict_To(int end) {
}
#endif
#ifdef PADDLE_MOBILE_CL
/// Forwards the OpenCL kernel-source directory to the CLEngine singleton.
/// @param path directory that contains the "cl_kernel" subdirectory.
template <typename Dtype, Precision P>
void PaddleMobile<Dtype, P>::SetCLPath(std::string path) {
  // `path` is a by-value sink parameter: move it into the engine instead
  // of making another copy.
  framework::CLEngine::Instance()->setClPath(std::move(path));
}
#endif
template class PaddleMobile<CPU, Precision::FP32>;
template class PaddleMobile<FPGA, Precision::FP32>;
template class PaddleMobile<GPU_MALI, Precision::FP32>;
......
......@@ -26,6 +26,9 @@ limitations under the License. */
#include <utility>

#include "framework/load_ops.h"
#include "framework/loader.h"
#include "framework/tensor.h"
#ifdef PADDLE_MOBILE_CL
#include "framework/cl/cl_engine.h"
#endif
namespace paddle_mobile {
......@@ -68,6 +71,11 @@ class PaddleMobile {
void Predict_To(int end);
#endif
#ifdef PADDLE_MOBILE_CL
public:
void SetCLPath(std::string cl_path);
#endif
private:
std::shared_ptr<framework::Loader<Dtype, P>> loader_;
std::shared_ptr<framework::Executor<Dtype, P>> executor_;
......
......@@ -22,7 +22,7 @@ int main() {
auto time1 = paddle_mobile::time();
// auto isok = paddle_mobile.Load(std::string(g_mobilenet_detect) + "/model",
// std::string(g_mobilenet_detect) + "/params", true);
paddle_mobile.SetCLPath(".");
auto isok = paddle_mobile.Load(std::string(g_mobilenet), true);
if (isok) {
auto time2 = paddle_mobile::time();
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册