Commit 875f5a0c authored by yongqiang

add workspace

Parent commit: 6d2b2a40
...@@ -237,6 +237,10 @@ void PaddlePredictor::SaveOptimizedModel(const std::string &model_dir, ...@@ -237,6 +237,10 @@ void PaddlePredictor::SaveOptimizedModel(const std::string &model_dir,
<< "The SaveOptimizedModel API is only supported by CxxConfig predictor."; << "The SaveOptimizedModel API is only supported by CxxConfig predictor.";
} }
// Out-of-line destructor: releases the device-level workspace buffer when a
// predictor is destroyed, so the memory is not held for the process lifetime.
// NOTE(review): DeviceInfo::Global() looks like a process-wide singleton, so
// destroying ANY predictor clears a workspace potentially shared by other
// still-live predictors — confirm this is safe with concurrent predictors.
PaddlePredictor::~PaddlePredictor() {
  lite::DeviceInfo::Global().CleanWorkspace();
}
template <typename ConfigT> template <typename ConfigT>
std::shared_ptr<PaddlePredictor> CreatePaddlePredictor(const ConfigT &) { std::shared_ptr<PaddlePredictor> CreatePaddlePredictor(const ConfigT &) {
return std::shared_ptr<PaddlePredictor>(); return std::shared_ptr<PaddlePredictor>();
......
...@@ -118,7 +118,7 @@ class LITE_API PaddlePredictor { ...@@ -118,7 +118,7 @@ class LITE_API PaddlePredictor {
LiteModelType model_type = LiteModelType::kProtobuf, LiteModelType model_type = LiteModelType::kProtobuf,
bool record_info = false); bool record_info = false);
virtual ~PaddlePredictor() = default; virtual ~PaddlePredictor();
protected: protected:
int threads_{1}; int threads_{1};
......
...@@ -77,6 +77,11 @@ class DeviceInfo { ...@@ -77,6 +77,11 @@ class DeviceInfo {
return reinterpret_cast<T*>(workspace_.mutable_data<int8_t>()); return reinterpret_cast<T*>(workspace_.mutable_data<int8_t>());
} }
bool ExtendWorkspace(size_t size); bool ExtendWorkspace(size_t size);
// Releases the memory held by the shared workspace tensor.
// Returns true on completion (kept as bool to match ExtendWorkspace's style).
bool CleanWorkspace() {
  // workspace_.Resize({1});
  // workspace_.mutable_data<int8_t>();
  workspace_.clear();
  // Bug fix: the function is declared bool but previously had no return
  // statement — flowing off the end of a value-returning function is
  // undefined behavior in C++.
  return true;
}
private: private:
int core_num_; int core_num_;
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册