Commit 8a23b571 authored by zhoufeng

Op debug feature

Signed-off-by: zhoufeng <zhoufeng54@huawei.com>
Parent 103f2d10
@@ -35,6 +35,9 @@ class ModelRunner {
   bool LoadDavinciModel(uint32_t device_id, uint64_t session_id, uint32_t model_id,
                         std::shared_ptr<DavinciModel> davinci_model, std::shared_ptr<ModelListener> listener);
+  bool DistributeTask(uint32_t model_id);
   bool LoadModelComplete(uint32_t model_id);
   const std::vector<uint32_t> &GetTaskIdList(uint32_t model_id) const;
@@ -43,6 +46,8 @@ class ModelRunner {
   const std::map<std::string, std::shared_ptr<RuntimeInfo>> &GetRuntimeInfoMap(uint32_t model_id) const;
+  void *GetModelHandle(uint32_t model_id) const;
   bool UnloadModel(uint32_t model_id);
   bool RunModel(uint32_t model_id, const InputData &input_data, OutputData *output_data);
......
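Taken together, the new ModelRunner entry points split model bring-up into explicit phases: LoadDavinciModel builds the runtime model and generates its tasks, DistributeTask loads those tasks, and LoadModelComplete finishes the load. A minimal caller-side sketch of that ordering follows; the ModelRunner::Instance() accessor, the ge::model_runner namespace, and the include path are assumptions about the surrounding ge_runtime code, not part of this diff.

```cpp
// Sketch only: Instance(), the namespace, and the include path are assumed;
// error handling is reduced to early returns.
#include <cstdint>
#include <memory>
#include "ge_runtime/model_runner.h"  // assumed include path

namespace mr = ge::model_runner;  // assumed namespace

bool BringUpModel(uint32_t device_id, uint64_t session_id, uint32_t model_id,
                  const std::shared_ptr<mr::DavinciModel> &davinci_model,
                  const std::shared_ptr<mr::ModelListener> &listener) {
  auto &runner = mr::ModelRunner::Instance();
  // Phase 1: create the runtime model and generate its task list.
  if (!runner.LoadDavinciModel(device_id, session_id, model_id, davinci_model, listener)) {
    return false;
  }
  // Phase 2: distribute (load) the generated tasks.
  if (!runner.DistributeTask(model_id)) {
    return false;
  }
  // Phase 3: signal that loading is complete.
  return runner.LoadModelComplete(model_id);
}
```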
@@ -49,6 +49,15 @@ bool ModelRunner::LoadDavinciModel(uint32_t device_id, uint64_t session_id, uint
   return true;
 }
 
+bool ModelRunner::DistributeTask(uint32_t model_id) {
+  auto model_iter = runtime_models_.find(model_id);
+  if (model_iter == runtime_models_.end()) {
+    GELOGE(PARAM_INVALID, "Model id %u not found.", model_id);
+    return false;
+  }
+  return model_iter->second->DistributeTask();
+}
+
 bool ModelRunner::LoadModelComplete(uint32_t model_id) {
   auto model_iter = runtime_models_.find(model_id);
   if (model_iter == runtime_models_.end()) {
@@ -91,6 +100,16 @@ const std::map<std::string, std::shared_ptr<RuntimeInfo>> &ModelRunner::GetRunti
   return model_iter->second->GetRuntimeInfoMap();
 }
 
+void *ModelRunner::GetModelHandle(uint32_t model_id) const {
+  auto model_iter = runtime_models_.find(model_id);
+  if (model_iter == runtime_models_.end()) {
+    GELOGW("Model id %u not found.", model_id);
+    return nullptr;
+  }
+  return model_iter->second->GetModelHandle();
+}
+
 bool ModelRunner::UnloadModel(uint32_t model_id) {
   auto iter = runtime_models_.find(model_id);
   if (iter != runtime_models_.end()) {
......
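GetModelHandle only logs a warning and returns nullptr for an unknown model id, so callers are expected to check the result before handing it to the runtime (for op-debug or dump registration). A hedged sketch of that check follows; PrepareOpDebug is a hypothetical caller-side helper, and the registration step itself is elided.

```cpp
// Hypothetical helper, not part of ModelRunner; it only shows the nullptr
// check around the new GetModelHandle accessor.
#include <cstdint>
#include "ge_runtime/model_runner.h"  // assumed include path

bool PrepareOpDebug(uint32_t model_id) {
  void *rt_model_handle = ge::model_runner::ModelRunner::Instance().GetModelHandle(model_id);
  if (rt_model_handle == nullptr) {
    // Unknown model id: GetModelHandle has already emitted a GELOGW warning.
    return false;
  }
  // ... register op-debug / dump resources against rt_model_handle here ...
  return true;
}
```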
@@ -283,14 +283,16 @@ bool RuntimeModel::Load(uint32_t device_id, uint64_t session_id, std::shared_ptr
   }
   GenerateTask(device_id, session_id, davinci_model);
-  status = LoadTask();
+  return status;
+}
+
+bool RuntimeModel::DistributeTask() {
+  bool status = LoadTask();
   if (!status) {
     GELOGE(FAILED, "DistributeTask failed");
-    return status;
+    return false;
   }
-  return status;
+  return true;
 }
bool RuntimeModel::Run() {
......
@@ -35,10 +35,12 @@ class RuntimeModel {
   ~RuntimeModel();
   bool Load(uint32_t device_id, uint64_t session_id, std::shared_ptr<DavinciModel> &davinci_model);
+  bool DistributeTask();
   bool LoadComplete();
   const std::vector<uint32_t> &GetTaskIdList() const;
   const std::vector<uint32_t> &GetStreamIdList() const;
   const std::map<std::string, std::shared_ptr<RuntimeInfo>> &GetRuntimeInfoMap() const { return runtime_info_map_; }
+  rtModel_t GetModelHandle() const { return rt_model_handle_; }
   bool Run();
   bool CopyInputData(const InputData &input_data);
   bool GetInputOutputDescInfo(bool zero_copy, std::vector<InputOutputDescInfo> *input_desc,
......
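At the RuntimeModel layer the same split is visible: Load() now stops after GenerateTask(), DistributeTask() performs the LoadTask() step, and GetModelHandle() exposes the rtModel_t in between. One plausible reading, given the commit title, is that this lets op-debug resources be attached to the model handle after loading but before tasks are distributed. The sketch below drives a RuntimeModel directly under that assumption; the debug-registration callback and the exact required ordering are assumptions, not taken from this diff.

```cpp
// Sketch only: the std::function hook stands in for whatever op-debug
// registration the caller performs; the load -> debug -> distribute ordering
// is an assumption drawn from the commit title, not from the diff itself.
#include <cstdint>
#include <functional>
#include <memory>
#include "ge_runtime/runtime_model.h"  // assumed include path

using ge::model_runner::DavinciModel;   // namespace assumed
using ge::model_runner::RuntimeModel;   // namespace assumed

bool LoadWithOpDebug(RuntimeModel &model, uint32_t device_id, uint64_t session_id,
                     std::shared_ptr<DavinciModel> &davinci_model,
                     const std::function<bool(rtModel_t)> &register_op_debug) {
  if (!model.Load(device_id, session_id, davinci_model)) {
    return false;
  }
  // The model handle is already valid here, before any task is distributed.
  if (register_op_debug && !register_op_debug(model.GetModelHandle())) {
    return false;
  }
  if (!model.DistributeTask()) {
    return false;
  }
  return model.LoadComplete();
}
```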