Merge pull request !52 from yanghaoran/r0.6
@@ -35,9 +35,6 @@ class ModelRunner {
   bool LoadDavinciModel(uint32_t device_id, uint64_t session_id, uint32_t model_id,
                         std::shared_ptr<DavinciModel> davinci_model, std::shared_ptr<ModelListener> listener);
-  bool DistributeTask(uint32_t model_id);
   bool LoadModelComplete(uint32_t model_id);
   const std::vector<uint32_t> &GetTaskIdList(uint32_t model_id) const;
@@ -46,8 +43,6 @@ class ModelRunner {
   const std::map<std::string, std::shared_ptr<RuntimeInfo>> &GetRuntimeInfoMap(uint32_t model_id) const;
-  void *GetModelHandle(uint32_t model_id) const;
   bool UnloadModel(uint32_t model_id);
   bool RunModel(uint32_t model_id, const InputData &input_data, OutputData *output_data);
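With DistributeTask and GetModelHandle dropped from the public interface, the remaining ModelRunner entry points are enough to drive a model end to end. A minimal caller sketch under that assumption follows; the singleton accessor, the include path, and the error handling are illustrative and not taken from this change.

// Sketch (illustration only, not part of this diff): task distribution is
// assumed to happen inside LoadDavinciModel() now, so a caller simply
// sequences load -> load-complete -> run -> unload.
#include "ge_runtime/model_runner.h"  // include path assumed for this sketch

bool LoadAndRunOnce(uint32_t device_id, uint64_t session_id, uint32_t model_id,
                    std::shared_ptr<DavinciModel> davinci_model,
                    std::shared_ptr<ModelListener> listener,
                    const InputData &input_data, OutputData *output_data) {
  ModelRunner &runner = ModelRunner::Instance();  // singleton accessor assumed
  if (!runner.LoadDavinciModel(device_id, session_id, model_id, davinci_model, listener)) {
    return false;
  }
  if (!runner.LoadModelComplete(model_id)) {
    (void)runner.UnloadModel(model_id);
    return false;
  }
  bool run_ok = runner.RunModel(model_id, input_data, output_data);
  (void)runner.UnloadModel(model_id);
  return run_ok;
}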
@@ -49,15 +49,6 @@ bool ModelRunner::LoadDavinciModel(uint32_t device_id, uint64_t session_id, uint
   return true;
 }
-bool ModelRunner::DistributeTask(uint32_t model_id) {
-  auto model_iter = runtime_models_.find(model_id);
-  if (model_iter == runtime_models_.end()) {
-    GELOGE(PARAM_INVALID, "Model id %u not found.", model_id);
-    return false;
-  }
-  return model_iter->second->DistributeTask();
-}
 bool ModelRunner::LoadModelComplete(uint32_t model_id) {
   auto model_iter = runtime_models_.find(model_id);
   if (model_iter == runtime_models_.end()) {
@@ -100,16 +91,6 @@ const std::map<std::string, std::shared_ptr<RuntimeInfo>> &ModelRunner::GetRunti
   return model_iter->second->GetRuntimeInfoMap();
 }
-void *ModelRunner::GetModelHandle(uint32_t model_id) const {
-  auto model_iter = runtime_models_.find(model_id);
-  if (model_iter == runtime_models_.end()) {
-    GELOGW("Model id %u not found.", model_id);
-    return nullptr;
-  }
-  return model_iter->second->GetModelHandle();
-}
 bool ModelRunner::UnloadModel(uint32_t model_id) {
   auto iter = runtime_models_.find(model_id);
   if (iter != runtime_models_.end()) {
@@ -283,16 +283,14 @@ bool RuntimeModel::Load(uint32_t device_id, uint64_t session_id, std::shared_ptr
   }
   GenerateTask(device_id, session_id, davinci_model);
-  return status;
-}
-bool RuntimeModel::DistributeTask() {
-  bool status = LoadTask();
+  status = LoadTask();
   if (!status) {
     GELOGE(FAILED, "DistributeTask failed");
-    return false;
+    return status;
   }
-  return true;
+  return status;
 }
 bool RuntimeModel::Run() {
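The hunk above folds task distribution back into RuntimeModel::Load: LoadTask() now runs right after GenerateTask(), and the function's return value reflects it, so there is no separate DistributeTask() step. At the caller level the difference looks roughly like the sketch below; the two-step "before" call is reconstructed from the removed lines, and the construction of the model object is illustrative only.

// Sketch (illustration only, not part of this change): RuntimeModel is assumed
// to be default-constructible here, and `davinci_model` to be prepared by the
// caller.
bool LoadWholeModel(uint32_t device_id, uint64_t session_id,
                    std::shared_ptr<DavinciModel> &davinci_model) {
  std::shared_ptr<RuntimeModel> model = std::make_shared<RuntimeModel>();

  // Before this change, distribution was a second step:
  //   bool ok = model->Load(device_id, session_id, davinci_model);
  //   ok = ok && model->DistributeTask();

  // After this change, Load() already runs LoadTask(), so one call covers both.
  bool ok = model->Load(device_id, session_id, davinci_model);
  if (ok) {
    ok = model->LoadComplete();  // completion stays a separate, explicit step
  }
  return ok;
}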
@@ -35,12 +35,10 @@ class RuntimeModel {
   ~RuntimeModel();
   bool Load(uint32_t device_id, uint64_t session_id, std::shared_ptr<DavinciModel> &davinci_model);
-  bool DistributeTask();
   bool LoadComplete();
   const std::vector<uint32_t> &GetTaskIdList() const;
   const std::vector<uint32_t> &GetStreamIdList() const;
   const std::map<std::string, std::shared_ptr<RuntimeInfo>> &GetRuntimeInfoMap() const { return runtime_info_map_; }
-  rtModel_t GetModelHandle() const { return rt_model_handle_; }
   bool Run();
   bool CopyInputData(const InputData &input_data);
   bool GetInputOutputDescInfo(bool zero_copy, std::vector<InputOutputDescInfo> *input_desc,
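Since the raw rtModel_t handle is no longer exposed, downstream code presumably relies on the query methods that remain in the class. A small usage sketch is below; GELOGI is assumed to be available alongside the GELOGE/GELOGW macros seen above, and `model` stands for a RuntimeModel that has already been loaded.

// Sketch (not part of this diff): introspecting a loaded model through the
// retained accessors rather than the removed raw rtModel_t handle.
void DumpModelSummary(const RuntimeModel &model) {
  const std::vector<uint32_t> &task_ids = model.GetTaskIdList();
  const std::vector<uint32_t> &stream_ids = model.GetStreamIdList();
  const std::map<std::string, std::shared_ptr<RuntimeInfo>> &info = model.GetRuntimeInfoMap();
  GELOGI("Model carries %zu tasks, %zu streams and %zu runtime info entries.",
         task_ids.size(), stream_ids.size(), info.size());
}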