提交 2cb83c8f 编写于 作者: mindspore-ci-bot，提交者: Gitee

!52 Revert "Op debug feature"

Merge pull request !52 from yanghaoran/r0.6
......@@ -35,9 +35,6 @@ class ModelRunner {
bool LoadDavinciModel(uint32_t device_id, uint64_t session_id, uint32_t model_id,
std::shared_ptr<DavinciModel> davinci_model, std::shared_ptr<ModelListener> listener);
bool DistributeTask(uint32_t model_id);
bool LoadModelComplete(uint32_t model_id);
const std::vector<uint32_t> &GetTaskIdList(uint32_t model_id) const;
......@@ -46,8 +43,6 @@ class ModelRunner {
const std::map<std::string, std::shared_ptr<RuntimeInfo>> &GetRuntimeInfoMap(uint32_t model_id) const;
void *GetModelHandle(uint32_t model_id) const;
bool UnloadModel(uint32_t model_id);
bool RunModel(uint32_t model_id, const InputData &input_data, OutputData *output_data);
......
......@@ -49,15 +49,6 @@ bool ModelRunner::LoadDavinciModel(uint32_t device_id, uint64_t session_id, uint
return true;
}
// Distributes the tasks of the runtime model registered under |model_id|.
// Logs an error and returns false when no model with that id is known;
// otherwise forwards to the model's own DistributeTask().
bool ModelRunner::DistributeTask(uint32_t model_id) {
  const auto it = runtime_models_.find(model_id);
  if (it != runtime_models_.end()) {
    return it->second->DistributeTask();
  }
  GELOGE(PARAM_INVALID, "Model id %u not found.", model_id);
  return false;
}
bool ModelRunner::LoadModelComplete(uint32_t model_id) {
auto model_iter = runtime_models_.find(model_id);
if (model_iter == runtime_models_.end()) {
......@@ -100,16 +91,6 @@ const std::map<std::string, std::shared_ptr<RuntimeInfo>> &ModelRunner::GetRunti
return model_iter->second->GetRuntimeInfoMap();
}
// Returns the raw handle of the runtime model registered under |model_id|.
// Emits a warning and yields nullptr when the id is not registered.
void *ModelRunner::GetModelHandle(uint32_t model_id) const {
  const auto found = runtime_models_.find(model_id);
  if (found != runtime_models_.end()) {
    return found->second->GetModelHandle();
  }
  GELOGW("Model id %u not found.", model_id);
  return nullptr;
}
bool ModelRunner::UnloadModel(uint32_t model_id) {
auto iter = runtime_models_.find(model_id);
if (iter != runtime_models_.end()) {
......
......@@ -283,16 +283,14 @@ bool RuntimeModel::Load(uint32_t device_id, uint64_t session_id, std::shared_ptr
}
GenerateTask(device_id, session_id, davinci_model);
return status;
}
// Distributes this model's tasks by loading them onto the device.
// Returns true on success; logs an error and returns false when
// LoadTask() fails.
bool RuntimeModel::DistributeTask() {
  // NOTE(review): the previous text invoked LoadTask() twice and carried
  // unreachable `return status;` statements after `return false;` /
  // `return true;` — apparently a collapsed diff. A single call is the
  // intended behavior; a second call would repeat its side effects.
  const bool status = LoadTask();
  if (!status) {
    GELOGE(FAILED, "DistributeTask failed");
    return false;
  }
  return true;
}
bool RuntimeModel::Run() {
......
......@@ -35,12 +35,10 @@ class RuntimeModel {
~RuntimeModel();
bool Load(uint32_t device_id, uint64_t session_id, std::shared_ptr<DavinciModel> &davinci_model);
bool DistributeTask();
bool LoadComplete();
const std::vector<uint32_t> &GetTaskIdList() const;
const std::vector<uint32_t> &GetStreamIdList() const;
const std::map<std::string, std::shared_ptr<RuntimeInfo>> &GetRuntimeInfoMap() const { return runtime_info_map_; }
rtModel_t GetModelHandle() const { return rt_model_handle_; }
bool Run();
bool CopyInputData(const InputData &input_data);
bool GetInputOutputDescInfo(bool zero_copy, std::vector<InputOutputDescInfo> *input_desc,
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册