
graph_builder.cc

/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "graph/build/graph_builder.h"
#include "common/ge/ge_util.h"
#include "common/helper/model_helper.h"
#include "graph/build/logical_stream_allocator.h"
#include "graph/build/run_context.h"
#include "graph/build/stream_graph_optimizer.h"
#include "graph/common/ge_call_wrapper.h"
#include "graph/ge_context.h"
#include "graph/manager/graph_var_manager.h"
#include "graph/passes/mark_same_addr_pass.h"
#include "graph/utils/node_utils.h"
#include "graph/utils/type_utils.h"
#include "init/gelib.h"
#include "model/ge_model.h"
#include "graph/ge_context.h"
#include "opskernel_manager/ops_kernel_builder_manager.h"

using domi::BuildMode;

namespace {
const int32_t kInvalidPerfLevel = -1;
enum NodeType { kSubgraphData, kSubgraphNode, kOthers };
}  // namespace

namespace ge {
NodeType TransferNodeType(const NodePtr &node) {
  const std::string type = node->GetType();
  if (type == ge::DATA) {
    if (node->GetOwnerComputeGraph()->GetParentNode() == nullptr) {
      GELOGD("access src data node:%s", node->GetName().c_str());
      return kOthers;
    }
    GELOGD("access subgraph input node:%s", node->GetName().c_str());
    return kSubgraphData;
  } else if (type == PARTITIONEDCALL) {
    GELOGD("access subgraph node:%s", node->GetName().c_str());
    return kSubgraphNode;
  }
  GELOGD("access other node:%s", node->GetName().c_str());
  return kOthers;
}

Status HandleSubgraphNode(NodePtr &src_node, OutDataAnchorPtr &src_out_anchor) {
  auto subgraph = NodeUtils::GetSubgraph(*src_node, 0);
  GE_CHECK_NOTNULL(subgraph);
  const NodePtr &net_output_node = subgraph->FindFirstNodeMatchType(NETOUTPUT);
  GE_CHECK_NOTNULL(net_output_node);
  const InDataAnchorPtr &in_data_anchor = net_output_node->GetInDataAnchor(src_out_anchor->GetIdx());
  GE_CHECK_NOTNULL(in_data_anchor);
  const OutDataAnchorPtr &peer_out_anchor = in_data_anchor->GetPeerOutAnchor();
  GE_CHECK_NOTNULL(peer_out_anchor);

  src_node = peer_out_anchor->GetOwnerNode();
  src_out_anchor = peer_out_anchor;
  return SUCCESS;
}

Status HandleSubgraphDataNode(NodePtr &src_node, OutDataAnchorPtr &src_out_anchor) {
  uint32_t index = 0;
  if (!AttrUtils::GetInt(src_node->GetOpDesc(), ATTR_NAME_PARENT_NODE_INDEX, index)) {
    GELOGE(FAILED, "Get attr ATTR_NAME_PARENT_NODE_INDEX failed, node:%s.", src_node->GetName().c_str());
    return FAILED;
  }
  const NodePtr &parent_node = src_node->GetOwnerComputeGraph()->GetParentNode();
  GE_CHECK_NOTNULL(parent_node);
  const InDataAnchorPtr &in_data_anchor = parent_node->GetInDataAnchor(index);
  GE_CHECK_NOTNULL(in_data_anchor);
  const OutDataAnchorPtr &peer_out_anchor = in_data_anchor->GetPeerOutAnchor();
  GE_CHECK_NOTNULL(peer_out_anchor);

  src_node = peer_out_anchor->GetOwnerNode();
  src_out_anchor = peer_out_anchor;
  return SUCCESS;
}

GraphBuilder::GraphBuilder() : build_mode_(BuildMode::GEN_TASK_WITH_FUSION), hcom_parallel_(false) {}

void GraphBuilder::SetOptions(const ge::GraphManagerOptions &options) {
  stream_max_parallel_num_ = options.stream_max_parallel_num;
  hcom_parallel_ = options.hcom_parallel;

  if (options.perf_level == kInvalidPerfLevel) {
    build_mode_ = static_cast<int>(BuildMode::GEN_TASK_WITH_FUSION);
  } else {
    build_mode_ = options.perf_level;
  }
}

Status GraphBuilder::CalcOpParam(const ge::ComputeGraphPtr &graph) {
  GELOGI("Begin to calculate op running param.");
  GE_CHECK_NOTNULL(graph);
  auto instance_ptr = ge::GELib::GetInstance();
  if (instance_ptr == nullptr || !instance_ptr->InitFlag()) {
    GELOGE(GE_CLI_GE_NOT_INITIALIZED, "GraphBuilder: GE is not initialized");
    return GE_CLI_GE_NOT_INITIALIZED;
  }

  for (const auto &node_ptr : graph->GetNodes(graph->GetGraphUnknownFlag())) {
    GE_CHECK_NOTNULL(node_ptr->GetOpDesc());
    std::string kernel_lib_name = node_ptr->GetOpDesc()->GetOpKernelLibName();
    if (kernel_lib_name.empty()) {
      // reset op kernel lib
      (void)instance_ptr->DNNEngineManagerObj().GetDNNEngineName(node_ptr);
      kernel_lib_name = node_ptr->GetOpDesc()->GetOpKernelLibName();
      if (kernel_lib_name.empty()) {
        GELOGE(INTERNAL_ERROR, "Get node:%s(%s) kernel lib failed.", node_ptr->GetName().c_str(),
               node_ptr->GetType().c_str());
        return INTERNAL_ERROR;
      }
    }

    auto ret = SetInputSize(node_ptr);
    if (ret != SUCCESS) {
      GELOGE(ret, "Set node inputDesc size failed, node name is %s", node_ptr->GetName().c_str());
      return ret;
    }

    ret = OpsKernelBuilderManager::Instance().CalcOpRunningParam(*node_ptr);
    if (ret != SUCCESS) {
      GELOGE(ret, "Calculate op running param failed, node name is %s", node_ptr->GetName().c_str());
      return ret;
    }
    GE_CHK_STATUS_RET(AddOutputMemTypeForNode(node_ptr));
  }

  auto parent_node = graph->GetParentNode();
  if (parent_node == nullptr) {
    GELOGI("Graph[%s] do not have parent node, no need update parent node output size.", graph->GetName().c_str());
    return SUCCESS;
  }

  GE_CHK_STATUS_RET(UpdateParentNodeOutputSize(graph, parent_node));
  GELOGI("Success to calculate op running param.");
  return SUCCESS;
}

Status GraphBuilder::UpdateParentNodeOutputSize(const ge::ComputeGraphPtr &graph, ge::NodePtr &parent_node_ptr) {
  GELOGI("Begin to update parent node[%s] of graph[%s] output size.", parent_node_ptr->GetName().c_str(),
         graph->GetName().c_str());
  auto parent_op_desc = parent_node_ptr->GetOpDesc();
  GE_CHECK_NOTNULL(parent_op_desc);
  bool is_unknown_shape = graph->GetGraphUnknownFlag();
  if (is_unknown_shape) {
    GELOGI("Current graph[%s] is unknown, no need to update parent node[%s] output size.", graph->GetName().c_str(),
           parent_node_ptr->GetName().c_str());
    return SUCCESS;
  }

  for (const auto &node_ptr : graph->GetDirectNode()) {
    if (node_ptr->GetType() != NETOUTPUT) {
      continue;
    }

    auto op_desc = node_ptr->GetOpDesc();
    GE_CHECK_NOTNULL(op_desc);
    for (const auto &in_data_anchor : node_ptr->GetAllInDataAnchors()) {
      auto index = in_data_anchor->GetIdx();
      ge::GeTensorDesc desc_temp = op_desc->GetInputDesc(index);
      uint32_t parent_index = 0;
      if (!AttrUtils::GetInt(desc_temp, ATTR_NAME_PARENT_NODE_INDEX, parent_index)) {
        GELOGI("NetOutput input tensor %d, attr %s not found.", index, ATTR_NAME_PARENT_NODE_INDEX.c_str());
        continue;
      }

      int64_t size = 0;
      GE_IF_BOOL_EXEC(ge::TensorUtils::GetSize(desc_temp, size) != SUCCESS, GELOGI("Get size failed!"));
      ge::GeTensorDesc parent_desc_temp = parent_op_desc->GetOutputDesc(parent_index);
      ge::TensorUtils::SetSize(parent_desc_temp, size);
      GE_CHK_STATUS_RET(parent_op_desc->UpdateOutputDesc(parent_index, parent_desc_temp));
      GELOGI("Update parent node[%s] output index[%u] to size[%ld].", parent_node_ptr->GetName().c_str(), parent_index,
             size);
    }
  }
  return SUCCESS;
}

Status GraphBuilder::Build(ComputeGraphPtr &comp_graph, std::vector<SubGraphInfoPtr> &subgraph_ptr_list,
                           GeRootModelPtr &ge_root_model_ptr, uint64_t session_id) {
  GELOGI("Start to build model.");
  if (comp_graph == nullptr) {
    GELOGE(GE_GRAPH_PARAM_NULLPTR, "Graph build comp_graph is null.");
    return GE_GRAPH_PARAM_NULLPTR;
  }

  ge_root_model_ptr = MakeShared<ge::GeRootModel>(comp_graph);
  if (ge_root_model_ptr == nullptr) {
    return MEMALLOC_FAILED;
  }
  GeModelPtr ge_model_ptr = nullptr;
  bool is_dynamic_shape = false;
  // To be compatible with the old process, do not verify the return value temporarily.
  (void)AttrUtils::GetBool(comp_graph, ATTR_NAME_DYNAMIC_SHAPE_PARTITIONED, is_dynamic_shape);
  if (is_dynamic_shape) {
    GE_CHK_STATUS_RET(
        BuildForDynamicShapeGraph(comp_graph, subgraph_ptr_list, ge_root_model_ptr, ge_model_ptr, session_id),
        "Build for dynamic shape graph failed.");
    return SUCCESS;
  }

  GE_CHK_STATUS_RET(BuildForKnownShapeGraph(comp_graph, subgraph_ptr_list, ge_model_ptr, session_id),
                    "Build for known shape graph failed.");
  ge_root_model_ptr->SetSubgraphInstanceNameToModel(comp_graph->GetName(), ge_model_ptr);
  return SUCCESS;
}

Status GraphBuilder::BuildForKnownShapeGraph(ComputeGraphPtr &comp_graph, std::vector<SubGraphInfoPtr> &subgraph_list,
                                             GeModelPtr &ge_model_ptr, uint64_t session_id) {
  if (ge::GetContext().GetHostExecFlag()) {
    GE_CHK_STATUS_RET(BuildForHostCpuGraph(comp_graph, ge_model_ptr, session_id), "Build for host-cpu graph failed.");
    return SUCCESS;
  }

  GELOGI("Begin to build known shape graph[%s].", comp_graph->GetName().c_str());
  Status ret = SecondPartition(comp_graph, subgraph_list);
  GE_CHK_STATUS_RET(ret, "Graph[%s] second partition Failed.", comp_graph->GetName().c_str());
  auto subgraph_map = graph_partitioner_.GetSubGraphMap();

  GE_TIMESTAMP_START(BuildSubgraph);
  ge::ModelBuilder builder(session_id, comp_graph, subgraph_map, stream_max_parallel_num_, hcom_parallel_, build_mode_);
  GE_DUMP(comp_graph, "BeforePreBuildModel");
  GE_TIMESTAMP_START(PreBuildModel);
  GE_CHK_STATUS_RET(builder.PreBuildModel(), "Graph[%s] builder PreBuildModel() return fail.",
                    comp_graph->GetName().c_str());
  GE_TIMESTAMP_END(PreBuildModel, "GraphBuilder::PreBuildModel");

  GE_DUMP(comp_graph, "AfterPreBuildModel");
  GE_TIMESTAMP_START(CalcOpParam);
  GE_CHK_STATUS_RET(CalcOpParam(comp_graph), "Graph[%s] builder CalcOpParam() return fail.",
                    comp_graph->GetName().c_str());
  GE_TIMESTAMP_END(CalcOpParam, "GraphBuilder::CalcOpParam");
  GE_DUMP(comp_graph, "AfterCalcOpParam");

  ModelPtr model_ptr = MakeShared<ge::Model>();
  if (model_ptr == nullptr) {
    return MEMALLOC_FAILED;
  }
  GE_TIMESTAMP_START(BuildModelForGetTask);
  GE_CHK_STATUS_RET(builder.BuildModelForGetTask(*model_ptr), "Graph[%s] builder BuildModelForGetTask() return fail.",
                    comp_graph->GetName().c_str());
  GE_TIMESTAMP_END(BuildModelForGetTask, "GraphBuilder::BuildModelForGetTask");
  GE_DUMP(comp_graph, "AfterBuildModel");

  GE_TIMESTAMP_START(GetTaskInfo);
  ret = GetTaskInfo(builder, model_ptr, comp_graph, subgraph_map, session_id);
  GE_TIMESTAMP_END(GetTaskInfo, "GraphBuilder::GetTaskInfo");
  GE_DUMP(comp_graph, "AfterGetTask");
  if (ret != SUCCESS) {
    GELOGE(ret, "Graph[%s] builder GetTaskInfo() return fail.", comp_graph->GetName().c_str());
    return ret;
  }

  ge_model_ptr = MakeShared<ge::GeModel>();
  if (ge_model_ptr == nullptr) {
    return MEMALLOC_FAILED;
  }
  GE_CHK_STATUS_RET(builder.SaveDataToModel(*model_ptr, *ge_model_ptr),
                    "Graph[%s] builder SaveDataToModel() return fail.", comp_graph->GetName().c_str());
  GELOGI("Success to build graph[%s] model.", comp_graph->GetName().c_str());
  GE_TIMESTAMP_END(BuildSubgraph, "GraphBuilder::Build");
  return SUCCESS;
}

Status GraphBuilder::BuildForUnknownShapeGraph(ComputeGraphPtr &comp_graph, GeModelPtr &ge_model_ptr,
                                               uint64_t session_id) {
  GELOGI("Begin to build unknown shape graph[%s].", comp_graph->GetName().c_str());
  GE_TIMESTAMP_START(CalcOpParam);
  GE_CHK_STATUS_RET(CalcOpParam(comp_graph), "Graph[%s] builder CalcOpParam() return fail.",
                    comp_graph->GetName().c_str());
  GE_TIMESTAMP_END(CalcOpParam, "GraphBuilder::CalcOpParam");
  GE_DUMP(comp_graph, "AfterCalcOpParam");

  Graph2SubGraphInfoList subgraph_map;
  ge::ModelBuilder builder(session_id, comp_graph, subgraph_map, stream_max_parallel_num_, hcom_parallel_, build_mode_);
  ModelPtr model_ptr = MakeShared<ge::Model>();
  if (model_ptr == nullptr) {
    return MEMALLOC_FAILED;
  }
  GE_TIMESTAMP_START(BuildModelForGetDynShapeTask);
  GE_CHK_STATUS_RET(builder.BuildModelForGetDynShapeTask(*model_ptr),
                    "Graph[%s] builder BuildModelForGetDynShapeTask() return fail.", comp_graph->GetName().c_str());
  GE_TIMESTAMP_END(BuildModelForGetDynShapeTask, "GraphBuilder::BuildModelForGetDynShapeTask");

  GE_TIMESTAMP_START(GetTaskInfo);
  Status ret = GetTaskInfo(builder, model_ptr, comp_graph, subgraph_map, session_id);
  GE_TIMESTAMP_END(GetTaskInfo, "GraphBuilder::GetTaskInfo");

  GraphUtils::DumpGEGraph(comp_graph, "AfterGetTask");
  GraphUtils::DumpGEGraphToOnnx(*comp_graph, "AfterGetTask");
  if (ret != SUCCESS) {
    GELOGE(ret, "Graph[%s] builder GetTaskInfo() return fail.", comp_graph->GetName().c_str());
    return ret;
  }

  ge_model_ptr = MakeShared<ge::GeModel>();
  if (ge_model_ptr == nullptr) {
    return MEMALLOC_FAILED;
  }
  GE_CHK_STATUS_RET(builder.SaveDataToModel(*model_ptr, *ge_model_ptr),
                    "Graph[%s] builder SaveDataToModel() return fail.", comp_graph->GetName().c_str());
  GELOGI("Success to build graph[%s] model.", comp_graph->GetName().c_str());
  return SUCCESS;
}

Status GraphBuilder::BuildForHostCpuGraph(ComputeGraphPtr &comp_graph, GeModelPtr &ge_model_ptr, uint64_t session_id) {
  return BuildForUnknownShapeGraph(comp_graph, ge_model_ptr, session_id);
}

Status GraphBuilder::BuildForDynamicShapeGraph(ComputeGraphPtr &comp_graph,
                                               std::vector<SubGraphInfoPtr> &subgraph_ptr_list,
                                               GeRootModelPtr &ge_root_model_ptr, GeModelPtr &ge_model_ptr,
                                               uint64_t session_id) {
  GELOGI("Start to build BuildForDynamicShape for dynamic shape.");
  // Update Root Graph Data size
  for (auto &node : comp_graph->GetDirectNode()) {
    auto op_desc = node->GetOpDesc();
    GE_CHECK_NOTNULL(op_desc);
    op_desc->SetStreamId(kInvalidStream);
    if (node->GetType() == DATA) {
      GE_CHK_STATUS_RET(CalcDynShapeRootGraphDataSize(op_desc), "Calc dynamic shape root graph data[%s] size failed.",
                        op_desc->GetName().c_str());
    }
  }

  for (auto &sub_graph : comp_graph->GetAllSubgraphs()) {
    // exclude functional subgraph in known subgraph
    if (sub_graph->GetParentGraph() != comp_graph && !sub_graph->GetParentGraph()->GetGraphUnknownFlag()) {
      continue;
    }
    if (sub_graph->GetGraphUnknownFlag()) {
      // unknown shape build flow
      GE_CHK_STATUS_RET(BuildForUnknownShapeGraph(sub_graph, ge_model_ptr, session_id),
                        "Build for unknown shape graph failed.");
    } else {
      // reset functional subgraph parent graph as known subgraph
      for (const auto &node : sub_graph->GetDirectNode()) {
        for (const auto &sub_graph_name : node->GetOpDesc()->GetSubgraphInstanceNames()) {
          auto sub_sub_graph = comp_graph->GetSubgraph(sub_graph_name);
          GE_CHK_STATUS_RET(sub_graph->AddSubgraph(sub_sub_graph), "Failed add subgraph to known graph.");
        }
      }
      // known shape build flow
      GE_CHK_STATUS_RET(BuildForKnownShapeGraph(sub_graph, subgraph_ptr_list, ge_model_ptr, session_id),
                        "Build for known shape graph failed.");
    }
    ge_root_model_ptr->SetSubgraphInstanceNameToModel(sub_graph->GetName(), ge_model_ptr);
  }
  return SUCCESS;
}

Status GraphBuilder::GetTaskInfo(const ge::ModelBuilder &builder, const ModelPtr &model_ptr,
                                 ComputeGraphPtr &comp_graph, Graph2SubGraphInfoList &subgraph_map,
                                 uint64_t session_id) {
  GE_CHECK_NOTNULL(model_ptr);
  GE_CHECK_NOTNULL(comp_graph);

  int64_t memory_size = 0;
  if (!AttrUtils::GetInt(model_ptr, ATTR_MODEL_MEMORY_SIZE, memory_size)) {
    GELOGE(INTERNAL_ERROR, "Get memory size fail.");
    return INTERNAL_ERROR;
  }
  int64_t p2p_memory_size = 0;
  if (!AttrUtils::GetInt(model_ptr, ATTR_MODEL_P2P_MEMORY_SIZE, p2p_memory_size)) {
    GELOGE(INTERNAL_ERROR, "Get p2p memory size fail.");
    return INTERNAL_ERROR;
  }
  int64_t weight_size = 0;
  if (!AttrUtils::GetInt(model_ptr, ATTR_MODEL_WEIGHT_SIZE, weight_size)) {
    GELOGE(INTERNAL_ERROR, "Get weight memory size fail.");
    return INTERNAL_ERROR;
  }

  auto var_manager = VarManager::Instance(session_id);
  auto *get_mem_base = reinterpret_cast<uint8_t *>(reinterpret_cast<uintptr_t>(var_manager->GetVarMemMaxSize()));
  uint8_t *get_weight_mem_base = get_mem_base;
  if (weight_size > 0) {
    get_weight_mem_base = get_mem_base + memory_size + p2p_memory_size;
  }
  std::map<int64_t, uint8_t *> mem_type_to_data_mem_base;
  mem_type_to_data_mem_base[RT_MEMORY_HBM] = get_mem_base;
  if (p2p_memory_size == 0) {
    mem_type_to_data_mem_base[RT_MEMORY_P2P_DDR] = nullptr;
  } else {
    mem_type_to_data_mem_base[RT_MEMORY_P2P_DDR] = get_mem_base + memory_size;
  }
  std::map<int64_t, uint64_t> mem_type_to_data_mem_size;
  mem_type_to_data_mem_size[RT_MEMORY_HBM] = memory_size;
  mem_type_to_data_mem_size[RT_MEMORY_P2P_DDR] = p2p_memory_size;

  RunContextUtil run_context;
  Status ret = run_context.InitMemInfo(get_mem_base, memory_size, mem_type_to_data_mem_base, mem_type_to_data_mem_size,
                                       get_weight_mem_base, weight_size);
  if (ret != SUCCESS) {
    GELOGE(ret, "task_generator init mem info fail.");
    return ret;
  }
  auto weight_buffer = builder.GetWeightBuffer();
  ret = run_context.CreateRunContext(*model_ptr, comp_graph, weight_buffer, session_id);
  if (ret != SUCCESS) {
    GELOGE(ret, "runContext create run context fail.");
    return ret;
  }

  StreamGraphOptimizer stream_optimizer;
  ret = stream_optimizer.OptimizeStreamedSubGraph(comp_graph, subgraph_map, run_context.GetRunContext());
  if (ret != SUCCESS) {
    GELOGE(ret, "Optimize streamed subGraph fail.");
    return ret;
  }
  GE_DUMP(comp_graph, "AfterOptimizeStreamedSubGraph");

  auto *get_var_mem_base = reinterpret_cast<uint8_t *>(reinterpret_cast<uintptr_t>(var_manager->GetVarMemLogicBase()));
  uint64_t var_size = (var_manager->GetVarMemSize(RT_MEMORY_HBM) > 0) ? var_manager->GetVarMemMaxSize() : 0;
  TaskGenerator task_generator(get_var_mem_base, var_size);
  ret = task_generator.GetTaskInfo(*model_ptr, comp_graph, session_id, run_context.GetRunContext());
  return ret;
}

Status GraphBuilder::SetInputSize(const ge::NodePtr &node_ptr) {
  // set input_desc.size = src_node.output_desc.size
  if (node_ptr->GetType() == DATA) {
    bool is_unknown_shape = false;
    GE_CHK_STATUS_RET(ge::NodeUtils::GetNodeUnknownShapeStatus(*node_ptr, is_unknown_shape),
                      "Get data node[%s] shape status failed!", node_ptr->GetName().c_str());
    if (is_unknown_shape) {
      GELOGD("data node: %s is unknown shape, do not set input size!", node_ptr->GetName().c_str());
      return SUCCESS;
    }
    if (UpdateDataInputSize(node_ptr) != SUCCESS) {
      GELOGE(FAILED, "Update data input size failed.");
      return FAILED;
    }
  }

  for (const auto &in_data_anchor : node_ptr->GetAllInDataAnchors()) {
    const auto &peer_out_anchor = in_data_anchor->GetPeerOutAnchor();
    GE_IF_BOOL_EXEC(peer_out_anchor == nullptr, continue);
    const auto &src_node = peer_out_anchor->GetOwnerNode();
    const auto &src_op = src_node->GetOpDesc();
    GE_IF_BOOL_EXEC(src_op == nullptr, continue);
    auto node_op_desc = node_ptr->GetOpDesc();
    GE_IF_BOOL_EXEC(node_op_desc == nullptr, continue);
    // set dst_node.input_desc = src_node.output_desc
    ge::GeTensorDesc desc_temp(src_op->GetOutputDesc(peer_out_anchor->GetIdx()));
    int64_t size = 0;
    GE_IF_BOOL_EXEC(ge::TensorUtils::GetSize(desc_temp, size) != SUCCESS, GELOGI("Get size failed!"));
    GELOGD("src node %s output desc, dim_size: %zu, mem_size: %ld, format: %s, type: %s.", src_node->GetName().c_str(),
           desc_temp.GetShape().GetDimNum(), size, TypeUtils::FormatToSerialString(desc_temp.GetFormat()).c_str(),
           TypeUtils::DataTypeToSerialString(desc_temp.GetDataType()).c_str());
    for (size_t i = 0; i < desc_temp.GetShape().GetDimNum(); ++i) {
      GELOGD("dims[%zu]: %ld", i, desc_temp.GetShape().GetDim(i));
    }

    auto input_desc = node_op_desc->GetInputDescPtr(in_data_anchor->GetIdx());
    GE_CHECK_NOTNULL(input_desc);
    ge::TensorUtils::SetSize(const_cast<GeTensorDesc &>(*input_desc), size);
    GE_CHK_STATUS_RET(node_op_desc->UpdateInputDesc(in_data_anchor->GetIdx(), *input_desc));
    GELOGD("%s input desc, dim_size: %zu, mem_size: %ld, format: %s, type: %s.", node_ptr->GetName().c_str(),
           input_desc->GetShape().GetDimNum(), size, TypeUtils::FormatToSerialString(input_desc->GetFormat()).c_str(),
           TypeUtils::DataTypeToSerialString(input_desc->GetDataType()).c_str());
  }
  return SUCCESS;
}

Status GraphBuilder::UpdateDataInputSize(const ge::NodePtr &node_ptr) {
  const auto &op_desc = node_ptr->GetOpDesc();
  if (op_desc == nullptr) {
    GELOGE(FAILED, "Op desc is nullptr.");
    return FAILED;
  }
  // data op only has one output anchor
  ge::GeTensorDesc output_desc = op_desc->GetOutputDesc(0);
  int64_t output_size = 0;
  if (ge::TensorUtils::GetSize(output_desc, output_size) != SUCCESS) {
    GELOGW("Get size failed!");
  }
  if (output_size > 0) {
    GELOGI("No need to update data input size.");
    return SUCCESS;
  } else {
    int64_t real_dim_size = 0;
    ge::graphStatus graph_status = TensorUtils::GetTensorSizeInBytes(output_desc, real_dim_size);
    if (graph_status != GRAPH_SUCCESS) {
      GELOGE(FAILED, "Get tensor size in bytes failed.");
      return FAILED;
    }
    // data op only has one input anchor
    ge::GeTensorDesc input_desc = op_desc->GetInputDesc(0);
    ge::TensorUtils::SetSize(input_desc, real_dim_size);
    if (op_desc->UpdateInputDesc(0, input_desc) != GRAPH_SUCCESS) {
      GELOGE(FAILED, "Update input desc size failed.");
      return FAILED;
    }
  }
  return SUCCESS;
}

Status GraphBuilder::CalcDynShapeRootGraphDataSize(const ge::OpDescPtr &op_desc) {
  GELOGI("Begin to calc dynamic shape graph data[%s] size.", op_desc->GetName().c_str());
  // data op only has one output anchor
  ge::GeTensorDesc output_desc = op_desc->GetOutputDesc(0);
  if (output_desc.MutableShape().IsUnknownShape()) {
    GELOGI("No need to update dynamic shape graph data output size for unknown shape data.");
    return SUCCESS;
  }

  int64_t output_size = 0;
  if (ge::TensorUtils::GetSize(output_desc, output_size) != SUCCESS) {
    GELOGW("Get size failed!");
  }
  if (output_size > 0) {
    GELOGI("No need to update dynamic shape graph data output size[%ld].", output_size);
    return SUCCESS;
  } else {
    int64_t real_dim_size = 0;
    ge::graphStatus graph_status = TensorUtils::GetTensorSizeInBytes(output_desc, real_dim_size);
    if (graph_status != GRAPH_SUCCESS) {
      GELOGE(FAILED, "Get tensor size in bytes failed.");
      return FAILED;
    }
    ge::TensorUtils::SetSize(output_desc, real_dim_size);
    GELOGI("Update dynamic shape graph data output size to [%ld].", real_dim_size);
    if (op_desc->UpdateOutputDesc(0, output_desc) != GRAPH_SUCCESS) {
      GELOGE(FAILED, "Update dynamic shape graph data output desc size failed.");
      return FAILED;
    }
  }
  return SUCCESS;
}

Status GraphBuilder::SecondPartition(ge::ComputeGraphPtr &comp_graph, vector<ge::SubGraphInfoPtr> &subgraph_ptr_list) {
  GELOGI("[SecondPartition] second partition.");
  GE_TIMESTAMP_START(GraphPartition2);
  auto ret = graph_partitioner_.Partition(comp_graph, GraphPartitioner::kSecondPartitioning);
  if (ret != SUCCESS) {
    GELOGE(ret, "Graph partition Failed");
    return ret;
  }
  GE_CHK_STATUS_RET(ret, "Graph partition Failed.");
  auto graph_2_subgraphlist = graph_partitioner_.GetSubGraphMap();
  if (graph_2_subgraphlist.find(comp_graph) != graph_2_subgraphlist.end()) {
    subgraph_ptr_list = graph_2_subgraphlist[comp_graph];
  } else {
    GELOGE(FAILED, "Find subgraph failed.");
    return FAILED;
  }
  GE_TIMESTAMP_END(GraphPartition2, "GraphPartitioner::Partition2");
  return ret;
}

Status GraphBuilder::AddOutputMemTypeForNode(const NodePtr &node) {
  auto op_desc = node->GetOpDesc();
  GE_CHECK_NOTNULL(op_desc);
  uint32_t mem_type;
  if (!AttrUtils::GetInt(op_desc, ATTR_INPUT_MEMORY_TYPE, mem_type)) {
    return SUCCESS;
  }
  GELOGD("[%s] has attr input_memory_type %ld", op_desc->GetName().c_str(), mem_type);
  for (const auto &in_data_anchor : node->GetAllInDataAnchors()) {
    const auto &peer_out_anchor = in_data_anchor->GetPeerOutAnchor();
    GE_IF_BOOL_EXEC(peer_out_anchor == nullptr, continue);
    bool valid_flag = false;
    auto src_node = peer_out_anchor->GetOwnerNode();
    auto src_out_anchor = peer_out_anchor;
    while (true) {
      const auto &src_desc = src_node->GetOpDesc();
      GE_IF_BOOL_EXEC(src_desc == nullptr, continue);
      GELOGD("[%s:%u] set attr output_memory_type %ld", src_desc->GetName().c_str(), src_out_anchor->GetIdx(),
             mem_type);
      if (!AttrUtils::SetInt(src_desc->MutableOutputDesc(src_out_anchor->GetIdx()), ATTR_OUTPUT_MEMORY_TYPE,
                             mem_type)) {
        GELOGE(INTERNAL_ERROR, "Set out_memory_type attr for [%s:%d] failed.", src_desc->GetName().c_str(),
               src_out_anchor->GetIdx());
        return INTERNAL_ERROR;
      }
      switch (TransferNodeType(src_node)) {
        case kSubgraphNode:
          GE_CHK_STATUS_RET(HandleSubgraphNode(src_node, src_out_anchor), "Handle subgraph node %s failed",
                            src_node->GetName().c_str());
          break;
        case kSubgraphData:
          GE_CHK_STATUS_RET(HandleSubgraphDataNode(src_node, src_out_anchor), "Handle Data node %s in subgraph failed",
                            src_node->GetName().c_str());
          break;
        case kOthers:
        default:
          valid_flag = true;
          break;
      }
      if (valid_flag) {
        break;
      }
    }
  }
  return SUCCESS;
}
}  // namespace ge

The Graph Engine (GE) module is a submodule of MindSpore, implemented in C++. It sits between the front-end module ME and the underlying hardware and bridges the two. GE takes the graph delivered by ME as input, performs a series of deep graph optimizations, and finally outputs a graph that can run efficiently on the underlying hardware. GE applies optimizations tailored to the hardware architecture of the Ascend AI processor in order to fully exploit its compute power. During model training/inference, GE is invoked automatically and is transparent to the user. GE consists mainly of two parts, GE API and GE Core; the detailed architecture diagram is shown below.
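Separately from that diagram, the following minimal sketch illustrates how the GraphBuilder defined in graph_builder.cc above might be driven from inside GE Core. Only the GraphBuilder interface (the default constructor, SetOptions, and Build) is taken from the file; the wrapper function BuildRootModel, the way the options are populated, and the origin of the compute graph and session id are hypothetical and assume GELib has already been initialized elsewhere.

// Hypothetical driver sketch (not part of the original file).
#include "graph/build/graph_builder.h"

ge::Status BuildRootModel(ge::ComputeGraphPtr &comp_graph, uint64_t session_id) {
  ge::GraphBuilder builder;
  ge::GraphManagerOptions options;  // assumed to be filled in by the owning session/manager
  builder.SetOptions(options);

  std::vector<ge::SubGraphInfoPtr> subgraphs;  // populated by Build via SecondPartition
  ge::GeRootModelPtr root_model;               // output: root model holding per-subgraph GeModels
  GE_CHK_STATUS_RET(builder.Build(comp_graph, subgraphs, root_model, session_id),
                    "Build graph %s failed.", comp_graph->GetName().c_str());
  return ge::SUCCESS;
}

Depending on whether ATTR_NAME_DYNAMIC_SHAPE_PARTITIONED is set on the graph, Build dispatches to the dynamic-shape or known-shape flow shown in the source above.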