
subexpression_migration_pass.cc 21 kB

/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "subexpression_migration_pass.h"

#include "graph/utils/node_utils.h"
#include "ge_local_engine/engine/host_cpu_engine.h"
#include "graph/passes/folding_pass.h"

namespace ge {
constexpr uint32_t kDataOutIndex = 0;
constexpr uint32_t kCaseInputBase = 1;
constexpr uint32_t kInvalidParent = 0x7fffffffU;
bool IsSameTensor(ConstGeTensorDescPtr src_tensor, ConstGeTensorDescPtr dst_tensor) {
  if ((src_tensor == nullptr) && (dst_tensor == nullptr)) {
    return true;
  }
  if ((src_tensor == nullptr) || (dst_tensor == nullptr)) {
    return false;
  }

  if ((src_tensor->GetDataType() != dst_tensor->GetDataType()) ||
      (src_tensor->GetFormat() != dst_tensor->GetFormat())) {
    return false;
  }

  const auto src_dims = src_tensor->GetShape().GetDims();
  const auto dst_dims = dst_tensor->GetShape().GetDims();
  if (src_dims != dst_dims) {
    return false;
  }

  const auto src_orig_dims = src_tensor->GetOriginShape().GetDims();
  const auto dst_orig_dims = dst_tensor->GetOriginShape().GetDims();
  if (src_orig_dims != dst_orig_dims) {
    return false;
  }

  return true;
}

bool IsSameOpDesc(const OpDescPtr &src_desc, const OpDescPtr &dst_desc) {
  if ((src_desc == nullptr) && (dst_desc == nullptr)) {
    return true;
  }
  if ((src_desc == nullptr) || (dst_desc == nullptr)) {
    return false;
  }

  if (src_desc->GetType() != dst_desc->GetType()) {
    return false;
  }

  if ((src_desc->GetInputsSize() != dst_desc->GetInputsSize()) ||
      (src_desc->GetOutputsSize() != dst_desc->GetOutputsSize())) {
    return false;
  }

  for (uint32_t i = 0; i < src_desc->GetInputsSize(); ++i) {
    if (!IsSameTensor(src_desc->GetInputDescPtr(i), dst_desc->GetInputDescPtr(i))) {
      return false;
    }
  }

  for (uint32_t i = 0; i < src_desc->GetOutputsSize(); ++i) {
    if (!IsSameTensor(src_desc->GetOutputDescPtr(i), dst_desc->GetOutputDescPtr(i))) {
      return false;
    }
  }

  return true;
}
Status SubexpressionMigrationPass::Run(ComputeGraphPtr graph) {
  GE_CHECK_NOTNULL(graph);
  if (graph->GetParentGraph() != nullptr) {
    GELOGD("Subgraph %s skip the SubexpressionMigrationPass", graph->GetName().c_str());
    return SUCCESS;
  }

  GELOGD("Begin to run Subexpression Migration on graph: %s", graph->GetName().c_str());
  for (const auto &node : graph->GetDirectNode()) {
    if (node->GetType() != CASE) {
      continue;
    }

    const auto &func_desc = node->GetOpDesc();
    if (!func_desc->HasAttr(ATTR_NAME_BATCH_NUM)) {
      GELOGD("Not multi-batch, Case: %s", node->GetName().c_str());
      continue;
    }

    do {
      migration_append_ = false;
      map<ComputeGraphPtr, map<uint32_t, NodePtr>> graph_nodes;
      if (ClassifyDataNodes(graph, func_desc, graph_nodes) != SUCCESS) {
        return FAILED;
      }
      if (graph_nodes.empty()) {
        GELOGW("Graph: %s nodes is empty", graph->GetName().c_str());
        break;
      }

      // {subgraph0, {{1, Data}, {2, Data}, {3, Data}, {4, Data}, ..., {n, Data}}}
      // {subgraph1, {{1, Data}, {2, Data}, {3, Data}, {4, Data}, ..., {n, Data}}}
      // {subgraph2, {{1, Data}, {2, Data}, {3, Data}, {4, Data}, ..., {n, Data}}}
      const auto base_nodes = graph_nodes.begin()->second;  // Need copy.
      for (const auto &node_item : base_nodes) {
        if (GraphNodeMigration(graph, node, graph_nodes, node_item.second, node_item.first) != SUCCESS) {
          return FAILED;
        }
      }
    } while (migration_append_);
  }

  return SUCCESS;
}
///
/// @ingroup ge
/// @brief Get all Data nodes of all subgraphs.
/// @param [in] graph: Root compute graph.
/// @param [in] func_desc: functional OpDesc of Case.
/// @param [out] graph_nodes: Data groups of each subgraph.
/// @return 0: SUCCESS / others: FAILED
///
Status SubexpressionMigrationPass::ClassifyDataNodes(const ComputeGraphPtr &graph, const OpDescPtr &func_desc,
                                                     map<ComputeGraphPtr, map<uint32_t, NodePtr>> &graph_nodes) {
  for (const auto &name : func_desc->GetSubgraphInstanceNames()) {
    const auto &subgraph = graph->GetSubgraph(name);
    if (subgraph == nullptr) {
      GELOGE(GE_GRAPH_EMPTY_SUBGRAPH, "Subgraph not found, name: %s", name.c_str());
      return GE_GRAPH_EMPTY_SUBGRAPH;
    }

    auto &data_nodes = graph_nodes[subgraph];
    for (auto &data : subgraph->GetDirectNode()) {
      if (data->GetType() != DATA) {
        continue;
      }

      uint32_t parent_index = 0;
      if (!AttrUtils::GetInt(data->GetOpDesc(), ATTR_NAME_PARENT_NODE_INDEX, parent_index)) {
        GELOGE(FAILED, "Parent index not found, name: %s", data->GetName().c_str());
        return FAILED;
      }

      data_nodes[parent_index] = data;
      GELOGD("%s, Parent index: %u, Data: %s", subgraph->GetName().c_str(), parent_index, data->GetName().c_str());
    }
  }

  for (const auto &data_nodes : graph_nodes) {
    if (data_nodes.second.size() != graph_nodes.begin()->second.size()) {
      GELOGE(FAILED, "Subgraph %s has invalid Data nodes[%zu != %zu]",
             data_nodes.first->GetName().c_str(), data_nodes.second.size(), graph_nodes.begin()->second.size());
      return FAILED;
    }
  }

  return SUCCESS;
}
///
/// @ingroup ge
/// @brief Get the parent indices of the inputs and outputs of a node that is fed directly by Data.
/// @param [in] node: Node fed directly by Data.
/// @param [out] inputs: parent index of each input.
/// @param [out] outputs: parent index of each output.
/// @return true: SUCCESS / false: FAILED
///
bool SubexpressionMigrationPass::GetAssociatedNodes(const NodePtr &node, map<uint32_t, uint32_t> &inputs,
                                                    map<uint32_t, uint32_t> &outputs) {
  for (uint32_t i = 0; i < node->GetAllOutDataAnchorsSize(); ++i) {
    outputs[i] = kInvalidParent;
  }

  uint32_t out_index = 0;
  for (uint32_t i = 0; i < node->GetAllInDataAnchorsSize(); ++i) {
    const auto &in_anchor = node->GetInDataAnchor(i);
    const auto &out_anchor = in_anchor->GetPeerOutAnchor();
    if (out_anchor == nullptr) {
      inputs[i] = kInvalidParent;
      continue;
    }

    // The node has a non-Data input, so it cannot be moved to the parent graph.
    const auto &owner_node = out_anchor->GetOwnerNode();
    if (owner_node->GetType() != DATA) {
      return false;
    }

    uint32_t parent_index = 0;
    if (!AttrUtils::GetInt(owner_node->GetOpDesc(), ATTR_NAME_PARENT_NODE_INDEX, parent_index)) {
      return false;
    }

    // If the input Data also feeds other nodes, a new Data node must be added later.
    inputs[i] = parent_index;
    if ((out_index < outputs.size()) && (owner_node->GetOutDataNodesSize() == 1)) {
      outputs[out_index] = parent_index;
      ++out_index;
    }
  }

  return true;
}
///
/// @ingroup ge
/// @brief Check whether the nodes at the same position in all subgraphs are identical.
/// @param [in] graph_nodes: Data groups of each subgraph.
/// @param [in] base_node: Node in the base subgraph used for comparison.
/// @param [in] node_idx: Parent index of the Data node.
/// @param [in] anchor_idx: Anchor index of the node.
/// @return true: Same / false: not same
///
bool SubexpressionMigrationPass::IsParallelNodeSame(const map<ComputeGraphPtr, map<uint32_t, NodePtr>> &graph_nodes,
                                                    const NodePtr &base_node, uint32_t node_idx, uint32_t anchor_idx) {
  auto it = graph_nodes.begin();
  for (++it; it != graph_nodes.end(); ++it) {
    const auto &data_nodes = it->second;
    auto data_it = data_nodes.find(node_idx);
    if (data_it == data_nodes.end()) {
      GELOGE(FAILED, "Data: %s not found, index: %u", base_node->GetName().c_str(), node_idx);
      return false;
    }

    const auto &work_data = data_it->second;
    const auto &out_anchor = work_data->GetOutDataAnchor(kDataOutIndex);
    const auto &in_anchors = out_anchor->GetPeerInDataAnchors();
    const auto &in_anchor = in_anchors.at(anchor_idx);
    if (in_anchor == nullptr) {
      GELOGE(FAILED, "Data anchor index: %u, anchor size: %zu", anchor_idx, in_anchors.size());
      return false;
    }

    const auto &work_node = in_anchor->GetOwnerNode();
    if (work_node == nullptr) {
      GELOGE(FAILED, "Data: %s not found, index: %u", base_node->GetName().c_str(), node_idx);
      return false;
    }

    if (!IsSameOpDesc(base_node->GetOpDesc(), work_node->GetOpDesc())) {
      GELOGI("OpDesc diff: %s %s", base_node->GetName().c_str(), work_node->GetName().c_str());
      return false;
    }
  }

  return true;
}
///
/// @ingroup ge
/// @brief Migrate subgraph nodes to the root graph.
/// @param [in] graph: Root compute graph.
/// @param [in] func_node: functional Node of Case.
/// @param [in] graph_nodes: Data groups of each subgraph.
/// @param [in] base_data: Data Node for migration.
/// @param [in] base_idx: Parent index of the Data node.
/// @return 0: SUCCESS / others: FAILED
///
Status SubexpressionMigrationPass::GraphNodeMigration(const ComputeGraphPtr &graph, const NodePtr &func_node,
                                                      map<ComputeGraphPtr, map<uint32_t, NodePtr>> &graph_nodes,
                                                      const NodePtr &base_data, uint32_t base_idx) {
  bool can_extrapolation = false;
  do {
    can_extrapolation = false;
    const auto out_anchor = base_data->GetOutDataAnchor(kDataOutIndex);
    const auto in_anchors = out_anchor->GetPeerInDataAnchors();
    for (size_t i = 0; i < in_anchors.size(); ++i) {
      const auto &in_anchor = in_anchors.at(i);
      const auto &base_node = in_anchor->GetOwnerNode();
      GELOGD("Get Data direct node: %s", base_node->GetName().c_str());
      if (!base_node->GetHostNode()) {
        continue;
      }

      // Get the associated Data; if the Data also feeds other nodes, a new Data node must be appended.
      map<uint32_t, uint32_t> inputs;
      map<uint32_t, uint32_t> outputs;
      if (!GetAssociatedNodes(base_node, inputs, outputs)) {
        continue;
      }

      if (!IsParallelNodeSame(graph_nodes, base_node, base_idx, i)) {
        continue;
      }

      GELOGI("Move to parent: %s, parent index: %u", base_node->GetName().c_str(), base_idx);
      if (AppendParallelNode(graph_nodes, func_node, outputs) != SUCCESS) {
        return FAILED;
      }
      if (MoveNodeToParent(graph, func_node, graph_nodes, i, inputs, outputs) != SUCCESS) {
        return FAILED;
      }

      can_extrapolation = true;
      break;
    }
  } while (can_extrapolation);

  return SUCCESS;
}
///
/// @ingroup ge
/// @brief Append an input tensor to the functional node.
/// @param [in] graph_nodes: Data groups of each subgraph.
/// @param [in] func_node: functional Node of Case.
/// @param [in] outputs: Parent index of each Node output.
/// @return 0: SUCCESS / others: FAILED
///
Status SubexpressionMigrationPass::AppendParallelNode(map<ComputeGraphPtr, map<uint32_t, NodePtr>> &graph_nodes,
                                                      const NodePtr &func_node, map<uint32_t, uint32_t> &outputs) {
  // If an output index is invalid, add a Data node and an input tensor.
  for (auto &item : outputs) {
    if (item.second != kInvalidParent) {
      continue;
    }

    // Add a Data node to each subgraph.
    map<ComputeGraphPtr, uint32_t> append_num;
    for (auto &groups : graph_nodes) {
      const auto &subgraph = groups.first;
      auto &data_nodes = groups.second;

      item.second = func_node->GetAllInDataAnchorsSize() + append_num[subgraph];  // Update to valid parent index.
      std::string data_name = subgraph->GetName() + "_data_" + std::to_string(item.second);

      OpDescBuilder op_builder(data_name, DATA);
      const OpDescPtr op_desc = op_builder.AddInput("x").AddOutput("y").Build();
      if (op_desc == nullptr) {
        GELOGE(OUT_OF_MEMORY, "Create multi-batch case desc failed");
        return OUT_OF_MEMORY;
      }

      uint32_t data_index = item.second - kCaseInputBase;
      if (!AttrUtils::SetInt(op_desc, ATTR_NAME_INDEX, data_index)) {
        GELOGE(FAILED, "Set index failed, name: %s", op_desc->GetName().c_str());
        return FAILED;
      }
      if (!AttrUtils::SetInt(op_desc, ATTR_NAME_PARENT_NODE_INDEX, item.second)) {
        GELOGE(FAILED, "Set parent index failed, name: %s", op_desc->GetName().c_str());
        return FAILED;
      }

      append_num[subgraph]++;
      data_nodes[item.second] = subgraph->AddNode(op_desc);
      GELOGI("Add Node: %s, parent index: %u", op_desc->GetName().c_str(), item.second);
    }

    // Add an input tensor to the functional node.
    GE_CHK_GRAPH_STATUS_RET(NodeUtils::AppendInputAnchor(func_node, item.second + 1), "Append input failed");
    migration_append_ = true;
  }

  return SUCCESS;
}
///
/// @ingroup ge
/// @brief Detach the node to be migrated from its subgraph.
/// @param [in] graph_datas: Data group of the subgraph.
/// @param [in] detach: Node that will be moved to the parent graph.
/// @param [in] outputs: Parent index of each Node output.
/// @return 0: SUCCESS / others: FAILED
///
Status SubexpressionMigrationPass::DetachParallelNode(const map<uint32_t, NodePtr> &graph_datas, const NodePtr &detach,
                                                      const map<uint32_t, uint32_t> &outputs) {
  // Break the edges between the Data nodes and the node being moved.
  for (const auto &in_anchor : detach->GetAllInDataAnchors()) {
    const auto &out_anchor = in_anchor->GetPeerOutAnchor();
    if (out_anchor == nullptr) {
      continue;
    }
    GE_CHK_GRAPH_STATUS_RET(GraphUtils::RemoveEdge(out_anchor, in_anchor), "Remove edge failed");

    const auto &owner_node = out_anchor->GetOwnerNode();
    GELOGI("Remove Edge: %s %s", owner_node->GetName().c_str(), detach->GetName().c_str());
  }

  // Break the edges between the moved node and its followers, then link the Data nodes to the followers.
  for (uint32_t i = 0; i < detach->GetAllOutDataAnchorsSize(); ++i) {
    auto it_idx = outputs.find(i);
    if (it_idx == outputs.end()) {
      GELOGE(FAILED, "Node: %s parent index %u not found", detach->GetName().c_str(), i);
      return FAILED;
    }

    auto it_data = graph_datas.find(it_idx->second);
    if (it_data == graph_datas.end()) {
      GELOGE(FAILED, "Node: %s parent index %u not found", detach->GetName().c_str(), i);
      return FAILED;
    }

    const auto &data_node = it_data->second;
    const auto &out_anchor = detach->GetOutDataAnchor(i);

    const auto &out_desc = detach->GetOpDesc()->GetOutputDesc(i);
    const auto &data_desc = data_node->GetOpDesc();
    (void)data_desc->UpdateInputDesc(kDataOutIndex, out_desc);   // Set the Data input desc to the newly connected node.
    (void)data_desc->UpdateOutputDesc(kDataOutIndex, out_desc);  // Set the Data output desc to the newly connected node.

    for (const auto &in_anchor : out_anchor->GetPeerInDataAnchors()) {
      if (in_anchor == nullptr) {
        continue;
      }
      GE_CHK_GRAPH_STATUS_RET(GraphUtils::RemoveEdge(out_anchor, in_anchor), "Remove edge failed");
      const auto &owner_node = in_anchor->GetOwnerNode();
      GELOGI("Remove Edge: %s %s", detach->GetName().c_str(), owner_node->GetName().c_str());

      const auto &data_out_anchor = data_node->GetOutDataAnchor(kDataOutIndex);
      GE_CHK_GRAPH_STATUS_RET(GraphUtils::AddEdge(data_out_anchor, in_anchor), "Add edge failed");
      GELOGI("Add Edge: %s %s", data_node->GetName().c_str(), owner_node->GetName().c_str());
    }
  }

  return SUCCESS;
}
///
/// @ingroup ge
/// @brief Attach the migrated node to the parent graph.
/// @param [in] graph: Parent compute graph.
/// @param [in] func_node: functional Node of Case.
/// @param [in] attach: Node that will be moved to the parent graph.
/// @param [in] inputs: Parent index of each Node input.
/// @param [in] outputs: Parent index of each Node output.
/// @return 0: SUCCESS / others: FAILED
///
Status SubexpressionMigrationPass::AttachParallelNode(const ComputeGraphPtr &graph, const NodePtr &func_node,
                                                      const NodePtr &attach, const map<uint32_t, uint32_t> &inputs,
                                                      const map<uint32_t, uint32_t> &outputs) {
  GE_CHECK_NOTNULL(attach);
  for (uint32_t i = 0; i < attach->GetAllInDataAnchorsSize(); ++i) {
    auto it_idx = inputs.find(i);
    if (it_idx == inputs.end()) {
      GELOGE(FAILED, "Node: %s parent index %u not found", attach->GetName().c_str(), i);
      return FAILED;
    }
    if (it_idx->second == kInvalidParent) {  // Not connected, skip.
      continue;
    }

    const auto &in_anchor = func_node->GetInDataAnchor(it_idx->second);
    const auto &out_anchor = in_anchor->GetPeerOutAnchor();
    GE_CHK_GRAPH_STATUS_RET(GraphUtils::AddEdge(out_anchor, attach->GetInDataAnchor(i)), "Add edge failed");
    const auto &owner_node = out_anchor->GetOwnerNode();
    GELOGI("Add Edge: %s %s", owner_node->GetName().c_str(), attach->GetName().c_str());
  }

  for (uint32_t i = 0; i < attach->GetAllOutDataAnchorsSize(); ++i) {
    auto it_idx = outputs.find(i);
    if (it_idx == outputs.end()) {
      return FAILED;
    }
    if (it_idx->second == kInvalidParent) {  // Not connected, skip.
      continue;
    }

    const auto &out_desc = attach->GetOpDesc()->GetOutputDesc(i);
    const auto &func_desc = func_node->GetOpDesc();
    (void)func_desc->UpdateInputDesc(it_idx->second, out_desc);  // Set the Case input desc to the newly connected node.

    const auto &in_anchor = func_node->GetInDataAnchor(it_idx->second);
    const auto &out_anchor = in_anchor->GetPeerOutAnchor();
    if (out_anchor != nullptr) {
      GE_CHK_GRAPH_STATUS_RET(GraphUtils::RemoveEdge(out_anchor, in_anchor), "Remove edge failed");
      const auto &owner_node = out_anchor->GetOwnerNode();
      GELOGI("Remove Edge: %s %s", owner_node->GetName().c_str(), func_node->GetName().c_str());
    }
    GE_CHK_GRAPH_STATUS_RET(GraphUtils::AddEdge(attach->GetOutDataAnchor(i), in_anchor), "Add edge failed");
    GELOGI("Add Edge: %s %s", attach->GetName().c_str(), func_node->GetName().c_str());
  }

  (void)graph->AddNode(attach);
  (void)attach->SetOwnerComputeGraph(graph);
  GELOGI("Add Node: %s %s", graph->GetName().c_str(), attach->GetName().c_str());
  return SUCCESS;
}
///
/// @ingroup ge
/// @brief Move a node to the parent graph.
/// @param [in] graph: Root compute graph.
/// @param [in] func_node: functional Node of Case.
/// @param [in] graph_nodes: Data groups of each subgraph.
/// @param [in] anchor_idx: anchor index of the node to move.
/// @param [in] inputs: Parent index of each Node input.
/// @param [in] outputs: Parent index of each Node output.
/// @return 0: SUCCESS / others: FAILED
///
Status SubexpressionMigrationPass::MoveNodeToParent(const ComputeGraphPtr &graph, const NodePtr &func_node,
                                                    const map<ComputeGraphPtr, map<uint32_t, NodePtr>> &graph_nodes,
                                                    uint32_t anchor_idx, const map<uint32_t, uint32_t> &inputs,
                                                    const map<uint32_t, uint32_t> &outputs) {
  if (inputs.empty()) {
    GELOGE(FAILED, "Graph: %s, inputs is empty", graph->GetName().c_str());
    return FAILED;
  }

  NodePtr move_node;
  uint32_t base_index = inputs.begin()->second;
  for (auto &groups : graph_nodes) {
    const auto &subgraph = groups.first;
    const auto &subnodes = groups.second;
    auto it = subnodes.find(base_index);
    if (it == subnodes.end()) {
      GELOGE(FAILED, "Graph: %s, Data: %u node not found", subgraph->GetName().c_str(), base_index);
      return FAILED;
    }

    const auto &base_data = it->second;
    const auto &out_anchor = base_data->GetOutDataAnchor(kDataOutIndex);
    const auto &in_anchors = out_anchor->GetPeerInDataAnchors();
    const auto &in_anchor = in_anchors.at(anchor_idx);
    if (in_anchor == nullptr) {
      GELOGE(FAILED, "Data anchor index: %u, anchor size: %zu", anchor_idx, in_anchors.size());
      return FAILED;
    }

    move_node = in_anchor->GetOwnerNode();
    if (move_node == nullptr) {
      GELOGE(FAILED, "Data: %s not found, index: %u", base_data->GetName().c_str(), base_index);
      return FAILED;
    }

    if (DetachParallelNode(subnodes, move_node, outputs) != SUCCESS) {
      GELOGE(FAILED, "Detach node %s from subgraph %s failed", move_node->GetName().c_str(),
             subgraph->GetName().c_str());
      return FAILED;
    }

    GE_CHK_GRAPH_STATUS_RET(subgraph->RemoveNode(move_node), "Remove node failed");
    GELOGI("Remove Node: %s %s", subgraph->GetName().c_str(), move_node->GetName().c_str());
  }

  if (AttachParallelNode(graph, func_node, move_node, inputs, outputs) != SUCCESS) {
    return FAILED;
  }

  return SUCCESS;
}
}  // namespace ge

The Graph Engine (GE) module is a submodule of MindSpore. Implemented in C++, it sits between the front-end module ME and the underlying hardware and serves as the bridge between them. GE takes the graph delivered by ME as input, performs a series of deep graph-optimization operations, and finally outputs a graph that can run efficiently on the underlying hardware. GE applies optimizations tailored to the hardware architecture of the Ascend AI processor in order to fully exploit its computing power. During model training or inference, GE is invoked automatically and is transparent to the user. GE consists mainly of two parts, GE API and GE Core; the detailed architecture diagram is shown below.
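To make the GE API / GE Core split concrete, the following is a minimal client-side sketch, not part of this file: it assumes the public GE API header (GEInitialize, Session::AddGraph, Session::RunGraph, GEFinalize) and uses empty options plus an empty placeholder graph, so it only illustrates the call flow. Graph-optimization passes such as the SubexpressionMigrationPass above are applied inside GE Core while the added graph is built and executed.

// Minimal usage sketch (assumption for illustration only): drives GE through the
// public GE API; a real caller would populate the options and build a full graph.
#include <map>
#include <string>
#include <vector>
#include "ge/ge_api.h"
#include "graph/graph.h"
#include "graph/tensor.h"

int main() {
  std::map<std::string, std::string> options;      // global/session options, left empty here
  if (ge::GEInitialize(options) != ge::SUCCESS) {  // bring up GE (GE API -> GE Core)
    return -1;
  }

  ge::Session session(options);
  ge::Graph graph("example_graph");                // placeholder graph; normally converted from the ME front end
  const uint32_t graph_id = 0U;
  if (session.AddGraph(graph_id, graph) == ge::SUCCESS) {
    // GE Core optimizes and compiles the graph when it is executed.
    std::vector<ge::Tensor> inputs;
    std::vector<ge::Tensor> outputs;
    (void)session.RunGraph(graph_id, inputs, outputs);
  }

  (void)ge::GEFinalize();                          // tear down GE
  return 0;
}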