
hccl_memcpy_pass.cc 8.8 kB

/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "graph/passes/hccl_memcpy_pass.h"

#include <string>

#include "common/debug/log.h"
#include "framework/common/debug/ge_log.h"
#include "common/ge_inner_error_codes.h"
#include "common/ge/ge_util.h"
#include "framework/common/types.h"
#include "graph/utils/graph_utils.h"

namespace {
const int32_t kAnchorSize = 1;
const int kAnchorNum = 0;
const char *const kInputMutable = "_input_mutable";
}  // namespace

namespace ge {
Status HcclMemcpyPass::Run(ge::ComputeGraphPtr graph) {
  GE_IF_BOOL_EXEC(graph == nullptr, GELOGE(PARAM_INVALID, "param [graph] must not be null."); return PARAM_INVALID);
  for (const auto &node : graph->GetDirectNode()) {
    auto op_desc = node->GetOpDesc();
    GE_IF_BOOL_EXEC(op_desc == nullptr, continue);

    Status ret = ProcessBroadcastMemcpy(graph, node);
    if (ret != SUCCESS) {
      GELOGE(INTERNAL_ERROR, "failed ProcessBroadcastMemcpy.");
      return ret;
    }

    bool node_input_mutable = false;
    if (!AttrUtils::HasAttr(op_desc, kInputMutable)) {
      continue;
    }
    GE_IF_BOOL_EXEC(!AttrUtils::GetBool(op_desc, kInputMutable, node_input_mutable),
                    GELOGE(INTERNAL_ERROR, "node:%s get attr:_input_mutable failed.", node->GetName().c_str());
                    return FAILED);
    if (!node_input_mutable) {
      continue;
    }

    GELOGI("hcom op is:%s.", op_desc->GetName().c_str());
    for (auto &hccl_in_anchor : node->GetAllInDataAnchors()) {
      if (hccl_in_anchor == nullptr) {
        continue;
      }
      auto src_out_anchor = hccl_in_anchor->GetPeerOutAnchor();
      GE_CHECK_NOTNULL(src_out_anchor);

      int32_t src_out_anchor_size = src_out_anchor->GetPeerInDataAnchors().size();
      if (src_out_anchor_size == kAnchorSize) {
        // An Identity (memcpy) node needs to be inserted between Constant/Data and HcomAllReduce
        // to avoid the constant being cleared.
        NodePtr src_node = src_out_anchor->GetOwnerNode();
        std::string src_type = src_node->GetType();
        bool check_src_type = (src_type == CONSTANTOP) || (src_type == VARIABLE) || (src_type == DATA) ||
                              (src_type == CONSTANT);
        if (check_src_type) {
          Status ret = ModifyEdgeConnection(graph, src_out_anchor, hccl_in_anchor);
          if (ret != SUCCESS) {
            GELOGE(INTERNAL_ERROR, "Failed to modify the connection.");
            return ret;
          }
        }
        continue;
      }

      Status ret = ModifyEdgeConnection(graph, src_out_anchor, hccl_in_anchor);
      if (ret != SUCCESS) {
        GELOGE(INTERNAL_ERROR, "Failed to modify the connection.");
        return ret;
      }
    }
  }
  return SUCCESS;
}

// If a broadcast node has more than one input and an input comes from a variable, the broadcast inputs must
// occupy continuous memory, so a separate feature-map buffer is allocated for the broadcast input. In that case,
// data would be copied from variable memory into that buffer on every step. To keep this copy inside the model
// instead of in out-of-model move code, a memcpy (Identity) node is inserted.
Status HcclMemcpyPass::ProcessBroadcastMemcpy(const ComputeGraphPtr &graph, const NodePtr node) {
  auto op_desc = node->GetOpDesc();
  if (op_desc == nullptr) {
    GELOGE(INTERNAL_ERROR, "node has no op_desc, node_name : %s.", node->GetName().c_str());
    return INTERNAL_ERROR;
  }

  if ((node->GetType() == HCOMBROADCAST || node->GetType() == HVDCALLBACKBROADCAST) && op_desc->GetInputSize() > 1) {
    for (auto &hccl_in_anchor : node->GetAllInDataAnchors()) {
      if (hccl_in_anchor == nullptr) {
        continue;
      }
      auto src_out_anchor = hccl_in_anchor->GetPeerOutAnchor();
      if (src_out_anchor == nullptr) {
        GELOGE(INTERNAL_ERROR, "hcom op input has no peer anchor, node_name:%s", node->GetName().c_str());
        return INTERNAL_ERROR;
      }

      NodePtr src_node = src_out_anchor->GetOwnerNode();
      std::string src_type = src_node->GetType();
      bool check_src_type = (src_type == CONSTANTOP) || (src_type == VARIABLE) || (src_type == DATA) ||
                            (src_type == CONSTANT);
      if (check_src_type) {
        Status ret = ModifyEdgeConnection(graph, src_out_anchor, hccl_in_anchor);
        if (ret != SUCCESS) {
          GELOGE(INTERNAL_ERROR, "Failed to modify the connection.");
          return ret;
        }
      }
    }
  }
  return SUCCESS;
}

///
/// @brief Create Identity (memcpy) node
/// @param [in] ge::ComputeGraphPtr graph
/// @param [in] ge::OutDataAnchorPtr out_data_anchor
/// @return ge::NodePtr
///
NodePtr HcclMemcpyPass::CreateIdentityNode(const ComputeGraphPtr &graph, const OutDataAnchorPtr &out_data_anchor) {
  GE_IF_BOOL_EXEC(graph == nullptr, return nullptr);
  NodePtr pre_node = out_data_anchor->GetOwnerNode();
  OpDescPtr pre_op_desc = pre_node->GetOpDesc();
  if (pre_op_desc == nullptr) {
    GELOGE(INTERNAL_ERROR, "OpDesc of pre node is invalid.");
    return nullptr;
  }

  std::string node_name = pre_node->GetName() + "_" + IDENTITY;
  node_name = CheckDuplicateName(node_name);
  OpDescPtr op_desc = MakeShared<OpDesc>(node_name.c_str(), IDENTITY);
  if (op_desc == nullptr) {
    GELOGE(INTERNAL_ERROR, "Create identity op: MakeShared op_desc fail.");
    return nullptr;
  }
  GELOGI("Create identity op:%s.", op_desc->GetName().c_str());

  graphStatus ret = op_desc->AddInputDesc("x", pre_op_desc->GetOutputDesc(out_data_anchor->GetIdx()));
  if (ret != GRAPH_SUCCESS) {
    GELOGE(INTERNAL_ERROR, "Create identity op: add input desc fail.");
    return nullptr;
  }

  ret = op_desc->AddOutputDesc("y", pre_op_desc->GetOutputDesc(out_data_anchor->GetIdx()));
  if (ret != GRAPH_SUCCESS) {
    GELOGE(INTERNAL_ERROR, "Create identity op: add output desc fail.");
    return nullptr;
  }

  // For historical reasons, this pass cannot run after constant folding, so mark the node accordingly.
  (void)AttrUtils::SetBool(op_desc, ATTR_NO_NEED_CONSTANT_FOLDING, false);

  NodePtr memcpy_node = graph->AddNode(op_desc);
  if (memcpy_node == nullptr) {
    GELOGE(INTERNAL_ERROR, "Insert identity node fail.");
    return nullptr;
  }
  return memcpy_node;
}

///
/// @brief Check duplicate node_name
/// @param [in] std::string& node_name
/// @return std::string
///
std::string HcclMemcpyPass::CheckDuplicateName(const std::string &node_name) {
  std::string tmp_name = node_name;
  auto iter = node_num_map_.find(tmp_name);
  if (iter != node_num_map_.end()) {
    tmp_name = tmp_name + "_" + std::to_string(iter->second);
    (iter->second)++;
  } else {
    node_num_map_[tmp_name] = 1;
  }
  return tmp_name;
}

///
/// @brief Modify edge connection
/// @param [in] ComputeGraphPtr graph
/// @param [in] OutDataAnchorPtr src_out_anchor
/// @param [in] InDataAnchorPtr hccl_in_anchor
/// @return status
///
Status HcclMemcpyPass::ModifyEdgeConnection(const ComputeGraphPtr &graph, const OutDataAnchorPtr &src_out_anchor,
                                            const InDataAnchorPtr &hccl_in_anchor) {
  GELOGI("The op %s need insert memcpy async op.", src_out_anchor->GetOwnerNode()->GetName().c_str());
  NodePtr memcpy_node = CreateIdentityNode(graph, src_out_anchor);
  GE_CHECK_NOTNULL(memcpy_node);

  Status ret1 = src_out_anchor->Unlink(hccl_in_anchor);
  if (ret1 != SUCCESS) {
    GELOGE(INTERNAL_ERROR, "The op %s Unlink anchor %s fail.", src_out_anchor->GetOwnerNode()->GetName().c_str(),
           hccl_in_anchor->GetOwnerNode()->GetName().c_str());
    return FAILED;
  }

  auto out_data_anchor_0 = memcpy_node->GetOutDataAnchor(kAnchorNum);
  GE_CHECK_NOTNULL(out_data_anchor_0);
  ret1 = out_data_anchor_0->LinkTo(hccl_in_anchor);
  if (ret1 != SUCCESS) {
    GELOGE(INTERNAL_ERROR, "The op %s link anchor %s fail.", memcpy_node->GetName().c_str(),
           hccl_in_anchor->GetOwnerNode()->GetName().c_str());
    return FAILED;
  }

  Status ret = src_out_anchor->LinkTo(memcpy_node->GetInDataAnchor(kAnchorNum));
  if (ret != SUCCESS) {
    GELOGE(INTERNAL_ERROR, "The op %s link anchor %s fail.", src_out_anchor->GetOwnerNode()->GetName().c_str(),
           memcpy_node->GetName().c_str());
    return FAILED;
  }
  return SUCCESS;
}

///
/// @brief Clear Status, used for subgraph pass
/// @return SUCCESS
///
Status HcclMemcpyPass::ClearStatus() {
  node_num_map_.clear();
  return SUCCESS;
}
}  // namespace ge
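For orientation, here is a minimal sketch of how this pass could be driven by hand. It uses only the two entry points visible in this file, Run() and ClearStatus(); the helper name RunHcclMemcpyOnGraph and the idea of calling the pass directly (rather than through GE's own pass scheduling) are assumptions for illustration, and constructing the ComputeGraph itself is out of scope.

#include "graph/passes/hccl_memcpy_pass.h"

// Hypothetical driver: apply HcclMemcpyPass to one compute graph, then reset its
// per-graph naming state so the same pass object can be reused on a subgraph.
ge::Status RunHcclMemcpyOnGraph(const ge::ComputeGraphPtr &graph) {
  ge::HcclMemcpyPass pass;
  ge::Status ret = pass.Run(graph);  // inserts Identity (memcpy) nodes in front of mutable HCCL inputs
  if (ret != ge::SUCCESS) {
    return ret;
  }
  return pass.ClearStatus();         // clears node_num_map_, as done between subgraphs
}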

The Graph Engine (GE) module is a submodule of MindSpore, implemented in C++. It sits between the front-end module ME and the underlying hardware and serves as the bridge between them: GE takes the graph delivered by ME as input, performs a series of deep graph-optimization steps, and finally outputs a graph that can run efficiently on the underlying hardware. GE is specifically tuned for the hardware architecture of the Ascend AI processor so as to fully exploit its compute power. During model training and inference, GE is invoked automatically and is transparent to the user. GE consists mainly of two parts, GE API and GE Core; the detailed architecture diagram is shown below.
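To make the division between GE API and GE Core concrete, the following is a rough sketch of a client driving GE through its session interface. It assumes the public header exposed as ge/ge_api.h (GEInitialize, Session, GEFinalize) and empty option maps; the actual graph would come from the front end (ME) and is not constructed here.

#include <cstdint>
#include <map>
#include <string>
#include <vector>
#include "ge/ge_api.h"  // assumed public GE API header

// Hypothetical client: initialize GE, hand a front-end graph to GE Core through a
// session, run the optimized graph on the Ascend device, then tear GE down.
int RunGraphWithGe(const ge::Graph &graph) {
  std::map<std::string, std::string> options;  // global/session options, left empty for brevity
  if (ge::GEInitialize(options) != ge::SUCCESS) {
    return -1;
  }
  ge::Session session(options);
  const uint32_t graph_id = 1;
  std::vector<ge::Tensor> inputs;
  std::vector<ge::Tensor> outputs;
  if (session.AddGraph(graph_id, graph) != ge::SUCCESS ||            // GE Core optimizes and compiles the graph
      session.RunGraph(graph_id, inputs, outputs) != ge::SUCCESS) {  // execute it and collect outputs
    (void)ge::GEFinalize();
    return -1;
  }
  return ge::GEFinalize() == ge::SUCCESS ? 0 : -1;
}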