cond_pass.cc 13 kB

/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "graph/passes/cond_pass.h"
#include "common/op/ge_op_utils.h"
#include "graph/utils/graph_utils.h"
#include "graph/utils/type_utils.h"
#include "graph/utils/node_utils.h"

namespace {
const std::string kStringLength = "StringLength";
const size_t kScalarDimNum = 1;
}

namespace ge {
Status CondPass::Run(NodePtr &node) {
  ComputeGraphPtr graph = nullptr;
  OutDataAnchorPtr cond_out_anchor = nullptr;
  InDataAnchorPtr cond_in_anchor = nullptr;
  Status ret = GetCondInfo(node, graph, cond_out_anchor, cond_in_anchor);
  if (ret == NOT_CHANGED) {
    return SUCCESS;
  } else if (ret != SUCCESS) {
    GELOGE(FAILED, "Get cond_info for node %s failed.", node->GetName().c_str());
    return FAILED;
  }
  /// cond
  /// 1. NonScalar: cond->Size(int32)->If / NetOutput(while)
  /// 2. String Scalar: cond->StringLength(int32)->If / NetOutput(while)
  /// 3. bool / float / double / uint8 / int16 / int8 / int64 Scalar: cond->Cast(2int32)->If / NetOutput(while)
  /// 4. Int32 Scalar: cond->If / NetOutput(while)
  OpDescPtr op_desc = cond_in_anchor->GetOwnerNode()->GetOpDesc();
  GE_CHECK_NOTNULL(op_desc);
  GELOGI("Handle cond for node %s.", op_desc->GetName().c_str());
  GeTensorDesc cond_tensor = op_desc->GetInputDesc(cond_in_anchor->GetIdx());
  if (cond_tensor.MutableShape().GetDim(0) == UNKNOWN_DIM_NUM) {
    GELOGI("Output tensor rank of Cond is unknown.");
    if (cond_tensor.GetDataType() == DT_STRING) {
      GE_CHK_STATUS_RET(HandleStringCond(graph, cond_out_anchor, cond_in_anchor), "HandleStringCond for %s failed.",
                        op_desc->GetName().c_str())
    }
    return SUCCESS;
  }
  if (!cond_tensor.GetShape().IsScalar()) {
    GE_CHK_STATUS_RET(HandleNonScalarCond(graph, cond_out_anchor, cond_in_anchor), "HandleNonScalarCond for %s failed.",
                      op_desc->GetName().c_str())
  } else {
    switch (cond_tensor.GetDataType()) {
      case DT_STRING:
        GE_CHK_STATUS_RET(HandleStringCond(graph, cond_out_anchor, cond_in_anchor), "HandleStringCond for %s failed.",
                          op_desc->GetName().c_str())
        break;
      case DT_BOOL:
      case DT_FLOAT:
      case DT_DOUBLE:
      case DT_UINT8:
      case DT_INT16:
      case DT_INT8:
      case DT_INT64:
        GE_CHK_STATUS_RET(HandleScalarCond(graph, cond_out_anchor, cond_in_anchor, cond_tensor.GetDataType()),
                          "HandleScalarCond for %s failed.", op_desc->GetName().c_str())
        break;
      case DT_INT32:
        break;
      default:
  76. GELOGE(FAILED, "UpdateInputDesc for node %s failed.", op_desc->GetName().c_str());
  77. return FAILED;
    }
  }
  cond_tensor.SetDataType(DT_INT32);
  cond_tensor.SetOriginDataType(DT_INT32);
  cond_tensor.SetShape(GeShape());
  cond_tensor.SetOriginShape(GeShape());
  if (op_desc->UpdateInputDesc(cond_in_anchor->GetIdx(), cond_tensor) != GRAPH_SUCCESS) {
    GELOGE(FAILED, "UpdateInputDesc for node %s failed.", op_desc->GetName().c_str());
    return FAILED;
  }
  return SUCCESS;
}

///
/// @brief Get cond info for if / while
/// @param [in] node: If / While op
/// @param [out] graph: owner_graph of if node / while_cond subgraph
/// @param [out] cond_out_anchor: peer_cond_anchor
/// @param [out] cond_in_anchor: cond_input
/// @return Status
///
Status CondPass::GetCondInfo(const NodePtr &node, ComputeGraphPtr &graph, OutDataAnchorPtr &cond_out_anchor,
                             InDataAnchorPtr &cond_in_anchor) {
  GE_CHECK_NOTNULL(node);
  std::string type = node->GetType();
  if (kIfOpTypes.count(type) != 0) {
    if (GetCondInfoForIf(node, graph, cond_out_anchor, cond_in_anchor) != SUCCESS) {
      GELOGE(FAILED, "Get cond_info for if node failed.");
      return FAILED;
    }
  } else if (kWhileOpTypes.count(type) != 0) {
    if (GetCondInfoForWhile(node, graph, cond_out_anchor, cond_in_anchor) != SUCCESS) {
      GELOGE(FAILED, "Get cond_info for while node failed.");
      return FAILED;
    }
  } else {
    GELOGD("no need cond_pass for node %s.", node->GetName().c_str());
    return NOT_CHANGED;
  }
  return SUCCESS;
}

///
/// @brief Get cond info for if node
/// @param [in] node: If op
/// @param [out] graph: owner_graph of if node
/// @param [out] cond_out_anchor: peer_cond_anchor
/// @param [out] cond_in_anchor: cond_input of if
/// @return Status
///
Status CondPass::GetCondInfoForIf(const NodePtr &node, ComputeGraphPtr &graph, OutDataAnchorPtr &cond_out_anchor,
                                  InDataAnchorPtr &cond_in_anchor) {
  GE_CHECK_NOTNULL(node);
  graph = node->GetOwnerComputeGraph();
  GE_CHECK_NOTNULL(graph);
  cond_in_anchor = node->GetInDataAnchor(IF_COND_INPUT);
  GE_CHECK_NOTNULL(cond_in_anchor);
  cond_out_anchor = cond_in_anchor->GetPeerOutAnchor();
  GE_CHECK_NOTNULL(cond_out_anchor);
  return SUCCESS;
}

///
/// @brief Get cond info for while node
/// @param [in] node: While op
/// @param [out] graph: while_cond subgraph
/// @param [out] cond_out_anchor: peer_cond_anchor
/// @param [out] cond_in_anchor: input of NetOutput in cond_graph
/// @return Status
///
Status CondPass::GetCondInfoForWhile(const NodePtr &node, ComputeGraphPtr &graph, OutDataAnchorPtr &cond_out_anchor,
                                     InDataAnchorPtr &cond_in_anchor) {
  GE_CHECK_NOTNULL(node);
  OpDescPtr op_desc = node->GetOpDesc();
  GE_CHECK_NOTNULL(op_desc);
  std::map<std::string, uint32_t> subgraph_names_to_index = op_desc->GetSubgraphNameIndexes();
  auto iter = subgraph_names_to_index.find(ATTR_NAME_WHILE_COND);
  if (iter == subgraph_names_to_index.end()) {
    GELOGE(FAILED, "Get cond_graph index failed, while_node:%s.", node->GetName().c_str());
    return FAILED;
  }
  std::string cond_graph_instance_name = op_desc->GetSubgraphInstanceName(iter->second);
  graph = GraphUtils::FindRootGraph(node->GetOwnerComputeGraph())->GetSubgraph(cond_graph_instance_name);
  GE_CHECK_NOTNULL(graph);
  NodePtr net_output_node = graph->FindFirstNodeMatchType(NETOUTPUT);
  GE_CHECK_NOTNULL(net_output_node);
  // cond_graph has one and only one output
  uint32_t output_num = net_output_node->GetAllInDataAnchorsSize();
  if (output_num != 1) {
    GELOGE(FAILED, "Output size of cond_graph is invalid, expect 1 but got %u, while_node:%s.",
           output_num, node->GetName().c_str());
    return FAILED;
  }
  cond_in_anchor = net_output_node->GetInDataAnchor(0);
  GE_CHECK_NOTNULL(cond_in_anchor);
  cond_out_anchor = cond_in_anchor->GetPeerOutAnchor();
  GE_CHECK_NOTNULL(cond_out_anchor);
  return SUCCESS;
}

///
/// @brief Process Cond Op with non-scalar cond_input: cond->Size->If / NetOutput(while)
/// @param [in] graph
/// @param [in] out_anchor: peer_cond_anchor
/// @param [in] in_anchor: cond_input
/// @return Status
///
Status CondPass::HandleNonScalarCond(const ComputeGraphPtr &graph, const OutDataAnchorPtr &out_anchor,
                                     const InDataAnchorPtr &in_anchor) {
  GELOGI("Handle cond with non-scalar cond-input.");
  return InsertNode(graph, out_anchor, in_anchor, SIZE);
}

///
/// @brief Process Cond Op with scalar-string cond_input: cond->StringLength(int32)->If / NetOutput(while)
/// @param [in] graph
/// @param [in] out_anchor: peer_cond_anchor
/// @param [in] in_anchor: cond_input
/// @return Status
///
Status CondPass::HandleStringCond(const ComputeGraphPtr &graph, const OutDataAnchorPtr &out_anchor,
                                  const InDataAnchorPtr &in_anchor) {
  GELOGI("Handle cond with scalar-string cond-input.");
  return InsertNode(graph, out_anchor, in_anchor, kStringLength);
}

///
/// @brief Process Cond Op with scalar cond_input: cond->Cast(2int32)->If / NetOutput(while)
/// @param [in] graph
/// @param [in] out_anchor: peer_cond_anchor
/// @param [in] in_anchor: cond_input
/// @param [in] src_type
/// @return Status
///
Status CondPass::HandleScalarCond(const ComputeGraphPtr &graph, const OutDataAnchorPtr &out_anchor,
                                  const InDataAnchorPtr &in_anchor, DataType src_type) {
  GE_CHECK_NOTNULL(in_anchor);
  GE_CHECK_NOTNULL(out_anchor);
  GE_CHECK_NOTNULL(out_anchor->GetOwnerNode()->GetOpDesc());
  GELOGI("Handle cond with scalar cond-input.");
  GeTensorDesc tensor = out_anchor->GetOwnerNode()->GetOpDesc()->GetOutputDesc(out_anchor->GetIdx());
  std::string cast_name = in_anchor->GetOwnerNode()->GetName() + "_Cast";
  NodePtr cast_node = AddCastNode(graph, cast_name, tensor, src_type, DT_INT32);
  if (cast_node == nullptr) {
    GELOGE(FAILED, "Add Cast node failed, name:%s.", cast_name.c_str());
    return FAILED;
  }
  if (GraphUtils::InsertNodeAfter(out_anchor, { in_anchor }, cast_node) != GRAPH_SUCCESS) {
    GELOGE(FAILED, "Insert Cast node %s between %s->%s failed.",
           cast_node->GetName().c_str(), out_anchor->GetOwnerNode()->GetName().c_str(),
           in_anchor->GetOwnerNode()->GetName().c_str());
    return FAILED;
  }
  return SUCCESS;
}

///
/// @brief Insert node
/// @param [in] graph
/// @param [in] out_anchor
/// @param [in] in_anchor
/// @param [in] type
/// @return Status
///
Status CondPass::InsertNode(const ComputeGraphPtr &graph, const OutDataAnchorPtr &out_anchor,
                            const InDataAnchorPtr &in_anchor, const std::string &type) {
  GE_CHECK_NOTNULL(out_anchor);
  GE_CHECK_NOTNULL(in_anchor);
  GELOGD("Begin to insert %s node.", type.c_str());
  GE_CHECK_NOTNULL(out_anchor->GetOwnerNode()->GetOpDesc());
  GE_CHECK_NOTNULL(in_anchor->GetOwnerNode()->GetOpDesc());
  GeTensorDesc in_tensor = out_anchor->GetOwnerNode()->GetOpDesc()->GetOutputDesc(out_anchor->GetIdx());
  GeTensorDesc out_tensor = in_anchor->GetOwnerNode()->GetOpDesc()->GetInputDesc(out_anchor->GetIdx());
  out_tensor.SetDataType(DT_INT32);
  out_tensor.SetOriginDataType(DT_INT32);
  out_tensor.SetShape(in_tensor.GetShape());
  out_tensor.SetOriginShape(in_tensor.GetOriginShape());
  OpDescBuilder op_desc_builder(in_anchor->GetOwnerNode()->GetName() + "_" + type, type);
  OpDescPtr op_desc = op_desc_builder.AddInput("x", in_tensor).AddOutput("y", out_tensor).Build();
  if (op_desc == nullptr) {
    GELOGE(FAILED, "Create op_desc failed.");
    return FAILED;
  }
  NodePtr new_node = graph->AddNode(op_desc);
  if (new_node == nullptr) {
    GELOGE(FAILED, "Create %s node failed.", type.c_str());
    return FAILED;
  }
  AddRePassNode(new_node);
  if (GraphUtils::InsertNodeAfter(out_anchor, { in_anchor }, new_node) != GRAPH_SUCCESS) {
    GELOGE(FAILED, "Insert %s node %s between %s->%s failed.", type.c_str(),
           new_node->GetName().c_str(), out_anchor->GetOwnerNode()->GetName().c_str(),
           in_anchor->GetOwnerNode()->GetName().c_str());
    return FAILED;
  }
  return SUCCESS;
}

///
/// @brief Add cast node
/// @param [in] graph
/// @param [in] name
/// @param [in] tensor
/// @param [in] src
/// @param [in] dst
/// @return NodePtr
///
NodePtr CondPass::AddCastNode(const ComputeGraphPtr &graph, const std::string &name, const GeTensorDesc &tensor,
                              DataType src, DataType dst) {
  GELOGI("Begin to create cast op: %s, from %d to %d", name.c_str(), src, dst);
  GeTensorDesc in_tensor = tensor;
  in_tensor.SetDataType(src);
  in_tensor.SetOriginDataType(src);
  GeTensorDesc out_tensor = tensor;
  out_tensor.SetDataType(dst);
  out_tensor.SetOriginDataType(dst);
  OpDescBuilder op_desc_builder(name, CAST);
  OpDescPtr cast_desc = op_desc_builder.AddInput("x", in_tensor).AddOutput("y", out_tensor).Build();
  if (cast_desc == nullptr) {
    GELOGE(FAILED, "Create cast op_desc failed, name: %s.", name.c_str());
    return nullptr;
  }
  if (!(AttrUtils::SetInt(cast_desc, CAST_ATTR_SRCT, src) &&
        AttrUtils::SetInt(cast_desc, CAST_ATTR_DSTT, dst) &&
        AttrUtils::SetInt(cast_desc, CAST_ATTR_DST_TYPE, dst) &&
        AttrUtils::SetBool(cast_desc, CAST_ATTR_TRUNCATE, false))) {
    GELOGE(FAILED, "Set CAST_ATTR failed, node: %s.", name.c_str());
    return nullptr;
  }
  NodePtr cast_node = graph->AddNode(cast_desc);
  if (cast_node == nullptr) {
    GELOGE(FAILED, "Add cast node failed, name: %s.", name.c_str());
    return nullptr;
  }
  AddRePassNode(cast_node);
  return cast_node;
}
} // namespace ge
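
The four cases listed in the comment inside CondPass::Run above boil down to a small decision table: a non-scalar cond input gets a Size node, a scalar string input gets a StringLength node, a scalar of another supported type gets a Cast to int32, and an int32 scalar is used as-is. The standalone sketch below restates that table with plain stand-in types; CondDType, CondTensorInfo, and ChooseCondRewrite are illustrative names invented for this example and are not part of GE. It only makes the selection logic explicit and does not replace the pass itself.

#include <cstdint>
#include <vector>

// Stand-in for the subset of tensor information CondPass::Run inspects.
enum class CondDType { kString, kBool, kFloat, kDouble, kUint8, kInt16, kInt8, kInt64, kInt32, kOther };

struct CondTensorInfo {
  std::vector<int64_t> dims;  // empty == scalar
  CondDType dtype;
};

// Which node, if any, would be inserted in front of the cond input of If / NetOutput(while).
enum class CondRewrite { kInsertSize, kInsertStringLength, kInsertCastToInt32, kNone, kUnsupported };

CondRewrite ChooseCondRewrite(const CondTensorInfo &cond) {
  if (!cond.dims.empty()) {
    return CondRewrite::kInsertSize;            // case 1: non-scalar -> Size(int32)
  }
  switch (cond.dtype) {
    case CondDType::kString:
      return CondRewrite::kInsertStringLength;  // case 2: string scalar -> StringLength(int32)
    case CondDType::kBool:
    case CondDType::kFloat:
    case CondDType::kDouble:
    case CondDType::kUint8:
    case CondDType::kInt16:
    case CondDType::kInt8:
    case CondDType::kInt64:
      return CondRewrite::kInsertCastToInt32;   // case 3: other supported scalar -> Cast(to int32)
    case CondDType::kInt32:
      return CondRewrite::kNone;                // case 4: int32 scalar is passed through unchanged
    default:
      return CondRewrite::kUnsupported;         // any other type causes the pass to fail
  }
}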

The Graph Engine (GE) is a submodule of MindSpore, implemented in C++. It sits between the front-end module ME and the underlying hardware and acts as the bridge between them. GE takes the graph delivered by ME as input, applies a series of deep graph optimizations, and finally outputs a graph that can run efficiently on the underlying hardware. GE performs optimizations tailored to the hardware architecture of the Ascend AI processor in order to fully exploit its compute power. During model training and inference, GE is invoked automatically and is transparent to the user. GE mainly consists of two parts, GE API and GE Core; the detailed architecture diagram is shown below.