
subgraph_pass.h

/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef GE_GRAPH_PASSES_SUBGRAPH_PASS_H_
#define GE_GRAPH_PASSES_SUBGRAPH_PASS_H_

#include "inc/graph_pass.h"

namespace ge {
class SubgraphPass : public GraphPass {
 public:
  /**
   * @ingroup ge
   * @brief Subgraph optimizer.
   * @param [in] graph: Input ComputeGraph
   * @return: 0 for success / others for fail
   */
  Status Run(ComputeGraphPtr graph) override;

 private:
  /**
   * @ingroup ge
   * @brief Check Subgraph Data node.
   * @param [in] graph: ComputeGraph.
   * @param [in] node: Data node in Subgraph.
   * @return: 0 for SUCCESS / others for FAILED
   */
  Status SubgraphInputNode(const ComputeGraphPtr &graph, const NodePtr &node);

  /**
   * @ingroup ge
   * @brief Check Subgraph NetOutput node.
   * @param [in] graph: ComputeGraph.
   * @param [in] node: NetOutput node in Subgraph.
   * @return: 0 for SUCCESS / others for FAILED
   */
  Status SubgraphOutputNode(const ComputeGraphPtr &graph, const NodePtr &node);

  /**
   * @ingroup ge
   * @brief Check whether the edge is Input->While and whether Input links to other nodes.
   * @param [in] graph: ComputeGraph.
   * @param [in] node: While node.
   * @return: 0 for SUCCESS / others for FAILED
   */
  Status WhileInputNodes(const ComputeGraphPtr &graph, const NodePtr &node);

  /**
   * @ingroup ge
   * @brief Check body subgraph of While op
   * @param [in] graph: ComputeGraph.
   * @param [in] node: While node.
   * @return: 0 for SUCCESS / others for FAILED
   */
  Status WhileBodySubgraph(const ComputeGraphPtr &graph, const NodePtr &node);

  /**
   * @ingroup ge
   * @brief Insert input memcpy node in while_body
   * @param [in] graph: while_body
   * @param [in] data_nodes: data_nodes
   * @return: 0 for SUCCESS / others for FAILED
   */
  Status InsertInputMemcpy(const ComputeGraphPtr &graph, const std::vector<NodePtr> &data_nodes);

  /**
   * @ingroup ge
   * @brief Insert output memcpy node in while_body
   * @param [in] graph: while_body
   * @param [in] output_node: NetOutput
   * @param [in] bypass_index
   * @return: 0 for SUCCESS / others for FAILED
   */
  Status InsertOutputMemcpy(const ComputeGraphPtr &graph, const NodePtr &output_node,
                            const std::set<uint32_t> &bypass_index);

  /**
   * @ingroup ge
   * @brief Check whether data->netoutput passes through the while body unchanged.
   * @param [in] node: data node
   * @param [out] bypass_index
   * @return: false for data->netoutput without change in while body / true for others
   */
  bool CheckInsertInputMemcpy(const NodePtr &node, std::set<uint32_t> &bypass_index);

  /**
   * @ingroup ge
   * @brief Check whether the edge is AtomicOp->NetOutput.
   * @param [in] node
   * @param [in] out_index
   * @return: true for AtomicOp->NetOutput / false for others
   */
  bool IsAtomicRequired(const NodePtr &node, int64_t out_index);

  /**
   * @ingroup ge
   * @brief Check whether the edge is OutputContinuesRequiredOp->NetOutput.
   * @param [in] node
   * @return: true for OutputContinuesRequiredOp->NetOutput / false for others
   */
  bool IsOutputContinuesRequired(const NodePtr &node);

  /**
   * @ingroup ge
   * @brief Check whether the edge is InputContinuesRequiredOp->NetOutput.
   * @param [in] node
   * @return: true for InputContinuesRequiredOp->NetOutput / false for others
   */
  bool IsInputContinuesRequired(const NodePtr &node);

  /**
   * @ingroup ge
   * @brief Insert memcpy node
   * @param [in] graph
   * @param [in] out_anchor
   * @param [in] in_anchors
   * @param [in] name
   * @return: 0 for success / others for fail
   */
  Status InsertMemcpyNode(const ComputeGraphPtr &graph, const OutDataAnchorPtr &out_anchor,
                          const std::vector<InDataAnchorPtr> &in_anchors, const std::string &name);

  ///
  /// @brief Insert node: src->insert_node:input_index, insert_node:output_index->dst
  /// @param [in] src
  /// @param [in] dsts
  /// @param [in] insert_node
  /// @param [in] input_index
  /// @param [in] output_index
  /// @return Status
  ///
  Status InsertNodeBetween(const OutDataAnchorPtr &src, const std::vector<InDataAnchorPtr> &dsts,
                           const NodePtr &insert_node, uint32_t input_index, uint32_t output_index);
};
}  // namespace ge
#endif  // GE_GRAPH_PASSES_SUBGRAPH_PASS_H_
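
The header exposes a single public entry point, Run(); everything else is a private helper used while rewriting subgraphs (inserting memcpy nodes around Data, NetOutput and While nodes). As a rough illustration of how a caller could exercise the pass, here is a minimal sketch; the include path and the assumption that SubgraphPass is default-constructible are inferred from this header alone and are not confirmed against the rest of the GraphEngine code base.

// Minimal sketch, not part of the original header: drive the pass through its
// only public entry point. Include path and default-constructibility of
// SubgraphPass are assumptions inferred from the header above.
#include <iostream>

#include "graph/passes/subgraph_pass.h"  // path assumed from the include guard

ge::Status OptimizeWithSubgraphPass(const ge::ComputeGraphPtr &graph) {
  if (graph == nullptr) {
    std::cerr << "ComputeGraph is null, nothing to optimize" << std::endl;
    return 1;  // non-zero signals failure, per the header's @return convention
  }
  ge::SubgraphPass subgraph_pass;
  // Run() returns 0 on success and any other value on failure.
  ge::Status ret = subgraph_pass.Run(graph);
  if (ret != 0) {
    std::cerr << "SubgraphPass::Run failed, ret = " << ret << std::endl;
  }
  return ret;
}

In practice the pass is invoked from inside GE's own optimization pipeline rather than by end users, which matches the description of GE below.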

The Graph Engine (GE) module is a submodule of MindSpore. It is implemented in C++ and sits between the front-end module ME and the underlying hardware, acting as the bridge between the two. GE takes the graph delivered by ME as input, applies a series of deep graph optimizations, and finally produces a graph that can run efficiently on the underlying hardware. GE performs optimizations tailored to the hardware architecture of the Ascend AI processor in order to fully exploit its compute power. During model training and inference, GE is invoked automatically and is transparent to the user. GE consists mainly of two parts, GE API and GE Core; the detailed architecture diagram is shown below.