
analyzer.h 4.9 kB

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef DOMI_ANALYZER_ANANLYZER_H_
#define DOMI_ANALYZER_ANANLYZER_H_

#include "nlohmann/json.hpp"

#include <cstdlib>
#include <fstream>
#include <map>
#include <memory>
#include <mutex>
#include <string>
#include <vector>

#include "external/ge/ge_api_types.h"
#include "graph/compute_graph.h"
#include "graph/node.h"

namespace ge {
namespace analyzer {
enum AnalyzeType {
  PARSER = 0,
  INFER_SHAPE = 1,
  CHECKSUPPORT = 2,
  GRAPH_OPTIMIZE = 3,
  GRAPH_PARTION = 4,
  GRAPH_BUILDER = 5,
};

struct TensorInfo {
  std::vector<int64_t> shape;
  std::string d_type;
  std::string layout;
};

struct OpInfo {
  std::string error_type;
  std::string op_name;
  std::string op_type;
  std::vector<TensorInfo> input_info;
  std::vector<TensorInfo> output_info;
  std::string reason;
};

struct GraphInfo {
  uint64_t session_id = 0;
  uint64_t graph_id = 0;
  std::vector<OpInfo> op_info;
};

struct DataInfo {
  DataInfo() = default;
  ~DataInfo() = default;

  DataInfo(uint64_t sess, uint64_t graph, AnalyzeType type, ge::NodePtr node, std::string error_info) {
    session_id = sess;
    graph_id = graph;
    analyze_type = type;
    node_ptr = node;
    reason = error_info;
  }

  uint64_t session_id;
  uint64_t graph_id;
  AnalyzeType analyze_type;
  ge::NodePtr node_ptr{nullptr};
  std::string reason;
};
}  // namespace analyzer

class Analyzer {
 public:
  /**
   * @ingroup ge
   * @brief: get analyzer instance.
   * @param [in]: None
   * @return: Analyzer instance ptr
   */
  static Analyzer *GetInstance();

  /**
   * @ingroup ge
   * @brief: check whether the env var ENABLE_NETWORK_ANALYSIS_DEBUG is set.
   * When it is set, the adaptor sink GeOp graph is kept even if it fails.
   * @param [in]: None
   * @return: true: env enabled  false: env disabled
   */
  bool IsEnableNetAnalyzeDebug() { return std::getenv("ENABLE_NETWORK_ANALYSIS_DEBUG") != nullptr; }

  /**
   * @ingroup ge
   * @brief: build buffer object by session id and graph id.
   * @param [in]: session id & graph id
   * @return: 0: success  other: failed
   */
  ge::Status BuildJsonObject(uint64_t session_id, uint64_t graph_id);

  /**
   * @ingroup ge
   * @brief: get buffer object by session id and graph id.
   * @param [in]: session id & graph id
   * @return: nullptr if failed
   */
  std::shared_ptr<analyzer::GraphInfo> GetJsonObject(uint64_t session_id, uint64_t graph_id);

  /**
   * @ingroup ge
   * @brief: analyzer global init method.
   * @param [in]: None
   * @return: None
   */
  ge::Status Initialize();

  /**
   * @ingroup ge
   * @brief: deconstruct method. Release all resources used by the analyzer.
   * @param [in]: None
   * @return: None
   */
  void Finalize();

  /**
   * @ingroup ge
   * @brief: deconstruct method. Only release resources of the given session id.
   * @param [in]: session id
   * @return: None
   */
  void DestroySessionJsonObject(uint64_t session_id);

  /**
   * @ingroup ge
   * @brief: deconstruct method. Only release resources of the given session id and graph id.
   * @param [in]: session id & graph id
   * @return: None
   */
  void DestroyGraphJsonObject(uint64_t session_id, uint64_t graph_id);

  /**
   * @ingroup ge
   * @brief: main process method. Buffer analyzed data and output it to a json file.
   * @param [in]: DataInfo object
   * @return: 0: SUCCESS  other: FAILED
   */
  ge::Status DoAnalyze(analyzer::DataInfo &data_info);

  Analyzer(const Analyzer &) = delete;
  Analyzer &operator=(const Analyzer &) = delete;
  Analyzer(Analyzer &&) = delete;
  Analyzer &operator=(Analyzer &&) = delete;

 private:
  void TensorInfoToJson(nlohmann::json &j, const analyzer::TensorInfo &tensor_info);
  void OpInfoToJson(nlohmann::json &j, const analyzer::OpInfo &op_info);
  void GraphInfoToJson(nlohmann::json &j, const analyzer::GraphInfo &graph_info);
  ge::Status SaveAnalyzerDataToFile();
  ge::Status SaveOpInfo(ge::OpDescPtr desc, analyzer::DataInfo &data_info,
                        std::shared_ptr<analyzer::GraphInfo> graph_info);
  void ClearHistoryFile();
  ge::Status CreateAnalyzerFile();

  Analyzer() {}
  ~Analyzer() = default;

 private:
  std::map<uint64_t, std::map<uint64_t, std::shared_ptr<analyzer::GraphInfo>>> graph_infos_;
  std::recursive_mutex mutex_;  // protect graph_infos_
  std::mutex file_mutex_;       // protect json_file_
  std::ofstream json_file_;
  std::string json_file_name_;
};
}  // namespace ge
#endif  // DOMI_ANALYZER_ANANLYZER_H_
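
For orientation, a minimal usage sketch of the Analyzer interface declared above. It is not part of the repository: the include path, the ge::SUCCESS status constant, and the error string are assumptions, and inside GE the node pointer and the session/graph ids would come from the graph currently being compiled rather than from caller code like this.

#include "analyzer/analyzer.h"  // include path assumed

// Record one infer-shape failure for a node of graph `graph_id` in session `session_id`.
void ReportInferShapeFailure(uint64_t session_id, uint64_t graph_id, const ge::NodePtr &node) {
  ge::Analyzer *analyzer = ge::Analyzer::GetInstance();  // process-wide singleton

  // Make sure a GraphInfo buffer exists for this (session, graph) pair.
  if (analyzer->BuildJsonObject(session_id, graph_id) != ge::SUCCESS) {
    return;  // nothing to record if buffering could not be set up
  }

  // Describe the failure: which compile phase, which node, and why (reason text is made up here).
  ge::analyzer::DataInfo data_info(session_id, graph_id, ge::analyzer::INFER_SHAPE,
                                   node, "infer shape failed: shape rank is unknown");

  // Buffer the record; the analyzer writes it into its json report file.
  (void)analyzer->DoAnalyze(data_info);

  // Release the per-graph buffer once this graph is finished with.
  analyzer->DestroyGraphJsonObject(session_id, graph_id);
}

Initialize() and Finalize() are presumably called once around all of this by whatever owns the GE lifetime, so per-failure code like the sketch above only needs the build/analyze/destroy calls.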

The Graph Engine module (GE) is a submodule of MindSpore. Implemented in C++, it sits between the front-end module ME and the underlying hardware and acts as the bridge between them. GE takes the graph delivered by ME as input, performs a series of deep graph optimizations, and finally outputs a graph that can run efficiently on the underlying hardware. GE applies optimizations tailored to the hardware architecture of the Ascend AI processor in order to fully exploit its compute power. During model training/inference, GE is invoked automatically and is not visible to the user. GE consists mainly of two parts, GE API and GE Core; the detailed architecture diagram is shown below.
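
As a rough illustration of the GE API side of that split (normally driven internally by ME rather than by user code), here is a hedged sketch of handing a graph to GE and running it through GE Core. It assumes the public GEInitialize/Session/RunGraph entry points from ge_api.h with the std::string option maps used in this generation of the code base; the option contents and tensor setup are omitted.

#include <map>
#include <string>
#include <vector>

#include "ge/ge_api.h"  // public GE API header (path assumed)

// Sketch: compile and run an already-built ge::Graph on the Ascend backend via GE.
int RunGraphWithGe(const ge::Graph &graph, const std::vector<ge::Tensor> &inputs) {
  std::map<std::string, std::string> options;  // global / session options left empty here
  if (ge::GEInitialize(options) != ge::SUCCESS) {
    return -1;
  }

  int ret = -1;
  {
    ge::Session session(options);  // one session can own several graphs
    const uint32_t graph_id = 1U;

    // Hand the graph produced by the front end (ME) over to GE Core ...
    if (session.AddGraph(graph_id, graph) == ge::SUCCESS) {
      std::vector<ge::Tensor> outputs;
      // ... which optimizes, builds and executes it on the Ascend device.
      if (session.RunGraph(graph_id, inputs, outputs) == ge::SUCCESS) {
        ret = 0;
      }
    }
  }  // session is destroyed before GEFinalize

  (void)ge::GEFinalize();
  return ret;
}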