You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

single_op_parser.cc 20 kB

4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528
  1. /**
  2. * Copyright 2020 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
#include "single_op_parser.h"

#include <algorithm>
#include <fstream>
#include <sstream>
#include <utility>
#include <vector>

#include <nlohmann/json.hpp>

#include "common/ge_inner_error_codes.h"
#include "common/util/error_manager/error_manager.h"
#include "framework/common/debug/ge_log.h"
#include "framework/common/util.h"
#include "graph/operator_factory_impl.h"
#include "graph/utils/op_desc_utils.h"
#include "graph/utils/tensor_utils.h"
  29. using Json = nlohmann::json;
  30. using std::string;
  31. using std::vector;
  32. using std::map;
  33. namespace ge {
  34. namespace {
  35. constexpr char const *kKeyOp = "op";
  36. constexpr char const *kKeyInputDesc = "input_desc";
  37. constexpr char const *kKeyOutputDesc = "output_desc";
  38. constexpr char const *kKeyAttr = "attr";
  39. constexpr char const *kKeyName = "name";
  40. constexpr char const *kKeyType = "type";
  41. constexpr char const *kKeyShape = "shape";
  42. constexpr char const *kKeyShapeRange = "shape_range";
  43. constexpr char const *kKeyValue = "value";
  44. constexpr char const *kKeyFormat = "format";
  45. constexpr char const *kFileSuffix = ".om";
  46. constexpr char const *kKeyDynamicInput = "dynamic_input";
  47. constexpr char const *kKeyDynamicOutput = "dynamic_output";
  48. constexpr int kDumpJsonIndent = 2;
  49. constexpr int kShapeRangePairSize = 2;
  50. constexpr int kShapeRangeLow = 0;
  51. constexpr int kShapeRangeHigh = 1;
  52. map<string, GeAttrValue::ValueType> kAttrTypeDict = {
  53. {"bool", GeAttrValue::VT_BOOL},
  54. {"int", GeAttrValue::VT_INT},
  55. {"float", GeAttrValue::VT_FLOAT},
  56. {"string", GeAttrValue::VT_STRING},
  57. {"list_bool", GeAttrValue::VT_LIST_BOOL},
  58. {"list_int", GeAttrValue::VT_LIST_INT},
  59. {"list_float", GeAttrValue::VT_LIST_FLOAT},
  60. {"list_string", GeAttrValue::VT_LIST_STRING},
  61. {"list_list_int", GeAttrValue::VT_LIST_LIST_INT},
  62. {"data_type", GeAttrValue::VT_DATA_TYPE},
  63. };
  64. map<string, DataType> kDataTypeDict = {
  65. {"bool", DT_BOOL},
  66. {"int8", DT_INT8},
  67. {"uint8", DT_UINT8},
  68. {"int16", DT_INT16},
  69. {"uint16", DT_UINT16},
  70. {"int32", DT_INT32},
  71. {"uint32", DT_UINT32},
  72. {"int64", DT_INT64},
  73. {"uint64", DT_UINT64},
  74. {"float16", DT_FLOAT16},
  75. {"half", DT_FLOAT16},
  76. {"fp16", DT_FLOAT16},
  77. {"float", DT_FLOAT},
  78. {"float32", DT_FLOAT},
  79. {"double", DT_DOUBLE},
  80. };
  81. map<string, Format> kFormatDict = {
  82. {"nchw", FORMAT_NCHW},
  83. {"nhwc", FORMAT_NHWC},
  84. {"nd", FORMAT_ND},
  85. {"fractal_nz", FORMAT_FRACTAL_NZ},
  86. {"fractal_z", FORMAT_FRACTAL_Z},
  87. {"nc1hwc0", FORMAT_NC1HWC0},
  88. };
  89. }
// Reads the "value" field of |j| as type T and stores it into |attr|.
// j.at / get may throw nlohmann::json exceptions when the key is missing or
// not convertible to T; ParseSingleOpList catches those at the top level.
template<typename T>
void SetAttrValue(const Json &j, SingleOpAttr &attr) {
  attr.value.SetValue<T>(j.at(kKeyValue).get<T>());
}
  94. template<typename T>
  95. T GetValue(const map<string, T> &dict, string &key, T default_val) {
  96. transform(key.begin(), key.end(), key.begin(), ::tolower);
  97. auto it = dict.find(key);
  98. if (it == dict.end()) {
  99. return default_val;
  100. }
  101. return it->second;
  102. }
  103. void from_json(const Json &j, SingleOpTensorDesc &desc) {
  104. desc.dims = j.at(kKeyShape).get<vector<int64_t>>();
  105. auto it = j.find(kKeyShapeRange);
  106. if (it != j.end()) {
  107. desc.dim_ranges = j.at(kKeyShapeRange).get<vector<std::vector<int64_t>>>();
  108. }
  109. string format_str = j.at(kKeyFormat).get<string>();
  110. string type_str = j.at(kKeyType).get<string>();
  111. desc.format = GetValue(kFormatDict, format_str, FORMAT_RESERVED);
  112. desc.type = GetValue(kDataTypeDict, type_str, DT_UNDEFINED);
  113. auto tensor_name = j.find(kKeyName);
  114. if (tensor_name != j.end()) {
  115. desc.name = tensor_name->get<string>();
  116. }
  117. auto dynamic_input_name = j.find(kKeyDynamicInput);
  118. if (dynamic_input_name != j.end()) {
  119. desc.dynamic_input_name = dynamic_input_name->get<string>();
  120. }
  121. }
// Deserializes one attribute entry: reads "name" and "type", then dispatches
// on the declared type string to store a typed value into attr.value.
// An unsupported type string is logged and leaves attr.value empty;
// Validate() later rejects such attrs via attr.value.IsEmpty().
void from_json(const Json &j, SingleOpAttr &attr) {
  attr.name = j.at(kKeyName).get<string>();
  attr.type = j.at(kKeyType).get<string>();
  auto it = kAttrTypeDict.find(attr.type);
  if (it == kAttrTypeDict.end()) {
    GELOGE(UNSUPPORTED, "Parse attr[%s] failed. Unsupported type: %s", attr.name.c_str(), attr.type.c_str());
    return;
  }
  // Map the textual type to the concrete C++ type GeAttrValue stores.
  switch (it->second) {
    case GeAttrValue::VT_BOOL:
      SetAttrValue<bool>(j, attr);
      break;
    case GeAttrValue::VT_INT:
      SetAttrValue<int64_t>(j, attr);
      break;
    case GeAttrValue::VT_FLOAT:
      SetAttrValue<float>(j, attr);
      break;
    case GeAttrValue::VT_STRING:
      SetAttrValue<string>(j, attr);
      break;
    case GeAttrValue::VT_LIST_BOOL:
      SetAttrValue<vector<bool>>(j, attr);
      break;
    case GeAttrValue::VT_LIST_INT:
      SetAttrValue<vector<int64_t>>(j, attr);
      break;
    case GeAttrValue::VT_LIST_FLOAT:
      SetAttrValue<vector<float>>(j, attr);
      break;
    case GeAttrValue::VT_LIST_STRING:
      SetAttrValue<vector<string>>(j, attr);
      break;
    case GeAttrValue::VT_LIST_LIST_INT:
      SetAttrValue<vector<vector<int64_t>>>(j, attr);
      break;
    case GeAttrValue::VT_DATA_TYPE:
      SetAttrValue<DataType>(j, attr);
      break;
    default:
      // Unreachable while kAttrTypeDict and this switch stay in sync.
      GELOGE(UNSUPPORTED, "Parse attr[%s] failed. Unsupported type: %s", attr.name.c_str(), attr.type.c_str());
      break;
  }
}
  166. void from_json(const Json &j, SingleOpDesc &desc) {
  167. desc.op = j.at(kKeyOp).get<string>();
  168. auto input_desc = j.find(kKeyInputDesc);
  169. if (input_desc != j.end()) {
  170. desc.input_desc = input_desc->get<vector<SingleOpTensorDesc>>();
  171. }
  172. auto output_desc = j.find(kKeyOutputDesc);
  173. if (output_desc != j.end()) {
  174. desc.output_desc = output_desc->get<vector<SingleOpTensorDesc>>();
  175. }
  176. auto attr_field = j.find(kKeyAttr);
  177. if (attr_field != j.end()) {
  178. desc.attrs = attr_field->get<vector<SingleOpAttr>>();
  179. }
  180. }
// Reads the --singleop JSON file at |file| into |json_obj|.
// Returns INTERNAL_ERROR when the path cannot be resolved, FAILED when the
// file cannot be opened, PARAM_INVALID when the content is not valid JSON,
// SUCCESS otherwise. Each failure is also reported to ErrorManager for ATC.
Status SingleOpParser::ReadJsonFile(const std::string &file, Json &json_obj) {
  std::string real_path = RealPath(file.c_str());
  if (real_path.empty()) {
    ErrorManager::GetInstance().ATCReportErrMessage("E10023", {"value"}, {file});
    GELOGE(FAILED, "Input parameter[--singleop]'s value[%s] is not a valid path.", file.c_str());
    return INTERNAL_ERROR;
  }
  std::ifstream ifs(real_path);
  if (!ifs.is_open()) {
    ErrorManager::GetInstance().ATCReportErrMessage("E10024", {"value"}, {file});
    GELOGE(FAILED, "Open file[%s] provided in input parameter[--singleop] failed.", file.c_str());
    return FAILED;
  }
  try {
    // nlohmann::json's stream extraction throws on malformed input.
    ifs >> json_obj;
  } catch (const std::exception &e) {
    ErrorManager::GetInstance().ATCReportErrMessage("E10025", {"realpath", "errmsg"}, {real_path, e.what()});
    GELOGE(PARAM_INVALID, "Parse file[%s] provided in input parameter[--singleop] failed, exception = %s.",
           real_path.c_str(), e.what());
    return PARAM_INVALID;
  }
  // Redundant with the ifstream destructor; kept for explicitness.
  ifs.close();
  return SUCCESS;
}
  205. bool SingleOpParser::Validate(const SingleOpDesc &op_desc) {
  206. if (op_desc.op.empty()) {
  207. ErrorManager::GetInstance().ATCReportErrMessage("E10026");
  208. GELOGE(PARAM_INVALID, "Op name is empty");
  209. return false;
  210. }
  211. int index = 0;
  212. for (auto &tensor_desc : op_desc.input_desc) {
  213. if ((tensor_desc.type == DT_UNDEFINED && tensor_desc.format != FORMAT_RESERVED) ||
  214. (tensor_desc.type != DT_UNDEFINED && tensor_desc.format == FORMAT_RESERVED)){
  215. ErrorManager::GetInstance().ATCReportErrMessage("E10027", {"input", "type", "index"},
  216. {"intput", "datatype or format", std::to_string(index)});
  217. GELOGE(PARAM_INVALID, "Input's dataType or format is invalid when the index is %d", index);
  218. return false;
  219. }
  220. ++index;
  221. }
  222. index = 0;
  223. for (auto &tensor_desc : op_desc.output_desc) {
  224. if (tensor_desc.type == DT_UNDEFINED) {
  225. ErrorManager::GetInstance().ATCReportErrMessage("E10027", {"input", "type", "index"},
  226. {"output", "datatype", std::to_string(index)});
  227. GELOGE(PARAM_INVALID, "Output's dataType is invalid when the index is %d", index);
  228. return false;
  229. }
  230. if (tensor_desc.format == FORMAT_RESERVED) {
  231. ErrorManager::GetInstance().ATCReportErrMessage("E10027", {"input", "type", "index"},
  232. {"output", "format", std::to_string(index)});
  233. GELOGE(PARAM_INVALID, "Output's format is invalid when the index is %d", index);
  234. return false;
  235. }
  236. ++index;
  237. }
  238. for (auto &attr : op_desc.attrs) {
  239. if (attr.name.empty()) {
  240. ErrorManager::GetInstance().ATCReportErrMessage("E10029");
  241. GELOGE(PARAM_INVALID, "attr name is empty");
  242. return false;
  243. }
  244. if (attr.value.IsEmpty()) {
  245. ErrorManager::GetInstance().ATCReportErrMessage("E10030", {"attrname"}, {attr.name});
  246. GELOGE(PARAM_INVALID, "Parse attr \"%s\" failed. ", attr.name.c_str());
  247. return false;
  248. }
  249. }
  250. return true;
  251. }
// Creates an OpDesc whose name and type are both |op_type|.
// Uses new(std::nothrow), so the returned pointer may be null on allocation
// failure; callers must check before use.
std::unique_ptr<OpDesc> SingleOpParser::CreateOpDesc(const string &op_type) {
  return std::unique_ptr<OpDesc>(new(std::nothrow) OpDesc(op_type, op_type));
}
  255. Status SingleOpParser::UpdateDynamicTensorName(std::vector<SingleOpTensorDesc> &desc) {
  256. std::map<std::string, int> dynamic_name_map;
  257. for (auto &tensor : desc) {
  258. if (tensor.dynamic_input_name.empty()) {
  259. continue;
  260. }
  261. if (dynamic_name_map.find(tensor.dynamic_input_name) == dynamic_name_map.end()) {
  262. dynamic_name_map[tensor.dynamic_input_name] = 0;
  263. } else {
  264. dynamic_name_map[tensor.dynamic_input_name]++;
  265. }
  266. tensor.name = tensor.dynamic_input_name + std::to_string(dynamic_name_map[tensor.dynamic_input_name]);
  267. }
  268. GELOGD("Update dynamic tensor name success!");
  269. return SUCCESS;
  270. }
// Converts a validated SingleOpDesc into a SingleOpBuildParam: builds the
// OpDesc (inputs, outputs, attrs), verifies its arity against the registered
// IR, and composes the output file name as
//   <index>_<op>[_<type>_<format>_<dim>...]... + ".om".
// Returns MEMALLOC_FAILED when OpDesc allocation fails, PARAM_INVALID when
// shape-range or IR verification fails, SUCCESS otherwise.
Status SingleOpParser::ConvertToBuildParam(int index,
                                           const SingleOpDesc &single_op_desc,
                                           SingleOpBuildParam &build_param) {
  auto op_desc = CreateOpDesc(single_op_desc.op);
  if (op_desc == nullptr) {
    GELOGE(MEMALLOC_FAILED, "Failed to create instance of opDesc");
    return MEMALLOC_FAILED;
  }
  std::stringstream file_name;
  file_name << index;
  file_name << "_" << single_op_desc.op;
  for (auto &desc : single_op_desc.input_desc) {
    // Each tensor contributes "_<type>_<format>_<dim>..." to the file name.
    file_name << "_" << desc.type << "_" << desc.format;
    for (auto dim : desc.dims) {
      file_name << "_" << dim;
    }
    GeTensorDesc ge_tensor_desc(GeShape(desc.dims),
                                desc.format,
                                desc.type);
    ge_tensor_desc.SetOriginFormat(desc.format);
    // Propagates a failing status from SetShapeRange without extra logging.
    GE_CHK_STATUS_RET_NOLOG(SetShapeRange(op_desc->GetName(), desc, ge_tensor_desc));
    TensorUtils::SetRealDimCnt(ge_tensor_desc, desc.dims.size());
    TensorUtils::SetInputTensor(ge_tensor_desc, true);
    TensorUtils::SetOutputTensor(ge_tensor_desc, false);
    // Named inputs (incl. generated dynamic-input names) keep their name.
    if (desc.name.empty()) {
      op_desc->AddInputDesc(ge_tensor_desc);
    } else {
      op_desc->AddInputDesc(desc.name, ge_tensor_desc);
    }
    build_param.inputs.emplace_back(ge_tensor_desc);
  }
  for (auto &desc : single_op_desc.output_desc) {
    file_name << "_" << desc.type << "_" << desc.format;
    for (auto dim : desc.dims) {
      file_name << "_" << dim;
    }
    GeTensorDesc ge_tensor_desc(GeShape(desc.dims),
                                desc.format,
                                desc.type);
    ge_tensor_desc.SetOriginFormat(desc.format);
    GE_CHK_STATUS_RET_NOLOG(SetShapeRange(op_desc->GetName(), desc, ge_tensor_desc));
    TensorUtils::SetRealDimCnt(ge_tensor_desc, desc.dims.size());
    // Mirror of the input loop with the input/output flags swapped.
    TensorUtils::SetInputTensor(ge_tensor_desc, false);
    TensorUtils::SetOutputTensor(ge_tensor_desc, true);
    if (desc.name.empty()) {
      op_desc->AddOutputDesc(ge_tensor_desc);
    } else {
      op_desc->AddOutputDesc(desc.name, ge_tensor_desc);
    }
    build_param.outputs.emplace_back(ge_tensor_desc);
  }
  for (const auto &attr : single_op_desc.attrs) {
    op_desc->SetAttr(attr.name, attr.value);
  }
  // Cross-check input/output counts against the IR definition, if one exists.
  if (VerifyOpInputOutputSizeByIr(*op_desc) != SUCCESS) {
    GELOGE(PARAM_INVALID, "Verify op [%s] input or output size failed.", op_desc->GetType().c_str());
    return PARAM_INVALID;
  }
  file_name << kFileSuffix;
  build_param.file_name = file_name.str();
  build_param.op_desc.reset(op_desc.release());
  return SUCCESS;
}
// Verifies that |current_op_desc| supplies at least as many inputs and
// outputs as the registered IR prototype of its op type requires.
// Ops with no IR registration (CreateOperator returns an empty operator)
// are skipped and accepted. Returns PARAM_INVALID on a size mismatch.
Status SingleOpParser::VerifyOpInputOutputSizeByIr(const OpDesc &current_op_desc) {
  ge::Operator operator_ir = ge::OperatorFactory::CreateOperator("tmp_operator", current_op_desc.GetType());
  if (!operator_ir.IsEmpty()) {
    auto opdesc_ir = ge::OpDescUtils::GetOpDescFromOperator(operator_ir);
    GE_CHECK_NOTNULL(opdesc_ir);
    size_t current_opdesc_inputs_num = current_op_desc.GetInputsSize();
    size_t ir_opdesc_inputs_num = opdesc_ir->GetInputsSize();
    // More inputs than the IR requires is allowed; fewer is an error.
    if (current_opdesc_inputs_num < ir_opdesc_inputs_num) {
      string reason = "is smaller than the ir needed input size " + std::to_string(ir_opdesc_inputs_num);
      ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
          {current_op_desc.GetName(), "input size " + std::to_string(current_opdesc_inputs_num), reason});
      GELOGE(PARAM_INVALID, "This op [%s] input size %zu is smaller than the ir needed input size %zu",
             current_op_desc.GetName().c_str(), current_opdesc_inputs_num, ir_opdesc_inputs_num);
      return PARAM_INVALID;
    }
    size_t current_opdesc_outputs_num = current_op_desc.GetOutputsSize();
    size_t ir_opdesc_outputs_num = opdesc_ir->GetOutputsSize();
    if (current_opdesc_outputs_num < ir_opdesc_outputs_num) {
      string reason = "is smaller than the ir needed output size " + std::to_string(ir_opdesc_outputs_num);
      ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
          {current_op_desc.GetName(), "output size " + std::to_string(current_opdesc_outputs_num), reason});
      GELOGE(PARAM_INVALID, "This op [%s] output size %zu is smaller than the ir needed output size %zu",
             current_op_desc.GetName().c_str(), current_opdesc_outputs_num, ir_opdesc_outputs_num);
      return PARAM_INVALID;
    }
  }
  return SUCCESS;
}
// Applies the shape range from |tensor_desc| onto |ge_tensor_desc|.
// Rules enforced:
// - UNKNOWN_DIM_NUM (unknown rank) is only legal when it is the entire
//   shape (dims == UNKNOWN_RANK) and no shape_range is supplied; in that
//   case no range is set and SUCCESS is returned.
// - A known dim d contributes the degenerate range [d, d]; each unknown
//   dim (< 0) consumes the next [low, high] pair from dim_ranges.
// - The number of range pairs must equal the number of unknown dims exactly.
// Returns PARAM_INVALID on any violation, SUCCESS otherwise.
Status SingleOpParser::SetShapeRange(const std::string &op_name,
                                     const SingleOpTensorDesc &tensor_desc,
                                     GeTensorDesc &ge_tensor_desc) {
  auto num_shape_ranges = tensor_desc.dim_ranges.size();
  GELOGD("Number of shape ranges = %zu", num_shape_ranges);
  auto it = std::find(tensor_desc.dims.begin(), tensor_desc.dims.end(), ge::UNKNOWN_DIM_NUM);
  if (it != tensor_desc.dims.end()) {
    // Unknown-rank marker present: it must be the whole shape.
    if (tensor_desc.dims != ge::UNKNOWN_RANK) {
      ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
                                                      {op_name,
                                                       "shape",
                                                       "has unknown rank but dim size is not one"});
      GELOGE(PARAM_INVALID, "Invalid tensor shape: [%s]", ge_tensor_desc.MutableShape().ToString().c_str());
      return PARAM_INVALID;
    }
    if (!tensor_desc.dim_ranges.empty()) {
      ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
                                                      {op_name,
                                                       "shape range",
                                                       "is not needed while the rank the shape is unknown"});
      GELOGE(PARAM_INVALID, "Shape range is not needed while the rank the shape is unknown");
      return PARAM_INVALID;
    }
    GELOGD("Shape is unknown rank, do not set shape range");
    return SUCCESS;
  }
  std::vector<std::pair<int64_t, int64_t>> shape_range;
  // range_index counts unknown dims seen so far == next dim_ranges entry.
  size_t range_index = 0;
  for (auto dim : tensor_desc.dims) {
    if (dim >= 0) {
      shape_range.emplace_back(dim, dim);
      GELOGD("Adding shape range: [%ld, %ld]", dim, dim);
    } else {
      GELOGD("To get shape range by index = %zu", range_index);
      if (range_index >= num_shape_ranges) {
        // ++range_index turns the 0-based index into a 1-based count for the
        // message; the function returns right after, so the mutation is safe.
        string reason = "is smaller than the unknown dim size " + std::to_string(++range_index);
        ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
                                                        {op_name,
                                                         "shape range size " + std::to_string(num_shape_ranges),
                                                         reason});
        GELOGE(PARAM_INVALID, "The number of shape_range mismatches that of unknown dims.");
        return PARAM_INVALID;
      }
      auto &range = tensor_desc.dim_ranges[range_index];
      if (range.size() != kShapeRangePairSize) {
        string reason = "has " + std::to_string(range.size()) + " item(s)";
        ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
                                                        {op_name,
                                                         "shape range " + std::to_string(range_index),
                                                         reason});
        GELOGE(PARAM_INVALID, "Invalid shape range entry. index = %zu, size = %zu", range_index, range.size());
        return PARAM_INVALID;
      }
      shape_range.emplace_back(range[kShapeRangeLow], range[kShapeRangeHigh]);
      GELOGD("Adding shape range: [%ld, %ld]", range[kShapeRangeLow], range[kShapeRangeHigh]);
      ++range_index;
    }
  }
  // Leftover range entries mean the user supplied more pairs than there are
  // unknown dims.
  if (num_shape_ranges != range_index) {
    string reason = "is greater than the unknown dim size " + std::to_string(range_index);
    ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
                                                    {op_name,
                                                     "shape range size " + std::to_string(num_shape_ranges),
                                                     reason});
    GELOGE(PARAM_INVALID,
           "The number of shape_range(%zu) mismatches that of unknown dims(%zu).",
           num_shape_ranges,
           range_index);
    return PARAM_INVALID;
  }
  // Only set a range when at least one unknown dim was present.
  if (range_index > 0) {
    ge_tensor_desc.SetShapeRange(shape_range);
  }
  return SUCCESS;
}
  437. Status SingleOpParser::ParseSingleOpList(const std::string &file, std::vector<SingleOpBuildParam> &op_list) {
  438. int index = 0;
  439. try {
  440. Json single_op_list_json;
  441. auto ret = ReadJsonFile(file, single_op_list_json);
  442. if (ret != SUCCESS) {
  443. return ret;
  444. }
  445. for (const Json &single_op_json : single_op_list_json) {
  446. SingleOpDesc single_op_desc;
  447. GELOGI("Parsing op[%d], jsonStr = %s", index, single_op_json.dump(kDumpJsonIndent).c_str());
  448. single_op_desc = single_op_json;
  449. if (UpdateDynamicTensorName(single_op_desc.input_desc) != SUCCESS) {
  450. GELOGE(FAILED, "Update dynamic tensor name failed!");
  451. return FAILED;
  452. }
  453. if (!Validate(single_op_desc)) {
  454. GELOGE(PARAM_INVALID, "Validate the index[%d] of op failed when read json file[%s].", index, file.c_str());
  455. return PARAM_INVALID;
  456. }
  457. SingleOpBuildParam param;
  458. ret = ConvertToBuildParam(index, single_op_desc, param);
  459. if (ret != SUCCESS) {
  460. return ret;
  461. }
  462. op_list.emplace_back(param);
  463. GELOGI("Parse the index[%d] of op success", index);
  464. index += 1;
  465. }
  466. } catch (const nlohmann::json::exception &e) {
  467. ErrorManager::GetInstance().ATCReportErrMessage("E10032", {"index", "jsonfile", "exception"},
  468. {std::to_string(index), file, e.what()});
  469. GELOGE(PARAM_INVALID, "Parse the index[%d] of op failed when read json file[%s], exception %s",
  470. index, file.c_str(), e.what());
  471. return PARAM_INVALID;
  472. }
  473. return SUCCESS;
  474. }
  475. } // namespace ge

图引擎模块(GE)是MindSpore的一个子模块,其代码由C++实现,位于前端模块ME和底层硬件之间,起到承接作用。图引擎模块以ME下发的图作为输入,然后进行一系列的深度图优化操作,最后输出一张可以在底层硬件上高效运行的图。GE针对昇腾AI处理器的硬件结构特点,做了特定的优化工作,以此来充分发挥出昇腾AI处理器的强大算力。在进行模型训练/推理时,GE会被自动调用而用户并不感知。GE主要由GE API和GE Core两部分组成,详细的架构图如下所示