
single_op_parser.cc 22 kB

/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "single_op_parser.h"

#include <vector>
#include <algorithm>
#include <fstream>
#include <sstream>

#include <nlohmann/json.hpp>

#include "framework/common/debug/ge_log.h"
#include "common/util/error_manager/error_manager.h"
#include "common/ge_inner_error_codes.h"
#include "framework/common/util.h"
#include "graph/utils/tensor_utils.h"
#include "graph/utils/op_desc_utils.h"
#include "graph/operator_factory_impl.h"

using Json = nlohmann::json;
using std::string;
using std::vector;
using std::map;

namespace ge {
namespace {
constexpr char const *kKeyOp = "op";
constexpr char const *kKeyInputDesc = "input_desc";
constexpr char const *kKeyOutputDesc = "output_desc";
constexpr char const *kKeyAttr = "attr";
constexpr char const *kKeyName = "name";
constexpr char const *kKeyType = "type";
constexpr char const *kKeyShape = "shape";
constexpr char const *kKeyOriginShape = "origin_shape";
constexpr char const *kKeyShapeRange = "shape_range";
constexpr char const *kKeyValue = "value";
constexpr char const *kKeyFormat = "format";
constexpr char const *kKeyOriginFormat = "origin_format";
constexpr char const *kFileSuffix = ".om";
constexpr char const *kKeyDynamicInput = "dynamic_input";
constexpr char const *kKeyDynamicOutput = "dynamic_output";
constexpr int kDumpJsonIndent = 2;
constexpr int kShapeRangePairSize = 2;
constexpr int kShapeRangeLow = 0;
constexpr int kShapeRangeHigh = 1;

map<string, GeAttrValue::ValueType> kAttrTypeDict = {
  {"bool", GeAttrValue::VT_BOOL},
  {"int", GeAttrValue::VT_INT},
  {"float", GeAttrValue::VT_FLOAT},
  {"string", GeAttrValue::VT_STRING},
  {"list_bool", GeAttrValue::VT_LIST_BOOL},
  {"list_int", GeAttrValue::VT_LIST_INT},
  {"list_float", GeAttrValue::VT_LIST_FLOAT},
  {"list_string", GeAttrValue::VT_LIST_STRING},
  {"list_list_int", GeAttrValue::VT_LIST_LIST_INT},
  {"data_type", GeAttrValue::VT_DATA_TYPE},
};

map<string, DataType> kDataTypeDict = {
  {"bool", DT_BOOL},
  {"int8", DT_INT8},
  {"uint8", DT_UINT8},
  {"int16", DT_INT16},
  {"uint16", DT_UINT16},
  {"int32", DT_INT32},
  {"uint32", DT_UINT32},
  {"int64", DT_INT64},
  {"uint64", DT_UINT64},
  {"float16", DT_FLOAT16},
  {"half", DT_FLOAT16},
  {"fp16", DT_FLOAT16},
  {"float", DT_FLOAT},
  {"float32", DT_FLOAT},
  {"double", DT_DOUBLE},
};

map<string, Format> kFormatDict = {
  {"nchw", FORMAT_NCHW},
  {"nhwc", FORMAT_NHWC},
  {"nd", FORMAT_ND},
  {"nc1hwc0", FORMAT_NC1HWC0},
  {"fractal_z", FORMAT_FRACTAL_Z},
  {"nc1c0hwpad", FORMAT_NC1C0HWPAD},
  {"nhwc1c0", FORMAT_NHWC1C0},
  {"fsr_nchw", FORMAT_FSR_NCHW},
  {"fractal_deconv", FORMAT_FRACTAL_DECONV},
  {"c1hwnc0", FORMAT_C1HWNC0},
  {"fractal_deconv_transpose", FORMAT_FRACTAL_DECONV_TRANSPOSE},
  {"fractal_deconv_sp_stride_trans", FORMAT_FRACTAL_DECONV_SP_STRIDE_TRANS},
  {"nc1hwc0_c04", FORMAT_NC1HWC0_C04},
  {"fractal_z_c04", FORMAT_FRACTAL_Z_C04},
  {"chwn", FORMAT_CHWN},
  {"deconv_sp_stride8_trans", FORMAT_FRACTAL_DECONV_SP_STRIDE8_TRANS},
  {"nc1khkwhwc0", FORMAT_NC1KHKWHWC0},
  {"bn_weight", FORMAT_BN_WEIGHT},
  {"filter_hwck", FORMAT_FILTER_HWCK},
  {"hwcn", FORMAT_HWCN},
  {"lookup_lookups", FORMAT_HASHTABLE_LOOKUP_LOOKUPS},
  {"lookup_keys", FORMAT_HASHTABLE_LOOKUP_KEYS},
  {"lookup_value", FORMAT_HASHTABLE_LOOKUP_VALUE},
  {"lookup_output", FORMAT_HASHTABLE_LOOKUP_OUTPUT},
  {"lookup_hits", FORMAT_HASHTABLE_LOOKUP_HITS},
  {"md", FORMAT_MD},
  {"c1hwncoc0", FORMAT_C1HWNCoC0},
  {"fractal_nz", FORMAT_FRACTAL_NZ},
  {"ndhwc", FORMAT_NDHWC},
  {"ncdhw", FORMAT_NCDHW},
  {"dhwcn", FORMAT_DHWCN},
  {"dhwnc", FORMAT_DHWNC},
  {"ndc1hwc0", FORMAT_NDC1HWC0},
  {"fractal_z_3d", FORMAT_FRACTAL_Z_3D},
  {"fractal_z_3d_transpose", FORMAT_FRACTAL_Z_3D_TRANSPOSE},
  {"cn", FORMAT_CN},
  {"nc", FORMAT_NC},
  {"fractal_zn_lstm", FORMAT_FRACTAL_ZN_LSTM},
  {"fractal_z_g", FORMAT_FRACTAL_Z_G}
};
}  // namespace

template<typename T>
void SetAttrValue(const Json &j, SingleOpAttr &attr) {
  attr.value.SetValue<T>(j.at(kKeyValue).get<T>());
}

template<typename T>
T GetValue(const map<string, T> &dict, string &key, T default_val) {
  transform(key.begin(), key.end(), key.begin(), ::tolower);
  auto it = dict.find(key);
  if (it == dict.end()) {
    return default_val;
  }
  return it->second;
}

void from_json(const Json &j, SingleOpTensorDesc &desc) {
  desc.dims = j.at(kKeyShape).get<vector<int64_t>>();
  auto it = j.find(kKeyShapeRange);
  if (it != j.end()) {
    desc.dim_ranges = j.at(kKeyShapeRange).get<vector<std::vector<int64_t>>>();
  }
  it = j.find(kKeyOriginShape);
  if (it != j.end()) {
    desc.ori_dims = j.at(kKeyOriginShape).get<vector<int64_t>>();
  }
  string format_str = j.at(kKeyFormat).get<string>();
  string type_str = j.at(kKeyType).get<string>();
  desc.format = GetValue(kFormatDict, format_str, FORMAT_RESERVED);
  desc.type = GetValue(kDataTypeDict, type_str, DT_UNDEFINED);
  it = j.find(kKeyOriginFormat);
  if (it != j.end()) {
    string origin_format_str = j.at(kKeyOriginFormat).get<string>();
    desc.ori_format = GetValue(kFormatDict, origin_format_str, FORMAT_RESERVED);
  }
  auto tensor_name = j.find(kKeyName);
  if (tensor_name != j.end()) {
    desc.name = tensor_name->get<string>();
  }
  auto dynamic_input_name = j.find(kKeyDynamicInput);
  if (dynamic_input_name != j.end()) {
    desc.dynamic_input_name = dynamic_input_name->get<string>();
  }
}

void from_json(const Json &j, SingleOpAttr &attr) {
  attr.name = j.at(kKeyName).get<string>();
  attr.type = j.at(kKeyType).get<string>();
  auto it = kAttrTypeDict.find(attr.type);
  if (it == kAttrTypeDict.end()) {
    GELOGE(UNSUPPORTED, "Parse attr[%s] failed. Unsupported type: %s", attr.name.c_str(), attr.type.c_str());
    return;
  }
  switch (it->second) {
    case GeAttrValue::VT_BOOL:
      SetAttrValue<bool>(j, attr);
      break;
    case GeAttrValue::VT_INT:
      SetAttrValue<int64_t>(j, attr);
      break;
    case GeAttrValue::VT_FLOAT:
      SetAttrValue<float>(j, attr);
      break;
    case GeAttrValue::VT_STRING:
      SetAttrValue<string>(j, attr);
      break;
    case GeAttrValue::VT_LIST_BOOL:
      SetAttrValue<vector<bool>>(j, attr);
      break;
    case GeAttrValue::VT_LIST_INT:
      SetAttrValue<vector<int64_t>>(j, attr);
      break;
    case GeAttrValue::VT_LIST_FLOAT:
      SetAttrValue<vector<float>>(j, attr);
      break;
    case GeAttrValue::VT_LIST_STRING:
      SetAttrValue<vector<string>>(j, attr);
      break;
    case GeAttrValue::VT_LIST_LIST_INT:
      SetAttrValue<vector<vector<int64_t>>>(j, attr);
      break;
    case GeAttrValue::VT_DATA_TYPE:
      SetAttrValue<DataType>(j, attr);
      break;
    default:
      GELOGE(UNSUPPORTED, "Parse attr[%s] failed. Unsupported type: %s", attr.name.c_str(), attr.type.c_str());
      break;
  }
}

void from_json(const Json &j, SingleOpDesc &desc) {
  desc.op = j.at(kKeyOp).get<string>();
  auto input_desc = j.find(kKeyInputDesc);
  if (input_desc != j.end()) {
    desc.input_desc = input_desc->get<vector<SingleOpTensorDesc>>();
  }
  auto output_desc = j.find(kKeyOutputDesc);
  if (output_desc != j.end()) {
    desc.output_desc = output_desc->get<vector<SingleOpTensorDesc>>();
  }
  auto attr_field = j.find(kKeyAttr);
  if (attr_field != j.end()) {
    desc.attrs = attr_field->get<vector<SingleOpAttr>>();
  }
}

Status SingleOpParser::ReadJsonFile(const std::string &file, Json &json_obj) {
  std::string real_path = RealPath(file.c_str());
  if (real_path.empty()) {
    ErrorManager::GetInstance().ATCReportErrMessage("E10023", {"value"}, {file});
    GELOGE(FAILED, "Input parameter[--singleop]'s value[%s] is not a valid path.", file.c_str());
    return INTERNAL_ERROR;
  }

  std::ifstream ifs(real_path);
  if (!ifs.is_open()) {
    ErrorManager::GetInstance().ATCReportErrMessage("E10024", {"value"}, {file});
    GELOGE(FAILED, "Open file[%s] provided in input parameter[--singleop] failed.", file.c_str());
    return FAILED;
  }
  try {
    ifs >> json_obj;
  } catch (const std::exception &e) {
    ErrorManager::GetInstance().ATCReportErrMessage("E10025", {"realpath", "errmsg"}, {real_path, e.what()});
    GELOGE(PARAM_INVALID, "Parse file[%s] provided in input parameter[--singleop] failed, exception = %s.",
           real_path.c_str(), e.what());
    return PARAM_INVALID;
  }
  ifs.close();
  return SUCCESS;
}

bool SingleOpParser::Validate(const SingleOpDesc &op_desc) {
  if (op_desc.op.empty()) {
    ErrorManager::GetInstance().ATCReportErrMessage("E10026");
    GELOGE(PARAM_INVALID, "Op name is empty");
    return false;
  }

  int index = 0;
  for (auto &tensor_desc : op_desc.input_desc) {
    if ((tensor_desc.type == DT_UNDEFINED && tensor_desc.format != FORMAT_RESERVED) ||
        (tensor_desc.type != DT_UNDEFINED && tensor_desc.format == FORMAT_RESERVED)) {
      ErrorManager::GetInstance().ATCReportErrMessage("E10027", {"input", "type", "index"},
                                                      {"input", "datatype or format", std::to_string(index)});
      GELOGE(PARAM_INVALID, "Input's dataType or format is invalid when the index is %d", index);
      return false;
    }
    ++index;
  }

  index = 0;
  for (auto &tensor_desc : op_desc.output_desc) {
    if (tensor_desc.type == DT_UNDEFINED) {
      ErrorManager::GetInstance().ATCReportErrMessage("E10027", {"input", "type", "index"},
                                                      {"output", "datatype", std::to_string(index)});
      GELOGE(PARAM_INVALID, "Output's dataType is invalid when the index is %d", index);
      return false;
    }
    if (tensor_desc.format == FORMAT_RESERVED) {
      ErrorManager::GetInstance().ATCReportErrMessage("E10027", {"input", "type", "index"},
                                                      {"output", "format", std::to_string(index)});
      GELOGE(PARAM_INVALID, "Output's format is invalid when the index is %d", index);
      return false;
    }
    ++index;
  }

  for (auto &attr : op_desc.attrs) {
    if (attr.name.empty()) {
      ErrorManager::GetInstance().ATCReportErrMessage("E10029");
      GELOGE(PARAM_INVALID, "attr name is empty");
      return false;
    }
    if (attr.value.IsEmpty()) {
      ErrorManager::GetInstance().ATCReportErrMessage("E10030", {"attrname"}, {attr.name});
      GELOGE(PARAM_INVALID, "Parse attr \"%s\" failed.", attr.name.c_str());
      return false;
    }
  }
  return true;
}

std::unique_ptr<OpDesc> SingleOpParser::CreateOpDesc(const string &op_type) {
  return std::unique_ptr<OpDesc>(new(std::nothrow) OpDesc(op_type, op_type));
}

Status SingleOpParser::UpdateDynamicTensorName(std::vector<SingleOpTensorDesc> &desc) {
  std::map<std::string, int> dynamic_name_map;
  for (auto &tensor : desc) {
    if (tensor.dynamic_input_name.empty()) {
      continue;
    }
    if (dynamic_name_map.find(tensor.dynamic_input_name) == dynamic_name_map.end()) {
      dynamic_name_map[tensor.dynamic_input_name] = 0;
    } else {
      dynamic_name_map[tensor.dynamic_input_name]++;
    }
    tensor.name = tensor.dynamic_input_name + std::to_string(dynamic_name_map[tensor.dynamic_input_name]);
  }
  GELOGD("Update dynamic tensor name success!");
  return SUCCESS;
}

Status SingleOpParser::ConvertToBuildParam(int index,
                                           const SingleOpDesc &single_op_desc,
                                           SingleOpBuildParam &build_param) {
  auto op_desc = CreateOpDesc(single_op_desc.op);
  GE_CHECK_NOTNULL(op_desc);
  std::stringstream file_name;
  file_name << index;
  file_name << "_" << single_op_desc.op;
  for (auto &desc : single_op_desc.input_desc) {
    file_name << "_" << desc.type << "_" << desc.format;
    for (auto dim : desc.dims) {
      file_name << "_" << dim;
    }
    GeTensorDesc ge_tensor_desc(GeShape(desc.dims),
                                desc.format,
                                desc.type);
    auto ori_format_to_set = desc.ori_format != FORMAT_RESERVED ? desc.ori_format : desc.format;
    auto ori_dims = !desc.ori_dims.empty() ? desc.ori_dims : desc.dims;
    ge_tensor_desc.SetOriginFormat(ori_format_to_set);
    ge_tensor_desc.SetOriginShape(GeShape(ori_dims));
    GE_CHK_STATUS_RET_NOLOG(SetShapeRange(op_desc->GetName(), desc, ge_tensor_desc));
    TensorUtils::SetRealDimCnt(ge_tensor_desc, ori_dims.size());
    TensorUtils::SetInputTensor(ge_tensor_desc, true);
    TensorUtils::SetOutputTensor(ge_tensor_desc, false);
    if (desc.name.empty()) {
      op_desc->AddInputDesc(ge_tensor_desc);
    } else {
      op_desc->AddInputDesc(desc.name, ge_tensor_desc);
    }
    build_param.inputs.emplace_back(ge_tensor_desc);
  }

  for (auto &desc : single_op_desc.output_desc) {
    file_name << "_" << desc.type << "_" << desc.format;
    for (auto dim : desc.dims) {
      file_name << "_" << dim;
    }
    GeTensorDesc ge_tensor_desc(GeShape(desc.dims),
                                desc.format,
                                desc.type);
    auto ori_format_to_set = desc.ori_format != FORMAT_RESERVED ? desc.ori_format : desc.format;
    auto ori_dims = !desc.ori_dims.empty() ? desc.ori_dims : desc.dims;
    ge_tensor_desc.SetOriginFormat(ori_format_to_set);
    ge_tensor_desc.SetOriginShape(GeShape(ori_dims));
    GE_CHK_STATUS_RET_NOLOG(SetShapeRange(op_desc->GetName(), desc, ge_tensor_desc));
    TensorUtils::SetRealDimCnt(ge_tensor_desc, ori_dims.size());
    TensorUtils::SetInputTensor(ge_tensor_desc, false);
    TensorUtils::SetOutputTensor(ge_tensor_desc, true);
    if (desc.name.empty()) {
      op_desc->AddOutputDesc(ge_tensor_desc);
    } else {
      op_desc->AddOutputDesc(desc.name, ge_tensor_desc);
    }
    build_param.outputs.emplace_back(ge_tensor_desc);
  }

  for (const auto &attr : single_op_desc.attrs) {
    op_desc->SetAttr(attr.name, attr.value);
  }

  if (VerifyOpInputOutputSizeByIr(*op_desc) != SUCCESS) {
    GELOGE(PARAM_INVALID, "Verify op [%s] input or output size failed.", op_desc->GetType().c_str());
    return PARAM_INVALID;
  }

  file_name << kFileSuffix;
  build_param.file_name = file_name.str();
  build_param.op_desc.reset(op_desc.release());
  return SUCCESS;
}

Status SingleOpParser::VerifyOpInputOutputSizeByIr(const OpDesc &current_op_desc) {
  ge::Operator operator_ir = ge::OperatorFactory::CreateOperator("tmp_operator", current_op_desc.GetType());
  if (!operator_ir.IsEmpty()) {
    auto opdesc_ir = ge::OpDescUtils::GetOpDescFromOperator(operator_ir);
    GE_CHECK_NOTNULL(opdesc_ir);
    size_t current_opdesc_inputs_num = current_op_desc.GetInputsSize();
    size_t ir_opdesc_inputs_num = opdesc_ir->GetInputsSize();
    if (current_opdesc_inputs_num < ir_opdesc_inputs_num) {
      string reason = "is smaller than the ir needed input size " + std::to_string(ir_opdesc_inputs_num);
      ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
          {current_op_desc.GetName(), "input size " + std::to_string(current_opdesc_inputs_num), reason});
      GELOGE(PARAM_INVALID, "This op [%s] input size %zu is smaller than the ir needed input size %zu",
             current_op_desc.GetName().c_str(), current_opdesc_inputs_num, ir_opdesc_inputs_num);
      return PARAM_INVALID;
    }
    size_t current_opdesc_outputs_num = current_op_desc.GetOutputsSize();
    size_t ir_opdesc_outputs_num = opdesc_ir->GetOutputsSize();
    if (current_opdesc_outputs_num < ir_opdesc_outputs_num) {
      string reason = "is smaller than the ir needed output size " + std::to_string(ir_opdesc_outputs_num);
      ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
          {current_op_desc.GetName(), "output size " + std::to_string(current_opdesc_outputs_num), reason});
      GELOGE(PARAM_INVALID, "This op [%s] output size %zu is smaller than the ir needed output size %zu",
             current_op_desc.GetName().c_str(), current_opdesc_outputs_num, ir_opdesc_outputs_num);
      return PARAM_INVALID;
    }
  }
  return SUCCESS;
}

Status SingleOpParser::SetShapeRange(const std::string &op_name,
                                     const SingleOpTensorDesc &tensor_desc,
                                     GeTensorDesc &ge_tensor_desc) {
  auto num_shape_ranges = tensor_desc.dim_ranges.size();
  GELOGD("Number of shape ranges = %zu", num_shape_ranges);
  auto it = std::find(tensor_desc.dims.begin(), tensor_desc.dims.end(), ge::UNKNOWN_DIM_NUM);
  if (it != tensor_desc.dims.end()) {
    if (tensor_desc.dims != ge::UNKNOWN_RANK) {
      ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
                                                      {op_name,
                                                       "shape",
                                                       "has unknown rank but dim size is not one"});
      GELOGE(PARAM_INVALID, "Invalid tensor shape: [%s]", ge_tensor_desc.MutableShape().ToString().c_str());
      return PARAM_INVALID;
    }
    if (!tensor_desc.dim_ranges.empty()) {
      ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
                                                      {op_name,
                                                       "shape range",
                                                       "is not needed while the rank of the shape is unknown"});
      GELOGE(PARAM_INVALID, "Shape range is not needed while the rank of the shape is unknown");
      return PARAM_INVALID;
    }

    GELOGD("Shape is unknown rank, do not set shape range");
    return SUCCESS;
  }

  std::vector<std::pair<int64_t, int64_t>> shape_range;
  size_t range_index = 0;
  for (auto dim : tensor_desc.dims) {
    if (dim >= 0) {
      shape_range.emplace_back(dim, dim);
      GELOGD("Adding shape range: [%ld, %ld]", dim, dim);
    } else {
      GELOGD("To get shape range by index = %zu", range_index);
      if (range_index >= num_shape_ranges) {
        string reason = "is smaller than the unknown dim size " + std::to_string(++range_index);
        ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
                                                        {op_name,
                                                         "shape range size " + std::to_string(num_shape_ranges),
                                                         reason});
        GELOGE(PARAM_INVALID, "The number of shape_range mismatches that of unknown dims.");
        return PARAM_INVALID;
      }

      auto &range = tensor_desc.dim_ranges[range_index];
      if (range.size() != kShapeRangePairSize) {
        string reason = "has " + std::to_string(range.size()) + " item(s)";
        ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
                                                        {op_name,
                                                         "shape range " + std::to_string(range_index),
                                                         reason});
        GELOGE(PARAM_INVALID, "Invalid shape range entry. index = %zu, size = %zu", range_index, range.size());
        return PARAM_INVALID;
      }

      shape_range.emplace_back(range[kShapeRangeLow], range[kShapeRangeHigh]);
      GELOGD("Adding shape range: [%ld, %ld]", range[kShapeRangeLow], range[kShapeRangeHigh]);
      ++range_index;
    }
  }

  if (num_shape_ranges != range_index) {
    string reason = "is greater than the unknown dim size " + std::to_string(range_index);
    ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
                                                    {op_name,
                                                     "shape range size " + std::to_string(num_shape_ranges),
                                                     reason});
    GELOGE(PARAM_INVALID,
           "The number of shape_range(%zu) mismatches that of unknown dims(%zu).",
           num_shape_ranges,
           range_index);
    return PARAM_INVALID;
  }

  if (range_index > 0) {
    ge_tensor_desc.SetShapeRange(shape_range);
  }
  return SUCCESS;
}

Status SingleOpParser::ParseSingleOpList(const std::string &file, std::vector<SingleOpBuildParam> &op_list) {
  int index = 0;
  try {
    Json single_op_list_json;
    auto ret = ReadJsonFile(file, single_op_list_json);
    if (ret != SUCCESS) {
      return ret;
    }
    for (const Json &single_op_json : single_op_list_json) {
      SingleOpDesc single_op_desc;
      GELOGI("Parsing op[%d], jsonStr = %s", index, single_op_json.dump(kDumpJsonIndent).c_str());
      single_op_desc = single_op_json;
      if (UpdateDynamicTensorName(single_op_desc.input_desc) != SUCCESS) {
        GELOGE(FAILED, "Update dynamic tensor name failed!");
        return FAILED;
      }
      if (!Validate(single_op_desc)) {
        GELOGE(PARAM_INVALID, "Validate the index[%d] of op failed when read json file[%s].", index, file.c_str());
        return PARAM_INVALID;
      }
      SingleOpBuildParam param;
      ret = ConvertToBuildParam(index, single_op_desc, param);
      if (ret != SUCCESS) {
        return ret;
      }
      op_list.emplace_back(param);
      GELOGI("Parse the index[%d] of op success", index);
      index += 1;
    }
  } catch (const nlohmann::json::exception &e) {
    ErrorManager::GetInstance().ATCReportErrMessage("E10032", {"index", "jsonfile", "exception"},
                                                    {std::to_string(index), file, e.what()});
    GELOGE(PARAM_INVALID, "Parse the index[%d] of op failed when read json file[%s], exception %s",
           index, file.c_str(), e.what());
    return PARAM_INVALID;
  }
  return SUCCESS;
}
}  // namespace ge
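
For reference, the sketch below (not part of single_op_parser.cc) shows one way the parser above might be driven end to end: it writes a minimal --singleop description to a file and calls SingleOpParser::ParseSingleOpList on it. The JSON keys mirror the constants defined at the top of the file (op, input_desc, output_desc, name, type, shape, format); the op type "Add", the file path "add_op.json", and the assumption that ParseSingleOpList is a static member callable as shown are illustrative, and building/linking against the GraphEngine headers is assumed.

// Usage sketch only; assumes the GraphEngine build environment and that
// ge::SingleOpParser::ParseSingleOpList is a static member, as its use by ATC suggests.
#include <cstdio>
#include <fstream>
#include <vector>
#include "single_op_parser.h"

int main() {
  // Hypothetical description file: a JSON array of single-op descriptions.
  // "format" and "type" values are looked up case-insensitively in kFormatDict and kDataTypeDict.
  const char *desc = R"([
    {
      "op": "Add",
      "input_desc": [
        {"name": "x1", "format": "ND", "shape": [16, 32], "type": "float16"},
        {"name": "x2", "format": "ND", "shape": [16, 32], "type": "float16"}
      ],
      "output_desc": [
        {"name": "y", "format": "ND", "shape": [16, 32], "type": "float16"}
      ]
    }
  ])";
  std::ofstream("add_op.json") << desc;  // illustrative path

  std::vector<ge::SingleOpBuildParam> op_list;
  if (ge::SingleOpParser::ParseSingleOpList("add_op.json", op_list) != ge::SUCCESS) {
    return -1;
  }
  // Each build param carries the generated model file name (ending in the ".om" suffix).
  for (const auto &param : op_list) {
    printf("op model to build: %s\n", param.file_name.c_str());
  }
  return 0;
}
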

The Graph Engine (GE) module is a submodule of MindSpore, implemented in C++. It sits between the front-end module ME and the underlying hardware and serves as the bridge between them. GE takes the graph delivered by ME as input, performs a series of deep graph optimizations, and finally outputs a graph that can run efficiently on the underlying hardware. GE is specifically optimized for the hardware architecture of the Ascend AI processor to fully exploit its compute power. During model training/inference, GE is invoked automatically and is transparent to the user. GE consists mainly of two parts, GE API and GE Core; the detailed architecture diagram is shown below.