
single_op_parser.cc 25 kB

/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "single_op_parser.h"

#include <vector>
#include <algorithm>
#include <fstream>
#include <sstream>

#include <nlohmann/json.hpp>

#include "framework/common/debug/ge_log.h"
#include "common/util/error_manager/error_manager.h"
#include "common/ge_inner_error_codes.h"
#include "framework/common/util.h"
#include "graph/utils/tensor_utils.h"
#include "graph/utils/type_utils.h"
#include "graph/utils/op_desc_utils.h"
#include "graph/operator_factory_impl.h"

using Json = nlohmann::json;
using std::string;
using std::vector;
using std::map;

namespace ge {
namespace {
constexpr char const *kKeyOp = "op";
constexpr char const *kKeyInputDesc = "input_desc";
constexpr char const *kKeyOutputDesc = "output_desc";
constexpr char const *kKeyAttr = "attr";
constexpr char const *kKeyName = "name";
constexpr char const *kKeyType = "type";
constexpr char const *kKeyShape = "shape";
constexpr char const *kKeyOriginShape = "origin_shape";
constexpr char const *kKeyShapeRange = "shape_range";
constexpr char const *kKeyValue = "value";
constexpr char const *kKeyFormat = "format";
constexpr char const *kKeyOriginFormat = "origin_format";
constexpr char const *kFileSuffix = ".om";
constexpr char const *kKeyDynamicInput = "dynamic_input";
constexpr char const *kKeyDynamicOutput = "dynamic_output";
constexpr char const *kKeyCompileFlag = "compile_flag";
constexpr int kDumpJsonIndent = 2;
constexpr int kShapeRangePairSize = 2;
constexpr int kShapeRangeLow = 0;
constexpr int kShapeRangeHigh = 1;
constexpr int kMaxFileNameLen = 128;

map<string, GeAttrValue::ValueType> kAttrTypeDict = {
    {"bool", GeAttrValue::VT_BOOL},
    {"int", GeAttrValue::VT_INT},
    {"float", GeAttrValue::VT_FLOAT},
    {"string", GeAttrValue::VT_STRING},
    {"list_bool", GeAttrValue::VT_LIST_BOOL},
    {"list_int", GeAttrValue::VT_LIST_INT},
    {"list_float", GeAttrValue::VT_LIST_FLOAT},
    {"list_string", GeAttrValue::VT_LIST_STRING},
    {"list_list_int", GeAttrValue::VT_LIST_LIST_INT},
    {"data_type", GeAttrValue::VT_DATA_TYPE},
};

map<string, DataType> kDataTypeDict = {
    {"bool", DT_BOOL},
    {"int8", DT_INT8},
    {"uint8", DT_UINT8},
    {"int16", DT_INT16},
    {"uint16", DT_UINT16},
    {"int32", DT_INT32},
    {"uint32", DT_UINT32},
    {"int64", DT_INT64},
    {"uint64", DT_UINT64},
    {"float16", DT_FLOAT16},
    {"half", DT_FLOAT16},
    {"fp16", DT_FLOAT16},
    {"float", DT_FLOAT},
    {"float32", DT_FLOAT},
    {"double", DT_DOUBLE},
};

map<string, Format> kFormatDict = {
    {"nchw", FORMAT_NCHW},
    {"nhwc", FORMAT_NHWC},
    {"nd", FORMAT_ND},
    {"nc1hwc0", FORMAT_NC1HWC0},
    {"fractal_z", FORMAT_FRACTAL_Z},
    {"nc1c0hwpad", FORMAT_NC1C0HWPAD},
    {"nhwc1c0", FORMAT_NHWC1C0},
    {"fsr_nchw", FORMAT_FSR_NCHW},
    {"fractal_deconv", FORMAT_FRACTAL_DECONV},
    {"c1hwnc0", FORMAT_C1HWNC0},
    {"fractal_deconv_transpose", FORMAT_FRACTAL_DECONV_TRANSPOSE},
    {"fractal_deconv_sp_stride_trans", FORMAT_FRACTAL_DECONV_SP_STRIDE_TRANS},
    {"nc1hwc0_c04", FORMAT_NC1HWC0_C04},
    {"fractal_z_c04", FORMAT_FRACTAL_Z_C04},
    {"chwn", FORMAT_CHWN},
    {"deconv_sp_stride8_trans", FORMAT_FRACTAL_DECONV_SP_STRIDE8_TRANS},
    {"nc1khkwhwc0", FORMAT_NC1KHKWHWC0},
    {"bn_weight", FORMAT_BN_WEIGHT},
    {"filter_hwck", FORMAT_FILTER_HWCK},
    {"hwcn", FORMAT_HWCN},
    {"lookup_lookups", FORMAT_HASHTABLE_LOOKUP_LOOKUPS},
    {"lookup_keys", FORMAT_HASHTABLE_LOOKUP_KEYS},
    {"lookup_value", FORMAT_HASHTABLE_LOOKUP_VALUE},
    {"lookup_output", FORMAT_HASHTABLE_LOOKUP_OUTPUT},
    {"lookup_hits", FORMAT_HASHTABLE_LOOKUP_HITS},
    {"md", FORMAT_MD},
    {"c1hwncoc0", FORMAT_C1HWNCoC0},
    {"fractal_nz", FORMAT_FRACTAL_NZ},
    {"ndhwc", FORMAT_NDHWC},
    {"ncdhw", FORMAT_NCDHW},
    {"dhwcn", FORMAT_DHWCN},
    {"dhwnc", FORMAT_DHWNC},
    {"ndc1hwc0", FORMAT_NDC1HWC0},
    {"fractal_z_3d", FORMAT_FRACTAL_Z_3D},
    {"fractal_z_3d_transpose", FORMAT_FRACTAL_Z_3D_TRANSPOSE},
    {"cn", FORMAT_CN},
    {"nc", FORMAT_NC},
    {"fractal_zn_lstm", FORMAT_FRACTAL_ZN_LSTM},
    {"fractal_z_g", FORMAT_FRACTAL_Z_G}
};
// Builds the output file name as "<index>_<op>_<type>_<format>_<dims>..." over all
// inputs and outputs, truncates it to kMaxFileNameLen and appends the ".om" suffix.
std::string GenerateFileName(const SingleOpDesc &single_op_desc, int index) {
  std::stringstream file_name_ss;
  file_name_ss << index;
  file_name_ss << "_" << single_op_desc.op;
  for (auto &desc : single_op_desc.input_desc) {
    file_name_ss << "_" << desc.type << "_" << desc.format;
    for (auto dim : desc.dims) {
      file_name_ss << "_" << dim;
    }
  }
  for (auto &desc : single_op_desc.output_desc) {
    file_name_ss << "_" << desc.type << "_" << desc.format;
    for (auto dim : desc.dims) {
      file_name_ss << "_" << dim;
    }
  }
  std::string file_name = file_name_ss.str();
  if (file_name.length() > kMaxFileNameLen) {
    GELOGI("Trim file name for it is too long, origin file name = %s", file_name.c_str());
    file_name = file_name.substr(0, kMaxFileNameLen);
  }
  file_name += kFileSuffix;
  return file_name;
}
} // namespace
template<typename T>
void SetAttrValue(const Json &j, SingleOpAttr &attr) {
  attr.value.SetValue<T>(j.at(kKeyValue).get<T>());
}

// Case-insensitive dictionary lookup: lowercases the key in place and returns
// default_val when the key is not found.
template<typename T>
T GetValue(const map<string, T> &dict, string &key, T default_val) {
  transform(key.begin(), key.end(), key.begin(), ::tolower);
  auto it = dict.find(key);
  if (it == dict.end()) {
    return default_val;
  }
  return it->second;
}
// nlohmann::json deserializer for one tensor description: shape, optional shape range
// and origin shape, format/data type (validated), plus optional name and dynamic input name.
void from_json(const Json &j, SingleOpTensorDesc &desc) {
  bool is_tensor_valid = true;
  desc.dims = j.at(kKeyShape).get<vector<int64_t>>();
  auto it = j.find(kKeyShapeRange);
  if (it != j.end()) {
    desc.dim_ranges = j.at(kKeyShapeRange).get<vector<std::vector<int64_t>>>();
  }
  it = j.find(kKeyOriginShape);
  if (it != j.end()) {
    desc.ori_dims = j.at(kKeyOriginShape).get<vector<int64_t>>();
  }
  string format_str = j.at(kKeyFormat).get<string>();
  string type_str = j.at(kKeyType).get<string>();
  desc.format = GetValue(kFormatDict, format_str, FORMAT_RESERVED);
  desc.type = GetValue(kDataTypeDict, type_str, DT_UNDEFINED);
  is_tensor_valid = is_tensor_valid && ge::TypeUtils::IsFormatValid(format_str);
  is_tensor_valid = is_tensor_valid && ge::TypeUtils::IsDataTypeValid(type_str);
  it = j.find(kKeyOriginFormat);
  if (it != j.end()) {
    string origin_format_str = j.at(kKeyOriginFormat).get<string>();
    is_tensor_valid = is_tensor_valid && ge::TypeUtils::IsFormatValid(origin_format_str);
    desc.ori_format = GetValue(kFormatDict, origin_format_str, FORMAT_RESERVED);
  }
  auto tensor_name = j.find(kKeyName);
  if (tensor_name != j.end()) {
    desc.name = tensor_name->get<string>();
  }
  auto dynamic_input_name = j.find(kKeyDynamicInput);
  if (dynamic_input_name != j.end()) {
    desc.dynamic_input_name = dynamic_input_name->get<string>();
  }
  if (!is_tensor_valid) {
    desc.SetValidFlag(is_tensor_valid);
  }
}
// Deserializer for one attribute: reads its name and declared type, then dispatches on
// the type to store the value into the GeAttrValue.
void from_json(const Json &j, SingleOpAttr &attr) {
  attr.name = j.at(kKeyName).get<string>();
  attr.type = j.at(kKeyType).get<string>();
  auto it = kAttrTypeDict.find(attr.type);
  if (it == kAttrTypeDict.end()) {
    GELOGE(UNSUPPORTED, "[Find][JsonAttr] name=%s, type=%s failed for Unsupported type.",
           attr.name.c_str(), attr.type.c_str());
    REPORT_INNER_ERROR("E19999", "Find jsonattr name=%s, type=%s failed for Unsupported type.",
                       attr.name.c_str(), attr.type.c_str());
    return;
  }
  switch (it->second) {
    case GeAttrValue::VT_BOOL:
      SetAttrValue<bool>(j, attr);
      break;
    case GeAttrValue::VT_INT:
      SetAttrValue<int64_t>(j, attr);
      break;
    case GeAttrValue::VT_FLOAT:
      SetAttrValue<float>(j, attr);
      break;
    case GeAttrValue::VT_STRING:
      SetAttrValue<string>(j, attr);
      break;
    case GeAttrValue::VT_LIST_BOOL:
      SetAttrValue<vector<bool>>(j, attr);
      break;
    case GeAttrValue::VT_LIST_INT:
      SetAttrValue<vector<int64_t>>(j, attr);
      break;
    case GeAttrValue::VT_LIST_FLOAT:
      SetAttrValue<vector<float>>(j, attr);
      break;
    case GeAttrValue::VT_LIST_STRING:
      SetAttrValue<vector<string>>(j, attr);
      break;
    case GeAttrValue::VT_LIST_LIST_INT:
      SetAttrValue<vector<vector<int64_t>>>(j, attr);
      break;
    case GeAttrValue::VT_DATA_TYPE:
      SetAttrValue<DataType>(j, attr);
      break;
    default:
      GELOGE(UNSUPPORTED, "[Find][JsonAttr] name=%s, type=%s failed for Unsupported type.",
             attr.name.c_str(), attr.type.c_str());
      REPORT_INNER_ERROR("E19999", "Find jsonattr name=%s, type=%s failed for Unsupported type.",
                         attr.name.c_str(), attr.type.c_str());
      break;
  }
}
// Deserializer for a whole single-op description: op type, input/output tensor
// descriptions, attributes and the optional compile flag.
void from_json(const Json &j, SingleOpDesc &desc) {
  auto op = j.find(kKeyOp);
  if (op != j.end()) {
    desc.op = j.at(kKeyOp).get<string>();
  }
  auto input_desc = j.find(kKeyInputDesc);
  if (input_desc != j.end()) {
    desc.input_desc = input_desc->get<vector<SingleOpTensorDesc>>();
  }
  auto output_desc = j.find(kKeyOutputDesc);
  if (output_desc != j.end()) {
    desc.output_desc = output_desc->get<vector<SingleOpTensorDesc>>();
  }
  auto attr_field = j.find(kKeyAttr);
  if (attr_field != j.end()) {
    desc.attrs = attr_field->get<vector<SingleOpAttr>>();
  }
  auto compile_flag = j.find(kKeyCompileFlag);
  if (compile_flag != j.end()) {
    desc.compile_flag = compile_flag->get<int32_t>();
  }
}
// Resolves the real path of the given file, opens it and parses its content into json_obj.
Status SingleOpParser::ReadJsonFile(const std::string &file, Json &json_obj) {
  std::string real_path = RealPath(file.c_str());
  if (real_path.empty()) {
    ErrorManager::GetInstance().ATCReportErrMessage("E10023", {"value"}, {file});
    GELOGE(FAILED, "[Read][JsonFile]Input parameter[--singleop]'s value[%s] is not a valid path.", file.c_str());
    return INTERNAL_ERROR;
  }
  std::ifstream ifs(real_path);
  if (!ifs.is_open()) {
    ErrorManager::GetInstance().ATCReportErrMessage("E10024", {"value"}, {file});
    GELOGE(FAILED, "[Open][JsonFile] failed for file[%s] provided in input parameter[--singleop].", file.c_str());
    return FAILED;
  }
  try {
    ifs >> json_obj;
  } catch (const std::exception &e) {
    ErrorManager::GetInstance().ATCReportErrMessage("E10025", {"realpath", "errmsg"}, {real_path, e.what()});
    GELOGE(PARAM_INVALID,
           "[Parse][JsonFile] fail for file[%s] provided in input parameter[--singleop], exception = %s.",
           real_path.c_str(), e.what());
    return PARAM_INVALID;
  }
  ifs.close();
  return SUCCESS;
}
// Checks that the op type is set, that every input/output tensor has a valid data type
// and format, and that every attribute has a name and a non-empty value.
bool SingleOpParser::Validate(const SingleOpDesc &op_desc) {
  if (op_desc.op.empty()) {
    ErrorManager::GetInstance().ATCReportErrMessage("E10026");
    GELOGE(PARAM_INVALID, "[Check][Param] fail for name of input SingleOpDesc is empty.");
    return false;
  }
  int index = 0;
  for (auto &tensor_desc : op_desc.input_desc) {
    if (!tensor_desc.GetValidFlag()) {
      ErrorManager::GetInstance().ATCReportErrMessage("E10027", {"op_name", "input", "type", "index"},
                                                      {op_desc.op, "input", "tensor", std::to_string(index)});
      GELOGE(PARAM_INVALID,
             "[Check][Param] fail for Input's dataType or format is invalid when the index is %d", index);
      return false;
    }
    if ((tensor_desc.type == DT_UNDEFINED && tensor_desc.format != FORMAT_RESERVED) ||
        (tensor_desc.type != DT_UNDEFINED && tensor_desc.format == FORMAT_RESERVED)) {
      ErrorManager::GetInstance().ATCReportErrMessage("E10027", {"op_name", "input", "type", "index"},
                                                      {op_desc.op, "input", "datatype or format", std::to_string(index)});
      GELOGE(PARAM_INVALID, "[Check][Param]Input's dataType or format is invalid when the index is %d", index);
      return false;
    }
    ++index;
  }
  index = 0;
  for (auto &tensor_desc : op_desc.output_desc) {
    if (!tensor_desc.GetValidFlag()) {
      ErrorManager::GetInstance().ATCReportErrMessage("E10027", {"op_name", "input", "type", "index"},
                                                      {op_desc.op, "output", "tensor", std::to_string(index)});
      GELOGE(PARAM_INVALID, "[Check][Param]fail for Output's dataType is invalid when the index is %d", index);
      return false;
    }
    if (tensor_desc.type == DT_UNDEFINED) {
      ErrorManager::GetInstance().ATCReportErrMessage("E10027", {"op_name", "input", "type", "index"},
                                                      {op_desc.op, "output", "datatype", std::to_string(index)});
      GELOGE(PARAM_INVALID, "[Check][Param]Output's dataType is invalid when the index is %d", index);
      return false;
    }
    if (tensor_desc.format == FORMAT_RESERVED) {
      ErrorManager::GetInstance().ATCReportErrMessage("E10027", {"op_name", "input", "type", "index"},
                                                      {op_desc.op, "output", "format", std::to_string(index)});
      GELOGE(PARAM_INVALID, "[Check][Param]Output's format is invalid when the index is %d", index);
      return false;
    }
    ++index;
  }
  for (auto &attr : op_desc.attrs) {
    if (attr.name.empty()) {
      ErrorManager::GetInstance().ATCReportErrMessage("E10029", {"op_name"}, {op_desc.op});
      GELOGE(PARAM_INVALID, "[Parse][Attr]attr name is empty");
      return false;
    }
    if (attr.value.IsEmpty()) {
      ErrorManager::GetInstance().ATCReportErrMessage("E10030", {"op_name", "attrname"}, {op_desc.op, attr.name});
      GELOGE(PARAM_INVALID, "[Parse][Attr] fail for value of attr name:\"%s\" is empty.", attr.name.c_str());
      return false;
    }
  }
  return true;
}
std::unique_ptr<OpDesc> SingleOpParser::CreateOpDesc(const string &op_type) {
  return std::unique_ptr<OpDesc>(new(std::nothrow) OpDesc(op_type, op_type));
}

// Renames tensors that belong to a dynamic input: each tensor's name becomes its
// dynamic input name plus an increasing per-name index.
Status SingleOpParser::UpdateDynamicTensorName(std::vector<SingleOpTensorDesc> &desc) {
  std::map<std::string, int> dynamic_name_map;
  for (auto &tensor : desc) {
    if (tensor.dynamic_input_name.empty()) {
      continue;
    }
    if (dynamic_name_map.find(tensor.dynamic_input_name) == dynamic_name_map.end()) {
      dynamic_name_map[tensor.dynamic_input_name] = 0;
    } else {
      dynamic_name_map[tensor.dynamic_input_name]++;
    }
    tensor.name = tensor.dynamic_input_name + std::to_string(dynamic_name_map[tensor.dynamic_input_name]);
  }
  GELOGD("Update dynamic tensor name success!");
  return SUCCESS;
}
// Converts a parsed SingleOpDesc into a SingleOpBuildParam: builds a GeTensorDesc for
// every input/output (including origin format/shape and shape range), copies the
// attributes onto the OpDesc, verifies it against the operator IR and generates the
// output file name.
Status SingleOpParser::ConvertToBuildParam(int index,
                                           const SingleOpDesc &single_op_desc,
                                           SingleOpBuildParam &build_param) {
  auto op_desc = CreateOpDesc(single_op_desc.op);
  GE_CHECK_NOTNULL(op_desc);
  for (auto &desc : single_op_desc.input_desc) {
    GeTensorDesc ge_tensor_desc(GeShape(desc.dims),
                                desc.format,
                                desc.type);
    auto ori_format_to_set = desc.ori_format != FORMAT_RESERVED ? desc.ori_format : desc.format;
    auto ori_dims = !desc.ori_dims.empty() ? desc.ori_dims : desc.dims;
    ge_tensor_desc.SetOriginFormat(ori_format_to_set);
    ge_tensor_desc.SetOriginShape(GeShape(ori_dims));
    GE_CHK_STATUS_RET_NOLOG(SetShapeRange(op_desc->GetName(), desc, ge_tensor_desc));
    TensorUtils::SetRealDimCnt(ge_tensor_desc, ori_dims.size());
    TensorUtils::SetInputTensor(ge_tensor_desc, true);
    TensorUtils::SetOutputTensor(ge_tensor_desc, false);
    if (desc.name.empty()) {
      op_desc->AddInputDesc(ge_tensor_desc);
    } else {
      op_desc->AddInputDesc(desc.name, ge_tensor_desc);
    }
    build_param.inputs.emplace_back(ge_tensor_desc);
  }
  for (auto &desc : single_op_desc.output_desc) {
    GeTensorDesc ge_tensor_desc(GeShape(desc.dims),
                                desc.format,
                                desc.type);
    auto ori_format_to_set = desc.ori_format != FORMAT_RESERVED ? desc.ori_format : desc.format;
    auto ori_dims = !desc.ori_dims.empty() ? desc.ori_dims : desc.dims;
    ge_tensor_desc.SetOriginFormat(ori_format_to_set);
    ge_tensor_desc.SetOriginShape(GeShape(ori_dims));
    GE_CHK_STATUS_RET_NOLOG(SetShapeRange(op_desc->GetName(), desc, ge_tensor_desc));
    TensorUtils::SetRealDimCnt(ge_tensor_desc, ori_dims.size());
    TensorUtils::SetInputTensor(ge_tensor_desc, false);
    TensorUtils::SetOutputTensor(ge_tensor_desc, true);
    if (desc.name.empty()) {
      op_desc->AddOutputDesc(ge_tensor_desc);
    } else {
      op_desc->AddOutputDesc(desc.name, ge_tensor_desc);
    }
    build_param.outputs.emplace_back(ge_tensor_desc);
  }
  for (const auto &attr : single_op_desc.attrs) {
    op_desc->SetAttr(attr.name, attr.value);
  }
  if (VerifyOpInputOutputSizeByIr(*op_desc) != SUCCESS) {
    GELOGE(PARAM_INVALID, "[Verify][OpInputOutputSize] fail for input op [%s] invalid.", op_desc->GetType().c_str());
    return PARAM_INVALID;
  }
  build_param.file_name = GenerateFileName(single_op_desc, index);
  build_param.op_desc.reset(op_desc.release());
  return SUCCESS;
}
// Compares the parsed op against the operator registered in the IR factory (if any) and
// rejects it when it provides fewer inputs or outputs than the IR requires.
Status SingleOpParser::VerifyOpInputOutputSizeByIr(const OpDesc &current_op_desc) {
  ge::Operator operator_ir = ge::OperatorFactory::CreateOperator("tmp_operator", current_op_desc.GetType());
  if (!operator_ir.IsEmpty()) {
    auto opdesc_ir = ge::OpDescUtils::GetOpDescFromOperator(operator_ir);
    GE_CHECK_NOTNULL(opdesc_ir);
    size_t current_opdesc_inputs_num = current_op_desc.GetInputsSize();
    size_t ir_opdesc_inputs_num = opdesc_ir->GetInputsSize();
    if (current_opdesc_inputs_num < ir_opdesc_inputs_num) {
      string reason = "is smaller than the ir needed input size " + std::to_string(ir_opdesc_inputs_num);
      ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
          {current_op_desc.GetName(), "input size " + std::to_string(current_opdesc_inputs_num), reason});
      GELOGE(PARAM_INVALID,
             "[Verify][OpInputOutputSize]This op:%s input size %zu is smaller than the ir needed input size %zu",
             current_op_desc.GetName().c_str(), current_opdesc_inputs_num, ir_opdesc_inputs_num);
      return PARAM_INVALID;
    }
    size_t current_opdesc_outputs_num = current_op_desc.GetOutputsSize();
    size_t ir_opdesc_outputs_num = opdesc_ir->GetOutputsSize();
    if (current_opdesc_outputs_num < ir_opdesc_outputs_num) {
      string reason = "is smaller than the ir needed output size " + std::to_string(ir_opdesc_outputs_num);
      ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
          {current_op_desc.GetName(), "output size " + std::to_string(current_opdesc_outputs_num), reason});
      GELOGE(PARAM_INVALID,
             "[Verify][OpInputOutputSize]This op:%s output size %zu is smaller than the ir needed output size %zu",
             current_op_desc.GetName().c_str(), current_opdesc_outputs_num, ir_opdesc_outputs_num);
      return PARAM_INVALID;
    }
  }
  return SUCCESS;
}
// Sets the shape range of a tensor with unknown dims: a fixed dim contributes the pair
// [dim, dim], an unknown dim consumes the next entry of "shape_range"; a tensor of
// unknown rank must not carry any shape range.
Status SingleOpParser::SetShapeRange(const std::string &op_name,
                                     const SingleOpTensorDesc &tensor_desc,
                                     GeTensorDesc &ge_tensor_desc) {
  auto num_shape_ranges = tensor_desc.dim_ranges.size();
  GELOGD("Number of shape ranges = %zu", num_shape_ranges);
  auto it = std::find(tensor_desc.dims.begin(), tensor_desc.dims.end(), ge::UNKNOWN_DIM_NUM);
  if (it != tensor_desc.dims.end()) {
    if (tensor_desc.dims != ge::UNKNOWN_RANK) {
      ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
                                                      {op_name,
                                                       "shape",
                                                       "has unknown rank but dim size is not one"});
      GELOGE(PARAM_INVALID, "[Set][ShapeRange]Invalid tensor shape:%s.",
             ge_tensor_desc.MutableShape().ToString().c_str());
      return PARAM_INVALID;
    }
    if (!tensor_desc.dim_ranges.empty()) {
      ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
                                                      {op_name,
                                                       "shape range",
                                                       "is not needed while the rank of the shape is unknown"});
      GELOGE(PARAM_INVALID, "[Set][ShapeRange]Shape range is not needed while the rank of the shape is unknown.");
      return PARAM_INVALID;
    }
    GELOGD("Shape is unknown rank, do not set shape range");
    return SUCCESS;
  }
  std::vector<std::pair<int64_t, int64_t>> shape_range;
  size_t range_index = 0;
  for (auto dim : tensor_desc.dims) {
    if (dim >= 0) {
      shape_range.emplace_back(dim, dim);
      GELOGD("Adding shape range: [%ld, %ld]", dim, dim);
    } else {
      GELOGD("To get shape range by index = %zu", range_index);
      if (range_index >= num_shape_ranges) {
        string reason = "is smaller than the unknown dim size " + std::to_string(++range_index);
        ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
                                                        {op_name,
                                                         "shape range size " + std::to_string(num_shape_ranges),
                                                         reason});
        GELOGE(PARAM_INVALID, "[Set][ShapeRange]The number of shape_range mismatches that of unknown dims.");
        return PARAM_INVALID;
      }
      auto &range = tensor_desc.dim_ranges[range_index];
      if (range.size() != kShapeRangePairSize) {
        string reason = "has " + std::to_string(range.size()) + " item(s)";
        ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
                                                        {op_name,
                                                         "shape range " + std::to_string(range_index),
                                                         reason});
        GELOGE(PARAM_INVALID, "[Set][ShapeRange]Invalid shape range entry. index = %zu, size = %zu",
               range_index, range.size());
        return PARAM_INVALID;
      }
      shape_range.emplace_back(range[kShapeRangeLow], range[kShapeRangeHigh]);
      GELOGD("Adding shape range: [%ld, %ld]", range[kShapeRangeLow], range[kShapeRangeHigh]);
      ++range_index;
    }
  }
  if (num_shape_ranges != range_index) {
    string reason = "is greater than the unknown dim size " + std::to_string(range_index);
    ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
                                                    {op_name,
                                                     "shape range size " + std::to_string(num_shape_ranges),
                                                     reason});
    GELOGE(PARAM_INVALID,
           "[Set][ShapeRange]The number of shape_range(%zu) mismatches that of unknown dims(%zu).",
           num_shape_ranges, range_index);
    return PARAM_INVALID;
  }
  if (range_index > 0) {
    ge_tensor_desc.SetShapeRange(shape_range);
  }
  return SUCCESS;
}
// Entry point: reads the single-op JSON file, deserializes each op description,
// resolves dynamic input names, validates it and converts it into a build parameter.
Status SingleOpParser::ParseSingleOpList(const std::string &file, std::vector<SingleOpBuildParam> &op_list) {
  int index = 0;
  try {
    Json single_op_list_json;
    auto ret = ReadJsonFile(file, single_op_list_json);
    if (ret != SUCCESS) {
      return ret;
    }
    int32_t compile_flag = 0;
    for (const Json &single_op_json : single_op_list_json) {
      SingleOpDesc single_op_desc;
      GELOGI("Parsing op[%d], jsonStr = %s", index, single_op_json.dump(kDumpJsonIndent).c_str());
      single_op_desc = single_op_json;
      GELOGD("Compile flag is %d.", single_op_desc.compile_flag);
      if (single_op_desc.compile_flag == 1) {
        compile_flag = single_op_desc.compile_flag;
        continue;
      }
      if (UpdateDynamicTensorName(single_op_desc.input_desc) != SUCCESS) {
        GELOGE(FAILED, "[Update][DynamicTensorName] failed for invalid input param!");
        REPORT_CALL_ERROR("E19999", "UpdateDynamicTensorName failed for invalid input param.");
        return FAILED;
      }
      if (!Validate(single_op_desc)) {
        GELOGE(PARAM_INVALID,
               "[Check][OpDesc]Validate the index[%d] of op failed when read json file[%s].", index, file.c_str());
        return PARAM_INVALID;
      }
      SingleOpBuildParam param;
      ret = ConvertToBuildParam(index, single_op_desc, param);
      if (ret != SUCCESS) {
        return ret;
      }
      param.compile_flag = compile_flag;
      op_list.emplace_back(param);
      GELOGI("Parse the index[%d] of op success", index);
      index += 1;
    }
  } catch (const nlohmann::json::exception &e) {
    ErrorManager::GetInstance().ATCReportErrMessage("E10032", {"index", "jsonfile", "exception"},
                                                    {std::to_string(index), file, e.what()});
    GELOGE(PARAM_INVALID, "[Parse][OpList] the index:%d of op failed when read json file:%s, exception:%s",
           index, file.c_str(), e.what());
    return PARAM_INVALID;
  }
  return SUCCESS;
}
} // namespace ge
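
For orientation, here is a minimal sketch of the single-op description list this parser consumes, inferred from the JSON key constants and the from_json overloads above. The operator, tensor names, shapes and the attribute are illustrative placeholders only, not taken from a verified example:

[
  {
    "op": "Add",
    "input_desc": [
      {"name": "x1", "format": "ND", "shape": [16, 32], "type": "float16"},
      {"name": "x2", "format": "ND", "shape": [16, 32], "type": "float16"}
    ],
    "output_desc": [
      {"name": "y", "format": "ND", "shape": [16, 32], "type": "float16"}
    ],
    "attr": [
      {"name": "alpha", "type": "float", "value": 1.0}
    ]
  }
]

Format and type strings are looked up case-insensitively in kFormatDict and kDataTypeDict; unrecognized values fall back to FORMAT_RESERVED / DT_UNDEFINED and are then rejected by Validate. Optional per-tensor keys such as origin_shape, origin_format, shape_range and dynamic_input, and a per-entry compile_flag, are handled by the same deserializers.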

The Graph Engine (GE) module is a submodule of MindSpore. It is implemented in C++ and sits between the front-end module ME and the underlying hardware, serving as the bridge between them. GE takes the graph delivered by ME as input, performs a series of deep graph optimizations, and finally outputs a graph that can run efficiently on the underlying hardware. GE applies optimizations tailored to the hardware architecture of the Ascend AI processor in order to fully exploit its compute power. During model training and inference, GE is invoked automatically and is transparent to the user. GE consists mainly of two parts, GE API and GE Core; its detailed architecture is shown in the diagram below.