
graph_preprocess.cc

/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
  16. #include "graph/preprocess/graph_preprocess.h"
  17. #include <map>
  18. #include <set>
  19. #include <string>
  20. #include <utility>
  21. #include "common/formats/format_transfers/format_transfer_fractal_nz.h"
  22. #include "common/formats/format_transfers/format_transfer_fractal_z.h"
  23. #include "common/formats/format_transfers/format_transfer_nchw_nc1hwc0.h"
  24. #include "common/formats/format_transfers/format_transfer_nhwc_nc1hwc0.h"
  25. #include "common/formats/format_transfers/format_transfer_transpose.h"
  26. #include "common/formats/utils/formats_trans_utils.h"
  27. #include "common/helper/model_helper.h"
  28. #include "common/math/math_util.h"
  29. #include "common/op/ge_op_utils.h"
  30. #include "common/util/error_manager/error_manager.h"
  31. #include "common/formats/utils/formats_trans_utils.h"
  32. #include "framework/common/debug/ge_log.h"
  33. #include "graph/common/ge_call_wrapper.h"
  34. #include "graph/common/local_context.h"
  35. #include "graph/common/transop_util.h"
  36. #include "graph/debug/ge_attr_define.h"
  37. #include "graph/ge_context.h"
  38. #include "graph/shape_refiner.h"
  39. #include "graph/manager/graph_var_manager.h"
  40. #include "graph/manager/util/rt_context_util.h"
  41. #include "graph/optimize/graph_optimize.h"
  42. #include "graph/passes/addn_pass.h"
  43. #include "graph/passes/aicpu_constant_folding_pass.h"
  44. #include "graph/passes/assert_pass.h"
  45. #include "graph/passes/assign_pass.h"
  46. #include "graph/passes/base_pass.h"
  47. #include "graph/passes/common_subexpression_elimination_pass.h"
  48. #include "graph/passes/cond_pass.h"
  49. #include "graph/passes/cond_remove_pass.h"
  50. #include "graph/passes/constant_folding_pass.h"
  51. #include "graph/passes/constant_fuse_same_pass.h"
  52. #include "graph/passes/control_trigger_pass.h"
  53. #include "graph/passes/dimension_adjust_pass.h"
  54. #include "graph/passes/dimension_compute_pass.h"
  55. #include "graph/passes/dropout_pass.h"
  56. #include "graph/passes/enter_pass.h"
  57. #include "graph/passes/flow_ctrl_pass.h"
  58. #include "graph/passes/for_pass.h"
  59. #include "graph/passes/get_original_format_pass.h"
  60. #include "graph/passes/guarantee_const_pass.h"
  61. #include "graph/passes/hccl_group_pass.h"
  62. #include "graph/passes/hccl_memcpy_pass.h"
  63. #include "graph/passes/identity_pass.h"
  64. #include "graph/passes/infershape_pass.h"
  65. #include "graph/passes/iterator_op_pass.h"
  66. #include "graph/passes/merge_pass.h"
  67. #include "graph/passes/net_output_pass.h"
  68. #include "graph/passes/next_iteration_pass.h"
  69. #include "graph/passes/no_use_reshape_remove_pass.h"
  70. #include "graph/passes/parallel_concat_start_op_pass.h"
  71. #include "graph/passes/placeholder_with_default_pass.h"
  72. #include "graph/passes/prevent_gradient_pass.h"
  73. #include "graph/passes/print_op_pass.h"
  74. #include "graph/passes/prune_pass.h"
  75. #include "graph/passes/replace_transshape_pass.h"
  76. #include "graph/passes/replace_with_empty_const_pass.h"
  77. #include "graph/passes/resource_pair_add_control_pass.h"
  78. #include "graph/passes/resource_pair_remove_control_pass.h"
  79. #include "graph/passes/save_pass.h"
  80. #include "graph/passes/shape_operate_op_remove_pass.h"
  81. #include "graph/passes/snapshot_pass.h"
  82. #include "graph/passes/stop_gradient_pass.h"
  83. #include "graph/passes/subgraph_pass.h"
  84. #include "graph/passes/switch_data_edges_bypass.h"
  85. #include "graph/passes/switch_dead_branch_elimination.h"
  86. #include "graph/passes/switch_logic_remove_pass.h"
  87. #include "graph/passes/merge_to_stream_merge_pass.h"
  88. #include "graph/passes/switch_to_stream_switch_pass.h"
  89. #include "graph/passes/attach_stream_label_pass.h"
  90. #include "graph/passes/unused_const_pass.h"
  91. #include "graph/passes/unused_op_remove_pass.h"
  92. #include "graph/passes/var_is_initialized_op_pass.h"
  93. #include "graph/passes/variable_prepare_op_pass.h"
  94. #include "graph/preprocess/insert_op/util_insert_aipp_op.h"
  95. #include "graph/types.h"
  96. #include "graph/utils/tensor_utils.h"
  97. #include "graph/utils/type_utils.h"
  98. #include "inc/pass_manager.h"
  99. #include "init/gelib.h"
  100. #include "multi_batch_copy_graph.h"
  101. #include "runtime/dev.h"
  102. #include "graph/passes/dimension_adjust_pass.h"
  103. #include "graph/passes/link_gen_mask_nodes_pass.h"
  104. #include "graph/passes/permute_pass.h"
  105. #include "graph/passes/reshape_remove_pass.h"
  106. #include "graph/passes/same_transdata_breadth_fusion_pass.h"
  107. #include "graph/passes/transop_breadth_fusion_pass.h"
  108. #include "graph/passes/transop_depth_fusion_pass.h"
  109. #include "graph/passes/transop_nearby_allreduce_fusion_pass.h"
  110. #include "graph/passes/cast_remove_pass.h"
  111. #include "graph/passes/data_pass.h"
  112. #include "graph/passes/transop_without_reshape_fusion_pass.h"
  113. #include "graph/passes/transpose_transdata_pass.h"
  114. #include "graph/passes/variable_op_pass.h"
  115. #include "graph/passes/variable_prepare_op_pass.h"
  116. #include "graph/passes/variable_ref_delete_op_pass.h"
  117. namespace ge {
  118. namespace {
static std::map<std::string, ge::DataType> output_type_str_to_datatype = {
    {"FP32", ge::DT_FLOAT},    {"FP16", ge::DT_FLOAT16},  {"INT8", ge::DT_INT8},   {"INT16", ge::DT_INT16},
    {"UINT16", ge::DT_UINT16}, {"UINT8", ge::DT_UINT8},   {"INT32", ge::DT_INT32}, {"INT64", ge::DT_INT64},
    {"UINT32", ge::DT_UINT32}, {"UINT64", ge::DT_UINT64}, {"DOUBLE", ge::DT_DOUBLE}};
const char *const kMbatchSwitchnName = "mbatch-switch-name";
// The number of fields in a user-defined output datatype/format entry after splitting on ":".
const size_t kUserDefinedElementCount = 2;
const int kDataOutIndex = 0;
const int64_t kInvalidDynaimcDimsType = -1;

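// Builds a Const op that holds the shape of data_tensor as an int32 tensor.
// A scalar input is encoded as an empty shape holding the single value 1.
// Used below to feed the second (shape) input of Reshape trans nodes.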
OpDescPtr CreateTensorShape(const GeTensorDesc &data_tensor) {
  GeTensorPtr tensor = MakeShared<GeTensor>();
  if (tensor == nullptr) {
    GELOGE(INTERNAL_ERROR, "Create shared ptr for GeTensor failed");
    return nullptr;
  }
  tensor->MutableTensorDesc().SetDataType(DT_INT32);
  tensor->MutableTensorDesc().SetFormat(FORMAT_ND);
  auto dst_ge_shape = data_tensor.GetShape();
  auto dim_cnt = static_cast<int64_t>(dst_ge_shape.GetDimNum());
  if (dim_cnt == 0) {  // if the dim_cnt is 0, the tensor is a scalar
    tensor->MutableTensorDesc().SetShape(GeShape());
    int32_t dst_shape = 1;
    if (tensor->SetData(reinterpret_cast<const uint8_t *>(&dst_shape), sizeof(int32_t)) != GRAPH_SUCCESS) {
      GELOGE(INTERNAL_ERROR, "tensor set data failed");
      return nullptr;
    }
  } else {
    tensor->MutableTensorDesc().SetShape(GeShape(std::vector<int64_t>({dim_cnt})));
    unique_ptr<int32_t[]> dst_shape(new (std::nothrow) int32_t[dim_cnt]());
    if (dst_shape == nullptr) {
      GELOGE(INTERNAL_ERROR, "Create unique ptr failed");
      return nullptr;
    }
    for (int64_t i = 0; i < dim_cnt; ++i) {
      dst_shape[i] = dst_ge_shape.GetDim(static_cast<size_t>(i));
    }
    GE_IF_BOOL_EXEC(
        tensor->SetData(reinterpret_cast<const uint8_t *>(dst_shape.get()), dim_cnt * sizeof(int32_t)) != GRAPH_SUCCESS,
        GELOGE(INTERNAL_ERROR, "tensor set data failed");
        return nullptr;)
  }
  GELOGD("Create shape input dim [%s]", dst_ge_shape.ToString().c_str());
  return OpDescUtils::CreateConstOp(tensor);
}

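// Populates the type-specific attributes each kind of trans node requires by
// its IR definition: src/dst formats for TransData, the permutation for
// TransposeD, and src/dst data types for Cast.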
void AddTransNodeAttr(const std::string &node_type, const GeTensorDesc &input, const GeTensorDesc &output,
                      OpDescPtr &op_desc) {
  // For format transfer node, the IR definition has src/dst format attrs
  if (node_type == TRANSDATA) {
    GE_IF_BOOL_EXEC(
        !AttrUtils::SetStr(op_desc, FORMAT_TRANSFER_SRC_FORMAT, TypeUtils::FormatToSerialString(input.GetFormat())),
        GELOGW("SetStr FORMAT_TRANSFER_SRC_FORMAT failed");)
    GE_IF_BOOL_EXEC(
        !AttrUtils::SetStr(op_desc, FORMAT_TRANSFER_DST_FORMAT, TypeUtils::FormatToSerialString(output.GetFormat())),
        GELOGW("SetStr FORMAT_TRANSFER_DST_FORMAT failed");)
  }
  // For TransposeD node, the IR definition has perm attrs
  if (node_type == TRANSPOSED) {
    Format src_format = input.GetFormat();
    Format dst_format = output.GetFormat();
    std::vector<int64_t> perm_arg;
    GE_CHK_BOOL_EXEC_WARN(formats::GetPermByForamt(src_format, dst_format, perm_arg) == SUCCESS, return,
                          "Get perm by format failed.");
    GE_CHK_BOOL_EXEC_WARN(AttrUtils::SetListInt(op_desc, PERMUTE_ATTR_PERM, perm_arg), return,
                          "SetListInt PERMUTE_ATTR_PERM failed")
  }
  // For cast node, the IR definition has src/dst attrs
  if (node_type == CAST) {
    GE_IF_BOOL_EXEC(!AttrUtils::SetInt(op_desc, CAST_ATTR_SRCT, static_cast<int64_t>(input.GetDataType())),
                    GELOGW("SetInt CAST_ATTR_SRCT failed");)
    GE_IF_BOOL_EXEC(!AttrUtils::SetInt(op_desc, CAST_ATTR_DSTT, static_cast<int64_t>(output.GetDataType())),
                    GELOGW("SetInt CAST_ATTR_DSTT failed");)
    GE_IF_BOOL_EXEC(!AttrUtils::SetInt(op_desc, CAST_ATTR_DST_TYPE, static_cast<int64_t>(output.GetDataType())),
                    GELOGW("SetInt CAST_ATTR_DST_TYPE failed");)
    GE_IF_BOOL_EXEC(!AttrUtils::SetBool(op_desc, CAST_ATTR_TRUNCATE, false),
                    GELOGW("SetBool CAST_ATTR_TRUNCATE failed");)
  }
}

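// Creates a single trans node of the given type (e.g. TransData, TransposeD,
// Cast, Reshape) converting `input` to `output`, and adds it to the owner
// graph of `node`. For Reshape, a companion Const node carrying the target
// shape is also created and wired to the second input.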
NodePtr CreateTransNode(const std::string &name, const std::string &node_type, const GeTensorDesc &input,
                        const GeTensorDesc &output, NodePtr &node) {
  if (node == nullptr) {
    GELOGE(PARAM_INVALID, "node is null.");
    return nullptr;
  }
  auto graph = node->GetOwnerComputeGraph();
  if (graph == nullptr) {
    GELOGE(PARAM_INVALID, "Owner graph is null, node name:%s.", node->GetName().c_str());
    return nullptr;
  }
  auto index = TransOpUtil::GetTransOpDataIndex(node_type);
  if (index < 0) {
    ErrorManager::GetInstance().ATCReportErrMessage(
        "E19025", {"situation", "reason"},
        {"The trans node type[" + node_type + "]", "it must be " + TransOpUtil::TransopMapToString()});
    GELOGE(INTERNAL_ERROR, "The trans node type %s does not exist", node_type.c_str());
    return nullptr;
  }
  OpDescPtr op_desc = MakeShared<OpDesc>(name, node_type);
  if (op_desc == nullptr) {
    GELOGE(INTERNAL_ERROR, "Create shared ptr for OpDesc failed");
    return nullptr;
  }
  // for data dump
  GE_IF_BOOL_EXEC(
      !AttrUtils::SetListStr(op_desc, ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, std::move(std::vector<std::string>())),
      GELOGW("CreateTransNode: SetListStr failed");)
  // Default single input and single output
  auto ret = op_desc->AddInputDesc(input);
  if (ret != GRAPH_SUCCESS) {
    GELOGE(INTERNAL_ERROR, "Failed to add input desc when create node %s type %s", name.c_str(), node_type.c_str());
    return nullptr;
  }
  ret = op_desc->AddOutputDesc(output);
  if (ret != GRAPH_SUCCESS) {
    GELOGE(INTERNAL_ERROR, "Failed to add output desc when create node %s type %s", name.c_str(), node_type.c_str());
    return nullptr;
  }
  AddTransNodeAttr(node_type, input, output, op_desc);
  NodePtr shape_node = nullptr;
  if (node_type == RESHAPE) {
    auto shape_desc = CreateTensorShape(output);
    if (shape_desc == nullptr) {
      GELOGE(INTERNAL_ERROR, "Failed to add shape for reshape %s, can not create the shape input",
             node->GetName().c_str());
      return nullptr;
    }
    ret = op_desc->AddInputDesc(shape_desc->GetOutputDesc(0));
    if (ret != GRAPH_SUCCESS) {
      GELOGE(INTERNAL_ERROR, "Failed to add the second input for reshape %s", name.c_str());
      return nullptr;
    }
    shape_node = graph->AddNode(shape_desc);
    if (shape_node == nullptr) {
      GELOGE(INTERNAL_ERROR, "Failed to add shape node for reshape %s, can not add the shape to graph", name.c_str());
      return nullptr;
    }
  }
  auto trans_node = graph->AddNode(op_desc);
  if (trans_node == nullptr) {
    GELOGE(INTERNAL_ERROR, "Failed to add trans node %s to graph", name.c_str());
    return nullptr;
  }
  if (node_type == RESHAPE) {
    if (GraphUtils::AddEdge(shape_node->GetOutDataAnchor(0), trans_node->GetInDataAnchor(1)) != GRAPH_SUCCESS) {
      GELOGE(INTERNAL_ERROR, "Failed to add shape node for reshape %s, can not add the edge", name.c_str());
      return nullptr;
    }
  }
  return trans_node;
}

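// Inserts one recovered trans node downstream of `node` when replaying a
// variable's trans road. Note the input/output descs are passed swapped
// because the road is replayed in reverse order (see RecoverTransRoadForVar).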
Status RecoverOneTransNodeForVar(const std::string &name, const TransNodeInfo &trans_node_info, NodePtr node,
                                 NodePtr &trans_node) {
  GE_CHECK_NOTNULL(node);
  trans_node = CreateTransNode(name, trans_node_info.node_type, trans_node_info.output, trans_node_info.input, node);
  if (trans_node == nullptr) {
    return INTERNAL_ERROR;
  }
  auto ret = GraphUtils::ReplaceNodeDataAnchors(trans_node, node, {}, {0});
  if (ret != GRAPH_SUCCESS) {
    GELOGE(INTERNAL_ERROR, "Failed to replace out anchors when recover trans node for %s type %s",
           node->GetName().c_str(), node->GetType().c_str());
    return INTERNAL_ERROR;
  }
  ret = GraphUtils::AddEdge(node->GetOutDataAnchor(0), trans_node->GetInDataAnchor(0));
  if (ret != GRAPH_SUCCESS) {
    GELOGE(INTERNAL_ERROR, "Failed to connect node %s to trans node %s", node->GetName().c_str(),
           trans_node->GetName().c_str());
    return INTERNAL_ERROR;
  }
  ret = GraphUtils::MoveOutCtrlEdges(node, trans_node);
  if (ret != GRAPH_SUCCESS) {
    GELOGE(INTERNAL_ERROR, "Failed to move out control edges from %s to %s when recover trans node.",
           node->GetName().c_str(), trans_node->GetName().c_str());
    return INTERNAL_ERROR;
  }
  return SUCCESS;
}

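// Mirror of RecoverOneTransNodeForVar for variable-ref nodes: the trans node
// is inserted upstream of `node`, so the descs keep their original order and
// the incoming control edges are moved instead.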
Status RecoverOneTransNodeForVarRef(const std::string &name, const TransNodeInfo &trans_node_info, NodePtr node,
                                    NodePtr &trans_node) {
  GE_CHECK_NOTNULL(node);
  trans_node = CreateTransNode(name, trans_node_info.node_type, trans_node_info.input, trans_node_info.output, node);
  if (trans_node == nullptr) {
    return INTERNAL_ERROR;
  }
  auto ret = GraphUtils::ReplaceNodeDataAnchors(trans_node, node, {0}, {});
  if (ret != GRAPH_SUCCESS) {
    GELOGE(INTERNAL_ERROR, "Failed to replace in anchors when recover trans node for %s type %s",
           node->GetName().c_str(), node->GetType().c_str());
    return INTERNAL_ERROR;
  }
  ret = GraphUtils::AddEdge(trans_node->GetOutDataAnchor(0), node->GetInDataAnchor(0));
  if (ret != GRAPH_SUCCESS) {
    GELOGE(INTERNAL_ERROR, "Failed to connect trans node %s to node %s", trans_node->GetName().c_str(),
           node->GetName().c_str());
    return INTERNAL_ERROR;
  }
  ret = GraphUtils::MoveInCtrlEdges(node, trans_node);
  if (ret != GRAPH_SUCCESS) {
    GELOGE(INTERNAL_ERROR, "Failed to move in control edges from %s to %s when recover trans node.",
           node->GetName().c_str(), trans_node->GetName().c_str());
    return INTERNAL_ERROR;
  }
  return SUCCESS;
}

Status UpdateVarFormats(const NodePtr &var, const GeTensorDesc &tensor_desc) {
  GE_IF_BOOL_EXEC(var == nullptr, GELOGW("node : var is nullptr"); return INTERNAL_ERROR);
  GE_CHECK_NOTNULL(var->GetOpDesc());
  if (var->GetOpDesc()->GetOutputsSize() > 0) {
    auto output_desc = var->GetOpDesc()->GetOutputDesc(0);
    output_desc.SetFormat(tensor_desc.GetFormat());
    output_desc.SetDataType(tensor_desc.GetDataType());
    output_desc.SetShape(tensor_desc.GetShape());
    output_desc.SetOriginFormat(tensor_desc.GetOriginFormat());
    output_desc.SetOriginDataType(tensor_desc.GetOriginDataType());
    output_desc.SetOriginShape(tensor_desc.GetOriginShape());
    GE_IF_BOOL_EXEC(var->GetOpDesc()->UpdateOutputDesc(0, output_desc) != GRAPH_SUCCESS,
                    GELOGE(INTERNAL_ERROR, "UpdateOutputDesc failed");
                    return INTERNAL_ERROR;);
  }
  if (var->GetOpDesc()->GetInputsSize() > 0) {
    auto desc = var->GetOpDesc()->GetInputDesc(0);
    desc.SetFormat(tensor_desc.GetFormat());
    desc.SetDataType(tensor_desc.GetDataType());
    desc.SetShape(tensor_desc.GetShape());
    desc.SetOriginFormat(tensor_desc.GetOriginFormat());
    desc.SetOriginDataType(tensor_desc.GetOriginDataType());
    desc.SetOriginShape(tensor_desc.GetOriginShape());
    GE_IF_BOOL_EXEC(var->GetOpDesc()->UpdateInputDesc(0, desc) != GRAPH_SUCCESS,
                    GELOGE(INTERNAL_ERROR, "UpdateInputDesc failed");
                    return INTERNAL_ERROR;)
  }
  return SUCCESS;
}

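// Replays the variable's trans road behind the Variable node in reverse
// order, tags every inserted node with ATTR_INSERTED_BY_GE and the variable's
// stream label, then updates the variable's own tensor descs from the last
// entry of the road.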
Status RecoverTransRoadForVar(const NodePtr &var, const VarTransRoad &road) {
  GE_CHECK_NOTNULL(var);
  int index = 0;
  NodePtr last_node = var;
  for (auto iter = road.rbegin(); iter != road.rend(); ++iter) {
    auto trans_name = var->GetName() + "_trans_" + std::to_string(index++);
    auto ret = RecoverOneTransNodeForVar(trans_name, *iter, last_node, last_node);
    if (ret != SUCCESS) {
      ErrorManager::GetInstance().ATCReportErrMessage(
          "E15001", {"variable", "index", "type"}, {var->GetName(), std::to_string(index), iter->node_type});
      GELOGE(INTERNAL_ERROR, "Failed to recover trans node for variable %s, index %d, type %s", var->GetName().c_str(),
             index, iter->node_type.c_str());
      return INTERNAL_ERROR;
    }
    // set stream_label
    OpDescPtr var_desc = var->GetOpDesc();
    GE_CHECK_NOTNULL(var_desc);
    std::string stream_label;
    (void)AttrUtils::GetStr(var_desc, ATTR_NAME_STREAM_LABEL, stream_label);
    if (!stream_label.empty()) {
      GE_CHK_STATUS_RET(SetStreamLabel(last_node, stream_label), "set stream label failed");
    }
    GE_CHK_BOOL_EXEC((ge::AttrUtils::SetBool(last_node->GetOpDesc(), ge::ATTR_INSERTED_BY_GE, true)),
                     return INTERNAL_ERROR, "Set attr ATTR_INSERTED_BY_GE failed.");
    GELOGD("Recover trans node %s type %s success", trans_name.c_str(), iter->node_type.c_str());
  }
  if (road.empty()) {
    return SUCCESS;
  }
  return UpdateVarFormats(var, road.rbegin()->output);
}

Status RecoverTransRoadForVarRef(const std::set<NodePtr> &nodes, const VarTransRoad &road) {
  for (auto &var : nodes) {
    GE_CHECK_NOTNULL(var);
    int index = 0;
    NodePtr last_node = var;
    GELOGI("Recover trans nodes for variable ref %s", var->GetName().c_str());
    for (auto iter = road.rbegin(); iter != road.rend(); ++iter) {
      auto trans_name = var->GetName() + "_trans_" + std::to_string(index++);
      auto ret = RecoverOneTransNodeForVarRef(trans_name, *iter, last_node, last_node);
      if (ret != SUCCESS) {
        ErrorManager::GetInstance().ATCReportErrMessage(
            "E15001", {"variable", "index", "type"}, {var->GetName(), std::to_string(index), iter->node_type});
        GELOGE(INTERNAL_ERROR, "Failed to recover trans node for variable %s, index %d, type %s",
               var->GetName().c_str(), index, iter->node_type.c_str());
        return INTERNAL_ERROR;
      }
      // set stream_label
      OpDescPtr var_desc = var->GetOpDesc();
      GE_CHECK_NOTNULL(var_desc);
      std::string stream_label;
      (void)AttrUtils::GetStr(var_desc, ATTR_NAME_STREAM_LABEL, stream_label);
      if (!stream_label.empty()) {
        GE_CHK_STATUS_RET(SetStreamLabel(last_node, stream_label), "set stream label failed");
      }
      GE_CHK_BOOL_EXEC((ge::AttrUtils::SetBool(last_node->GetOpDesc(), ge::ATTR_INSERTED_BY_GE, true)),
                       return INTERNAL_ERROR, "Set attr ATTR_INSERTED_BY_GE failed.");
    }
    if (!(road.empty()) && (UpdateVarFormats(var, road.rbegin()->output) != SUCCESS)) {
      return INTERNAL_ERROR;
    }
  }
  return SUCCESS;
}

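// Maps each source variable name to the set of variable nodes that reference
// it through the REF_VAR_SRC_VAR_NAME attribute.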
using VarNamesToRefs = std::map<std::string, std::set<NodePtr>>;

VarNamesToRefs CollectVarNamesToRefs(const ComputeGraphPtr &graph) {
  VarNamesToRefs names_to_refs;
  std::string var_name;
  if (graph == nullptr) {
    GELOGE(PARAM_INVALID, "graph is null.");
    return names_to_refs;
  }
  for (auto &node : graph->GetAllNodes()) {
    if (node->GetType() != VARIABLE) {
      continue;
    }
    if (AttrUtils::GetStr(node->GetOpDesc(), REF_VAR_SRC_VAR_NAME, var_name)) {
      (void)names_to_refs[var_name].insert(node);
    }
  }
  return names_to_refs;
}

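// Computes the NC1HWC0 shape for an NCHW or NHWC source shape; any other
// source format falls through and leaves dst_shape unchanged.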
Status TransferShape2NC1HWC0(Format src_format, const std::vector<int64_t> &src_shape, DataType dt, Format dst_format,
                             std::vector<int64_t> &dst_shape) {
  if (src_format == FORMAT_NCHW) {
    formats::FormatTransferNchwNc1hwc0 transfer;
    if (transfer.TransShape(src_format, src_shape, dt, dst_format, dst_shape) != SUCCESS) {
      GELOGE(INTERNAL_ERROR, "TransShape failed");
      return FAILED;
    }
  } else if (src_format == FORMAT_NHWC) {
    formats::FormatTransferNhwcNc1hwc0 transfer;
    if (transfer.TransShape(src_format, src_shape, dt, dst_format, dst_shape) != SUCCESS) {
      GELOGE(INTERNAL_ERROR, "TransShape failed");
      return FAILED;
    }
  }
  return SUCCESS;
}

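// Rewrites input and output desc 0 of a node to NC1HWC0 with the transferred
// shape, then refreshes the tensor size on both descs.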
Status ModifyInputFormatAndShape(NodePtr &node_ptr) {
  GE_CHECK_NOTNULL(node_ptr);
  auto op_desc = node_ptr->GetOpDesc();
  GE_CHECK_NOTNULL(op_desc);
  const GeTensorDescPtr &input = op_desc->MutableInputDesc(0);
  GE_CHECK_NOTNULL(input);
  ge::Format old_format = input->GetFormat();
  std::vector<int64_t> old_shape = input->GetShape().GetDims();
  ge::DataType dt = input->GetDataType();
  std::vector<int64_t> dst_shape_dims;
  if (TransferShape2NC1HWC0(old_format, old_shape, dt, FORMAT_NC1HWC0, dst_shape_dims) != SUCCESS) {
    GELOGE(INTERNAL_ERROR, "Trans shape failed");
    return FAILED;
  }
  input->SetFormat(FORMAT_NC1HWC0);
  input->SetShape(ge::GeShape(dst_shape_dims));
  auto output = op_desc->MutableOutputDesc(0);
  GE_CHECK_NOTNULL(output);
  output->SetFormat(FORMAT_NC1HWC0);
  output->SetShape(ge::GeShape(dst_shape_dims));
  int64_t size = 0;
  graphStatus graph_status = TensorUtils::GetTensorMemorySizeInBytes(*output, size);
  if (graph_status != ge::GRAPH_SUCCESS) {
    GELOGE(graph_status, "GetTensorSizeInBytes failed!");
    return FAILED;
  }
  ge::TensorUtils::SetSize(*output, size);
  ge::TensorUtils::SetSize(*input, size);
  return SUCCESS;
}

Status ModifyFormatAndShapeForSingleTensor(const GeTensorDescPtr &input_output) {
  GE_CHECK_NOTNULL(input_output);
  ge::Format old_format = input_output->GetFormat();
  std::vector<int64_t> old_shape = input_output->GetShape().GetDims();
  ge::DataType dt = input_output->GetDataType();
  std::vector<int64_t> dst_shape_dims;
  if (TransferShape2NC1HWC0(old_format, old_shape, dt, FORMAT_NC1HWC0, dst_shape_dims) != SUCCESS) {
    GELOGE(INTERNAL_ERROR, "Trans shape failed");
    return FAILED;
  }
  input_output->SetFormat(FORMAT_NC1HWC0);
  input_output->SetShape(ge::GeShape(dst_shape_dims));
  return SUCCESS;
}

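// Applies a user-specified storage format and shape to the input/output pair
// at `index` of a Data or NetOutput op; the tensor sizes are refreshed only
// when the resulting shape is static.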
Status ModifyDataNetOutputFormatAndShape(OpDescPtr &op_desc, uint32_t index, Format storage_format,
                                         vector<int64_t> &dst_shape_dims) {
  GE_CHECK_NOTNULL(op_desc);
  const GeTensorDescPtr &input = op_desc->MutableInputDesc(index);
  GE_CHECK_NOTNULL(input);
  ge::Format old_format = input->GetFormat();
  std::vector<int64_t> old_shape = input->GetShape().GetDims();
  input->SetShape(ge::GeShape(dst_shape_dims));
  input->SetFormat(storage_format);
  auto output = op_desc->MutableOutputDesc(index);
  GE_CHECK_NOTNULL(output);
  output->SetShape(ge::GeShape(dst_shape_dims));
  output->SetFormat(storage_format);
  if (!output->MutableShape().IsUnknownShape()) {
    int64_t size = 0;
    graphStatus graph_status = TensorUtils::GetTensorMemorySizeInBytes(*output, size);
    if (graph_status != ge::GRAPH_SUCCESS) {
      GELOGE(graph_status, "GetTensorSizeInBytes failed!");
      return FAILED;
    }
    ge::TensorUtils::SetSize(*input, size);
    ge::TensorUtils::SetSize(*output, size);
    GELOGI("Modify Data NetOutput format and shape success, node:%s, index:%u, old_shape:%s, old_format:%s, "
           "new_shape:%s, new_format:%s, new_size:%lu",
           op_desc->GetName().c_str(), index, formats::JoinToString(old_shape).c_str(),
           ge::TypeUtils::FormatToSerialString(old_format).c_str(), formats::JoinToString(dst_shape_dims).c_str(),
           ge::TypeUtils::FormatToSerialString(storage_format).c_str(), size);
  }
  return SUCCESS;
}

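// Detects the dynamic-batch scene: a Data node carrying the
// mbatch-switch-name attribute must be followed by the SwitchN node named by
// that attribute.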
Status CheckIfDynamicBatchScene(NodePtr &data_node, bool &is_dynamic_batch, NodePtr &switchn_node) {
  is_dynamic_batch = false;
  std::string related_node_name;
  if (AttrUtils::GetStr(data_node->GetOpDesc(), kMbatchSwitchnName, related_node_name)) {
    if (related_node_name.empty()) {
      ErrorManager::GetInstance().ATCReportErrMessage(
          "E15002", {"opname", "value", "reason"}, {data_node->GetName(), "flag", "but the value is empty"});
      GELOGE(INTERNAL_ERROR, "The data node %s has switchn node flag, but the value is empty",
             data_node->GetName().c_str());
      return INTERNAL_ERROR;
    }
    for (const NodePtr &next_node : data_node->GetOutNodes()) {
      if (next_node->GetName() == related_node_name) {
        switchn_node = next_node;
        break;
      }
    }
    if (switchn_node == nullptr) {
      ErrorManager::GetInstance().ATCReportErrMessage(
          "E15002", {"opname", "value", "reason"},
          {data_node->GetName(), related_node_name, "but can not find it on the graph"});
      GELOGE(INTERNAL_ERROR, "The data node %s has switchn node %s, but can not find it on the graph",
             data_node->GetName().c_str(), related_node_name.c_str());
      return INTERNAL_ERROR;
    }
    is_dynamic_batch = true;
  }
  return SUCCESS;
}

bool CheckOpType(const NodePtr &node, const std::string &type) { return node->GetType() == type; }

Status CheckIfNeedSetNdFormat(const NodePtr &node_ptr) {
  auto op = node_ptr->GetOpDesc();
  GE_CHECK_NOTNULL(op);
  auto input_descs = op->GetAllInputsDescPtr();
  auto output_descs = op->GetAllOutputsDescPtr();
  ge::Format format = ge::FORMAT_ND;
  // If the user sets a shape with more than 4 dims, format inference may still mark it NCHW or NHWC;
  // GE should reset it to ND before FE processing, otherwise FE will insert redundant TransData nodes.
  for (auto &input_desc : input_descs) {
    GE_CHECK_NOTNULL(input_desc);
    if ((input_desc->GetShape().GetDims().size() > ge::DIM_DEFAULT_SIZE) &&
        ((input_desc->GetFormat() == ge::FORMAT_NCHW) || (input_desc->GetFormat() == ge::FORMAT_NHWC))) {
      GELOGI("The input desc format of node [%s] needs to be set to ND", op->GetName().c_str());
      input_desc->SetFormat(format);
      input_desc->SetOriginFormat(format);
    }
  }
  for (auto &output_desc : output_descs) {
    GE_CHECK_NOTNULL(output_desc);
    if ((output_desc->GetShape().GetDims().size() > ge::DIM_DEFAULT_SIZE) &&
        ((output_desc->GetFormat() == ge::FORMAT_NCHW) || (output_desc->GetFormat() == ge::FORMAT_NHWC))) {
      GELOGI("The output desc format of node [%s] needs to be set to ND", op->GetName().c_str());
      output_desc->SetFormat(format);
      output_desc->SetOriginFormat(format);
    }
  }
  return SUCCESS;
}

// Functions with names ending in 'DynShape' were added for dynamic shape processing.
// In the dynamic shape flow, trans node insertion by FE is advanced to the whole-graph
// optimization stage; GE only sets the final data_type/format/shape information for
// Variable, Data and NetOutput nodes, and no longer inserts trans nodes itself.
Status ProcessInputDtDynShape(NodePtr &node_ptr, bool &is_dynamic_batch, NodePtr &switchn_node, DataType &dt_set) {
  GE_CHECK_NOTNULL(node_ptr);
  auto op_desc = node_ptr->GetOpDesc();
  GE_CHECK_NOTNULL(op_desc);
  const GeTensorDescPtr &input = op_desc->MutableInputDesc(0);
  GE_CHECK_NOTNULL(input);
  ge::DataType src_dtype = input->GetDataType();
  if (src_dtype == dt_set) {
    GELOGI("The dtype of node %s is already the target type", node_ptr->GetName().c_str());
    return SUCCESS;
  }
  input->SetDataType(dt_set);
  int64_t input_shape_size = 0;
  int64_t output_shape_size = 0;
  ge::graphStatus input_graph_status = ge::TensorUtils::GetTensorSizeInBytes(*input, input_shape_size);
  ge::graphStatus output_graph_status = ge::TensorUtils::GetTensorMemorySizeInBytes(*input, output_shape_size);
  if ((input_graph_status != ge::GRAPH_SUCCESS) || (output_graph_status != ge::GRAPH_SUCCESS)) {
    GELOGE(GRAPH_FAILED, "GetTensorSize failed!");
    return FAILED;
  }
  ge::TensorUtils::SetSize(*input, input_shape_size);
  const GeTensorDescPtr &output = op_desc->MutableOutputDesc(0);
  GE_CHECK_NOTNULL(output);
  output->SetDataType(dt_set);
  ge::TensorUtils::SetSize(*output, output_shape_size);
  if (is_dynamic_batch) {
    GELOGI("Set dtype for SwitchN node [%s] as well", switchn_node->GetName().c_str());
    auto switchn_op_desc = switchn_node->GetOpDesc();
    GE_CHECK_NOTNULL(switchn_op_desc);
    auto switchn_input = switchn_op_desc->MutableInputDesc(0);
    GE_CHECK_NOTNULL(switchn_input);
    switchn_input->SetDataType(dt_set);
    for (uint32_t i = 0; i < switchn_node->GetAllOutDataAnchorsSize(); ++i) {
      const GeTensorDescPtr &switchn_output = switchn_op_desc->MutableOutputDesc(i);
      GE_CHECK_NOTNULL(switchn_output);
      switchn_output->SetDataType(dt_set);
    }
  }
  return SUCCESS;
}

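// Sets a Data node (and its SwitchN node in dynamic-batch scenes) to NC1HWC0
// when the user-defined format attribute requests it; only NCHW, NHWC and
// NC1HWC0 source formats are accepted.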
Status ProcessInputNC1HWC0DynShape(NodePtr &node_ptr, bool &is_dynamic_batch, NodePtr &switchn_node) {
  GE_CHECK_NOTNULL(node_ptr);
  auto op_desc = node_ptr->GetOpDesc();
  GE_CHECK_NOTNULL(op_desc);
  const GeTensorDescPtr &input = op_desc->MutableInputDesc(0);
  GE_CHECK_NOTNULL(input);
  ge::Format old_format = input->GetFormat();
  ge::GeShape old_shape = input->GetShape();
  bool support = ((old_format == FORMAT_NC1HWC0) || (old_format == FORMAT_NCHW) || (old_format == FORMAT_NHWC));
  if (!support) {
    ErrorManager::GetInstance().ATCReportErrMessage(
        "E19014", {"opname", "value", "reason"},
        {op_desc->GetName(), "format[" + TypeUtils::FormatToSerialString(old_format) + "]",
         "only support FORMAT_NC1HWC0,FORMAT_NCHW,FORMAT_NHWC"});
    GELOGE(INTERNAL_ERROR, "The format [%s] is unsupported", TypeUtils::FormatToSerialString(old_format).c_str());
    return FAILED;
  }
  if (ModifyInputFormatAndShape(node_ptr) != SUCCESS) {
    GELOGE(INTERNAL_ERROR, "modify format and shape failed");
    return FAILED;
  }
  if (is_dynamic_batch) {
    auto switchn_op_desc = switchn_node->GetOpDesc();
    GE_CHECK_NOTNULL(switchn_op_desc);
    const GeTensorDescPtr &switchn_input = switchn_op_desc->MutableInputDesc(0);
    if (ModifyFormatAndShapeForSingleTensor(switchn_input) != SUCCESS) {
      GELOGE(INTERNAL_ERROR, "modify format and shape failed");
      return FAILED;
    }
    for (uint32_t i = 0; i < switchn_node->GetAllOutDataAnchorsSize(); ++i) {
      auto switchn_output = switchn_op_desc->MutableOutputDesc(i);
      GE_CHECK_NOTNULL(switchn_output);
      old_format = switchn_output->GetFormat();
      old_shape = switchn_output->GetShape();
      if (ModifyFormatAndShapeForSingleTensor(switchn_output) != SUCCESS) {
        GELOGE(INTERNAL_ERROR, "modify format and shape failed");
        return FAILED;
      }
    }
  }
  return SUCCESS;
}

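// Applies the user-defined data type (and optionally the NC1HWC0 format)
// attached to a Data node via the ATC options, covering the dynamic-batch
// SwitchN node as well.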
Status ProcessDataNodeDynShape(NodePtr &node_ptr) {
  auto op_desc = node_ptr->GetOpDesc();
  GE_CHECK_NOTNULL(op_desc);
  string set_dt_str;
  if (!ge::AttrUtils::GetStr(op_desc, ATTR_ATC_USER_DEFINE_DATATYPE, set_dt_str)) {
    return SUCCESS;
  }
  DataType dt_set = TypeUtils::SerialStringToDataType(set_dt_str);
  GELOGI("User-defined data type %s is found for node %s.", set_dt_str.c_str(), node_ptr->GetName().c_str());
  bool is_dynamic_batch = false;
  NodePtr switchn_node = nullptr;
  if (CheckIfDynamicBatchScene(node_ptr, is_dynamic_batch, switchn_node) != SUCCESS) {
    GELOGE(INTERNAL_ERROR, "CheckIfDynamicBatchScene failed");
    return FAILED;
  }
  if (ProcessInputDtDynShape(node_ptr, is_dynamic_batch, switchn_node, dt_set) != SUCCESS) {
    GELOGE(INTERNAL_ERROR, "ProcessInputDtDynShape failed");
    return FAILED;
  }
  // check if need to set format
  string set_format;
  bool ret = ge::AttrUtils::GetStr(op_desc, ATTR_ATC_USER_DEFINE_FORMAT, set_format);
  if (ret && (!set_format.empty()) && (TypeUtils::SerialStringToFormat(set_format) == FORMAT_NC1HWC0)) {
    GELOGI("The format of node [%s] should be set to NC1HWC0.", node_ptr->GetName().c_str());
    if (ProcessInputNC1HWC0DynShape(node_ptr, is_dynamic_batch, switchn_node) != SUCCESS) {
      GELOGE(INTERNAL_ERROR, "ProcessInputNC1HWC0DynShape failed");
      return FAILED;
    }
  }
  return SUCCESS;
}

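// Reads the user-specified storage format/shape attrs from a tensor desc.
// storage_format comes back as FORMAT_RESERVED when nothing needs to change,
// and setting a storage format without a storage shape is an error.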
Status GetStorageFormatAndShape(OpDescPtr &op_desc, const GeTensorDescPtr &tensor_desc_ptr,
                                Format &storage_format, vector<int64_t> &dst_shape_dims) {
  GE_CHECK_NOTNULL(op_desc);
  GE_CHECK_NOTNULL(tensor_desc_ptr);
  storage_format = FORMAT_RESERVED;
  int64_t format = FORMAT_RESERVED;
  dst_shape_dims.clear();
  if (ge::AttrUtils::GetInt(*tensor_desc_ptr, ATTR_NAME_STORAGE_FORMAT, format)) {
    storage_format = static_cast<Format>(format);
    vector<int32_t> storage_shape;
    if (ge::AttrUtils::GetListInt(*tensor_desc_ptr, ATTR_NAME_STORAGE_SHAPE, storage_shape)) {
      for (auto dim : storage_shape) {
        dst_shape_dims.push_back(static_cast<int64_t>(dim));
      }
      GELOGI("Update node by storage format, node: [%s], storage_format: [%s], storage_shape:[%s]",
             op_desc->GetName().c_str(), TypeUtils::FormatToSerialString(storage_format).c_str(),
             formats::JoinToString(storage_shape).c_str());
    } else {
      ErrorManager::GetInstance().ATCReportErrMessage(
          "E15003", {"opname", "format"},
          {op_desc->GetName(), TypeUtils::FormatToSerialString(storage_format)});
      GELOGE(PARAM_INVALID, "Update node by storage format failed, storage_shape not set. "
             "node: [%s], storage_format [%s]",
             op_desc->GetName().c_str(), TypeUtils::FormatToSerialString(storage_format).c_str());
      return FAILED;
    }
    ge::Format old_format = tensor_desc_ptr->GetFormat();
    auto old_shape = tensor_desc_ptr->GetShape().GetDims();
    if (old_format == storage_format && old_shape == dst_shape_dims) {
      GELOGI("Update node by storage format, not changed.");
      storage_format = FORMAT_RESERVED;
      return SUCCESS;
    }
  }
  return SUCCESS;
}

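// Converts one NetOutput input desc (and the upstream Merge node's descs in
// dynamic scenes) to fp16 + NC1HWC0.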
Status ProcessNetoutputNodeFp16Nc1hwc0DynShape(GeTensorDesc &src_desc, GeTensorDescPtr &net_output_input_desc,
                                               NodePtr &node) {
  bool is_dynamic = CheckOpType(node, MERGE);
  auto src_op_desc = node->GetOpDesc();
  GE_CHECK_NOTNULL(src_op_desc);
  ge::GeShape src_shape = src_desc.GetShape();
  ge::Format src_format = src_desc.GetFormat();
  net_output_input_desc->SetDataType(DT_FLOAT16);
  if (is_dynamic) {
    auto merge_output = src_op_desc->MutableOutputDesc(0);
    GE_CHECK_NOTNULL(merge_output);
    merge_output->SetDataType(DT_FLOAT16);
    for (uint32_t i = 0; i < node->GetAllInDataAnchorsSize(); ++i) {
      auto merge_input = src_op_desc->MutableInputDesc(i);
      GE_CHECK_NOTNULL(merge_input);
      merge_input->SetDataType(DT_FLOAT16);
    }
  }
  std::vector<int64_t> dst_shape_dims;
  std::vector<int64_t> src_shape_dims = src_shape.GetDims();
  if (TransferShape2NC1HWC0(src_format, src_shape_dims, DT_FLOAT16, FORMAT_NC1HWC0, dst_shape_dims) != SUCCESS) {
    GELOGE(INTERNAL_ERROR, "Trans shape failed");
    return FAILED;
  }
  ge::GeShape dst_shape(dst_shape_dims);
  net_output_input_desc->SetFormat(FORMAT_NC1HWC0);
  net_output_input_desc->SetShape(dst_shape);
  if (is_dynamic) {
    auto merge_out = src_op_desc->MutableOutputDesc(0);
    GE_CHECK_NOTNULL(merge_out);
    if (ModifyFormatAndShapeForSingleTensor(merge_out) != SUCCESS) {
      GELOGE(INTERNAL_ERROR, "modify format and shape failed");
      return FAILED;
    }
    for (uint32_t i = 0; i < node->GetAllInDataAnchorsSize(); ++i) {
      auto merge_in = src_op_desc->MutableInputDesc(i);
      GE_CHECK_NOTNULL(merge_in);
      if (ModifyFormatAndShapeForSingleTensor(merge_in) != SUCCESS) {
        GELOGE(INTERNAL_ERROR, "modify format and shape failed");
        return FAILED;
      }
    }
  }
  return SUCCESS;
}

bool NeedUpdateDtByOutputTypeParm(OpDescPtr &netout_desc, uint32_t &index, ge::DataType &dt) {
  if (netout_desc == nullptr) {
    return false;
  }
  vector<string> output_dt_str;
  if (ge::AttrUtils::GetListStr(netout_desc, ATTR_ATC_USER_DEFINE_DATATYPE, output_dt_str)) {
    for (const auto &dt_str : output_dt_str) {
      vector<string> dt_str_split = StringUtils::Split(dt_str, ':');
      if (dt_str_split.size() == kUserDefinedElementCount) {
        if (dt_str_split[0] == to_string(index)) {
          dt = TypeUtils::SerialStringToDataType(dt_str_split[1]);
          GELOGI("The datatype of netoutput output %u should be set to %s.", index,
                 TypeUtils::DataTypeToSerialString(dt).c_str());
          return true;
        }
      }
    }
  }
  return false;
}

bool NeedUpdateFormatByOutputTypeParm(OpDescPtr &netout_desc, uint32_t &index) {
  if (netout_desc == nullptr) {
    return false;
  }
  vector<string> output_format_str;
  if (ge::AttrUtils::GetListStr(netout_desc, ATTR_ATC_USER_DEFINE_FORMAT, output_format_str)) {
    for (const auto &format_str : output_format_str) {
      vector<string> format_str_split = StringUtils::Split(format_str, ':');
      if (format_str_split.size() == kUserDefinedElementCount) {
        if (format_str_split[0] == to_string(index)) {
          GELOGI("The format of netoutput output %u should be set to NC1HWC0.", index);
          return true;
        }
      }
    }
  }
  return false;
}

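// Walks every input of the NetOutput node and applies the user-defined output
// data type and, when requested, the fp16 + NC1HWC0 layout.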
Status ProcessNetoutputNodeDynShape(NodePtr &node) {
  auto op_desc = node->GetOpDesc();
  GE_CHECK_NOTNULL(op_desc);
  ge::DataType output_data_type = ge::DT_FLOAT;
  for (const auto &in_anchor : node->GetAllInDataAnchors()) {
    auto index = static_cast<uint32_t>(in_anchor->GetIdx());
    auto peer_out = in_anchor->GetPeerOutAnchor();
    GE_CHECK_NOTNULL(peer_out);
    auto src_node = peer_out->GetOwnerNode();
    GE_CHECK_NOTNULL(src_node);
    bool is_dynamic = CheckOpType(src_node, MERGE);
    OpDescPtr src_op_desc = src_node->GetOpDesc();
    GE_CHECK_NOTNULL(src_op_desc);
    auto net_output_input_desc = op_desc->MutableInputDesc(index);
    GE_CHECK_NOTNULL(net_output_input_desc);
    ge::GeShape old_shape = net_output_input_desc->GetShape();
    ge::Format old_format = net_output_input_desc->GetFormat();
    ge::DataType old_dtype = net_output_input_desc->GetDataType();
    // Update datatype
    if (NeedUpdateDtByOutputTypeParm(op_desc, index, output_data_type)) {
      GELOGI("Enter into process output_type schedule");
      net_output_input_desc->SetDataType(output_data_type);
      if (is_dynamic) {
        auto merge_output = src_op_desc->MutableOutputDesc(0);
        GE_CHECK_NOTNULL(merge_output);
        merge_output->SetDataType(output_data_type);
        for (uint32_t i = 0; i < src_node->GetAllInDataAnchorsSize(); ++i) {
          auto merge_input = src_op_desc->MutableInputDesc(i);
          GE_CHECK_NOTNULL(merge_input);
          merge_input->SetDataType(output_data_type);
        }
      }
    }
    // check if is_output_adjust_hw_layout is set
    if (NeedUpdateFormatByOutputTypeParm(op_desc, index)) {
      if ((old_format != FORMAT_NCHW) && (old_format != FORMAT_NHWC) && (old_format != FORMAT_NC1HWC0)) {
        ErrorManager::GetInstance().ATCReportErrMessage(
            "E19014", {"opname", "value", "reason"},
            {op_desc->GetName(), "format[" + TypeUtils::FormatToSerialString(old_format) + "]",
             "only support FORMAT_NC1HWC0,FORMAT_NCHW,FORMAT_NHWC"});
        GELOGE(INTERNAL_ERROR, "Format is not one of NCHW, NHWC, NC1HWC0.");
        return FAILED;
      }
      GeTensorDesc old_desc(old_shape, old_format, old_dtype);
      if (ProcessNetoutputNodeFp16Nc1hwc0DynShape(old_desc, net_output_input_desc, src_node) != SUCCESS) {
        GELOGE(INTERNAL_ERROR, "ProcessNetoutputNodeFp16Nc1hwc0DynShape failed.");
        return FAILED;
      }
    }
  }
  return SUCCESS;
}
}  // namespace

GraphPrepare::GraphPrepare() : compute_graph_(nullptr) {}

GraphPrepare::~GraphPrepare() {}

/**
 * Recover the trans roads recorded for all variables (and their refs) in the
 * graph, and refresh the variable formats accordingly.
 * @param graph compute graph to update
 * @return SUCCESS on success, INTERNAL_ERROR otherwise
 */
Status GraphPrepare::UpdateVariableFormats(ComputeGraphPtr &graph) {
  GE_CHECK_NOTNULL(graph);
  auto var_names_to_refs = CollectVarNamesToRefs(graph);
  for (auto &node : graph->GetAllNodes()) {
    if (node == nullptr) {
      continue;
    }
    if (node->GetType() != VARIABLE) {
      continue;
    }
    auto trans_road = VarManager::Instance(graph->GetSessionID())->GetTransRoad(node->GetName());
    if (trans_road == nullptr) {
      GELOGD("The variable %s does not have any trans road", node->GetName().c_str());
      continue;
    }
    GELOGI("Recover the trans road for var %s reversely", node->GetName().c_str());
    auto ret = RecoverTransRoadForVar(node, *trans_road);
    if (ret != SUCCESS) {
      GELOGE(INTERNAL_ERROR, "Failed to recover trans road for var %s", node->GetName().c_str());
      return INTERNAL_ERROR;
    }
    auto iter = var_names_to_refs.find(node->GetName());
    if (iter != var_names_to_refs.end()) {
      ret = RecoverTransRoadForVarRef(iter->second, *trans_road);
      if (ret != SUCCESS) {
        GELOGE(INTERNAL_ERROR, "Failed to recover trans road for var ref %s", node->GetName().c_str());
        return INTERNAL_ERROR;
      }
    }
  }
  return SUCCESS;
}

void GraphPrepare::SetOptions(const ge::GraphManagerOptions &options) { options_ = options; }

Status GraphPrepare::Init(const ge::Graph &graph, uint64_t session_id) {
  compute_graph_ = GraphUtils::GetComputeGraph(graph);
  if (compute_graph_ != nullptr) {
    compute_graph_->SetSessionID(session_id);
  }
  session_id_ = session_id;
  Status ret = CheckGraph();
  if (ret != SUCCESS) {
    GELOGE(ret, "RunGraph graph check fail, ret:%u", ret);
    return ret;
  }
  (void)compute_graph_->TopologicalSorting();
  ret = CheckRefOp();
  if (ret != SUCCESS) {
    GELOGE(ret, "RunGraph check ref op fail, ret:%u", ret);
    return ret;
  }
  return SUCCESS;
}

Status GraphPrepare::CheckGraph() {
  if (compute_graph_ == nullptr) {
    GELOGE(GE_GRAPH_INIT_FAILED, "Graph prepare init compute graph is NULLPTR");
    return GE_GRAPH_INIT_FAILED;
  }
  auto nodes = compute_graph_->GetAllNodes();
  if (nodes.empty()) {
    GELOGE(GE_GRAPH_INIT_FAILED, "Invalid graph, no nodes in this graph.");
    return GE_GRAPH_INIT_FAILED;
  }
  for (const NodePtr &node : nodes) {
    GE_CHECK_NOTNULL(node);
    if (node->GetOpDesc() == nullptr) {
      GELOGE(GE_GRAPH_INIT_FAILED, "Check Graph node opdesc is NULL");
      return GE_GRAPH_INIT_FAILED;
    }
  }
  return SUCCESS;
}

Status GraphPrepare::CheckRefInputNode(const NodePtr &node, const std::string &input_name,
                                       const std::set<NodePtr> &ref_nodes) {
  // Acceptable input types are ref nodes, variables, and the Switch operator, which is issued by ME for dynamic
  // loss scale and is optimized in SwitchToStreamSwitchPass. ME does not differentiate between RefSwitch and
  // Switch, and only issues Switch.
  static std::set<std::string> acceptable_types = {ge::VARIABLE, ge::VARIABLEV2, ge::VARHANDLEOP,
                                                   ge::REFSWITCH, ge::REFMERGE, ge::REFENTER,
                                                   ge::REFNEXTITERATION, ge::REFEXIT, ge::SWITCH};
  GE_CHECK_NOTNULL(node);
  const auto &op_desc = node->GetOpDesc();
  GE_CHECK_NOTNULL(op_desc);
  const auto input_index = op_desc->GetInputIndexByName(input_name);
  const auto &in_anchor = node->GetInDataAnchor(input_index);
  GE_CHECK_NOTNULL(in_anchor);
  const auto &peer_out_anchor = in_anchor->GetPeerOutAnchor();
  GE_CHECK_NOTNULL(peer_out_anchor);
  const auto &input_node = peer_out_anchor->GetOwnerNode();
  GE_CHECK_NOTNULL(input_node);
  const auto &input_op_desc = input_node->GetOpDesc();
  GE_CHECK_NOTNULL(input_op_desc);
  bool is_ref = (ref_nodes.find(input_node) != ref_nodes.end());
  if (is_ref) {
    return SUCCESS;
  }
  auto input_type = input_op_desc->GetType();
  if (input_type == ge::FRAMEWORKOP) {
    if (!ge::AttrUtils::GetStr(input_op_desc, ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE, input_type)) {
      GELOGE(PARAM_INVALID, "Get original type failed.");
      return PARAM_INVALID;
    }
  }
  bool is_acceptable = (acceptable_types.find(input_type) != acceptable_types.end());
  if (!is_acceptable) {
    ErrorManager::GetInstance().ATCReportErrMessage(
        "E15005", {"opname", "optype", "opname1", "optype1"},
        {op_desc->GetName(), node->GetType(), input_op_desc->GetName(), input_op_desc->GetType()});
    GELOGE(PARAM_INVALID, "The ref input of ref node %s[%s] must be a ref node or variable, but %s[%s] isn't.",
           node->GetName().c_str(), node->GetType().c_str(), input_op_desc->GetName().c_str(),
           input_op_desc->GetType().c_str());
    return PARAM_INVALID;
  }
  return SUCCESS;
}

Status GraphPrepare::CheckRefOp() {
  GE_CHECK_NOTNULL(compute_graph_);
  std::set<NodePtr> ref_nodes;
  for (const NodePtr &node : compute_graph_->GetDirectNode()) {
    if (node == nullptr) {
      GELOGE(PARAM_INVALID, "param [node] must not be null.");
      return PARAM_INVALID;
    }
    auto op_desc = node->GetOpDesc();
    if (op_desc == nullptr) {
      GELOGE(PARAM_INVALID, "OpDesc of param [node] must not be null.");
      return PARAM_INVALID;
    }
    auto input_name_index = op_desc->GetAllInputName();
    auto outputs = op_desc->GetAllOutputName();
    for (const auto &name_index : input_name_index) {
      if (op_desc->GetOutputIndexByName(name_index.first) != -1) {
        if (CheckRefInputNode(node, name_index.first, ref_nodes) != SUCCESS) {
          GELOGE(PARAM_INVALID, "CheckRefInputNode failed.");
          return PARAM_INVALID;
        }
        (void)ref_nodes.insert(node);  // no need to check value
      }
    }
  }
  return SUCCESS;
}
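// Create a runtime context in the requested mode, make it current, and register it with
// RtContextUtil under the current session and graph id.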
Status GraphPrepare::SetRtContext(rtContext_t rt_context, rtCtxMode_t mode) {
  GE_CHECK_NOTNULL(compute_graph_);
  GELOGI("set rt_context, session id: %lu, graph id: %u, mode %d, device id:%u.", session_id_,
         compute_graph_->GetGraphID(), static_cast<int>(mode), ge::GetContext().DeviceId());
  GE_CHK_RT_RET(rtCtxCreate(&rt_context, mode, ge::GetContext().DeviceId()));
  GE_CHK_RT_RET(rtCtxSetCurrent(rt_context));
  RtContextUtil::GetInstance().AddRtContext(session_id_, compute_graph_->GetGraphID(), rt_context);
  return SUCCESS;
}
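// Recompute the memory size of a Data node's first output tensor and write it back to the
// output desc; called from UpdateInput in inference mode.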
Status GraphPrepare::AdjustDataOpOutput(const NodePtr &node) {
  if (node == nullptr) {
    GELOGE(GE_GRAPH_GRAPH_NODE_NULL, "Input node is NULL");
    return GE_GRAPH_GRAPH_NODE_NULL;
  }
  OpDescPtr op_desc_ptr = node->GetOpDesc();
  if (op_desc_ptr == nullptr) {
    GELOGE(GE_GRAPH_GRAPH_NODE_NULL, "Input node opdesc is NULL");
    return GE_GRAPH_GRAPH_NODE_NULL;
  }
  GeTensorDesc output = op_desc_ptr->GetOutputDesc(0);
  int64_t tensor_size = 0;
  graphStatus graph_status = TensorUtils::GetTensorMemorySizeInBytes(output, tensor_size);
  if (graph_status != GRAPH_SUCCESS) {
    ErrorManager::GetInstance().ATCReportErrMessage(
        "E19012", {"function", "reason"}, {"GetTensorMemorySizeInBytes", "opname is " + node->GetName()});
    GELOGE(graph_status, "GetTensorMemorySizeInBytes failed!");
    return FAILED;
  }
  TensorUtils::SetSize(output, tensor_size);
  graphStatus graph_ret = op_desc_ptr->UpdateOutputDesc(0, output);
  if (graph_ret != GRAPH_SUCCESS) {
    GELOGE(graph_ret, "UpdateOutputDesc fail, graph_ret:%u", graph_ret);
    return graph_ret;
  }
  return SUCCESS;
}
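// Copy each user-supplied tensor desc onto the matching Data node (matched via the
// ATTR_NAME_INDEX attribute), validating format, data type, and declared size on the way.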
Status GraphPrepare::UpdateInput(const std::vector<GeTensor> &user_input) {
  compute_graph_->SaveDataFormat(ge::TypeUtils::DomiFormatToFormat(GetLocalOmgContext().format));
  for (NodePtr &input_node : compute_graph_->GetDirectNode()) {
    GE_CHECK_NOTNULL(input_node);
    OpDescPtr op = input_node->GetOpDesc();
    GE_CHECK_NOTNULL(op);
    if (op->GetType() == DATA) {
      GeAttrValue::INT index = 0;
      if ((!(AttrUtils::GetInt(op, ATTR_NAME_INDEX, index))) || (GetLocalOmgContext().is_dynamic_input)) {
        GELOGW("Get index from data attr failed");
        continue;
      }
      if ((index < 0) || (static_cast<size_t>(index) >= user_input.size())) {
        std::string situation = "data op index[" + std::to_string(index) + "]";
        std::string reason = "it must be less than user_input size[" + std::to_string(user_input.size()) + "]";
        ErrorManager::GetInstance().ATCReportErrMessage("E19025", {"situation", "reason"}, {situation, reason});
        GELOGE(PARAM_INVALID, "user_input size = %zu, graph data op index = %ld.", user_input.size(), index);
        return FAILED;
      }
      if (IsDynamicDims(input_node)) {
        continue;
      }
      GeTensorDesc desc(user_input[index].GetTensorDesc());
      auto format = desc.GetFormat();
      auto origin_format = desc.GetOriginFormat();
      // Data may carry an internal format (e.g. FRACTAL_NZ) in the single-op process, such as GEMM.
      bool need_check_internal_format = (!IsTansDataOpData(input_node)) && (!options_.is_single_op);
      if (need_check_internal_format) {
        bool is_internal = TypeUtils::IsInternalFormat(format) || TypeUtils::IsInternalFormat(origin_format);
        if (is_internal) {
          ErrorManager::GetInstance().ATCReportErrMessage("E19025", {"situation", "reason"},
              {"Input format[" + TypeUtils::FormatToSerialString(format) + "] or origin_format[" +
               TypeUtils::FormatToSerialString(origin_format) + "]", "it is not supported"});
          GELOGE(PARAM_INVALID, "Input format %s or origin_format %s is not supported.",
                 TypeUtils::FormatToSerialString(format).c_str(),
                 TypeUtils::FormatToSerialString(origin_format).c_str());
          return FAILED;
        }
      }
      auto data_type = desc.GetDataType();
      uint32_t length = 1;
      bool type_ret = TypeUtils::GetDataTypeLength(data_type, length);
      if (!type_ret) {
        ErrorManager::GetInstance().ATCReportErrMessage("E19025", {"situation", "reason"},
            {"Input datatype[" + TypeUtils::DataTypeToSerialString(data_type) + "]", "it is not supported"});
        GELOGE(PARAM_INVALID, "Input datatype %s is not supported.",
               TypeUtils::DataTypeToSerialString(data_type).c_str());
        return FAILED;
      }
      int64_t desc_shape = desc.GetShape().GetShapeSize();
      FMK_INT64_UINT32_MULCHECK(desc_shape, length);
      int64_t shape_size = desc_shape * length;
      GE_IF_BOOL_EXEC(shape_size == 0 && desc.GetShape().GetDimNum() == 0, shape_size = static_cast<int64_t>(length));
      int64_t size = 0;
      GE_IF_BOOL_EXEC(ge::TensorUtils::GetSize(desc, size) != GRAPH_SUCCESS,
                      GELOGE(INTERNAL_ERROR, "TensorUtils GetSize failed");
                      return FAILED);
      bool size_check = (size != 0 && shape_size != size);
      if (size_check) {
        std::string situation = "input data size[" + std::to_string(size) +
                                "] and shape_size[" + std::to_string(shape_size) + "]";
        std::string reason = "because size != 0 and shape_size != size";
        ErrorManager::GetInstance().ATCReportErrMessage("E19025", {"situation", "reason"}, {situation, reason});
        GELOGE(PARAM_INVALID, "input data size =%ld, shape_size =%ld.", size, shape_size);
        return FAILED;
      }
      ge::TensorUtils::SetSize(desc, shape_size);
      graphStatus graph_ret = op->UpdateInputDesc(0, desc);
      if (graph_ret != GRAPH_SUCCESS) {
        GELOGE(graph_ret, "UpdateInputDesc fail, graph_ret:%u", graph_ret);
        return graph_ret;
      }
      // Size will be recalculated in the build stage
      ge::TensorUtils::SetSize(desc, 0);
      graph_ret = op->UpdateOutputDesc(0, desc);
      if (graph_ret != GRAPH_SUCCESS) {
        GELOGE(graph_ret, "UpdateOutputDesc fail, graph_ret:%u", graph_ret);
        return graph_ret;
      }
      if (!options_.train_graph_flag) {
        Status ret = AdjustDataOpOutput(input_node);
        GE_IF_BOOL_EXEC(ret != SUCCESS, GELOGE(ret, "AdjustDataOpOutput fail, ret:%u", ret); return ret);
      }
    }
  }
  return SUCCESS;
}
Status GraphPrepare::TryDoAipp() {
  // In inference mode with an aipp configure file, parse the file and insert the aipp ops.
  if ((!options_.train_graph_flag) && (!options_.insert_op_file.empty())) {
    GE_DUMP(compute_graph_, "Before_insert_aipp");
    Status ret = ge::InsertNewOpUtil::Instance().Init();
    if (ret != SUCCESS) {
      GELOGE(INTERNAL_ERROR, "TryDoAipp: InsertNewOpUtil instance failed.");
      return INTERNAL_ERROR;
    }
    ret = ge::InsertNewOpUtil::Instance().Parse(options_.insert_op_file.c_str());
    if (ret != SUCCESS) {
      GELOGE(GE_GRAPH_OPTIMIZE_INSERT_OP_PARSE_FAILED, "TryDoAipp: parse config file %s failed",
             options_.insert_op_file.c_str());
      return GE_GRAPH_OPTIMIZE_INSERT_OP_PARSE_FAILED;
    }
    ret = ge::InsertNewOpUtil::Instance().InsertAippOps(compute_graph_, options_.insert_op_file);
    if (ret != SUCCESS) {
      GELOGE(GE_GRAPH_OPTIMIZE_INSERT_DYN_OP_FAILED, "TryDoAipp: insert aipp op failed, ret:%u", ret);
      return GE_GRAPH_OPTIMIZE_INSERT_DYN_OP_FAILED;
    }
  }
  return SUCCESS;
}
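// Run format inference, shape inference, then format inference again, bracketed by the
// temporary control edges that ResourcePairProcess adds before and removes after.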
Status GraphPrepare::FormatAndShapeProcess() {
  Status ret = ResourcePairProcess("add");
  if (ret != SUCCESS) {
    GELOGE(ret, "ResourcePairProcess failed");
    return ret;
  }
  GE_TIMESTAMP_START(InferOriginFormat1);
  ret = compute_graph_->InferOriginFormat();
  GE_TIMESTAMP_END(InferOriginFormat1, "GraphPrepare::InferOriginFormat1");
  GE_DUMP(compute_graph_, "after_first_inferformat");
  if (ret != SUCCESS) {
    GELOGE(ret, "Prepare Graph first inferformat failed");
    return ret;
  }
  GE_TIMESTAMP_START(InferShapeForPreprocess);
  ret = InferShapeForPreprocess();
  GE_TIMESTAMP_END(InferShapeForPreprocess, "GraphPrepare::InferShapeForPreprocess");
  GE_DUMP(compute_graph_, "after_infershape");
  if (ret != SUCCESS) {
    GELOGE(GE_GRAPH_INFERSHAPE_FAILED, "Prepare Graph infershape failed");
    return GE_GRAPH_INFERSHAPE_FAILED;
  }
  GE_TIMESTAMP_START(InferOriginFormat2);
  ret = compute_graph_->InferOriginFormat();
  GE_TIMESTAMP_END(InferOriginFormat2, "GraphPrepare::InferOriginFormat2");
  if (ret != SUCCESS) {
    GELOGE(ret, "Prepare Graph inferformat failed");
    return ret;
  }
  ret = ResourcePairProcess("remove");
  if (ret != SUCCESS) {
    return ret;
  }
  return ret;
}
Status GraphPrepare::ResourcePairProcess(const std::string &action) {
  PassManager control_pass;
  // Temporary graph pass logic for resource infershape
  if (options_.train_graph_flag) {
    try {
      if (action == "add") {
        (void)control_pass.AddPass("ResourcePairProcess::ResourcePairAddControlPass", new ResourcePairAddControlPass);
      } else {
        (void)control_pass.AddPass("ResourcePairProcess::ResourcePairRemoveControlPass",
                                   new ResourcePairRemoveControlPass);
      }
    } catch (std::bad_alloc &e) {
      GELOGE(INTERNAL_ERROR, "Add pass failed, bad memory allocation occurs, action:%s.", action.c_str());
      return INTERNAL_ERROR;
    }
  }
  Status ret = control_pass.Run(compute_graph_);
  if (ret != SUCCESS && ret != NOT_CHANGED) {
    GELOGE(ret, "Run ResourcePairControlPass failed, action:%s, ret:%u.", action.c_str(), ret);
    return ret;
  }
  return SUCCESS;
}
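// Apply any user-specified storage format (and its shape) to the first input of each Data
// node and to every output of the NetOutput node; FORMAT_RESERVED means none was specified.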
Status GraphPrepare::UpdateDataNetOutputByStorageFormat() {
  for (auto &node_ptr : compute_graph_->GetAllNodes()) {
    GE_CHECK_NOTNULL(node_ptr);
    if (node_ptr->GetType() == DATA) {
      uint32_t index = 0;
      auto op_desc = node_ptr->GetOpDesc();
      GE_CHECK_NOTNULL(op_desc);
      const GeTensorDescPtr input = op_desc->MutableInputDesc(index);
      Format storage_format = FORMAT_RESERVED;
      vector<int64_t> dst_shape_dims;
      if (GetStorageFormatAndShape(op_desc, input, storage_format, dst_shape_dims) != SUCCESS) {
        GELOGE(INTERNAL_ERROR, "Get storage format for input failed");
        return FAILED;
      }
      if (storage_format == FORMAT_RESERVED) {
        continue;
      }
      if (ModifyDataNetOutputFormatAndShape(op_desc, index, storage_format, dst_shape_dims) != SUCCESS) {
        GELOGE(INTERNAL_ERROR, "Modify format and shape for input failed");
        return FAILED;
      }
    }
    if (node_ptr->GetType() == ge::NETOUTPUT) {
      auto op_desc = node_ptr->GetOpDesc();
      GE_CHECK_NOTNULL(op_desc);
      for (uint32_t index = 0; index < op_desc->GetOutputsSize(); index++) {
        const GeTensorDescPtr output = op_desc->MutableOutputDesc(index);
        Format storage_format = FORMAT_RESERVED;
        vector<int64_t> dst_shape_dims;
        if (GetStorageFormatAndShape(op_desc, output, storage_format, dst_shape_dims) != SUCCESS) {
          GELOGE(INTERNAL_ERROR, "Get storage format from output failed");
          return FAILED;
        }
        if (storage_format == FORMAT_RESERVED) {
          continue;
        }
        if (ModifyDataNetOutputFormatAndShape(op_desc, index, storage_format, dst_shape_dims) != SUCCESS) {
          GELOGE(INTERNAL_ERROR, "Modify format and shape for output failed");
          return FAILED;
        }
      }
    }
  }
  return SUCCESS;
}
Status GraphPrepare::SaveOriginalGraphToOmModel() {
  if (options_.save_original_model == "true") {
    ModelHelper model_helper;
    Status ret = model_helper.SaveOriginalGraphToOmModel(ge::GraphUtils::CreateGraphFromComputeGraph(compute_graph_),
                                                         options_.original_model_file);
    if (ret != SUCCESS) {
      // If saving the original model fails, the process continues anyway.
      GELOGW("SaveOriginalGraphToOmModel fail");
    }
  }
  return SUCCESS;
}
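// Helper macros for the prepare stages below: PP_RUN executes one stage via GE_RUN and logs
// success; PP_RUN_AND_DUMP additionally dumps the graph after the stage for debugging.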
#define PP_RUN_AND_DUMP(name, func, ...)                                               \
  do {                                                                                 \
    GE_RUN(Prepare, func, __VA_ARGS__);                                                \
    GE_DUMP(compute_graph, "PrepareAfter" name);                                       \
    GELOGI("Prepare %s on graph %s success.", name, compute_graph->GetName().c_str()); \
  } while (0)
#define PP_RUN(name, func, ...)                                                        \
  do {                                                                                 \
    GE_RUN(Prepare, func, __VA_ARGS__);                                                \
    GELOGI("Prepare %s on graph %s success.", name, compute_graph->GetName().c_str()); \
  } while (0)
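// Top-level prepare pipeline for dynamic-shape graphs; the stages run in the listed order.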
Status GraphPrepare::PrepareDynShape(ConstGraphPtr graph, const std::vector<GeTensor> &user_input,
                                     ge::ComputeGraphPtr &compute_graph, uint64_t session_id) {
  GE_CHECK_NOTNULL(graph);
  GE_CHECK_NOTNULL(compute_graph);
  GetLocalOmgContext().type = static_cast<domi::FrameworkType>(options_.framework_type);
  const Graph &const_graph = *graph;
  PP_RUN("Init", Init, const_graph, session_id);
  PP_RUN("SetRtContext", SetRtContext, rtContext_t(), RT_CTX_GEN_MODE);
  PP_RUN_AND_DUMP("CheckAndUpdateInput", CheckAndUpdateInput, user_input);
  PP_RUN_AND_DUMP("GraphEquivalentTransformation", GraphEquivalentTransformation);
  PP_RUN_AND_DUMP("ProcessOutput", ProcessNetOutput);
  PP_RUN_AND_DUMP("ProcessMultiBatch", multibatch::ProcessMultiBatch, compute_graph_);
  PP_RUN_AND_DUMP("InsertAipp", TryDoAipp);
  PP_RUN_AND_DUMP("ProcessBeforeInfershape", ProcessBeforeInfershape);
  PP_RUN_AND_DUMP("InferFormatAndShape", FormatAndShapeProcess);
  PP_RUN_AND_DUMP("GetDynamicOutputShape", multibatch::GetDynamicOutputShape, compute_graph_);
  PP_RUN_AND_DUMP("ProcessAippStage2", InsertNewOpUtil::Instance().UpdateDataNodeByAipp, compute_graph_);
  PP_RUN("SaveOriginalGraphToOmModel", SaveOriginalGraphToOmModel);
  PP_RUN_AND_DUMP("PrepareOptimize", PrepareOptimize);
  return SUCCESS;
}
Status GraphPrepare::RecordAIPPInfo(ge::ComputeGraphPtr &compute_graph) {
  PP_RUN("RecordAIPPInfo", InsertNewOpUtil::Instance().RecordAIPPInfoToData, compute_graph_);
  return SUCCESS;
}
Status GraphPrepare::PrepareRunningFormatRefiner() {
  auto compute_graph = compute_graph_;
  PassManager pass_manager;
  GE_CHK_STATUS_RET(pass_manager.AddPass("PrepareRunningFormatRefiner::VariablePrepareOpPass",
                                         new (std::nothrow) VariablePrepareOpPass))
  GE_TIMESTAMP_START(pass_manager);
  auto ret = pass_manager.Run(compute_graph);
  GE_TIMESTAMP_END(pass_manager, "GraphPrepare::PrepareRunningFormatRefiner");
  if (ret != SUCCESS && ret != NOT_CHANGED) {
    GELOGE(ret, "Run passes for running format refiner failed, ret:%u.", ret);
    return ret;
  }
  PP_RUN_AND_DUMP("UpdateInputOutputByUserOptions", UpdateInputOutputByOptions);
  PP_RUN_AND_DUMP("UpdateVariableFormats", UpdateVariableFormats, compute_graph_);
  return SUCCESS;
}
Status GraphPrepare::SwitchOpOptimize(ComputeGraphPtr &compute_graph) {
  if (compute_graph == nullptr) {
    GELOGE(GE_GRAPH_NULL_INPUT, "Input Graph is NULL");
    return GE_GRAPH_NULL_INPUT;
  }
  GEPass ge_passes(compute_graph);
  NamesToPass hccl_group;
  HcclGroupPass hccl_group_pass;
  GELOGD("Add hccl group pass success");
  hccl_group.emplace_back("HcclGroupPass", &hccl_group_pass);
  auto ret = ge_passes.Run(hccl_group);
  if (ret != SUCCESS) {
    GELOGE(ret, "Run HcclGroupPass pass for preprocess failed, ret:%u.", ret);
    return ret;
  }
  ret = compute_graph->TopologicalSorting();
  if (ret != SUCCESS) {
    GELOGE(ret, "Graph topological sort failed, ret:%u.", ret);
    return ret;
  }
  return SUCCESS;
}
#undef PP_RUN_AND_DUMP
#undef PP_RUN
Status GraphPrepare::GenerateInfershapeGraph(ConstGraphPtr graph) {
  if (graph == nullptr) {
    GELOGE(GE_GRAPH_NULL_INPUT, "Input Graph is NULL");
    return GE_GRAPH_NULL_INPUT;
  }
  const Graph &const_graph = *graph;
  Status ret = Init(const_graph, 0);
  if (ret != SUCCESS) {
    GELOGE(ret, "Init graph_prepare fail, ret:%u", ret);
    return ret;
  }
  GE_DUMP(compute_graph_, "after_parser");
  GELOGI("Start infershape for dump json process.");
  ret = compute_graph_->InferOriginFormat();
  GE_DUMP(compute_graph_, "after_inferformat");
  if (ret != SUCCESS) {
    GELOGE(ret, "Prepare Graph inferformat failed");
    return ret;
  }
  InferShapePass infer_shape_pass;
  NamesToPass names_to_passes;
  names_to_passes.emplace_back("InferShapePass", &infer_shape_pass);
  GEPass ge_passes(compute_graph_);
  ret = ge_passes.Run(names_to_passes);
  GE_DUMP(compute_graph_, "after_infershape");
  if (ret != SUCCESS) {
    GELOGE(ret, "Run ge_passes infershape for preprocess failed, ret:%u.", ret);
    return ret;
  }
  ShapeRefiner::ClearContextMap();
  return SUCCESS;
}
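// Verify the weight tensors of every Constant node, including framework ops whose original
// type is Constant.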
Status GraphPrepare::CheckConstOp() {
  for (auto &node_ptr : compute_graph_->GetAllNodes()) {
    GE_CHECK_NOTNULL(node_ptr);
    if (node_ptr->GetType() == CONSTANT) {
      Status ret = VerifyConstOp(node_ptr);
      GE_CHK_BOOL_RET_STATUS(ret == SUCCESS, ret, "Const Op Check failed");
    } else if (node_ptr->GetType() == FRAMEWORKOP) {
      auto op_desc = node_ptr->GetOpDesc();
      if (op_desc == nullptr) {
        GELOGE(PARAM_INVALID, "Get op desc failed");
        return PARAM_INVALID;
      }
      std::string original_type;
      GE_IF_BOOL_EXEC(ge::AttrUtils::GetStr(op_desc, ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE, original_type),
                      GELOGI("Get FrameWorkOp original type [%s]", original_type.c_str()));
      GELOGI("original type is %s", original_type.c_str());
      if (original_type == CONSTANT) {
        Status ret = VerifyConstOp(node_ptr);
        GE_CHK_BOOL_RET_STATUS(ret == SUCCESS, ret, "Const Op Check failed");
      }
    }
  }
  return SUCCESS;
}
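// Check that a Constant node's weight data size is consistent with its tensor desc: a scalar
// carries exactly one element, an empty shape carries no data, and everything else must match
// shape_size * element length exactly.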
Status GraphPrepare::VerifyConstOp(const NodePtr &node) {
  GE_CHECK_NOTNULL(node);
  auto op_desc = node->GetOpDesc();
  GE_CHECK_NOTNULL(op_desc);
  ConstGeTensorPtr ge_tensor_ptr;
  if (!(AttrUtils::GetTensor(op_desc, ATTR_NAME_WEIGHTS, ge_tensor_ptr))) {
    GELOGE(PARAM_INVALID, "Get value from const attr failed");
    return PARAM_INVALID;
  }
  GE_CHECK_NOTNULL(ge_tensor_ptr);
  auto data_size = ge_tensor_ptr->GetData().GetSize();
  auto ge_tensor_desc = ge_tensor_ptr->GetTensorDesc();
  int64_t shape_size = ge_tensor_desc.GetShape().GetShapeSize();
  auto data_type = ge_tensor_desc.GetDataType();
  uint32_t length = 1;
  bool type_ret = TypeUtils::GetDataTypeLength(data_type, length);
  if (!type_ret) {
    ErrorManager::GetInstance().ATCReportErrMessage("E19025", {"situation", "reason"},
        {"Input datatype[" + TypeUtils::DataTypeToSerialString(data_type) + "]", "it is not supported"});
    GELOGE(PARAM_INVALID, "Input datatype %s is not supported.", TypeUtils::DataTypeToSerialString(data_type).c_str());
    return FAILED;
  }
  FMK_INT64_UINT32_MULCHECK(shape_size, length);
  GELOGI("Const real value Size:%zu, op_desc Shape Size:%ld, data_type:%s.", data_size, shape_size * length,
         TypeUtils::DataTypeToSerialString(data_type).c_str());
  if (shape_size == 0) {
    if (ge_tensor_desc.GetShape().GetDims().size() == 0) {
      // shape = [] means it is a scalar tensor.
      GE_CHK_BOOL_EXEC(data_size / length == 1,
                       ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"},
                                                                       {"Const is invalid scalar tensor."});
                       return PARAM_INVALID, "Const is invalid scalar tensor.");
    } else {
      // shape = [x, y, 0, ...] means it is a vector tensor whose value is [].
      GE_CHK_BOOL_EXEC(data_size == 0,
                       ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"},
                                                                       {"Const is invalid vector scalar."});
                       return PARAM_INVALID, "Const is invalid vector scalar.");
    }
  } else {
    GE_CHK_BOOL_EXEC(data_size == static_cast<size_t>(shape_size * length) && data_size != 0,
                     ErrorManager::GetInstance().ATCReportErrMessage(
                         "E10043", {"reason"}, {"Const input data size is not equal to tensor desc shape"});
                     return PARAM_INVALID, "Const input data size is not equal to tensor desc shape");
  }
  return SUCCESS;
}
bool GraphPrepare::IsDynamicDims(const NodePtr &input_node) {
  auto data_shape = NodeUtils::GetOutputDesc(*input_node, kDataOutIndex).GetShape();
  const auto &dims = data_shape.GetDims();
  bool all_is_positive = std::all_of(dims.begin(), dims.end(), [](int64_t val) { return val >= 0; });
  if (!all_is_positive && !options_.input_shape.empty() && !options_.dynamic_dims.empty() &&
      options_.dynamic_node_type != kInvalidDynaimcDimsType) {
    GELOGI("No need to check and update desc info, the dims of %s is %s.", input_node->GetName().c_str(),
           formats::JoinToString(dims).c_str());
    return true;
  }
  return false;
}
Status GraphPrepare::CheckUserInput(const std::vector<GeTensor> &user_input) {
  if (GetLocalOmgContext().is_dynamic_input) {
    return SUCCESS;
  }
  unsigned int node_num = 0;
  unsigned int data_num = 0;
  for (NodePtr &input_node : compute_graph_->GetDirectNode()) {
    GE_CHECK_NOTNULL(input_node);
    OpDescPtr op = input_node->GetOpDesc();
    GE_CHECK_NOTNULL(op);
    node_num++;
    if (op->GetType() == DATA || op->GetType() == AIPPDATA) {
      data_num++;
      GeAttrValue::INT index = 0;
      if (!(AttrUtils::GetInt(op, ATTR_NAME_INDEX, index))) {
        GELOGE(GE_GRAPH_INIT_FAILED, "Get index from attr failed");
        return GE_GRAPH_INIT_FAILED;
      }
      if ((index < 0) || (static_cast<size_t>(index) >= user_input.size())) {
        std::string situation = "data op index[" + std::to_string(index) + "]";
        std::string reason = "it must be less than user_input size[" + std::to_string(user_input.size()) + "]";
        ErrorManager::GetInstance().ATCReportErrMessage("E19025", {"situation", "reason"}, {situation, reason});
        GELOGE(GE_GRAPH_INIT_FAILED, "user_input size:%zu, data op index:%ld.", user_input.size(), index);
        return GE_GRAPH_INIT_FAILED;
      }
      if (IsDynamicDims(input_node)) {
        continue;
      }
      GeTensorDesc desc(user_input[index].GetTensorDesc());
      for (size_t i = 0; i < desc.GetShape().GetDimNum(); ++i) {
        if (desc.GetShape().GetDim(i) < 0) {
          std::string situation = "data dim[" + std::to_string(i) + "][" +
                                  std::to_string(desc.GetShape().GetDim(i)) + "]";
          std::string reason = "it must be >= 0";
          ErrorManager::GetInstance().ATCReportErrMessage("E19025", {"situation", "reason"}, {situation, reason});
          GELOGE(GE_GRAPH_INIT_FAILED, "data dim %zu is not supported, need >= 0, real:%ld.", i,
                 desc.GetShape().GetDim(i));
          return GE_GRAPH_INIT_FAILED;
        }
      }
    }
  }
  if (node_num <= data_num) {
    GELOGW("Prepare check user input, data_num = %u, node_num = %u", data_num, node_num);
  }
  return SUCCESS;
}
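// Run the infershape pass chain (with constant folding) on the graph; the AICPU constant
// folding pass is added only when the AICPU_CONSTANT_FOLDING_ON environment variable is set
// and at least one device is visible.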
Status GraphPrepare::InferShapeForPreprocess() {
  GELOGI("Start infershape for preprocess.");
  GEPass ge_passes(compute_graph_);
  NamesToPass names_to_passes;
  AssertPass assert_pass;
  if (!options_.train_graph_flag) {
    names_to_passes.emplace_back("AssertPass", &assert_pass);
  }
  InferShapePass infer_shape_pass;
  names_to_passes.emplace_back("InferShapePass", &infer_shape_pass);
  ReplaceWithEmptyConstPass replace_with_empty_const_pass;
  names_to_passes.emplace_back("ReplaceWithEmptyConstPass", &replace_with_empty_const_pass);
  DimensionComputePass dimension_compute_pass;
  names_to_passes.emplace_back("DimensionComputePass", &dimension_compute_pass);
  ConstantFoldingPass constant_folding_pass;
  names_to_passes.emplace_back("ConstantFoldingPass", &constant_folding_pass);
  int32_t dev_count = 0;
  AicpuConstantFoldingPass aicpu_constant_folding_pass;
  const char *aicpu_constant_folding_on = std::getenv("AICPU_CONSTANT_FOLDING_ON");
  rtError_t rt_err = RT_ERROR_NONE;
  if (aicpu_constant_folding_on != nullptr) {
    rt_err = rtGetDeviceCount(&dev_count);
    if (rt_err == RT_ERROR_NONE) {
      Status result = SetRtContext(rtContext_t(), RT_CTX_NORMAL_MODE);
      if (result != SUCCESS) {
        GELOGE(result, "Set rt context failed.");
        return result;
      }
      names_to_passes.emplace_back("AicpuConstantFoldingPass", &aicpu_constant_folding_pass);
    }
  }
  Status ret = ge_passes.Run(names_to_passes);
  if (aicpu_constant_folding_on != nullptr) {
    if (rt_err == RT_ERROR_NONE) {
      Status result = SetRtContext(rtContext_t(), RT_CTX_GEN_MODE);
      if (result != SUCCESS) {
        GELOGE(result, "Set rt context failed.");
        return result;
      }
    }
  }
  ShapeRefiner::ClearContextMap();
  if (ret != SUCCESS) {
    GELOGE(ret, "Run ge_passes infershape for preprocess failed, ret:%u.", ret);
    return ret;
  }
  return SUCCESS;
}
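// Preprocess-stage optimization: check read/write conflicts, then run original-graph passes,
// node passes, and graph passes in sequence, and finally re-sort the graph topologically.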
Status GraphPrepare::PrepareOptimize() {
  GELOGI("Start optimize for preprocess.");
  // Check read/write conflicts before optimizing.
  GraphOptimize graph_optimize;
  bool has_conflict = false;
  graph_optimize.CheckRWConflict(compute_graph_, has_conflict);
  if (has_conflict) {
    GELOGE(GRAPH_PARAM_INVALID, "There is a RW conflict, stop optimizing.");
    return FAILED;
  }
  PassManager original_graph_passes;
  // Graph pass
  try {
    (void)original_graph_passes.AddPass("PrepareOptimize::ShapeOperateOpRemovePass", new ShapeOperateOpRemovePass);
    (void)original_graph_passes.AddPass("PrepareOptimize::ReplaceTransShapePass", new ReplaceTransShapePass);
  } catch (std::bad_alloc &e) {
    GELOGE(INTERNAL_ERROR, "Add pass failed, bad memory allocation occurs.");
    return INTERNAL_ERROR;
  }
  GE_TIMESTAMP_START(original_graph_passes);
  Status ret = original_graph_passes.Run(compute_graph_);
  GE_TIMESTAMP_END(original_graph_passes, "GraphPrepare::OriginalGraphPasses");
  if (ret != SUCCESS && ret != NOT_CHANGED) {
    GELOGE(ret, "Run graph passes optimize for preprocess failed, ret:%u.", ret);
    return ret;
  }
  // New pass
  GEPass ge_passes(compute_graph_);
  NamesToPass names_to_passes;
  EnterPass enter_pass;
  names_to_passes.emplace_back("EnterPass", &enter_pass);
  CondPass cond_pass;
  names_to_passes.emplace_back("CondPass", &cond_pass);
  PrintOpPass print_pass;
  if (options_.enable_print_op_pass) {
    names_to_passes.emplace_back("PrintOpPass", &print_pass);
  }
  NoUseReshapeRemovePass no_use_reshape_remove_pass;
  names_to_passes.emplace_back("NoUseReshapeRemovePass", &no_use_reshape_remove_pass);
  DropOutPass dropout_pass;
  AssertPass assert_pass;
  UnusedConstPass unused_const_pass;
  StopGradientPass stop_gradient_pass;
  PreventGradientPass prevent_gradient_pass;
  PlaceholderWithDefaultPass placeholder_with_default_pass;
  GuaranteeConstPass guarantee_const_pass;
  VarIsInitializedOpPass var_is_initialized_pass;
  ParallelConcatStartOpPass parallel_concat_start_op_pass;
  IdentityPass identity_pass(false);
  AssignPass assign_pass;
  SnapshotPass snapshot_pass;
  if (!options_.train_graph_flag) {
    names_to_passes.emplace_back("DropOutPass", &dropout_pass);
    names_to_passes.emplace_back("AssertPass", &assert_pass);
  }
  names_to_passes.emplace_back("UnusedConstPass", &unused_const_pass);
  names_to_passes.emplace_back("StopGradientPass", &stop_gradient_pass);
  names_to_passes.emplace_back("PreventGradientPass", &prevent_gradient_pass);
  names_to_passes.emplace_back("PlaceholderWithDefaultPass", &placeholder_with_default_pass);
  names_to_passes.emplace_back("SnapshotPass", &snapshot_pass);
  names_to_passes.emplace_back("GuaranteeConstPass", &guarantee_const_pass);
  names_to_passes.emplace_back("VarIsInitializedOpPass", &var_is_initialized_pass);
  names_to_passes.emplace_back("ParallelConcatStartOpPass", &parallel_concat_start_op_pass);
  names_to_passes.emplace_back("IdentityPass", &identity_pass);
  if (GetContext().GetHostExecFlag()) {
    names_to_passes.emplace_back("AssignPass", &assign_pass);
  }
  GE_TIMESTAMP_START(names_to_passes);
  ret = ge_passes.Run(names_to_passes);
  GE_TIMESTAMP_END(names_to_passes, "GraphPrepare::NamesToPasses");
  if (ret != SUCCESS) {
    GELOGE(ret, "Run ge_passes optimize for preprocess failed, ret:%u.", ret);
    return ret;
  }
  PassManager graph_pass;
  try {
    (void)graph_pass.AddPass("PrepareOptimize::PrunePass", new PrunePass);
    // TODO: temporarily insert hccl memcpy during graph prepare to keep redundant memcpy from being inserted.
    (void)graph_pass.AddPass("PrepareOptimize::HcclMemcpyPass", new (std::nothrow) HcclMemcpyPass);
  } catch (std::bad_alloc &e) {
    GELOGE(INTERNAL_ERROR, "Add pass failed, bad memory allocation occurs.");
    return INTERNAL_ERROR;
  }
  GE_TIMESTAMP_START(graph_passes);
  ret = graph_pass.Run(compute_graph_);
  GE_TIMESTAMP_END(graph_passes, "GraphPrepare::GraphPasses");
  if (ret != SUCCESS && ret != NOT_CHANGED) {
    GELOGE(ret, "Run graph passes optimize for preprocess failed, ret:%u.", ret);
    return ret;
  }
  // The constant for train is CONSTANTOP, and is CONSTANT for inference. They will be unified in the future.
  TypeConversionOfConstant();
  ret = compute_graph_->TopologicalSorting();
  if (ret != SUCCESS) {
    GELOGE(ret, "Graph topological sort failed, ret:%u.", ret);
    return ret;
  }
  GELOGI("End optimize for preprocess.");
  return SUCCESS;
}
void GraphPrepare::TypeConversionOfConstant() {
  if (options_.train_graph_flag) {
    GELOGD("trans CONSTANT to CONSTANTOP in train.");
    for (ge::NodePtr &n : compute_graph_->GetAllNodes()) {
      // This can ensure that n is not a null pointer
      if (n->GetOpDesc()->GetType() == CONSTANT) {
        n->GetOpDesc()->SetType(CONSTANTOP);
      }
    }
  } else {
    GELOGD("trans CONSTANTOP to CONSTANT in inference.");
    for (ge::NodePtr &n : compute_graph_->GetAllNodes()) {
      // This can ensure that n is not a null pointer
      if (n->GetOpDesc()->GetType() == CONSTANTOP) {
        n->GetOpDesc()->SetType(CONSTANT);
      }
    }
  }
}
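// Rewrite For loops into equivalent While constructs before shape inference.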
Status GraphPrepare::GraphEquivalentTransformation() {
  NamesToPass names_to_pass;
  ForPass for_pass;
  names_to_pass.emplace_back("ForToWhilePass", &for_pass);
  return GEPass(compute_graph_).Run(names_to_pass);
}
Status GraphPrepare::ProcessBeforeInfershape() {
  NamesToPass names_to_passes;
  CondRemovePass condition_remove_pass;
  names_to_passes.emplace_back("CondRemovePass", &condition_remove_pass);
  GE_TIMESTAMP_START(ProcessCondRemove);
  auto ret = GEPass(compute_graph_).Run(names_to_passes);
  GE_TIMESTAMP_END(ProcessCondRemove, "GraphManager::ProcessCondRemove");
  if (ret != SUCCESS) {
    GELOGE(ret, "Run ge_passes optimize for OptimizeAfterMergeSubGraph failed, ret:%d.", ret);
    return ret;
  }
  return SUCCESS;
}
Status GraphPrepare::ProcessNetOutput() {
  PassManager graph_passes_before_infershape;
  try {
    if (options_.train_graph_flag) {
      graph_passes_before_infershape.AddPass("ProcessNetOutput::SavePass", new (std::nothrow) SavePass);
    }
    graph_passes_before_infershape.AddPass("ProcessNetOutput::NetOutputPass", new (std::nothrow) NetOutputPass);
    graph_passes_before_infershape.AddPass("ProcessNetOutput::DataPass",
                                           new (std::nothrow) DataPass);  // Add NetOutput first.
  } catch (std::bad_alloc &e) {
    GELOGE(INTERNAL_ERROR, "Add pass failed, bad memory allocation occurs.");
    return INTERNAL_ERROR;
  }
  auto ret = graph_passes_before_infershape.Run(compute_graph_);
  if ((ret != SUCCESS) && (ret != NOT_CHANGED)) {
    GELOGE(ret, "Run graph_passes_before_infershape failed, ret:%d.", ret);
    return ret;
  }
  return SUCCESS;
}
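// Validate the user-supplied inputs, propagate them into the graph's Data nodes, and verify
// Constant nodes; empty user input returns early after recording the input size.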
Status GraphPrepare::CheckAndUpdateInput(const std::vector<GeTensor> &user_input) {
  compute_graph_->SetInputSize(user_input.size());
  if (user_input.empty()) {
    return SUCCESS;
  }
  auto ret = CheckUserInput(user_input);
  if (ret != SUCCESS) {
    GELOGE(ret, "Check user input failed.");
    return ret;
  }
  ret = UpdateInput(user_input);
  if (ret != SUCCESS) {
    GELOGE(ret, "UpdateInput fail, ret:%u", ret);
    return ret;
  }
  if (user_input.size() != 0) {
    ret = CheckConstOp();
    if (ret != SUCCESS) {
      GELOGE(ret, "CheckConstOp fail, ret:%u", ret);
      return ret;
    }
  } else {
    ret = compute_graph_->TopologicalSorting();
    if (ret != SUCCESS) {
      GELOGE(ret, "graph prepare error: compute_graph_->TopologicalSorting");
      return FAILED;
    }
  }
  return SUCCESS;
}
Status GraphPrepare::UpdateInputOutputByOptions() {
  auto ret = UpdateDataNetOutputByStorageFormat();
  if (ret != SUCCESS) {
    GELOGE(ret, "Update format according to storage format failed.");
    return ret;
  }
  if (options_.train_graph_flag) {
    GELOGI("This is train mode, no need to do this schedule.");
    return SUCCESS;
  }
  for (auto &node_ptr : compute_graph_->GetDirectNode()) {
    GE_CHECK_NOTNULL(node_ptr);
    if (CheckIfNeedSetNdFormat(node_ptr) != SUCCESS) {
      GELOGE(INTERNAL_ERROR, "Set node [%s] format ND failed", node_ptr->GetName().c_str());
      return FAILED;
    }
    if (node_ptr->GetType() == DATA) {
      if (ProcessDataNodeDynShape(node_ptr) != SUCCESS) {
        GELOGE(INTERNAL_ERROR, "Process data node failed");
        return FAILED;
      }
    }
    if (node_ptr->GetType() == ge::NETOUTPUT) {
      if (ProcessNetoutputNodeDynShape(node_ptr) != SUCCESS) {
        GELOGE(INTERNAL_ERROR, "Process netoutput node failed");
        return FAILED;
      }
    }
  }
  return SUCCESS;
}
bool GraphPrepare::IsTansDataOpData(const ge::NodePtr &var_node) {
  // Return true if any data consumer of var_node is a TransData node.
  for (auto &out_anchor : var_node->GetAllOutDataAnchors()) {
    GE_RT_FALSE_CHECK_NOTNULL(out_anchor);
    for (auto &in_anchor : out_anchor->GetPeerInDataAnchors()) {
      GE_RT_FALSE_CHECK_NOTNULL(in_anchor);
      ge::NodePtr dst_node = in_anchor->GetOwnerNode();
      GE_RT_FALSE_CHECK_NOTNULL(dst_node);
      if (dst_node->GetType() == TRANSDATA) {
        return true;
      }
    }
  }
  return false;
}
}  // namespace ge

The Graph Engine (GE) is a submodule of MindSpore implemented in C++. It sits between the front-end module ME and the underlying hardware and serves as the bridge between them: GE takes the graph issued by ME as input, applies a series of deep graph optimizations, and outputs a graph that runs efficiently on the underlying hardware. GE performs optimizations tailored to the hardware architecture of the Ascend AI processor to fully exploit its compute power. During model training and inference, GE is invoked automatically and is transparent to the user. GE consists of two main parts, GE API and GE Core; the detailed architecture is shown in the diagram below.