You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; can include dashes ('-'); and can be up to 35 characters long.

multi_batch_copy_graph.cc 52 kB

5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511661167116811691170117111721173117411751176117711781179118011811182118311841185118611871188118911901191119211931194119511961197119811991200120112021203120412051206120712081209121012111212
  1. /**
  2. * Copyright 2019-2020 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
#include "graph/preprocess/multi_batch_copy_graph.h"

#include <algorithm>
#include <queue>
#include <set>
#include <string>

#include "common/formats/utils/formats_trans_utils.h"
#include "common/ge/ge_util.h"
#include "common/util/error_manager/error_manager.h"
#include "framework/common/debug/ge_log.h"
#include "framework/common/ge_inner_error_codes.h"
#include "framework/common/string_util.h"
#include "framework/common/types.h"
#include "framework/omg/omg_inner_types.h"
#include "graph/common/local_context.h"
#include "graph/debug/ge_attr_define.h"
#include "graph/ge_context.h"
#include "graph/passes/multi_batch_clone_pass.h"
#include "graph/passes/prune_pass.h"
#include "graph/preprocess/multi_batch_options.h"
#include "graph/utils/attr_utils.h"
#include "graph/utils/graph_utils.h"
#include "graph/utils/node_utils.h"
#include "graph/utils/tensor_utils.h"
#include "graph/utils/type_utils.h"
#include "inc/pass_manager.h"
  40. using std::set;
  41. using std::string;
  42. using std::vector;
namespace ge {
namespace multibatch {
namespace {
// Attr name linking a data node to the SwitchN inserted after it
// (presumably read back by later passes — confirm against consumers).
const char *const kMbatchSwitchnName = "mbatch-switch-name";
// SwitchN input anchor indexes: 0 carries the data tensor, 1 the predicate.
const int kSwitchNDataIndex = 0;
const int kSwitchNPredIndex = 1;
// Data nodes expose a single anchor on each side, always index 0.
const int kDataOutIndex = 0;
const int kDataInIndex = 0;
const int kMergeDataOutIndex = 0;
// Sentinel values; exact use is outside this excerpt — see call sites.
const int kStaticOutput = -1;
const int kDynmaicDims = -1;  // NOTE(review): typo "Dynmaic" kept as-is; renaming would break other references
// Expected count of dynamic dims: 1 for --dynamic_batch_size,
// 2 for --dynamic_image_size (presumably height and width — confirm).
const int kDynamicBatchDynamicDimsNum = 1;
const int kDynamicImgSizeDynamciDimsNum = 2;
  56. inline bool IsDataLikeType(const std::string &node_type) { return (node_type == DATA) || (node_type == AIPP); }
// Creates a Merge node named `name` with `input_num` data inputs ("x0".."xN")
// and the two standard Merge outputs ("y" plus the int32 "value_index"),
// flags it as inserted by the multi-batch pass, and adds it to `graph`.
// Returns the created node, or nullptr on any failure (error already logged).
NodePtr InsertMergeNodeToGraph(const std::string &name, size_t input_num, const ComputeGraphPtr &graph) {
  OpDescPtr desc = MakeShared<OpDesc>();
  if (desc == nullptr) {
    GELOGE(OUT_OF_MEMORY, "Failed to insert merge node, name %s", name.c_str());
    return nullptr;
  }
  desc->SetName(name);
  desc->SetType(MERGE);
  GeTensorDesc tensor_desc;  // default (unspecified) desc shared by all data inputs and "y"
  for (size_t i = 0; i < input_num; ++i) {
    auto ret = desc->AddInputDesc("x" + std::to_string(i), tensor_desc);
    GE_IF_BOOL_EXEC(ret != GRAPH_SUCCESS,
                    GELOGE(INTERNAL_ERROR, "Failed to create merge node %s, failed to add input %zu, error-code %u",
                           name.c_str(), i, ret);
                    return nullptr);
  }
  auto ret = desc->AddOutputDesc("y", tensor_desc);
  GE_IF_BOOL_EXEC(ret != GRAPH_SUCCESS,
                  GELOGE(INTERNAL_ERROR, "Failed to create merge node %s, failed to add output 'y', error-code %u",
                         name.c_str(), ret);
                  return nullptr);
  // "value_index" is always int32, regardless of the data type of the inputs.
  tensor_desc.SetDataType(DT_INT32);
  ret = desc->AddOutputDesc("value_index", tensor_desc);
  if (ret != GRAPH_SUCCESS) {
    GELOGE(INTERNAL_ERROR, "Failed to create merge node %s, failed to add output 'value_index', error-code %u",
           name.c_str(), ret);
    return nullptr;
  }
  // Mark the node so later passes can tell it was inserted by multi-batch.
  if (!AttrUtils::SetBool(desc, ATTR_INSERT_BY_MBATCH, true)) {
    GELOGE(INTERNAL_ERROR, "Failed to create merge node %s, failed to add attr", name.c_str());
    return nullptr;
  }
  return graph->AddNode(desc);
}
// Creates the per-batch copy of `node`, named "<node>_ascend_mbatch_batch_<n>":
// clones the OpDesc (including per-tensor attrs on inputs/outputs), tags the
// copy with batch label "Batch_<n>", records the origin op name for dump
// mapping, and adds the copy to the node's owner graph.
// Returns the new node, or nullptr on failure (logged).
NodePtr InsertCopyNode(const NodePtr &node, size_t n) {
  const std::string &name = node->GetName() + "_ascend_mbatch_batch_" + std::to_string(n);
  auto src_op_desc = node->GetOpDesc();
  GE_IF_BOOL_EXEC(src_op_desc == nullptr, GELOGE(INTERNAL_ERROR, "Failed to copy node %s to %s, the OpDesc is null",
                                                 node->GetName().c_str(), name.c_str());
                  return nullptr);
  auto desc = AttrUtils::CopyOpDesc(src_op_desc);
  GE_IF_BOOL_EXEC(desc == nullptr, GELOGE(OUT_OF_MEMORY, "Failed to create op desc for copy node for node %s name %s",
                                          node->GetName().c_str(), name.c_str());
                  return nullptr);
  desc->SetName(name);
  // NOTE(review): CopyOpDesc above may already copy op attrs, which would make
  // this call redundant — confirm against AttrUtils before removing.
  desc->CopyAttrsFrom(*src_op_desc);
  for (uint32_t i = 0; i < node->GetAllInDataAnchorsSize(); ++i) {
    auto input_desc = desc->MutableInputDesc(i);
    // A missing input desc is tolerated: warn and skip this index...
    GE_IF_BOOL_EXEC(input_desc == nullptr, GELOGW("Get null input desc by index %u from node %s when copy from %s", i,
                                                  desc->GetName().c_str(), node->GetName().c_str());
                    continue);
    input_desc->CopyAttrsFrom(src_op_desc->GetInputDesc(i));
  }
  for (uint32_t i = 0; i < node->GetAllOutDataAnchorsSize(); ++i) {
    auto output_desc = desc->MutableOutputDesc(i);
    // ...but a missing output desc is fatal.
    GE_IF_BOOL_EXEC(output_desc == nullptr,
                    GELOGE(INTERNAL_ERROR, "Failed to get output desc by index %u from node %s when copy from %s", i,
                           desc->GetName().c_str(), node->GetName().c_str());
                    return nullptr);
    output_desc->CopyAttrsFrom(src_op_desc->GetOutputDesc(i));
  }
  const std::string &batch_label = "Batch_" + std::to_string(n);
  if (!AttrUtils::SetStr(desc, ATTR_NAME_BATCH_LABEL, batch_label)) {
    GELOGE(FAILED, "set attr ATTR_NAME_BATCH_LABEL failed, node:%s.", name.c_str());
    return nullptr;
  }
  // Best-effort: record the original op name for data-dump origin mapping.
  (void)AttrUtils::SetListStr(desc, ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, {node->GetName()});
  auto graph = node->GetOwnerComputeGraph();
  return graph->AddNode(desc);
}
  127. bool IsAllDimsPositive(const std::vector<int64_t> &dims) {
  128. for (auto dim : dims) {
  129. if (dim < 0) {
  130. return false;
  131. }
  132. }
  133. return true;
  134. }
  135. NodePtr InsertConst(const std::string &name, const ComputeGraphPtr &graph) {
  136. auto desc = MakeShared<OpDesc>();
  137. if (desc == nullptr) {
  138. GELOGE(OUT_OF_MEMORY, "Failed to create const op %s, out of memory", name.c_str());
  139. return nullptr;
  140. }
  141. desc->SetName(name);
  142. desc->SetType(CONSTANT);
  143. GeTensor tensor;
  144. tensor.SetData(std::vector<uint8_t>({0}));
  145. if (!AttrUtils::SetTensor(desc, ATTR_NAME_WEIGHTS, tensor)) {
  146. GELOGE(OUT_OF_MEMORY, "Failed to init tensor value for const %s", name.c_str());
  147. return nullptr;
  148. }
  149. if (!AttrUtils::SetBool(desc, ATTR_INSERT_BY_MBATCH, true)) {
  150. GELOGE(OUT_OF_MEMORY, "Failed to set insert flag for const node %s", name.c_str());
  151. return nullptr;
  152. }
  153. if (desc->AddOutputDesc(GeTensorDesc()) != GRAPH_SUCCESS) {
  154. GELOGE(OUT_OF_MEMORY, "Failed to add output desc for const node %s", name.c_str());
  155. return nullptr;
  156. }
  157. return graph->AddNode(desc);
  158. }
  159. bool IsOnlyOutputToAipp(const NodePtr &node) {
  160. for (const auto &out_node : node->GetOutDataNodes()) {
  161. if (out_node->GetType() != AIPP) {
  162. return false;
  163. }
  164. }
  165. return true;
  166. }
  167. Status CheckDataShape(const std::vector<NodePtr> &nodes) {
  168. size_t unknown_shape_count = 0;
  169. for (const auto &node : nodes) {
  170. if (node->GetType() != DATA) {
  171. continue;
  172. }
  173. for (auto dim : NodeUtils::GetOutputDesc(*node, kDataOutIndex).GetShape().GetDims()) {
  174. if (dim < 0) {
  175. unknown_shape_count++;
  176. break;
  177. }
  178. }
  179. }
  180. if (unknown_shape_count == 0) {
  181. ErrorManager::GetInstance().ATCReportErrMessage("E10040");
  182. GELOGE(PARAM_INVALID,
  183. "Need unknow shape data when user set --dynamic_batch_size, --dynamic_image_size or --dynamic_dims");
  184. return PARAM_INVALID;
  185. }
  186. return SUCCESS;
  187. }
  188. } // namespace
  189. Status MultiBatchGraphCopyer::CopyGraph() {
  190. auto ret = Init();
  191. if (ret != SUCCESS) {
  192. return ret;
  193. }
  194. ret = CheckDataShape(origin_data_nodes_);
  195. if (ret != SUCCESS) {
  196. return ret;
  197. }
  198. if (LabelStatus() != SUCCESS) {
  199. GELOGE(INTERNAL_ERROR, "Failed to label status for all nodes.");
  200. return INTERNAL_ERROR;
  201. }
  202. ret = CreateNewNodes();
  203. if (ret != SUCCESS) {
  204. return ret;
  205. }
  206. ret = LinkEdges();
  207. if (ret != SUCCESS) {
  208. return ret;
  209. }
  210. ret = InsertIdentityAfterSwitchN();
  211. if (ret != SUCCESS) {
  212. GELOGE(INTERNAL_ERROR, "Failed to insert identity nodes after switchn node.");
  213. return INTERNAL_ERROR;
  214. }
  215. GELOGI("Begin to remove useless nodes by prune pass after copy process");
  216. PrunePass prune_pass;
  217. ret = prune_pass.Run(graph_);
  218. if (ret != SUCCESS) {
  219. GELOGE(ret, "Failed to prune");
  220. return ret;
  221. }
  222. return CheckCopyResult(origin_data_nodes_);
  223. }
  224. Status MultiBatchGraphCopyer::Init() {
  225. auto ret = CheckArguments();
  226. if (ret != SUCCESS) {
  227. return ret;
  228. }
  229. for (auto &node : graph_->GetAllNodes()) {
  230. origin_all_nodes_.emplace_back(node);
  231. if (IsDataLikeType(node->GetType())) {
  232. origin_data_nodes_.emplace_back(node);
  233. }
  234. }
  235. return SUCCESS;
  236. }
// Assigns a status to every original node, in three passes:
//   1. seed: data nodes with an unknown (negative) dim become
//      kNodeInBatchBranch;
//   2. fixed-point propagation: any node with at least one in-node already in
//      the batch branch joins it, repeated until no change;
//   3. final classification (may override pass-1/2 labels): nodes owning
//      subgraphs are kNodeNotSupportNode, NETOUTPUT is kNodeOutBatchBranch,
//      data-like nodes become kNodeStartNode unless they only feed AIPP,
//      and anything still unlabeled is kNodeOutBatchBranch.
Status MultiBatchGraphCopyer::LabelStatus() {
  for (const auto &data : origin_data_nodes_) {
    auto data_shape = NodeUtils::GetOutputDesc(*data, kDataOutIndex).GetShape();
    if (!IsAllDimsPositive(data_shape.GetDims())) {
      origin_nodes_status_[data.get()] = kNodeInBatchBranch;
    }
  }
  bool changed = true;
  // If anyone of in node is kNodeInBatchBranch, it is also kNodeInBatchBranch
  while (changed) {
    changed = false;
    for (const auto &node : origin_all_nodes_) {
      auto iter = origin_nodes_status_.find(node.get());
      if (iter != origin_nodes_status_.end()) {
        continue;  // already labeled; this loop never relabels
      }
      for (auto &in_node : node->GetInAllNodes()) {
        bool is_in_batch = origin_nodes_status_.find(in_node.get()) != origin_nodes_status_.end() &&
                           origin_nodes_status_[in_node.get()] == kNodeInBatchBranch;
        if (is_in_batch) {
          origin_nodes_status_[node.get()] = kNodeInBatchBranch;
          changed = true;  // keep iterating until a full pass makes no change
          break;
        }
      }
    }
  }
  for (const auto &node : origin_all_nodes_) {
    // Nodes that own subgraphs cannot be copied per batch by this pass.
    if (!(node->GetOpDesc()->GetSubgraphInstanceNames().empty())) {
      origin_nodes_status_[node.get()] = kNodeNotSupportNode;
      continue;
    }
    if (node->GetType() == NETOUTPUT) {
      origin_nodes_status_[node.get()] = kNodeOutBatchBranch;
      continue;
    }
    if (IsDataLikeType(node->GetType())) {
      // A data node that only feeds AIPP stays outside the batch branch;
      // presumably the AIPP path handles it separately — confirm.
      if (IsOnlyOutputToAipp(node)) {
        origin_nodes_status_[node.get()] = kNodeOutBatchBranch;
      } else {
        origin_nodes_status_[node.get()] = kNodeStartNode;
      }
      continue;
    }
    if (origin_nodes_status_.find(node.get()) == origin_nodes_status_.end()) {
      origin_nodes_status_[node.get()] = kNodeOutBatchBranch;
    }
  }
  return SUCCESS;
}
  287. Status MultiBatchGraphCopyer::CreateNewNodes() {
  288. shape_data_ = InsertShapeDataNode();
  289. if (shape_data_ == nullptr) {
  290. GELOGE(INTERNAL_ERROR, "Failed to create the shape data node for muti-batch");
  291. return INTERNAL_ERROR;
  292. }
  293. for (const auto &node : origin_all_nodes_) {
  294. auto node_type = node->GetType();
  295. Status ret = INTERNAL_ERROR;
  296. auto branch_status = GetNodeStatus(node);
  297. GELOGD("Process node %s, status %d", node->GetName().c_str(), static_cast<int>(branch_status));
  298. switch (branch_status) {
  299. case kNodeStartNode:
  300. GELOGD("Name: %s, type: %s, status: kNodeStartNode.", node->GetName().c_str(), node->GetType().c_str());
  301. ret = UpdateDataToDynamicInfo(node);
  302. if (ret != SUCCESS) {
  303. break;
  304. }
  305. ret = InsertSwitchNForData(node);
  306. if (ret == SUCCESS) {
  307. ret = UpdateMaxShapeToData(node);
  308. }
  309. break;
  310. case kNodeInBatchBranch:
  311. GELOGD("Name: %s, type: %s, status: kNodeInBatchBranch.", node->GetName().c_str(), node->GetType().c_str());
  312. ret = CopyNodeInBatchBranch(node);
  313. break;
  314. case kNodeOutBatchBranch:
  315. GELOGD("Name: %s, type: %s, status: kNodeOutBatchBranch.", node->GetName().c_str(), node->GetType().c_str());
  316. ret = InsertMergeForEdgeNode(node);
  317. break;
  318. case kNodeNotSupportNode:
  319. GELOGD("Name: %s, type: %s, status: kNodeNotSupportNode.", node->GetName().c_str(), node->GetType().c_str());
  320. break;
  321. default:
  322. GELOGE(INTERNAL_ERROR, "Unexpected status %d on node %s", static_cast<int>(branch_status),
  323. node->GetName().c_str());
  324. break;
  325. }
  326. if (ret != SUCCESS) {
  327. GELOGE(ret, "Failed to deal with node %s in multi-batch process", node->GetName().c_str());
  328. return ret;
  329. }
  330. }
  331. return SUCCESS;
  332. }
// Returns (creating and caching on first request) the Merge node that gathers
// the per-batch outputs of `node` at output anchor `index`. The cache
// (nodes_to_merge_nodes_) holds one slot per output anchor of the node.
NodePtr MultiBatchGraphCopyer::InsertMergeNode(const NodePtr &node, int index) {
  if (index < 0) {
    // the merge node must has data inputs, if origin connection is a control
    // edge, we use data edge instead
    index = 0;
  }
  auto &merge_nodes = nodes_to_merge_nodes_[node.get()];
  if (merge_nodes.empty()) {
    auto count = node->GetAllOutDataAnchorsSize();
    if (count == 0) {
      count = 1;  // control-only nodes still get one slot (index 0 above)
    }
    merge_nodes.resize(count, nullptr);
  }
  if (merge_nodes.at(index) != nullptr) {
    return merge_nodes[index];  // already created for this output
  }
  auto merge_node_name = node->GetName() + "_ascend_mbatch_merge_" + std::to_string(index);
  // One merge input per dynamic shape.
  auto merge_node = InsertMergeNodeToGraph(merge_node_name, shapes_.size(), node->GetOwnerComputeGraph());
  GE_IF_BOOL_EXEC(merge_node == nullptr, GELOGE(INTERNAL_ERROR, "Failed to create merge node for node %s, out index %d",
                                                node->GetName().c_str(), index);
                  return nullptr);
  merge_nodes[index] = merge_node;
  GELOGI("Create merge node %s for node %s index %d", merge_node_name.c_str(), node->GetName().c_str(), index);
  return merge_node;
}
// Re-creates the incoming *data* edges of `origin_node` on its per-batch copy
// `copyed_node`. For each input anchor, the source is resolved in order:
//   1. the source is a dynamic data node -> connect from its SwitchN output
//      for this batch (batch_num);
//   2. the source is in the batch branch -> connect from the source's copy
//      for this batch, same output index;
//   3. otherwise the source lies outside the branch -> connect straight from
//      the original source anchor.
// Returns SUCCESS, or INTERNAL_ERROR when an edge cannot be added.
Status MultiBatchGraphCopyer::CopyInDataEdges(const NodePtr &origin_node, int batch_num, const NodePtr &copyed_node) {
  for (auto &in_anchor : origin_node->GetAllInDataAnchors()) {
    auto origin_src_anchor = in_anchor->GetPeerOutAnchor();
    if (origin_src_anchor == nullptr) {
      GELOGD("The node %s does not have input on index %d", origin_node->GetName().c_str(), in_anchor->GetIdx());
      continue;  // unconnected input, nothing to copy
    }
    auto origin_src_node = origin_src_anchor->GetOwnerNode();
    auto dst_anchor = copyed_node->GetInDataAnchor(in_anchor->GetIdx());
    GE_CHECK_NOTNULL(dst_anchor);
    // Case 1: source data node has a SwitchN inserted after it.
    auto switchn_iter = data_nodes_to_switchn_.find(origin_src_node.get());
    if (switchn_iter != data_nodes_to_switchn_.end()) {
      auto ret = GraphUtils::AddEdge(switchn_iter->second->GetOutDataAnchor(batch_num), dst_anchor);
      if (ret != GRAPH_SUCCESS) {
        GELOGE(INTERNAL_ERROR, "Failed to add data edge between %s(%d) to %s(%d), error-code %u",
               switchn_iter->second->GetName().c_str(), batch_num, copyed_node->GetName().c_str(), in_anchor->GetIdx(),
               ret);
        return INTERNAL_ERROR;
      }
      GELOGD("Add data edge from %s(%d) to %s(%d)", switchn_iter->second->GetName().c_str(), batch_num,
             copyed_node->GetName().c_str(), in_anchor->GetIdx());
      continue;
    }
    // Case 2: source was itself copied per batch.
    auto batch_branch_iter = nodes_to_batch_nodes_.find(origin_src_node.get());
    if (batch_branch_iter != nodes_to_batch_nodes_.end()) {
      auto src_batch_node = batch_branch_iter->second.at(batch_num);
      auto ret = GraphUtils::AddEdge(src_batch_node->GetOutDataAnchor(origin_src_anchor->GetIdx()), dst_anchor);
      if (ret != GRAPH_SUCCESS) {
        GELOGE(INTERNAL_ERROR, "Failed to add data edge between %s(%d) to %s(%d), error-code %u",
               src_batch_node->GetName().c_str(), batch_num, copyed_node->GetName().c_str(), in_anchor->GetIdx(), ret);
        return INTERNAL_ERROR;
      }
      GELOGD("Add data edge from %s(%d) to %s(%d)", src_batch_node->GetName().c_str(), batch_num,
             copyed_node->GetName().c_str(), in_anchor->GetIdx());
      continue;
    }
    // Case 3: source is outside the batch branch; reuse the original anchor.
    auto ret = GraphUtils::AddEdge(origin_src_anchor, dst_anchor);
    if (ret != GRAPH_SUCCESS) {
      GELOGE(INTERNAL_ERROR, "Failed to add data edge between origin node %s(%d) to copyed %s(%d)",
             origin_src_node->GetName().c_str(), origin_src_anchor->GetIdx(), copyed_node->GetName().c_str(),
             dst_anchor->GetIdx());
      return INTERNAL_ERROR;
    }
    GELOGD("Add data edge between branch-out %s(%d) to branch-in %s(%d)", origin_src_node->GetName().c_str(),
           origin_src_anchor->GetIdx(), copyed_node->GetName().c_str(), dst_anchor->GetIdx());
  }
  return SUCCESS;
}
  407. Status MultiBatchGraphCopyer::CopyInControlEdges(const NodePtr &node, int batch_num, const NodePtr &copyed_node) {
  408. for (auto &origin_src_node : node->GetInControlNodes()) {
  409. auto switchn_iter = data_nodes_to_switchn_.find(origin_src_node.get());
  410. if (switchn_iter != data_nodes_to_switchn_.end()) {
  411. // reconnect data node
  412. auto ret = GraphUtils::AddEdge(switchn_iter->second->GetOutControlAnchor(), copyed_node->GetInControlAnchor());
  413. if (ret != GRAPH_SUCCESS) {
  414. GELOGE(INTERNAL_ERROR, "Failed to add control edge between %s to %s, error-code %u",
  415. switchn_iter->second->GetName().c_str(), copyed_node->GetName().c_str(), ret);
  416. return INTERNAL_ERROR;
  417. }
  418. GELOGD("Add control edge from %s to %s", switchn_iter->second->GetName().c_str(), copyed_node->GetName().c_str());
  419. continue;
  420. }
  421. auto batch_branch_iter = nodes_to_batch_nodes_.find(origin_src_node.get());
  422. if (batch_branch_iter != nodes_to_batch_nodes_.end()) {
  423. // reconnect node in batch branch
  424. auto src_batch_node = batch_branch_iter->second.at(batch_num);
  425. auto ret = GraphUtils::AddEdge(src_batch_node->GetOutControlAnchor(), copyed_node->GetInControlAnchor());
  426. if (ret != GRAPH_SUCCESS) {
  427. GELOGE(INTERNAL_ERROR, "Failed to add data edge between %s to %s, error-code %u",
  428. src_batch_node->GetName().c_str(), copyed_node->GetName().c_str(), ret);
  429. return INTERNAL_ERROR;
  430. }
  431. GELOGD("Add control edge from %s to %s", src_batch_node->GetName().c_str(), copyed_node->GetName().c_str());
  432. continue;
  433. }
  434. auto ret = GraphUtils::AddEdge(origin_src_node->GetOutControlAnchor(), copyed_node->GetInControlAnchor());
  435. if (ret != GRAPH_SUCCESS) {
  436. GELOGE(INTERNAL_ERROR, "Failed to add control edge from origin %s to copyed %s",
  437. origin_src_node->GetName().c_str(), copyed_node->GetName().c_str());
  438. return INTERNAL_ERROR;
  439. }
  440. GELOGD("Add control edge between branch-out %s to branch-in %s", origin_src_node->GetName().c_str(),
  441. copyed_node->GetName().c_str());
  442. }
  443. return SUCCESS;
  444. }
  445. NodePtr MultiBatchGraphCopyer::InsertShapeDataNode() {
  446. auto desc = MakeShared<OpDesc>();
  447. if (desc == nullptr) {
  448. GELOGE(OUT_OF_MEMORY, "Failed to create shape data node, out of memory");
  449. return nullptr;
  450. }
  451. string node_name = "ascend_mbatch_shape_data";
  452. // Only flush subgraph name
  453. if (graph_->GetParentGraph() != nullptr) {
  454. node_name = graph_->GetName() + "_" + node_name;
  455. }
  456. desc->SetName(node_name);
  457. desc->SetType(DATA);
  458. GeTensorDesc tensor_desc;
  459. tensor_desc.SetFormat(FORMAT_ND);
  460. tensor_desc.SetShape(GeShape({static_cast<int64_t>(shapes_.at(0).size())}));
  461. tensor_desc.SetDataType(DT_INT64);
  462. auto ret = desc->AddInputDesc(tensor_desc);
  463. if (ret != GRAPH_SUCCESS) {
  464. GELOGE(INTERNAL_ERROR, "Failed to add input desc for created data");
  465. return nullptr;
  466. }
  467. ret = desc->AddOutputDesc(tensor_desc);
  468. if (ret != GRAPH_SUCCESS) {
  469. GELOGE(INTERNAL_ERROR, "Failed to add output desc for created data");
  470. return nullptr;
  471. }
  472. if (!AttrUtils::SetBool(desc, ATTR_INSERT_BY_MBATCH, true)) {
  473. GELOGE(INTERNAL_ERROR, "Failed to add attr for created data");
  474. return nullptr;
  475. }
  476. auto data_node = graph_->AddNode(desc);
  477. if (data_node == nullptr) {
  478. GELOGE(INTERNAL_ERROR, "Failed to add shape data node to graph");
  479. return nullptr;
  480. }
  481. ret = GraphUtils::AppendInputNode(graph_, data_node);
  482. if (ret != GRAPH_SUCCESS) {
  483. GELOGE(INTERNAL_ERROR, "Failed to append data node %s as input to graph", data_node->GetName().c_str());
  484. return nullptr;
  485. }
  486. return data_node;
  487. }
  488. Status MultiBatchGraphCopyer::CheckArguments() {
  489. if (graph_ == nullptr) {
  490. GELOGE(PARAM_INVALID, "Failed to copy graph, the graph is null");
  491. return PARAM_INVALID;
  492. }
  493. return CheckDynamicParams(shapes_);
  494. }
  495. Status MultiBatchGraphCopyer::CheckCopyResult(const std::vector<NodePtr> &start_nodes) {
  496. for (auto &node : start_nodes) {
  497. if (IsOnlyOutputToAipp(node)) {
  498. continue;
  499. }
  500. auto dims = NodeUtils::GetOutputDesc(*node, kDataOutIndex).GetShape().GetDims();
  501. if (!IsAllDimsPositive(dims)) {
  502. GELOGE(INTERNAL_ERROR, "Failed to copy multi batch graph, the node %s still has unknown shape %s",
  503. node->GetName().c_str(), formats::ShapeToString(dims).c_str());
  504. return INTERNAL_ERROR;
  505. }
  506. }
  507. return SUCCESS;
  508. }
  509. bool MultiBatchGraphCopyer::IsInBatchBranch(const NodePtr &node) {
  510. return (nodes_to_batch_nodes_.count(node.get()) > 0) || (data_nodes_to_switchn_.count(node.get()) > 0);
  511. }
// Connects every output of the SwitchN inserted after `data` to the matching
// input of `merge` (one edge per dynamic shape).
// The caller should make sure that the there is a SwitchN node in the map
Status MultiBatchGraphCopyer::LinkDataToMerge(const NodePtr &data, const NodePtr &merge) {
  auto &switchn = data_nodes_to_switchn_[data.get()];
  GELOGI("Link edge between data %s to merge %s throw switchn %s", data->GetName().c_str(), merge->GetName().c_str(),
         switchn->GetName().c_str());
  for (size_t i = 0; i < shapes_.size(); ++i) {
    // SwitchN output i (batch i) feeds merge input i.
    auto ret = GraphUtils::AddEdge(switchn->GetOutDataAnchor(i), merge->GetInDataAnchor(i));
    GE_IF_BOOL_EXEC(ret != GRAPH_SUCCESS,
                    GELOGE(INTERNAL_ERROR, "Failed to add edge between switchn %s(%zu) to merge %s(%zu), error-code %u",
                           switchn->GetName().c_str(), i, merge->GetName().c_str(), i, ret);
                    return INTERNAL_ERROR);
  }
  return SUCCESS;
}
// Wires output `out_index` of each per-batch copy of `node` into `merge`,
// one copy per merge input. A copy with no data outputs gets a helper Const
// (control-linked from the copy) so the merge still receives a data edge.
Status MultiBatchGraphCopyer::LinkNodeToMerge(const NodePtr &node, int out_index, const NodePtr &merge) {
  auto &copyed_nodes = nodes_to_batch_nodes_[node.get()];
  if (copyed_nodes.size() != shapes_.size()) {
    GELOGE(INTERNAL_ERROR,
           "Failed to create merge node for node %s, the copyed nodes for it count %zu different with shape %zu",
           node->GetName().c_str(), copyed_nodes.size(), shapes_.size());
    return INTERNAL_ERROR;
  }
  for (size_t i = 0; i < copyed_nodes.size(); ++i) {
    auto src_node = copyed_nodes[i];
    if (src_node->GetAllOutDataAnchorsSize() == 0) {
      // if the node does not has any data output, we should create an const for it, like this:
      //        c           d
      //  node ---> const ---> merge
      auto const_name = src_node->GetName() + "_merge_const";
      GELOGI("The node %s on the batch branch edge does not have any data output, create a const %s for it",
             src_node->GetName().c_str(), const_name.c_str());
      auto const_node = InsertConst(const_name, graph_);
      GE_IF_BOOL_EXEC(const_node == nullptr,
                      GELOGE(OUT_OF_MEMORY, "Failed to create const for node %s to connect to a merge node",
                             src_node->GetName().c_str());
                      return OUT_OF_MEMORY);
      auto ret = GraphUtils::AddEdge(src_node->GetOutControlAnchor(), const_node->GetInControlAnchor());
      GE_IF_BOOL_EXEC(ret != GRAPH_SUCCESS, GELOGE(INTERNAL_ERROR, "Failed to add control edge from %s to %s",
                                                   src_node->GetName().c_str(), const_node->GetName().c_str());
                      return INTERNAL_ERROR);
      src_node = const_node;  // the merge now reads from the const instead
    }
    auto ret = GraphUtils::AddEdge(src_node->GetOutDataAnchor(out_index), merge->GetInDataAnchor(i));
    if (ret != GRAPH_SUCCESS) {
      GELOGE(INTERNAL_ERROR,
             "Failed to add edge between copyed node %s(%d) to inserted merge node %s(%zu), error-code %u",
             copyed_nodes[i]->GetName().c_str(), out_index, merge->GetName().c_str(), i, ret);
      return INTERNAL_ERROR;
    }
  }
  return SUCCESS;
}
  564. Status MultiBatchGraphCopyer::UpdateMaxShapeToData(const NodePtr &data) {
  565. auto data_shape = NodeUtils::GetOutputDesc(*data, kDataOutIndex).GetShape();
  566. auto data_name = data->GetName();
  567. if (IsAllDimsPositive(data_shape.GetDims())) {
  568. return SUCCESS;
  569. }
  570. size_t max_shape_index = 0;
  571. int64_t max_size = 0;
  572. for (size_t i = 0; i < shapes_.size(); ++i) {
  573. int64_t size = 1;
  574. for (auto dim : data_to_dynamic_info_.at(data_name).at(i)) {
  575. if (INT64_MAX / dim < size) {
  576. GELOGE(PARAM_INVALID, "The shape %s size overflow",
  577. formats::ShapeToString(data_to_dynamic_info_[data_name].at(i)).c_str());
  578. return PARAM_INVALID;
  579. }
  580. size *= dim;
  581. }
  582. if (size > max_size) {
  583. max_size = size;
  584. max_shape_index = i;
  585. }
  586. }
  587. // must not be error, the calc result has been checked in function InsertSwitchNForData
  588. (void)CalcShape(data_to_dynamic_info_.at(data_name).at(max_shape_index), data_shape);
  589. auto ret = NodeUtils::UpdateOutputShape(*data, kDataOutIndex, data_shape);
  590. if (ret != GRAPH_SUCCESS) {
  591. GELOGE(INTERNAL_ERROR, "Failed to update output shape for data %s", data->GetName().c_str());
  592. return INTERNAL_ERROR;
  593. }
  594. ret = NodeUtils::UpdateInputShape(*data, kDataInIndex, data_shape);
  595. if (ret != GRAPH_SUCCESS) {
  596. GELOGE(INTERNAL_ERROR, "Failed to update input shape for data %s", data->GetName().c_str());
  597. return INTERNAL_ERROR;
  598. }
  599. GELOGI("Update the data %s input/output shape to the max %s", data->GetName().c_str(),
  600. formats::ShapeToString(data_shape).c_str());
  601. return SUCCESS;
  602. }
  603. Status MultiBatchGraphCopyer::InsertSwitchNForData(const NodePtr &data) {
  604. auto data_shape = NodeUtils::GetOutputDesc(*data, kDataOutIndex).GetShape();
  605. auto data_name = data->GetName();
  606. (void)AttrUtils::SetListInt(data->GetOpDesc(), ATTR_MBATCH_ORIGIN_INPUT_DIMS, data_shape.GetDims());
  607. if (IsAllDimsPositive(data_shape.GetDims())) {
  608. GELOGI("The shape of data %s are positive(%s), skip the multi batch process", data->GetName().c_str(),
  609. data_shape.ToString().c_str());
  610. return SUCCESS;
  611. }
  612. auto switchn_desc = MakeShared<OpDesc>();
  613. if (switchn_desc == nullptr) {
  614. GELOGE(OUT_OF_MEMORY, "Failed to create switchn for data %s", data->GetName().c_str());
  615. return OUT_OF_MEMORY;
  616. }
  617. switchn_desc->SetName(data->GetName() + "_ascend_mbatch_switchn");
  618. switchn_desc->SetType(SWITCHN);
  619. GeTensorDesc tensor(NodeUtils::GetOutputDesc(*data, kDataOutIndex));
  620. if (switchn_desc->AddInputDesc("data", tensor) != GRAPH_SUCCESS) { // data
  621. return OUT_OF_MEMORY;
  622. }
  623. GeTensorDesc pred_tensor;
  624. if (switchn_desc->AddInputDesc("pred_value", pred_tensor) != GRAPH_SUCCESS) { // pred
  625. return OUT_OF_MEMORY;
  626. }
  627. std::vector<std::string> input_dims_str;
  628. for (size_t i = 0; i < shapes_.size(); ++i) {
  629. auto shape = data_shape;
  630. auto ret = CalcShape(data_to_dynamic_info_.at(data_name).at(i), shape);
  631. if (ret != SUCCESS) {
  632. GELOGE(ret, "Failed to calculate the batched shape for data node %s, the shapes may not match",
  633. data->GetName().c_str());
  634. return ret;
  635. }
  636. tensor.SetShape(shape);
  637. string input_str;
  638. int64_t tensor_size = 0;
  639. (void)TensorUtils::GetTensorSizeInBytes(tensor, tensor_size);
  640. input_str = TypeUtils::FormatToSerialString(tensor.GetFormat()) + ":" +
  641. TypeUtils::DataTypeToSerialString(tensor.GetDataType()) + ":" + data->GetName() + ":" +
  642. std::to_string(tensor_size) + ":" + std::to_string(tensor.GetShape().GetDimNum()) + ":" +
  643. formats::JoinToString(tensor.GetShape().GetDims());
  644. input_dims_str.emplace_back(input_str);
  645. if (!AttrUtils::SetListInt(tensor, ATTR_NAME_SWITCHN_PRED_VALUE, shapes_.at(i))) {
  646. GELOGE(INTERNAL_ERROR, "Failed to add attr value on output %zu tensor", i);
  647. return INTERNAL_ERROR;
  648. }
  649. (void)AttrUtils::SetListInt(tensor, ATTR_NAME_COMBINED_DYNAMIC_DIMS, shape.GetDims());
  650. if (switchn_desc->AddOutputDesc("output" + std::to_string(i), tensor) != GRAPH_SUCCESS) {
  651. GELOGE(GRAPH_FAILED, "Opdesc AddOutputDesc failed");
  652. return GRAPH_FAILED;
  653. }
  654. GELOGD("The SwitchN %s output index %zu, shape %s", switchn_desc->GetName().c_str(), i, shape.ToString().c_str());
  655. }
  656. (void)AttrUtils::SetListStr(data->GetOpDesc(), "_all_origin_gears_inputs", input_dims_str);
  657. if (!AttrUtils::SetListStr(switchn_desc, ATTR_USER_DESIGNEATE_SHAPE_ORDER, data_name_order_)) {
  658. GELOGE(INTERNAL_ERROR, "Failed to add user designate shape order attr on switchn node %s",
  659. switchn_desc->GetName().c_str());
  660. return INTERNAL_ERROR;
  661. }
  662. if (!AttrUtils::SetBool(switchn_desc, ATTR_INSERT_BY_MBATCH, true)) {
  663. GELOGE(INTERNAL_ERROR, "Failed to add insert attr on switchn node %s", switchn_desc->GetName().c_str());
  664. return INTERNAL_ERROR;
  665. }
  666. if (!AttrUtils::SetStr(data->GetOpDesc(), kMbatchSwitchnName, switchn_desc->GetName())) {
  667. GELOGE(INTERNAL_ERROR, "Failed to add switchn attr on data node %s", data->GetName().c_str());
  668. return INTERNAL_ERROR;
  669. }
  670. if (StampDynamicType(switchn_desc) != SUCCESS) {
  671. GELOGE(INTERNAL_ERROR, "Failed to add dynamic type attr on switchn node %s", switchn_desc->GetName().c_str());
  672. return INTERNAL_ERROR;
  673. }
  674. auto switchn = graph_->AddNode(switchn_desc);
  675. if (switchn == nullptr) {
  676. GELOGE(OUT_OF_MEMORY, "Failed to create switchn %s from desc", switchn_desc->GetName().c_str());
  677. return OUT_OF_MEMORY;
  678. }
  679. data_nodes_to_switchn_[data.get()] = switchn;
  680. return SUCCESS;
  681. }
  682. Status MultiBatchGraphCopyer::UpdateDataToDynamicInfo(const NodePtr &data) {
  683. auto data_desc = NodeUtils::GetOutputDesc(*data, kDataOutIndex);
  684. auto data_shape = data_desc.GetShape();
  685. auto data_format = data_desc.GetFormat();
  686. auto data_name = data->GetName();
  687. if (IsAllDimsPositive(data_shape.GetDims())) {
  688. return SUCCESS;
  689. }
  690. if (data_to_dynamic_info_.find(data_name) == data_to_dynamic_info_.end()) {
  691. auto data_shape_dims = data_shape.GetDims();
  692. auto dynamic_dims_num = std::count_if(data_shape_dims.begin(), data_shape_dims.end(),
  693. [&data_shape_dims](int64_t dim) { return dim < 0; });
  694. if (dynamic_type_ == DynamicType::kDynamicBatch) {
  695. if (dynamic_dims_num != kDynamicBatchDynamicDimsNum || data_shape.GetDim(0) != kDynmaicDims) {
  696. GELOGE(INTERNAL_ERROR, "data: %s shape:%s do not satisfy dynamic batch rule", data->GetName().c_str(),
  697. data_shape.ToString().c_str());
  698. return INTERNAL_ERROR;
  699. }
  700. } else if (dynamic_type_ == DynamicType::kDynamicImageSize) {
  701. int64_t height = 0;
  702. int64_t width = 0;
  703. if (data_format == FORMAT_NCHW) {
  704. height = data_shape.GetDim(NCHW_DIM_H);
  705. width = data_shape.GetDim(NCHW_DIM_W);
  706. } else if (data_format == FORMAT_NHWC) {
  707. height = data_shape.GetDim(NHWC_DIM_H);
  708. width = data_shape.GetDim(NHWC_DIM_W);
  709. }
  710. if (dynamic_dims_num != kDynamicImgSizeDynamciDimsNum || height != kDynmaicDims || width != kDynmaicDims) {
  711. GELOGE(INTERNAL_ERROR, "data: %s shape:%s do not satisfy dynamic image size rule", data->GetName().c_str(),
  712. data_shape.ToString().c_str());
  713. return INTERNAL_ERROR;
  714. }
  715. } else if (dynamic_type_ == DynamicType::kDynamicDims) {
  716. GELOGE(INTERNAL_ERROR, "data: %s shape:%s must be set int --input_shape", data->GetName().c_str(),
  717. data_shape.ToString().c_str());
  718. return INTERNAL_ERROR;
  719. }
  720. // all data has dynamic dims are not in atc parameter --input_shape
  721. if (data_to_dynamic_info_.empty()) {
  722. vector<pair<string, vector<int64_t>>> tmp_data_name_and_shape{std::make_pair(data_name, data_shape_dims)};
  723. auto ret = ParserDataToDynmaicInfo(shapes_, tmp_data_name_and_shape, data_to_dynamic_info_);
  724. if (ret != SUCCESS) {
  725. GELOGE(INTERNAL_ERROR, "parse data : %s dynamic gear info failed", data_name.c_str());
  726. return INTERNAL_ERROR;
  727. }
  728. }
  729. data_to_dynamic_info_[data_name] = data_to_dynamic_info_.begin()->second;
  730. }
  731. return SUCCESS;
  732. }
  733. Status MultiBatchGraphCopyer::InsertMergeForEdgeNode(const NodePtr &node) {
  734. for (auto &in_data_anchor : node->GetAllInDataAnchors()) {
  735. auto src_out_anchor = in_data_anchor->GetPeerOutAnchor();
  736. if (src_out_anchor == nullptr) {
  737. GELOGD("The node %s does not has input at index %d", node->GetName().c_str(), in_data_anchor->GetIdx());
  738. continue;
  739. }
  740. auto in_node = src_out_anchor->GetOwnerNode();
  741. if (!IsInBatchBranch(in_node)) {
  742. continue;
  743. }
  744. auto merge_node = InsertMergeNode(in_node, src_out_anchor->GetIdx());
  745. if (merge_node == nullptr) {
  746. return INTERNAL_ERROR;
  747. }
  748. }
  749. for (auto &in_node : node->GetInControlNodes()) {
  750. if (!IsInBatchBranch(in_node)) {
  751. continue;
  752. }
  753. auto merge_node = InsertMergeNode(in_node, -1);
  754. if (merge_node == nullptr) {
  755. return INTERNAL_ERROR;
  756. }
  757. }
  758. return SUCCESS;
  759. }
  760. Status MultiBatchGraphCopyer::CopyNodeInBatchBranch(const NodePtr &node) {
  761. auto &copyed_nodes = nodes_to_batch_nodes_[node.get()];
  762. for (size_t i = 0; i < shapes_.size(); ++i) {
  763. auto copyed_node = InsertCopyNode(node, i);
  764. if (copyed_node == nullptr) {
  765. GELOGE(INTERNAL_ERROR, "Failed to add node to graph when copy node %s", node->GetName().c_str());
  766. return INTERNAL_ERROR;
  767. }
  768. copyed_nodes.emplace_back(copyed_node);
  769. GELOGI("Copy node %s type %s for shape %s, new node name %s", node->GetName().c_str(), node->GetType().c_str(),
  770. formats::JoinToString(shapes_.at(i)).c_str(), copyed_node->GetName().c_str());
  771. }
  772. return SUCCESS;
  773. }
  774. Status MultiBatchGraphCopyer::LinkEdges() {
  775. Status ret;
  776. for (const auto &node : origin_all_nodes_) {
  777. if (data_nodes_to_switchn_.count(node.get()) > 0) {
  778. ret = LinkDataToSwitchN(node);
  779. if (ret != SUCCESS) {
  780. return ret;
  781. }
  782. }
  783. if (nodes_to_merge_nodes_.count(node.get()) > 0) {
  784. ret = LinkToMerge(node);
  785. if (ret != SUCCESS) {
  786. return ret;
  787. }
  788. }
  789. if (nodes_to_batch_nodes_.count(node.get()) > 0) {
  790. ret = LinkToNodeInBranch(node);
  791. } else {
  792. ret = LinkToNodeOutBranch(node);
  793. }
  794. if (ret != SUCCESS) {
  795. return ret;
  796. }
  797. }
  798. return SUCCESS;
  799. }
// Connect an original data node to its inserted SwitchN:
//   - the global shape data node feeds the SwitchN pred input (batch selector)
//   - the data node's output feeds the SwitchN data input
// NOTE(review): GE_IF_BOOL_EXEC appears to expand to "if (cond) { actions }"
// so the trailing "return INTERNAL_ERROR" aborts on link failure — confirm
// against the macro definition.
Status MultiBatchGraphCopyer::LinkDataToSwitchN(const NodePtr &data) {
  auto switchn = data_nodes_to_switchn_[data.get()];
  // shape data -> switchn pred input
  auto ret =
      GraphUtils::AddEdge(shape_data_->GetOutDataAnchor(kDataOutIndex), switchn->GetInDataAnchor(kSwitchNPredIndex));
  GE_IF_BOOL_EXEC(ret != GRAPH_SUCCESS, GELOGE(INTERNAL_ERROR, "Failed to link shape data %s to switchn %s",
                                               shape_data_->GetName().c_str(), switchn->GetName().c_str());
                  return INTERNAL_ERROR);
  // data -> switchn data input
  ret = GraphUtils::AddEdge(data->GetOutDataAnchor(kDataOutIndex), switchn->GetInDataAnchor(kSwitchNDataIndex));
  GE_IF_BOOL_EXEC(ret != GRAPH_SUCCESS, GELOGE(INTERNAL_ERROR, "Failed to link data %s to switchn %s",
                                               data->GetName().c_str(), switchn->GetName().c_str());
                  return INTERNAL_ERROR);
  return SUCCESS;
}
  813. Status MultiBatchGraphCopyer::LinkToMerge(const NodePtr &node) {
  814. auto &merge_nodes = nodes_to_merge_nodes_[node.get()];
  815. for (size_t i = 0; i < merge_nodes.size(); ++i) {
  816. auto merge_node = merge_nodes[i];
  817. if (merge_node == nullptr) {
  818. continue;
  819. }
  820. if (nodes_to_batch_nodes_.count(node.get()) > 0) {
  821. auto ret = LinkNodeToMerge(node, i, merge_node);
  822. if (ret != SUCCESS) {
  823. return ret;
  824. }
  825. continue;
  826. }
  827. if (data_nodes_to_switchn_.count(node.get()) > 0) {
  828. auto ret = LinkDataToMerge(node, merge_node);
  829. if (ret != SUCCESS) {
  830. return ret;
  831. }
  832. continue;
  833. }
  834. GELOGE(INTERNAL_ERROR, "The merge node %s is created, index %zu, but can not find the src node",
  835. merge_node->GetName().c_str(), i);
  836. return INTERNAL_ERROR;
  837. }
  838. return SUCCESS;
  839. }
  840. Status MultiBatchGraphCopyer::LinkToNodeInBranch(const NodePtr &node) {
  841. auto &branch_nodes = nodes_to_batch_nodes_[node.get()];
  842. for (size_t i = 0; i < branch_nodes.size(); ++i) {
  843. auto ret = CopyInDataEdges(node, i, branch_nodes[i]);
  844. if (ret != SUCCESS) {
  845. return ret;
  846. }
  847. ret = CopyInControlEdges(node, i, branch_nodes[i]);
  848. if (ret != SUCCESS) {
  849. return ret;
  850. }
  851. }
  852. return SUCCESS;
  853. }
  854. Status MultiBatchGraphCopyer::LinkToNodeOutBranch(const NodePtr &node) {
  855. for (auto &in_data_anchor : node->GetAllInDataAnchors()) {
  856. auto src_out_anchor = in_data_anchor->GetPeerOutAnchor();
  857. if (src_out_anchor == nullptr) {
  858. GELOGD("The node %s does not has input at index %d", node->GetName().c_str(), in_data_anchor->GetIdx());
  859. continue;
  860. }
  861. auto in_node = src_out_anchor->GetOwnerNode();
  862. if (!IsInBatchBranch(in_node)) {
  863. continue;
  864. }
  865. auto iter = nodes_to_merge_nodes_.find(in_node.get());
  866. if (iter == nodes_to_merge_nodes_.end()) {
  867. GELOGE(INTERNAL_ERROR, "Failed to link IO data edge from %s(%d) to %s(%d), no merge node found",
  868. in_node->GetName().c_str(), src_out_anchor->GetIdx(), node->GetName().c_str(), in_data_anchor->GetIdx());
  869. return INTERNAL_ERROR;
  870. }
  871. auto merge_node = iter->second[src_out_anchor->GetIdx()];
  872. if (merge_node == nullptr) {
  873. GELOGE(INTERNAL_ERROR, "Failed to link IO data edge from %s(%d) to %s(%d), no merge node found",
  874. in_node->GetName().c_str(), src_out_anchor->GetIdx(), node->GetName().c_str(), in_data_anchor->GetIdx());
  875. return INTERNAL_ERROR;
  876. }
  877. auto ret = src_out_anchor->Unlink(in_data_anchor);
  878. if (ret != GRAPH_SUCCESS) {
  879. GELOGE(INTERNAL_ERROR, "Failed to unlink the control edge from %s(%d) to %s(%d)", in_node->GetName().c_str(),
  880. src_out_anchor->GetIdx(), node->GetName().c_str(), in_data_anchor->GetIdx());
  881. return INTERNAL_ERROR;
  882. }
  883. ret = GraphUtils::AddEdge(merge_node->GetOutDataAnchor(kMergeDataOutIndex), in_data_anchor);
  884. if (ret != GRAPH_SUCCESS) {
  885. GELOGE(INTERNAL_ERROR, "Failed to add data edge from %s(%d) to %s(%d)", merge_node->GetName().c_str(),
  886. src_out_anchor->GetIdx(), node->GetName().c_str(), in_data_anchor->GetIdx());
  887. return INTERNAL_ERROR;
  888. }
  889. GELOGI("Link data edge from merge %s(from %s(%d)) to %s(%d)", merge_node->GetName().c_str(),
  890. in_node->GetName().c_str(), src_out_anchor->GetIdx(), node->GetName().c_str(), in_data_anchor->GetIdx());
  891. }
  892. for (auto &in_node : node->GetInControlNodes()) {
  893. if (!IsInBatchBranch(in_node)) {
  894. continue;
  895. }
  896. auto iter = nodes_to_merge_nodes_.find(in_node.get());
  897. if (iter == nodes_to_merge_nodes_.end()) {
  898. GELOGE(INTERNAL_ERROR, "Failed to link IO control edge from %s to %s, no merge node found",
  899. in_node->GetName().c_str(), node->GetName().c_str());
  900. return INTERNAL_ERROR;
  901. }
  902. auto merge_node = iter->second[0];
  903. if (merge_node == nullptr) {
  904. GELOGE(INTERNAL_ERROR, "Failed to link IO control edge from %s to %s, no merge node found",
  905. in_node->GetName().c_str(), node->GetName().c_str());
  906. return INTERNAL_ERROR;
  907. }
  908. GE_IF_BOOL_EXEC(in_node->GetOutControlAnchor() == nullptr,
  909. GELOGE(INTERNAL_ERROR, "Innode outputControlAnchor is null");
  910. return INTERNAL_ERROR);
  911. auto ret = in_node->GetOutControlAnchor()->Unlink(node->GetInControlAnchor());
  912. GE_IF_BOOL_EXEC(ret != GRAPH_SUCCESS, GELOGE(INTERNAL_ERROR, "Failed to unlink the control edge from %s to %s",
  913. in_node->GetName().c_str(), node->GetName().c_str());
  914. return INTERNAL_ERROR);
  915. ret = GraphUtils::AddEdge(merge_node->GetOutControlAnchor(), node->GetInControlAnchor());
  916. GE_IF_BOOL_EXEC(ret != GRAPH_SUCCESS, GELOGE(INTERNAL_ERROR, "Failed to add control edge from %s to %s",
  917. merge_node->GetName().c_str(), node->GetName().c_str());
  918. return INTERNAL_ERROR);
  919. GELOGI("Link control edge from merge %s(from %s) to %s", merge_node->GetName().c_str(), in_node->GetName().c_str(),
  920. node->GetName().c_str());
  921. }
  922. return SUCCESS;
  923. }
  924. Status MultiBatchGraphCopyer::InsertIdentityAfterSwitchN() {
  925. for (auto &node : graph_->GetAllNodes()) {
  926. if (node->GetType() != SWITCHN) {
  927. continue;
  928. }
  929. auto switchn_desc = node->GetOpDesc();
  930. GE_CHECK_NOTNULL(switchn_desc);
  931. size_t i = 0;
  932. for (auto &out_data_anchor : node->GetAllOutDataAnchors()) {
  933. for (auto &in_data_anchor : out_data_anchor->GetPeerInDataAnchors()) {
  934. auto out_node = in_data_anchor->GetOwnerNode();
  935. auto op_desc = out_node->GetOpDesc();
  936. GE_CHECK_NOTNULL(op_desc);
  937. if ((out_node->GetType() == MERGE) && (op_desc->HasAttr(ATTR_INSERT_BY_MBATCH))) {
  938. GELOGD("No need to insert identity between %s and %s.", node->GetName().c_str(), out_node->GetName().c_str());
  939. continue;
  940. }
  941. auto identity_desc = MakeShared<OpDesc>(node->GetName() + "_identity_" + std::to_string(i), IDENTITY);
  942. GE_CHECK_NOTNULL(identity_desc);
  943. string batch_label;
  944. if (AttrUtils::GetStr(op_desc, ATTR_NAME_BATCH_LABEL, batch_label)) {
  945. if (!AttrUtils::SetStr(identity_desc, ATTR_NAME_BATCH_LABEL, batch_label)) {
  946. GELOGE(FAILED, "Set attr ATTR_NAME_BATCH_LABEL failed, node:%s.", identity_desc->GetName().c_str());
  947. return FAILED;
  948. }
  949. }
  950. auto data_desc = switchn_desc->GetOutputDesc(i);
  951. i++;
  952. GE_CHK_STATUS_RET(identity_desc->AddInputDesc("x", data_desc));
  953. GE_CHK_STATUS_RET(identity_desc->AddOutputDesc("y", data_desc));
  954. auto identity_node = graph_->AddNode(identity_desc);
  955. GE_CHECK_NOTNULL(identity_node);
  956. GE_CHK_STATUS_RET(out_data_anchor->LinkTo(identity_node->GetInDataAnchor(0)));
  957. GE_CHECK_NOTNULL(identity_node->GetOutControlAnchor());
  958. GE_CHK_STATUS_RET(identity_node->GetOutControlAnchor()->LinkTo(out_node->GetInControlAnchor()));
  959. }
  960. }
  961. }
  962. return SUCCESS;
  963. }
  964. Status ProcessMultiBatch(ComputeGraphPtr &graph) {
  965. std::vector<std::vector<int64_t>> shapes;
  966. if (!InitDynamicParams(shapes)) {
  967. GELOGD("There is no multi-batch options, no need to process multi-batch copy");
  968. return SUCCESS;
  969. }
  970. map<string, vector<vector<int64_t>>> data_to_dynamic_info;
  971. // parser data dynamic info from atc parameter --input_shape
  972. if (ParserDataToDynmaicInfo(shapes, GetLocalOmgContext().user_input_dims, data_to_dynamic_info) != SUCCESS) {
  973. GELOGE(PARAM_INVALID, "Parse each data's own dynamic info failed");
  974. return PARAM_INVALID;
  975. }
  976. DynamicType dynamic_type = DynamicType::kDynamicUnknown;
  977. if (!GetLocalOmgContext().dynamic_batch_size.empty()) {
  978. dynamic_type = DynamicType::kDynamicBatch;
  979. } else if (!GetLocalOmgContext().dynamic_image_size.empty()) {
  980. dynamic_type = DynamicType::kDynamicImageSize;
  981. ;
  982. } else if (!GetLocalOmgContext().dynamic_dims.empty()) {
  983. dynamic_type = DynamicType::kDynamicDims;
  984. }
  985. std::vector<std::pair<std::string, std::vector<int64_t>>> user_designate_shape;
  986. user_designate_shape = GetLocalOmgContext().user_input_dims;
  987. GELOGI("Begin to copy graph for multi-batch");
  988. multibatch::MultiBatchGraphCopyer copyer(graph);
  989. for (auto &shape : shapes) {
  990. copyer.AddShape(shape);
  991. }
  992. copyer.SetDynamicType(dynamic_type);
  993. copyer.SetUserDesignateShape(user_designate_shape);
  994. copyer.SetDataToDynamicInfo(data_to_dynamic_info);
  995. return copyer.CopyGraph();
  996. }
  997. // +-----------+
  998. // | Data | +-----------+ +-----------+ +-----------+
  999. // +-----------+ | Data | ----> | SoftmaxV2 | ----> | NetOutput |
  1000. // \ /. +-----------+ +-----------+ +-----------+
  1001. // \ /.
  1002. // +-----------+ +-----------+ /. +-----------+ +-----------+ +-----------+
  1003. // | Data | ----> | Case | S--- | Data | ----> | SoftmaxV2 | ----> | NetOutput |
  1004. // +-----------+ +-----------+ \. +-----------+ +-----------+ +-----------+
  1005. // \ \.
  1006. // \ \. +-----------+ +-----------+ +-----------+
  1007. // +-----------+ | Data | ----> | SoftmaxV2 | ----> | NetOutput |
  1008. // | NetOutput | +-----------+ +-----------+ +-----------+
  1009. // +-----------+
  1010. // +-----------+ /
  1011. // | Data | --------------->/
  1012. // +-----------+
  1013. void GetDynamicShapeByGraph(const ComputeGraphPtr &graph, const NodePtr &node, set<size_t> &dynamic_output_index,
  1014. vector<string> &dynamic_output_dims) {
  1015. GELOGD("Try get dynamic shape info, Graph: %s, Node: %s", graph->GetName().c_str(), node->GetName().c_str());
  1016. const auto &func_desc = node->GetOpDesc();
  1017. if (!func_desc->HasAttr(ATTR_NAME_BATCH_NUM)) {
  1018. GELOGD("Graph: %s Not multi-batch, Node: %s", graph->GetName().c_str(), node->GetName().c_str());
  1019. return;
  1020. }
  1021. const auto &dynamic_branch_names = func_desc->GetSubgraphInstanceNames();
  1022. for (size_t i = 0; i < func_desc->GetOutputsSize(); ++i) {
  1023. for (size_t j = 0; j < dynamic_branch_names.size(); ++j) {
  1024. const auto &subgraph = graph->GetSubgraph(dynamic_branch_names[j]);
  1025. if (subgraph == nullptr) {
  1026. GELOGE(GE_GRAPH_EMPTY_SUBGRAPH, "Subgraph not found, name: %s", dynamic_branch_names[j].c_str());
  1027. dynamic_output_dims.clear();
  1028. return;
  1029. }
  1030. const auto &out_node = subgraph->FindFirstNodeMatchType(NETOUTPUT);
  1031. if (out_node == nullptr) {
  1032. GELOGE(GE_GRAPH_GRAPH_NODE_NULL, "NetOutput not found, name: %s", dynamic_branch_names[j].c_str());
  1033. dynamic_output_dims.clear();
  1034. return;
  1035. }
  1036. GELOGI("Find the subgraph Output node %s and the index is %zu", out_node->GetName().c_str(), i);
  1037. const auto &out_desc = out_node->GetOpDesc();
  1038. if (out_desc == nullptr || out_desc->GetInputsSize() <= i) {
  1039. GELOGE(GE_GRAPH_GRAPH_NODE_NULL, "Get Input desc failed, name: %s, index: %zu", out_node->GetName().c_str(), i);
  1040. dynamic_output_dims.clear();
  1041. return;
  1042. }
  1043. const auto &input_tensor = out_desc->GetInputDesc(i);
  1044. const auto &shape_msg = input_tensor.GetShape().ToString();
  1045. string output_shape = std::to_string(j) + "," + std::to_string(i) + "," + shape_msg;
  1046. GELOGI("The shape msg in dynamic batch is %s", output_shape.c_str());
  1047. dynamic_output_dims.emplace_back(output_shape);
  1048. uint32_t parent_index = 0;
  1049. (void)AttrUtils::GetInt(input_tensor, ATTR_NAME_PARENT_NODE_INDEX, parent_index);
  1050. dynamic_output_index.insert(parent_index);
  1051. }
  1052. }
  1053. }
  1054. // +-----------+ +-----------+ i = 0
  1055. // +----> | SoftmaxV2 | ----> |MemcpyAsync| ----> \.
  1056. // / +-----------+ +-----------+ \.
  1057. // / \.
  1058. // +-----------+ +-----------+ +-----------+ +-----------+ i = 1 +-----------+
  1059. // | Data | ----> | SwitchN | ----> | SoftmaxV2 | ----> |MemcpyAsync| ----> | Merge |
  1060. // +-----------+ +-----------+ +-----------+ +-----------+ +-----------+
  1061. // \ / \. j = 0
  1062. // \ +-----------+ +-----------+ i = 2 / \.
  1063. // +----> | SoftmaxV2 | ----> |MemcpyAsync| ----> / +-----------+
  1064. // +-----------+ +-----------+ | NetOutput |
  1065. // +-----------+
  1066. // +-----------+ /.
  1067. // | Data | --------------------------------------------------------------------------->/. j = 1
  1068. // +-----------+
  1069. void GetDynamicShapeByMerge(const ComputeGraphPtr &graph, const NodePtr &node, set<size_t> &dynamic_output_index,
  1070. vector<string> &dynamic_output_dims) {
  1071. GELOGD("Try get dynamic shape info, Graph: %s, Node: %s", graph->GetName().c_str(), node->GetName().c_str());
  1072. const auto &netoutput_desc = node->GetOpDesc();
  1073. const auto &inputnode_to_netoutput = node->GetInAllNodes();
  1074. for (size_t i = 0; i < inputnode_to_netoutput.size(); ++i) {
  1075. bool insert_by_mbatch = false;
  1076. (void)AttrUtils::GetBool(inputnode_to_netoutput.at(i)->GetOpDesc(), ATTR_INSERT_BY_MBATCH, insert_by_mbatch);
  1077. if (inputnode_to_netoutput.at(i)->GetType() == MERGE && insert_by_mbatch) {
  1078. GELOGI("Find the merge node %s with mbatch attr and the index is %zu",
  1079. inputnode_to_netoutput.at(i)->GetName().c_str(), i);
  1080. dynamic_output_index.insert(i);
  1081. for (size_t j = 0; j < inputnode_to_netoutput.at(i)->GetInNodes().size(); ++j) {
  1082. auto input_desc = inputnode_to_netoutput.at(i)->GetOpDesc();
  1083. auto input_tensor_desc = input_desc->GetInputDesc(j);
  1084. auto shape_msg = input_tensor_desc.GetShape().ToString();
  1085. string output_shape = std::to_string(j) + "," + std::to_string(i) + "," + shape_msg;
  1086. GELOGI("The shape msg in dynamic batch is %s", output_shape.c_str());
  1087. dynamic_output_dims.emplace_back(output_shape);
  1088. }
  1089. }
  1090. }
  1091. }
  1092. // Connect NetOutput directly: DTS2020070612498
  1093. void GetDirectOutputShape(const ComputeGraphPtr &graph, const NodePtr &node, const set<size_t> &dynamic_output_index,
  1094. vector<string> &dynamic_output_dims) {
  1095. GELOGD("Try get directly shape info, Graph: %s, Node: %s", graph->GetName().c_str(), node->GetName().c_str());
  1096. const auto &netoutput_desc = node->GetOpDesc();
  1097. const auto &inputnode_to_netoutput = node->GetInAllNodes();
  1098. for (size_t i = 0; i < inputnode_to_netoutput.size(); ++i) {
  1099. if (dynamic_output_index.count(i) > 0) {
  1100. continue;
  1101. }
  1102. auto tensor_desc = netoutput_desc->GetInputDesc(i);
  1103. auto shape = tensor_desc.GetShape().ToString();
  1104. string static_output_shape = std::to_string(kStaticOutput) + "," + std::to_string(i) + "," + shape;
  1105. GELOGI("The static output shape msg is %s", static_output_shape.c_str());
  1106. dynamic_output_dims.emplace_back(static_output_shape);
  1107. }
  1108. }
  1109. Status GetDynamicOutputShape(ComputeGraphPtr &graph) {
  1110. GE_CHECK_NOTNULL(graph);
  1111. GELOGI("Start to get output dynamic batch shape message");
  1112. NodePtr net_output;
  1113. set<size_t> dynamic_output_index;
  1114. vector<string> dynamic_output_dims;
  1115. for (auto &node : graph->GetDirectNode()) {
  1116. if (node->GetType() == NETOUTPUT) {
  1117. net_output = node;
  1118. GetDynamicShapeByMerge(graph, node, dynamic_output_index, dynamic_output_dims);
  1119. } else if (node->GetType() == CASE) {
  1120. GetDynamicShapeByGraph(graph, node, dynamic_output_index, dynamic_output_dims);
  1121. }
  1122. }
  1123. if ((net_output != nullptr) && !dynamic_output_dims.empty()) {
  1124. GetDirectOutputShape(graph, net_output, dynamic_output_index, dynamic_output_dims);
  1125. if (!AttrUtils::SetListStr(net_output->GetOpDesc(), ATTR_NAME_DYNAMIC_OUTPUT_DIMS, dynamic_output_dims)) {
  1126. GELOGE(FAILED, "Set dynamic output dims attr failed");
  1127. return FAILED;
  1128. }
  1129. }
  1130. return SUCCESS;
  1131. }
  1132. } // namespace multibatch
  1133. } // namespace ge

图引擎模块(GE)是MindSpore的一个子模块,其代码由C++实现,位于前端模块ME和底层硬件之间,起到承接作用。图引擎模块以ME下发的图作为输入,然后进行一系列的深度图优化操作,最后输出一张可以在底层硬件上高效运行的图。GE针对昇腾AI处理器的硬件结构特点,做了特定的优化工作,以此来充分发挥出昇腾AI处理器的强大算力。在进行模型训练/推理时,GE会被自动调用而用户并不感知。GE主要由GE API和GE Core两部分组成,详细的架构图如下所示