test_tensorflow_parser.cc

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <gtest/gtest.h>
// Standard headers used by the file helpers below (fopen/fseek, realpath, memcpy, PATH_MAX).
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <climits>
#define protected public
#define private public
#include "parser/common/op_parser_factory.h"
#include "parser/tensorflow/tensorflow_parser.h"
#include "graph/operator_reg.h"
#include "register/op_registry.h"
#include "external/register/register.h"
#include "parser/common/register_tbe.h"
#include "st/parser_st_utils.h"
#include "tests/depends/ops_stub/ops_stub.h"
#include "parser/common/acl_graph_parser_util.h"
#include "metadef/third_party/graphengine/inc/external/ge/ge_api_types.h"
#include "omg/parser/parser_factory.h"
#include "common/pre_checker.h"
#include "common/util.h"
#include "external/parser/tensorflow_parser.h"
#include "parser/tensorflow/tensorflow_constant_parser.h"
#include "common/types.h"
#include "parser/common/op_def/variable_op.h"
#include "parser/tensorflow/tensorflow_ref_switch_parser.h"
#include "parser/tensorflow/tensorflow_fusion_op_parser.h"
#include "parser/tensorflow/tensorflow_auto_mapping_parser_adapter.h"
#include "parser/common/op_def/arg_op.h"
#include "parser/tensorflow/tensorflow_fusion_custom_parser_adapter.h"
#include "parser/tensorflow/tensorflow_reshape_parser.h"
#include "parser/tensorflow/tensorflow_custom_parser_adapter.h"
#include "parser/tensorflow/tensorflow_squeeze_parser.h"
#include "parser/tensorflow/graph_functiondef.h"
#include "parser/tensorflow/graph_optimizer.h"
#include "cce/dnn_base_def.hpp"
#include "parser/tensorflow/scope/scope_pass_manager.h"
#include "parser/tensorflow/tensorflow_util.h"
#include "compute_graph_impl.h"
#include "parser/tensorflow/tensorflow_enter_parser.h"
#include "parser/common/op_def/ir_pb_converter.h"
#include "parser/common/tuple.h"
#include "common/op_def/frameworkop_op.h"
#include "common/op_def/shape_n_op.h"
#include "common/op_def/var_is_initialized_op_op.h"
#include "common/op_def/fill_op.h"
#include "common/convert/pb2json.h"
#include "common/convert/message2operator.h"
#include "parser/common/proto_file_parser.h"
#include "parser/common/pre_checker.h"
#include "parser/common/tbe_plugin_loader.h"
#include "parser/common/data_op_parser.h"
#include "parser/common/model_saver.h"
#include "framework/omg/parser/parser_api.h"
#include "parser/common/parser_fp16_t.h"
#include "parser/common/op_parser_factory.h"
#include "parser/common/prototype_pass_manager.h"
#include "parser/common/register_tbe.h"
#include "parser/common/pass_manager.h"
#include "parser/tensorflow/graph_optimizer.h"
#include "metadef/inc/register/scope/scope_pass_registry_impl.h"
#include "register/scope/scope_fusion_pass_register.h"
#undef protected
#undef private
using namespace std;
using namespace domi::tensorflow;
using namespace domi;
using namespace cce;
using namespace testing;
using namespace google::protobuf;

static const string GRAPH_DEFAULT_NAME = "default";

namespace ge {
class STestTensorflowParser : public testing::Test {
 protected:
  void SetUp() {
    ParerSTestsUtils::ClearParserInnerCtx();
  }
  void TearDown() {}

 public:
  void RegisterCustomOp();
};
class TestOperator : public ParserOperator {
 public:
  TestOperator() : ParserOperator("test") {}
  ~TestOperator() {}
};

class ErrorGraphPass : public GraphPass {
  Status Run(ComputeGraphPtr graph) {
    return domi::FAILED;
  }
};

class ScopeTestPass : public ScopeBasePass {
 protected:
  vector<ScopeFusionPatterns> DefinePatterns() {
    vector<ScopeFusionPatterns> patterns_list;
    return patterns_list;
  };
  string PassName() {
    return "test";
  };
  Status LastMatchScopesAndOPs(shared_ptr<ScopeGraph> &scope_graph, vector<ScopesResult> &results) {
    return domi::SUCCESS;
  };
  void GenerateFusionResult(const vector<Scope *> &scopes, FusionScopesResult *fusion_rlt) {
    return;
  };
};

static Status ParseParams(const google::protobuf::Message* op_src, ge::Operator& op_dest) {
  return SUCCESS;
}

static Status ParseParamByOpFunc(const ge::Operator &op_src, ge::Operator& op_dest) {
  return SUCCESS;
}

void STestTensorflowParser::RegisterCustomOp() {
  REGISTER_CUSTOM_OP("Add")
      .FrameworkType(domi::TENSORFLOW)
      .OriginOpType("Add")
      .ParseParamsFn(ParseParams);
  std::vector<OpRegistrationData> reg_datas = domi::OpRegistry::Instance()->registrationDatas;
  for (auto reg_data : reg_datas) {
    OpRegistrationTbe::Instance()->Finalize(reg_data);
    domi::OpRegistry::Instance()->Register(reg_data);
  }
  domi::OpRegistry::Instance()->registrationDatas.clear();
}
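
// The helpers in the anonymous namespace below build TensorFlow NodeDef/GraphDef
// fixtures and GE compute graphs that the system tests feed into the parser.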
namespace {
NodeDef* AddNode(GraphDef& graph, string type, string name) {
  NodeDef* nodeDef = graph.add_node();
  nodeDef->set_op(type);
  nodeDef->set_name(name);

  tensorflow::OpDef op_def;
  string op_def_string;
  op_def.SerializeToString(&op_def_string);

  tensorflow::AttrValue value;
  value.set_s(op_def_string);
  nodeDef->mutable_attr()->insert({"op_def", value});
  return nodeDef;
}

void AddInput(NodeDef* src, NodeDef* dst, int srcIndex) {
  if (srcIndex == -1) {
    dst->add_input("^" + src->name());
  } else {
    if (srcIndex == 0) {
      dst->add_input(src->name());
    } else {
      dst->add_input(src->name() + ":" + std::to_string(srcIndex));
    }
    {
      auto input = (*dst->mutable_attr())[ge::ATTR_NAME_INPUT_TENSOR_DESC].mutable_list()->add_func();
      tensorflow::AttrValue val1;
      val1.set_i(0);
      (*input->mutable_attr())["serialize_format"] = val1;
      tensorflow::AttrValue val2;
      val2.set_i(tensorflow::DT_FLOAT);
      (*input->mutable_attr())["serialize_datatype"] = val2;
      tensorflow::AttrValue val3;
      val3.mutable_list()->add_i(10);
      (*input->mutable_attr())["serialize_shape"] = val3;
    }
    {
      auto output = (*src->mutable_attr())[ge::ATTR_NAME_OUTPUT_TENSOR_DESC].mutable_list()->add_func();
      tensorflow::AttrValue val1;
      val1.set_i(0);
      (*output->mutable_attr())["serialize_format"] = val1;
      tensorflow::AttrValue val2;
      val2.set_i(tensorflow::DT_FLOAT);
      (*output->mutable_attr())["serialize_datatype"] = val2;
      tensorflow::AttrValue val3;
      val3.mutable_list()->add_i(10);
      (*output->mutable_attr())["serialize_shape"] = val3;
    }
  }
}
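
// Builds a Const NodeDef carrying T/dtype/output attributes and a 4x6 DT_INT32
// tensor whose content is filled from a temporary float buffer.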
NodeDef *initNodeDef() {
  NodeDef *nodeDef = new NodeDef();
  nodeDef->set_op("Const");
  ::google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = nodeDef->mutable_attr();

  // Set the T attribute
  domi::tensorflow::AttrValue t_attr_value;
  t_attr_value.set_type(domi::tensorflow::DT_INT32);
  (*node_attr_map)[TENSORFLOW_ATTR_T] = t_attr_value;

  domi::tensorflow::AttrValue dtype_attr_value;
  dtype_attr_value.set_type(domi::tensorflow::DT_INT32);
  (*node_attr_map)[TENSORFLOW_ATTR_DTYPE] = dtype_attr_value;

  // Set the output attribute
  domi::tensorflow::AttrValue outputs_attr_value;
  ::tensorflow::AttrValue_ListValue *list = outputs_attr_value.mutable_list();
  list->add_s("MatMul");
  (*node_attr_map)[TENSORFLOW_ATTR_OUTPUT_OP] = outputs_attr_value;

  // Set the tensor attribute
  domi::tensorflow::AttrValue value_attr_value;
  tensorflow::TensorProto *tensor = value_attr_value.mutable_tensor();
  tensorflow::TensorShapeProto *tensor_shape = tensor->mutable_tensor_shape();
  tensor_shape->clear_dim();
  tensor_shape->add_dim()->set_size(4);
  tensor_shape->add_dim()->set_size(6);
  tensor->set_dtype(domi::tensorflow::DT_INT32);

  float *addr = new float[24];
  for (int32_t i = 0; i < 24; i++) {
    *(addr + i) = 1.0 + i;
  }
  tensor->set_tensor_content((void *)addr, 24 * sizeof(float));
  (*node_attr_map)[TENSORFLOW_ATTR_VALUE] = value_attr_value;
  delete[] addr;
  return nodeDef;
}
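
// initOpNodeDef_VariableV2 and initOpNodeDef_TemporaryVariable create variable-style
// NodeDefs with dtype/shape/data_format/padding attributes plus a serialized output
// tensor descriptor, matching what the variable op parsers expect.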
NodeDef *initOpNodeDef_VariableV2() {
  NodeDef *nodeDef = new NodeDef();
  nodeDef->set_op("VariableV2");
  google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = nodeDef->mutable_attr();

  // Set the format attribute
  domi::tensorflow::AttrValue format_attr_value;
  format_attr_value.set_s("_FZ");
  (*node_attr_map)[VAR_ATTR_FORMAT] = format_attr_value;

  domi::tensorflow::AttrValue type_attr;
  type_attr.set_type(domi::tensorflow::DT_FLOAT);
  (*node_attr_map)[VAR_ATTR_DTYPE] = type_attr;

  domi::tensorflow::AttrValue container_attr_value;
  container_attr_value.set_s("container");
  (*node_attr_map)[VAR_ATTR_CONTAINER] = container_attr_value;

  domi::tensorflow::AttrValue shard_name_attr_value;
  shard_name_attr_value.set_s("shard_name");
  (*node_attr_map)[VAR_ATTR_SHARED_NAME] = shard_name_attr_value;

  domi::tensorflow::AttrValue shape_attr_value;
  shape_attr_value.mutable_shape()->add_dim()->set_size(1);
  shape_attr_value.mutable_shape()->add_dim()->set_size(2);
  shape_attr_value.mutable_shape()->add_dim()->set_size(3);
  shape_attr_value.mutable_shape()->add_dim()->set_size(4);
  (*node_attr_map)[ge::VAR_ATTR_SHAPE] = shape_attr_value;

  domi::tensorflow::AttrValue shape;
  shape.mutable_list()->add_i((int64)32);
  shape.mutable_list()->add_i((int64)32);
  shape.mutable_list()->add_i((int64)14);
  shape.mutable_list()->add_i((int64)14);

  // Set the data_format attribute
  domi::tensorflow::AttrValue df_attr_value;
  domi::tensorflow::AttrValue df_attr_value2;
  df_attr_value2.set_s(TENSORFLOWF_TENSOR_NHWC);
  df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  (*node_attr_map)[TENSORFLOW_ATTR_DATA_FORMAT] = df_attr_value2;

  // Set the padding attribute
  domi::tensorflow::AttrValue pad_attr_value;
  domi::tensorflow::AttrValue pad_attr_value2;
  pad_attr_value2.set_s(TENSORFLOWF_OP_PADDING_SAME);
  (*node_attr_map)[TENSORFLOW_ATTR_PADDING] = pad_attr_value2;
  pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);

  domi::tensorflow::NameAttrList name_attr_list;
  name_attr_list.set_name(std::to_string(0));
  name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  domi::tensorflow::AttrValue output_tensor_descs;
  *(output_tensor_descs.mutable_list()->add_func()) = name_attr_list;
  nodeDef->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, output_tensor_descs});
  return nodeDef;
}

NodeDef *initOpNodeDef_TemporaryVariable() {
  NodeDef *nodeDef = new NodeDef();
  nodeDef->set_op("TemporaryVariable");
  google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = nodeDef->mutable_attr();

  // Set the dtype attribute
  domi::tensorflow::AttrValue type_attr;
  type_attr.set_type(domi::tensorflow::DT_FLOAT);
  (*node_attr_map)[VAR_ATTR_DTYPE] = type_attr;

  // Set the var_name attribute
  domi::tensorflow::AttrValue var_name_attr_value;
  var_name_attr_value.set_s("temporary_variable_name");
  (*node_attr_map)[ge::VAR_ATTR_NAME] = var_name_attr_value;

  // Set the shape attribute
  domi::tensorflow::AttrValue shape_attr_value;
  shape_attr_value.mutable_shape()->add_dim()->set_size(1);
  shape_attr_value.mutable_shape()->add_dim()->set_size(2);
  shape_attr_value.mutable_shape()->add_dim()->set_size(3);
  shape_attr_value.mutable_shape()->add_dim()->set_size(4);
  (*node_attr_map)[ge::VAR_ATTR_SHAPE] = shape_attr_value;

  domi::tensorflow::AttrValue shape;
  shape.mutable_list()->add_i((int64)32);
  shape.mutable_list()->add_i((int64)32);
  shape.mutable_list()->add_i((int64)14);
  shape.mutable_list()->add_i((int64)14);

  // Set the data_format attribute
  domi::tensorflow::AttrValue df_attr_value2;
  df_attr_value2.set_s(TENSORFLOWF_TENSOR_NHWC);
  (*node_attr_map)[TENSORFLOW_ATTR_DATA_FORMAT] = df_attr_value2;
  domi::tensorflow::AttrValue df_attr_value;
  df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);

  // Set the padding attribute
  domi::tensorflow::AttrValue pad_attr_value2;
  pad_attr_value2.set_s(TENSORFLOWF_OP_PADDING_SAME);
  (*node_attr_map)[TENSORFLOW_ATTR_PADDING] = pad_attr_value2;
  domi::tensorflow::AttrValue pad_attr_value;
  pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);

  domi::tensorflow::NameAttrList name_attr_list;
  name_attr_list.set_name(std::to_string(0));
  name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  domi::tensorflow::AttrValue output_tensor_descs;
  *(output_tensor_descs.mutable_list()->add_func()) = name_attr_list;
  nodeDef->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, output_tensor_descs});
  return nodeDef;
}
NodeDef *fusioninitNodeDef(int index) {
  NodeDef *nodeDef = new NodeDef();
  google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = nodeDef->mutable_attr();

  // Set the dtype attribute
  domi::tensorflow::AttrValue dtype_attr_value;
  if (index == 0) {
    dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  } else if (index == 1) {
    dtype_attr_value.set_type(domi::tensorflow::DT_INT32);
  } else if (index == 2) {
    dtype_attr_value.set_type(tensorflow::DT_HALF);
  }
  (*node_attr_map)[ge::TENSORFLOW_ATTR_DTYPE] = dtype_attr_value;

  // Set the data_format attribute
  domi::tensorflow::AttrValue df_attr_value;
  df_attr_value.set_s(TENSORFLOWF_TENSOR_NCHW);
  (*node_attr_map)[TENSORFLOW_ATTR_DATA_FORMAT] = df_attr_value;

  // Set the tensor attribute
  domi::tensorflow::AttrValue value_attr_value;
  ::tensorflow::TensorProto *tensor = value_attr_value.mutable_tensor();
  ::tensorflow::TensorShapeProto *tensor_shape = tensor->mutable_tensor_shape();
  tensor_shape->clear_dim();
  ::tensorflow::TensorShapeProto_Dim *dim = tensor_shape->add_dim();
  dim->set_name("tensor dim");
  dim->set_size(1);

  if (index == 0) {
    tensor->set_dtype(domi::tensorflow::DT_FLOAT);
    float *addr = new float[1];
    *addr = 1.0;
    tensor->set_tensor_content((void *)addr, sizeof(float));
    (*node_attr_map)[TENSORFLOW_ATTR_VALUE] = value_attr_value;
    delete[] addr;
  } else if (index == 1) {
    tensor->set_dtype(domi::tensorflow::DT_INT32);
    int32_t *addr = new int32_t[1];
    *addr = 1;
    tensor->set_tensor_content((void *)addr, sizeof(int32_t));
    (*node_attr_map)[TENSORFLOW_ATTR_VALUE] = value_attr_value;
    delete[] addr;
  } else if (index == 2) {
    tensor->set_dtype(tensorflow::DT_HALF);
    tensor->add_half_val(1);
    (*node_attr_map)[TENSORFLOW_ATTR_VALUE] = value_attr_value;
  }
  return nodeDef;
}
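
// The NodeDefs created below are raw pointers stored in the parser's nodedef_map_;
// FreeNodeDefMap releases only the entries whose names appear in malloc_node_name_list.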
NodeDef *MallocNodeDef(const string &name, const string &type) {
  NodeDef *node_def = new (std::nothrow) NodeDef();
  if (node_def != nullptr) {
    node_def->set_name(name);
    node_def->set_op(type);
  }
  return node_def;
}

void GenOriginNodeDef(ge::TensorFlowModelParser *tensorflow_parser, vector<string> &node_name_list) {
  NodeDef *pre_node_a = MallocNodeDef("pre_node_a", "Const");
  EXPECT_NE(pre_node_a, nullptr);
  {
    google::protobuf::Map< ::std::string, ::tensorflow::AttrValue > *node_attr_map = pre_node_a->mutable_attr();
    tensorflow::AttrValue attr_dtype;
    attr_dtype.set_type(tensorflow::DT_FLOAT);
    (*node_attr_map)["dtype"] = attr_dtype;
    tensorflow::AttrValue attr_value;
    tensorflow::TensorProto *tensor = attr_value.mutable_tensor();
    tensor->add_bool_val(true);
    tensor->set_dtype(tensorflow::DT_BOOL);
    (*node_attr_map)["value"] = attr_value;
  }
  tensorflow_parser->nodedef_map_["pre_node_a"] = pre_node_a;
  node_name_list.push_back("pre_node_a");

  NodeDef *pre_node_ctrl_in = MallocNodeDef("pre_node_ctrl_in", "Const");
  EXPECT_NE(pre_node_ctrl_in, nullptr);
  {
    ::google::protobuf::Map< ::std::string, ::tensorflow::AttrValue > *node_attr_map = pre_node_ctrl_in->mutable_attr();
    tensorflow::AttrValue attr_dtype;
    attr_dtype.set_type(tensorflow::DT_FLOAT);
    (*node_attr_map)["dtype"] = attr_dtype;
    tensorflow::AttrValue attr_value;
    tensorflow::TensorProto *tensor = attr_value.mutable_tensor();
    tensor->add_bool_val(true);
    tensor->set_dtype(tensorflow::DT_BOOL);
    (*node_attr_map)["value"] = attr_value;
  }
  tensorflow_parser->nodedef_map_["pre_node_ctrl_in"] = pre_node_ctrl_in;
  node_name_list.push_back("pre_node_ctrl_in");

  NodeDef *post_node_b = MallocNodeDef("post_node_b", "Identity");
  EXPECT_NE(post_node_b, nullptr);
  tensorflow_parser->nodedef_map_["post_node_b"] = post_node_b;
  node_name_list.push_back("post_node_b");

  NodeDef *post_node_c = MallocNodeDef("post_node_c", "Identity");
  EXPECT_NE(post_node_c, nullptr);
  tensorflow_parser->nodedef_map_["post_node_c"] = post_node_c;
  node_name_list.push_back("post_node_c");

  NodeDef *post_node_d = MallocNodeDef("post_node_d", "Identity");
  EXPECT_NE(post_node_d, nullptr);
  tensorflow_parser->nodedef_map_["post_node_d"] = post_node_d;
  node_name_list.push_back("post_node_d");
}

void FreeNodeDefMap(ge::TensorFlowModelParser *tensorflow_parser, set<string> &malloc_node_name_list) {
  for (auto &item : tensorflow_parser->nodedef_map_) {
    if (item.second != nullptr && malloc_node_name_list.count(item.first) > 0) {
      delete (item.second);
      item.second = nullptr;
    }
  }
}
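
// GenFusionScopesResult wires a scope-fusion result with one input, two outputs and a
// chain of three inner nodes (Unique -> Identity -> Identity), then registers it on the
// ScopeGraph through its impl_.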
void GenFusionScopesResult(shared_ptr<ScopeGraph> &scope_graph, FusionScopesResult *fusion_rlt,
                           const string &fusion_op_name) {
  if (fusion_rlt == nullptr) {
    return;
  }
  fusion_rlt->InsertInputs("scope_node_1", {0});   // scope input 0
  fusion_rlt->InsertOutputs("scope_node_m", {0});  // scope output 0
  fusion_rlt->InsertOutputs("scope_node_n", {1});  // scope output 1

  fusion_rlt->SetType(ge::kScopeToMultiNodes);
  fusion_rlt->SetName(fusion_op_name);
  fusion_rlt->SetDescription("Description for fusion node");

  // Add inner nodes in sequence.
  auto node1 = fusion_rlt->AddInnerNode("inner_node_1", "Unique");  // add inner node1
  CHECK_INNER_NODE_CONDITION(node1 != nullptr, fusion_rlt);
  auto ret = node1
      ->InsertInput(ge::kInputFromFusionScope, 0)  // Input from 0th of boundary (a)
      .InsertOutput(ge::kOutputToFusionScope, 0)   // Output to 0th of boundary (b)
      .InsertOutput("inner_node_2", 0)             // Output to input 0th of internal node 2
      .BuildInnerNode();                           // Construct an internal Operator
  CHECK_INNER_NODE_CONDITION(ret == ge::GRAPH_SUCCESS, fusion_rlt);
  string str_val = "This is a string.";
  node1->MutableOperator()->SetAttr("key1", 2);        // Set integer attribute
  node1->MutableOperator()->SetAttr("key2", str_val);  // Set the string attribute
  node1->MutableOperator()->SetAttr("key3", true);     // Set boolean attribute

  auto node2 = fusion_rlt->AddInnerNode("inner_node_2", "Identity");  // add inner node2
  CHECK_INNER_NODE_CONDITION(node2 != nullptr, fusion_rlt);
  ret = node2
      ->InsertInput("inner_node_1", 1)  // The input comes from the 1st output of internal node 1
      .InsertOutput("inner_node_3", 0)  // Output to input 0th of internal node 3
      .BuildInnerNode();
  CHECK_INNER_NODE_CONDITION(ret == ge::GRAPH_SUCCESS, fusion_rlt);
  node2->SetInputFormat("x", "NHWC");
  node2->SetOutputFormat("y", "NHWC");

  auto node3 = fusion_rlt->AddInnerNode("inner_node_3", "Identity");  // add inner node3
  CHECK_INNER_NODE_CONDITION(node3 != nullptr, fusion_rlt);
  ret = node3
      ->InsertInput("inner_node_2", 0)            // The input comes from the 0th output of internal node 2
      .InsertOutput(ge::kOutputToFusionScope, 1)  // Output to 1st of boundary (c)
      .BuildInnerNode();
  CHECK_INNER_NODE_CONDITION(ret == ge::GRAPH_SUCCESS, fusion_rlt);

  scope_graph->impl_->AddFusionScopesResult(fusion_rlt);
}
void GenOriginContext(ge::TensorFlowModelParser *tensorflow_parser, const string &fusion_op_name) {
  // op_node_context for fusion op
  ge::OpNodeContext op_node_context;
  op_node_context.input_map["pre_node_a"].push_back({0, 0});
  op_node_context.input_map["pre_node_ctrl_in"].push_back({-1, -1});  // ctrl edges
  op_node_context.output_map["post_node_b"].push_back({0, 0});
  op_node_context.output_map["post_node_c"].push_back({1, 0});
  op_node_context.output_map["post_node_d"].push_back({-1, -1});
  op_node_context.output_map["_Retval"].push_back({0, 1});
  // ctrl edges
  tensorflow_parser->op_node_context_map_[fusion_op_name] = op_node_context;
  tensorflow_parser->SaveEdgesControlInfo(fusion_op_name, -1);

  // op_node_context for pre_node_a
  ge::OpNodeContext op_node_context_a;
  op_node_context_a.output_map[fusion_op_name].push_back({0, 0});
  tensorflow_parser->op_node_context_map_["pre_node_a"] = op_node_context_a;

  // op_node_context for pre_node_ctrl_in
  ge::OpNodeContext op_node_context_ctrl_in;
  op_node_context_ctrl_in.output_map[fusion_op_name].push_back({-1, -1});  // ctrl edges
  tensorflow_parser->op_node_context_map_["pre_node_ctrl_in"] = op_node_context_ctrl_in;

  // op_node_context for post_node_b
  ge::OpNodeContext op_node_context_b;
  op_node_context_b.input_map[fusion_op_name].push_back({0, 0});
  tensorflow_parser->op_node_context_map_["post_node_b"] = op_node_context_b;

  // op_node_context for post_node_c
  ge::OpNodeContext op_node_context_c;
  op_node_context_c.output_map["post_node_d"].push_back({0, 0});
  tensorflow_parser->op_node_context_map_["post_node_c"] = op_node_context_c;

  // op_node_context for post_node_d
  ge::OpNodeContext op_node_context_d;
  op_node_context_d.input_map[fusion_op_name].push_back({-1, -1});  // ctrl edges
  tensorflow_parser->op_node_context_map_["post_node_d"] = op_node_context_d;

  // op_node_context for _Retval
  ge::OpNodeContext op_node_context_Retval;
  op_node_context_d.input_map["post_node_d"].push_back({-1, -1});
  op_node_context_c.output_map["fusion_op_name"].push_back({0, 1});
  tensorflow_parser->op_node_context_map_["_Retval"] = op_node_context_Retval;
  tensorflow_parser->SaveEdgesControlInfo("op_node_context_Retval", -1);

  string fusion_op_type = ge::kScopeToMultiNodes;
  string description = "fusion op description";
  tensorflow_parser->fusion_op_type_map_[fusion_op_name].push_back(fusion_op_type);
  tensorflow_parser->fusion_op_type_map_[fusion_op_name].push_back(description);
}
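
// register_tbe_op finalizes every pending OpRegistrationData through the TBE
// registration path and re-registers it with the OpRegistry singleton.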
void register_tbe_op() {
  std::vector<OpRegistrationData> registrationDatas = OpRegistry::Instance()->registrationDatas;
  for (OpRegistrationData reg_data : registrationDatas) {
    OpRegistrationTbe::Instance()->Finalize(reg_data);
    OpRegistry::Instance()->Register(reg_data);
  }
  OpRegistry::Instance()->registrationDatas.clear();
}
NodeDef *initNodeDef_axis_dims() {
  NodeDef *nodeDef = new NodeDef();
  google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = nodeDef->mutable_attr();

  // Set the T attribute
  domi::tensorflow::AttrValue dtype_attr_value;
  dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;

  // Set the squeeze axis and dims attributes
  domi::tensorflow::AttrValue axis_attr_value;
  ::tensorflow::AttrValue_ListValue *list = axis_attr_value.mutable_list();
  list->add_i(1);
  list->add_i(2);
  (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  (*node_attr_map)[ge::SQUEEZE_ATTR_DIMS] = axis_attr_value;
  return nodeDef;
}

NodeDef *initNodeDef_dims() {
  NodeDef *nodeDef = new NodeDef();
  ::google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = nodeDef->mutable_attr();

  // Set the T attribute
  domi::tensorflow::AttrValue dtype_attr_value;
  dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;

  // Set the squeeze dims attribute
  domi::tensorflow::AttrValue axis_attr_value;
  ::tensorflow::AttrValue_ListValue *list = axis_attr_value.mutable_list();
  list->add_i(1);
  list->add_i(2);
  (*node_attr_map)[ge::SQUEEZE_ATTR_DIMS] = axis_attr_value;
  return nodeDef;
}
void CreateOpDef(const string &_name, const string &_type, ge::OpDescPtr opDef) {
  tensorflow::OpDef tsOpDef;
  tsOpDef.set_name(_name);
  tensorflow::OpDef_ArgDef *outArgDef = tsOpDef.add_output_arg();
  outArgDef->set_name(_name);
  outArgDef->set_description("outArgDef");
  outArgDef->set_type((tensorflow::DataType)3);

  if ((_name == "A") || (_name == "B")) {
    tensorflow::OpDef_ArgDef *argDef1 = tsOpDef.add_output_arg();
    string name = _name + "t";
    argDef1->set_name(name);
    argDef1->set_description("this is a test 2");
    argDef1->set_type((tensorflow::DataType)3);
  }
  if (_name == "C") {
    outArgDef->set_number_attr("num");
  }
  if (_name == "D") {
    outArgDef->set_type_list_attr("type_list");
  }

  string strTsOpDef;
  tsOpDef.SerializeToString(&strTsOpDef);
  ge::AttrUtils::SetStr(opDef, "op_def", strTsOpDef);

  tensorflow::NodeDef nodedef;
  nodedef.set_name(_name);
  nodedef.set_op(_name);

  string name("op_def");
  tensorflow::AttrValue value;
  value.set_s(strTsOpDef);
  TensorFlowUtil::AddNodeAttr(name, value, &nodedef);
  value.set_i(1);
  TensorFlowUtil::AddNodeAttr("num", value, &nodedef);
  value.mutable_list();
  TensorFlowUtil::AddNodeAttr("type_list", value, &nodedef);

  string strNodeDef;
  nodedef.SerializeToString(&strNodeDef);
  ge::GeAttrValue::BYTES nodedefBytes;
  nodedefBytes = ge::GeAttrValue::BYTES::CopyFrom((uint8_t *)strNodeDef.data(), strNodeDef.length());
  ge::AttrUtils::SetBytes(opDef, "node_def", nodedefBytes);

  if ((_name == "S") || (_name == "K")) {
    int index = 0;
    ge::AttrUtils::SetInt(opDef, "T", 1);
    ge::AttrUtils::SetInt(opDef, "arg_index", index);
    ge::AttrUtils::SetInt(opDef, "ret_index", index);
  }
}
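
// AddNode/MakeDagGraph build a small GE compute graph (DATA node "S" feeding a DAG of
// test ops down to NETOUTPUT node "K"), attaching the serialized op_def/node_def
// attributes produced by CreateOpDef to every node.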
ge::NodePtr AddNode(ge::ComputeGraphPtr graph, const string &_name, const string &_type, int32_t i_n, int32_t o_n) {
  ge::OpDescPtr opDef = std::make_shared<ge::OpDesc>();
  opDef->SetName(_name);
  opDef->SetType(_type);
  for (int32_t i = 0; i < i_n; i++) {
    ge::GeTensorDesc input;
    input.SetDataType((ge::DataType)1);
    opDef->AddInputDesc(input);
  }
  for (int32_t i = 0; i < o_n; i++) {
    ge::GeTensorDesc output;
    output.SetDataType((ge::DataType)1);
    opDef->AddOutputDesc(output);
  }
  CreateOpDef(_name, _type, opDef);
  return graph->AddNode(opDef);
}

void MakeDagGraph(ge::ComputeGraphPtr graph, const string &input_node_type) {
  ge::NodePtr node_s = AddNode(graph, "S", parser::DATA, 1, 1);
  ge::NodePtr node_a = AddNode(graph, "A", "testa", 1, 2);
  ge::NodePtr node_b = AddNode(graph, "B", "testb", 1, 2);
  ge::NodePtr node_c = AddNode(graph, "C", "testc", 1, 1);
  ge::NodePtr node_d = AddNode(graph, "D", "testd", 1, 1);
  ge::NodePtr node_e = AddNode(graph, "E", "teste", 1, 1);
  ge::NodePtr node_f = AddNode(graph, "F", "testf", 1, 1);
  ge::NodePtr node_g = AddNode(graph, "G", "testg", 2, 1);
  ge::NodePtr node_h = AddNode(graph, "H", "testh", 1, 1);
  ge::NodePtr node_i = AddNode(graph, "I", "testi", 1, 1);
  ge::NodePtr node_j = AddNode(graph, "J", "testj", 2, 1);
  ge::NodePtr node_k = AddNode(graph, "K", parser::NETOUTPUT, 1, 1);

  ge::GraphUtils::AddEdge(node_s->GetOutDataAnchor(0), node_a->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_a->GetOutDataAnchor(0), node_b->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_a->GetOutDataAnchor(1), node_c->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_b->GetOutDataAnchor(0), node_d->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_b->GetOutDataAnchor(1), node_e->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_c->GetOutDataAnchor(0), node_g->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_d->GetOutDataAnchor(0), node_f->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_e->GetOutDataAnchor(0), node_g->GetInDataAnchor(1));
  ge::GraphUtils::AddEdge(node_f->GetOutDataAnchor(0), node_h->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_g->GetOutDataAnchor(0), node_j->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_h->GetOutDataAnchor(0), node_i->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_i->GetOutDataAnchor(0), node_j->GetInDataAnchor(1));
  ge::GraphUtils::AddEdge(node_j->GetOutDataAnchor(0), node_k->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_h->GetOutControlAnchor(), node_j->GetInControlAnchor());
}
void ChangeDataType(tensorflow::NodeDef *node_tf, int32_t data_type) {
  domi::tensorflow::AttrValue input_attr_value;
  google::protobuf::Map<std::string, tensorflow::AttrValue> *attr = node_tf->mutable_attr();
  google::protobuf::Map<std::string, tensorflow::AttrValue>::const_iterator it = attr->find(ge::ATTR_NAME_INPUT_TENSOR_DESC);
  if (it != attr->end()) {
    input_attr_value = it->second;
  }
  (*attr)[ge::ATTR_NAME_INPUT_TENSOR_DESC] = input_attr_value;
}

NodeDef *AddGraphNode(GraphDef *graph, string name, string optype, string input) {
  NodeDef *node_def = graph->add_node();
  node_def->set_name(name);
  node_def->set_op(optype);
  node_def->add_input(input);
  return node_def;
}
ge::ComputeGraphPtr build_graph(bool with_leaf_node = false) {
  ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>("default");
  ge::OpDescPtr data_op = std::make_shared<ge::OpDesc>();
  data_op->SetType(parser::DATA);
  data_op->SetName("Data1");
  data_op->AddInputDesc(ge::GeTensorDesc());
  data_op->AddOutputDesc(ge::GeTensorDesc());
  ge::NodePtr data1 = graph->AddNode(data_op);

  ge::OpDescPtr relu_op1 = std::make_shared<ge::OpDesc>();
  relu_op1->SetType(parser::ACTIVATION);
  relu_op1->SetName("Relu1");
  relu_op1->AddInputDesc(ge::GeTensorDesc());
  relu_op1->AddOutputDesc(ge::GeTensorDesc());
  ge::NodePtr relu1 = graph->AddNode(relu_op1);

  ge::OpDescPtr relu_op2 = std::make_shared<ge::OpDesc>();
  relu_op2->SetType(parser::RELU);
  relu_op2->SetName("Relu2");
  relu_op2->AddInputDesc(ge::GeTensorDesc());
  relu_op2->AddOutputDesc(ge::GeTensorDesc());
  relu_op2->AddOutputDesc(ge::GeTensorDesc());
  ge::NodePtr relu2 = graph->AddNode(relu_op2);

  ge::OpDescPtr relu_op3 = std::make_shared<ge::OpDesc>();
  relu_op3->SetType(parser::ACTIVATION);
  relu_op3->SetName("Relu3");
  relu_op3->AddInputDesc(ge::GeTensorDesc());
  relu_op3->AddOutputDesc(ge::GeTensorDesc());
  ge::NodePtr relu3;
  if (with_leaf_node == true) {
    relu3 = graph->AddNode(relu_op3);
  }

  ge::OpDescPtr mul_op = std::make_shared<ge::OpDesc>();
  mul_op->SetType(parser::MUL);
  mul_op->SetName("Mul");
  mul_op->AddInputDesc(ge::GeTensorDesc());
  mul_op->AddInputDesc(ge::GeTensorDesc());
  mul_op->AddOutputDesc(ge::GeTensorDesc());
  mul_op->AddOutputDesc(ge::GeTensorDesc());
  mul_op->AddOutputDesc(ge::GeTensorDesc());
  mul_op->AddOutputDesc(ge::GeTensorDesc());
  ge::NodePtr mul = graph->AddNode(mul_op);

  ge::OpDescPtr mul_op1 = std::make_shared<ge::OpDesc>();
  mul_op1->SetType(parser::MUL);
  mul_op1->SetName("Mul1");
  mul_op1->AddInputDesc(ge::GeTensorDesc());
  mul_op1->AddInputDesc(ge::GeTensorDesc());
  mul_op1->AddOutputDesc(ge::GeTensorDesc());
  ge::NodePtr mul1 = graph->AddNode(mul_op1);

  ge::OpDescPtr mul_op2 = std::make_shared<ge::OpDesc>();
  mul_op2->SetType(parser::MUL);
  mul_op2->SetName("Mul2");
  mul_op2->AddInputDesc(ge::GeTensorDesc());
  mul_op2->AddInputDesc(ge::GeTensorDesc());
  mul_op2->AddOutputDesc(ge::GeTensorDesc());
  ge::NodePtr mul2 = graph->AddNode(mul_op2);

  ge::OpDescPtr fc_op = std::make_shared<ge::OpDesc>();
  fc_op->SetType(parser::FULL_CONNECTION);
  fc_op->SetName("FullConnection");
  fc_op->AddInputDesc(ge::GeTensorDesc());
  fc_op->AddOutputDesc(ge::GeTensorDesc());
  fc_op->AddOutputDesc(ge::GeTensorDesc());
  ge::NodePtr fc = graph->AddNode(fc_op);

  ge::GraphUtils::AddEdge(data1->GetOutDataAnchor(0), relu1->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(relu1->GetOutDataAnchor(0), fc->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(fc->GetOutDataAnchor(0), relu2->GetInDataAnchor(0));
  if (with_leaf_node == true) {
    ge::GraphUtils::AddEdge(fc->GetOutDataAnchor(1), relu3->GetInDataAnchor(0));
  }
  ge::GraphUtils::AddEdge(relu2->GetOutDataAnchor(0), mul->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(relu2->GetOutDataAnchor(1), mul->GetInDataAnchor(1));
  ge::GraphUtils::AddEdge(mul->GetOutDataAnchor(0), mul1->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(mul->GetOutDataAnchor(1), mul1->GetInDataAnchor(1));
  ge::GraphUtils::AddEdge(mul->GetOutDataAnchor(2), mul2->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(mul->GetOutDataAnchor(3), mul2->GetInDataAnchor(1));
  return graph;
}
}  // namespace
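
// Minimal IR registrations for the Data and Add operators used by the test models,
// so the parser can map the TensorFlow ops onto registered GE op types.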
namespace {
REG_OP(Data)
    .INPUT(x, TensorType::ALL())
    .OUTPUT(y, TensorType::ALL())
    .ATTR(index, Int, 0)
    .OP_END_FACTORY_REG(Data)

REG_OP(Add)
    .INPUT(x1, TensorType({DT_FLOAT, DT_INT32, DT_INT64, DT_FLOAT16, DT_INT16,
                           DT_INT8, DT_UINT8, DT_DOUBLE, DT_COMPLEX128,
                           DT_COMPLEX64, DT_STRING}))
    .INPUT(x2, TensorType({DT_FLOAT, DT_INT32, DT_INT64, DT_FLOAT16, DT_INT16,
                           DT_INT8, DT_UINT8, DT_DOUBLE, DT_COMPLEX128,
                           DT_COMPLEX64, DT_STRING}))
    .OUTPUT(y, TensorType({DT_FLOAT, DT_INT32, DT_INT64, DT_FLOAT16, DT_INT16,
                           DT_INT8, DT_UINT8, DT_DOUBLE, DT_COMPLEX128,
                           DT_COMPLEX64, DT_STRING}))
    .OP_END_FACTORY_REG(Add)
}  // namespace
static Status FusionParserParams(const std::vector<const google::protobuf::Message *> inside_nodes, ge::Operator &op) {
  return domi::SUCCESS;
}

static MemBuffer *MemBufferFromFile(const char *path) {
  char path_temp[PATH_MAX + 1] = {0x00};
  if (strlen(path) > PATH_MAX || nullptr == realpath(path, path_temp)) {
    return nullptr;
  }
  FILE *fp = fopen(path_temp, "r+");
  if (fp == nullptr) {
    return nullptr;
  }

  // get model file length
  if (0 != fseek(fp, 0, SEEK_END)) {
    fclose(fp);
    return nullptr;
  }
  long file_length = ftell(fp);
  if (fseek(fp, 0, SEEK_SET)) {
    fclose(fp);
    return nullptr;
  }
  if (file_length <= 0) {
    fclose(fp);
    return nullptr;
  }

  // alloc model buffer
  void *data = malloc((unsigned int)file_length);
  if (!data) {
    fclose(fp);
    return nullptr;
  }

  // read file into memory
  uint32_t read_size = (uint32_t)fread(data, 1, (unsigned int)file_length, fp);
  // check if read success
  if ((long)read_size != file_length) {
    free(data);
    data = nullptr;
    fclose(fp);
    return nullptr;
  }

  // close model file
  fclose(fp);

  // create a MemBuffer
  MemBuffer *membuf = new MemBuffer();
  if (!membuf) {
    free(data);
    data = nullptr;
    return nullptr;
  }
  membuf->data = malloc((unsigned int)read_size);
  if (membuf->data == nullptr) {
    free(data);
    delete membuf;
    return nullptr;
  }
  // set size && data
  membuf->size = (uint32_t)read_size;
  memcpy((char *)membuf->data, (char *)data, read_size);
  free(data);
  return membuf;
}
/// placeholder0 placeholder1
/// | /\ /\ |
/// | / \/ \ |
/// | / /\ \ |
/// | | / \ | |
/// | add0 mul0 |
/// | / /c | \ |
/// mul1 --- / | add1
/// \ | |
/// \ ---- add2 |
/// | |
/// retval0 retval1
  826. void CreateGraphDef(domi::tensorflow::GraphDef &graph_def) {
  827. // 1. add node
  828. auto placeholder0 = graph_def.add_node();
  829. auto placeholder1 = graph_def.add_node();
  830. auto add0 = graph_def.add_node();
  831. auto add1 = graph_def.add_node();
  832. auto mul0 = graph_def.add_node();
  833. auto mul1 = graph_def.add_node();
  834. auto add2 = graph_def.add_node();
  835. auto retval0 = graph_def.add_node();
  836. auto retval1 = graph_def.add_node();
  837. auto softmax0 = graph_def.add_node();
  838. auto softmax1 = graph_def.add_node();
  839. // 2. set info
  840. placeholder0->set_name("placeholder0");
  841. placeholder0->set_op("PlaceHolder");
  842. placeholder1->set_name("placeholder1");
  843. placeholder1->set_op("PlaceHolder");
  844. add0->set_name("add0");
  845. add0->set_op("Add");
  846. add1->set_name("add1");
  847. add1->set_op("Add");
  848. add2->set_name("add2");
  849. add2->set_op("Add");
  850. mul0->set_name("mul0");
  851. mul0->set_op("Mul");
  852. mul1->set_name("mul1");
  853. mul1->set_op("Mul");
  854. retval0->set_name("retval0");
  855. retval0->set_op("_RetVal");
  856. retval1->set_name("retval1");
  857. retval1->set_op("_RetVal");
  862. softmax0->set_name("Softmax0");
  863. softmax0->set_op("Softmax");
  864. softmax1->set_name("Softmax1");
  865. softmax1->set_op("Softmax");
  866. // 3. add edges
  867. add0->add_input("placeholder0");
  868. add0->add_input("placeholder1");
  869. mul0->add_input("placeholder0");
  870. mul0->add_input("placeholder1");
  871. mul1->add_input("placeholder0");
  872. mul1->add_input("add0");
  873. mul1->add_input("^mul0");
  874. add1->add_input("mul0");
  875. add1->add_input("placeholder1");
  876. add2->add_input("mul1");
  877. add2->add_input("mul0");
  878. retval0->add_input("add2:0");
  879. retval1->add_input("add1:0");
  880. softmax0->add_input("add3:0");
  881. softmax0->add_input("add2:0");
  882. }
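// Parses origin_models/tf_add.pb end to end and checks that the graph records a
// single output node "add_test_1" at index 0.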
  883. TEST_F(STestTensorflowParser, tensorflow_parser_success) {
  884. RegisterCustomOp();
  885. std::string case_dir = __FILE__;
  886. ParserOperator unused("Add");
  887. case_dir = case_dir.substr(0, case_dir.find_last_of("/"));
  888. std::string model_file = case_dir + "/origin_models/tf_add.pb";
  889. std::map<ge::AscendString, ge::AscendString> parser_params;
  890. ge::Graph graph;
  891. auto ret = ge::aclgrphParseTensorFlow(model_file.c_str(), parser_params, graph);
  892. ASSERT_EQ(ret, SUCCESS);
  893. ge::ComputeGraphPtr compute_graph = ge::GraphUtils::GetComputeGraph(graph);
  894. auto output_nodes_info = compute_graph->GetGraphOutNodesInfo();
  895. ASSERT_EQ(output_nodes_info.size(), 1);
  896. EXPECT_EQ((output_nodes_info.at(0).first->GetName()), "add_test_1");
  897. EXPECT_EQ((output_nodes_info.at(0).second), 0);
  898. auto &net_out_name = ge::GetParserContext().net_out_nodes;
  899. ASSERT_EQ(net_out_name.size(), 1);
  900. EXPECT_EQ(net_out_name.at(0), "add_test_1:0");
  901. }
  902. TEST_F(STestTensorflowParser, tensorflow_model_Failed) {
  903. ge::Graph graph;
  904. std::string caseDir = __FILE__;
  905. std::size_t idx = caseDir.find_last_of("/");
  906. caseDir = caseDir.substr(0, idx);
  907. std::string modelFile = caseDir + "/origin_models/model.pb";
  908. auto status = ge::aclgrphParseTensorFlow(modelFile.c_str(), graph);
  909. EXPECT_EQ(status, ge::SUCCESS);
  910. modelFile = caseDir + "/origin_models/test_depth_wise_conv2d.pb";
  911. status = ge::aclgrphParseTensorFlow(modelFile.c_str(), graph);
  912. EXPECT_EQ(status, ge::GRAPH_FAILED);
  913. }
  914. TEST_F(STestTensorflowParser, tensorflow_model_not_exist) {
  915. ge::Graph graph;
  916. std::string caseDir = __FILE__;
  917. std::size_t idx = caseDir.find_last_of("/");
  918. caseDir = caseDir.substr(0, idx);
919. // the model file does not exist
  920. std::string modelFile = caseDir + "/origin_models/conv2d_explicit1_pad.pb";
  921. auto status = ge::aclgrphParseTensorFlow(modelFile.c_str(), graph);
  922. EXPECT_EQ(status, ge::GRAPH_FAILED);
  923. }
  924. TEST_F(STestTensorflowParser, parser_tensorflow_model) {
  925. std::string caseDir = __FILE__;
  926. std::size_t idx = caseDir.find_last_of("/");
  927. caseDir = caseDir.substr(0, idx);
  928. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  929. const char *model_file = modelFile.c_str();
  930. std::string op_name = "ge_ascend_irgraph";
  931. ge::Graph graph(op_name);
  932. std::map<ge::AscendString, ge::AscendString> parser_options = {
  933. {ge::AscendString(ge::ir_option::INPUT_FORMAT), ge::AscendString("NHWC")},
  934. };
  935. auto ret_graph = ge::aclgrphParseTensorFlow(model_file, parser_options, graph);
  936. EXPECT_EQ(ret_graph, ge::FAILED);
937. // parse tensorflow model where the requested out_nodes index equals the node's output count
  938. string graph_name;
  939. AclGrphParseUtil acl_graph_parse_util;
  940. std::map<AscendString, AscendString> out_nodes_with_node_and_index = {
  941. {AscendString(ge::ir_option::OUT_NODES), AscendString("Placeholder:0;Placeholder_1:1")}};
  942. ParerSTestsUtils::ClearParserInnerCtx();
  943. auto ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_with_node_and_index, graph_name);
  944. ret_graph = ge::aclgrphParseTensorFlow(model_file, graph);
  945. EXPECT_EQ(ret_graph, domi::FAILED);
946. // parse tensorflow model successfully
  947. modelFile = caseDir + "/origin_models/model.pb";
  948. model_file = modelFile.c_str();
  949. out_nodes_with_node_and_index = {{AscendString(ge::ir_option::OUT_NODES), AscendString("x:0;y:0")}};
  950. ParerSTestsUtils::ClearParserInnerCtx();
  951. ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_with_node_and_index, graph_name);
  952. ret_graph = ge::aclgrphParseTensorFlow(model_file, graph);
  953. EXPECT_EQ(ret_graph, domi::SUCCESS);
  954. }
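// Dumps origin_models/tf_add.pb to JSON through TensorFlowModelParser::ToJson.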
  955. TEST_F(STestTensorflowParser, tensorflow_parser_to_json)
  956. {
  957. TensorFlowModelParser modelParser;
  958. std::string caseDir = __FILE__;
  959. std::size_t idx = caseDir.find_last_of("/");
  960. caseDir = caseDir.substr(0, idx);
  961. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  962. std::string jsonFile = caseDir + "/origin_models/test.json";
  963. const char *model_file = modelFile.c_str();
  964. const char *json_file = jsonFile.c_str();
  965. Status ret = modelParser.ToJson(model_file, json_file);
  966. EXPECT_EQ(ret, SUCCESS);
  967. }
  968. TEST_F(STestTensorflowParser, tensorflow_parserfrommemory_failed)
  969. {
  970. TensorFlowModelParser modelParser;
  971. std::string caseDir = __FILE__;
  972. std::size_t idx = caseDir.find_last_of("/");
  973. caseDir = caseDir.substr(0, idx);
  974. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  975. const char *data = modelFile.c_str();
  976. uint32_t size = 1;
  977. ge::Graph graph;
  978. std::map<ge::AscendString, ge::AscendString> parser_params;
  979. Status ret = ge::aclgrphParseTensorFlow(modelFile.c_str(), parser_params, graph);
  980. ASSERT_EQ(ret, SUCCESS);
  981. modelFile = caseDir + "/origin_models/tf_add.pb";
  982. parser_params = {{AscendString(ge::ir_option::OUT_NODES), AscendString("Placeholder:0;Placeholder_1:0")}};
  983. ret = ge::aclgrphParseTensorFlow(modelFile.c_str(), parser_params, graph);
  984. ge::ComputeGraphPtr compute_graph = ge::GraphUtils::GetComputeGraph(graph);
  985. ret = modelParser.ParseFromMemory(data, size, compute_graph);
  986. EXPECT_EQ(ret, INTERNAL_ERROR);
  987. }
  988. TEST_F(STestTensorflowParser, modelparser_parsefrommemory_success)
  989. {
  990. std::string caseDir = __FILE__;
  991. std::size_t idx = caseDir.find_last_of("/");
  992. caseDir = caseDir.substr(0, idx);
  993. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  994. const char* tmp_tf_pb_model = modelFile.c_str();
  995. ge::Graph graph;
  996. std::map<ge::AscendString, ge::AscendString> parser_params;
  997. Status ret = ge::aclgrphParseTensorFlow(modelFile.c_str(), parser_params, graph);
  998. ASSERT_EQ(ret, SUCCESS);
  999. ge::ComputeGraphPtr compute_graph = ge::GraphUtils::GetComputeGraph(graph);
  1000. TensorFlowModelParser modelParser;
1001. MemBuffer* memBuffer = MemBufferFromFile(tmp_tf_pb_model);
ASSERT_NE(memBuffer, nullptr);
1002. EXPECT_EQ(PreChecker::Instance().HasError(), false);
  1003. ret = modelParser.ParseFromMemory((char*)memBuffer->data, memBuffer->size, compute_graph);
  1004. free(memBuffer->data);
  1005. delete memBuffer;
  1006. }
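// Feeds an in-memory copy of the model to the TENSORFLOW weights parser.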
  1007. TEST_F(STestTensorflowParser, weightsparser_parsefrommemory_success)
  1008. {
  1009. std::string caseDir = __FILE__;
  1010. std::size_t idx = caseDir.find_last_of("/");
  1011. caseDir = caseDir.substr(0, idx);
  1012. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  1013. const char* tmp_tf_pb_model = modelFile.c_str();
  1014. ge::Graph graph;
  1015. std::map<ge::AscendString, ge::AscendString> parser_params;
  1016. Status ret = ge::aclgrphParseTensorFlow(modelFile.c_str(), parser_params, graph);
  1017. ASSERT_EQ(ret, SUCCESS);
  1018. ge::ComputeGraphPtr compute_graph = ge::GraphUtils::GetComputeGraph(graph);
  1019. auto weights_parser = domi::WeightsParserFactory::Instance()->CreateWeightsParser(domi::TENSORFLOW);
1020. MemBuffer* memBuffer = MemBufferFromFile(tmp_tf_pb_model);
ASSERT_NE(memBuffer, nullptr);
  1021. ret = weights_parser->ParseFromMemory((char*)memBuffer->data, memBuffer->size, compute_graph);
  1022. free(memBuffer->data);
  1023. delete memBuffer;
  1024. EXPECT_EQ(SUCCESS, ret);
  1025. }
  1026. std::string getGraphCallbackV2(string subgraph_name)
  1027. {
  1028. std::string caseDir = __FILE__;
  1029. std::size_t idx = caseDir.find_last_of("/");
  1030. caseDir = caseDir.substr(0, idx);
  1031. subgraph_name = caseDir + "/origin_models/tf_add.pb";
  1032. return subgraph_name;
  1033. }
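// Runs ParseProtoWithSubgraph with a GetGraphCallbackV2 that maps any subgraph name
// to the local tf_add.pb file.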
  1034. TEST_F(STestTensorflowParser, parser_ParseProtoWithSubgraphV2)
  1035. {
  1036. std::string caseDir = __FILE__;
  1037. std::size_t idx = caseDir.find_last_of("/");
  1038. caseDir = caseDir.substr(0, idx);
  1039. const std::string root_proto = caseDir + "/origin_models/tf_add.pb";
  1040. ge::Graph graph;
  1041. std::map<ge::AscendString, ge::AscendString> parser_params;
  1042. Status ret = ge::aclgrphParseTensorFlow(root_proto.c_str(), parser_params, graph);
  1043. ASSERT_EQ(ret, SUCCESS);
  1044. ge::ComputeGraphPtr root_graph = ge::GraphUtils::GetComputeGraph(graph);
  1045. domi::GetGraphCallbackV2 callback(&getGraphCallbackV2);
  1046. TensorFlowModelParser parser;
  1047. ret = parser.ParseProtoWithSubgraph(root_proto, callback, root_graph);
  1048. }
  1049. TEST_F(STestTensorflowParser, parser_ConvertToGeDataType)
  1050. {
  1051. // convert to ge type success
  1052. const uint32_t type1 = domi::tensorflow::DataType::DT_FLOAT;
  1053. TensorFlowModelParser parser;
  1054. ge::DataType dataType = parser.ConvertToGeDataType(type1);
  1055. ASSERT_EQ(dataType, ge::DataType::DT_FLOAT);
  1056. const uint32_t type2 = 80; // invalid type
  1057. dataType = parser.ConvertToGeDataType(type2);
  1058. ASSERT_EQ(dataType, ge::DataType::DT_UNDEFINED);
  1059. }
  1060. TEST_F(STestTensorflowParser, tensorflow_ParserProto_failed)
  1061. {
  1062. std::string caseDir = __FILE__;
  1063. std::size_t idx = caseDir.find_last_of("/");
  1064. caseDir = caseDir.substr(0, idx);
  1065. const std::string root_proto = caseDir + "/origin_models/avgpool3dgrad.pb.txt";
  1066. domi::tensorflow::GraphDef graphDef;
  1067. ge::Graph graph;
  1068. std::map<ge::AscendString, ge::AscendString> parser_params;
  1069. Status ret = ge::aclgrphParseTensorFlow(root_proto.c_str(), parser_params, graph);
  1070. ASSERT_EQ(ret, SUCCESS);
  1071. ge::ComputeGraphPtr root_graph = ge::GraphUtils::GetComputeGraph(graph);
  1072. TensorFlowModelParser tensorflow_parser;
  1073. ret = tensorflow_parser.ParseProto(reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph);
  1074. EXPECT_EQ(PARAM_INVALID, ret);
1075. // proto parsing fails
  1076. bool protoRet = parser::ReadProtoFromText(root_proto.c_str(), &graphDef);
  1077. ASSERT_EQ(protoRet, false);
  1078. ret = tensorflow_parser.ParseProto(reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph);
  1079. ASSERT_EQ(ret, PARAM_INVALID);
  1080. std::string serialized_proto = "";
  1081. ret = tensorflow_parser.ParseProto(serialized_proto, root_graph);
  1082. ASSERT_EQ(ret, FAILED);
  1083. }
  1084. TEST_F(STestTensorflowParser, tensorflow_parserAllGraph_failed)
  1085. {
  1086. std::string caseDir = __FILE__;
  1087. std::size_t idx = caseDir.find_last_of("/");
  1088. caseDir = caseDir.substr(0, idx);
  1089. const std::string root_proto = caseDir + "/origin_models/conv2d.pb";
  1090. domi::tensorflow::GraphDef graphDef;
  1091. CreateGraphDef(graphDef);
  1092. auto no_op = graphDef.add_node();
  1093. no_op->set_name("no_op");
  1094. no_op->set_op("NoOp");
  1095. no_op->add_input("placeholder0");
  1096. no_op->add_input("placeholder1");
  1097. ge::Graph graph;
  1098. std::map<ge::AscendString, ge::AscendString> parser_params;
  1099. Status ret = ge::aclgrphParseTensorFlow(root_proto.c_str(), parser_params, graph);
  1100. ASSERT_EQ(ret, SUCCESS);
  1101. ge::ComputeGraphPtr root_graph = ge::GraphUtils::GetComputeGraph(graph);
  1102. TensorFlowModelParser tensorflow_parser;
  1103. ret = tensorflow_parser.ParseAllGraph(reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph);
  1104. ASSERT_NE(ret, SUCCESS);
  1105. }
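// Covers parsing of the OUT_NODES option: pure "node:index" form, pure tensor-name
// form, and the rejected mixtures of the two.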
  1106. TEST_F(STestTensorflowParser, test_parse_acl_output_nodes)
  1107. {
  1108. AclGrphParseUtil acl_graph_parse_util;
  1109. string graph_name;
  1110. // case 1: Normal with 'node and index'
  1111. ParerSTestsUtils::ClearParserInnerCtx();
  1112. GetParserContext().type = domi::ONNX;
  1113. std::map<AscendString, AscendString> out_nodes_with_node_and_index = {
  1114. {AscendString(ge::ir_option::OUT_NODES), AscendString("Out1:0;Out2:1")}};
  1115. ParerSTestsUtils::ClearParserInnerCtx();
  1116. auto ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_with_node_and_index, graph_name);
  1117. ASSERT_EQ(ret, SUCCESS);
  1118. EXPECT_EQ(ge::GetParserContext().user_out_nodes.size(), 2);
  1119. EXPECT_EQ(ge::GetParserContext().out_nodes_map.size(), 2);
  1120. EXPECT_EQ(ge::GetParserContext().user_out_tensors.size(), 0);
  1121. // case 2: Normal with 'tensor name'
  1122. ParerSTestsUtils::ClearParserInnerCtx();
  1123. GetParserContext().type = domi::ONNX;
  1124. std::map<AscendString, AscendString> out_nodes_with_tensor_name = {
  1125. {AscendString(ge::ir_option::OUT_NODES), AscendString("Out_tensor_1;Out_tensor_2")}};
  1126. ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_with_tensor_name, graph_name);
  1127. ASSERT_EQ(ret, SUCCESS);
  1128. EXPECT_EQ(ge::GetParserContext().user_out_nodes.size(), 0);
  1129. EXPECT_EQ(ge::GetParserContext().out_nodes_map.size(), 0);
  1130. EXPECT_EQ(ge::GetParserContext().user_out_tensors.size(), 2);
  1131. // case 3: Failed with 'node and index' before 'tensor name'
  1132. ParerSTestsUtils::ClearParserInnerCtx();
  1133. GetParserContext().type = domi::ONNX;
  1134. std::map<AscendString, AscendString> out_nodes_mode_mixex_pre = {
  1135. {AscendString(ge::ir_option::OUT_NODES), AscendString("Out1:0;Out2:1;Out_tensor_1;Out_tensor_2")}};
  1136. ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_mode_mixex_pre, graph_name);
  1137. ASSERT_EQ(ret, PARAM_INVALID);
  1138. EXPECT_EQ(ge::GetParserContext().user_out_nodes.size(), 2);
  1139. EXPECT_EQ(ge::GetParserContext().out_nodes_map.size(), 2);
  1140. EXPECT_EQ(ge::GetParserContext().user_out_tensors.size(), 0);
  1141. // case 4: Failed with 'node and index' inserted in 'tensor name'
  1142. ParerSTestsUtils::ClearParserInnerCtx();
  1143. GetParserContext().type = domi::ONNX;
  1144. std::map<AscendString, AscendString> out_nodes_mode_mixex_mid = {
  1145. {AscendString(ge::ir_option::OUT_NODES), AscendString("Out_tensor_1;Out1:0;Out2:1;Out_tensor_2")}};
  1146. ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_mode_mixex_mid, graph_name);
  1147. ASSERT_EQ(ret, PARAM_INVALID);
  1148. EXPECT_EQ(ge::GetParserContext().user_out_nodes.size(), 0);
  1149. EXPECT_EQ(ge::GetParserContext().out_nodes_map.size(), 0);
  1150. EXPECT_EQ(ge::GetParserContext().user_out_tensors.size(), 1);
  1151. // case 5: Failed with 'node and index' after 'tensor name'
  1152. ParerSTestsUtils::ClearParserInnerCtx();
  1153. GetParserContext().type = domi::ONNX;
  1154. std::map<AscendString, AscendString> out_nodes_mode_mixex_post = {
  1155. {AscendString(ge::ir_option::OUT_NODES), AscendString("Out_tensor_1;Out_tensor_2;Out1:0;Out2:1")}};
  1156. ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_mode_mixex_post, graph_name);
  1157. ASSERT_EQ(ret, PARAM_INVALID);
  1158. EXPECT_EQ(ge::GetParserContext().user_out_nodes.size(), 0);
  1159. EXPECT_EQ(ge::GetParserContext().out_nodes_map.size(), 0);
  1160. EXPECT_EQ(ge::GetParserContext().user_out_tensors.size(), 2);
  1161. }
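// Verifies that AutoMappingFn and AutoMappingByOpFn copy string, int, float, bool
// and dtype attributes from a NodeDef onto the destination operator.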
  1162. TEST_F(STestTensorflowParser, parse_AutoMappingByOp) {
  1163. static const string KEY_STRING = "key_string";
  1164. static const string KEY_INT = "key_int";
  1165. static const string KEY_FLOAT = "key_float";
  1166. static const string KEY_BOOL = "key_bool";
  1167. static const string KEY_TYPE = "key_type";
  1168. static const string VALUE_STRING = "string";
  1169. static const int64_t VALUE_INT = 1;
  1170. static const float VALUE_FLOAT = 1.0;
  1171. static const bool VALUE_BOOL = true;
  1172. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  1173. static const string VALUE_NAME = "test_name";
  1174. ge::OpDescPtr op_desc = std::make_shared<ge::OpDesc>();
  1175. NodeDef node_def;
  1176. domi::tensorflow::AttrValue value;
  1177. ge::Operator op = ge::OpDescUtils::CreateOperatorFromOpDesc(op_desc);
  1178. node_def.set_name(VALUE_NAME);
  1179. value.set_s(VALUE_STRING);
  1180. TensorFlowUtil::AddNodeAttr(KEY_STRING, value, &node_def);
  1181. value.set_i(VALUE_INT);
  1182. TensorFlowUtil::AddNodeAttr(KEY_INT, value, &node_def);
  1183. value.set_f(VALUE_FLOAT);
  1184. TensorFlowUtil::AddNodeAttr(KEY_FLOAT, value, &node_def);
  1185. value.set_b(VALUE_BOOL);
  1186. TensorFlowUtil::AddNodeAttr(KEY_BOOL, value, &node_def);
  1187. value.set_type(VALUE_TYPE);
  1188. TensorFlowUtil::AddNodeAttr(KEY_TYPE, value, &node_def);
  1189. domi::Status status = domi::AutoMappingFn(reinterpret_cast<google::protobuf::Message *>(&node_def), op);
  1190. EXPECT_EQ(domi::SUCCESS, status);
  1191. EXPECT_EQ(VALUE_NAME, op_desc->GetName());
  1192. string value_string = "";
  1193. ge::AttrUtils::GetStr(op_desc, KEY_STRING, value_string);
  1194. EXPECT_EQ(VALUE_STRING, value_string);
  1195. int64_t value_int = 0;
  1196. ge::AttrUtils::GetInt(op_desc, KEY_INT, value_int);
  1197. EXPECT_EQ(VALUE_INT, value_int);
  1198. float value_float = 0.0;
  1199. ge::AttrUtils::GetFloat(op_desc, KEY_FLOAT, value_float);
  1200. EXPECT_EQ(VALUE_FLOAT, value_float);
  1201. bool value_bool = false;
  1202. ge::AttrUtils::GetBool(op_desc, KEY_BOOL, value_bool);
  1203. EXPECT_EQ(VALUE_BOOL, value_bool);
  1204. ge::DataType data_type = ge::DT_UNDEFINED;
  1205. ge::AttrUtils::GetDataType(op_desc, KEY_TYPE, data_type);
  1206. EXPECT_EQ(ge::DT_FLOAT, data_type);
  1207. // test AutoMappingByOpFn
  1208. ge::OpDescPtr op_desc_dest = std::make_shared<ge::OpDesc>();
  1209. ge::Operator op_dest = ge::OpDescUtils::CreateOperatorFromOpDesc(op_desc_dest);
  1210. status = domi::AutoMappingByOpFn(op, op_dest);
  1211. EXPECT_EQ(domi::SUCCESS, status);
  1212. EXPECT_EQ(VALUE_NAME, op_dest.GetName());
  1213. value_string = "";
  1214. ge::AttrUtils::GetStr(op_desc_dest, KEY_STRING, value_string);
  1215. EXPECT_EQ(VALUE_STRING, value_string);
  1216. value_int = 0;
  1217. ge::AttrUtils::GetInt(op_desc_dest, KEY_INT, value_int);
  1218. EXPECT_EQ(VALUE_INT, value_int);
  1219. value_float = 0.0;
  1220. ge::AttrUtils::GetFloat(op_desc_dest, KEY_FLOAT, value_float);
  1221. EXPECT_EQ(VALUE_FLOAT, value_float);
  1222. value_bool = false;
  1223. ge::AttrUtils::GetBool(op_desc_dest, KEY_BOOL, value_bool);
  1224. EXPECT_EQ(VALUE_BOOL, value_bool);
  1225. data_type = ge::DT_UNDEFINED;
  1226. ge::AttrUtils::GetDataType(op_desc_dest, KEY_TYPE, data_type);
  1227. EXPECT_EQ(ge::DT_FLOAT, data_type);
  1228. }
  1229. TEST_F(STestTensorflowParser, parse_ParseNodeDef)
  1230. {
  1231. NodeDef * node_def = new NodeDef();
  1232. node_def->set_name("test_name");
  1233. node_def->set_op("PlaceholderWithDefault");
  1234. bool isDatasetInit = true;
  1235. TensorFlowModelParser model_parser;
  1236. Status ret = model_parser.AdaptOpType(node_def, isDatasetInit);
  1237. EXPECT_EQ(domi::SUCCESS, ret);
  1238. node_def->set_op("Add");
  1239. ret = model_parser.AdaptOpType(node_def, isDatasetInit);
  1240. EXPECT_EQ(domi::SUCCESS, ret);
  1241. delete node_def;
  1242. }
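// Builds a scope-fusion setup (fusion result, inner NodeDefs, registered custom ops)
// and checks that AddFmkNode returns PARAM_INVALID for the constructed fusion node
// while AddEdges still succeeds on the resulting graph.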
  1243. TEST_F(STestTensorflowParser, parse_AddFmkNode)
  1244. {
  1245. TensorFlowModelParser modelParser;
  1246. std::string caseDir = __FILE__;
  1247. std::size_t idx = caseDir.find_last_of("/");
  1248. caseDir = caseDir.substr(0, idx);
  1249. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  1250. ge::Graph graph;
  1251. string graph_name;
  1252. AclGrphParseUtil acl_graph_parse_util;
  1253. std::map<ge::AscendString, ge::AscendString> parser_options = {{AscendString(ge::ir_option::OUT_NODES), AscendString("Placeholder:0;Placeholder_1:0")}};
  1254. ParerSTestsUtils::ClearParserInnerCtx();
  1255. Status ret = acl_graph_parse_util.ParseParamsBeforeGraph(parser_options, graph_name);
  1256. ret = aclgrphParseTensorFlow(modelFile.c_str(), parser_options, graph);
  1257. ASSERT_EQ(ret, SUCCESS);
  1258. ge::ComputeGraphPtr compute_graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  1259. tensorflow::GraphDef *graphDef = new (std::nothrow) tensorflow::GraphDef();
  1260. ScopePassManager pass_manager;
  1261. std::shared_ptr<ScopeGraph> scope_graph = pass_manager.BuildScopeGraph(graphDef);
  1262. std::string fusion_op_name = "fusion_op_name";
  1263. FusionScopesResult *fusion_rlt = new (std::nothrow) FusionScopesResult();
  1264. EXPECT_NE(fusion_rlt, nullptr);
  1265. fusion_rlt->Init();
  1266. GenFusionScopesResult(scope_graph, fusion_rlt, fusion_op_name);
  1267. GenOriginContext(&modelParser, fusion_op_name);
  1268. // origin inner node def
  1269. NodeDef* node_def = MallocNodeDef("scope_node_1", "Add");
  1270. EXPECT_NE(node_def, nullptr);
  1271. modelParser.fusion_op_nodedef_map_[fusion_op_name].push_back(node_def);
  1272. bool train_flag_backup = ge::GetParserContext().train_flag;
  1273. ge::GetParserContext().train_flag = true;
  1274. REGISTER_CUSTOM_OP("Identity")
  1275. .FrameworkType(domi::TENSORFLOW)
  1276. .OriginOpType("Identity")
  1277. .ParseParamsFn(ParseParams)
  1278. .ImplyType(ImplyType::TVM);
  1279. REGISTER_CUSTOM_OP("Constant")
  1280. .FrameworkType(domi::TENSORFLOW)
  1281. .OriginOpType("Const")
  1282. .ParseParamsFn(ParseParams)
  1283. .ImplyType(ImplyType::TVM);
  1284. register_tbe_op();
  1285. std::vector<std::string> node_name_list;
  1286. GenOriginNodeDef(&modelParser, node_name_list);
  1287. std::set<std::string> malloc_node_name_list(node_name_list.begin(), node_name_list.end());
  1288. node_name_list.push_back(fusion_op_name);
  1289. ret = modelParser.AddFmkNode(compute_graph, scope_graph, node_name_list, false);
  1290. EXPECT_EQ(ret, PARAM_INVALID);
  1291. EXPECT_EQ(modelParser.scope_inner_node_map_.size(), 0);
  1292. EXPECT_EQ(modelParser.nodedef_map_.size(), 5);
  1293. ret = modelParser.AddEdges(compute_graph);
  1294. EXPECT_EQ(ret, SUCCESS);
  1295. // release resource
  1296. delete graphDef;
  1297. delete node_def;
  1298. modelParser.DeleteFuisonNodeDef();
  1299. FreeNodeDefMap(&modelParser, malloc_node_name_list);
  1300. ge::GetParserContext().train_flag = train_flag_backup;
  1301. }
  1302. TEST_F(STestTensorflowParser, parse_AddScopeInnerNode)
  1303. {
  1304. TensorFlowModelParser modelParser;
  1305. std::string caseDir = __FILE__;
  1306. std::size_t idx = caseDir.find_last_of("/");
  1307. caseDir = caseDir.substr(0, idx);
  1308. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  1309. std::string op_name = "ge_ascend_irgraph";
  1310. ge::Graph graph(op_name);
  1311. ge::ComputeGraphPtr compute_graph = ge::GraphUtils::GetComputeGraph(graph);
  1312. std::map<ge::AscendString, ge::AscendString> parser_params = {
  1313. {AscendString(ge::ir_option::OUT_NODES), AscendString("Placeholder:0;Placeholder_1:0")}};
  1314. Status ret = ge::aclgrphParseTensorFlow(modelFile.c_str(), parser_params, graph);
  1315. EXPECT_EQ(ret, SUCCESS);
  1316. std::mutex graph_mutex;
  1317. tensorflow::NodeDef *node_def = initNodeDef();
  1318. node_def->set_name("FastrcnnPredictions");
  1319. node_def->set_op("FastrcnnPredictions");
  1320. // can't find in scope_inner_node_map
  1321. ret = modelParser.AddScopeInnerNode(&modelParser, compute_graph, &graph_mutex, node_def);
  1322. EXPECT_EQ(ret, PARAM_INVALID);
  1323. delete node_def;
  1324. }
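// Enables the ScopeDynamicRNNPass scope-fusion pass through parser options while
// parsing tensor_array.pb.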
1325. TEST_F(STestTensorflowParser, dynamic_rnn_scope_pass_plugin_test) {
  1326. ge::Graph graph;
  1327. std::cout << __FILE__ << std::endl;
  1328. std::string caseDir = __FILE__;
  1329. std::size_t idx = caseDir.find_last_of("/");
  1330. caseDir = caseDir.substr(0, idx);
  1331. std::string modelFile = caseDir + "/origin_models/tensor_array.pb";
  1332. std::map<ge::AscendString, ge::AscendString> params;
  1333. string key ="enable_scope_fusion_passes";
  1334. string value ="ScopeDynamicRNNPass";
  1335. params.insert(std::make_pair(ge::AscendString(key.c_str()), ge::AscendString(value.c_str())));
  1336. auto status = aclgrphParseTensorFlow(modelFile.c_str(), params, graph);
  1337. EXPECT_EQ(status, SUCCESS);
  1338. }
  1339. TEST_F(STestTensorflowParser, avgpool3dgrad_plugin_test_format_NDHWC) {
  1340. ge::Graph graph;
  1341. std::cout << __FILE__ << std::endl;
  1342. std::string caseDir = __FILE__;
  1343. std::size_t idx = caseDir.find_last_of("/");
  1344. caseDir = caseDir.substr(0, idx);
  1345. std::string modelFile = caseDir + "/origin_models/avgpool3dgrad_case_1.pb";
  1346. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1347. EXPECT_EQ(status, SUCCESS);
  1348. }
  1349. TEST_F(STestTensorflowParser, tensorflow_merge_test) {
  1350. ge::Graph graph;
  1351. std::cout << __FILE__ << std::endl;
  1352. std::string caseDir = __FILE__;
  1353. std::size_t idx = caseDir.find_last_of("/");
  1354. caseDir = caseDir.substr(0, idx);
  1355. std::string modelFile = caseDir + "/origin_models/merge.pb";
  1356. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1357. EXPECT_EQ(status, FAILED);
  1358. }
  1359. TEST_F(STestTensorflowParser, tensorflow_no_op_test) {
  1360. ge::Graph graph;
  1361. std::cout << __FILE__ << std::endl;
  1362. std::string caseDir = __FILE__;
  1363. std::size_t idx = caseDir.find_last_of("/");
  1364. caseDir = caseDir.substr(0, idx);
  1365. std::string modelFile = caseDir + "/origin_models/test_no_op.pb";
  1366. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1367. EXPECT_EQ(status, SUCCESS);
  1368. }
  1369. TEST_F(STestTensorflowParser, tensorflow_identity_test) {
  1370. ge::Graph graph;
  1371. std::cout << __FILE__ << std::endl;
  1372. std::string caseDir = __FILE__;
  1373. std::size_t idx = caseDir.find_last_of("/");
  1374. caseDir = caseDir.substr(0, idx);
  1375. std::string modelFile = caseDir + "/origin_models/test_identity.pb";
  1376. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1377. EXPECT_EQ(status, SUCCESS);
  1378. }
  1379. TEST_F(STestTensorflowParser, tensorflow_constant_test) {
  1380. ge::Graph graph;
  1381. std::cout << __FILE__ << std::endl;
  1382. std::string caseDir = __FILE__;
  1383. std::size_t idx = caseDir.find_last_of("/");
  1384. caseDir = caseDir.substr(0, idx);
  1385. std::string modelFile = caseDir + "/origin_models/test_constant.pb";
  1386. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1387. EXPECT_EQ(status, SUCCESS);
  1388. TensorFlowConstantParser constantParser;
  1389. ge::OpDescPtr op_dest = make_shared<ge::OpDesc>("constant", ge::parser::CONSTANT);
  1390. NodeDef* node_def = initNodeDef();
  1391. node_def->set_name("Constant");
  1392. auto params = constantParser.ParseParams(node_def, op_dest);
  1393. EXPECT_EQ(params, SUCCESS);
  1394. auto value = constantParser.ParseValue(node_def, op_dest);
  1395. EXPECT_EQ(value, SUCCESS);
  1396. ConstantOperator op;
  1397. auto type = constantParser.ParseDType(node_def, &op);
  1398. EXPECT_EQ(type, SUCCESS);
  1399. }
1400. TEST_F(STestTensorflowParser, tensorflow_reshape_test) {
  1401. ge::Graph graph;
  1402. std::cout << __FILE__ << std::endl;
  1403. std::string caseDir = __FILE__;
  1404. std::size_t idx = caseDir.find_last_of("/");
  1405. caseDir = caseDir.substr(0, idx);
  1406. std::string modelFile = caseDir + "/origin_models/test_reshape.pb";
  1407. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1408. EXPECT_EQ(status, SUCCESS);
  1409. TensorFlowReshapeParser parser;
  1410. NodeDef * nodeDef = new NodeDef();
  1411. ge::OpDescPtr opdef_ = make_shared<::ge::OpDesc>("","");
  1412. google::protobuf::Map<std::string, tensorflow::AttrValue > *attr_map = nodeDef->mutable_attr();
  1413. domi::tensorflow::AttrValue tshape_attr_value;
  1414. tshape_attr_value.set_type(domi::tensorflow::DT_INT32);
  1415. (*attr_map)[TENSORFLOW_ATTR_TSHAPE] = tshape_attr_value;
  1416. domi::tensorflow::AttrValue t_attr_value;
  1417. t_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  1418. (*attr_map)[TENSORFLOW_ATTR_T] = t_attr_value;
  1419. Status ret = parser.ParseParams(nodeDef, opdef_);
  1420. EXPECT_EQ(domi::SUCCESS, ret);
  1421. delete nodeDef;
  1422. }
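// Exercises TensorFlowSqueezeParser::ParseParams on plain, dims and axis+dims
// NodeDefs, then ParseDesc on a serialized tensor descriptor attribute.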
  1423. TEST_F(STestTensorflowParser, tensorflow_squeeze_test) {
  1424. ge::Graph graph;
  1425. std::cout << __FILE__ << std::endl;
  1426. std::string caseDir = __FILE__;
  1427. std::size_t idx = caseDir.find_last_of("/");
  1428. caseDir = caseDir.substr(0, idx);
  1429. std::string modelFile = caseDir + "/origin_models/test_sequeeze.pb";
  1430. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1431. EXPECT_EQ(status, SUCCESS);
  1432. TensorFlowSqueezeParser parser;
  1433. NodeDef *nodeDef = initNodeDef();
  1434. ge::OpDescPtr opDef = make_shared<::ge::OpDesc>("Squeeze","Squeeze");
  1435. Status ret = parser.ParseParams(nodeDef, opDef);
  1436. EXPECT_EQ(ret, SUCCESS);
  1437. NodeDef *nodeDef_dim = initNodeDef_dims();
  1438. ret = parser.ParseParams(nodeDef_dim, opDef);
  1439. EXPECT_EQ(SUCCESS, ret);
  1440. NodeDef *nodeDef_axis_dims = initNodeDef_axis_dims();
  1441. ret = parser.ParseParams(nodeDef_axis_dims, opDef);
  1442. EXPECT_EQ(GRAPH_PARAM_INVALID, ret);
  1443. static const string KEY_SHAPE_LIST = "key_shape_list";
  1444. static const string KEY_TENSOR_LIST = "key_tensor_list";
  1445. static const string KEY_DEFAULT = "key_default";
  1446. NodeDef *nodeDef2 = new NodeDef();
  1447. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = nodeDef2->mutable_attr();
  1448. domi::tensorflow::AttrValue dtype_attr_value ;
  1449. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  1450. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
1451. // set the axis attribute
  1452. tensorflow::AttrValue axis_attr_value;
  1453. tensorflow::AttrValue_ListValue *list = axis_attr_value.mutable_list();
  1454. list->add_i(1);
  1455. list->add_i(2);
  1456. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  1457. domi::tensorflow::AttrValue value;
  1458. domi::tensorflow::AttrValue df_attr_value;
  1459. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  1460. domi::tensorflow::AttrValue pad_attr_value;
  1461. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  1462. domi::tensorflow::AttrValue shape;
  1463. shape.mutable_list()->add_i((int64)32);
  1464. shape.mutable_list()->add_i((int64)32);
  1465. shape.mutable_list()->add_i((int64)14);
  1466. static const string KEY_TYPE_LIST = "key_type_list";
  1467. const std::string ATTR_NAME_INPUT_TENSOR_DESC = "input_tensor_desc";
  1468. const std::string ATTR_NAME_OUTPUT_TENSOR_DESC = "output_tensor_desc";
  1469. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  1470. value.clear_value();
  1471. value.mutable_list()->add_type(VALUE_TYPE);
  1472. TensorFlowUtil::AddNodeAttr(KEY_TYPE_LIST, value, nodeDef2);
  1473. value.clear_value();
  1474. domi::tensorflow::NameAttrList name_attr_list;
  1475. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  1476. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  1477. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  1478. *(value.mutable_list()->add_func()) = name_attr_list;
  1479. nodeDef2->mutable_attr()->insert({ge::ATTR_NAME_INPUT_TENSOR_DESC, value});
  1480. nodeDef2->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, value});
  1481. ret = parser.ParseParams(nodeDef2, opDef);
  1482. EXPECT_EQ(domi::SUCCESS, ret);
  1483. GeTensorDesc ge_desc;
  1484. ge_desc.SetFormat(ge::FORMAT_C1HWNCoC0);
  1485. ge_desc.SetDataType(ge::DT_FLOAT);
  1486. ge_desc.SetShape(GeShape({1,1,1,1,1,1}));
  1487. ret = parser.ParseDesc(value, ge_desc);
  1488. EXPECT_EQ(ret, SUCCESS);
  1489. delete nodeDef2;
  1490. delete nodeDef_axis_dims;
  1491. delete nodeDef_dim;
  1492. delete nodeDef;
  1493. }
  1494. TEST_F(STestTensorflowParser, tensorflow_fill_test) {
  1495. ge::Graph graph;
  1496. std::cout << __FILE__ << std::endl;
  1497. std::string caseDir = __FILE__;
  1498. std::size_t idx = caseDir.find_last_of("/");
  1499. caseDir = caseDir.substr(0, idx);
  1500. std::string modelFile = caseDir + "/origin_models/test_fill.pb";
  1501. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1502. EXPECT_EQ(status, SUCCESS);
  1503. }
  1504. TEST_F(STestTensorflowParser, tensorflow_shape_n_test) {
  1505. ge::Graph graph;
  1506. std::cout << __FILE__ << std::endl;
  1507. std::string caseDir = __FILE__;
  1508. std::size_t idx = caseDir.find_last_of("/");
  1509. caseDir = caseDir.substr(0, idx);
  1510. std::string modelFile = caseDir + "/origin_models/test_shape_n.pb";
  1511. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1512. EXPECT_EQ(status, SUCCESS);
  1513. }
  1514. TEST_F(STestTensorflowParser, tensorflow_switch_test) {
  1515. ge::Graph graph;
  1516. std::cout << __FILE__ << std::endl;
  1517. std::string caseDir = __FILE__;
  1518. std::size_t idx = caseDir.find_last_of("/");
  1519. caseDir = caseDir.substr(0, idx);
  1520. std::string modelFile = caseDir + "/origin_models/test_switch.pb";
  1521. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1522. EXPECT_EQ(status, SUCCESS);
  1523. TensorFlowRefSwitchParser refSwitchParser;
  1524. ge::OpDescPtr op_dest = make_shared<ge::OpDesc>("constant", ge::parser::CONSTANT);
  1525. NodeDef* node_def = initNodeDef();
  1526. node_def->set_name("RefSwitch");
  1527. auto params = refSwitchParser.ParseParams(node_def, op_dest);
  1528. EXPECT_EQ(params, SUCCESS);
  1529. RefSwitchOperator op;
  1530. auto parseRet = refSwitchParser.ParseT(node_def, &op);
  1531. EXPECT_EQ(parseRet, SUCCESS);
  1532. }
  1533. TEST_F(STestTensorflowParser, tensorflow_enter_test) {
  1534. ge::Graph graph;
  1535. std::cout << __FILE__ << std::endl;
  1536. std::string caseDir = __FILE__;
  1537. std::size_t idx = caseDir.find_last_of("/");
  1538. caseDir = caseDir.substr(0, idx);
  1539. std::string modelFile = caseDir + "/origin_models/test_enter.pb";
  1540. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1541. EXPECT_EQ(status, SUCCESS);
  1542. TensorFlowEnterParser enterParser;
  1543. ge::OpDescPtr op_dest = make_shared<ge::OpDesc>("Enter", ge::parser::ENTER);
  1544. NodeDef* node_def = initNodeDef();
  1545. node_def->set_name("Enter");
  1546. Status ret = enterParser.ParseParams(node_def, op_dest);
  1547. EXPECT_EQ(ret, FAILED);
  1548. static const string KEY_SHAPE_LIST = "key_shape_list";
  1549. static const string KEY_TENSOR_LIST = "key_tensor_list";
  1550. static const string KEY_DEFAULT = "key_default";
  1551. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = node_def->mutable_attr();
  1552. domi::tensorflow::AttrValue dtype_attr_value;
  1553. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  1554. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
1555. // set the axis attribute
  1556. domi::tensorflow::AttrValue axis_attr_value;
  1557. ::tensorflow::AttrValue_ListValue* list = axis_attr_value.mutable_list();
  1558. list->add_i(1);
  1559. list->add_i(2);
  1560. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  1561. domi::tensorflow::AttrValue value;
  1562. domi::tensorflow::AttrValue df_attr_value;
  1563. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  1564. domi::tensorflow::AttrValue pad_attr_value;
  1565. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  1566. domi::tensorflow::AttrValue shape;
  1567. shape.mutable_list()->add_i((int64)32);
  1568. shape.mutable_list()->add_i((int64)32);
  1569. shape.mutable_list()->add_i((int64)14);
  1570. static const string KEY_TYPE_LIST = "key_type_list";
  1571. const std::string ENTER_ATTR_FRAME_NAME = "frame_name";
  1572. const std::string ATTR_NAME_OUTPUT_TENSOR_DESC = "output_tensor_desc";
  1573. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  1574. value.clear_value();
  1575. value.mutable_list()->add_type(VALUE_TYPE);
  1576. TensorFlowUtil::AddNodeAttr(KEY_TYPE_LIST, value, node_def);
  1577. value.clear_value();
  1578. domi::tensorflow::NameAttrList name_attr_list;
  1579. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  1580. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  1581. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  1582. *(value.mutable_list()->add_func()) = name_attr_list;
  1583. node_def->mutable_attr()->insert({ge::ENTER_ATTR_FRAME_NAME, value});
  1584. node_def->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, value});
  1585. ret = enterParser.ParseParams(node_def, op_dest);
  1586. EXPECT_EQ(ret, FAILED);
  1587. }
  1588. TEST_F(STestTensorflowParser, tensorflow_VariableV2_test) {
  1589. ge::Graph graph;
  1590. std::string caseDir = __FILE__;
  1591. std::size_t idx = caseDir.find_last_of("/");
  1592. caseDir = caseDir.substr(0, idx);
  1593. std::string modelFile = caseDir + "/origin_models/test_VariableV2.pb";
  1594. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1595. EXPECT_EQ(status, SUCCESS);
  1596. }
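// Checks TensorFlowFusionOpParser extraction of int, float and half scalars as well
// as weight tensors from Const NodeDefs, including out-of-range index cases.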
  1597. TEST_F(STestTensorflowParser, tensorflow_fusion_op_parser_test)
  1598. {
  1599. TensorFlowFusionOpParser fusionOpParser;
  1600. ge::OpDescPtr op_dest = make_shared<ge::OpDesc>("FusionOp", ge::parser::CONSTANT);
  1601. int index = 0;
  1602. NodeDef* node_def = fusioninitNodeDef(index);
  1603. node_def->set_name("FusionOp");
  1604. auto ret = fusionOpParser.ParseParams(node_def, op_dest);
  1605. EXPECT_EQ(ret, SUCCESS);
  1606. int32_t param = 1;
  1607. ret = fusionOpParser.ParseParamFromConst(node_def, param);
  1608. EXPECT_EQ(ret, SUCCESS);
  1609. ret = fusionOpParser.ParseParamFromConst(node_def, param, index);
  1610. EXPECT_EQ(ret, SUCCESS);
  1611. float params = 0.0;
  1612. ret = fusionOpParser.ParseParamFromConst(node_def, params);
  1613. EXPECT_EQ(ret, SUCCESS);
  1614. index = 2;
  1615. node_def = fusioninitNodeDef(index);
  1616. ret = fusionOpParser.ParseParamFromConst(node_def, params, index);
  1617. EXPECT_EQ(ret, domi::PARAM_INVALID);
  1618. ret = fusionOpParser.ParseHalfFromConst(node_def, params, 0);
  1619. EXPECT_EQ(ret, SUCCESS);
  1620. ret = fusionOpParser.ParseHalfFromConst(node_def, params, 3);
  1621. EXPECT_EQ(ret, domi::PARAM_INVALID);
  1622. node_def = fusioninitNodeDef(0);
  1623. ret = fusionOpParser.ParseHalfFromConst(node_def, params, 3);
  1624. EXPECT_EQ(ret, domi::PARAM_INVALID);
  1625. static const float VALUE_FLOAT = 1.0;
  1626. ge::GeTensorPtr weight = nullptr;
  1627. ret = fusionOpParser.ParseWeightFromConst(node_def, weight);
  1628. EXPECT_EQ(ret, domi::SUCCESS);
  1629. EXPECT_NE(weight, nullptr);
  1630. ge::DataType ge_data_type = weight->GetTensorDesc().GetDataType();
  1631. EXPECT_EQ(ge_data_type, ge::DataType::DT_FLOAT);
  1632. const uint8_t* data_buff = weight->GetData().GetData();
  1633. size_t data_size = weight->GetData().size();
  1634. EXPECT_NE(data_buff, nullptr);
  1635. EXPECT_EQ(data_size, sizeof(float));
  1636. float value_float = *((float*)data_buff);
  1637. EXPECT_EQ(value_float, VALUE_FLOAT);
  1638. delete node_def;
  1639. }
  1640. TEST_F(STestTensorflowParser, tensorflow_auto_mapping_parser_adapter_test)
  1641. {
  1642. ge::OpDescPtr op_dest = nullptr;
  1643. Message *op_src = nullptr;
  1644. TensorFlowAutoMappingParserAdapter autoMappingParser;
  1645. NodeDef* node_def = initNodeDef();
  1646. Status ret = autoMappingParser.ParseParams(op_src, op_dest);
  1647. EXPECT_EQ(ret, PARAM_INVALID);
  1648. ret = autoMappingParser.ParseParams(node_def, op_dest);
  1649. EXPECT_EQ(ret, PARAM_INVALID);
  1650. op_dest = make_shared<ge::OpDesc>("AutoMapping", ge::parser::CONSTANT);
  1651. op_dest->SetType(ge::parser::EMPTY);
  1652. ret = autoMappingParser.ParseParams(node_def, op_dest);
  1653. EXPECT_EQ(ret, SUCCESS);
  1654. op_dest->SetType(ge::parser::IDENTITYN);
  1655. ret = autoMappingParser.ParseParams(node_def, op_dest);
  1656. EXPECT_EQ(ret, SUCCESS);
  1657. op_dest->SetType(ge::parser::SIZE);
  1658. ret = autoMappingParser.ParseParams(node_def, op_dest);
  1659. EXPECT_EQ(ret, SUCCESS);
  1660. op_dest->SetType(ge::parser::SHAPE);
  1661. ret = autoMappingParser.ParseParams(node_def, op_dest);
  1662. EXPECT_EQ(ret, SUCCESS);
  1663. }
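// Registers a FusionCustom op with a FusionParseParamsFn and verifies that the fusion
// custom parser adapter accepts NodeDef inputs but rejects ge::Operator inputs.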
  1664. TEST_F(STestTensorflowParser, tensorflow_fusion_custom_parser_adapter_test)
  1665. {
  1666. REGISTER_CUSTOM_OP("FusionCustom")
  1667. .FrameworkType(domi::TENSORFLOW)
  1668. .OriginOpType("FusionCustom")
  1669. .FusionParseParamsFn(FusionParserParams)
  1670. .ImplyType(ImplyType::TVM);
  1671. register_tbe_op();
  1672. auto graph = std::make_shared<ge::ComputeGraph>("FusionCustom");
  1673. auto op_desc = std::make_shared<ge::OpDesc>("FusionCustom", "FusionCustom");
  1674. auto node = graph->AddNode(op_desc);
  1675. NodeDef *node_def = new NodeDef();
  1676. std::vector<const NodeDef *> v_input_const1;
  1677. v_input_const1.push_back(node_def);
  1678. TensorFlowFusionCustomParserAdapter parser;
  1679. domi::Status status = parser.ParseParams(v_input_const1, node);
  1680. EXPECT_EQ(SUCCESS, status);
  1681. ge::Operator op_src("pool", "pooling");
  1682. std::vector<ge::Operator> v_input_const2;
  1683. v_input_const2.push_back(op_src);
  1684. Status ret = parser.ParseParams(v_input_const2, node);
  1685. EXPECT_EQ(FAILED, ret);
  1686. delete node_def;
  1687. }
  1688. TEST_F(STestTensorflowParser, tensorflow_custom_parser_adapter_test)
  1689. {
  1690. ge::Operator op_src("pool", "pooling");
  1691. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  1692. TensorFlowCustomParserAdapter parser;
  1693. Status ret = parser.ParseParams(op_src, op_dest);
  1694. EXPECT_EQ(ret, FAILED);
  1695. REGISTER_CUSTOM_OP("Variable")
  1696. .FrameworkType(domi::TENSORFLOW)
  1697. .OriginOpType("VariableV2")
  1698. .ParseParamsFn(ParseParams)
  1699. .ParseParamsByOperatorFn(ParseParamByOpFunc)
  1700. .ImplyType(ImplyType::CUSTOM);
  1701. register_tbe_op();
  1702. Operator opSrc(ge::parser::VARIABLE, "VariableV2");
  1703. ret = parser.ParseParams(opSrc, op_dest);
  1704. EXPECT_EQ(ret, SUCCESS);
  1705. }
  1706. TEST_F(STestTensorflowParser, tensorflow_graph_functiondef_FindAttrValue_test)
  1707. {
  1708. GraphToFunctionDef functionDef;
  1709. NodeDef *node_def = nullptr;
  1710. std::string attr_name = "Const";
  1711. tensorflow::AttrValue attr_value;
  1712. bool ret = functionDef.FindAttrValue(node_def, attr_name, attr_value);
  1713. EXPECT_EQ(ret, false);
  1714. node_def = initNodeDef();
  1715. attr_name = ge::ATTR_NAME_INPUT_TENSOR_DESC;
  1716. node_def->set_name("Const");
  1717. ret = functionDef.FindAttrValue(node_def, attr_name, attr_value);
  1718. EXPECT_EQ(ret, false);
  1719. }
  1720. TEST_F(STestTensorflowParser, tensorflow_graph_functiondef_BuildFunctionDef_test)
  1721. {
  1722. ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  1723. string inputNodeType = "DATA";
  1724. MakeDagGraph(subGraph, inputNodeType);
  1725. FunctionDefLibrary library;
  1726. tensorflow::NodeDef call_node_def;
  1727. call_node_def.set_op("fusionop");
  1728. call_node_def.set_name("fusionop");
  1729. vector<ge::InDataAnchorPtr> in_anchor;
  1730. vector<ge::OutDataAnchorPtr> out_anchor;
  1731. for (ge::NodePtr node : subGraph->GetAllNodes()) {
  1732. for (auto in : node->GetAllInDataAnchors()) {
  1733. if (in->GetPeerOutAnchor() != nullptr && in->GetPeerOutAnchor()->GetOwnerNode()->GetOpDesc()->GetType() == parser::DATA) {
  1734. in_anchor.push_back(in);
  1735. }
  1736. }
  1737. for (auto out : node->GetAllOutDataAnchors()) {
  1738. for (auto i : out->GetPeerInDataAnchors()) {
  1739. if (i->GetOwnerNode()->GetOpDesc()->GetType() == parser::NETOUTPUT) {
  1740. out_anchor.push_back(out);
  1741. }
  1742. }
  1743. }
  1744. }
  1745. Status ret = GraphToFunctionDef::BuildFunctionDef(subGraph,
  1746. "fusionop",
  1747. &library,
  1748. &call_node_def,
  1749. in_anchor,
  1750. out_anchor);
  1751. EXPECT_EQ(domi::INTERNAL_ERROR, ret);
  1752. }
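// Validates CheckOpShapeDim on a NodeDef before and after serialized tensor
// descriptor attributes are attached.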
  1753. TEST_F(STestTensorflowParser, tensorflow_CheckOpShapeDim_test)
  1754. {
  1755. NodeDef *node_def = initNodeDef();
  1756. std::set<int> dims;
  1757. dims.insert(1);
  1758. dims.insert(2);
  1759. bool valid = true;
  1760. TensorFlowModelParser parser;
  1761. Status ret = parser.CheckOpShapeDim(node_def, dims, valid);
  1762. EXPECT_EQ(ret, SUCCESS);
  1763. static const string KEY_SHAPE_LIST = "key_shape_list";
  1764. static const string KEY_TENSOR_LIST = "key_tensor_list";
  1765. static const string KEY_DEFAULT = "key_default";
  1766. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = node_def->mutable_attr();
  1767. domi::tensorflow::AttrValue dtype_attr_value;
  1768. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  1769. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
1770. // set the axis attribute
  1771. domi::tensorflow::AttrValue axis_attr_value;
  1772. ::tensorflow::AttrValue_ListValue* list = axis_attr_value.mutable_list();
  1773. list->add_i(1);
  1774. list->add_i(2);
  1775. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  1776. domi::tensorflow::AttrValue value;
  1777. domi::tensorflow::AttrValue df_attr_value;
  1778. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  1779. domi::tensorflow::AttrValue pad_attr_value;
  1780. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  1781. domi::tensorflow::AttrValue shape;
  1782. shape.mutable_list()->add_i((int64)32);
  1783. shape.mutable_list()->add_i((int64)32);
  1784. shape.mutable_list()->add_i((int64)14);
  1785. static const string KEY_TYPE_LIST = "key_type_list";
  1786. const std::string ATTR_NAME_INPUT_TENSOR_DESC = "input_tensor_desc";
  1787. const std::string ATTR_NAME_OUTPUT_TENSOR_DESC = "output_tensor_desc";
  1788. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  1789. value.clear_value();
  1790. value.mutable_list()->add_type(VALUE_TYPE);
  1791. TensorFlowUtil::AddNodeAttr(KEY_TYPE_LIST, value, node_def);
  1792. value.clear_value();
  1793. domi::tensorflow::NameAttrList name_attr_list;
  1794. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  1795. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  1796. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  1797. *(value.mutable_list()->add_func()) = name_attr_list;
  1798. node_def->mutable_attr()->insert({ge::ATTR_NAME_INPUT_TENSOR_DESC, value});
  1799. node_def->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, value});
  1800. ret = parser.CheckOpShapeDim(node_def, dims, valid);
  1801. EXPECT_EQ(ret, SUCCESS);
  1802. }
  1803. TEST_F(STestTensorflowParser, tensorflow_Scope_pass_test)
  1804. {
  1805. ScopePassManager passmanager;
  1806. auto scope_graph = ge::parser::MakeShared<ge::ScopeGraph>();
  1807. if (scope_graph == nullptr) {
  1808. GELOGE(FAILED, "Scope graph make shared failed.");
  1809. return;
  1810. }
  1811. if (scope_graph->Init() != SUCCESS) {
  1812. GELOGE(FAILED, "Scope graph init failed.");
  1813. return;
  1814. }
  1815. ge::TensorFlowModelParser tf_model_parser;
  1816. std::vector<string> scope_passes_list = {"ScopeBasicLSTMCellPass", "ScopeLayerNormPass"};
  1817. Status ret = tf_model_parser.RunScopeFusionPass(scope_passes_list, passmanager, scope_graph);
  1818. EXPECT_NE(ge::SUCCESS, ret);
  1819. }
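// Creates Variable / TemporaryVariable op parsers through OpParserFactory and checks
// ParseParams on bare and pre-populated NodeDefs.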
  1820. TEST_F(STestTensorflowParser, tensorflow_variable_v2_parser_test)
  1821. {
  1822. TensorFlowCustomParserAdapter parser;
  1823. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  1824. NodeDef *node_def = initNodeDef();
  1825. TensorFlowModelParser modelParser;
  1826. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  1827. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("Variable");
  1828. shared_ptr<TensorFlowOpParser> tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1829. Status ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1830. EXPECT_EQ(ret, PARAM_INVALID);
  1831. node_def->set_name("TemporaryVariable");
  1832. node_def->set_op("TemporaryVariable");
  1833. op_parser = factory->CreateOpParser("TemporaryVariable");
  1834. tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1835. ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1836. EXPECT_EQ(ret, PARAM_INVALID);
  1837. NodeDef *nodeDef_temporaryVariable = initOpNodeDef_TemporaryVariable();
  1838. op_parser = factory->CreateOpParser("TemporaryVariable");
  1839. tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1840. ret = tensorflow_op_parser->ParseParams(nodeDef_temporaryVariable, op_dest);
  1841. EXPECT_EQ(ret, SUCCESS);
  1842. NodeDef *nodeDef_VariableV2 = initOpNodeDef_VariableV2();
  1843. op_parser = factory->CreateOpParser("Variable");
  1844. tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1845. ret = tensorflow_op_parser->ParseParams(nodeDef_VariableV2, op_dest);
  1846. EXPECT_EQ(ret, SUCCESS);
  1847. }
  1848. TEST_F(STestTensorflowParser, tensorflow_var_is_initialized_op_test)
  1849. {
  1850. TensorFlowCustomParserAdapter parser;
  1851. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  1852. NodeDef *node_def = initNodeDef();
  1853. TensorFlowModelParser modelParser;
  1854. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  1855. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("VarIsInitializedOp");
  1856. shared_ptr<TensorFlowOpParser> tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1857. Status ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1858. EXPECT_EQ(ret, SUCCESS);
  1859. }
  1860. TEST_F(STestTensorflowParser, tensorflow_arg_parser_test)
  1861. {
  1862. TensorFlowCustomParserAdapter parser;
  1863. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  1864. NodeDef *node_def = initNodeDef();
  1865. TensorFlowModelParser modelParser;
  1866. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  1867. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("_Arg");
  1868. shared_ptr<TensorFlowOpParser> tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1869. Status ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1870. EXPECT_EQ(ret, SUCCESS);
  1871. static const string KEY_SHAPE_LIST = "key_shape_list";
  1872. static const string KEY_TENSOR_LIST = "key_tensor_list";
  1873. static const string KEY_DEFAULT = "key_default";
  1874. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = node_def->mutable_attr();
  1875. domi::tensorflow::AttrValue dtype_attr_value;
  1876. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  1877. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
1878. // set the axis attribute
  1879. domi::tensorflow::AttrValue axis_attr_value;
  1880. ::tensorflow::AttrValue_ListValue* list = axis_attr_value.mutable_list();
  1881. list->add_i(1);
  1882. list->add_i(2);
  1883. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  1884. domi::tensorflow::AttrValue value;
  1885. domi::tensorflow::AttrValue df_attr_value;
  1886. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  1887. domi::tensorflow::AttrValue pad_attr_value;
  1888. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  1889. domi::tensorflow::AttrValue shape;
  1890. shape.mutable_list()->add_i((int64)32);
  1891. shape.mutable_list()->add_i((int64)32);
  1892. shape.mutable_list()->add_i((int64)14);
  1893. static const string KEY_TYPE_LIST = "key_type_list";
  1894. const std::string ATTR_NAME_INPUT_TENSOR_DESC = "input_tensor_desc";
  1895. const std::string ATTR_NAME_OUTPUT_TENSOR_DESC = "output_tensor_desc";
  1896. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  1897. value.clear_value();
  1898. value.mutable_list()->add_type(VALUE_TYPE);
  1899. TensorFlowUtil::AddNodeAttr(KEY_TYPE_LIST, value, node_def);
  1900. value.clear_value();
  1901. domi::tensorflow::NameAttrList name_attr_list;
  1902. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  1903. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  1904. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  1905. *(value.mutable_list()->add_func()) = name_attr_list;
  1906. node_def->mutable_attr()->insert({ge::ATTR_NAME_INPUT_TENSOR_DESC, value});
  1907. node_def->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, value});
  1908. ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1909. EXPECT_EQ(ret, SUCCESS);
  1910. }
  1911. TEST_F(STestTensorflowParser, tensorflow_frameworkop_parser_test1)
  1912. {
  1913. TensorFlowCustomParserAdapter parser;
  1914. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  1915. NodeDef *node_def = initNodeDef();
  1916. TensorFlowModelParser modelParser;
  1917. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  1918. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("FrameworkOp");
  1919. shared_ptr<TensorFlowOpParser> tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1920. Status ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1921. EXPECT_EQ(ret, PARAM_INVALID);
  1922. ChangeDataType(node_def, tensorflow::DT_UINT16);
  1923. ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1924. EXPECT_EQ(ret, PARAM_INVALID);
  1925. }
  1926. TEST_F(STestTensorflowParser, tensorflow_frameworkop_parser_test2)
  1927. {
  1928. TensorFlowCustomParserAdapter parser;
  1929. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  1930. NodeDef *node_def = initNodeDef();
  1931. node_def->set_name("FrameworkOp");
  1932. node_def->set_op("_Retval");
  1933. TensorFlowModelParser modelParser;
  1934. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  1935. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("FrameworkOp");
  1936. shared_ptr<TensorFlowOpParser> tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1937. static const string KEY_SHAPE_LIST = "key_shape_list";
  1938. static const string KEY_TENSOR_LIST = "key_tensor_list";
  1939. static const string KEY_DEFAULT = "key_default";
  1940. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = node_def->mutable_attr();
  1941. domi::tensorflow::AttrValue dtype_attr_value;
  1942. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  1943. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
1944. // set the axis attribute
  1945. domi::tensorflow::AttrValue axis_attr_value;
  1946. ::tensorflow::AttrValue_ListValue* list = axis_attr_value.mutable_list();
  1947. list->add_i(1);
  1948. list->add_i(2);
  1949. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  1950. domi::tensorflow::AttrValue value;
  1951. domi::tensorflow::AttrValue df_attr_value;
  1952. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  1953. domi::tensorflow::AttrValue pad_attr_value;
  1954. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  1955. domi::tensorflow::AttrValue shape;
  1956. shape.mutable_list()->add_i((int64)32);
  1957. shape.mutable_list()->add_i((int64)32);
  1958. shape.mutable_list()->add_i((int64)14);
  1959. static const string KEY_TYPE_LIST = "key_type_list";
  1960. const std::string ATTR_NAME_INPUT_TENSOR_DESC = "ATTR_NAME_FRAMEWORK_OP_DEF";
  1961. const std::string ATTR_NAME_OUTPUT_TENSOR_DESC = "output_tensor_desc";
  1962. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  1963. value.clear_value();
  1964. value.mutable_list()->add_type(VALUE_TYPE);
  1965. TensorFlowUtil::AddNodeAttr(KEY_TYPE_LIST, value, node_def);
  1966. value.clear_value();
  1967. domi::tensorflow::NameAttrList name_attr_list;
  1968. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  1969. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  1970. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  1971. *(value.mutable_list()->add_func()) = name_attr_list;
  1972. node_def->mutable_attr()->insert({ge::ATTR_NAME_INPUT_TENSOR_DESC, value});
  1973. node_def->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, value});
  1974. Status ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1975. EXPECT_EQ(ret, SUCCESS);
  1976. }
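// Reshape parser coverage: ParseParams on a default NodeDef, then
// TensorFlowReshapeParser::ParseDesc on an AttrValue whose func list carries the
// serialize_* NameAttrList, written into a C1HWNCoC0 float GeTensorDesc.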
  1977. TEST_F(STestTensorflowParser, tensorflow_reshape_parser_test)
  1978. {
  1979. TensorFlowCustomParserAdapter parser;
  1980. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  1981. NodeDef *node_def = initNodeDef();
  1982. TensorFlowModelParser modelParser;
  1983. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  1984. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("Reshape");
  1985. shared_ptr<TensorFlowOpParser> tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1986. Status ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1987. EXPECT_EQ(ret, SUCCESS);
  1988. NodeDef * nodeDef = new NodeDef();
  1989. nodeDef->set_op("Reshape");
  1990. google::protobuf::Map< ::std::string, ::tensorflow::AttrValue >* node_attr_map = nodeDef->mutable_attr();
  1991. domi::tensorflow::AttrValue attr_value;
  1992. attr_value.mutable_list()->add_i((int64)32);
  1993. attr_value.mutable_list()->add_i((int64)32);
  1994. attr_value.mutable_list()->add_i((int64)14);
  1995. domi::tensorflow::AttrValue df_attr_value2;
  1996. df_attr_value2.set_s(TENSORFLOWF_TENSOR_NHWC);
  1997. (*node_attr_map)[TENSORFLOW_ATTR_DATA_FORMAT] = df_attr_value2;
  1998. domi::tensorflow::AttrValue df_attr_value;
  1999. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
2000. // Set the padding attribute
  2001. domi::tensorflow::AttrValue pad_attr_value2;
  2002. pad_attr_value2.set_s(TENSORFLOWF_OP_PADDING_SAME);
  2003. (*node_attr_map)[TENSORFLOW_ATTR_PADDING] = pad_attr_value2;
  2004. domi::tensorflow::AttrValue pad_attr_value;
  2005. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  2006. domi::tensorflow::NameAttrList name_attr_list;
  2007. name_attr_list.mutable_attr()->insert({"serialize_shape", attr_value});
  2008. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  2009. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  2010. *(attr_value.mutable_list()->add_func()) = name_attr_list;
  2011. GeTensorDesc ge_desc;
  2012. ge_desc.SetFormat(ge::FORMAT_C1HWNCoC0);
  2013. ge_desc.SetDataType(ge::DT_FLOAT);
  2014. ge_desc.SetShape(GeShape({1,1,1,1,1,1}));
  2015. TensorFlowReshapeParser reshapeParser;
  2016. ret = reshapeParser.ParseDesc(attr_value, ge_desc);
  2017. EXPECT_EQ(ret, SUCCESS);
  2018. }
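// ---------------------------------------------------------------------------
// Hedged sketch, not part of the original file: the tests above hand-build the
// same serialize_datatype / serialize_format / serialize_shape NameAttrList each
// time. A helper along these lines could factor that out; the attr keys and
// protobuf calls are taken from the tests themselves, while the function name
// and signature are assumptions for illustration only.
static domi::tensorflow::AttrValue BuildSerializedDescAttr(int64_t data_type, int64_t format,
                                                           const std::vector<int64_t> &dims) {
  domi::tensorflow::AttrValue dtype_attr;
  dtype_attr.set_i(data_type);
  domi::tensorflow::AttrValue format_attr;
  format_attr.set_i(format);
  domi::tensorflow::AttrValue shape_attr;
  for (const int64_t dim : dims) {
    shape_attr.mutable_list()->add_i(dim);
  }
  domi::tensorflow::NameAttrList name_attr_list;
  name_attr_list.mutable_attr()->insert({"serialize_datatype", dtype_attr});
  name_attr_list.mutable_attr()->insert({"serialize_format", format_attr});
  name_attr_list.mutable_attr()->insert({"serialize_shape", shape_attr});
  domi::tensorflow::AttrValue value;
  *(value.mutable_list()->add_func()) = name_attr_list;
  return value;
}
// ---------------------------------------------------------------------------
// The next test drives DefunToPartitionedCall: it fails on a plain ShapeN node,
// then succeeds once the node is renamed to the fusion op prepared by
// GenOriginContext and carries the serialized descriptor attrs.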
  2019. TEST_F(STestTensorflowParser, tensorflow_DefunToPartitionedCall_parser_test)
  2020. {
  2021. TensorFlowModelParser parser;
  2022. NodeDef *node_def = initNodeDef();
  2023. node_def->set_name("ShapeN");
  2024. ge::OpDescPtr op = make_shared<ge::OpDesc>("ShapeN", ge::parser::PARTITIONEDCALL);
  2025. Status ret = parser.DefunToPartitionedCall(node_def, op);
  2026. EXPECT_EQ(ret, FAILED);
  2027. static const string KEY_SHAPE_LIST = "key_shape_list";
  2028. static const string KEY_TENSOR_LIST = "key_tensor_list";
  2029. static const string KEY_DEFAULT = "key_default";
  2030. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = node_def->mutable_attr();
  2031. domi::tensorflow::AttrValue dtype_attr_value;
  2032. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  2033. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
2034. // Set the strides attribute
  2035. domi::tensorflow::AttrValue axis_attr_value;
  2036. ::tensorflow::AttrValue_ListValue* list = axis_attr_value.mutable_list();
  2037. list->add_i(1);
  2038. list->add_i(2);
  2039. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  2040. domi::tensorflow::AttrValue value;
  2041. domi::tensorflow::AttrValue df_attr_value;
  2042. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  2043. domi::tensorflow::AttrValue pad_attr_value;
  2044. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  2045. domi::tensorflow::AttrValue shape;
  2046. shape.mutable_list()->add_i((int64)32);
  2047. shape.mutable_list()->add_i((int64)32);
  2048. shape.mutable_list()->add_i((int64)14);
  2049. static const string KEY_TYPE_LIST = "key_type_list";
  2050. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  2051. value.clear_value();
  2052. value.mutable_list()->add_type(VALUE_TYPE);
  2053. TensorFlowUtil::AddNodeAttr(KEY_TYPE_LIST, value, node_def);
  2054. value.clear_value();
  2055. domi::tensorflow::NameAttrList name_attr_list;
  2056. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  2057. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  2058. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  2059. *(value.mutable_list()->add_func()) = name_attr_list;
  2060. node_def->mutable_attr()->insert({"_disable_call_shape_inference", value});
  2061. node_def->mutable_attr()->insert({"_disable_call_shape_inference", value});
  2062. std::string fusion_op_name = "pre_node_a";
  2063. GenOriginContext(&parser, fusion_op_name);
  2064. node_def->set_name("pre_node_a");
  2065. ret = parser.DefunToPartitionedCall(node_def, op);
  2066. EXPECT_EQ(ret, SUCCESS);
  2067. }
  2068. TEST_F(STestTensorflowParser, tensorflow_TransNodeToOpDesc_parser_test)
  2069. {
  2070. TensorFlowModelParser parser;
  2071. NodeDef *node_def = initNodeDef();
  2072. node_def->set_name("ge::parser::DATA");
  2073. std::string op_type = "ge::parser::DATA";
  2074. ge::OpDescPtr op = make_shared<ge::OpDesc>("constant", ge::parser::CONSTANT);
  2075. Status ret = parser.TransNodeToOpDesc(node_def, op, op_type);
  2076. EXPECT_EQ(ret, FAILED);
  2077. }
  2078. domi::Status fusion_parse_param_by_op(const std::vector<ge::Operator> &op_src, ge::Operator &op) {
  2079. return domi::SUCCESS;
  2080. }
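// fusion_parse_param_by_op above is a stub FusionParseParamByOpFunc that always returns
// SUCCESS; the next test records a FusionCustom NodeDef in fusion_op_nodedef_map_ and
// expects FusionNodeParseParams with the fusion op parser to succeed.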
  2081. TEST_F(STestTensorflowParser, Fusion_node_parse_params_success) {
  2082. ge::ComputeGraphPtr compute_graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2083. ModelParserFactory* factory = ModelParserFactory::Instance();
  2084. shared_ptr<ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2085. ASSERT_TRUE(NULL != model_parser);
  2086. TensorFlowModelParser tensorflow_parser;
  2087. domi::tensorflow::NodeDef node_def;
  2088. node_def.set_name("data");
  2089. node_def.set_op("FusionCustom");
  2090. FusionParseParamByOpFunc function = fusion_parse_param_by_op;
  2091. shared_ptr<ge::OpParserFactory> op_parser = ge::OpParserFactory::Instance(domi::TENSORFLOW);
  2092. shared_ptr<OpParser> fusion_op_parser = op_parser->CreateFusionOpParser("FusionCustom");
  2093. ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2094. ge::OpDescPtr op1 = std::make_shared<ge::OpDesc>("data", "FusionCustom");
  2095. ge::NodePtr node1 = std::make_shared<ge::Node>(op1, graph);
  2096. vector<const NodeDef *> node_defs;
  2097. node_defs.push_back(&node_def);
  2098. tensorflow_parser.fusion_op_nodedef_map_["data"] = node_defs;
  2099. Status ret = tensorflow_parser.FusionNodeParseParams(fusion_op_parser, &node_def, node1);
  2100. EXPECT_EQ(domi::SUCCESS, ret);
  2101. }
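// RecordFusionResult: a ScopeGraph is seeded with a FusionScopesResult named "OP" holding
// one Operator node, and recording the fusion result for a NodeDef of the same name is
// expected to succeed.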
  2102. TEST_F(STestTensorflowParser, Tensorflow_recordFusionResult_parser_test)
  2103. {
  2104. auto scope_graph = ge::parser::MakeShared<ge::ScopeGraph>();
  2105. if (scope_graph == nullptr) {
  2106. GELOGE(FAILED, "Scope graph make shared failed.");
  2107. return;
  2108. }
  2109. if (scope_graph->Init() != SUCCESS) {
  2110. GELOGE(FAILED, "Scope graph init failed.");
  2111. return;
  2112. }
  2113. domi::tensorflow::NodeDef node_def;
  2114. node_def.set_name("OP");
  2115. FusionScopesResult *fusion_scope_rlt = new (std::nothrow) FusionScopesResult();
  2116. if (fusion_scope_rlt == nullptr) {
  2117. GELOGE(FAILED, "FusionScopesResult make shared failed.");
  2118. return;
  2119. }
  2120. fusion_scope_rlt->Init();
  2121. fusion_scope_rlt->SetName("OP");
  2122. auto &impl_scope_graph = scope_graph->impl_;
  2123. std::string scope_name = fusion_scope_rlt->Name();
  2124. impl_scope_graph->fusion_results_.insert(std::make_pair(scope_name, fusion_scope_rlt));
  2125. std::vector<ge::OperatorPtr> nodes;
  2126. ge::OperatorPtr op = ge::parser::MakeShared<ge::Operator>("op_name", "op_type");
  2127. if (op == nullptr) {
  2128. GELOGE(FAILED, "Operator make shared failed.");
  2129. return;
  2130. }
  2131. nodes.push_back(op);
  2132. fusion_scope_rlt->impl_->AddNodes(nodes);
  2133. ge::OpDescPtr opDesc = std::make_shared<ge::OpDesc>();
  2134. ge::TensorFlowModelParser tf_model_parser;
  2135. Status ret = tf_model_parser.RecordFusionResult(scope_graph, &node_def, opDesc);
  2136. EXPECT_EQ(SUCCESS, ret);
  2137. }
  2138. TEST_F(STestTensorflowParser, Tensorflow_UpdateFusionOpContext_test)
  2139. {
  2140. ModelParserFactory* factory = ModelParserFactory::Instance();
  2141. shared_ptr<domi::ModelParser> model_parser = factory->CreateModelParser(domi::TENSORFLOW);
  2142. TensorFlowModelParser tensorflow_parser;
  2143. ScopeFusionOpInfo info;
  2144. ge::OpNodeContext normal_op_node_context;
  2145. ge::OpNodeContext fusion_op_node_context;
2146. /* 1. Preconditions */
  2147. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2148. ScopePassManager passmanager;
  2149. shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  2150. NodeDef * node1 = graph->add_node();
  2151. node1->set_name("conv_conv5/BatchNorm/batchnorm/add");
  2152. node1->set_op("Add");
  2153. node1->add_input("conv_conv5/BatchNorm/moving_variance");
  2154. node1->add_input("conv_conv5/BatchNorm/batchnorm/add/y");
  2155. NodeDef * node2 = graph->add_node();
  2156. node2->set_name("conv_conv5/BatchNorm/moving_variance");
  2157. node2->set_op("Const");
  2158. NodeDef * node3 = graph->add_node();
  2159. node3->set_name("conv_conv5/BatchNorm/batchnorm/add/y");
  2160. node3->set_op("Const");
  2161. info.fusion_node_name = "conv_conv5/BatchNorm/batchnorm";
  2162. info.fusion_op_type = ge::parser::FUSIONBATCHNORM;
  2163. info.node_name = "conv_conv5/BatchNorm/batchnorm/add";
  2164. info.description = "";
  2165. info.scope_pass = false;
  2166. EXPECT_EQ(scope_graph->impl_->GetFusionScopesResults(nullptr), nullptr);
  2167. EXPECT_EQ(scope_graph->impl_->GetFusionScopesResults(node1), nullptr);
  2168. Status ret = tensorflow_parser.UpdateFusionOpContext(scope_graph, info, fusion_op_node_context, normal_op_node_context);
  2169. EXPECT_EQ(ret, domi::SUCCESS);
  2170. delete graph;
  2171. }
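// Scope-pass index remapping: a dynamic_rnn FusionScopesResult maps scope inputs/outputs to
// new indices; FusionOpChildIgnore distinguishes boundary nodes from inner ones, and
// GetInPutIndex / GetOutPutIndex are expected to translate old index 0 to the remapped
// values (0 for fw/fw/ToInt32, 1 for fw/fw/while/Exit_3).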
  2172. TEST_F(STestTensorflowParser, Tensorflow_GetInOutPutIndex_scope_pass)
  2173. {
  2174. ModelParserFactory* factory = ModelParserFactory::Instance();
  2175. shared_ptr<domi::ModelParser> model_parser = factory->CreateModelParser(domi::TENSORFLOW);
  2176. TensorFlowModelParser tensorflow_parser;
  2177. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2178. ScopePassManager passmanager;
  2179. shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  2180. FusionScopesResult* fusion_rlt = new FusionScopesResult();
  2181. fusion_rlt->Init();
2182. fusion_rlt->impl_->inputs_.insert(std::make_pair<string, vector<int32_t>>("fw/fw/ToInt32", {0}));
2183. fusion_rlt->impl_->inputs_.insert(std::make_pair<string, vector<int32_t>>("bw/bw/ToInt32", {0}));
2184. fusion_rlt->impl_->inputs_.insert(std::make_pair<string, vector<int32_t>>("bw/ReverseSequence", {0, 1}));
2185. fusion_rlt->impl_->inputs_.insert(std::make_pair<string, vector<int32_t>>("bw/ReverseSequence", {1}));
2186. fusion_rlt->impl_->outputs_.insert(std::make_pair<string, vector<int32_t>>("concat", {0}));
2187. fusion_rlt->impl_->outputs_.insert(std::make_pair<string, vector<int32_t>>("fw/fw/while/Exit_3", {1}));
2188. fusion_rlt->impl_->outputs_.insert(std::make_pair<string, vector<int32_t>>("fw/fw/while/Exit_4", {2}));
2189. fusion_rlt->impl_->outputs_.insert(std::make_pair<string, vector<int32_t>>("bw/bw/while/Exit_3", {3}));
2190. fusion_rlt->impl_->outputs_.insert(std::make_pair<string, vector<int32_t>>("bw/bw/while/Exit_4", {4}));
  2191. fusion_rlt->SetType("dynamic_rnn");
  2192. fusion_rlt->SetName("dynamic_rnn_node1");
  2193. scope_graph->impl_->AddFusionScopesResult(fusion_rlt);
  2194. ScopeFusionOpInfo info1;
  2195. info1.node_name = "fw/fw/ToInt32";
  2196. info1.fusion_node_name = "dynamic_rnn_node1";
  2197. info1.fusion_op_type = "dynamic_rnn";
  2198. info1.description = "";
  2199. info1.scope_pass = true;
  2200. bool ignore = false;
  2201. ignore = tensorflow_parser.FusionOpChildIgnore(scope_graph, info1);
  2202. EXPECT_EQ(true, !ignore);
  2203. ScopeFusionOpInfo info2;
  2204. info2.node_name = "fw/fw/others";
  2205. info2.fusion_node_name = "dynamic_rnn_node1";
  2206. info2.fusion_op_type = "dynamic_rnn";
  2207. info2.description = "";
  2208. info2.scope_pass = true;
  2209. ignore = tensorflow_parser.FusionOpChildIgnore(scope_graph, info2);
  2210. EXPECT_EQ(true, ignore);
  2211. ScopeFusionOpInfo input_node_info;
  2212. input_node_info.node_name = "fw/fw/ToInt32";
  2213. input_node_info.fusion_node_name = "dynamic_rnn_node1";
  2214. input_node_info.fusion_op_type = "dynamic_rnn";
  2215. input_node_info.description = "";
  2216. input_node_info.scope_pass = true;
  2217. ScopeFusionOpInfo output_node_info;
  2218. output_node_info.node_name = "fw/fw/while/Exit_3";
  2219. output_node_info.fusion_node_name = "dynamic_rnn_node1";
  2220. output_node_info.fusion_op_type = "dynamic_rnn";
  2221. output_node_info.description = "";
  2222. output_node_info.scope_pass = true;
  2223. int32_t old_index = 0, new_index = -1;
  2224. Status ret = tensorflow_parser.GetInPutIndex(scope_graph, input_node_info, old_index, new_index);
  2225. EXPECT_EQ(domi::SUCCESS, ret);
  2226. EXPECT_EQ(true, (new_index == 0));
  2227. ret = tensorflow_parser.GetOutPutIndex(scope_graph, output_node_info, old_index, new_index);
  2228. EXPECT_EQ(domi::SUCCESS, ret);
  2229. EXPECT_EQ(true, (new_index == 1));
  2230. delete graph;
  2231. }
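// AddFusionNodeDef: a scope fusion op named "dropout" with one inner Add node is
// registered; after the call the fused NodeDef should appear in nodedef_map_ with the
// fusion op type "Dropout".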
  2232. TEST_F(STestTensorflowParser, Tensorflow_AddFusionNodeDef_add_fusion_op_succ)
  2233. {
  2234. ModelParserFactory* factory = ModelParserFactory::Instance();
  2235. shared_ptr<domi::ModelParser> model_parser = factory->CreateModelParser(domi::TENSORFLOW);
  2236. TensorFlowModelParser tensorflow_parser;
  2237. string fusion_op_name = "dropout";
  2238. string fusion_op_type = "Dropout";
  2239. string description = "test/dropout";
  2240. tensorflow_parser.fusion_op_type_map_[fusion_op_name].push_back(fusion_op_type);
  2241. tensorflow_parser.fusion_op_type_map_[fusion_op_name].push_back(description);
  2242. // op_node_context for fusion op
  2243. ge::OpNodeContext op_node_context;
  2244. op_node_context.input_map["pre_node_a"].push_back({0, 0});
  2245. op_node_context.input_map["pre_node_b"].push_back({0, 1});
  2246. tensorflow_parser.op_node_context_map_[fusion_op_name] = op_node_context;
  2247. // origin inner node def
  2248. NodeDef* node_def = new (std::nothrow) NodeDef();
  2249. node_def->set_name("scope_node_1");
  2250. node_def->set_op("Add");
  2251. tensorflow_parser.fusion_op_nodedef_map_[fusion_op_name].push_back(node_def);
  2252. ScopePassManager pass_manager;
  2253. tensorflow::GraphDef *graph = new (std::nothrow) tensorflow::GraphDef();
  2254. shared_ptr<ScopeGraph> scope_graph = pass_manager.BuildScopeGraph(graph);
  2255. vector<string> node_name_list = {fusion_op_name};
  2256. Status ret = tensorflow_parser.AddFusionNodeDef(scope_graph, node_name_list);
  2257. EXPECT_EQ(ret, SUCCESS);
  2258. EXPECT_EQ(tensorflow_parser.nodedef_map_.size(), 1);
  2259. auto fusion_node_def = tensorflow_parser.nodedef_map_[fusion_op_name];
  2260. EXPECT_NE(fusion_node_def, nullptr);
  2261. EXPECT_EQ(fusion_node_def->op(), fusion_op_type);
  2262. delete node_def;
  2263. delete graph;
  2264. tensorflow_parser.DeleteFuisonNodeDef();
  2265. }
  2266. TEST_F(STestTensorflowParser, remain_dpop_node)
  2267. {
  2268. ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2269. ge::OpDescPtr op = std::make_shared<ge::OpDesc>("dpop_123", "FrameworkOp");
  2270. ge::NodePtr node = std::make_shared<ge::Node>(op, graph);
  2271. graph->AddNode(node);
  2272. ModelParserFactory* factory = ModelParserFactory::Instance();
  2273. shared_ptr<domi::ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2274. ASSERT_TRUE(NULL != model_parser);
  2275. TensorFlowModelParser tensorflow_parser;
  2276. Status ret = tensorflow_parser.RemoveIsolateNode(graph);
  2277. EXPECT_EQ(domi::SUCCESS, ret);
  2278. }
  2279. TEST_F(STestTensorflowParser, tensorflow_UpdateEdgesControlInfo_test)
  2280. {
  2281. TensorFlowModelParser model_parser;
  2282. ge::ScopeFusionOpInfo info;
  2283. info.fusion_node_name = "conv_conv5/BatchNorm/batchnorm";
  2284. info.fusion_op_type = ge::parser::FUSIONBATCHNORM;
  2285. info.node_name = "conv_conv5/BatchNorm/batchnorm/add";
  2286. info.description = "";
  2287. info.scope_pass = false;
  2288. model_parser.UpdateEdgesControlInfo(info);
  2289. }
  2290. TEST_F(STestTensorflowParser, tensorflow_OptimizeIdentityByOutput_test)
  2291. {
  2292. TensorFlowModelParser model_parser;
  2293. NodeDef *node_def = new NodeDef();
  2294. node_def->set_name("Placeholder");
  2295. node_def->set_op("Placeholder_0");
  2296. std::map<string, NodeDef *> nodedef_map;
  2297. nodedef_map.emplace("Placeholder", node_def);
  2298. std::string curr_node_name = "Placeholder";
  2299. bool clear_input_flag = true;
  2300. Status ret = model_parser.OptimizeIdentityByOutput(nodedef_map, curr_node_name, clear_input_flag);
  2301. EXPECT_EQ(ret, INTERNAL_ERROR);
  2302. GraphDef graph;
  2303. curr_node_name = "pre_node_a";
  2304. nodedef_map.emplace("pre_node_a", node_def);
  2305. node_def->set_op("pre_node_a");
  2306. GenOriginContext(&model_parser, curr_node_name);
  2307. ret = model_parser.OptimizeIdentityByOutput(nodedef_map, curr_node_name, clear_input_flag);
  2308. EXPECT_EQ(ret, SUCCESS);
  2309. delete node_def;
  2310. }
  2311. TEST_F(STestTensorflowParser, tensorflow_OptimizeSnapShot_test)
  2312. {
  2313. TensorFlowModelParser model_parser;
  2314. tensorflow::NodeDef *curr_mode_def = initNodeDef();
  2315. std::map<string, NodeDef *> nodedef_map;
  2316. nodedef_map.emplace("pre_node_a", curr_mode_def);
  2317. std::pair<string, int> input_data;
  2318. std::vector<string> control_list;
  2319. std::string curr_node_name = "pre_node_a";
  2320. GenOriginContext(&model_parser, curr_node_name);
  2321. Status ret = model_parser.OptimizeSnapShot(curr_mode_def, nodedef_map, input_data, control_list);
  2322. EXPECT_EQ(ret, INTERNAL_ERROR);
  2323. curr_mode_def->set_name("pre_node_a");
  2324. GenOriginContext(&model_parser, curr_node_name);
  2325. ret = model_parser.OptimizeSnapShot(curr_mode_def, nodedef_map, input_data, control_list);
  2326. EXPECT_EQ(ret, SUCCESS);
  2327. }
  2328. TEST_F(STestTensorflowParser, tensorflow_GraphDefOptimizeSnapShot_test)
  2329. {
  2330. TensorFlowModelParser model_parser;
  2331. tensorflow::GraphDef graph_def;
  2332. tensorflow::NodeDef *curr_mode_def = initNodeDef();
  2333. std::map<string, NodeDef *> nodedef_map;
  2334. nodedef_map.emplace("pre_node_a", curr_mode_def);
  2335. std::vector<NodeDef *> nodedef_to_optimize;
  2336. nodedef_to_optimize.emplace_back(curr_mode_def);
  2337. Status ret = model_parser.GraphDefOptimizeSnapShot(&graph_def, nodedef_map, nodedef_to_optimize);
  2338. EXPECT_EQ(ret, FAILED);
  2339. }
  2340. TEST_F(STestTensorflowParser, tensorflow_SetDestNodeName_test)
  2341. {
  2342. TensorFlowModelParser model_parser;
  2343. GraphDef graph;
  2344. auto arg0 = AddNode(graph, "_Arg", "arg0");
  2345. auto identity0 = AddNode(graph, "Identity", "identity0");
  2346. auto add0 = AddNode(graph, "Add", "add0");
  2347. int32_t input_idx = 0;
  2348. bool is_control = true;
  2349. bool clear_input_flag = true;
  2350. AddInput(arg0, identity0, 0);
  2351. AddInput(identity0, add0, 0);
  2352. Status ret = model_parser.SetDestNodeName(identity0, add0, input_idx, is_control, clear_input_flag);
  2353. EXPECT_EQ(ret, SUCCESS);
  2354. }
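// GraphDefOptimize with a TemporaryVariable / DestroyTemporaryVariable pair whose var_name
// attrs do not match: the optimization is expected to fail and leave the number of
// input-less nodes unchanged.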
  2355. TEST_F(STestTensorflowParser, tensorflow_OptimizeDestroyTemporaryVariable_test)
  2356. {
  2357. ModelParserFactory* factory = ModelParserFactory::Instance();
  2358. shared_ptr<domi::ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2359. TensorFlowModelParser tensorflow_parser;
  2360. GraphDef graph;
  2361. auto const0 = AddNode(graph, "Const", "Const0");
  2362. auto tmpVar0 = AddNode(graph, "TemporaryVariable", "TemporaryVariable0");
  2363. auto assign0 = AddNode(graph, "Assign", "Assign0");
  2364. auto destroy0 = AddNode(graph, "DestroyTemporaryVariable", "DestroyTemporaryVariable0");
  2365. auto add0 = AddNode(graph, "Add", "Add0");
  2366. google::protobuf::Map< std::string, tensorflow::AttrValue> *node_attr_map = tmpVar0->mutable_attr();
  2367. tensorflow::AttrValue var_name_attr_value;
  2368. var_name_attr_value.set_s("temporary_variable_name");
  2369. (*node_attr_map)[ge::VAR_ATTR_NAME] = var_name_attr_value;
  2370. google::protobuf::Map<std::string, tensorflow::AttrValue>* node_attr_map_destroy = destroy0->mutable_attr();
  2371. tensorflow::AttrValue var_name_attr_value_destroy;
  2372. var_name_attr_value_destroy.set_s("destroy_temporary_variable_name");
  2373. (*node_attr_map_destroy)[ge::VAR_ATTR_NAME] = var_name_attr_value_destroy;
  2374. AddInput(tmpVar0, assign0, 0);
  2375. AddInput(assign0, destroy0, 0);
  2376. AddInput(const0, add0, 0);
  2377. AddInput(destroy0, add0, 1);
  2378. GraphDef* graphDef = &graph;
  2379. int32_t no_input_node_size_original = 0;
  2380. for (int w = 0; w < graphDef->node_size(); w++) {
  2381. tensorflow::NodeDef* nodeTmp = graphDef->mutable_node(w);
  2382. if (nodeTmp->input_size() == 0) {
  2383. no_input_node_size_original++;
  2384. }
  2385. }
  2386. Status ret = tensorflow_parser.GraphDefOptimize(graphDef);
  2387. int32_t no_input_node_size_result = 0;
  2388. for (int w = 0; w < graphDef->node_size(); w++) {
  2389. tensorflow::NodeDef* nodeTmp = graphDef->mutable_node(w);
  2390. if (nodeTmp->input_size() == 0) {
2391. no_input_node_size_result++;
  2392. }
  2393. }
  2394. ASSERT_EQ(ret, domi::FAILED);
  2395. ASSERT_EQ(no_input_node_size_original, no_input_node_size_result);
  2396. }
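// Same graph, but the DestroyTemporaryVariable node reuses the TemporaryVariable's
// var_name, so GraphDefOptimize succeeds and the optimized graph ends up with one more
// input-less node than before.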
  2397. TEST_F(STestTensorflowParser, tensorflow_OptimizeDestroyTemporaryVariable_test2)
  2398. {
  2399. ModelParserFactory* factory = ModelParserFactory::Instance();
  2400. shared_ptr<domi::ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2401. TensorFlowModelParser tensorflow_parser;
  2402. GraphDef graph;
  2403. auto const0 = AddNode(graph, "Const", "Const0");
  2404. auto tmpVar0 = AddNode(graph, "TemporaryVariable", "TemporaryVariable0");
  2405. auto assign0 = AddNode(graph, "Assign", "Assign0");
  2406. auto destroy0 = AddNode(graph, "DestroyTemporaryVariable", "DestroyTemporaryVariable0");
  2407. auto add0 = AddNode(graph, "Add", "Add0");
  2408. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = tmpVar0->mutable_attr();
  2409. tensorflow::AttrValue var_name_attr_value;
  2410. var_name_attr_value.set_s("temporary_variable_name");
  2411. (*node_attr_map)[ge::VAR_ATTR_NAME] = var_name_attr_value;
  2412. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map_destroy = destroy0->mutable_attr();
  2413. tensorflow::AttrValue var_name_attr_value_destroy;
  2414. var_name_attr_value_destroy.set_s("temporary_variable_name");
  2415. (*node_attr_map_destroy)[ge::VAR_ATTR_NAME] = var_name_attr_value_destroy;
  2416. AddInput(tmpVar0, assign0, 0);
  2417. AddInput(assign0, destroy0, 0);
  2418. AddInput(const0, add0, 0);
  2419. AddInput(destroy0, add0, 1);
  2420. GraphDef* graphDef = &graph;
  2421. int32_t no_input_node_size_original = 0;
  2422. for (int w = 0; w < graphDef->node_size(); w++) {
  2423. tensorflow::NodeDef* nodeTmp = graphDef->mutable_node(w);
  2424. if (nodeTmp->input_size() == 0) {
2425. no_input_node_size_original++;
  2426. }
  2427. }
  2428. Status ret = tensorflow_parser.GraphDefOptimize(graphDef);
  2429. int32_t no_input_node_size_result = 0;
  2430. for (int w = 0; w < graphDef->node_size(); w++) {
  2431. tensorflow::NodeDef* nodeTmp = graphDef->mutable_node(w);
  2432. if (nodeTmp->input_size() == 0) {
2433. no_input_node_size_result++;
  2434. }
  2435. }
  2436. ASSERT_EQ(ret, domi::SUCCESS);
  2437. ASSERT_EQ(no_input_node_size_original, (no_input_node_size_result - 1));
  2438. }
  2439. TEST_F(STestTensorflowParser, tensorflow_AddControlEdgeAfterRemoveInputs_test)
  2440. {
  2441. tensorflow::GraphDef graph_def;
  2442. TensorFlowModelParser tensorflow_parser;
  2443. tensorflow::NodeDef *node_def = initNodeDef();
  2444. node_def->set_name("Add0");
  2445. node_def->set_op("add");
  2446. std::map<std::string, NodeDef *> all_node_map;
  2447. all_node_map.emplace("Add0", node_def);
  2448. std::vector<std::string> removed_inputs_vec;
  2449. removed_inputs_vec.emplace_back("Add0");
  2450. Status ret = tensorflow_parser.AddControlEdgeAfterRemoveInputs(&graph_def, node_def, all_node_map, removed_inputs_vec);
  2451. EXPECT_EQ(ret, SUCCESS);
  2452. }
  2453. TEST_F(STestTensorflowParser, tensorflow_GraphDefOptimizeIdentity_test)
  2454. {
  2455. tensorflow::GraphDef graph_def;
  2456. TensorFlowModelParser tensorflow_parser;
  2457. tensorflow::NodeDef *node_def = initNodeDef();
  2458. node_def->set_name("post_node_d");
  2459. std::map<string, NodeDef *> nodedef_map;
  2460. nodedef_map.emplace("post_node_d", node_def);
  2461. nodedef_map.emplace("post_node_a", node_def);
  2462. nodedef_map.emplace("post_node_b", node_def);
  2463. std::vector<NodeDef *> nodedef_to_optimize;
  2464. nodedef_to_optimize.emplace_back(node_def);
  2465. std::string curr_node_name = "post_node_b";
  2466. GenOriginContext(&tensorflow_parser, curr_node_name);
  2467. Status ret = tensorflow_parser.GraphDefOptimizeIdentity(&graph_def, nodedef_map, nodedef_to_optimize);
  2468. EXPECT_EQ(ret, ge::PARAM_INVALID);
  2469. }
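// Loads origin_models/test_snapshot.pb and runs ParseProto on it end to end, expecting
// FAILED (per the test name, the snapshot graph carries no _Retval node).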
  2470. TEST_F(STestTensorflowParser, tensorflow_optimizer_snapshot_no_retval_test) {
  2471. std::string caseDir = __FILE__;
  2472. std::size_t idx = caseDir.find_last_of("/");
  2473. caseDir = caseDir.substr(0, idx);
  2474. const std::string root_proto = caseDir + "/origin_models/test_snapshot.pb";
  2475. domi::tensorflow::GraphDef graphDef;
  2476. bool protoRet =
  2477. parser::ReadProtoFromBinaryFile(root_proto.c_str(), &graphDef);
  2478. ASSERT_EQ(protoRet, true);
  2479. TensorFlowModelParser tensorflow_parser;
  2480. ge::ComputeGraphPtr root_graph =
  2481. ge::parser::MakeShared<ge::ComputeGraph>("tmp_graph");
  2482. Status ret = tensorflow_parser.ParseProto(
  2483. reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph);
  2484. EXPECT_EQ(FAILED, ret);
  2485. }
  2486. TEST_F(STestTensorflowParser, tensorflow_RemoveInputs_test)
  2487. {
  2488. tensorflow::GraphDef graph_def;
  2489. tensorflow::NodeDef *node_def = initNodeDef();
  2490. node_def->set_name("OP");
  2491. node_def->add_input("OP/Input_1");
  2492. node_def->add_input("OP/Input_2");
  2493. std::set<uint32_t> remove_index_set;
  2494. std::map<std::string, NodeDef *> all_node_map;
  2495. TensorFlowModelParser model_parser;
  2496. Status ret = model_parser.RemoveInputs(&graph_def, node_def, remove_index_set, all_node_map);
  2497. EXPECT_EQ(ret, SUCCESS);
  2498. remove_index_set.emplace(0);
  2499. ret = model_parser.RemoveInputs(&graph_def, node_def, remove_index_set, all_node_map);
  2500. EXPECT_EQ(ret, FAILED);
  2501. }
  2502. TEST_F(STestTensorflowParser, tensorflow_UpdateInnerNodeContext_test)
  2503. {
  2504. std::string fusion_op_name = "post_node_a";
  2505. std::vector<std::string> inner_nodes_name;
  2506. inner_nodes_name.emplace_back("post_node_a");
  2507. TensorFlowModelParser model_parser;
  2508. Status ret = model_parser.UpdateInnerNodeContext(fusion_op_name, inner_nodes_name);
  2509. EXPECT_EQ(ret, INTERNAL_ERROR);
  2510. GenOriginContext(&model_parser, fusion_op_name);
  2511. ret = model_parser.UpdateInnerNodeContext(fusion_op_name, inner_nodes_name);
  2512. EXPECT_EQ(ret, SUCCESS);
  2513. }
  2514. TEST_F(STestTensorflowParser, tensorflow_UpdateInnerInputMap_test)
  2515. {
  2516. string fusion_op_name = "post_node_a";
  2517. OpNodeContext fusion_context;
  2518. std::vector<std::string> inner_nodes_name;
  2519. inner_nodes_name.emplace_back("post_node_a");
  2520. std::set<string> fusion_input_nodes;
  2521. fusion_input_nodes.insert("post_node_a");
  2522. TensorFlowModelParser model_parser;
  2523. GenOriginContext(&model_parser, fusion_op_name);
  2524. model_parser.UpdateInnerInputMap(fusion_op_name, fusion_context, inner_nodes_name, fusion_input_nodes);
  2525. }
  2526. TEST_F(STestTensorflowParser, tensorflow_UpdateInnerOutputMap_test)
  2527. {
  2528. string fusion_op_name = "post_node_a";
  2529. OpNodeContext fusion_context;
  2530. std::vector<std::string> inner_nodes_name;
  2531. inner_nodes_name.emplace_back("post_node_a");
  2532. std::set<string> fusion_output_nodes;
  2533. fusion_output_nodes.insert("post_node_a");
  2534. TensorFlowModelParser model_parser;
  2535. GenOriginContext(&model_parser, fusion_op_name);
  2536. model_parser.UpdateInnerOutputMap(fusion_op_name, fusion_context, inner_nodes_name, fusion_output_nodes);
  2537. }
  2538. TEST_F(STestTensorflowParser, tensorflow_ScopePassManager_AddPass_test)
  2539. {
  2540. ScopePassManager passmanager;
  2541. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2542. shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  2543. unique_ptr<ScopeBasePass> pass;
  2544. pass.reset(new ScopeTestPass());
  2545. EXPECT_EQ(ge::SUCCESS, passmanager.AddPass(pass));
  2546. EXPECT_NE(ge::SUCCESS, passmanager.Run(scope_graph));
  2547. delete graph;
  2548. graph = nullptr;
  2549. }
  2550. TEST_F(STestTensorflowParser, tensorflow_CheckAttrHasType_test1)
  2551. {
  2552. tensorflow::AttrValue attr_value;
  2553. attr_value.mutable_list();
  2554. Status ret = TensorFlowUtil::CheckAttrHasType(attr_value, "int");
  2555. EXPECT_EQ(FAILED, ret);
  2556. attr_value.set_type(DT_INVALID);
  2557. ret = TensorFlowUtil::CheckAttrHasType(attr_value, "type");
  2558. EXPECT_EQ(FAILED, ret);
  2559. tensorflow::AttrValue attr_value2;
  2560. AttrValue_ListValue *list = attr_value2.mutable_list();
  2561. list->add_type(tensorflow::DT_FLOAT);
  2562. list->add_type((tensorflow::DataType)30);
  2563. ret = TensorFlowUtil::CheckAttrHasType(attr_value2, "list(type)");
  2564. EXPECT_EQ(FAILED, ret);
  2565. }
  2566. TEST_F(STestTensorflowParser, tensorflow_CheckAttrHasType_test2)
  2567. {
  2568. tensorflow::AttrValue attr_value;
  2569. AttrValue_ListValue * list = attr_value.mutable_list();
  2570. list->add_type(tensorflow::DT_FLOAT);
  2571. list->add_type(tensorflow::DT_INVALID);
  2572. Status ret = TensorFlowUtil::CheckAttrHasType(attr_value, "list(type)");
  2573. EXPECT_EQ(FAILED, ret);
  2574. attr_value.set_placeholder("test");
  2575. ret = TensorFlowUtil::CheckAttrHasType(attr_value, "");
  2576. EXPECT_EQ(FAILED, ret);
  2577. }
  2578. TEST_F(STestTensorflowParser, tensorflow_TransTensorDescriptor_test)
  2579. {
  2580. tensorflow::AttrValue attr_value;
  2581. AttrValue_ListValue *list = attr_value.mutable_list();
  2582. list->add_type(tensorflow::DT_FLOAT);
  2583. ParserOperator op;
  2584. uint32_t io = TENSORFLOW_NORMAL_INPUT_TENSOR_FLAG;
  2585. std::string type = ge::parser::FUSEDBATCHNORMGRAD;
  2586. Status ret = TensorFlowUtil::TransTensorDescriptor(attr_value, &op, io, type);
  2587. EXPECT_EQ(ret, SUCCESS);
  2588. io = TENSORFLOW_NORMAL_OUTPUT_TENSOR_FLAG;
  2589. ret = TensorFlowUtil::TransTensorDescriptor(attr_value, &op, io, type);
  2590. EXPECT_EQ(ret, SUCCESS);
  2591. }
  2592. TEST_F(STestTensorflowParser, tensorflow_GraphDefOptimizeDestroyTemporaryVariable_test)
  2593. {
  2594. tensorflow::GraphDef *graph_def = nullptr;
  2595. tensorflow::NodeDef *nodeCurrent = initNodeDef();
  2596. TensorFlowModelParser model_parser;
  2597. Status ret = model_parser.GraphDefOptimizeDestroyTemporaryVariable(graph_def, nodeCurrent);
  2598. EXPECT_EQ(ret, FAILED);
  2599. }
  2600. TEST_F(STestTensorflowParser, tensorflow_GetFunctionProto_test)
  2601. {
  2602. std::cout << __FILE__ << std::endl;
  2603. std::string caseDir = __FILE__;
  2604. std::size_t idx = caseDir.find_last_of("/");
  2605. caseDir = caseDir.substr(0, idx);
  2606. std::string file = caseDir + "/origin_models/test_enter.pb";
  2607. domi::tensorflow::GraphDefLibrary graph_def_library;
  2608. TensorFlowModelParser model_parser;
  2609. Status ret = model_parser.GetFunctionProto(file, graph_def_library);
  2610. EXPECT_EQ(ret, FAILED);
  2611. }
  2612. TEST_F(STestTensorflowParser, tensorflow_GetNodeFormat_test)
  2613. {
  2614. NodeDef *node_def1 = initNodeDef();
  2615. node_def1->set_op("NoOp");
  2616. node_def1->set_name("NoOp");
  2617. NodeDef *node_def2 = initNodeDef();
  2618. node_def2->set_op("Add");
  2619. node_def2->set_name("Add0");
  2620. TfTranspose pred_transpose = TO_NCHW;
  2621. domiTensorFormat_t format = domi::DOMI_TENSOR_NC1HWC0;
  2622. std::set<const NodeDef *> visited_node;
  2623. visited_node.emplace(node_def2);
  2624. TensorFlowModelParser model_parser;
  2625. Status ret = model_parser.GetNodeFormat(node_def1, pred_transpose, format, visited_node);
  2626. EXPECT_EQ(ret, FAILED);
  2627. delete node_def1;
  2628. delete node_def2;
  2629. }
  2630. TEST_F(STestTensorflowParser, tensorflow_GetFormatTranspose_test)
  2631. {
  2632. NodeDef *transpose_node = initNodeDef();
  2633. transpose_node->set_op("Transpose");
  2634. TfTranspose transpose_direc = NO_TRANSPOSE;
  2635. TensorFlowModelParser modelParser;
  2636. Status ret = modelParser.GetFormatTranspose(transpose_node, transpose_direc);
  2637. EXPECT_EQ(ret, FAILED);
  2638. delete transpose_node;
  2639. }
  2640. TEST_F(STestTensorflowParser, tensorflow_GetFormatTranspose_test2)
  2641. {
  2642. TensorFlowModelParser modelParser;
  2643. TfTranspose transpose_direc = NO_TRANSPOSE;
  2644. NodeDef *transpose_node = initNodeDef();
  2645. GraphDef graph;
  2646. auto arg0 = AddNode(graph, "_Arg", "arg0");
  2647. auto snapshot0 = AddNode(graph, "Snapshot", "snapshot0");
  2648. auto ret0 = AddNode(graph, "_Retval", "retval0");
  2649. auto arg1 = AddNode(graph, "_Arg", "arg1");
  2650. auto snapshot1 = AddNode(graph, "Snapshot", "snapshot1");
  2651. auto ret1 = AddNode(graph, TENSORFLOWF_NODE_OP_TRANSPOSE, "retval1");
  2652. auto arg2 = AddNode(graph, "_Arg", "arg2");
  2653. auto snapshot2 = AddNode(graph, "Snapshot", "snapshot2");
  2654. auto ret2 = AddNode(graph, TENSORFLOWF_NODE_OP_TRANSPOSE, TENSORFLOWF_NODE_OP_TRANSPOSE);
  2655. AddInput(arg0, snapshot0, 0);
  2656. AddInput(snapshot0, ret0, 0);
  2657. AddInput(arg1, snapshot1, 0);
  2658. AddInput(snapshot1, ret1, 0);
  2659. AddInput(arg2, snapshot2, 0);
  2660. AddInput(snapshot2, ret2, 0);
  2661. AddInput(snapshot0, snapshot1, -1);
  2662. AddInput(snapshot1, snapshot2, -1);
  2663. bool train_flag = ge::GetParserContext().train_flag;
  2664. ge::GetParserContext().train_flag = true;
  2665. ASSERT_EQ(modelParser.GraphDefOptimize(&graph), SUCCESS);
  2666. ge::GetParserContext().train_flag = train_flag;
  2667. modelParser.nodedef_map_["arg1"] = transpose_node;
  2668. modelParser.nodedef_map_["^arg0"] = transpose_node;
  2669. Status ret = modelParser.GetFormatTranspose(ret1, transpose_direc);
  2670. EXPECT_EQ(ret, SUCCESS);
  2671. delete transpose_node;
  2672. }
  2673. TEST_F(STestTensorflowParser, tensorflow_GetTensorflowGraphInOutMap_test)
  2674. {
  2675. TensorFlowModelParser model_parser;
  2676. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2677. tensorflow::NodeDef *node_input = graph->add_node();
  2678. node_input->set_name("name_input");
  2679. node_input->set_op("op_input");
  2680. AddGraphNode(graph, "t_lstm/t_lstm_cell/Sigmoid5", "Sigmoid", "node_input");
  2681. AddGraphNode(graph, "t_lstm/t_lstm_cell/Sigmoid6", "Sigmoid", "node_input");
  2682. AddGraphNode(graph, "t_lstm/t_lstm_cell/Sigmoid7", "Sigmoid", "node_input");
  2683. AddGraphNode(graph, "t_lstm/t_lstm_cell/Mul5", "Mul", "node_input");
  2684. AddGraphNode(graph, "t_lstm/t_lstm_cell/Mul6", "Mul", "node_input");
  2685. AddGraphNode(graph, "t_lstm/t_lstm_cell/Mul7", "Mul", "node_input");
  2686. AddGraphNode(graph, "t_lstm/t_lstm_cell/Relu5", "Relu", "node_input");
  2687. AddGraphNode(graph, "t_lstm/t_lstm_cell/Relu6", "Relu", "node_input");
  2688. Status ret = model_parser.GetTensorflowGraphInOutMap(graph);
  2689. EXPECT_EQ(ret, SUCCESS);
  2690. delete graph;
  2691. }
  2692. TEST_F(STestTensorflowParser, tensorflow_RemoveIsolateNode_test)
  2693. {
  2694. TensorFlowModelParser model_parser;
  2695. tensorflow::GraphDef graph;
  2696. CreateGraphDef(graph);
  2697. Status ret = model_parser.RemoveIsolateNode(&graph);
  2698. EXPECT_EQ(ret, FAILED);
  2699. }
  2700. TEST_F(STestTensorflowParser, tensorflow_AddNodeToGraphAndMarkFormat_test)
  2701. {
  2702. TensorFlowModelParser model_parser;
  2703. ComputeGraphPtr graph = make_shared<ge::ComputeGraph>("default");
  2704. std::vector<std::string> op_node_name_list = {"Const", "placeholder0"};
  2705. GenOriginNodeDef(&model_parser, op_node_name_list);
  2706. Status ret = model_parser.AddNodeToGraphAndMarkFormat(graph, op_node_name_list);
  2707. EXPECT_EQ(ret, INTERNAL_ERROR);
  2708. }
  2709. TEST_F(STestTensorflowParser, tensorflow_ParserNodeDef1_test)
  2710. {
  2711. ge::ComputeGraphPtr compute_graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2712. ModelParserFactory* factory = ModelParserFactory::Instance();
  2713. shared_ptr<ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2714. ASSERT_TRUE(NULL != model_parser);
  2715. TensorFlowModelParser tensorflow_parser;
  2716. tensorflow_parser.adaptedOpTypeMap_["test_name"] = "POOLING";
  2717. std::mutex graphMutex;
  2718. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2719. ScopePassManager passmanager;
  2720. shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  2721. domi::tensorflow::NodeDef node_def;
  2722. node_def.set_name("test_name");
  2723. node_def.set_op("POOLING");
  2724. error_message::Context error_context;
  2725. Status ret = ge::TensorFlowModelParser::ParseNodeDef(&tensorflow_parser, compute_graph, &graphMutex, scope_graph, &node_def, error_context);
  2726. EXPECT_EQ(FAILED, ret);
  2727. delete graph;
  2728. }
  2729. TEST_F(STestTensorflowParser, tensorflow_ParserNodeDef2_test)
  2730. {
  2731. ge::ComputeGraphPtr compute_graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2732. ModelParserFactory* factory = ModelParserFactory::Instance();
  2733. shared_ptr<ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2734. ASSERT_TRUE(NULL != model_parser);
  2735. TensorFlowModelParser tensorflow_parser;
  2736. tensorflow_parser.adaptedOpTypeMap_["Pooling"] = "Pooling";
  2737. std::mutex graphMutex;
  2738. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2739. ScopePassManager passmanager;
  2740. shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  2741. REGISTER_CUSTOM_OP("Pooling")
  2742. .FrameworkType(domi::TENSORFLOW)
  2743. .OriginOpType("Pooling")
  2744. .ParseParamsFn(ParseParams)
  2745. .ImplyType(ImplyType::TVM);
  2746. register_tbe_op();
  2747. domi::tensorflow::NodeDef node_def;
  2748. node_def.set_name("Pooling");
  2749. node_def.set_op("Pooling");
  2750. error_message::Context error_context;
  2751. Status ret = ge::TensorFlowModelParser::ParseNodeDef(&tensorflow_parser, compute_graph, &graphMutex, scope_graph, &node_def, error_context);
  2752. EXPECT_EQ(FAILED, ret);
  2753. delete graph;
  2754. }
  2755. TEST_F(STestTensorflowParser, tensorflow_AddExternalGraph_test)
  2756. {
  2757. TensorFlowModelParser modelParser;
  2758. ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  2759. std::string inputNodeType = "DATA";
  2760. MakeDagGraph(subGraph, inputNodeType);
  2761. Status ret = modelParser.AddExternalGraph(subGraph);
  2762. EXPECT_EQ(ret, SUCCESS);
  2763. }
  2764. TEST_F(STestTensorflowParser, tensorflow_AddFmkNode_test)
  2765. {
  2766. TensorFlowModelParser model_parser;
  2767. ge::ComputeGraphPtr compute_graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2768. tensorflow::GraphDef *graphDef = new (std::nothrow) tensorflow::GraphDef();
  2769. ScopePassManager pass_manager;
  2770. std::shared_ptr<ScopeGraph> scope_graph = pass_manager.BuildScopeGraph(graphDef);
  2771. std::vector<std::string> op_node_name_list = {"Const", "placeholder0"};
  2772. GenOriginNodeDef(&model_parser, op_node_name_list);
  2773. Status ret = model_parser.AddFmkNode(compute_graph, scope_graph, op_node_name_list, false);
  2774. EXPECT_EQ(ret, PARAM_INVALID);
  2775. delete graphDef;
  2776. }
  2777. TEST_F(STestTensorflowParser, tensorflow_OptimizeConstNodes4CustomOp_test)
  2778. {
  2779. TensorFlowModelParser model_parser;
  2780. tensorflow::GraphDef graph_def;
  2781. CreateGraphDef(graph_def);
  2782. Status ret = model_parser.OptimizeConstNodes4CustomOp(&graph_def);
  2783. EXPECT_EQ(ret, SUCCESS);
  2784. }
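// OptimizeConstNodes4CustomOp on a FusedBatchNormV3 / FusedBatchNormGradV3 pair: the pass
// succeeds both before and after registering a custom BatchNormGrad op that declares
// DelInputWithOriginalType(5, "FusedBatchNormGradV3"), and the grad node keeps its six
// inputs in this setup.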
  2785. TEST_F(STestTensorflowParser, OptimizeConstNodes4CustomOp_success)
  2786. {
  2787. GraphDef graph;
  2788. auto bn = AddNode(graph, "FusedBatchNormV3", "FusedBatchNormV3_0");
  2789. auto bn_grad = AddNode(graph, "FusedBatchNormGradV3", "FusedBatchNormGradV3_0");
  2790. AddInput(bn, bn_grad, 0);
  2791. AddInput(bn, bn_grad, 1);
  2792. AddInput(bn, bn_grad, 2);
  2793. AddInput(bn, bn_grad, 3);
  2794. AddInput(bn, bn_grad, 5);
  2795. AddInput(bn, bn_grad, 5);
  2796. GraphDef* graphDef = &graph;
  2797. int before_bn_grad_input_size = bn_grad->input_size();
  2798. ASSERT_EQ(before_bn_grad_input_size, 6);
  2799. ModelParserFactory* factory = ModelParserFactory::Instance();
  2800. shared_ptr<domi::ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2801. ge::TensorFlowModelParser tensorflow_parser;
  2802. Status ret = tensorflow_parser.OptimizeConstNodes4CustomOp(graphDef);
  2803. int after_bn_grad_input_size = bn_grad->input_size();
  2804. ASSERT_EQ(after_bn_grad_input_size, 6);
  2805. ASSERT_EQ(ret, domi::SUCCESS);
  2806. REGISTER_CUSTOM_OP("BatchNormGrad")
  2807. .FrameworkType(domi::TENSORFLOW)
  2808. .OriginOpType({"FusedBatchNormGradV3", "FusedBatchNormGradV2", "FusedBatchNormGrad"})
  2809. .ParseParamsFn(AutoMappingFn)
  2810. .DelInputWithOriginalType(5, "FusedBatchNormGradV3")
  2811. .ImplyType(ImplyType::TVM);
  2812. register_tbe_op();
  2813. ret = tensorflow_parser.OptimizeConstNodes4CustomOp(graphDef);
  2814. after_bn_grad_input_size = bn_grad->input_size();
  2815. ASSERT_EQ(after_bn_grad_input_size, 6);
  2816. ASSERT_EQ(ret, domi::SUCCESS);
  2817. }
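// ParseOpParams: the Pooling parser rejects the NodeDef (FAILED), while re-labelling it as
// TensorArrayWriteV3 and using the TensorArrayWrite parser succeeds.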
  2818. TEST_F(STestTensorflowParser, tensorflow_ParseOpParams_test)
  2819. {
  2820. TensorFlowModelParser model_parser;
  2821. tensorflow::NodeDef *node_def = initNodeDef();
  2822. node_def->set_name("Pooling");
  2823. node_def->set_op("Pooling");
  2824. ge::OpDescPtr op = std::make_shared<ge::OpDesc>();
  2825. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  2826. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("Pooling");
  2827. Status ret = model_parser.ParseOpParams(node_def, op, op_parser);
  2828. EXPECT_EQ(ret, FAILED);
  2829. node_def->set_name("TensorArrayWrite");
  2830. node_def->set_op("TensorArrayWriteV3");
  2831. op_parser = factory->CreateOpParser("TensorArrayWrite");
  2832. ret = model_parser.ParseOpParams(node_def, op, op_parser);
  2833. EXPECT_EQ(ret, SUCCESS);
  2834. delete node_def;
  2835. }
  2836. TEST_F(STestTensorflowParser, tensorflow_AddFusionInnerNodeDef_test)
  2837. {
  2838. TensorFlowModelParser model_parser;
  2839. ge::ComputeGraphPtr compute_graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2840. tensorflow::GraphDef *graphDef = new (std::nothrow) tensorflow::GraphDef();
  2841. ScopePassManager pass_manager;
  2842. std::shared_ptr<ScopeGraph> scope_graph = pass_manager.BuildScopeGraph(graphDef);
  2843. std::vector<std::string> op_node_name_list = {"Const", "placeholder0"};
  2844. FusionScopesResult *fusion_scope_rlt = new (std::nothrow) FusionScopesResult();
  2845. fusion_scope_rlt->Init();
  2846. fusion_scope_rlt->SetName("FusionCustom");
  2847. auto &impl_scope_graph = scope_graph->impl_;
  2848. std::string scope_name = fusion_scope_rlt->Name();
  2849. impl_scope_graph->fusion_results_.insert(std::make_pair(scope_name, fusion_scope_rlt));
  2850. std::string fusion_op_name = "FusionCustom";
  2851. GenOriginNodeDef(&model_parser, op_node_name_list);
  2852. GenFusionScopesResult(scope_graph, fusion_scope_rlt, fusion_op_name);
  2853. Status ret = model_parser.AddFusionInnerNodeDef(scope_graph, fusion_op_name, op_node_name_list);
  2854. EXPECT_EQ(ret, INTERNAL_ERROR);
  2855. delete graphDef;
  2856. }
  2857. TEST_F(STestTensorflowParser, Scope_pass_test)
  2858. {
  2859. ScopePassManager passmanager;
  2860. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2861. shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  2862. EXPECT_NE(nullptr, scope_graph);
  2863. unique_ptr<ScopeBasePass> pass;
  2864. pass.reset(new ScopeTestPass());
  2865. EXPECT_EQ(domi::SUCCESS, passmanager.AddPass(pass));
  2866. scope_graph = passmanager.BuildScopeGraph(graph);
  2867. EXPECT_NE(nullptr, scope_graph);
  2868. delete graph;
  2869. }
  2870. TEST_F(STestTensorflowParser, operator_attr_set_and_get)
  2871. {
  2872. TestOperator test_operator;
  2873. test_operator.Name("test_op");
  2874. EXPECT_EQ("test_op" , test_operator.GetName());
  2875. test_operator.Input(test_operator, 0);
  2876. test_operator.Input(test_operator, 1);
  2877. test_operator.GetOpAttrs();
  2878. int64_t pad = 1;
  2879. test_operator.Attr("pad", pad);
  2880. EXPECT_EQ(pad , test_operator.GetIntAttr("pad"));
  2881. bool bool_value = true;
  2882. test_operator.Attr("bool_value", bool_value);
  2883. EXPECT_EQ(bool_value , test_operator.GetBoolAttr("bool_value"));
2884. float float_value = 1.0f;
  2885. test_operator.Attr("float_value", float_value);
  2886. EXPECT_EQ(float_value , test_operator.GetFloatAttr("float_value"));
  2887. std::string str_value = "test_string";
  2888. test_operator.Attr("str_value", str_value);
  2889. EXPECT_EQ(str_value , test_operator.GetStringAttr("str_value"));
  2890. BoolTuple boollist_value{true, false};
  2891. test_operator.Attr("boollist_value", boollist_value);
  2892. BoolTuple get_boollist_value = test_operator.GetBoolTupleAttr("boollist_value");
  2893. EXPECT_EQ(boollist_value[0] , get_boollist_value[0]);
  2894. StringTuple strlist_value{"a", "b"};
  2895. test_operator.Attr("strlist_value", strlist_value);
  2896. StringTuple get_strlist_value = test_operator.GetStringTupleAttr("strlist_value");
  2897. EXPECT_EQ(strlist_value[0] , get_strlist_value[0]);
  2898. int64_t num = 1;
  2899. IntTuple intlist{num, num};
  2900. test_operator.Attr("intlist", intlist);
  2901. IntTuple get_intlist = test_operator.GetIntTupleAttr("intlist");
  2902. EXPECT_EQ(intlist[0] , get_intlist[0]);
  2903. FloatTuple floatlist{1.1, 1.1};
  2904. test_operator.Attr("floatlist", floatlist);
  2905. FloatTuple get_floatlist = test_operator.GetFloatTupleAttr("floatlist");
  2906. EXPECT_EQ(floatlist[0] , get_floatlist[0]);
  2907. ge::OpDescPtr op_desc = std::make_shared<ge::OpDesc>();
  2908. ParserOperator *op = &test_operator;
  2909. Status ret = ConvertToOpDesc(*op, op_desc);
  2910. EXPECT_EQ(domi::SUCCESS , ret);
  2911. TestOperator test_operator_1;
  2912. ParserOperator *op_convert = &test_operator_1;
  2913. ret = ConvertFromOpDesc(op_desc, *op_convert);
  2914. EXPECT_EQ(domi::SUCCESS , ret);
  2915. op_desc = nullptr;
  2916. ret = ConvertFromOpDesc(op_desc, *op_convert);
  2917. EXPECT_EQ(FAILED , ret);
  2918. ret = ConvertToOpDesc(*op, op_desc);
  2919. EXPECT_EQ(FAILED, ret);
  2920. }
  2921. TEST_F(STestTensorflowParser, success_frameworkop_get)
  2922. {
  2923. FrameworkOpOperator *frameworkOp=new FrameworkOpOperator();
  2924. int64_t index = 1;
  2925. std::string opdef_string = "tensorflow_parser";
  2926. frameworkOp->GetFrameworkType();
  2927. frameworkOp->GetNodeDefPkg();
  2928. frameworkOp->FuncDefPkg("func");
  2929. frameworkOp->Index(index);
  2930. frameworkOp->TfOpDef(opdef_string);
  2931. EXPECT_EQ(SUCCESS, SUCCESS);
  2932. delete frameworkOp;
  2933. }
  2934. TEST_F(STestTensorflowParser, op_set_get_success)
  2935. {
  2936. ConstantOperator op;
  2937. vector<int64_t> v;
  2938. op.VectorAttr("key", v);
  2939. op.GetDType();
  2940. }
  2941. TEST_F(STestTensorflowParser, success_argop_get)
  2942. {
  2943. ArgOpOperator *argOp=new ArgOpOperator();
  2944. int64_t index = 1;
  2945. argOp->Index(index);
  2946. argOp->GetIndex();
  2947. EXPECT_EQ(domi::SUCCESS, SUCCESS);
  2948. delete argOp;
  2949. }
  2950. TEST_F(STestTensorflowParser, success_operator)
  2951. {
  2952. ParserOperator tfOperator;
  2953. ParserOperator in_op;
  2954. uint32_t index = 0;
  2955. std::string type = "add";
  2956. std::string key = "Add";
  2957. std::vector<int64_t> value;
  2958. int64_t tmp = 0;
  2959. value.emplace_back(tmp);
  2960. tfOperator.Input(in_op, index);
  2961. tfOperator.Type(type);
  2962. tfOperator.AttrVector(key, value);
  2963. }
  2964. TEST_F(STestTensorflowParser, success_shapen_get)
  2965. {
  2966. ShapeNOperator *shapen =new ShapeNOperator();
  2967. shapen->GetInType();
  2968. shapen->GetInType();
  2969. shapen->GetOutType();
  2970. EXPECT_EQ(domi::SUCCESS, domi::SUCCESS);
  2971. delete shapen;
  2972. }
  2973. TEST_F(STestTensorflowParser, success_VarIsInitializedOpOperator_get)
  2974. {
  2975. VarIsInitializedOpOperator op;
  2976. op.Name("x");
  2977. std::vector<int64_t> value;
  2978. op.VectorAttr("key", value);
  2979. }
  2980. TEST_F(STestTensorflowParser, success_variable_op_get)
  2981. {
  2982. VariableOperator op;
  2983. uint32_t mem_type = 1;
  2984. op.Name("x");
  2985. std::vector<int64_t> value;
  2986. op.Placement("shared_name");
  2987. op.MemType(mem_type);
  2988. }
  2989. TEST_F(STestTensorflowParser, param_success_get)
  2990. {
  2991. FillOperator* fillOp=new FillOperator();
  2992. fillOp->GetDataType();
  2993. fillOp->GetAlpha();
  2994. fillOp->GetBeta();
  2995. EXPECT_EQ(domi::SUCCESS, domi::SUCCESS);
  2996. delete fillOp;
  2997. }
  2998. TEST_F(STestTensorflowParser, tensorflow_Message2Operator_ParseOperatorAttrs_test)
  2999. {
  3000. Message2Operator mess2Op;
  3001. tensorflow::NodeDef *node_def = initNodeDef();
  3002. int depth = 6;
  3003. ge::OpDescPtr op_desc = std::make_shared<ge::OpDesc>();
  3004. ge::Operator ops = ge::OpDescUtils::CreateOperatorFromOpDesc(op_desc);
  3005. Status ret = mess2Op.ParseOperatorAttrs(node_def, depth, ops);
  3006. EXPECT_EQ(ret, FAILED);
  3007. depth = 4;
  3008. ret = mess2Op.ParseOperatorAttrs(node_def, depth, ops);
  3009. EXPECT_EQ(ret, SUCCESS);
  3010. }
  3011. TEST_F(STestTensorflowParser, tensorflow_Pb2Json_RepeatedEnum2Json_test)
  3012. {
  3013. Pb2Json toJson;
  3014. ProtobufEnumValueDescriptor *enum_value_desc = new google::protobuf::EnumValueDescriptor();
  3015. bool enum2str = true;
  3016. Json json;
  3017. ProtobufFieldDescriptor *field = nullptr;
  3018. toJson.RepeatedEnum2Json(enum_value_desc, enum2str, json);
  3019. toJson.Enum2Json(enum_value_desc, field, enum2str, json);
  3020. enum2str = false;
  3021. toJson.RepeatedEnum2Json(enum_value_desc, enum2str, json);
  3022. delete enum_value_desc;
  3023. }
  3024. TEST_F(STestTensorflowParser, tensorflow_Pb2Json_TypeBytes2String_test)
  3025. {
  3026. Pb2Json toJson;
  3027. std::string field_name = "offset";
  3028. std::string type_bytes = "offset";
  3029. toJson.TypeBytes2String(field_name, type_bytes);
  3030. field_name = "test";
  3031. toJson.TypeBytes2String(field_name, type_bytes);
  3032. }
  3033. TEST_F(STestTensorflowParser, tensorflow_Pb2Json_RepeatedMessage2Json_test)
  3034. {
  3035. Pb2Json toJson;
  3036. tensorflow::NodeDef *node_def = initNodeDef();
  3037. ProtobufFieldDescriptor *field = new google::protobuf::FieldDescriptor();
  3038. ProtobufReflection *reflection = nullptr;
  3039. set<string> black_fields;
  3040. black_fields.emplace("offset");
  3041. Json json;
  3042. bool enum2str = true;
  3043. toJson.RepeatedMessage2Json((*node_def), field, reflection, black_fields, json, enum2str);
  3044. delete field;
  3045. }
  3046. TEST_F(STestTensorflowParser, tensorflow_Pb2Json_OneField2Json_test)
  3047. {
  3048. Pb2Json toJson;
  3049. tensorflow::NodeDef *node_def = initNodeDef();
  3050. ProtobufFieldDescriptor *field = new google::protobuf::FieldDescriptor();
  3051. ProtobufReflection *reflection = nullptr;
  3052. set<string> black_fields;
  3053. black_fields.emplace("offset");
  3054. Json json;
  3055. bool enum2str = true;
  3056. Message2Operator mess2Op;
  3057. int depth = 4;
  3058. ge::OpDescPtr op_desc = std::make_shared<ge::OpDesc>("FusionCustom", "FusionCustom");
  3059. ge::Operator ops = ge::OpDescUtils::CreateOperatorFromOpDesc(op_desc);
  3060. field->CppTypeName(google::protobuf::FieldDescriptor::CPPTYPE_ENUM);
  3061. mess2Op.ParseField(reflection, node_def, field, depth, ops);
  3062. toJson.OneField2Json((*node_def), field, reflection, black_fields, json, enum2str, 1);
  3063. toJson.OneField2Json((*node_def), field, reflection, black_fields, json, enum2str, 5);
  3064. delete field;
  3065. }
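// ProtoFileParser paths: calls with non-existent ./caffe/*.proto paths fail, while
// combining the real origin_models/caffe.proto with itself succeeds; the remaining calls
// against the missing custom proto keep returning FAILED.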
  3066. TEST_F(STestTensorflowParser, input_proto_real_path_success) {
  3067. const char *caffe_proto_path = "./caffe/caffe.proto";
  3068. const char *custom_proto_path = "./caffe/custom.proto";
  3069. ProtoFileParser proto_file_parser;
  3070. string fusion_proto_file;
  3071. auto ret = proto_file_parser.CombineProtoFile(caffe_proto_path, custom_proto_path, fusion_proto_file);
  3072. EXPECT_EQ(ret, FAILED);
  3073. ret = proto_file_parser.RecordProtoMessage(caffe_proto_path);
  3074. EXPECT_EQ(ret, FAILED);
  3075. ret = proto_file_parser.WriteProtoFile(caffe_proto_path, custom_proto_path);
  3076. EXPECT_EQ(ret, FAILED);
  3077. std::cout << __FILE__ << std::endl;
  3078. std::string caseDir = __FILE__;
  3079. std::size_t idx = caseDir.find_last_of("/");
  3080. caseDir = caseDir.substr(0, idx);
  3081. std::string proto_file = caseDir + "/origin_models/caffe.proto";
  3082. caffe_proto_path = proto_file.c_str();
  3083. ret = proto_file_parser.CombineProtoFile(caffe_proto_path, caffe_proto_path, fusion_proto_file);
  3084. EXPECT_EQ(ret, SUCCESS);
  3085. ret = proto_file_parser.WriteProtoFile(caffe_proto_path, custom_proto_path);
  3086. EXPECT_EQ(ret, FAILED);
  3087. std::string dest_line = "test";
  3088. ret = proto_file_parser.FindConflictLine(custom_proto_path, 0, dest_line);
  3089. EXPECT_EQ(ret, FAILED);
  3090. std::map<int, std::pair<string, string>> identifier_op_map;
  3091. std::map<std::string, std::pair<int, string>> op_identifier_map;
  3092. ret = proto_file_parser.ParseProtoFile(custom_proto_path, identifier_op_map, op_identifier_map);
  3093. EXPECT_EQ(ret, FAILED);
  3094. proto_file_parser.GetFusionProtoFile();
  3095. std::ofstream write_tmp;
  3096. ret = proto_file_parser.AddCustomAndConflictMessage(custom_proto_path, write_tmp);
  3097. EXPECT_EQ(ret, FAILED);
  3098. }
TEST_F(STestTensorflowParser, all_success)
{
  PreChecker::OpId id1 = (void*)(intptr_t)1;
  PreChecker::OpId id2 = (void*)(intptr_t)2;
  PreChecker::OpId id3 = (void*)(intptr_t)3;
  PreChecker::OpId id4 = (void*)(intptr_t)4;
  PreChecker &checker = PreChecker::Instance();
  EXPECT_EQ(checker.AddOp(id1, "name1", "type1"), SUCCESS);
  EXPECT_EQ(checker.AddOp(id2, "name2", "type2"), SUCCESS);
  EXPECT_EQ(checker.AddOp(id3, "name1", "type3"), SUCCESS);
  EXPECT_EQ(checker.AddOp(id4, "name4", ge::parser::DETECTIONOUTPUT), SUCCESS);
  EXPECT_EQ(checker.CheckName(id1), SUCCESS);
  EXPECT_EQ(checker.CheckName(id2), SUCCESS);
  EXPECT_EQ(checker.CheckName(id3), SUCCESS);
  EXPECT_EQ(checker.CheckName(id4), SUCCESS);
  EXPECT_EQ(checker.CheckType(id1), SUCCESS);
  EXPECT_EQ(checker.CheckType(id2), SUCCESS);
  EXPECT_EQ(checker.CheckType(id3), SUCCESS);
  EXPECT_EQ(checker.CheckType(id4), SUCCESS);
  EXPECT_EQ(checker.AddCause(id1, PreChecker::ErrorCode::OK, "msg"), SUCCESS);
  EXPECT_EQ(checker.AddCause(id1, PreChecker::ErrorCode::PARAM_INVALID, "msg"), domi::SUCCESS);
  PreChecker::Cause cause;
  cause.code = PreChecker::ErrorCode::TYPE_AMBIGUOUS;
  cause.message = "msg";
  EXPECT_EQ(checker.AddCause(id1, cause), SUCCESS);
  EXPECT_EQ(checker.HasError(), true);
  EXPECT_EQ(checker.Save("check_result.json"), SUCCESS);
  std::string msg = "msg";
  Status ret = checker.Clear(id1, msg);
  EXPECT_EQ(ret, SUCCESS);
  checker.Clear();
  checker.RefreshErrorMessageByName("name1", PreChecker::ErrorCode::PARAM_INVALID, "node repeated in");
}
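// TBEPluginLoader: so-name suffix matching, parser .so lookup and custom op path
// resolution based on the ASCEND_OPP_PATH environment variable.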
TEST_F(STestTensorflowParser, tensorflow_tbe_tfplugin_loader_test)
{
  TBEPluginLoader pluginLoad;
  vector<string> fileList = {};
  string caffeParserPath = "";
  string full_name = "dabc";
  string caffe_parser_so_suff = "abc";
  pluginLoad.ProcessSoFullName(fileList, caffeParserPath, full_name, caffe_parser_so_suff);
  ASSERT_EQ(caffeParserPath, full_name);
  pluginLoad.ClearHandles_();
  std::cout << __FILE__ << std::endl;
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  std::string proto_file = caseDir + "/origin_models/";
  std::string path = proto_file;
  std::string caffe_parser_path = path;
  pluginLoad.FindParserSo(path, fileList, caffe_parser_path);
  setenv("ASCEND_OPP_PATH", "aaa", 1);
  std::string customop_path = "";
  pluginLoad.GetCustomOpPath(customop_path);
  ASSERT_EQ(customop_path, "aaa/framework/custom/:aaa/framework/built-in/tensorflow");
  Status ret = pluginLoad.Finalize();
  EXPECT_EQ(ret, SUCCESS);
}
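// DataOpParser: 5D input/output tensor initialization and shape parsing.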
TEST_F(STestTensorflowParser, tensorflow_data_op_parser_test)
{
  std::vector<int64_t> shape = {1, 1, 224, 224};
  ge::GeTensorDesc tensor_desc;
  DataOpParser opParser;
  Status ret = opParser.Init5DInputTensor(shape, tensor_desc);
  EXPECT_EQ(ret, SUCCESS);
  ret = opParser.Init5DOutputTensor(shape, tensor_desc);
  EXPECT_EQ(ret, SUCCESS);
  ge::OpDescPtr op = std::make_shared<ge::OpDesc>();
  ret = opParser.ParseShape(shape, op);
}
TEST_F(STestTensorflowParser, read_proto_from_mem_test)
{
  tensorflow::NodeDef *node_def = initNodeDef();
  const char *data = nullptr;
  int size = 3;
  bool ret = parser::ReadProtoFromMem(data, size, node_def);
  EXPECT_EQ(false, ret);
  data = "not file";
  ret = parser::ReadProtoFromMem(data, size, node_def);
  EXPECT_EQ(false, ret);
}
TEST_F(STestTensorflowParser, tensorflow_GetOriginalType_test)
{
  ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  ge::OpDescPtr op = std::make_shared<ge::OpDesc>("fusionCustom", parser::FRAMEWORKOP);
  ge::NodePtr node = std::make_shared<ge::Node>(op, graph);
  string type = parser::FRAMEWORKOP;
  Status ret = parser::GetOriginalType(node, type);
  EXPECT_EQ(ret, INTERNAL_ERROR);
}
TEST_F(STestTensorflowParser, tensorflow_ReadBytesFromBinaryFile_test)
{
  const char *file_name = nullptr;
  char *buffer = nullptr;
  int length = 1;
  bool ret = parser::ReadBytesFromBinaryFile(file_name, &buffer, length);
  EXPECT_EQ(ret, false);
  file_name = "./caffe.proto";
  ret = parser::ReadBytesFromBinaryFile(file_name, &buffer, length);
  EXPECT_EQ(ret, false);
  std::cout << __FILE__ << std::endl;
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  std::string proto_file = caseDir + "/origin_models/caffe.proto";
  file_name = proto_file.c_str();
  ret = parser::ReadBytesFromBinaryFile(file_name, &buffer, length);
  EXPECT_EQ(ret, true);
  char path[4096 + 1] = { 0 };
  memset(path, 'a', 4096);
  std::string realPath = parser::RealPath(path);
  EXPECT_EQ(realPath, "");
  const char *real_path = nullptr;
  realPath = parser::RealPath(real_path);
  EXPECT_EQ(realPath, "");
}
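// AclGrphParseUtil: validation of input fp16 node options and the output fp16 format flag.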
TEST_F(STestTensorflowParser, tensorflow_AclGrphParseUtil_ParseAclInputFp16Nodes_test)
{
  AclGrphParseUtil parserUtil;
  ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  std::string input_fp16_nodes = "Add";
  std::string is_input_adjust_hw_layout = "is_input_adjust_hw_layout";
  Status ret = parserUtil.ParseAclInputFp16Nodes(graph, input_fp16_nodes, is_input_adjust_hw_layout);
  EXPECT_EQ(ret, PARAM_INVALID);
  is_input_adjust_hw_layout = "true";
  ret = parserUtil.ParseAclInputFp16Nodes(graph, input_fp16_nodes, is_input_adjust_hw_layout);
  EXPECT_EQ(ret, PARAM_INVALID);
  vector<string> adjust_fp16_format_vec = {"true", "false"};
  uint32_t index = 1;
  ge::OpDescPtr op_desc = std::make_shared<ge::OpDesc>();
  parserUtil.AddAttrsForInputNodes(adjust_fp16_format_vec, input_fp16_nodes, index, op_desc);
  std::string is_output_fp16 = "is_output_fp16";
  ret = parserUtil.ParseAclOutputFp16NodesFormat(is_output_fp16);
  EXPECT_EQ(ret, PARAM_INVALID);
  is_output_fp16 = "false";
  ret = parserUtil.ParseAclOutputFp16NodesFormat(is_output_fp16);
  EXPECT_EQ(ret, SUCCESS);
  is_output_fp16 = "true";
  ret = parserUtil.ParseAclOutputFp16NodesFormat(is_output_fp16);
  EXPECT_EQ(ret, SUCCESS);
}
TEST_F(STestTensorflowParser, tensorflow_ModelSaver_test)
{
  const char *file_path = nullptr;
  const Json model = {{"a", "b"}};
  Status ret = ge::parser::ModelSaver::SaveJsonToFile(file_path, model);
  EXPECT_EQ(ret, FAILED);
  file_path = "./origin_models/";
  ret = ge::parser::ModelSaver::SaveJsonToFile(file_path, model);
  EXPECT_EQ(ret, FAILED);
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  std::string proto_file = caseDir + "/origin_models/caffe.proto";
  file_path = proto_file.c_str();
  ret = ge::parser::ModelSaver::SaveJsonToFile(file_path, model);
  char path[4096 + 1] = { 0 };
  memset(path, 'a', 4096);
  EXPECT_EQ(-1, ge::parser::ModelSaver::CreateDirectory(path));
  EXPECT_EQ(-1, ge::parser::ModelSaver::CheckPath(path));
}
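// Factories return null parsers for the unsupported FRAMEWORK_RESERVED framework
// and for an unregistered op type.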
TEST_F(STestTensorflowParser, create_weights_parser_failed)
{
  WeightsParserFactory *factory = WeightsParserFactory::Instance();
  shared_ptr<WeightsParser> weight_parser = factory->CreateWeightsParser(FRAMEWORK_RESERVED);
  ASSERT_TRUE(NULL == weight_parser);
  ModelParserFactory *modelFactory = ModelParserFactory::Instance();
  shared_ptr<ModelParser> model_parser = modelFactory->CreateModelParser(FRAMEWORK_RESERVED);
  ASSERT_TRUE(NULL == model_parser);
  std::shared_ptr<OpParserFactory> parserFactory = OpParserFactory::Instance(domi::FrameworkType::CAFFE);
  std::shared_ptr<OpParser> fusion_op_parser = parserFactory->CreateFusionOpParser(ge::parser::DATA);
  ASSERT_TRUE(NULL == fusion_op_parser);
  std::shared_ptr<OpParser> op_parser = parserFactory->CreateOpParser("10");
  ASSERT_TRUE(NULL == op_parser);
}
TEST_F(STestTensorflowParser, custom_parser_adapter_register)
{
  using PARSER_CREATOR_FN = std::function<std::shared_ptr<OpParser>(void)>;
  PARSER_CREATOR_FN func = CustomParserAdapterRegistry::Instance()->GetCreateFunc(domi::TENSORFLOW);
  CustomParserAdapterRegistry::Instance()->Register(domi::TENSORFLOW, func);
  CustomParserAdapterRegistry::Instance()->Register(domi::TENSORFLOW, func);
  func = CustomParserAdapterRegistry::Instance()->GetCreateFunc(domi::FRAMEWORK_RESERVED);
  ASSERT_EQ(nullptr, func);
}
TEST_F(STestTensorflowParser, tensorflow_parser_api_test)
{
  std::map<std::string, std::string> options = {{"ge.runFlag", "1"}};
  Status ret = ParserInitialize(options);
  EXPECT_EQ(ret, SUCCESS);
  ret = ParserInitialize(options);
  EXPECT_EQ(ret, SUCCESS);
  ret = ParserFinalize();
  EXPECT_EQ(ret, SUCCESS);
  ret = ParserFinalize();
  EXPECT_EQ(ret, SUCCESS);
}
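// fp16_t smoke test: conversion helpers, arithmetic/comparison/assignment operators
// for all supported scalar types, and Inf detection on both infinity bit patterns.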
TEST_F(STestTensorflowParser, tensorflow_FP16_parser_test)
{
  parser::fp16_t fp16;
  fp16.ToDouble();
  fp16.ToInt8();
  fp16.ToUInt8();
  fp16.ToInt16();
  fp16.ToUInt16();
  fp16.ToInt32();
  fp16.ToUInt32();
  fp16.IsInf();
  fp16.operator+(fp16);
  fp16.operator-(fp16);
  fp16.operator*(fp16);
  fp16.operator/(fp16);
  fp16.operator+=(fp16);
  fp16.operator-=(fp16);
  fp16.operator*=(fp16);
  fp16.operator/=(fp16);
  fp16.operator==(fp16);
  fp16.operator!=(fp16);
  fp16.operator>(fp16);
  fp16.operator>=(fp16);
  fp16.operator<(fp16);
  fp16.operator<=(fp16);
  fp16.operator=(fp16);
  float f_val = 0.1;
  fp16.operator=(f_val);
  double d_val = 0.2;
  fp16.operator=(d_val);
  int8_t i_val = 1;
  fp16.operator=(i_val);
  uint8_t ui_val = 2;
  fp16.operator=(ui_val);
  int16_t i_vals = 1;
  fp16.operator=(i_vals);
  uint16_t ui16_val = 1;
  fp16.operator=(ui16_val);
  ui16_val = 0;
  fp16.operator=(ui16_val);
  ui16_val = 1;
  fp16.operator=(ui16_val);
  int32_t i32_val = 0;
  fp16.operator=(i32_val);
  i32_val = 1;
  fp16.operator=(i32_val);
  uint32_t ui32_val = 0;
  fp16.operator=(ui32_val);
  ui32_val = 1;
  fp16.operator=(ui32_val);
  float f_val1 = 2139095000.2;
  ge::parser::fp16_t fp16_1, fp16_2;
  fp16_1.operator=(fp16_2);
  fp16_1.operator=(f_val1);
  float f_val2 = 0.0000112;
  fp16_1.operator=(f_val2);
  float f_val3 = 0.0000000299;
  fp16_1.operator=(f_val3);
  float f_val4 = 0.00000000299;
  fp16_1.operator=(f_val4);
  uint32_t u_val1 = 4095;
  fp16_1.operator=(u_val1);
  uint16_t u16_val1 = 4095;
  fp16_1.operator=(u16_val1);
  int16_t int_val1 = 0;
  fp16_1.operator=(int_val1);
  int16_t int_val2 = -32767;
  fp16_1.operator=(int_val2);
  i_val = -0x7FFFFFFF;
  fp16_1.operator=(i_val);
  fp16.operator=(f_val1);
  float f = fp16;  // implicit conversions back to the built-in types
  double d = fp16;
  int8_t int8 = fp16;
  uint8_t uint8 = fp16;
  uint16_t uint16 = fp16;
  int32_t int32 = fp16;
  uint32_t uint32 = fp16;
  int64_t int64 = fp16;
  uint64_t uint64 = fp16;
  (void)f;
  (void)d;
  (void)int8;
  (void)uint8;
  (void)uint16;
  (void)int32;
  (void)uint32;
  (void)int64;
  (void)uint64;
  parser::fp16_t val;
  val.val = 0x7C00;
  val.IsInf();
  val.val = 0xFC00;
  val.IsInf();
  parser::fp16_t fp16_3, fp16_4;
  fp16_3.val = 1;
  fp16_4.val = 2;
  fp16_4.operator/(fp16_3);
  fp16.val = 21504;
  int16_t int16 = fp16;
  int8 = fp16;
  (void)int16;
}
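// AclParserInitialize fails with empty options and succeeds once FRAMEWORK_TYPE is provided.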
TEST_F(STestTensorflowParser, tensorflow_AclParserInitialize_test)
{
  AclGrphParseUtil parseUtil;
  std::map<std::string, std::string> options;
  Status ret = parseUtil.AclParserInitialize(options);
  EXPECT_EQ(ret, FAILED);
  options = {{ge::FRAMEWORK_TYPE, "2"}};
  ret = parseUtil.AclParserInitialize(options);
  EXPECT_EQ(ret, SUCCESS);
}
TEST_F(STestTensorflowParser, tensorflow_GetOutputLeaf_test)
{
  AclGrphParseUtil parseUtil;
  ge::ComputeGraphPtr compute_graph = build_graph(true);
  ge::NodePtr output_nodes_info = compute_graph->FindNode("Relu3");
  std::vector<std::pair<ge::NodePtr, int32_t>> output_nodes = {{output_nodes_info, 0}};
  ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>("default");
  ge::NodePtr node = AddNode(compute_graph, "K", parser::NETOUTPUT, 1, 1);
  Status ret = parseUtil.GetOutputLeaf(node, output_nodes);
  EXPECT_EQ(ret, FAILED);
}
TEST_F(STestTensorflowParser, graph_pass_error)
{
  ComputeGraphPtr graph = std::make_shared<ComputeGraph>("test");
  ErrorGraphPass pass;
  ge::parser::PassManager passManager;
  std::vector<std::pair<string, GraphPass*>> passes;
  passes.emplace_back("", &pass);
  Status status = passManager.Run(graph, passes);
  EXPECT_EQ(domi::FAILED, status);
}
TEST_F(STestTensorflowParser, parser_FindFmkNodeCluser_success)
{
  ComputeGraphPtr graph = std::make_shared<ComputeGraph>("FrameworkOp");
  ParserGraphOptimizer graphOptimizer(graph, domi::TENSORFLOW);
  ge::NodePtr node = AddNode(graph, "K", parser::FRAMEWORK_OP_TYPE, 1, 1);
  ge::NodePtr output_nodes_info = graph->FindNode("Relu3");
  std::unordered_map<string, vector<NodePtr>> node_cluser_Map({
      {"x", {node, output_nodes_info}},
  });
  Status ret = graphOptimizer.FindFmkNodeCluser(node_cluser_Map);
  EXPECT_EQ(ret, SUCCESS);
}
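// Collects DATA-facing input anchors and NETOUTPUT-facing output anchors from a DAG,
// then rebuilds them on a fused op desc.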
TEST_F(STestTensorflowParser, parser_RebuildOutputAnchors_test)
{
  ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  ParserGraphOptimizer graphOptimizer(subGraph, domi::TENSORFLOW);
  string inputNodeType = "DATA";
  MakeDagGraph(subGraph, inputNodeType);
  vector<ge::InDataAnchorPtr> in_anchor;
  vector<ge::OutDataAnchorPtr> out_anchor;
  for (ge::NodePtr node : subGraph->GetAllNodes()) {
    for (auto out : node->GetAllOutDataAnchors()) {
      for (auto in : node->GetAllInDataAnchors()) {
        if (in->GetPeerOutAnchor() != nullptr &&
            in->GetPeerOutAnchor()->GetOwnerNode()->GetOpDesc()->GetType() == parser::DATA) {
          in_anchor.push_back(in);
        }
      }
      for (auto i : out->GetPeerInDataAnchors()) {
        if (i->GetOwnerNode()->GetOpDesc()->GetType() == parser::NETOUTPUT) {
          out_anchor.push_back(out);
        }
      }
    }
  }
  OpDescPtr fusion_op_desc = make_shared<ge::OpDesc>("FusionCustom", ge::parser::CONSTANT);
  Status ret = graphOptimizer.RebuildOutputAnchors(out_anchor, fusion_op_desc);
  EXPECT_EQ(domi::SUCCESS, ret);
  ret = graphOptimizer.RebuildInputAnchors(in_anchor, fusion_op_desc);
  EXPECT_EQ(domi::SUCCESS, ret);
}
TEST_F(STestTensorflowParser, parser_LinkInnerAnchor_test)
{
  ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  NodePtr node_a = AddNode(subGraph, "A", parser::NETOUTPUT, 1, 1);
  NodePtr node_b = AddNode(subGraph, "B", parser::NETOUTPUT, 1, 1);
  unordered_map<string, ge::NodePtr> node_map;
  node_map.insert(pair<string, ge::NodePtr>("A", node_a));
  node_map.insert(pair<string, ge::NodePtr>("B", node_b));
  ParserGraphOptimizer graphOptimizer(subGraph, domi::TENSORFLOW);
  graphOptimizer.LinkInnerAnchor(node_map);
}
TEST_F(STestTensorflowParser, parser_MarkForFusion_test)
{
  ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  ParserGraphOptimizer graphOptimizer(subGraph, domi::TENSORFLOW);
  ge::NodePtr node = AddNode(subGraph, "K", parser::FRAMEWORK_OP_TYPE, 1, 1);
  ge::NodePtr output_nodes_info = subGraph->FindNode("Relu3");
  std::unordered_map<string, vector<NodePtr>> node_cluser_Map({
      {"x", {node, output_nodes_info}},
  });
  Status ret = graphOptimizer.MarkForFusion(node_cluser_Map);
  EXPECT_EQ(ret, INTERNAL_ERROR);
}
TEST_F(STestTensorflowParser, parser_UpdateGraph_test)
{
  std::vector<NodePtr> nodes;
  ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  ParserGraphOptimizer graphOptimizer(subGraph, domi::TENSORFLOW);
  NodePtr node_a = AddNode(subGraph, "A", parser::NETOUTPUT, 1, 1);
  NodePtr node_b = AddNode(subGraph, "B", parser::NETOUTPUT, 1, 1);
  nodes.emplace_back(node_a);
  nodes.emplace_back(node_b);
  Status ret = graphOptimizer.UpdateGraph(nodes);
  EXPECT_EQ(ret, PARAM_INVALID);
}
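// RebuildFusionNode with anchors collected from the DAG but an empty output_in_map
// is expected to fail.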
TEST_F(STestTensorflowParser, parser_RebuildFusionNode_test)
{
  ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  ParserGraphOptimizer graphOptimizer(graph, domi::TENSORFLOW);
  string inputNodeType = "DATA";
  MakeDagGraph(graph, inputNodeType);
  vector<ge::InDataAnchorPtr> input_anchors;
  vector<ge::OutDataAnchorPtr> output_anchors;
  for (ge::NodePtr node : graph->GetAllNodes()) {
    for (auto out : node->GetAllOutDataAnchors()) {
      for (auto in : node->GetAllInDataAnchors()) {
        if (in->GetPeerOutAnchor() != nullptr &&
            in->GetPeerOutAnchor()->GetOwnerNode()->GetOpDesc()->GetType() == parser::DATA) {
          input_anchors.push_back(in);
        }
      }
      for (auto i : out->GetPeerInDataAnchors()) {
        if (i->GetOwnerNode()->GetOpDesc()->GetType() == parser::NETOUTPUT) {
          output_anchors.push_back(out);
        }
      }
    }
  }
  map<ge::OutDataAnchorPtr, vector<ge::InDataAnchorPtr>> output_in_map;
  vector<ge::InControlAnchorPtr> input_control_anchors;
  vector<ge::OutControlAnchorPtr> output_control_anchors;
  ge::OpDescPtr op = std::make_shared<ge::OpDesc>("dpop_123", "FrameworkOp");
  ge::NodePtr fusion_node = std::make_shared<ge::Node>(op, graph);
  Status ret = graphOptimizer.RebuildFusionNode(input_anchors, output_anchors, output_in_map,
                                                input_control_anchors, output_control_anchors, fusion_node);
  EXPECT_EQ(ret, FAILED);
}
TEST_F(STestTensorflowParser, parser_InsertNode_test)
{
  std::vector<NodePtr> nodes;
  ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  ParserGraphOptimizer graphOptimizer(subGraph, domi::TENSORFLOW);
  auto merge_node = AddNode(subGraph, "Merge", parser::MERGE, 1, 2);
  auto node1 = AddNode(subGraph, "Op1", parser::RELU, 1, 1);
  auto node2 = AddNode(subGraph, "Op2", parser::CONVOLUTION, 1, 1);
  auto node3 = AddNode(subGraph, "Op3", parser::CONVOLUTION, 1, 1);
  nodes.emplace_back(merge_node);
  nodes.emplace_back(node1);
  nodes.emplace_back(node2);
  nodes.emplace_back(node3);
  vector<ge::InDataAnchorPtr> in_anchor;
  vector<ge::OutDataAnchorPtr> out_anchor;
  map<ge::OutDataAnchorPtr, vector<ge::InDataAnchorPtr>> output_in_map;
  vector<ge::InControlAnchorPtr> input_control_anchors;
  vector<ge::OutControlAnchorPtr> output_control_anchors;
  unordered_map<string, ge::NodePtr> node_map;
  node_map.insert(pair<string, ge::NodePtr>("A", merge_node));
  node_map.insert(pair<string, ge::NodePtr>("B", node1));
  node_map.insert(pair<string, ge::NodePtr>("C", node2));
  node_map.insert(pair<string, ge::NodePtr>("D", node3));
  Status ret = graphOptimizer.InsertNode(subGraph, nodes, in_anchor, out_anchor, output_in_map,
                                         input_control_anchors, output_control_anchors, node_map);
  EXPECT_EQ(ret, PARAM_INVALID);
}
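// GeStoi returns INTERNAL_ERROR when the index string is not numeric.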
TEST_F(STestTensorflowParser, parser_GeStoi_test)
{
  TensorFlowModelParser model_parser;
  string input_node_name = "dynamic_rnn_node1";
  string index_str = "dynamic_rnn";
  int32_t index = 0;
  Status ret = model_parser.GeStoi(input_node_name, index_str, &index);
  EXPECT_EQ(ret, INTERNAL_ERROR);
}
TEST_F(STestTensorflowParser, parser_ConstOpNeedUpdate_test)
{
  ge::TensorFlowModelParser tensorflow_parser;
  NodeDef *op_node_def = new NodeDef();
  op_node_def->set_name("OP");
  op_node_def->add_input("OP/Input_1");
  op_node_def->set_op(TENSORFLOWF_NODE_OP_CONST);
  NodeDef *input_node = new NodeDef();
  input_node->set_op(TENSORFLOWF_NODE_OP_IDENTITY);
  input_node->add_input("OP/Input_1/Input_2");
  NodeDef *input_2 = new NodeDef();
  input_2->set_op(TENSORFLOWF_NODE_OP_IDENTITY);
  tensorflow_parser.nodedef_map_["OP"] = op_node_def;
  tensorflow_parser.nodedef_map_["OP/Input_1"] = input_node;
  tensorflow_parser.nodedef_map_["OP/Input_1/Input_2"] = input_2;
  std::string op_name = "OP/Input_1/Input_2";
  bool ret = tensorflow_parser.ConstOpNeedUpdate(op_name);
  EXPECT_EQ(ret, true);
  op_name = "OP";
  ret = tensorflow_parser.ConstOpNeedUpdate(op_name);
  EXPECT_EQ(ret, true);
  delete op_node_def;
  delete input_node;
  delete input_2;
}
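// UppdateInputMap/UppdateOutputMap for a scope-fusion op, followed by parsing the
// weights of origin_models/tf_add.pb.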
TEST_F(STestTensorflowParser, parser_UppdateInputMap_test)
{
  ge::TensorFlowModelParser tensorflow_parser;
  ScopeFusionOpInfo info;
  ge::OpNodeContext normal_op_node_context;
  ge::OpNodeContext fusion_op_node_context;
  string fusion_op_name = "dropout";
  normal_op_node_context.input_map["dropout"].push_back({0, 0});
  normal_op_node_context.input_map["conv_conv5/BatchNorm/moving_variance"].push_back({0, 1});
  normal_op_node_context.output_map["dropout"].push_back({1, 0});
  normal_op_node_context.output_map["conv_conv5/BatchNorm/batchnorm/add/y"].push_back({-1, -1});
  tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  ScopePassManager passmanager;
  shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  NodeDef *node1 = graph->add_node();
  node1->set_name("dropout");
  node1->set_op(TENSORFLOWF_NODE_OP_IDENTITY);
  node1->add_input("conv_conv5/BatchNorm/moving_variance");
  node1->add_input("conv_conv5/BatchNorm/batchnorm/add/y");
  NodeDef *node2 = graph->add_node();
  node2->set_name("conv_conv5/BatchNorm/moving_variance");
  node2->set_op(TENSORFLOWF_NODE_OP_IDENTITY);
  NodeDef *node3 = graph->add_node();
  node3->set_name("conv_conv5/BatchNorm/batchnorm/add/y");
  node3->set_op(TENSORFLOWF_NODE_OP_IDENTITY);
  info.fusion_node_name = "conv_conv5/BatchNorm/batchnorm";
  info.fusion_op_type = parser::FUSIONBATCHNORM;
  info.node_name = "conv_conv5/BatchNorm/batchnorm/add";
  info.description = "";
  info.scope_pass = true;
  tensorflow_parser.nodedef_map_["dropout"] = node1;
  tensorflow_parser.nodedef_map_["conv_conv5/BatchNorm/moving_variance"] = node2;
  tensorflow_parser.nodedef_map_["conv_conv5/BatchNorm/batchnorm/add/y"] = node3;
  Status ret = tensorflow_parser.UppdateInputMap(scope_graph, info, fusion_op_node_context, normal_op_node_context);
  EXPECT_EQ(ret, domi::SUCCESS);
  ret = tensorflow_parser.UppdateOutputMap(scope_graph, info, fusion_op_node_context, normal_op_node_context);
  TensorFlowWeightsParser weights_parser;
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  std::string proto_file = caseDir + "/origin_models/tf_add.pb";
  const char *file = proto_file.c_str();
  ge::Graph graphs;
  Status weightsRet = weights_parser.Parse(file, graphs);
  EXPECT_EQ(weightsRet, SUCCESS);
  delete graph;
}
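// Parses test_getnext_dynamic_fusion.pbtxt and checks that the fused graph contains
// three direct nodes.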
TEST_F(STestTensorflowParser, tensorflow_optimizer_fmk_fusion_op) {
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  const std::string root_proto = caseDir + "/origin_models/test_getnext_dynamic_fusion.pbtxt";
  domi::tensorflow::GraphDef graphDef;
  bool protoRet = parser::ReadProtoFromText(root_proto.c_str(), &graphDef);
  ASSERT_EQ(protoRet, true);
  TensorFlowModelParser tensorflow_parser;
  ge::ComputeGraphPtr root_graph = ge::parser::MakeShared<ge::ComputeGraph>("tmp_graph");
  Status ret = tensorflow_parser.ParseProto(reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph);
  EXPECT_EQ(ret, SUCCESS);
  EXPECT_EQ(root_graph->GetDirectNode().size(), 3);
}
} // namespace ge