
test_tensorflow_parser.cc 171 kB

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <gtest/gtest.h>
#define protected public
#define private public
#include "parser/common/op_parser_factory.h"
#include "parser/tensorflow/tensorflow_parser.h"
#include "graph/operator_reg.h"
#include "register/op_registry.h"
#include "external/register/register.h"
#include "st/parser_st_utils.h"
#include "tests/depends/ops_stub/ops_stub.h"
#include "parser/common/acl_graph_parser_util.h"
#include "external/ge/ge_api_types.h"
#include "omg/parser/parser_factory.h"
#include "common/pre_checker.h"
#include "common/util.h"
#include "external/parser/tensorflow_parser.h"
#include "parser/tensorflow/tensorflow_constant_parser.h"
#include "common/types.h"
#include "parser/common/op_def/variable_operator.h"
#include "parser/tensorflow/tensorflow_ref_switch_parser.h"
#include "parser/tensorflow/tensorflow_fusion_op_parser.h"
#include "parser/tensorflow/tensorflow_auto_mapping_parser_adapter.h"
#include "parser/common/op_def/arg_op_operator.h"
#include "parser/tensorflow/tensorflow_fusion_custom_parser_adapter.h"
#include "parser/tensorflow/tensorflow_reshape_parser.h"
#include "parser/tensorflow/tensorflow_custom_parser_adapter.h"
#include "parser/tensorflow/tensorflow_squeeze_parser.h"
#include "parser/tensorflow/graph_to_function_def.h"
#include "parser/tensorflow/parser_graph_optimizer.h"
#include "cce/dnn_base_def.hpp"
#include "parser/tensorflow/scope/scope_pass_manager.h"
#include "parser/tensorflow/tensorflow_util.h"
#include "compute_graph_impl.h"
#include "parser/tensorflow/tensorflow_enter_parser.h"
#include "parser/common/op_def/ir_pb_converter.h"
#include "parser/common/tuple.h"
#include "common/op_def/framework_op_operator.h"
#include "common/op_def/shape_n_operator.h"
#include "common/op_def/var_is_initialized_op_operator.h"
#include "common/op_def/fill_operator.h"
#include "common/convert/pb2json.h"
#include "common/convert/message2operator.h"
#include "parser/common/proto_file_parser.h"
#include "parser/common/pre_checker.h"
#include "parser/common/tbe_plugin_loader.h"
#include "parser/common/data_op_parser.h"
#include "parser/common/model_saver.h"
#include "framework/omg/parser/parser_api.h"
#include "framework/omg/parser/parser_factory.h"
#include "parser/common/parser_fp16_t.h"
#include "parser/common/prototype_pass_manager.h"
#include "parser/common/op_registration_tbe.h"
#include "parser/common/pass_manager.h"
#include "metadef/inc/register/scope/scope_pass_registry_impl.h"
#include "register/scope/scope_fusion_pass_register.h"
#undef protected
#undef private

using namespace std;
using namespace domi::tensorflow;
using namespace domi;
using namespace cce;
using namespace testing;
using namespace google::protobuf;
static const string GRAPH_DEFAULT_NAME = "default";

namespace ge {
class STestTensorflowParser : public testing::Test {
 protected:
  void SetUp() {
    ParerSTestsUtils::ClearParserInnerCtx();
  }
  void TearDown() {}

 public:
  void RegisterCustomOp();
};

class TestOperator : public ParserOperator {
 public:
  TestOperator() : ParserOperator("test") {}
  ~TestOperator() {}
};

class ErrorGraphPass : public GraphPass {
  Status Run(ComputeGraphPtr graph) {
    return domi::FAILED;
  }
};

class ScopeTestPass : public ScopeBasePass {
 protected:
  vector<ScopeFusionPatterns> DefinePatterns() {
    vector<ScopeFusionPatterns> patterns_list;
    return patterns_list;
  };
  string PassName() {
    return "test";
  };
  Status LastMatchScopesAndOPs(shared_ptr<ScopeGraph> &scope_graph, vector<ScopesResult> &results) {
    return domi::SUCCESS;
  };
  void GenerateFusionResult(const vector<Scope *> &scopes, FusionScopesResult *fusion_rlt) {
    return;
  };
};

static Status ParseParams(const google::protobuf::Message *op_src, ge::Operator &op_dest) {
  return SUCCESS;
}

static Status ParseParamByOpFunc(const ge::Operator &op_src, ge::Operator &op_dest) {
  return SUCCESS;
}
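// Registers a custom "Add" op for the TENSORFLOW framework with ParseParams as its parse
// callback, then replays the accumulated registration data through the TBE parser factory so
// the op parser factory used by the tests below can resolve "Add".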
void STestTensorflowParser::RegisterCustomOp() {
  REGISTER_CUSTOM_OP("Add")
      .FrameworkType(domi::TENSORFLOW)
      .OriginOpType("Add")
      .ParseParamsFn(ParseParams);
  std::vector<OpRegistrationData> reg_datas = domi::OpRegistry::Instance()->registrationDatas;
  for (auto reg_data : reg_datas) {
    domi::OpRegTbeParserFactory::Instance()->Finalize(reg_data);
    domi::OpRegistry::Instance()->Register(reg_data);
  }
  domi::OpRegistry::Instance()->registrationDatas.clear();
}

void AddDumpOriginName(const ge::NodePtr parent_node, const std::string &subgraph_name, ge::ComputeGraphPtr graph);

namespace {
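// Helpers in this anonymous namespace hand-build tensorflow::NodeDef / GraphDef protos for the
// parser. AddNode attaches a serialized OpDef as the "op_def" attribute; AddInput wires either a
// control input ("^name", srcIndex == -1) or a data input ("name" / "name:index") and fills the
// serialized input/output tensor descriptors the parser expects on each node.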
NodeDef *AddNode(GraphDef &graph, string type, string name) {
  NodeDef *nodeDef = graph.add_node();
  nodeDef->set_op(type);
  nodeDef->set_name(name);

  tensorflow::OpDef op_def;
  string op_def_string;
  op_def.SerializeToString(&op_def_string);

  tensorflow::AttrValue value;
  value.set_s(op_def_string);
  nodeDef->mutable_attr()->insert({"op_def", value});
  return nodeDef;
}

void AddInput(NodeDef *src, NodeDef *dst, int srcIndex) {
  if (srcIndex == -1) {
    dst->add_input("^" + src->name());
  } else {
    if (srcIndex == 0) {
      dst->add_input(src->name());
    } else {
      dst->add_input(src->name() + ":" + std::to_string(srcIndex));
    }
    {
      auto input = (*dst->mutable_attr())[ge::ATTR_NAME_INPUT_TENSOR_DESC].mutable_list()->add_func();
      tensorflow::AttrValue val1;
      val1.set_i(0);
      (*input->mutable_attr())["serialize_format"] = val1;
      tensorflow::AttrValue val2;
      val2.set_i(tensorflow::DT_FLOAT);
      (*input->mutable_attr())["serialize_datatype"] = val2;
      tensorflow::AttrValue val3;
      val3.mutable_list()->add_i(10);
      (*input->mutable_attr())["serialize_shape"] = val3;
    }
    {
      auto output = (*src->mutable_attr())[ge::ATTR_NAME_OUTPUT_TENSOR_DESC].mutable_list()->add_func();
      tensorflow::AttrValue val1;
      val1.set_i(0);
      (*output->mutable_attr())["serialize_format"] = val1;
      tensorflow::AttrValue val2;
      val2.set_i(tensorflow::DT_FLOAT);
      (*output->mutable_attr())["serialize_datatype"] = val2;
      tensorflow::AttrValue val3;
      val3.mutable_list()->add_i(10);
      (*output->mutable_attr())["serialize_shape"] = val3;
    }
  }
}
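// The init*NodeDef helpers below allocate NodeDefs for specific TensorFlow op types (Const,
// VariableV2, TemporaryVariable, fusion inputs, Squeeze-style axis/dims nodes) and pre-populate
// the attributes the corresponding op parsers read. Callers own the returned pointers.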
NodeDef *initNodeDef() {
  NodeDef *nodeDef = new NodeDef();
  nodeDef->set_op("Const");
  ::google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = nodeDef->mutable_attr();

  // Set the T attribute
  domi::tensorflow::AttrValue t_attr_value;
  t_attr_value.set_type(domi::tensorflow::DT_INT32);
  (*node_attr_map)[TENSORFLOW_ATTR_T] = t_attr_value;

  domi::tensorflow::AttrValue dtype_attr_value;
  dtype_attr_value.set_type(domi::tensorflow::DT_INT32);
  (*node_attr_map)[TENSORFLOW_ATTR_DTYPE] = dtype_attr_value;

  // Set the output op attribute
  domi::tensorflow::AttrValue outputs_attr_value;
  ::tensorflow::AttrValue_ListValue *list = outputs_attr_value.mutable_list();
  list->add_s("MatMul");
  (*node_attr_map)[TENSORFLOW_ATTR_OUTPUT_OP] = outputs_attr_value;

  // Set the tensor attribute
  domi::tensorflow::AttrValue value_attr_value;
  tensorflow::TensorProto *tensor = value_attr_value.mutable_tensor();
  tensorflow::TensorShapeProto *tensor_shape = tensor->mutable_tensor_shape();
  tensor_shape->clear_dim();
  tensor_shape->add_dim()->set_size(4);
  tensor_shape->add_dim()->set_size(6);
  tensor->set_dtype(domi::tensorflow::DT_INT32);
  float *addr = new float[24];
  for (int32_t i = 0; i < 24; i++) {
    *(addr + i) = 1.0 + i;
  }
  tensor->set_tensor_content((void *)addr, 24 * sizeof(float));
  (*node_attr_map)[TENSORFLOW_ATTR_VALUE] = value_attr_value;
  delete[] addr;
  return nodeDef;
}

NodeDef *initOpNodeDef_VariableV2() {
  NodeDef *nodeDef = new NodeDef();
  nodeDef->set_op("VariableV2");
  google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = nodeDef->mutable_attr();

  // Set the format attribute
  domi::tensorflow::AttrValue format_attr_value;
  format_attr_value.set_s("_FZ");
  (*node_attr_map)[VAR_ATTR_FORMAT] = format_attr_value;

  domi::tensorflow::AttrValue type_attr;
  type_attr.set_type(domi::tensorflow::DT_FLOAT);
  (*node_attr_map)[VAR_ATTR_DTYPE] = type_attr;

  domi::tensorflow::AttrValue container_attr_value;
  container_attr_value.set_s("container");
  (*node_attr_map)[VAR_ATTR_CONTAINER] = container_attr_value;

  domi::tensorflow::AttrValue shard_name_attr_value;
  shard_name_attr_value.set_s("shard_name");
  (*node_attr_map)[VAR_ATTR_SHARED_NAME] = shard_name_attr_value;

  domi::tensorflow::AttrValue shape_attr_value;
  shape_attr_value.mutable_shape()->add_dim()->set_size(1);
  shape_attr_value.mutable_shape()->add_dim()->set_size(2);
  shape_attr_value.mutable_shape()->add_dim()->set_size(3);
  shape_attr_value.mutable_shape()->add_dim()->set_size(4);
  (*node_attr_map)[ge::VAR_ATTR_SHAPE] = shape_attr_value;

  domi::tensorflow::AttrValue shape;
  shape.mutable_list()->add_i((int64)32);
  shape.mutable_list()->add_i((int64)32);
  shape.mutable_list()->add_i((int64)14);
  shape.mutable_list()->add_i((int64)14);

  // Set the data_format attribute
  domi::tensorflow::AttrValue df_attr_value;
  domi::tensorflow::AttrValue df_attr_value2;
  df_attr_value2.set_s(TENSORFLOWF_TENSOR_NHWC);
  df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  (*node_attr_map)[TENSORFLOW_ATTR_DATA_FORMAT] = df_attr_value2;

  // Set the padding attribute
  domi::tensorflow::AttrValue pad_attr_value;
  domi::tensorflow::AttrValue pad_attr_value2;
  pad_attr_value2.set_s(TENSORFLOWF_OP_PADDING_SAME);
  (*node_attr_map)[TENSORFLOW_ATTR_PADDING] = pad_attr_value2;
  pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);

  domi::tensorflow::NameAttrList name_attr_list;
  name_attr_list.set_name(std::to_string(0));
  name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  domi::tensorflow::AttrValue output_tensor_descs;
  *(output_tensor_descs.mutable_list()->add_func()) = name_attr_list;
  nodeDef->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, output_tensor_descs});
  return nodeDef;
}

NodeDef *initOpNodeDef_TemporaryVariable() {
  NodeDef *nodeDef = new NodeDef();
  nodeDef->set_op("TemporaryVariable");
  google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = nodeDef->mutable_attr();

  // Set the dtype attribute
  domi::tensorflow::AttrValue type_attr;
  type_attr.set_type(domi::tensorflow::DT_FLOAT);
  (*node_attr_map)[VAR_ATTR_DTYPE] = type_attr;

  // Set the var_name attribute
  domi::tensorflow::AttrValue var_name_attr_value;
  var_name_attr_value.set_s("temporary_variable_name");
  (*node_attr_map)[ge::VAR_ATTR_NAME] = var_name_attr_value;

  // Set the shape attribute
  domi::tensorflow::AttrValue shape_attr_value;
  shape_attr_value.mutable_shape()->add_dim()->set_size(1);
  shape_attr_value.mutable_shape()->add_dim()->set_size(2);
  shape_attr_value.mutable_shape()->add_dim()->set_size(3);
  shape_attr_value.mutable_shape()->add_dim()->set_size(4);
  (*node_attr_map)[ge::VAR_ATTR_SHAPE] = shape_attr_value;

  domi::tensorflow::AttrValue shape;
  shape.mutable_list()->add_i((int64)32);
  shape.mutable_list()->add_i((int64)32);
  shape.mutable_list()->add_i((int64)14);
  shape.mutable_list()->add_i((int64)14);

  // Set the data_format attribute
  domi::tensorflow::AttrValue df_attr_value2;
  df_attr_value2.set_s(TENSORFLOWF_TENSOR_NHWC);
  (*node_attr_map)[TENSORFLOW_ATTR_DATA_FORMAT] = df_attr_value2;
  domi::tensorflow::AttrValue df_attr_value;
  df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);

  // Set the padding attribute
  domi::tensorflow::AttrValue pad_attr_value2;
  pad_attr_value2.set_s(TENSORFLOWF_OP_PADDING_SAME);
  (*node_attr_map)[TENSORFLOW_ATTR_PADDING] = pad_attr_value2;
  domi::tensorflow::AttrValue pad_attr_value;
  pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);

  domi::tensorflow::NameAttrList name_attr_list;
  name_attr_list.set_name(std::to_string(0));
  name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  domi::tensorflow::AttrValue output_tensor_descs;
  *(output_tensor_descs.mutable_list()->add_func()) = name_attr_list;
  nodeDef->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, output_tensor_descs});
  return nodeDef;
}

NodeDef *fusioninitNodeDef(int index) {
  NodeDef *nodeDef = new NodeDef();
  google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = nodeDef->mutable_attr();

  // Set the dtype attribute
  domi::tensorflow::AttrValue dtype_attr_value;
  if (index == 0) {
    dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  } else if (index == 1) {
    dtype_attr_value.set_type(domi::tensorflow::DT_INT32);
  } else if (index == 2) {
    dtype_attr_value.set_type(tensorflow::DT_HALF);
  }
  (*node_attr_map)[ge::TENSORFLOW_ATTR_DTYPE] = dtype_attr_value;

  // Set the data_format attribute
  domi::tensorflow::AttrValue df_attr_value;
  df_attr_value.set_s(TENSORFLOWF_TENSOR_NCHW);
  (*node_attr_map)[TENSORFLOW_ATTR_DATA_FORMAT] = df_attr_value;

  // Set the tensor attribute
  domi::tensorflow::AttrValue value_attr_value;
  ::tensorflow::TensorProto *tensor = value_attr_value.mutable_tensor();
  ::tensorflow::TensorShapeProto *tensor_shape = tensor->mutable_tensor_shape();
  tensor_shape->clear_dim();
  ::tensorflow::TensorShapeProto_Dim *dim = tensor_shape->add_dim();
  dim->set_name("tensor dim");
  dim->set_size(1);
  if (index == 0) {
    tensor->set_dtype(domi::tensorflow::DT_FLOAT);
    float *addr = new float[1];
    *addr = 1.0;
    tensor->set_tensor_content((void *)addr, sizeof(float));
    (*node_attr_map)[TENSORFLOW_ATTR_VALUE] = value_attr_value;
    delete[] addr;
  } else if (index == 1) {
    tensor->set_dtype(domi::tensorflow::DT_INT32);
    int32_t *addr = new int32_t[1];
    *addr = 1;
    tensor->set_tensor_content((void *)addr, sizeof(int32_t));
    (*node_attr_map)[TENSORFLOW_ATTR_VALUE] = value_attr_value;
    delete[] addr;
  } else if (index == 2) {
    tensor->set_dtype(tensorflow::DT_HALF);
    tensor->add_half_val(1);
    (*node_attr_map)[TENSORFLOW_ATTR_VALUE] = value_attr_value;
  }
  return nodeDef;
}
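// GenOriginNodeDef fills the parser's nodedef_map_ with the nodes that surround a fused scope
// (two Const producers, one of them used as a control input, and three Identity consumers);
// FreeNodeDefMap releases exactly the entries that the test allocated through MallocNodeDef.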
NodeDef *MallocNodeDef(const string &name, const string &type) {
  NodeDef *node_def = new (std::nothrow) NodeDef();
  if (node_def != nullptr) {
    node_def->set_name(name);
    node_def->set_op(type);
  }
  return node_def;
}

void GenOriginNodeDef(ge::TensorFlowModelParser *tensorflow_parser, vector<string> &node_name_list) {
  NodeDef *pre_node_a = MallocNodeDef("pre_node_a", "Const");
  EXPECT_NE(pre_node_a, nullptr);
  {
    google::protobuf::Map< ::std::string, ::tensorflow::AttrValue> *node_attr_map = pre_node_a->mutable_attr();
    tensorflow::AttrValue attr_dtype;
    attr_dtype.set_type(tensorflow::DT_FLOAT);
    (*node_attr_map)["dtype"] = attr_dtype;
    tensorflow::AttrValue attr_value;
    tensorflow::TensorProto *tensor = attr_value.mutable_tensor();
    tensor->add_bool_val(true);
    tensor->set_dtype(tensorflow::DT_BOOL);
    (*node_attr_map)["value"] = attr_value;
  }
  tensorflow_parser->nodedef_map_["pre_node_a"] = pre_node_a;
  node_name_list.push_back("pre_node_a");

  NodeDef *pre_node_ctrl_in = MallocNodeDef("pre_node_ctrl_in", "Const");
  EXPECT_NE(pre_node_ctrl_in, nullptr);
  {
    ::google::protobuf::Map< ::std::string, ::tensorflow::AttrValue> *node_attr_map = pre_node_ctrl_in->mutable_attr();
    tensorflow::AttrValue attr_dtype;
    attr_dtype.set_type(tensorflow::DT_FLOAT);
    (*node_attr_map)["dtype"] = attr_dtype;
    tensorflow::AttrValue attr_value;
    tensorflow::TensorProto *tensor = attr_value.mutable_tensor();
    tensor->add_bool_val(true);
    tensor->set_dtype(tensorflow::DT_BOOL);
    (*node_attr_map)["value"] = attr_value;
  }
  tensorflow_parser->nodedef_map_["pre_node_ctrl_in"] = pre_node_ctrl_in;
  node_name_list.push_back("pre_node_ctrl_in");

  NodeDef *post_node_b = MallocNodeDef("post_node_b", "Identity");
  EXPECT_NE(post_node_b, nullptr);
  tensorflow_parser->nodedef_map_["post_node_b"] = post_node_b;
  node_name_list.push_back("post_node_b");

  NodeDef *post_node_c = MallocNodeDef("post_node_c", "Identity");
  EXPECT_NE(post_node_c, nullptr);
  tensorflow_parser->nodedef_map_["post_node_c"] = post_node_c;
  node_name_list.push_back("post_node_c");

  NodeDef *post_node_d = MallocNodeDef("post_node_d", "Identity");
  EXPECT_NE(post_node_d, nullptr);
  tensorflow_parser->nodedef_map_["post_node_d"] = post_node_d;
  node_name_list.push_back("post_node_d");
}

void FreeNodeDefMap(ge::TensorFlowModelParser *tensorflow_parser, set<string> &malloc_node_name_list) {
  for (auto &item : tensorflow_parser->nodedef_map_) {
    if (item.second != nullptr && malloc_node_name_list.count(item.first) > 0) {
      delete (item.second);
      item.second = nullptr;
    }
  }
}
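// Builds a FusionScopesResult that maps one scope input and two scope outputs onto three chained
// inner nodes (Unique -> Identity -> Identity) and registers it with the ScopeGraph, mirroring
// what a scope fusion pass would produce.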
void GenFusionScopesResult(shared_ptr<ScopeGraph> &scope_graph, FusionScopesResult *fusion_rlt,
                           const string &fusion_op_name) {
  if (fusion_rlt == nullptr) {
    return;
  }
  fusion_rlt->InsertInputs("scope_node_1", {0});   // scope input 0
  fusion_rlt->InsertOutputs("scope_node_m", {0});  // scope output 0
  fusion_rlt->InsertOutputs("scope_node_n", {1});  // scope output 1

  fusion_rlt->SetType(ge::kScopeToMultiNodes);
  fusion_rlt->SetName(fusion_op_name);
  fusion_rlt->SetDescription("Description for fusion node");

  // Add inner nodes in sequence.
  auto node1 = fusion_rlt->AddInnerNode("inner_node_1", "Unique");  // add inner node1
  CHECK_INNER_NODE_CONDITION(node1 != nullptr, fusion_rlt);
  auto ret = node1
                 ->InsertInput(ge::kInputFromFusionScope, 0)  // Input from 0th of boundary (a)
                 .InsertOutput(ge::kOutputToFusionScope, 0)   // Output to 0th of boundary (b)
                 .InsertOutput("inner_node_2", 0)             // Output to input 0th of internal node 2
                 .BuildInnerNode();                           // Construct an internal Operator
  CHECK_INNER_NODE_CONDITION(ret == ge::GRAPH_SUCCESS, fusion_rlt);
  string str_val = "This is a string.";
  node1->MutableOperator()->SetAttr("key1", 2);        // Set integer attribute
  node1->MutableOperator()->SetAttr("key2", str_val);  // Set the string attribute
  node1->MutableOperator()->SetAttr("key3", true);     // Set boolean attribute

  auto node2 = fusion_rlt->AddInnerNode("inner_node_2", "Identity");  // add inner node2
  CHECK_INNER_NODE_CONDITION(node2 != nullptr, fusion_rlt);
  ret = node2
            ->InsertInput("inner_node_1", 1)  // The input comes from the 1st output of internal node 1
            .InsertOutput("inner_node_3", 0)  // Output to input 0th of internal node 3
            .BuildInnerNode();
  CHECK_INNER_NODE_CONDITION(ret == ge::GRAPH_SUCCESS, fusion_rlt);
  node2->SetInputFormat("x", "NHWC");
  node2->SetOutputFormat("y", "NHWC");

  auto node3 = fusion_rlt->AddInnerNode("inner_node_3", "Identity");  // add inner node3
  CHECK_INNER_NODE_CONDITION(node3 != nullptr, fusion_rlt);
  ret = node3
            ->InsertInput("inner_node_2", 0)            // The input comes from the 0th output of internal node 2
            .InsertOutput(ge::kOutputToFusionScope, 1)  // Output to 1st of boundary (c)
            .BuildInnerNode();
  CHECK_INNER_NODE_CONDITION(ret == ge::GRAPH_SUCCESS, fusion_rlt);
  scope_graph->impl_->AddFusionScopesResult(fusion_rlt);
}
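// Seeds the parser's op_node_context_map_ and control-edge bookkeeping with the input/output maps
// the fusion op and its neighbouring pre/post nodes would carry after scope matching, so the
// fusion-related graph assembly logic can be exercised without parsing a real model.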
void GenOriginContext(ge::TensorFlowModelParser *tensorflow_parser, const string &fusion_op_name) {
  // op_node_context for fusion op
  ge::OpNodeContext op_node_context;
  op_node_context.input_map["pre_node_a"].push_back({0, 0});
  op_node_context.input_map["pre_node_ctrl_in"].push_back({-1, -1});  // ctrl edges
  op_node_context.output_map["post_node_b"].push_back({0, 0});
  op_node_context.output_map["post_node_c"].push_back({1, 0});
  op_node_context.output_map["post_node_d"].push_back({-1, -1});
  op_node_context.output_map["_Retval"].push_back({0, 1});
  // ctrl edges
  tensorflow_parser->op_node_context_map_[fusion_op_name] = op_node_context;
  tensorflow_parser->SaveEdgesControlInfo(fusion_op_name, -1);

  // op_node_context for pre_node_a
  ge::OpNodeContext op_node_context_a;
  op_node_context_a.output_map[fusion_op_name].push_back({0, 0});
  tensorflow_parser->op_node_context_map_["pre_node_a"] = op_node_context_a;

  // op_node_context for pre_node_ctrl_in
  ge::OpNodeContext op_node_context_ctrl_in;
  op_node_context_ctrl_in.output_map[fusion_op_name].push_back({-1, -1});  // ctrl edges
  tensorflow_parser->op_node_context_map_["pre_node_ctrl_in"] = op_node_context_ctrl_in;

  // op_node_context for post_node_b
  ge::OpNodeContext op_node_context_b;
  op_node_context_b.input_map[fusion_op_name].push_back({0, 0});
  tensorflow_parser->op_node_context_map_["post_node_b"] = op_node_context_b;

  // op_node_context for post_node_c
  ge::OpNodeContext op_node_context_c;
  op_node_context_c.output_map["post_node_d"].push_back({0, 0});
  tensorflow_parser->op_node_context_map_["post_node_c"] = op_node_context_c;

  // op_node_context for post_node_d
  ge::OpNodeContext op_node_context_d;
  op_node_context_d.input_map[fusion_op_name].push_back({-1, -1});  // ctrl edges
  tensorflow_parser->op_node_context_map_["post_node_d"] = op_node_context_d;

  // op_node_context for _Retval
  ge::OpNodeContext op_node_context_Retval;
  op_node_context_d.input_map["post_node_d"].push_back({-1, -1});
  op_node_context_c.output_map["fusion_op_name"].push_back({0, 1});
  tensorflow_parser->op_node_context_map_["_Retval"] = op_node_context_Retval;
  tensorflow_parser->SaveEdgesControlInfo("op_node_context_Retval", -1);

  string fusion_op_type = ge::kScopeToMultiNodes;
  string description = "fusion op description";
  tensorflow_parser->fusion_op_type_map_[fusion_op_name].push_back(fusion_op_type);
  tensorflow_parser->fusion_op_type_map_[fusion_op_name].push_back(description);
}

void register_tbe_op() {
  std::vector<OpRegistrationData> registrationDatas = OpRegistry::Instance()->registrationDatas;
  for (OpRegistrationData reg_data : registrationDatas) {
    domi::OpRegTbeParserFactory::Instance()->Finalize(reg_data);
    OpRegistry::Instance()->Register(reg_data);
  }
  OpRegistry::Instance()->registrationDatas.clear();
}

NodeDef *initNodeDef_axis_dims() {
  NodeDef *nodeDef = new NodeDef();
  google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = nodeDef->mutable_attr();

  // Set the T attribute
  domi::tensorflow::AttrValue dtype_attr_value;
  dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;

  // Set the axis and dims attributes
  domi::tensorflow::AttrValue axis_attr_value;
  ::tensorflow::AttrValue_ListValue *list = axis_attr_value.mutable_list();
  list->add_i(1);
  list->add_i(2);
  (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  (*node_attr_map)[ge::SQUEEZE_ATTR_DIMS] = axis_attr_value;
  return nodeDef;
}

NodeDef *initNodeDef_dims() {
  NodeDef *nodeDef = new NodeDef();
  ::google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = nodeDef->mutable_attr();

  // Set the T attribute
  domi::tensorflow::AttrValue dtype_attr_value;
  dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;

  // Set the dims attribute
  domi::tensorflow::AttrValue axis_attr_value;
  ::tensorflow::AttrValue_ListValue *list = axis_attr_value.mutable_list();
  list->add_i(1);
  list->add_i(2);
  (*node_attr_map)[ge::SQUEEZE_ATTR_DIMS] = axis_attr_value;
  return nodeDef;
}

void CreateOpDef(const string &_name, const string &_type, ge::OpDescPtr opDef) {
  tensorflow::OpDef tsOpDef;
  tsOpDef.set_name(_name);
  tensorflow::OpDef_ArgDef *outArgDef = tsOpDef.add_output_arg();
  outArgDef->set_name(_name);
  outArgDef->set_description("outArgDef");
  outArgDef->set_type((tensorflow::DataType)3);

  if ((_name == "A") || (_name == "B")) {
    tensorflow::OpDef_ArgDef *argDef1 = tsOpDef.add_output_arg();
    string name = _name + "t";
    argDef1->set_name(name);
    argDef1->set_description("this is a test 2");
    argDef1->set_type((tensorflow::DataType)3);
  }
  if (_name == "C") {
    outArgDef->set_number_attr("num");
  }
  if (_name == "D") {
    outArgDef->set_type_list_attr("type_list");
  }

  string strTsOpDef;
  tsOpDef.SerializeToString(&strTsOpDef);
  ge::AttrUtils::SetStr(opDef, "op_def", strTsOpDef);

  tensorflow::NodeDef nodedef;
  nodedef.set_name(_name);
  nodedef.set_op(_name);

  string name("op_def");
  tensorflow::AttrValue value;
  value.set_s(strTsOpDef);
  TensorFlowUtil::AddNodeAttr(name, value, &nodedef);
  value.set_i(1);
  TensorFlowUtil::AddNodeAttr("num", value, &nodedef);
  value.mutable_list();
  TensorFlowUtil::AddNodeAttr("type_list", value, &nodedef);

  string strNodeDef;
  nodedef.SerializeToString(&strNodeDef);
  ge::GeAttrValue::BYTES nodedefBytes;
  nodedefBytes = ge::GeAttrValue::BYTES::CopyFrom((uint8_t *)strNodeDef.data(), strNodeDef.length());
  ge::AttrUtils::SetBytes(opDef, "node_def", nodedefBytes);

  if ((_name == "S") || (_name == "K")) {
    int index = 0;
    ge::AttrUtils::SetInt(opDef, "T", 1);
    ge::AttrUtils::SetInt(opDef, "arg_index", index);
    ge::AttrUtils::SetInt(opDef, "ret_index", index);
  }
}

ge::NodePtr AddNode(ge::ComputeGraphPtr graph, const string &_name, const string &_type, int32_t i_n, int32_t o_n) {
  ge::OpDescPtr opDef = std::make_shared<ge::OpDesc>();
  opDef->SetName(_name);
  opDef->SetType(_type);
  for (int32_t i = 0; i < i_n; i++) {
    ge::GeTensorDesc input;
    input.SetDataType((ge::DataType)1);
    opDef->AddInputDesc(input);
  }
  for (int32_t i = 0; i < o_n; i++) {
    ge::GeTensorDesc output;
    output.SetDataType((ge::DataType)1);
    opDef->AddOutputDesc(output);
  }
  CreateOpDef(_name, _type, opDef);
  return graph->AddNode(opDef);
}
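// MakeDagGraph builds a twelve-node GE test graph: DATA node "S" feeds a small DAG of "test*" ops
// that converges on NETOUTPUT node "K", with one extra control edge from H to J. MakeGraph builds
// the minimal data1/data2 -> add -> netoutput graph used by the subgraph-related tests.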
void MakeDagGraph(ge::ComputeGraphPtr graph, const string &input_node_type) {
  ge::NodePtr node_s = AddNode(graph, "S", parser::DATA, 1, 1);
  ge::NodePtr node_a = AddNode(graph, "A", "testa", 1, 2);
  ge::NodePtr node_b = AddNode(graph, "B", "testb", 1, 2);
  ge::NodePtr node_c = AddNode(graph, "C", "testc", 1, 1);
  ge::NodePtr node_d = AddNode(graph, "D", "testd", 1, 1);
  ge::NodePtr node_e = AddNode(graph, "E", "teste", 1, 1);
  ge::NodePtr node_f = AddNode(graph, "F", "testf", 1, 1);
  ge::NodePtr node_g = AddNode(graph, "G", "testg", 2, 1);
  ge::NodePtr node_h = AddNode(graph, "H", "testh", 1, 1);
  ge::NodePtr node_i = AddNode(graph, "I", "testi", 1, 1);
  ge::NodePtr node_j = AddNode(graph, "J", "testj", 2, 1);
  ge::NodePtr node_k = AddNode(graph, "K", parser::NETOUTPUT, 1, 1);

  ge::GraphUtils::AddEdge(node_s->GetOutDataAnchor(0), node_a->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_a->GetOutDataAnchor(0), node_b->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_a->GetOutDataAnchor(1), node_c->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_b->GetOutDataAnchor(0), node_d->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_b->GetOutDataAnchor(1), node_e->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_c->GetOutDataAnchor(0), node_g->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_d->GetOutDataAnchor(0), node_f->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_e->GetOutDataAnchor(0), node_g->GetInDataAnchor(1));
  ge::GraphUtils::AddEdge(node_f->GetOutDataAnchor(0), node_h->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_g->GetOutDataAnchor(0), node_j->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_h->GetOutDataAnchor(0), node_i->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_i->GetOutDataAnchor(0), node_j->GetInDataAnchor(1));
  ge::GraphUtils::AddEdge(node_j->GetOutDataAnchor(0), node_k->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(node_h->GetOutControlAnchor(), node_j->GetInControlAnchor());
}

void MakeGraph(const ComputeGraphPtr &root_graph, const string &name) {
  root_graph->SetName(name);
  ge::NodePtr data1 = AddNode(root_graph, name + "_input1", parser::DATA, 1, 1);
  ge::NodePtr data2 = AddNode(root_graph, name + "_input2", parser::DATA, 1, 1);
  ge::NodePtr add = AddNode(root_graph, name + "_add", parser::ADD, 2, 1);
  ge::NodePtr net_output = AddNode(root_graph, name + "_net_output", parser::NETOUTPUT, 1, 1);
  ge::GraphUtils::AddEdge(data1->GetOutDataAnchor(0), add->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(data2->GetOutDataAnchor(0), add->GetInDataAnchor(1));
  ge::GraphUtils::AddEdge(add->GetOutDataAnchor(0), net_output->GetInDataAnchor(0));
}

void ChangeDataType(tensorflow::NodeDef *node_tf, int32_t data_type) {
  domi::tensorflow::AttrValue input_attr_value;
  google::protobuf::Map<std::string, tensorflow::AttrValue> *attr = node_tf->mutable_attr();
  google::protobuf::Map<std::string, tensorflow::AttrValue>::const_iterator it = attr->find(ge::ATTR_NAME_INPUT_TENSOR_DESC);
  if (it != attr->end()) {
    input_attr_value = it->second;
  }
  (*attr)[ge::ATTR_NAME_INPUT_TENSOR_DESC] = input_attr_value;
}

NodeDef *AddGraphNode(GraphDef *graph, string name, string optype, string input) {
  NodeDef *node_def = graph->add_node();
  node_def->set_name(name);
  node_def->set_op(optype);
  node_def->add_input(input);
  return node_def;
}

ge::ComputeGraphPtr build_graph(bool with_leaf_node = false) {
  ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>("default");
  ge::OpDescPtr data_op = std::make_shared<ge::OpDesc>();
  data_op->SetType(parser::DATA);
  data_op->SetName("Data1");
  data_op->AddInputDesc(ge::GeTensorDesc());
  data_op->AddOutputDesc(ge::GeTensorDesc());
  ge::NodePtr data1 = graph->AddNode(data_op);

  ge::OpDescPtr relu_op1 = std::make_shared<ge::OpDesc>();
  relu_op1->SetType(parser::ACTIVATION);
  relu_op1->SetName("Relu1");
  relu_op1->AddInputDesc(ge::GeTensorDesc());
  relu_op1->AddOutputDesc(ge::GeTensorDesc());
  ge::NodePtr relu1 = graph->AddNode(relu_op1);

  ge::OpDescPtr relu_op2 = std::make_shared<ge::OpDesc>();
  relu_op2->SetType(parser::RELU);
  relu_op2->SetName("Relu2");
  relu_op2->AddInputDesc(ge::GeTensorDesc());
  relu_op2->AddOutputDesc(ge::GeTensorDesc());
  relu_op2->AddOutputDesc(ge::GeTensorDesc());
  ge::NodePtr relu2 = graph->AddNode(relu_op2);

  ge::OpDescPtr relu_op3 = std::make_shared<ge::OpDesc>();
  relu_op3->SetType(parser::ACTIVATION);
  relu_op3->SetName("Relu3");
  relu_op3->AddInputDesc(ge::GeTensorDesc());
  relu_op3->AddOutputDesc(ge::GeTensorDesc());
  ge::NodePtr relu3;
  if (with_leaf_node == true) {
    relu3 = graph->AddNode(relu_op3);
  }

  ge::OpDescPtr mul_op = std::make_shared<ge::OpDesc>();
  mul_op->SetType(parser::MUL);
  mul_op->SetName("Mul");
  mul_op->AddInputDesc(ge::GeTensorDesc());
  mul_op->AddInputDesc(ge::GeTensorDesc());
  mul_op->AddOutputDesc(ge::GeTensorDesc());
  mul_op->AddOutputDesc(ge::GeTensorDesc());
  mul_op->AddOutputDesc(ge::GeTensorDesc());
  mul_op->AddOutputDesc(ge::GeTensorDesc());
  ge::NodePtr mul = graph->AddNode(mul_op);

  ge::OpDescPtr mul_op1 = std::make_shared<ge::OpDesc>();
  mul_op1->SetType(parser::MUL);
  mul_op1->SetName("Mul1");
  mul_op1->AddInputDesc(ge::GeTensorDesc());
  mul_op1->AddInputDesc(ge::GeTensorDesc());
  mul_op1->AddOutputDesc(ge::GeTensorDesc());
  ge::NodePtr mul1 = graph->AddNode(mul_op1);

  ge::OpDescPtr mul_op2 = std::make_shared<ge::OpDesc>();
  mul_op2->SetType(parser::MUL);
  mul_op2->SetName("Mul2");
  mul_op2->AddInputDesc(ge::GeTensorDesc());
  mul_op2->AddInputDesc(ge::GeTensorDesc());
  mul_op2->AddOutputDesc(ge::GeTensorDesc());
  ge::NodePtr mul2 = graph->AddNode(mul_op2);

  ge::OpDescPtr fc_op = std::make_shared<ge::OpDesc>();
  fc_op->SetType(parser::FULL_CONNECTION);
  fc_op->SetName("FullConnection");
  fc_op->AddInputDesc(ge::GeTensorDesc());
  fc_op->AddOutputDesc(ge::GeTensorDesc());
  fc_op->AddOutputDesc(ge::GeTensorDesc());
  ge::NodePtr fc = graph->AddNode(fc_op);

  ge::GraphUtils::AddEdge(data1->GetOutDataAnchor(0), relu1->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(relu1->GetOutDataAnchor(0), fc->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(fc->GetOutDataAnchor(0), relu2->GetInDataAnchor(0));
  if (with_leaf_node == true) {
    ge::GraphUtils::AddEdge(fc->GetOutDataAnchor(1), relu3->GetInDataAnchor(0));
  }
  ge::GraphUtils::AddEdge(relu2->GetOutDataAnchor(0), mul->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(relu2->GetOutDataAnchor(1), mul->GetInDataAnchor(1));
  ge::GraphUtils::AddEdge(mul->GetOutDataAnchor(0), mul1->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(mul->GetOutDataAnchor(1), mul1->GetInDataAnchor(1));
  ge::GraphUtils::AddEdge(mul->GetOutDataAnchor(2), mul2->GetInDataAnchor(0));
  ge::GraphUtils::AddEdge(mul->GetOutDataAnchor(3), mul2->GetInDataAnchor(1));
  return graph;
}
}  // namespace
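// Stub IR registrations for the operator types these tests rely on: a generic Data op and an Add
// op that accepts the common numeric types on both inputs and the output.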
  749. namespace {
  750. REG_OP(Data)
  751. .INPUT(x, TensorType::ALL())
  752. .OUTPUT(y, TensorType::ALL())
  753. .ATTR(index, Int, 0)
  754. .OP_END_FACTORY_REG(Data)
  755. REG_OP(Add)
  756. .INPUT(x1, TensorType({DT_FLOAT, DT_INT32, DT_INT64, DT_FLOAT16, DT_INT16,
  757. DT_INT8, DT_UINT8, DT_DOUBLE, DT_COMPLEX128,
  758. DT_COMPLEX64, DT_STRING}))
  759. .INPUT(x2, TensorType({DT_FLOAT, DT_INT32, DT_INT64, DT_FLOAT16, DT_INT16,
  760. DT_INT8, DT_UINT8, DT_DOUBLE, DT_COMPLEX128,
  761. DT_COMPLEX64, DT_STRING}))
  762. .OUTPUT(y, TensorType({DT_FLOAT, DT_INT32, DT_INT64, DT_FLOAT16, DT_INT16,
  763. DT_INT8, DT_UINT8, DT_DOUBLE, DT_COMPLEX128,
  764. DT_COMPLEX64, DT_STRING}))
  765. .OP_END_FACTORY_REG(Add)
  766. }
  767. static Status FusionParserParams(const std::vector<const google::protobuf::Message *> inside_nodes, ge::Operator &op) {
  768. return domi::SUCCESS;
  769. }
  770. static MemBuffer* MemBufferFromFile(const char *path)
  771. {
  772. char path_temp[PATH_MAX + 1] = {0x00};
  773. if(strlen(path) > PATH_MAX || nullptr == realpath(path, path_temp)) {
  774. return nullptr;
  775. }
  776. FILE *fp = fopen(path_temp, "r+");
  777. if (fp == nullptr) {
  778. return nullptr;
  779. }
  780. // get model file length
  781. if (0 != fseek(fp, 0, SEEK_END)) {
  782. fclose(fp);
  783. return nullptr;
  784. }
  785. long file_length = ftell(fp);
  786. if (fseek(fp, 0, SEEK_SET)) {
  787. fclose(fp);
  788. return nullptr;
  789. }
  790. if (file_length <= 0) {
  791. fclose(fp);
  792. return nullptr;
  793. }
  794. // alloc model buffer
  795. void *data = malloc((unsigned int)file_length);
  796. if (!data) {
  797. fclose(fp);
  798. return nullptr;
  799. }
  800. // read file into memory
  801. uint32_t read_size = (uint32_t)fread(data, 1, (unsigned int)file_length, fp);
  802. // check if read success
  803. if ((long)read_size != file_length) {
  804. free(data);
  805. data = nullptr;
  806. fclose(fp);
  807. return nullptr;
  808. }
  809. // close model file
  810. fclose(fp);
811. // create a MemBuffer (nothrow, so the null check below is meaningful)
812. MemBuffer* membuf = new (std::nothrow) MemBuffer();
  813. if (!membuf) {
  814. free(data);
  815. data = nullptr;
  816. return nullptr;
  817. }
818. membuf->data = malloc((unsigned int)read_size);
if (membuf->data == nullptr) {
free(data);
delete membuf;
return nullptr;
}
819. // set size && data
820. membuf->size = (uint32_t)read_size;
821. memcpy((char*)membuf->data, (char*)data, read_size);
  822. free(data);
  823. return membuf;
  824. }
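// MemBufferFromFile loads a model file into a heap-allocated buffer. Callers
// own both the returned MemBuffer and its data field; a minimal usage sketch
// mirroring the tests below (path is a placeholder):
//   MemBuffer *buf = MemBufferFromFile("/path/to/model.pb");
//   if (buf != nullptr) {
//     // ... use buf->data / buf->size ...
//     free(buf->data);
//     delete buf;
//   }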
  825. /// placeholder0 placeholder1
  826. /// | /\ /\ |
  827. /// | / \/ \ |
  828. /// | / /\ \ |
  829. /// | | / \ | |
  830. /// | add0 mul0 |
  831. /// | / /c | \ |
  832. /// mul1 --- / | add1
  833. /// \ | |
  834. /// \ ---- add2 |
  835. /// | |
  836. /// retval0 retval1
  837. void CreateGraphDef(domi::tensorflow::GraphDef &graph_def) {
  838. // 1. add node
  839. auto placeholder0 = graph_def.add_node();
  840. auto placeholder1 = graph_def.add_node();
  841. auto add0 = graph_def.add_node();
  842. auto add1 = graph_def.add_node();
  843. auto mul0 = graph_def.add_node();
  844. auto mul1 = graph_def.add_node();
  845. auto add2 = graph_def.add_node();
  846. auto retval0 = graph_def.add_node();
  847. auto retval1 = graph_def.add_node();
  848. auto softmax0 = graph_def.add_node();
  849. auto softmax1 = graph_def.add_node();
  850. // 2. set info
  851. placeholder0->set_name("placeholder0");
  852. placeholder0->set_op("PlaceHolder");
  853. placeholder1->set_name("placeholder1");
  854. placeholder1->set_op("PlaceHolder");
  855. add0->set_name("add0");
  856. add0->set_op("Add");
  857. add1->set_name("add1");
  858. add1->set_op("Add");
  859. add2->set_name("add2");
  860. add2->set_op("Add");
  861. mul0->set_name("mul0");
  862. mul0->set_op("Mul");
  863. mul1->set_name("mul1");
  864. mul1->set_op("Mul");
  865. retval0->set_name("retval0");
  866. retval0->set_op("_RetVal");
  867. retval1->set_name("retval1");
  868. retval1->set_op("_RetVal");
  873. softmax0->set_name("Softmax0");
  874. softmax0->set_op("Softmax");
  875. softmax1->set_name("Softmax1");
  876. softmax1->set_op("Softmax");
  877. // 3. add edges
  878. add0->add_input("placeholder0");
  879. add0->add_input("placeholder1");
  880. mul0->add_input("placeholder0");
  881. mul0->add_input("placeholder1");
  882. mul1->add_input("placeholder0");
  883. mul1->add_input("add0");
  884. mul1->add_input("^mul0");
  885. add1->add_input("mul0");
  886. add1->add_input("placeholder1");
  887. add2->add_input("mul1");
  888. add2->add_input("mul0");
  889. retval0->add_input("add2:0");
  890. retval1->add_input("add1:0");
  891. softmax0->add_input("add3:0");
  892. softmax0->add_input("add2:0");
  893. }
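// Notes on the GraphDef built above: an input of the form "name:0" selects
// output index 0 of that node, while the "^mul0" input on mul1 is a TensorFlow
// control-dependency edge (no data flows along it). softmax0's "add3:0" input
// references a node that this helper never creates, so graphs built from it
// can also exercise error-handling paths.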
  894. TEST_F(STestTensorflowParser, tensorflow_parser_success) {
  895. RegisterCustomOp();
  896. std::string case_dir = __FILE__;
  897. ParserOperator unused("Add");
  898. case_dir = case_dir.substr(0, case_dir.find_last_of("/"));
  899. std::string model_file = case_dir + "/origin_models/tf_add.pb";
  900. std::map<ge::AscendString, ge::AscendString> parser_params = {
  901. {ge::AscendString(ge::ir_option::INPUT_DATA_NAMES), ge::AscendString("Placeholder,Placeholder_1")},
  902. };
  903. ge::Graph graph;
  904. auto ret = ge::aclgrphParseTensorFlow(model_file.c_str(), parser_params, graph);
  905. ASSERT_EQ(ret, SUCCESS);
  906. ge::ComputeGraphPtr compute_graph = ge::GraphUtils::GetComputeGraph(graph);
  907. auto output_nodes_info = compute_graph->GetGraphOutNodesInfo();
  908. ASSERT_EQ(output_nodes_info.size(), 1);
  909. EXPECT_EQ((output_nodes_info.at(0).first->GetName()), "add_test_1");
  910. EXPECT_EQ((output_nodes_info.at(0).second), 0);
  911. auto &net_out_name = ge::GetParserContext().net_out_nodes;
  912. ASSERT_EQ(net_out_name.size(), 1);
  913. EXPECT_EQ(net_out_name.at(0), "add_test_1:0");
  914. }
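// The call pattern above is the one used throughout this file: build parser
// options as a map of AscendString key/value pairs, hand the model path to
// ge::aclgrphParseTensorFlow, then inspect the result through
// GraphUtils::GetComputeGraph. A minimal sketch (the path is a placeholder):
//   std::map<ge::AscendString, ge::AscendString> options;
//   ge::Graph graph;
//   auto ret = ge::aclgrphParseTensorFlow("/path/to/model.pb", options, graph);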
  915. TEST_F(STestTensorflowParser, tensorflow_parser_failed_for_input_data_names_error) {
  916. RegisterCustomOp();
  917. std::string case_dir = __FILE__;
  918. ParserOperator unused("Add");
  919. case_dir = case_dir.substr(0, case_dir.find_last_of("/"));
  920. std::string model_file = case_dir + "/origin_models/tf_add.pb";
  921. std::map<ge::AscendString, ge::AscendString> parser_params = {
  922. {ge::AscendString(ge::ir_option::INPUT_DATA_NAMES), ge::AscendString("Placeholder_1,Placeholder_3")},
  923. };
  924. ge::Graph graph;
  925. auto ret = ge::aclgrphParseTensorFlow(model_file.c_str(), parser_params, graph);
  926. ASSERT_EQ(ret, ge::GRAPH_FAILED);
  927. }
  928. TEST_F(STestTensorflowParser, tensorflow_model_Failed) {
  929. ge::Graph graph;
  930. std::string caseDir = __FILE__;
  931. std::size_t idx = caseDir.find_last_of("/");
  932. caseDir = caseDir.substr(0, idx);
  933. std::string modelFile = caseDir + "/origin_models/model.pb";
  934. auto status = ge::aclgrphParseTensorFlow(modelFile.c_str(), graph);
  935. EXPECT_EQ(status, ge::SUCCESS);
  936. modelFile = caseDir + "/origin_models/test_depth_wise_conv2d.pb";
  937. status = ge::aclgrphParseTensorFlow(modelFile.c_str(), graph);
  938. EXPECT_EQ(status, ge::GRAPH_FAILED);
  939. }
  940. TEST_F(STestTensorflowParser, tensorflow_model_not_exist) {
  941. ge::Graph graph;
  942. std::string caseDir = __FILE__;
  943. std::size_t idx = caseDir.find_last_of("/");
  944. caseDir = caseDir.substr(0, idx);
945. // model file does not exist
  946. std::string modelFile = caseDir + "/origin_models/conv2d_explicit1_pad.pb";
  947. auto status = ge::aclgrphParseTensorFlow(modelFile.c_str(), graph);
  948. EXPECT_EQ(status, ge::GRAPH_FAILED);
  949. }
  950. TEST_F(STestTensorflowParser, parser_tensorflow_model) {
  951. std::string caseDir = __FILE__;
  952. std::size_t idx = caseDir.find_last_of("/");
  953. caseDir = caseDir.substr(0, idx);
  954. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  955. const char *model_file = modelFile.c_str();
  956. std::string op_name = "ge_ascend_irgraph";
  957. ge::Graph graph(op_name);
  958. std::map<ge::AscendString, ge::AscendString> parser_options = {
  959. {ge::AscendString(ge::ir_option::INPUT_FORMAT), ge::AscendString("NHWC")},
  960. };
  961. auto ret_graph = ge::aclgrphParseTensorFlow(model_file, parser_options, graph);
  962. EXPECT_EQ(ret_graph, ge::FAILED);
963. // parse tensorflow model where the out_node size equals the index
  964. string graph_name;
  965. AclGraphParserUtil acl_graph_parse_util;
  966. std::map<AscendString, AscendString> out_nodes_with_node_and_index = {
  967. {AscendString(ge::ir_option::OUT_NODES), AscendString("Placeholder:0;Placeholder_1:1")}};
  968. ParerSTestsUtils::ClearParserInnerCtx();
  969. auto ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_with_node_and_index, graph_name);
  970. ret_graph = ge::aclgrphParseTensorFlow(model_file, graph);
  971. EXPECT_EQ(ret_graph, domi::FAILED);
972. // parse tensorflow model successfully
  973. modelFile = caseDir + "/origin_models/model.pb";
  974. model_file = modelFile.c_str();
  975. out_nodes_with_node_and_index = {{AscendString(ge::ir_option::OUT_NODES), AscendString("x:0;y:0")}};
  976. ParerSTestsUtils::ClearParserInnerCtx();
  977. ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_with_node_and_index, graph_name);
  978. ret_graph = ge::aclgrphParseTensorFlow(model_file, graph);
  979. EXPECT_EQ(ret_graph, domi::SUCCESS);
  980. }
  981. TEST_F(STestTensorflowParser, tensorflow_parser_to_json)
  982. {
  983. TensorFlowModelParser modelParser;
  984. std::string caseDir = __FILE__;
  985. std::size_t idx = caseDir.find_last_of("/");
  986. caseDir = caseDir.substr(0, idx);
  987. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  988. std::string jsonFile = caseDir + "/origin_models/test.json";
  989. const char *model_file = modelFile.c_str();
  990. const char *json_file = jsonFile.c_str();
  991. Status ret = modelParser.ToJson(model_file, json_file);
  992. EXPECT_EQ(ret, SUCCESS);
  993. }
  994. TEST_F(STestTensorflowParser, tensorflow_parserfrommemory_failed)
  995. {
  996. TensorFlowModelParser modelParser;
  997. std::string caseDir = __FILE__;
  998. std::size_t idx = caseDir.find_last_of("/");
  999. caseDir = caseDir.substr(0, idx);
  1000. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  1001. uint32_t size = 1;
  1002. ge::Graph graph;
  1003. std::map<ge::AscendString, ge::AscendString> parser_params;
  1004. Status ret = ge::aclgrphParseTensorFlow(modelFile.c_str(), parser_params, graph);
  1005. ASSERT_EQ(ret, SUCCESS);
  1006. parser_params = {{AscendString(ge::ir_option::OUT_NODES), AscendString("Placeholder:0;Placeholder_1:0")}};
  1007. ret = ge::aclgrphParseTensorFlow(modelFile.c_str(), parser_params, graph);
  1008. ge::ComputeGraphPtr compute_graph = ge::GraphUtils::GetComputeGraph(graph);
  1009. ret = modelParser.ParseFromMemory(modelFile.c_str(), size, compute_graph);
  1010. EXPECT_NE(ret, SUCCESS);
  1011. }
  1012. TEST_F(STestTensorflowParser, modelparser_parsefrommemory_success)
  1013. {
  1014. std::string caseDir = __FILE__;
  1015. std::size_t idx = caseDir.find_last_of("/");
  1016. caseDir = caseDir.substr(0, idx);
  1017. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  1018. const char* tmp_tf_pb_model = modelFile.c_str();
  1019. ge::Graph graph;
  1020. std::map<ge::AscendString, ge::AscendString> parser_params;
  1021. Status ret = ge::aclgrphParseTensorFlow(modelFile.c_str(), parser_params, graph);
  1022. ASSERT_EQ(ret, SUCCESS);
  1023. ge::ComputeGraphPtr compute_graph = ge::GraphUtils::GetComputeGraph(graph);
  1024. TensorFlowModelParser modelParser;
  1025. MemBuffer* memBuffer = MemBufferFromFile(tmp_tf_pb_model);
1026. EXPECT_EQ(PreChecker::Instance().HasError(), false);
  1027. ret = modelParser.ParseFromMemory((char*)memBuffer->data, memBuffer->size, compute_graph);
  1028. free(memBuffer->data);
  1029. delete memBuffer;
  1030. }
  1031. TEST_F(STestTensorflowParser, weightsparser_parsefrommemory_success)
  1032. {
  1033. std::string caseDir = __FILE__;
  1034. std::size_t idx = caseDir.find_last_of("/");
  1035. caseDir = caseDir.substr(0, idx);
  1036. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  1037. const char* tmp_tf_pb_model = modelFile.c_str();
  1038. ge::Graph graph;
  1039. std::map<ge::AscendString, ge::AscendString> parser_params;
  1040. Status ret = ge::aclgrphParseTensorFlow(modelFile.c_str(), parser_params, graph);
  1041. ASSERT_EQ(ret, SUCCESS);
  1042. ge::ComputeGraphPtr compute_graph = ge::GraphUtils::GetComputeGraph(graph);
  1043. auto weights_parser = domi::WeightsParserFactory::Instance()->CreateWeightsParser(domi::TENSORFLOW);
  1044. MemBuffer* memBuffer = MemBufferFromFile(tmp_tf_pb_model);
  1045. ret = weights_parser->ParseFromMemory((char*)memBuffer->data, memBuffer->size, compute_graph);
  1046. free(memBuffer->data);
  1047. delete memBuffer;
  1048. EXPECT_EQ(SUCCESS, ret);
  1049. }
  1050. std::string getGraphCallbackV2(string subgraph_name)
  1051. {
  1052. std::string caseDir = __FILE__;
  1053. std::size_t idx = caseDir.find_last_of("/");
  1054. caseDir = caseDir.substr(0, idx);
  1055. subgraph_name = caseDir + "/origin_models/tf_add.pb";
  1056. return subgraph_name;
  1057. }
  1058. TEST_F(STestTensorflowParser, parser_ParseProtoWithSubgraphV2)
  1059. {
  1060. std::string caseDir = __FILE__;
  1061. std::size_t idx = caseDir.find_last_of("/");
  1062. caseDir = caseDir.substr(0, idx);
  1063. const std::string root_proto = caseDir + "/origin_models/tf_add.pb";
  1064. ge::Graph graph;
  1065. std::map<ge::AscendString, ge::AscendString> parser_params;
  1066. Status ret = ge::aclgrphParseTensorFlow(root_proto.c_str(), parser_params, graph);
  1067. ASSERT_EQ(ret, SUCCESS);
  1068. ge::ComputeGraphPtr root_graph = ge::GraphUtils::GetComputeGraph(graph);
  1069. domi::GetGraphCallbackV2 callback(&getGraphCallbackV2);
  1070. TensorFlowModelParser parser;
  1071. ret = parser.ParseProtoWithSubgraph(root_proto, callback, root_graph);
  1072. }
  1073. TEST_F(STestTensorflowParser, parser_ConvertToGeDataType)
  1074. {
  1075. // convert to ge type success
  1076. const uint32_t type1 = domi::tensorflow::DataType::DT_FLOAT;
  1077. TensorFlowModelParser parser;
  1078. ge::DataType dataType = parser.ConvertToGeDataType(type1);
  1079. ASSERT_EQ(dataType, ge::DataType::DT_FLOAT);
  1080. const uint32_t type2 = 80; // invalid type
  1081. dataType = parser.ConvertToGeDataType(type2);
  1082. ASSERT_EQ(dataType, ge::DataType::DT_UNDEFINED);
  1083. }
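// ConvertToGeDataType maps the numeric value of a domi::tensorflow::DataType
// onto the corresponding ge::DataType; values outside the known range (such as
// 80 above) are reported as ge::DT_UNDEFINED rather than an error status.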
  1084. TEST_F(STestTensorflowParser, tensorflow_parser_with_external_normal_graph) {
  1085. // 1. Create root graph
  1086. ComputeGraphPtr root_graph = ge::parser::MakeShared<ge::ComputeGraph>("root_graph");
  1087. MakeGraph(root_graph, "root_graph");
  1088. // 2. Create ONNX sub graph
  1089. // 2.1 Sub graph of onnx graph
  1090. ge::ComputeGraphPtr sub_sub_graph = ge::parser::MakeShared<ge::ComputeGraph>("sub_sub");
  1091. // 2.2 ONNX graph
  1092. ComputeGraphPtr sub_graph = ge::parser::MakeShared<ge::ComputeGraph>("sub_sub");
  1093. MakeGraph(sub_graph, "sub_graph");
  1094. auto add = sub_graph->FindNode("sub_graph_add");
  1095. ASSERT_NE(add, nullptr);
  1096. add->GetOpDesc()->AddSubgraphName("sub_sub_graph");
  1097. add->GetOpDesc()->SetSubgraphInstanceName(0, sub_sub_graph->GetName());
  1098. sub_graph->AddSubGraph(sub_sub_graph);
  1099. auto input1 = sub_graph->FindNode("sub_graph_input1");
  1100. ASSERT_NE(input1, nullptr);
  1101. AttrUtils::SetInt(input1->GetOpDesc(), ATTR_NAME_INDEX, 0);
  1102. auto input2 = sub_graph->FindNode("sub_graph_input2");
  1103. ASSERT_NE(input2, nullptr);
  1104. AttrUtils::SetInt(input2->GetOpDesc(), ATTR_NAME_INDEX, 1);
  1105. // 3. Serialize ONNX graph to string
  1106. // 3.1 normal
  1107. ge::Model model("model", "");
  1108. model.SetGraph(GraphUtils::CreateGraphFromComputeGraph(sub_graph));
  1109. Buffer buffer;
  1110. graphStatus save_ret = model.Save(buffer, false);
  1111. ASSERT_EQ(save_ret, GRAPH_SUCCESS);
  1112. std::string external_graph(reinterpret_cast<const char *>(buffer.GetData()),
  1113. buffer.GetSize());
1114. // model that will fail
  1115. input1->GetOpDesc()->DelAttr(ATTR_NAME_INDEX);
  1116. ge::Model model_will_fail("model_will_fail", "");
  1117. model_will_fail.SetGraph(GraphUtils::CreateGraphFromComputeGraph(sub_graph));
  1118. Buffer buffer_fail;
  1119. save_ret = model_will_fail.Save(buffer_fail, false);
  1120. ASSERT_EQ(save_ret, GRAPH_SUCCESS);
  1121. std::string external_graph_fail(
  1122. reinterpret_cast<const char *>(buffer_fail.GetData()),
  1123. buffer_fail.GetSize());
  1124. // 4. Set string to function node
  1125. auto root_add = root_graph->FindNode("root_graph_add");
  1126. ASSERT_NE(root_add, nullptr);
  1127. AttrUtils::SetStr(root_add->GetOpDesc(), "_external_model", external_graph);
  1128. auto root_input1 = root_graph->FindNode("root_graph_input1");
  1129. ASSERT_NE(root_input1, nullptr);
  1130. AttrUtils::SetInt(root_input1->GetOpDesc(), ATTR_NAME_INDEX, 0);
  1131. auto root_input2 = root_graph->FindNode("root_graph_input2");
  1132. ASSERT_NE(root_input2, nullptr);
  1133. AttrUtils::SetInt(root_input2->GetOpDesc(), ATTR_NAME_INDEX, 1);
  1134. // 5. Run test (normal)
  1135. auto ret = TensorFlowModelParser::AddExternalGraph(root_graph);
  1136. EXPECT_EQ(ret, SUCCESS);
  1137. EXPECT_EQ(root_graph->GetAllSubgraphs().size(), 2);
  1138. EXPECT_EQ(sub_graph->GetAllSubgraphs().size(), 1);
  1139. EXPECT_NE(root_graph->GetSubgraph(sub_graph->GetName()), nullptr);
  1140. EXPECT_EQ(root_graph->GetSubgraph(sub_graph->GetName())->GetAllSubgraphs().size(), 0);
  1141. }
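// The test above serializes a ge::Model into a string, attaches it to a node
// through the "_external_model" attribute, and expects AddExternalGraph to
// restore it as a subgraph of the root graph (including its nested subgraph),
// which is why two subgraphs are expected on root_graph afterwards.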
  1142. TEST_F(STestTensorflowParser, tensorflow_ParserProto_failed)
  1143. {
  1144. std::string caseDir = __FILE__;
  1145. std::size_t idx = caseDir.find_last_of("/");
  1146. caseDir = caseDir.substr(0, idx);
  1147. const std::string root_proto = caseDir + "/origin_models/avgpool3dgrad.pb.txt";
  1148. domi::tensorflow::GraphDef graphDef;
  1149. ge::Graph graph;
  1150. std::map<ge::AscendString, ge::AscendString> parser_params;
  1151. Status ret = ge::aclgrphParseTensorFlow(root_proto.c_str(), parser_params, graph);
  1152. ASSERT_EQ(ret, SUCCESS);
  1153. ge::ComputeGraphPtr root_graph = ge::GraphUtils::GetComputeGraph(graph);
  1154. TensorFlowModelParser tensorflow_parser;
  1155. ret = tensorflow_parser.ParseProto(reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph);
  1156. EXPECT_EQ(PARAM_INVALID, ret);
1157. // proto parsing fails
  1158. bool protoRet = parser::ReadProtoFromText(root_proto.c_str(), &graphDef);
  1159. ASSERT_EQ(protoRet, false);
  1160. ret = tensorflow_parser.ParseProto(reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph);
  1161. ASSERT_EQ(ret, PARAM_INVALID);
  1162. std::string serialized_proto = "";
  1163. ret = tensorflow_parser.ParseProto(serialized_proto, root_graph);
  1164. ASSERT_EQ(ret, FAILED);
  1165. }
  1166. TEST_F(STestTensorflowParser, tensorflow_parserAllGraph_failed)
  1167. {
  1168. std::string caseDir = __FILE__;
  1169. std::size_t idx = caseDir.find_last_of("/");
  1170. caseDir = caseDir.substr(0, idx);
  1171. const std::string root_proto = caseDir + "/origin_models/conv2d.pb";
  1172. domi::tensorflow::GraphDef graphDef;
  1173. CreateGraphDef(graphDef);
  1174. auto no_op = graphDef.add_node();
  1175. no_op->set_name("no_op");
  1176. no_op->set_op("NoOp");
  1177. no_op->add_input("placeholder0");
  1178. no_op->add_input("placeholder1");
  1179. ge::Graph graph;
  1180. std::map<ge::AscendString, ge::AscendString> parser_params;
  1181. Status ret = ge::aclgrphParseTensorFlow(root_proto.c_str(), parser_params, graph);
  1182. ASSERT_EQ(ret, SUCCESS);
  1183. ge::ComputeGraphPtr root_graph = ge::GraphUtils::GetComputeGraph(graph);
  1184. TensorFlowModelParser tensorflow_parser;
  1185. ret = tensorflow_parser.ParseAllGraph(reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph);
  1186. ASSERT_NE(ret, SUCCESS);
  1187. }
  1188. TEST_F(STestTensorflowParser, test_parse_acl_output_nodes)
  1189. {
  1190. AclGraphParserUtil acl_graph_parse_util;
  1191. string graph_name;
  1192. // case 1: Normal with 'node and index'
  1193. ParerSTestsUtils::ClearParserInnerCtx();
  1194. GetParserContext().type = domi::ONNX;
  1195. std::map<AscendString, AscendString> out_nodes_with_node_and_index = {
  1196. {AscendString(ge::ir_option::OUT_NODES), AscendString("Out1:0;Out2:1")}};
  1197. ParerSTestsUtils::ClearParserInnerCtx();
  1198. auto ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_with_node_and_index, graph_name);
  1199. ASSERT_EQ(ret, SUCCESS);
  1200. EXPECT_EQ(ge::GetParserContext().user_out_nodes.size(), 2);
  1201. EXPECT_EQ(ge::GetParserContext().out_nodes_map.size(), 2);
  1202. EXPECT_EQ(ge::GetParserContext().user_out_tensors.size(), 0);
  1203. // case 2: Normal with 'tensor name'
  1204. ParerSTestsUtils::ClearParserInnerCtx();
  1205. GetParserContext().type = domi::ONNX;
  1206. std::map<AscendString, AscendString> out_nodes_with_tensor_name = {
  1207. {AscendString(ge::ir_option::OUT_NODES), AscendString("Out_tensor_1;Out_tensor_2")}};
  1208. ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_with_tensor_name, graph_name);
  1209. ASSERT_EQ(ret, SUCCESS);
  1210. EXPECT_EQ(ge::GetParserContext().user_out_nodes.size(), 0);
  1211. EXPECT_EQ(ge::GetParserContext().out_nodes_map.size(), 0);
  1212. EXPECT_EQ(ge::GetParserContext().user_out_tensors.size(), 2);
  1213. // case 3: Failed with 'node and index' before 'tensor name'
  1214. ParerSTestsUtils::ClearParserInnerCtx();
  1215. GetParserContext().type = domi::ONNX;
  1216. std::map<AscendString, AscendString> out_nodes_mode_mixex_pre = {
  1217. {AscendString(ge::ir_option::OUT_NODES), AscendString("Out1:0;Out2:1;Out_tensor_1;Out_tensor_2")}};
  1218. ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_mode_mixex_pre, graph_name);
  1219. ASSERT_EQ(ret, PARAM_INVALID);
  1220. EXPECT_EQ(ge::GetParserContext().user_out_nodes.size(), 2);
  1221. EXPECT_EQ(ge::GetParserContext().out_nodes_map.size(), 2);
  1222. EXPECT_EQ(ge::GetParserContext().user_out_tensors.size(), 0);
  1223. // case 4: Failed with 'node and index' inserted in 'tensor name'
  1224. ParerSTestsUtils::ClearParserInnerCtx();
  1225. GetParserContext().type = domi::ONNX;
  1226. std::map<AscendString, AscendString> out_nodes_mode_mixex_mid = {
  1227. {AscendString(ge::ir_option::OUT_NODES), AscendString("Out_tensor_1;Out1:0;Out2:1;Out_tensor_2")}};
  1228. ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_mode_mixex_mid, graph_name);
  1229. ASSERT_EQ(ret, PARAM_INVALID);
  1230. EXPECT_EQ(ge::GetParserContext().user_out_nodes.size(), 0);
  1231. EXPECT_EQ(ge::GetParserContext().out_nodes_map.size(), 0);
  1232. EXPECT_EQ(ge::GetParserContext().user_out_tensors.size(), 1);
  1233. // case 5: Failed with 'node and index' after 'tensor name'
  1234. ParerSTestsUtils::ClearParserInnerCtx();
  1235. GetParserContext().type = domi::ONNX;
  1236. std::map<AscendString, AscendString> out_nodes_mode_mixex_post = {
  1237. {AscendString(ge::ir_option::OUT_NODES), AscendString("Out_tensor_1;Out_tensor_2;Out1:0;Out2:1")}};
  1238. ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_mode_mixex_post, graph_name);
  1239. ASSERT_EQ(ret, PARAM_INVALID);
  1240. EXPECT_EQ(ge::GetParserContext().user_out_nodes.size(), 0);
  1241. EXPECT_EQ(ge::GetParserContext().out_nodes_map.size(), 0);
  1242. EXPECT_EQ(ge::GetParserContext().user_out_tensors.size(), 2);
  1243. }
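// Summary of the OUT_NODES cases above: entries may all use the "node:index"
// form (filling user_out_nodes/out_nodes_map) or all use plain tensor names
// (filling user_out_tensors); mixing the two forms in one option string is
// rejected with PARAM_INVALID regardless of the order in which they appear.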
  1244. TEST_F(STestTensorflowParser, parse_AutoMappingByOp) {
  1245. static const string KEY_STRING = "key_string";
  1246. static const string KEY_INT = "key_int";
  1247. static const string KEY_FLOAT = "key_float";
  1248. static const string KEY_BOOL = "key_bool";
  1249. static const string KEY_TYPE = "key_type";
  1250. static const string VALUE_STRING = "string";
  1251. static const int64_t VALUE_INT = 1;
  1252. static const float VALUE_FLOAT = 1.0;
  1253. static const bool VALUE_BOOL = true;
  1254. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  1255. static const string VALUE_NAME = "test_name";
  1256. ge::OpDescPtr op_desc = std::make_shared<ge::OpDesc>();
  1257. NodeDef node_def;
  1258. domi::tensorflow::AttrValue value;
  1259. ge::Operator op = ge::OpDescUtils::CreateOperatorFromOpDesc(op_desc);
  1260. node_def.set_name(VALUE_NAME);
  1261. value.set_s(VALUE_STRING);
  1262. TensorFlowUtil::AddNodeAttr(KEY_STRING, value, &node_def);
  1263. value.set_i(VALUE_INT);
  1264. TensorFlowUtil::AddNodeAttr(KEY_INT, value, &node_def);
  1265. value.set_f(VALUE_FLOAT);
  1266. TensorFlowUtil::AddNodeAttr(KEY_FLOAT, value, &node_def);
  1267. value.set_b(VALUE_BOOL);
  1268. TensorFlowUtil::AddNodeAttr(KEY_BOOL, value, &node_def);
  1269. value.set_type(VALUE_TYPE);
  1270. TensorFlowUtil::AddNodeAttr(KEY_TYPE, value, &node_def);
  1271. domi::Status status = domi::AutoMappingFn(reinterpret_cast<google::protobuf::Message *>(&node_def), op);
  1272. EXPECT_EQ(domi::SUCCESS, status);
  1273. EXPECT_EQ(VALUE_NAME, op_desc->GetName());
  1274. string value_string = "";
  1275. ge::AttrUtils::GetStr(op_desc, KEY_STRING, value_string);
  1276. EXPECT_EQ(VALUE_STRING, value_string);
  1277. int64_t value_int = 0;
  1278. ge::AttrUtils::GetInt(op_desc, KEY_INT, value_int);
  1279. EXPECT_EQ(VALUE_INT, value_int);
  1280. float value_float = 0.0;
  1281. ge::AttrUtils::GetFloat(op_desc, KEY_FLOAT, value_float);
  1282. EXPECT_EQ(VALUE_FLOAT, value_float);
  1283. bool value_bool = false;
  1284. ge::AttrUtils::GetBool(op_desc, KEY_BOOL, value_bool);
  1285. EXPECT_EQ(VALUE_BOOL, value_bool);
  1286. ge::DataType data_type = ge::DT_UNDEFINED;
  1287. ge::AttrUtils::GetDataType(op_desc, KEY_TYPE, data_type);
  1288. EXPECT_EQ(ge::DT_FLOAT, data_type);
  1289. // test AutoMappingByOpFn
  1290. ge::OpDescPtr op_desc_dest = std::make_shared<ge::OpDesc>();
  1291. ge::Operator op_dest = ge::OpDescUtils::CreateOperatorFromOpDesc(op_desc_dest);
  1292. status = domi::AutoMappingByOpFn(op, op_dest);
  1293. EXPECT_EQ(domi::SUCCESS, status);
  1294. EXPECT_EQ(VALUE_NAME, op_dest.GetName());
  1295. value_string = "";
  1296. ge::AttrUtils::GetStr(op_desc_dest, KEY_STRING, value_string);
  1297. EXPECT_EQ(VALUE_STRING, value_string);
  1298. value_int = 0;
  1299. ge::AttrUtils::GetInt(op_desc_dest, KEY_INT, value_int);
  1300. EXPECT_EQ(VALUE_INT, value_int);
  1301. value_float = 0.0;
  1302. ge::AttrUtils::GetFloat(op_desc_dest, KEY_FLOAT, value_float);
  1303. EXPECT_EQ(VALUE_FLOAT, value_float);
  1304. value_bool = false;
  1305. ge::AttrUtils::GetBool(op_desc_dest, KEY_BOOL, value_bool);
  1306. EXPECT_EQ(VALUE_BOOL, value_bool);
  1307. data_type = ge::DT_UNDEFINED;
  1308. ge::AttrUtils::GetDataType(op_desc_dest, KEY_TYPE, data_type);
  1309. EXPECT_EQ(ge::DT_FLOAT, data_type);
  1310. }
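// AutoMappingFn copies each NodeDef attribute (string, int, float, bool and
// type in this test) onto the destination OpDesc together with the node name;
// AutoMappingByOpFn performs the same copy between two ge::Operator instances,
// which is what the second half of the test verifies.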
  1311. TEST_F(STestTensorflowParser, parse_ParseNodeDef)
  1312. {
  1313. NodeDef * node_def = new NodeDef();
  1314. node_def->set_name("test_name");
  1315. node_def->set_op("PlaceholderWithDefault");
  1316. bool isDatasetInit = true;
  1317. TensorFlowModelParser model_parser;
  1318. Status ret = model_parser.AdaptOpType(node_def, isDatasetInit);
  1319. EXPECT_EQ(domi::SUCCESS, ret);
  1320. node_def->set_op("Add");
  1321. ret = model_parser.AdaptOpType(node_def, isDatasetInit);
  1322. EXPECT_EQ(domi::SUCCESS, ret);
  1323. delete node_def;
  1324. }
  1325. TEST_F(STestTensorflowParser, parse_AddFmkNode)
  1326. {
  1327. TensorFlowModelParser modelParser;
  1328. std::string caseDir = __FILE__;
  1329. std::size_t idx = caseDir.find_last_of("/");
  1330. caseDir = caseDir.substr(0, idx);
  1331. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  1332. ge::Graph graph;
  1333. string graph_name;
  1334. AclGraphParserUtil acl_graph_parse_util;
  1335. std::map<ge::AscendString, ge::AscendString> parser_options = {{AscendString(ge::ir_option::OUT_NODES), AscendString("Placeholder:0;Placeholder_1:0")}};
  1336. ParerSTestsUtils::ClearParserInnerCtx();
  1337. Status ret = acl_graph_parse_util.ParseParamsBeforeGraph(parser_options, graph_name);
  1338. ret = aclgrphParseTensorFlow(modelFile.c_str(), parser_options, graph);
  1339. ASSERT_EQ(ret, SUCCESS);
  1340. ge::ComputeGraphPtr compute_graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  1341. tensorflow::GraphDef *graphDef = new (std::nothrow) tensorflow::GraphDef();
  1342. ScopePassManager pass_manager;
  1343. std::shared_ptr<ScopeGraph> scope_graph = pass_manager.BuildScopeGraph(graphDef);
  1344. std::string fusion_op_name = "fusion_op_name";
  1345. FusionScopesResult *fusion_rlt = new (std::nothrow) FusionScopesResult();
  1346. EXPECT_NE(fusion_rlt, nullptr);
  1347. fusion_rlt->Init();
  1348. GenFusionScopesResult(scope_graph, fusion_rlt, fusion_op_name);
  1349. GenOriginContext(&modelParser, fusion_op_name);
  1350. // origin inner node def
  1351. NodeDef* node_def = MallocNodeDef("scope_node_1", "Add");
  1352. EXPECT_NE(node_def, nullptr);
  1353. modelParser.fusion_op_nodedef_map_[fusion_op_name].push_back(node_def);
  1354. bool train_flag_backup = ge::GetParserContext().train_flag;
  1355. ge::GetParserContext().train_flag = true;
  1356. REGISTER_CUSTOM_OP("Identity")
  1357. .FrameworkType(domi::TENSORFLOW)
  1358. .OriginOpType("Identity")
  1359. .ParseParamsFn(ParseParams)
  1360. .ImplyType(ImplyType::TVM);
  1361. REGISTER_CUSTOM_OP("Constant")
  1362. .FrameworkType(domi::TENSORFLOW)
  1363. .OriginOpType("Const")
  1364. .ParseParamsFn(ParseParams)
  1365. .ImplyType(ImplyType::TVM);
  1366. register_tbe_op();
  1367. std::vector<std::string> node_name_list;
  1368. GenOriginNodeDef(&modelParser, node_name_list);
  1369. std::set<std::string> malloc_node_name_list(node_name_list.begin(), node_name_list.end());
  1370. node_name_list.push_back(fusion_op_name);
  1371. ret = modelParser.AddFmkNode(compute_graph, scope_graph, node_name_list, false);
  1372. EXPECT_EQ(ret, PARAM_INVALID);
  1373. EXPECT_EQ(modelParser.scope_inner_node_map_.size(), 0);
  1374. EXPECT_EQ(modelParser.nodedef_map_.size(), 5);
  1375. ret = modelParser.AddEdges(compute_graph);
  1376. EXPECT_EQ(ret, SUCCESS);
  1377. // release resource
  1378. delete graphDef;
  1379. delete node_def;
  1380. modelParser.DeleteFuisonNodeDef();
  1381. FreeNodeDefMap(&modelParser, malloc_node_name_list);
  1382. ge::GetParserContext().train_flag = train_flag_backup;
  1383. }
  1384. TEST_F(STestTensorflowParser, parse_AddScopeInnerNode)
  1385. {
  1386. TensorFlowModelParser modelParser;
  1387. std::string caseDir = __FILE__;
  1388. std::size_t idx = caseDir.find_last_of("/");
  1389. caseDir = caseDir.substr(0, idx);
  1390. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  1391. std::string op_name = "ge_ascend_irgraph";
  1392. ge::Graph graph(op_name);
  1393. ge::ComputeGraphPtr compute_graph = ge::GraphUtils::GetComputeGraph(graph);
  1394. std::map<ge::AscendString, ge::AscendString> parser_params = {
  1395. {AscendString(ge::ir_option::OUT_NODES), AscendString("Placeholder:0;Placeholder_1:0")}};
  1396. Status ret = ge::aclgrphParseTensorFlow(modelFile.c_str(), parser_params, graph);
  1397. EXPECT_EQ(ret, SUCCESS);
  1398. std::mutex graph_mutex;
  1399. tensorflow::NodeDef *node_def = initNodeDef();
  1400. node_def->set_name("FastrcnnPredictions");
  1401. node_def->set_op("FastrcnnPredictions");
  1402. // can't find in scope_inner_node_map
  1403. ret = modelParser.AddScopeInnerNode(&modelParser, compute_graph, &graph_mutex, node_def);
  1404. EXPECT_EQ(ret, PARAM_INVALID);
  1405. delete node_def;
  1406. }
  1407. TEST_F(STestTensorflowParser, dyncmic_rnn_scope_pass_plugin_test) {
  1408. ge::Graph graph;
  1409. std::cout << __FILE__ << std::endl;
  1410. std::string caseDir = __FILE__;
  1411. std::size_t idx = caseDir.find_last_of("/");
  1412. caseDir = caseDir.substr(0, idx);
  1413. std::string modelFile = caseDir + "/origin_models/tensor_array.pb";
  1414. std::map<ge::AscendString, ge::AscendString> params;
  1415. string key ="enable_scope_fusion_passes";
  1416. string value ="ScopeDynamicRNNPass";
  1417. params.insert(std::make_pair(ge::AscendString(key.c_str()), ge::AscendString(value.c_str())));
  1418. auto status = aclgrphParseTensorFlow(modelFile.c_str(), params, graph);
  1419. EXPECT_EQ(status, SUCCESS);
  1420. }
  1421. TEST_F(STestTensorflowParser, avgpool3dgrad_plugin_test_format_NDHWC) {
  1422. ge::Graph graph;
  1423. std::cout << __FILE__ << std::endl;
  1424. std::string caseDir = __FILE__;
  1425. std::size_t idx = caseDir.find_last_of("/");
  1426. caseDir = caseDir.substr(0, idx);
  1427. std::string modelFile = caseDir + "/origin_models/avgpool3dgrad_case_1.pb";
  1428. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1429. EXPECT_EQ(status, SUCCESS);
  1430. }
  1431. TEST_F(STestTensorflowParser, tensorflow_merge_test) {
  1432. ge::Graph graph;
  1433. std::cout << __FILE__ << std::endl;
  1434. std::string caseDir = __FILE__;
  1435. std::size_t idx = caseDir.find_last_of("/");
  1436. caseDir = caseDir.substr(0, idx);
  1437. std::string modelFile = caseDir + "/origin_models/merge.pb";
  1438. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1439. EXPECT_EQ(status, FAILED);
  1440. }
  1441. TEST_F(STestTensorflowParser, tensorflow_no_op_test) {
  1442. ge::Graph graph;
  1443. std::cout << __FILE__ << std::endl;
  1444. std::string caseDir = __FILE__;
  1445. std::size_t idx = caseDir.find_last_of("/");
  1446. caseDir = caseDir.substr(0, idx);
  1447. std::string modelFile = caseDir + "/origin_models/test_no_op.pb";
  1448. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1449. EXPECT_EQ(status, SUCCESS);
  1450. }
  1451. TEST_F(STestTensorflowParser, tensorflow_identity_test) {
  1452. ge::Graph graph;
  1453. std::cout << __FILE__ << std::endl;
  1454. std::string caseDir = __FILE__;
  1455. std::size_t idx = caseDir.find_last_of("/");
  1456. caseDir = caseDir.substr(0, idx);
  1457. std::string modelFile = caseDir + "/origin_models/test_identity.pb";
  1458. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1459. EXPECT_EQ(status, SUCCESS);
  1460. }
  1461. TEST_F(STestTensorflowParser, tensorflow_constant_test) {
  1462. ge::Graph graph;
  1463. std::cout << __FILE__ << std::endl;
  1464. std::string caseDir = __FILE__;
  1465. std::size_t idx = caseDir.find_last_of("/");
  1466. caseDir = caseDir.substr(0, idx);
  1467. std::string modelFile = caseDir + "/origin_models/test_constant.pb";
  1468. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1469. EXPECT_EQ(status, SUCCESS);
  1470. TensorFlowConstantParser constantParser;
  1471. ge::OpDescPtr op_dest = make_shared<ge::OpDesc>("constant", ge::parser::CONSTANT);
  1472. NodeDef* node_def = initNodeDef();
  1473. node_def->set_name("Constant");
  1474. auto params = constantParser.ParseParams(node_def, op_dest);
  1475. EXPECT_EQ(params, SUCCESS);
  1476. auto value = constantParser.ParseValue(node_def, op_dest);
  1477. EXPECT_EQ(value, SUCCESS);
  1478. ConstantOperator op;
  1479. auto type = constantParser.ParseDType(node_def, &op);
  1480. EXPECT_EQ(type, SUCCESS);
  1481. }
  1482. TEST_F(STestTensorflowParser, tensorflow_reshpae_test) {
  1483. ge::Graph graph;
  1484. std::cout << __FILE__ << std::endl;
  1485. std::string caseDir = __FILE__;
  1486. std::size_t idx = caseDir.find_last_of("/");
  1487. caseDir = caseDir.substr(0, idx);
  1488. std::string modelFile = caseDir + "/origin_models/test_reshape.pb";
  1489. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1490. EXPECT_EQ(status, SUCCESS);
  1491. TensorFlowReshapeParser parser;
  1492. NodeDef * nodeDef = new NodeDef();
  1493. ge::OpDescPtr opdef_ = make_shared<::ge::OpDesc>("","");
  1494. google::protobuf::Map<std::string, tensorflow::AttrValue > *attr_map = nodeDef->mutable_attr();
  1495. domi::tensorflow::AttrValue tshape_attr_value;
  1496. tshape_attr_value.set_type(domi::tensorflow::DT_INT32);
  1497. (*attr_map)[TENSORFLOW_ATTR_TSHAPE] = tshape_attr_value;
  1498. domi::tensorflow::AttrValue t_attr_value;
  1499. t_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  1500. (*attr_map)[TENSORFLOW_ATTR_T] = t_attr_value;
  1501. Status ret = parser.ParseParams(nodeDef, opdef_);
  1502. EXPECT_EQ(domi::SUCCESS, ret);
  1503. delete nodeDef;
  1504. }
  1505. TEST_F(STestTensorflowParser, tensorflow_squeeze_test) {
  1506. ge::Graph graph;
  1507. std::cout << __FILE__ << std::endl;
  1508. std::string caseDir = __FILE__;
  1509. std::size_t idx = caseDir.find_last_of("/");
  1510. caseDir = caseDir.substr(0, idx);
  1511. std::string modelFile = caseDir + "/origin_models/test_sequeeze.pb";
  1512. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1513. EXPECT_EQ(status, SUCCESS);
  1514. TensorFlowSqueezeParser parser;
  1515. NodeDef *nodeDef = initNodeDef();
  1516. ge::OpDescPtr opDef = make_shared<::ge::OpDesc>("Squeeze","Squeeze");
  1517. Status ret = parser.ParseParams(nodeDef, opDef);
  1518. EXPECT_EQ(ret, SUCCESS);
  1519. NodeDef *nodeDef_dim = initNodeDef_dims();
  1520. ret = parser.ParseParams(nodeDef_dim, opDef);
  1521. EXPECT_EQ(SUCCESS, ret);
  1522. NodeDef *nodeDef_axis_dims = initNodeDef_axis_dims();
  1523. ret = parser.ParseParams(nodeDef_axis_dims, opDef);
  1524. EXPECT_EQ(GRAPH_PARAM_INVALID, ret);
  1525. static const string KEY_SHAPE_LIST = "key_shape_list";
  1526. static const string KEY_TENSOR_LIST = "key_tensor_list";
  1527. static const string KEY_DEFAULT = "key_default";
  1528. NodeDef *nodeDef2 = new NodeDef();
  1529. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = nodeDef2->mutable_attr();
  1530. domi::tensorflow::AttrValue dtype_attr_value ;
  1531. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  1532. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
1533. // set the axis attribute
  1534. tensorflow::AttrValue axis_attr_value;
  1535. tensorflow::AttrValue_ListValue *list = axis_attr_value.mutable_list();
  1536. list->add_i(1);
  1537. list->add_i(2);
  1538. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  1539. domi::tensorflow::AttrValue value;
  1540. domi::tensorflow::AttrValue df_attr_value;
  1541. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  1542. domi::tensorflow::AttrValue pad_attr_value;
  1543. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  1544. domi::tensorflow::AttrValue shape;
  1545. shape.mutable_list()->add_i((int64)32);
  1546. shape.mutable_list()->add_i((int64)32);
  1547. shape.mutable_list()->add_i((int64)14);
  1548. static const string KEY_TYPE_LIST = "key_type_list";
  1549. const std::string ATTR_NAME_INPUT_TENSOR_DESC = "input_tensor_desc";
  1550. const std::string ATTR_NAME_OUTPUT_TENSOR_DESC = "output_tensor_desc";
  1551. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  1552. value.clear_value();
  1553. value.mutable_list()->add_type(VALUE_TYPE);
  1554. TensorFlowUtil::AddNodeAttr(KEY_TYPE_LIST, value, nodeDef2);
  1555. value.clear_value();
  1556. domi::tensorflow::NameAttrList name_attr_list;
  1557. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  1558. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  1559. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  1560. *(value.mutable_list()->add_func()) = name_attr_list;
  1561. nodeDef2->mutable_attr()->insert({ge::ATTR_NAME_INPUT_TENSOR_DESC, value});
  1562. nodeDef2->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, value});
  1563. ret = parser.ParseParams(nodeDef2, opDef);
  1564. EXPECT_EQ(domi::SUCCESS, ret);
  1565. GeTensorDesc ge_desc;
  1566. ge_desc.SetFormat(ge::FORMAT_C1HWNCoC0);
  1567. ge_desc.SetDataType(ge::DT_FLOAT);
  1568. ge_desc.SetShape(GeShape({1,1,1,1,1,1}));
  1569. ret = parser.ParseDesc(value, ge_desc);
  1570. EXPECT_EQ(ret, SUCCESS);
  1571. delete nodeDef2;
  1572. delete nodeDef_axis_dims;
  1573. delete nodeDef_dim;
  1574. delete nodeDef;
  1575. }
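// The Squeeze test covers four parser inputs: a plain NodeDef, one with a dims
// shape, one with an invalid axis/dims combination (expected to return
// GRAPH_PARAM_INVALID), and one carrying serialized input/output tensor
// descriptors; ParseDesc is then exercised on that serialized descriptor value.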
  1576. TEST_F(STestTensorflowParser, tensorflow_fill_test) {
  1577. ge::Graph graph;
  1578. std::cout << __FILE__ << std::endl;
  1579. std::string caseDir = __FILE__;
  1580. std::size_t idx = caseDir.find_last_of("/");
  1581. caseDir = caseDir.substr(0, idx);
  1582. std::string modelFile = caseDir + "/origin_models/test_fill.pb";
  1583. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1584. EXPECT_EQ(status, SUCCESS);
  1585. }
  1586. TEST_F(STestTensorflowParser, tensorflow_shape_n_test) {
  1587. ge::Graph graph;
  1588. std::cout << __FILE__ << std::endl;
  1589. std::string caseDir = __FILE__;
  1590. std::size_t idx = caseDir.find_last_of("/");
  1591. caseDir = caseDir.substr(0, idx);
  1592. std::string modelFile = caseDir + "/origin_models/test_shape_n.pb";
  1593. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1594. EXPECT_EQ(status, SUCCESS);
  1595. }
  1596. TEST_F(STestTensorflowParser, tensorflow_switch_test) {
  1597. ge::Graph graph;
  1598. std::cout << __FILE__ << std::endl;
  1599. std::string caseDir = __FILE__;
  1600. std::size_t idx = caseDir.find_last_of("/");
  1601. caseDir = caseDir.substr(0, idx);
  1602. std::string modelFile = caseDir + "/origin_models/test_switch.pb";
  1603. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1604. EXPECT_EQ(status, SUCCESS);
  1605. TensorFlowRefSwitchParser refSwitchParser;
  1606. ge::OpDescPtr op_dest = make_shared<ge::OpDesc>("constant", ge::parser::CONSTANT);
  1607. NodeDef* node_def = initNodeDef();
  1608. node_def->set_name("RefSwitch");
  1609. auto params = refSwitchParser.ParseParams(node_def, op_dest);
  1610. EXPECT_EQ(params, SUCCESS);
  1611. RefSwitchOperator op;
  1612. auto parseRet = refSwitchParser.ParseT(node_def, &op);
  1613. EXPECT_EQ(parseRet, SUCCESS);
  1614. }
  1615. TEST_F(STestTensorflowParser, tensorflow_enter_test) {
  1616. ge::Graph graph;
  1617. std::cout << __FILE__ << std::endl;
  1618. std::string caseDir = __FILE__;
  1619. std::size_t idx = caseDir.find_last_of("/");
  1620. caseDir = caseDir.substr(0, idx);
  1621. std::string modelFile = caseDir + "/origin_models/test_enter.pb";
  1622. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1623. EXPECT_EQ(status, SUCCESS);
  1624. TensorFlowEnterParser enterParser;
  1625. ge::OpDescPtr op_dest = make_shared<ge::OpDesc>("Enter", ge::parser::ENTER);
  1626. NodeDef* node_def = initNodeDef();
  1627. node_def->set_name("Enter");
  1628. Status ret = enterParser.ParseParams(node_def, op_dest);
  1629. EXPECT_EQ(ret, FAILED);
  1630. static const string KEY_SHAPE_LIST = "key_shape_list";
  1631. static const string KEY_TENSOR_LIST = "key_tensor_list";
  1632. static const string KEY_DEFAULT = "key_default";
  1633. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = node_def->mutable_attr();
  1634. domi::tensorflow::AttrValue dtype_attr_value;
  1635. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  1636. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
1637. // set the axis attribute
  1638. domi::tensorflow::AttrValue axis_attr_value;
  1639. ::tensorflow::AttrValue_ListValue* list = axis_attr_value.mutable_list();
  1640. list->add_i(1);
  1641. list->add_i(2);
  1642. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  1643. domi::tensorflow::AttrValue value;
  1644. domi::tensorflow::AttrValue df_attr_value;
  1645. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  1646. domi::tensorflow::AttrValue pad_attr_value;
  1647. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  1648. domi::tensorflow::AttrValue shape;
  1649. shape.mutable_list()->add_i((int64)32);
  1650. shape.mutable_list()->add_i((int64)32);
  1651. shape.mutable_list()->add_i((int64)14);
  1652. static const string KEY_TYPE_LIST = "key_type_list";
  1653. const std::string ENTER_ATTR_FRAME_NAME = "frame_name";
  1654. const std::string ATTR_NAME_OUTPUT_TENSOR_DESC = "output_tensor_desc";
  1655. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  1656. value.clear_value();
  1657. value.mutable_list()->add_type(VALUE_TYPE);
  1658. TensorFlowUtil::AddNodeAttr(KEY_TYPE_LIST, value, node_def);
  1659. value.clear_value();
  1660. domi::tensorflow::NameAttrList name_attr_list;
  1661. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  1662. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  1663. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  1664. *(value.mutable_list()->add_func()) = name_attr_list;
  1665. node_def->mutable_attr()->insert({ge::ENTER_ATTR_FRAME_NAME, value});
  1666. node_def->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, value});
  1667. ret = enterParser.ParseParams(node_def, op_dest);
  1668. EXPECT_EQ(ret, FAILED);
  1669. }
  1670. TEST_F(STestTensorflowParser, tensorflow_VariableV2_test) {
  1671. ge::Graph graph;
  1672. std::string caseDir = __FILE__;
  1673. std::size_t idx = caseDir.find_last_of("/");
  1674. caseDir = caseDir.substr(0, idx);
  1675. std::string modelFile = caseDir + "/origin_models/test_VariableV2.pb";
  1676. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1677. EXPECT_EQ(status, SUCCESS);
  1678. }
  1679. TEST_F(STestTensorflowParser, tensorflow_fusion_op_parser_test)
  1680. {
  1681. TensorFlowFusionOpParser fusionOpParser;
  1682. ge::OpDescPtr op_dest = make_shared<ge::OpDesc>("FusionOp", ge::parser::CONSTANT);
  1683. int index = 0;
  1684. NodeDef* node_def = fusioninitNodeDef(index);
  1685. node_def->set_name("FusionOp");
  1686. auto ret = fusionOpParser.ParseParams(node_def, op_dest);
  1687. EXPECT_EQ(ret, SUCCESS);
  1688. int32_t param = 1;
  1689. ret = fusionOpParser.ParseParamFromConst(node_def, param);
  1690. EXPECT_EQ(ret, SUCCESS);
  1691. ret = fusionOpParser.ParseParamFromConst(node_def, param, index);
  1692. EXPECT_EQ(ret, SUCCESS);
  1693. float params = 0.0;
  1694. ret = fusionOpParser.ParseParamFromConst(node_def, params);
  1695. EXPECT_EQ(ret, SUCCESS);
  1696. index = 2;
  1697. node_def = fusioninitNodeDef(index);
  1698. ret = fusionOpParser.ParseParamFromConst(node_def, params, index);
  1699. EXPECT_EQ(ret, domi::PARAM_INVALID);
  1700. ret = fusionOpParser.ParseHalfFromConst(node_def, params, 0);
  1701. EXPECT_EQ(ret, SUCCESS);
  1702. ret = fusionOpParser.ParseHalfFromConst(node_def, params, 3);
  1703. EXPECT_EQ(ret, domi::PARAM_INVALID);
  1704. node_def = fusioninitNodeDef(0);
  1705. ret = fusionOpParser.ParseHalfFromConst(node_def, params, 3);
  1706. EXPECT_EQ(ret, domi::PARAM_INVALID);
  1707. static const float VALUE_FLOAT = 1.0;
  1708. ge::GeTensorPtr weight = nullptr;
  1709. ret = fusionOpParser.ParseWeightFromConst(node_def, weight);
  1710. EXPECT_EQ(ret, domi::SUCCESS);
  1711. EXPECT_NE(weight, nullptr);
  1712. ge::DataType ge_data_type = weight->GetTensorDesc().GetDataType();
  1713. EXPECT_EQ(ge_data_type, ge::DataType::DT_FLOAT);
  1714. const uint8_t* data_buff = weight->GetData().GetData();
  1715. size_t data_size = weight->GetData().size();
  1716. EXPECT_NE(data_buff, nullptr);
  1717. EXPECT_EQ(data_size, sizeof(float));
  1718. float value_float = *((float*)data_buff);
  1719. EXPECT_EQ(value_float, VALUE_FLOAT);
  1720. delete node_def;
  1721. }
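// ParseParamFromConst / ParseHalfFromConst / ParseWeightFromConst all read a
// value out of a Const NodeDef produced by fusioninitNodeDef(); out-of-range
// indices are expected to fail with PARAM_INVALID, and the extracted weight is
// checked to be a single float equal to 1.0.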
  1722. TEST_F(STestTensorflowParser, tensorflow_auto_mapping_parser_adapter_test)
  1723. {
  1724. ge::OpDescPtr op_dest = nullptr;
  1725. Message *op_src = nullptr;
  1726. TensorFlowAutoMappingParserAdapter autoMappingParser;
  1727. NodeDef* node_def = initNodeDef();
  1728. Status ret = autoMappingParser.ParseParams(op_src, op_dest);
  1729. EXPECT_EQ(ret, PARAM_INVALID);
  1730. ret = autoMappingParser.ParseParams(node_def, op_dest);
  1731. EXPECT_EQ(ret, PARAM_INVALID);
  1732. op_dest = make_shared<ge::OpDesc>("AutoMapping", ge::parser::CONSTANT);
  1733. op_dest->SetType(ge::parser::EMPTY);
  1734. ret = autoMappingParser.ParseParams(node_def, op_dest);
  1735. EXPECT_EQ(ret, SUCCESS);
  1736. op_dest->SetType(ge::parser::IDENTITYN);
  1737. ret = autoMappingParser.ParseParams(node_def, op_dest);
  1738. EXPECT_EQ(ret, SUCCESS);
  1739. op_dest->SetType(ge::parser::SIZE);
  1740. ret = autoMappingParser.ParseParams(node_def, op_dest);
  1741. EXPECT_EQ(ret, SUCCESS);
  1742. op_dest->SetType(ge::parser::SHAPE);
  1743. op_dest->AddOutputDesc(GeTensorDesc());
  1744. ret = autoMappingParser.ParseParams(node_def, op_dest);
  1745. EXPECT_EQ(ret, SUCCESS);
  1746. }
  1747. TEST_F(STestTensorflowParser, tensorflow_fusion_custom_parser_adapter_test)
  1748. {
  1749. REGISTER_CUSTOM_OP("FusionCustom")
  1750. .FrameworkType(domi::TENSORFLOW)
  1751. .OriginOpType("FusionCustom")
  1752. .FusionParseParamsFn(FusionParserParams)
  1753. .ImplyType(ImplyType::TVM);
  1754. register_tbe_op();
  1755. auto graph = std::make_shared<ge::ComputeGraph>("FusionCustom");
  1756. auto op_desc = std::make_shared<ge::OpDesc>("FusionCustom", "FusionCustom");
  1757. auto node = graph->AddNode(op_desc);
  1758. NodeDef *node_def = new NodeDef();
  1759. std::vector<const NodeDef *> v_input_const1;
  1760. v_input_const1.push_back(node_def);
  1761. TensorFlowFusionCustomParserAdapter parser;
  1762. domi::Status status = parser.ParseParams(v_input_const1, node);
  1763. EXPECT_EQ(SUCCESS, status);
  1764. ge::Operator op_src("pool", "pooling");
  1765. std::vector<ge::Operator> v_input_const2;
  1766. v_input_const2.push_back(op_src);
  1767. Status ret = parser.ParseParams(v_input_const2, node);
  1768. EXPECT_EQ(FAILED, ret);
  1769. delete node_def;
  1770. }
  1771. TEST_F(STestTensorflowParser, tensorflow_custom_parser_adapter_test)
  1772. {
  1773. ge::Operator op_src("pool", "pooling");
  1774. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  1775. TensorFlowCustomParserAdapter parser;
  1776. Status ret = parser.ParseParams(op_src, op_dest);
  1777. EXPECT_EQ(ret, FAILED);
  1778. REGISTER_CUSTOM_OP("Variable")
  1779. .FrameworkType(domi::TENSORFLOW)
  1780. .OriginOpType("VariableV2")
  1781. .ParseParamsFn(ParseParams)
  1782. .ParseParamsByOperatorFn(ParseParamByOpFunc)
  1783. .ImplyType(ImplyType::CUSTOM);
  1784. register_tbe_op();
  1785. Operator opSrc(ge::parser::VARIABLE, "VariableV2");
  1786. ret = parser.ParseParams(opSrc, op_dest);
  1787. EXPECT_EQ(ret, SUCCESS);
  1788. }
  1789. TEST_F(STestTensorflowParser, tensorflow_graph_functiondef_FindAttrValue_test)
  1790. {
  1791. GraphToFunctionDef functionDef;
  1792. NodeDef *node_def = nullptr;
  1793. std::string attr_name = "Const";
  1794. tensorflow::AttrValue attr_value;
  1795. bool ret = functionDef.FindAttrValue(node_def, attr_name, attr_value);
  1796. EXPECT_EQ(ret, false);
  1797. node_def = initNodeDef();
  1798. attr_name = ge::ATTR_NAME_INPUT_TENSOR_DESC;
  1799. node_def->set_name("Const");
  1800. ret = functionDef.FindAttrValue(node_def, attr_name, attr_value);
  1801. EXPECT_EQ(ret, false);
  1802. }
  1803. TEST_F(STestTensorflowParser, tensorflow_graph_functiondef_BuildFunctionDef_test)
  1804. {
  1805. ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  1806. string inputNodeType = "DATA";
  1807. MakeDagGraph(subGraph, inputNodeType);
  1808. FunctionDefLibrary library;
  1809. tensorflow::NodeDef call_node_def;
  1810. call_node_def.set_op("fusionop");
  1811. call_node_def.set_name("fusionop");
  1812. vector<ge::InDataAnchorPtr> in_anchor;
  1813. vector<ge::OutDataAnchorPtr> out_anchor;
  1814. for (ge::NodePtr node : subGraph->GetAllNodes()) {
  1815. for (auto in : node->GetAllInDataAnchors()) {
  1816. if (in->GetPeerOutAnchor() != nullptr && in->GetPeerOutAnchor()->GetOwnerNode()->GetOpDesc()->GetType() == parser::DATA) {
  1817. in_anchor.push_back(in);
  1818. }
  1819. }
  1820. for (auto out : node->GetAllOutDataAnchors()) {
  1821. for (auto i : out->GetPeerInDataAnchors()) {
  1822. if (i->GetOwnerNode()->GetOpDesc()->GetType() == parser::NETOUTPUT) {
  1823. out_anchor.push_back(out);
  1824. }
  1825. }
  1826. }
  1827. }
  1828. Status ret = GraphToFunctionDef::BuildFunctionDef(subGraph,
  1829. "fusionop",
  1830. &library,
  1831. &call_node_def,
  1832. in_anchor,
  1833. out_anchor);
  1834. EXPECT_EQ(domi::INTERNAL_ERROR, ret);
  1835. }
  1836. TEST_F(STestTensorflowParser, tensorflow_CheckOpShapeDim_test)
  1837. {
  1838. NodeDef *node_def = initNodeDef();
  1839. std::set<int> dims;
  1840. dims.insert(1);
  1841. dims.insert(2);
  1842. bool valid = true;
  1843. TensorFlowModelParser parser;
  1844. Status ret = parser.CheckOpShapeDim(node_def, dims, valid);
  1845. EXPECT_EQ(ret, SUCCESS);
  1846. static const string KEY_SHAPE_LIST = "key_shape_list";
  1847. static const string KEY_TENSOR_LIST = "key_tensor_list";
  1848. static const string KEY_DEFAULT = "key_default";
  1849. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = node_def->mutable_attr();
  1850. domi::tensorflow::AttrValue dtype_attr_value;
  1851. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  1852. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
1853. // set the axis attribute
  1854. domi::tensorflow::AttrValue axis_attr_value;
  1855. ::tensorflow::AttrValue_ListValue* list = axis_attr_value.mutable_list();
  1856. list->add_i(1);
  1857. list->add_i(2);
  1858. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  1859. domi::tensorflow::AttrValue value;
  1860. domi::tensorflow::AttrValue df_attr_value;
  1861. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  1862. domi::tensorflow::AttrValue pad_attr_value;
  1863. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  1864. domi::tensorflow::AttrValue shape;
  1865. shape.mutable_list()->add_i((int64)32);
  1866. shape.mutable_list()->add_i((int64)32);
  1867. shape.mutable_list()->add_i((int64)14);
  1868. static const string KEY_TYPE_LIST = "key_type_list";
  1869. const std::string ATTR_NAME_INPUT_TENSOR_DESC = "input_tensor_desc";
  1870. const std::string ATTR_NAME_OUTPUT_TENSOR_DESC = "output_tensor_desc";
  1871. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  1872. value.clear_value();
  1873. value.mutable_list()->add_type(VALUE_TYPE);
  1874. TensorFlowUtil::AddNodeAttr(KEY_TYPE_LIST, value, node_def);
  1875. value.clear_value();
  1876. domi::tensorflow::NameAttrList name_attr_list;
  1877. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  1878. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  1879. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  1880. *(value.mutable_list()->add_func()) = name_attr_list;
  1881. node_def->mutable_attr()->insert({ge::ATTR_NAME_INPUT_TENSOR_DESC, value});
  1882. node_def->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, value});
  1883. ret = parser.CheckOpShapeDim(node_def, dims, valid);
  1884. EXPECT_EQ(ret, SUCCESS);
  1885. }
  1886. TEST_F(STestTensorflowParser, tensorflow_Scope_pass_test)
  1887. {
  1888. ScopePassManager passmanager;
  1889. auto scope_graph = ge::parser::MakeShared<ge::ScopeGraph>();
  1890. if (scope_graph == nullptr) {
  1891. GELOGE(FAILED, "Scope graph make shared failed.");
  1892. return;
  1893. }
  1894. if (scope_graph->Init() != SUCCESS) {
  1895. GELOGE(FAILED, "Scope graph init failed.");
  1896. return;
  1897. }
  1898. ge::TensorFlowModelParser tf_model_parser;
  1899. std::vector<string> scope_passes_list = {"ScopeBasicLSTMCellPass", "ScopeLayerNormPass"};
  1900. Status ret = tf_model_parser.RunScopeFusionPass(scope_passes_list, passmanager, scope_graph);
  1901. EXPECT_NE(ge::SUCCESS, ret);
  1902. }
  1903. TEST_F(STestTensorflowParser, tensorflow_variable_v2_parser_test)
  1904. {
  1905. TensorFlowCustomParserAdapter parser;
  1906. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  1907. NodeDef *node_def = initNodeDef();
  1908. TensorFlowModelParser modelParser;
  1909. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  1910. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("Variable");
  1911. shared_ptr<TensorFlowOpParser> tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1912. Status ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1913. EXPECT_EQ(ret, PARAM_INVALID);
  1914. node_def->set_name("TemporaryVariable");
  1915. node_def->set_op("TemporaryVariable");
  1916. op_parser = factory->CreateOpParser("TemporaryVariable");
  1917. tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1918. ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1919. EXPECT_EQ(ret, PARAM_INVALID);
  1920. NodeDef *nodeDef_temporaryVariable = initOpNodeDef_TemporaryVariable();
  1921. op_parser = factory->CreateOpParser("TemporaryVariable");
  1922. tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1923. ret = tensorflow_op_parser->ParseParams(nodeDef_temporaryVariable, op_dest);
  1924. EXPECT_EQ(ret, SUCCESS);
  1925. NodeDef *nodeDef_VariableV2 = initOpNodeDef_VariableV2();
  1926. op_parser = factory->CreateOpParser("Variable");
  1927. tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1928. ret = tensorflow_op_parser->ParseParams(nodeDef_VariableV2, op_dest);
  1929. EXPECT_EQ(ret, SUCCESS);
  1930. }
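// Op parsers are obtained through OpParserFactory::Instance(domi::TENSORFLOW)
// and CreateOpParser(<op type>), then downcast to TensorFlowOpParser before
// calling ParseParams; the remaining tests in this file reuse the same lookup
// pattern.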
  1931. TEST_F(STestTensorflowParser, tensorflow_var_is_initialized_op_test)
  1932. {
  1933. TensorFlowCustomParserAdapter parser;
  1934. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  1935. NodeDef *node_def = initNodeDef();
  1936. TensorFlowModelParser modelParser;
  1937. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  1938. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("VarIsInitializedOp");
  1939. shared_ptr<TensorFlowOpParser> tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1940. Status ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1941. EXPECT_EQ(ret, SUCCESS);
  1942. }
  1943. TEST_F(STestTensorflowParser, tensorflow_arg_parser_test)
  1944. {
  1945. TensorFlowCustomParserAdapter parser;
  1946. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  1947. NodeDef *node_def = initNodeDef();
  1948. TensorFlowModelParser modelParser;
  1949. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  1950. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("_Arg");
  1951. shared_ptr<TensorFlowOpParser> tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1952. Status ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1953. EXPECT_EQ(ret, SUCCESS);
  1954. static const string KEY_SHAPE_LIST = "key_shape_list";
  1955. static const string KEY_TENSOR_LIST = "key_tensor_list";
  1956. static const string KEY_DEFAULT = "key_default";
  1957. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = node_def->mutable_attr();
  1958. domi::tensorflow::AttrValue dtype_attr_value;
  1959. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  1960. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
1961. // Set the axis attribute
  1962. domi::tensorflow::AttrValue axis_attr_value;
  1963. ::tensorflow::AttrValue_ListValue* list = axis_attr_value.mutable_list();
  1964. list->add_i(1);
  1965. list->add_i(2);
  1966. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  1967. domi::tensorflow::AttrValue value;
  1968. domi::tensorflow::AttrValue df_attr_value;
  1969. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  1970. domi::tensorflow::AttrValue pad_attr_value;
  1971. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  1972. domi::tensorflow::AttrValue shape;
  1973. shape.mutable_list()->add_i((int64)32);
  1974. shape.mutable_list()->add_i((int64)32);
  1975. shape.mutable_list()->add_i((int64)14);
  1976. static const string KEY_TYPE_LIST = "key_type_list";
  1977. const std::string ATTR_NAME_INPUT_TENSOR_DESC = "input_tensor_desc";
  1978. const std::string ATTR_NAME_OUTPUT_TENSOR_DESC = "output_tensor_desc";
  1979. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  1980. value.clear_value();
  1981. value.mutable_list()->add_type(VALUE_TYPE);
  1982. TensorFlowUtil::AddNodeAttr(KEY_TYPE_LIST, value, node_def);
  1983. value.clear_value();
  1984. domi::tensorflow::NameAttrList name_attr_list;
  1985. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  1986. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  1987. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  1988. *(value.mutable_list()->add_func()) = name_attr_list;
  1989. node_def->mutable_attr()->insert({ge::ATTR_NAME_INPUT_TENSOR_DESC, value});
  1990. node_def->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, value});
  1991. ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1992. EXPECT_EQ(ret, SUCCESS);
  1993. }
  1994. TEST_F(STestTensorflowParser, tensorflow_frameworkop_parser_test1)
  1995. {
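// FrameworkOp parsing of this minimal NodeDef is expected to return PARAM_INVALID, both before and after changing the data type.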
  1996. TensorFlowCustomParserAdapter parser;
  1997. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  1998. NodeDef *node_def = initNodeDef();
  1999. TensorFlowModelParser modelParser;
  2000. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  2001. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("FrameworkOp");
  2002. shared_ptr<TensorFlowOpParser> tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  2003. Status ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  2004. EXPECT_EQ(ret, PARAM_INVALID);
  2005. ChangeDataType(node_def, tensorflow::DT_UINT16);
  2006. ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  2007. EXPECT_EQ(ret, PARAM_INVALID);
  2008. }
  2009. TEST_F(STestTensorflowParser, tensorflow_frameworkop_parser_test2)
  2010. {
  2011. TensorFlowCustomParserAdapter parser;
  2012. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  2013. NodeDef *node_def = initNodeDef();
  2014. node_def->set_name("FrameworkOp");
  2015. node_def->set_op("_Retval");
  2016. TensorFlowModelParser modelParser;
  2017. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  2018. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("FrameworkOp");
  2019. shared_ptr<TensorFlowOpParser> tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  2020. static const string KEY_SHAPE_LIST = "key_shape_list";
  2021. static const string KEY_TENSOR_LIST = "key_tensor_list";
  2022. static const string KEY_DEFAULT = "key_default";
  2023. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = node_def->mutable_attr();
  2024. domi::tensorflow::AttrValue dtype_attr_value;
  2025. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  2026. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
2027. // Set the axis attribute
  2028. domi::tensorflow::AttrValue axis_attr_value;
  2029. ::tensorflow::AttrValue_ListValue* list = axis_attr_value.mutable_list();
  2030. list->add_i(1);
  2031. list->add_i(2);
  2032. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  2033. domi::tensorflow::AttrValue value;
  2034. domi::tensorflow::AttrValue df_attr_value;
  2035. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  2036. domi::tensorflow::AttrValue pad_attr_value;
  2037. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  2038. domi::tensorflow::AttrValue shape;
  2039. shape.mutable_list()->add_i((int64)32);
  2040. shape.mutable_list()->add_i((int64)32);
  2041. shape.mutable_list()->add_i((int64)14);
  2042. static const string KEY_TYPE_LIST = "key_type_list";
  2043. const std::string ATTR_NAME_INPUT_TENSOR_DESC = "ATTR_NAME_FRAMEWORK_OP_DEF";
  2044. const std::string ATTR_NAME_OUTPUT_TENSOR_DESC = "output_tensor_desc";
  2045. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  2046. value.clear_value();
  2047. value.mutable_list()->add_type(VALUE_TYPE);
  2048. TensorFlowUtil::AddNodeAttr(KEY_TYPE_LIST, value, node_def);
  2049. value.clear_value();
  2050. domi::tensorflow::NameAttrList name_attr_list;
  2051. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  2052. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  2053. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  2054. *(value.mutable_list()->add_func()) = name_attr_list;
  2055. node_def->mutable_attr()->insert({ge::ATTR_NAME_INPUT_TENSOR_DESC, value});
  2056. node_def->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, value});
  2057. Status ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  2058. EXPECT_EQ(ret, SUCCESS);
  2059. }
  2060. TEST_F(STestTensorflowParser, tensorflow_reshape_parser_test)
  2061. {
  2062. TensorFlowCustomParserAdapter parser;
  2063. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  2064. NodeDef *node_def = initNodeDef();
  2065. TensorFlowModelParser modelParser;
  2066. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  2067. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("Reshape");
  2068. shared_ptr<TensorFlowOpParser> tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  2069. Status ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  2070. EXPECT_EQ(ret, SUCCESS);
  2071. NodeDef * nodeDef = new NodeDef();
  2072. nodeDef->set_op("Reshape");
  2073. google::protobuf::Map< ::std::string, ::tensorflow::AttrValue >* node_attr_map = nodeDef->mutable_attr();
  2074. domi::tensorflow::AttrValue attr_value;
  2075. attr_value.mutable_list()->add_i((int64)32);
  2076. attr_value.mutable_list()->add_i((int64)32);
  2077. attr_value.mutable_list()->add_i((int64)14);
  2078. domi::tensorflow::AttrValue df_attr_value2;
  2079. df_attr_value2.set_s(TENSORFLOWF_TENSOR_NHWC);
  2080. (*node_attr_map)[TENSORFLOW_ATTR_DATA_FORMAT] = df_attr_value2;
  2081. domi::tensorflow::AttrValue df_attr_value;
  2082. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
2083. // Set the padding attribute
  2084. domi::tensorflow::AttrValue pad_attr_value2;
  2085. pad_attr_value2.set_s(TENSORFLOWF_OP_PADDING_SAME);
  2086. (*node_attr_map)[TENSORFLOW_ATTR_PADDING] = pad_attr_value2;
  2087. domi::tensorflow::AttrValue pad_attr_value;
  2088. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  2089. domi::tensorflow::NameAttrList name_attr_list;
  2090. name_attr_list.mutable_attr()->insert({"serialize_shape", attr_value});
  2091. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  2092. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  2093. *(attr_value.mutable_list()->add_func()) = name_attr_list;
  2094. GeTensorDesc ge_desc;
  2095. ge_desc.SetFormat(ge::FORMAT_C1HWNCoC0);
  2096. ge_desc.SetDataType(ge::DT_FLOAT);
  2097. ge_desc.SetShape(GeShape({1,1,1,1,1,1}));
  2098. TensorFlowReshapeParser reshapeParser;
  2099. ret = reshapeParser.ParseDesc(attr_value, ge_desc);
  2100. EXPECT_EQ(ret, SUCCESS);
  2101. }
  2102. TEST_F(STestTensorflowParser, tensorflow_DefunToPartitionedCall_parser_test)
  2103. {
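// DefunToPartitionedCall fails for an unknown node name; after the attributes and fusion origin context for "pre_node_a" are prepared, it succeeds.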
  2104. TensorFlowModelParser parser;
  2105. NodeDef *node_def = initNodeDef();
  2106. node_def->set_name("ShapeN");
  2107. ge::OpDescPtr op = make_shared<ge::OpDesc>("ShapeN", ge::parser::PARTITIONEDCALL);
  2108. Status ret = parser.DefunToPartitionedCall(node_def, op);
  2109. EXPECT_EQ(ret, FAILED);
  2110. static const string KEY_SHAPE_LIST = "key_shape_list";
  2111. static const string KEY_TENSOR_LIST = "key_tensor_list";
  2112. static const string KEY_DEFAULT = "key_default";
  2113. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = node_def->mutable_attr();
  2114. domi::tensorflow::AttrValue dtype_attr_value;
  2115. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  2116. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
2117. // Set the axis attribute
  2118. domi::tensorflow::AttrValue axis_attr_value;
  2119. ::tensorflow::AttrValue_ListValue* list = axis_attr_value.mutable_list();
  2120. list->add_i(1);
  2121. list->add_i(2);
  2122. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  2123. domi::tensorflow::AttrValue value;
  2124. domi::tensorflow::AttrValue df_attr_value;
  2125. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  2126. domi::tensorflow::AttrValue pad_attr_value;
  2127. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  2128. domi::tensorflow::AttrValue shape;
  2129. shape.mutable_list()->add_i((int64)32);
  2130. shape.mutable_list()->add_i((int64)32);
  2131. shape.mutable_list()->add_i((int64)14);
  2132. static const string KEY_TYPE_LIST = "key_type_list";
  2133. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  2134. value.clear_value();
  2135. value.mutable_list()->add_type(VALUE_TYPE);
  2136. TensorFlowUtil::AddNodeAttr(KEY_TYPE_LIST, value, node_def);
  2137. value.clear_value();
  2138. domi::tensorflow::NameAttrList name_attr_list;
  2139. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  2140. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  2141. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  2142. *(value.mutable_list()->add_func()) = name_attr_list;
  2143. node_def->mutable_attr()->insert({"_disable_call_shape_inference", value});
  2144. node_def->mutable_attr()->insert({"_disable_call_shape_inference", value});
  2145. std::string fusion_op_name = "pre_node_a";
  2146. GenOriginContext(&parser, fusion_op_name);
  2147. node_def->set_name("pre_node_a");
  2148. ret = parser.DefunToPartitionedCall(node_def, op);
  2149. EXPECT_EQ(ret, SUCCESS);
  2150. }
  2151. TEST_F(STestTensorflowParser, tensorflow_TransNodeToOpDesc_parser_test)
  2152. {
  2153. TensorFlowModelParser parser;
  2154. NodeDef *node_def = initNodeDef();
  2155. node_def->set_name("ge::parser::DATA");
  2156. std::string op_type = "ge::parser::DATA";
  2157. ge::OpDescPtr op = make_shared<ge::OpDesc>("constant", ge::parser::CONSTANT);
  2158. Status ret = parser.TransNodeToOpDesc(node_def, op, op_type);
  2159. EXPECT_EQ(ret, FAILED);
  2160. }
  2161. domi::Status fusion_parse_param_by_op(const std::vector<ge::Operator> &op_src, ge::Operator &op) {
  2162. return domi::SUCCESS;
  2163. }
  2164. TEST_F(STestTensorflowParser, Fusion_node_parse_params_success) {
  2165. ge::ComputeGraphPtr compute_graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2166. ModelParserFactory* factory = ModelParserFactory::Instance();
  2167. shared_ptr<ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2168. ASSERT_TRUE(NULL != model_parser);
  2169. TensorFlowModelParser tensorflow_parser;
  2170. domi::tensorflow::NodeDef node_def;
  2171. node_def.set_name("data");
  2172. node_def.set_op("FusionCustom");
  2173. FusionParseParamByOpFunc function = fusion_parse_param_by_op;
  2174. shared_ptr<ge::OpParserFactory> op_parser = ge::OpParserFactory::Instance(domi::TENSORFLOW);
  2175. shared_ptr<OpParser> fusion_op_parser = op_parser->CreateFusionOpParser("FusionCustom");
  2176. ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2177. ge::OpDescPtr op1 = std::make_shared<ge::OpDesc>("data", "FusionCustom");
  2178. ge::NodePtr node1 = std::make_shared<ge::Node>(op1, graph);
  2179. vector<const NodeDef *> node_defs;
  2180. node_defs.push_back(&node_def);
  2181. tensorflow_parser.fusion_op_nodedef_map_["data"] = node_defs;
  2182. Status ret = tensorflow_parser.FusionNodeParseParams(fusion_op_parser, &node_def, node1);
  2183. EXPECT_EQ(domi::SUCCESS, ret);
  2184. }
  2185. TEST_F(STestTensorflowParser, Tensorflow_recordFusionResult_parser_test)
  2186. {
  2187. auto scope_graph = ge::parser::MakeShared<ge::ScopeGraph>();
  2188. if (scope_graph == nullptr) {
  2189. GELOGE(FAILED, "Scope graph make shared failed.");
  2190. return;
  2191. }
  2192. if (scope_graph->Init() != SUCCESS) {
  2193. GELOGE(FAILED, "Scope graph init failed.");
  2194. return;
  2195. }
  2196. domi::tensorflow::NodeDef node_def;
  2197. node_def.set_name("OP");
  2198. FusionScopesResult *fusion_scope_rlt = new (std::nothrow) FusionScopesResult();
  2199. if (fusion_scope_rlt == nullptr) {
  2200. GELOGE(FAILED, "FusionScopesResult make shared failed.");
  2201. return;
  2202. }
  2203. fusion_scope_rlt->Init();
  2204. fusion_scope_rlt->SetName("OP");
  2205. auto &impl_scope_graph = scope_graph->impl_;
  2206. std::string scope_name = fusion_scope_rlt->Name();
  2207. impl_scope_graph->fusion_results_.insert(std::make_pair(scope_name, fusion_scope_rlt));
  2208. std::vector<ge::OperatorPtr> nodes;
  2209. ge::OperatorPtr op = ge::parser::MakeShared<ge::Operator>("op_name", "op_type");
  2210. if (op == nullptr) {
  2211. GELOGE(FAILED, "Operator make shared failed.");
  2212. return;
  2213. }
  2214. nodes.push_back(op);
  2215. fusion_scope_rlt->impl_->AddNodes(nodes);
  2216. ge::OpDescPtr opDesc = std::make_shared<ge::OpDesc>();
  2217. ge::TensorFlowModelParser tf_model_parser;
  2218. Status ret = tf_model_parser.RecordFusionResult(scope_graph, &node_def, opDesc);
  2219. EXPECT_EQ(SUCCESS, ret);
  2220. }
  2221. TEST_F(STestTensorflowParser, Tensorflow_UpdateFusionOpContext_test)
  2222. {
  2223. ModelParserFactory* factory = ModelParserFactory::Instance();
  2224. shared_ptr<domi::ModelParser> model_parser = factory->CreateModelParser(domi::TENSORFLOW);
  2225. TensorFlowModelParser tensorflow_parser;
  2226. ScopeFusionOpInfo info;
  2227. ge::OpNodeContext normal_op_node_context;
  2228. ge::OpNodeContext fusion_op_node_context;
2229. /* 1. Preconditions */
  2230. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2231. ScopePassManager passmanager;
  2232. shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  2233. NodeDef * node1 = graph->add_node();
  2234. node1->set_name("conv_conv5/BatchNorm/batchnorm/add");
  2235. node1->set_op("Add");
  2236. node1->add_input("conv_conv5/BatchNorm/moving_variance");
  2237. node1->add_input("conv_conv5/BatchNorm/batchnorm/add/y");
  2238. NodeDef * node2 = graph->add_node();
  2239. node2->set_name("conv_conv5/BatchNorm/moving_variance");
  2240. node2->set_op("Const");
  2241. NodeDef * node3 = graph->add_node();
  2242. node3->set_name("conv_conv5/BatchNorm/batchnorm/add/y");
  2243. node3->set_op("Const");
  2244. info.fusion_node_name = "conv_conv5/BatchNorm/batchnorm";
  2245. info.fusion_op_type = ge::parser::FUSIONBATCHNORM;
  2246. info.node_name = "conv_conv5/BatchNorm/batchnorm/add";
  2247. info.description = "";
  2248. info.scope_pass = false;
  2249. EXPECT_EQ(scope_graph->impl_->GetFusionScopesResults(nullptr), nullptr);
  2250. EXPECT_EQ(scope_graph->impl_->GetFusionScopesResults(node1), nullptr);
  2251. Status ret = tensorflow_parser.UpdateFusionOpContext(scope_graph, info, fusion_op_node_context, normal_op_node_context);
  2252. EXPECT_EQ(ret, domi::SUCCESS);
  2253. delete graph;
  2254. }
  2255. TEST_F(STestTensorflowParser, Tensorflow_GetInOutPutIndex_scope_pass)
  2256. {
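// Build a dynamic_rnn fusion scope result, then check FusionOpChildIgnore and the remapped input/output indices of its boundary nodes.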
  2257. ModelParserFactory* factory = ModelParserFactory::Instance();
  2258. shared_ptr<domi::ModelParser> model_parser = factory->CreateModelParser(domi::TENSORFLOW);
  2259. TensorFlowModelParser tensorflow_parser;
  2260. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2261. ScopePassManager passmanager;
  2262. shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  2263. FusionScopesResult* fusion_rlt = new FusionScopesResult();
  2264. fusion_rlt->Init();
  2265. fusion_rlt->impl_->inputs_.insert(std::make_pair<string, vector<int32_t>>("fw/fw/ToInt32" ,{0}));
  2266. fusion_rlt->impl_->inputs_.insert(std::make_pair<string, vector<int32_t>>("bw/bw/ToInt32" ,{0}));
  2267. fusion_rlt->impl_->inputs_.insert(std::make_pair<string, vector<int32_t>>("bw/ReverseSequence" ,{0, 1}));
  2268. fusion_rlt->impl_->inputs_.insert(std::make_pair<string, vector<int32_t>>("bw/ReverseSequence" ,{1}));
  2269. fusion_rlt->impl_->outputs_.insert(std::make_pair<string, vector<int32_t>>("concat" ,{0}));
  2270. fusion_rlt->impl_->outputs_.insert(std::make_pair<string, vector<int32_t>>("fw/fw/while/Exit_3" ,{1}));
  2271. fusion_rlt->impl_->outputs_.insert(std::make_pair<string, vector<int32_t>>("fw/fw/while/Exit_4" ,{2}));
  2272. fusion_rlt->impl_->outputs_.insert(std::make_pair<string, vector<int32_t>>("bw/bw/while/Exit_3" ,{3}));
  2273. fusion_rlt->impl_->outputs_.insert(std::make_pair<string, vector<int32_t>>("bw/bw/while/Exit_4" ,{4}));
  2274. fusion_rlt->SetType("dynamic_rnn");
  2275. fusion_rlt->SetName("dynamic_rnn_node1");
  2276. scope_graph->impl_->AddFusionScopesResult(fusion_rlt);
  2277. ScopeFusionOpInfo info1;
  2278. info1.node_name = "fw/fw/ToInt32";
  2279. info1.fusion_node_name = "dynamic_rnn_node1";
  2280. info1.fusion_op_type = "dynamic_rnn";
  2281. info1.description = "";
  2282. info1.scope_pass = true;
  2283. bool ignore = false;
  2284. ignore = tensorflow_parser.FusionOpChildIgnore(scope_graph, info1);
2285. EXPECT_FALSE(ignore);
  2286. ScopeFusionOpInfo info2;
  2287. info2.node_name = "fw/fw/others";
  2288. info2.fusion_node_name = "dynamic_rnn_node1";
  2289. info2.fusion_op_type = "dynamic_rnn";
  2290. info2.description = "";
  2291. info2.scope_pass = true;
  2292. ignore = tensorflow_parser.FusionOpChildIgnore(scope_graph, info2);
2293. EXPECT_TRUE(ignore);
  2294. ScopeFusionOpInfo input_node_info;
  2295. input_node_info.node_name = "fw/fw/ToInt32";
  2296. input_node_info.fusion_node_name = "dynamic_rnn_node1";
  2297. input_node_info.fusion_op_type = "dynamic_rnn";
  2298. input_node_info.description = "";
  2299. input_node_info.scope_pass = true;
  2300. ScopeFusionOpInfo output_node_info;
  2301. output_node_info.node_name = "fw/fw/while/Exit_3";
  2302. output_node_info.fusion_node_name = "dynamic_rnn_node1";
  2303. output_node_info.fusion_op_type = "dynamic_rnn";
  2304. output_node_info.description = "";
  2305. output_node_info.scope_pass = true;
  2306. int32_t old_index = 0, new_index = -1;
  2307. Status ret = tensorflow_parser.GetInPutIndex(scope_graph, input_node_info, old_index, new_index);
  2308. EXPECT_EQ(domi::SUCCESS, ret);
2309. EXPECT_EQ(new_index, 0);
  2310. ret = tensorflow_parser.GetOutPutIndex(scope_graph, output_node_info, old_index, new_index);
  2311. EXPECT_EQ(domi::SUCCESS, ret);
2312. EXPECT_EQ(new_index, 1);
  2313. delete graph;
  2314. }
  2315. TEST_F(STestTensorflowParser, Tensorflow_AddFusionNodeDef_add_fusion_op_succ)
  2316. {
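// AddFusionNodeDef should register a fused NodeDef for "dropout" in nodedef_map_ carrying the fusion op type.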
  2317. ModelParserFactory* factory = ModelParserFactory::Instance();
  2318. shared_ptr<domi::ModelParser> model_parser = factory->CreateModelParser(domi::TENSORFLOW);
  2319. TensorFlowModelParser tensorflow_parser;
  2320. string fusion_op_name = "dropout";
  2321. string fusion_op_type = "Dropout";
  2322. string description = "test/dropout";
  2323. tensorflow_parser.fusion_op_type_map_[fusion_op_name].push_back(fusion_op_type);
  2324. tensorflow_parser.fusion_op_type_map_[fusion_op_name].push_back(description);
  2325. // op_node_context for fusion op
  2326. ge::OpNodeContext op_node_context;
  2327. op_node_context.input_map["pre_node_a"].push_back({0, 0});
  2328. op_node_context.input_map["pre_node_b"].push_back({0, 1});
  2329. tensorflow_parser.op_node_context_map_[fusion_op_name] = op_node_context;
  2330. // origin inner node def
  2331. NodeDef* node_def = new (std::nothrow) NodeDef();
  2332. node_def->set_name("scope_node_1");
  2333. node_def->set_op("Add");
  2334. tensorflow_parser.fusion_op_nodedef_map_[fusion_op_name].push_back(node_def);
  2335. ScopePassManager pass_manager;
  2336. tensorflow::GraphDef *graph = new (std::nothrow) tensorflow::GraphDef();
  2337. shared_ptr<ScopeGraph> scope_graph = pass_manager.BuildScopeGraph(graph);
  2338. vector<string> node_name_list = {fusion_op_name};
  2339. Status ret = tensorflow_parser.AddFusionNodeDef(scope_graph, node_name_list);
  2340. EXPECT_EQ(ret, SUCCESS);
  2341. EXPECT_EQ(tensorflow_parser.nodedef_map_.size(), 1);
  2342. auto fusion_node_def = tensorflow_parser.nodedef_map_[fusion_op_name];
  2343. EXPECT_NE(fusion_node_def, nullptr);
  2344. EXPECT_EQ(fusion_node_def->op(), fusion_op_type);
  2345. delete node_def;
  2346. delete graph;
  2347. tensorflow_parser.DeleteFuisonNodeDef();
  2348. }
  2349. TEST_F(STestTensorflowParser, remain_dpop_node)
  2350. {
  2351. ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2352. ge::OpDescPtr op = std::make_shared<ge::OpDesc>("dpop_123", "FrameworkOp");
  2353. ge::NodePtr node = std::make_shared<ge::Node>(op, graph);
  2354. graph->AddNode(node);
  2355. ModelParserFactory* factory = ModelParserFactory::Instance();
  2356. shared_ptr<domi::ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2357. ASSERT_TRUE(NULL != model_parser);
  2358. TensorFlowModelParser tensorflow_parser;
  2359. Status ret = tensorflow_parser.RemoveIsolateNode(graph);
  2360. EXPECT_EQ(domi::SUCCESS, ret);
  2361. }
  2362. TEST_F(STestTensorflowParser, tensorflow_UpdateEdgesControlInfo_test)
  2363. {
  2364. TensorFlowModelParser model_parser;
  2365. ge::ScopeFusionOpInfo info;
  2366. info.fusion_node_name = "conv_conv5/BatchNorm/batchnorm";
  2367. info.fusion_op_type = ge::parser::FUSIONBATCHNORM;
  2368. info.node_name = "conv_conv5/BatchNorm/batchnorm/add";
  2369. info.description = "";
  2370. info.scope_pass = false;
  2371. model_parser.UpdateEdgesControlInfo(info);
  2372. }
  2373. TEST_F(STestTensorflowParser, tensorflow_OptimizeSnapShot_test)
  2374. {
  2375. TensorFlowModelParser model_parser;
  2376. tensorflow::NodeDef *curr_mode_def = initNodeDef();
  2377. std::map<string, NodeDef *> nodedef_map;
  2378. nodedef_map.emplace("pre_node_a", curr_mode_def);
  2379. std::pair<string, int> input_data;
  2380. std::vector<string> control_list;
  2381. std::string curr_node_name = "pre_node_a";
  2382. GenOriginContext(&model_parser, curr_node_name);
  2383. Status ret = model_parser.OptimizeSnapShot(curr_mode_def, nodedef_map, input_data, control_list);
  2384. EXPECT_EQ(ret, INTERNAL_ERROR);
  2385. curr_mode_def->set_name("pre_node_a");
  2386. GenOriginContext(&model_parser, curr_node_name);
  2387. ret = model_parser.OptimizeSnapShot(curr_mode_def, nodedef_map, input_data, control_list);
  2388. EXPECT_EQ(ret, SUCCESS);
  2389. }
  2390. TEST_F(STestTensorflowParser, tensorflow_GraphDefOptimizeSnapShot_test)
  2391. {
  2392. TensorFlowModelParser model_parser;
  2393. tensorflow::GraphDef graph_def;
  2394. tensorflow::NodeDef *curr_mode_def = initNodeDef();
  2395. std::map<string, NodeDef *> nodedef_map;
  2396. nodedef_map.emplace("pre_node_a", curr_mode_def);
  2397. std::vector<NodeDef *> nodedef_to_optimize;
  2398. nodedef_to_optimize.emplace_back(curr_mode_def);
  2399. Status ret = model_parser.GraphDefOptimizeSnapShot(&graph_def, nodedef_map, nodedef_to_optimize);
  2400. EXPECT_EQ(ret, FAILED);
  2401. }
  2402. TEST_F(STestTensorflowParser, tensorflow_SetDestNodeName_test)
  2403. {
  2404. TensorFlowModelParser model_parser;
  2405. GraphDef graph;
  2406. auto arg0 = AddNode(graph, "_Arg", "arg0");
  2407. auto identity0 = AddNode(graph, "Identity", "identity0");
  2408. auto add0 = AddNode(graph, "Add", "add0");
  2409. int32_t input_idx = 0;
  2410. bool is_control = true;
  2411. bool clear_input_flag = true;
  2412. AddInput(arg0, identity0, 0);
  2413. AddInput(identity0, add0, 0);
  2414. Status ret = model_parser.SetDestNodeName(identity0, add0, input_idx, is_control, clear_input_flag);
  2415. EXPECT_EQ(ret, SUCCESS);
  2416. }
  2417. TEST_F(STestTensorflowParser, tensorflow_OptimizeDestroyTemporaryVariable_test)
  2418. {
  2419. ModelParserFactory* factory = ModelParserFactory::Instance();
  2420. shared_ptr<domi::ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2421. TensorFlowModelParser tensorflow_parser;
  2422. GraphDef graph;
  2423. auto const0 = AddNode(graph, "Const", "Const0");
  2424. auto tmpVar0 = AddNode(graph, "TemporaryVariable", "TemporaryVariable0");
  2425. auto assign0 = AddNode(graph, "Assign", "Assign0");
  2426. auto destroy0 = AddNode(graph, "DestroyTemporaryVariable", "DestroyTemporaryVariable0");
  2427. auto add0 = AddNode(graph, "Add", "Add0");
  2428. google::protobuf::Map< std::string, tensorflow::AttrValue> *node_attr_map = tmpVar0->mutable_attr();
  2429. tensorflow::AttrValue var_name_attr_value;
  2430. var_name_attr_value.set_s("temporary_variable_name");
  2431. (*node_attr_map)[ge::VAR_ATTR_NAME] = var_name_attr_value;
  2432. google::protobuf::Map<std::string, tensorflow::AttrValue>* node_attr_map_destroy = destroy0->mutable_attr();
  2433. tensorflow::AttrValue var_name_attr_value_destroy;
  2434. var_name_attr_value_destroy.set_s("destroy_temporary_variable_name");
  2435. (*node_attr_map_destroy)[ge::VAR_ATTR_NAME] = var_name_attr_value_destroy;
  2436. AddInput(tmpVar0, assign0, 0);
  2437. AddInput(assign0, destroy0, 0);
  2438. AddInput(const0, add0, 0);
  2439. AddInput(destroy0, add0, 1);
  2440. GraphDef* graphDef = &graph;
  2441. int32_t no_input_node_size_original = 0;
  2442. for (int w = 0; w < graphDef->node_size(); w++) {
  2443. tensorflow::NodeDef* nodeTmp = graphDef->mutable_node(w);
  2444. if (nodeTmp->input_size() == 0) {
  2445. no_input_node_size_original++;
  2446. }
  2447. }
  2448. Status ret = tensorflow_parser.GraphDefOptimize(graphDef);
  2449. int32_t no_input_node_size_result = 0;
  2450. for (int w = 0; w < graphDef->node_size(); w++) {
  2451. tensorflow::NodeDef* nodeTmp = graphDef->mutable_node(w);
  2452. if (nodeTmp->input_size() == 0) {
  2453. no_input_node_size_result ++;
  2454. }
  2455. }
  2456. ASSERT_EQ(ret, domi::FAILED);
  2457. ASSERT_EQ(no_input_node_size_original, no_input_node_size_result);
  2458. }
  2459. TEST_F(STestTensorflowParser, tensorflow_OptimizeDestroyTemporaryVariable_test2)
  2460. {
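// With matching temporary_variable_name attributes, GraphDefOptimize succeeds and leaves one more node without inputs than before.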
  2461. ModelParserFactory* factory = ModelParserFactory::Instance();
  2462. shared_ptr<domi::ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2463. TensorFlowModelParser tensorflow_parser;
  2464. GraphDef graph;
  2465. auto const0 = AddNode(graph, "Const", "Const0");
  2466. auto tmpVar0 = AddNode(graph, "TemporaryVariable", "TemporaryVariable0");
  2467. auto assign0 = AddNode(graph, "Assign", "Assign0");
  2468. auto destroy0 = AddNode(graph, "DestroyTemporaryVariable", "DestroyTemporaryVariable0");
  2469. auto add0 = AddNode(graph, "Add", "Add0");
  2470. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = tmpVar0->mutable_attr();
  2471. tensorflow::AttrValue var_name_attr_value;
  2472. var_name_attr_value.set_s("temporary_variable_name");
  2473. (*node_attr_map)[ge::VAR_ATTR_NAME] = var_name_attr_value;
  2474. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map_destroy = destroy0->mutable_attr();
  2475. tensorflow::AttrValue var_name_attr_value_destroy;
  2476. var_name_attr_value_destroy.set_s("temporary_variable_name");
  2477. (*node_attr_map_destroy)[ge::VAR_ATTR_NAME] = var_name_attr_value_destroy;
  2478. AddInput(tmpVar0, assign0, 0);
  2479. AddInput(assign0, destroy0, 0);
  2480. AddInput(const0, add0, 0);
  2481. AddInput(destroy0, add0, 1);
  2482. GraphDef* graphDef = &graph;
  2483. int32_t no_input_node_size_original = 0;
  2484. for (int w = 0; w < graphDef->node_size(); w++) {
  2485. tensorflow::NodeDef* nodeTmp = graphDef->mutable_node(w);
  2486. if (nodeTmp->input_size() == 0) {
  2487. no_input_node_size_original ++;
  2488. }
  2489. }
  2490. Status ret = tensorflow_parser.GraphDefOptimize(graphDef);
  2491. int32_t no_input_node_size_result = 0;
  2492. for (int w = 0; w < graphDef->node_size(); w++) {
  2493. tensorflow::NodeDef* nodeTmp = graphDef->mutable_node(w);
  2494. if (nodeTmp->input_size() == 0) {
  2495. no_input_node_size_result ++;
  2496. }
  2497. }
  2498. ASSERT_EQ(ret, domi::SUCCESS);
  2499. ASSERT_EQ(no_input_node_size_original, (no_input_node_size_result - 1));
  2500. }
  2501. TEST_F(STestTensorflowParser, tensorflow_AddControlEdgeAfterRemoveInputs_test)
  2502. {
  2503. tensorflow::GraphDef graph_def;
  2504. TensorFlowModelParser tensorflow_parser;
  2505. tensorflow::NodeDef *node_def = initNodeDef();
  2506. node_def->set_name("Add0");
  2507. node_def->set_op("add");
  2508. std::map<std::string, NodeDef *> all_node_map;
  2509. all_node_map.emplace("Add0", node_def);
  2510. std::vector<std::string> removed_inputs_vec;
  2511. removed_inputs_vec.emplace_back("Add0");
  2512. Status ret = tensorflow_parser.AddControlEdgeAfterRemoveInputs(&graph_def, node_def, all_node_map, removed_inputs_vec);
  2513. EXPECT_EQ(ret, SUCCESS);
  2514. tensorflow::NodeDef *node_swith = initNodeDef();
  2515. node_swith->set_name("switch_op");
  2516. node_swith->set_op(parser::SWITCH);
  2517. all_node_map.emplace("switch_op", node_swith);
  2518. removed_inputs_vec.clear();
  2519. removed_inputs_vec.emplace_back("switch_op");
  2520. ret = tensorflow_parser.AddControlEdgeAfterRemoveInputs(&graph_def, node_swith, all_node_map, removed_inputs_vec);
  2521. EXPECT_EQ(ret, SUCCESS);
  2522. }
  2523. TEST_F(STestTensorflowParser, tensorflow_optimizer_snapshot_no_retval_test) {
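// ParseProto on test_snapshot.pb (a snapshot graph without a _Retval node) is expected to fail.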
  2524. std::string caseDir = __FILE__;
  2525. std::size_t idx = caseDir.find_last_of("/");
  2526. caseDir = caseDir.substr(0, idx);
  2527. const std::string root_proto = caseDir + "/origin_models/test_snapshot.pb";
  2528. domi::tensorflow::GraphDef graphDef;
  2529. bool protoRet =
  2530. parser::ReadProtoFromBinaryFile(root_proto.c_str(), &graphDef);
  2531. ASSERT_EQ(protoRet, true);
  2532. TensorFlowModelParser tensorflow_parser;
  2533. ge::ComputeGraphPtr root_graph =
  2534. ge::parser::MakeShared<ge::ComputeGraph>("tmp_graph");
  2535. Status ret = tensorflow_parser.ParseProto(
  2536. reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph);
  2537. EXPECT_EQ(FAILED, ret);
  2538. }
  2539. TEST_F(STestTensorflowParser, tensorflow_RemoveInputs_test)
  2540. {
  2541. tensorflow::GraphDef graph_def;
  2542. tensorflow::NodeDef *node_def = initNodeDef();
  2543. node_def->set_name("OP");
  2544. node_def->add_input("OP/Input_1");
  2545. node_def->add_input("OP/Input_2");
  2546. std::set<uint32_t> remove_index_set;
  2547. std::map<std::string, NodeDef *> all_node_map;
  2548. TensorFlowModelParser model_parser;
  2549. Status ret = model_parser.RemoveInputs(&graph_def, node_def, remove_index_set, all_node_map);
  2550. EXPECT_EQ(ret, SUCCESS);
  2551. remove_index_set.emplace(0);
  2552. ret = model_parser.RemoveInputs(&graph_def, node_def, remove_index_set, all_node_map);
  2553. EXPECT_EQ(ret, FAILED);
  2554. }
  2555. TEST_F(STestTensorflowParser, tensorflow_UpdateInnerNodeContext_test)
  2556. {
  2557. std::string fusion_op_name = "post_node_a";
  2558. std::vector<std::string> inner_nodes_name;
  2559. inner_nodes_name.emplace_back("post_node_a");
  2560. TensorFlowModelParser model_parser;
  2561. Status ret = model_parser.UpdateInnerNodeContext(fusion_op_name, inner_nodes_name);
  2562. EXPECT_EQ(ret, INTERNAL_ERROR);
  2563. GenOriginContext(&model_parser, fusion_op_name);
  2564. ret = model_parser.UpdateInnerNodeContext(fusion_op_name, inner_nodes_name);
  2565. EXPECT_EQ(ret, SUCCESS);
  2566. }
  2567. TEST_F(STestTensorflowParser, tensorflow_UpdateInnerInputMap_test)
  2568. {
  2569. string fusion_op_name = "post_node_a";
  2570. OpNodeContext fusion_context;
  2571. std::vector<std::string> inner_nodes_name;
  2572. inner_nodes_name.emplace_back("post_node_a");
  2573. std::set<string> fusion_input_nodes;
  2574. fusion_input_nodes.insert("post_node_a");
  2575. TensorFlowModelParser model_parser;
  2576. GenOriginContext(&model_parser, fusion_op_name);
  2577. model_parser.UpdateInnerInputMap(fusion_op_name, fusion_context, inner_nodes_name, fusion_input_nodes);
  2578. }
  2579. TEST_F(STestTensorflowParser, tensorflow_UpdateInnerOutputMap_test)
  2580. {
  2581. string fusion_op_name = "post_node_a";
  2582. OpNodeContext fusion_context;
  2583. std::vector<std::string> inner_nodes_name;
  2584. inner_nodes_name.emplace_back("post_node_a");
  2585. std::set<string> fusion_output_nodes;
  2586. fusion_output_nodes.insert("post_node_a");
  2587. TensorFlowModelParser model_parser;
  2588. GenOriginContext(&model_parser, fusion_op_name);
  2589. model_parser.UpdateInnerOutputMap(fusion_op_name, fusion_context, inner_nodes_name, fusion_output_nodes);
  2590. }
  2591. TEST_F(STestTensorflowParser, tensorflow_ScopePassManager_AddPass_test)
  2592. {
  2593. ScopePassManager passmanager;
  2594. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2595. shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  2596. unique_ptr<ScopeBasePass> pass;
  2597. pass.reset(new ScopeTestPass());
  2598. EXPECT_EQ(ge::SUCCESS, passmanager.AddPass(pass));
  2599. EXPECT_NE(ge::SUCCESS, passmanager.Run(scope_graph));
  2600. delete graph;
  2601. graph = nullptr;
  2602. }
  2603. TEST_F(STestTensorflowParser, tensorflow_CheckAttrHasType_test1)
  2604. {
  2605. tensorflow::AttrValue attr_value;
  2606. attr_value.mutable_list();
  2607. Status ret = TensorFlowUtil::CheckAttrHasType(attr_value, "int");
  2608. EXPECT_EQ(FAILED, ret);
  2609. attr_value.set_type(DT_INVALID);
  2610. ret = TensorFlowUtil::CheckAttrHasType(attr_value, "type");
  2611. EXPECT_EQ(FAILED, ret);
  2612. tensorflow::AttrValue attr_value2;
  2613. AttrValue_ListValue *list = attr_value2.mutable_list();
  2614. list->add_type(tensorflow::DT_FLOAT);
  2615. list->add_type((tensorflow::DataType)30);
  2616. ret = TensorFlowUtil::CheckAttrHasType(attr_value2, "list(type)");
  2617. EXPECT_EQ(FAILED, ret);
  2618. }
  2619. TEST_F(STestTensorflowParser, tensorflow_CheckAttrHasType_test2)
  2620. {
  2621. tensorflow::AttrValue attr_value;
  2622. AttrValue_ListValue * list = attr_value.mutable_list();
  2623. list->add_type(tensorflow::DT_FLOAT);
  2624. list->add_type(tensorflow::DT_INVALID);
  2625. Status ret = TensorFlowUtil::CheckAttrHasType(attr_value, "list(type)");
  2626. EXPECT_EQ(FAILED, ret);
  2627. attr_value.set_placeholder("test");
  2628. ret = TensorFlowUtil::CheckAttrHasType(attr_value, "");
  2629. EXPECT_EQ(FAILED, ret);
  2630. }
  2631. TEST_F(STestTensorflowParser, tensorflow_TransTensorDescriptor_test)
  2632. {
  2633. tensorflow::AttrValue attr_value;
  2634. AttrValue_ListValue *list = attr_value.mutable_list();
  2635. list->add_type(tensorflow::DT_FLOAT);
  2636. ParserOperator op;
  2637. uint32_t io = TENSORFLOW_NORMAL_INPUT_TENSOR_FLAG;
  2638. std::string type = ge::parser::FUSEDBATCHNORMGRAD;
  2639. Status ret = TensorFlowUtil::TransTensorDescriptor(attr_value, &op, io, type);
  2640. EXPECT_EQ(ret, SUCCESS);
  2641. io = TENSORFLOW_NORMAL_OUTPUT_TENSOR_FLAG;
  2642. ret = TensorFlowUtil::TransTensorDescriptor(attr_value, &op, io, type);
  2643. EXPECT_EQ(ret, SUCCESS);
  2644. }
  2645. TEST_F(STestTensorflowParser, tensorflow_GraphDefOptimizeDestroyTemporaryVariable_test)
  2646. {
  2647. tensorflow::GraphDef *graph_def = nullptr;
  2648. tensorflow::NodeDef *nodeCurrent = initNodeDef();
  2649. TensorFlowModelParser model_parser;
  2650. Status ret = model_parser.GraphDefOptimizeDestroyTemporaryVariable(graph_def, nodeCurrent);
  2651. EXPECT_EQ(ret, FAILED);
  2652. }
  2653. TEST_F(STestTensorflowParser, tensorflow_GetFunctionProto_test)
  2654. {
  2655. std::cout << __FILE__ << std::endl;
  2656. std::string caseDir = __FILE__;
  2657. std::size_t idx = caseDir.find_last_of("/");
  2658. caseDir = caseDir.substr(0, idx);
  2659. std::string file = caseDir + "/origin_models/test_enter.pb";
  2660. domi::tensorflow::GraphDefLibrary graph_def_library;
  2661. TensorFlowModelParser model_parser;
  2662. Status ret = model_parser.GetFunctionProto(file, graph_def_library);
  2663. EXPECT_EQ(ret, FAILED);
  2664. }
  2665. TEST_F(STestTensorflowParser, tensorflow_GetNodeFormat_test)
  2666. {
  2667. NodeDef *node_def1 = initNodeDef();
  2668. node_def1->set_op("NoOp");
  2669. node_def1->set_name("NoOp");
  2670. NodeDef *node_def2 = initNodeDef();
  2671. node_def2->set_op("Add");
  2672. node_def2->set_name("Add0");
  2673. TfTranspose pred_transpose = TO_NCHW;
  2674. domiTensorFormat_t format = domi::DOMI_TENSOR_NC1HWC0;
  2675. std::set<const NodeDef *> visited_node;
  2676. visited_node.emplace(node_def2);
  2677. TensorFlowModelParser model_parser;
  2678. Status ret = model_parser.GetNodeFormat(node_def1, pred_transpose, format, visited_node);
  2679. EXPECT_EQ(ret, FAILED);
  2680. delete node_def1;
  2681. delete node_def2;
  2682. }
  2683. TEST_F(STestTensorflowParser, tensorflow_GetFormatTranspose_test)
  2684. {
  2685. NodeDef *transpose_node = initNodeDef();
  2686. transpose_node->set_op("Transpose");
  2687. TfTranspose transpose_direc = NO_TRANSPOSE;
  2688. TensorFlowModelParser modelParser;
  2689. Status ret = modelParser.GetFormatTranspose(transpose_node, transpose_direc);
  2690. EXPECT_EQ(ret, FAILED);
  2691. delete transpose_node;
  2692. }
  2693. TEST_F(STestTensorflowParser, tensorflow_GetFormatTranspose_test2)
  2694. {
  2695. TensorFlowModelParser modelParser;
  2696. TfTranspose transpose_direc = NO_TRANSPOSE;
  2697. NodeDef *transpose_node = initNodeDef();
  2698. GraphDef graph;
  2699. auto arg0 = AddNode(graph, "_Arg", "arg0");
  2700. auto snapshot0 = AddNode(graph, "Snapshot", "snapshot0");
  2701. auto ret0 = AddNode(graph, "_Retval", "retval0");
  2702. auto arg1 = AddNode(graph, "_Arg", "arg1");
  2703. auto snapshot1 = AddNode(graph, "Snapshot", "snapshot1");
  2704. auto ret1 = AddNode(graph, TENSORFLOWF_NODE_OP_TRANSPOSE, "retval1");
  2705. auto arg2 = AddNode(graph, "_Arg", "arg2");
  2706. auto snapshot2 = AddNode(graph, "Snapshot", "snapshot2");
  2707. auto ret2 = AddNode(graph, TENSORFLOWF_NODE_OP_TRANSPOSE, TENSORFLOWF_NODE_OP_TRANSPOSE);
  2708. AddInput(arg0, snapshot0, 0);
  2709. AddInput(snapshot0, ret0, 0);
  2710. AddInput(arg1, snapshot1, 0);
  2711. AddInput(snapshot1, ret1, 0);
  2712. AddInput(arg2, snapshot2, 0);
  2713. AddInput(snapshot2, ret2, 0);
  2714. AddInput(snapshot0, snapshot1, -1);
  2715. AddInput(snapshot1, snapshot2, -1);
  2716. bool train_flag = ge::GetParserContext().train_flag;
  2717. ge::GetParserContext().train_flag = true;
  2718. ASSERT_EQ(modelParser.GraphDefOptimize(&graph), SUCCESS);
  2719. ge::GetParserContext().train_flag = train_flag;
  2720. modelParser.nodedef_map_["arg1"] = transpose_node;
  2721. modelParser.nodedef_map_["^arg0"] = transpose_node;
  2722. Status ret = modelParser.GetFormatTranspose(ret1, transpose_direc);
  2723. EXPECT_EQ(ret, SUCCESS);
  2724. delete transpose_node;
  2725. }
  2726. TEST_F(STestTensorflowParser, tensorflow_GetTensorflowGraphInOutMap_test)
  2727. {
  2728. TensorFlowModelParser model_parser;
  2729. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2730. tensorflow::NodeDef *node_input = graph->add_node();
  2731. node_input->set_name("name_input");
  2732. node_input->set_op("op_input");
  2733. AddGraphNode(graph, "t_lstm/t_lstm_cell/Sigmoid5", "Sigmoid", "node_input");
  2734. AddGraphNode(graph, "t_lstm/t_lstm_cell/Sigmoid6", "Sigmoid", "node_input");
  2735. AddGraphNode(graph, "t_lstm/t_lstm_cell/Sigmoid7", "Sigmoid", "node_input");
  2736. AddGraphNode(graph, "t_lstm/t_lstm_cell/Mul5", "Mul", "node_input");
  2737. AddGraphNode(graph, "t_lstm/t_lstm_cell/Mul6", "Mul", "node_input");
  2738. AddGraphNode(graph, "t_lstm/t_lstm_cell/Mul7", "Mul", "node_input");
  2739. AddGraphNode(graph, "t_lstm/t_lstm_cell/Relu5", "Relu", "node_input");
  2740. AddGraphNode(graph, "t_lstm/t_lstm_cell/Relu6", "Relu", "node_input");
  2741. Status ret = model_parser.GetTensorflowGraphInOutMap(graph);
  2742. EXPECT_EQ(ret, SUCCESS);
  2743. delete graph;
  2744. }
  2745. TEST_F(STestTensorflowParser, tensorflow_RemoveIsolateNode_test)
  2746. {
  2747. TensorFlowModelParser model_parser;
  2748. tensorflow::GraphDef graph;
  2749. CreateGraphDef(graph);
  2750. Status ret = model_parser.RemoveIsolateNode(&graph);
  2751. EXPECT_EQ(ret, FAILED);
  2752. }
  2753. TEST_F(STestTensorflowParser, tensorflow_AddNodeToGraphAndMarkFormat_test)
  2754. {
  2755. TensorFlowModelParser model_parser;
  2756. ComputeGraphPtr graph = make_shared<ge::ComputeGraph>("default");
  2757. std::vector<std::string> op_node_name_list = {"Const", "placeholder0"};
  2758. GenOriginNodeDef(&model_parser, op_node_name_list);
  2759. Status ret = model_parser.AddNodeToGraphAndMarkFormat(graph, op_node_name_list);
  2760. EXPECT_EQ(ret, INTERNAL_ERROR);
  2761. }
  2762. TEST_F(STestTensorflowParser, tensorflow_ParserNodeDef1_test)
  2763. {
  2764. ge::ComputeGraphPtr compute_graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2765. ModelParserFactory* factory = ModelParserFactory::Instance();
  2766. shared_ptr<ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2767. ASSERT_TRUE(NULL != model_parser);
  2768. TensorFlowModelParser tensorflow_parser;
  2769. tensorflow_parser.adaptedOpTypeMap_["test_name"] = "POOLING";
  2770. std::mutex graphMutex;
  2771. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2772. ScopePassManager passmanager;
  2773. shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  2774. domi::tensorflow::NodeDef node_def;
  2775. node_def.set_name("test_name");
  2776. node_def.set_op("POOLING");
  2777. error_message::Context error_context;
  2778. Status ret = ge::TensorFlowModelParser::ParseNodeDef(&tensorflow_parser, compute_graph, &graphMutex, scope_graph, &node_def, error_context);
  2779. EXPECT_EQ(FAILED, ret);
  2780. delete graph;
  2781. }
  2782. TEST_F(STestTensorflowParser, tensorflow_ParserNodeDef2_test)
  2783. {
  2784. ge::ComputeGraphPtr compute_graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2785. ModelParserFactory* factory = ModelParserFactory::Instance();
  2786. shared_ptr<ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2787. ASSERT_TRUE(NULL != model_parser);
  2788. TensorFlowModelParser tensorflow_parser;
  2789. tensorflow_parser.adaptedOpTypeMap_["Pooling"] = "Pooling";
  2790. std::mutex graphMutex;
  2791. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2792. ScopePassManager passmanager;
  2793. shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  2794. REGISTER_CUSTOM_OP("Pooling")
  2795. .FrameworkType(domi::TENSORFLOW)
  2796. .OriginOpType("Pooling")
  2797. .ParseParamsFn(ParseParams)
  2798. .ImplyType(ImplyType::TVM);
  2799. register_tbe_op();
  2800. domi::tensorflow::NodeDef node_def;
  2801. node_def.set_name("Pooling");
  2802. node_def.set_op("Pooling");
  2803. error_message::Context error_context;
  2804. Status ret = ge::TensorFlowModelParser::ParseNodeDef(&tensorflow_parser, compute_graph, &graphMutex, scope_graph, &node_def, error_context);
  2805. EXPECT_EQ(FAILED, ret);
  2806. delete graph;
  2807. }
  2808. TEST_F(STestTensorflowParser, tensorflow_AddExternalGraph_test)
  2809. {
  2810. TensorFlowModelParser modelParser;
  2811. ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  2812. std::string inputNodeType = "DATA";
  2813. MakeDagGraph(subGraph, inputNodeType);
  2814. Status ret = modelParser.AddExternalGraph(subGraph);
  2815. EXPECT_EQ(ret, SUCCESS);
  2816. }
  2817. TEST_F(STestTensorflowParser, tensorflow_AddFmkNode_test)
  2818. {
  2819. TensorFlowModelParser model_parser;
  2820. ge::ComputeGraphPtr compute_graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2821. tensorflow::GraphDef *graphDef = new (std::nothrow) tensorflow::GraphDef();
  2822. ScopePassManager pass_manager;
  2823. std::shared_ptr<ScopeGraph> scope_graph = pass_manager.BuildScopeGraph(graphDef);
  2824. std::vector<std::string> op_node_name_list = {"Const", "placeholder0"};
  2825. GenOriginNodeDef(&model_parser, op_node_name_list);
  2826. Status ret = model_parser.AddFmkNode(compute_graph, scope_graph, op_node_name_list, false);
  2827. EXPECT_EQ(ret, PARAM_INVALID);
  2828. delete graphDef;
  2829. }
  2830. TEST_F(STestTensorflowParser, tensorflow_OptimizeConstNodes4CustomOp_test)
  2831. {
  2832. TensorFlowModelParser model_parser;
  2833. tensorflow::GraphDef graph_def;
  2834. CreateGraphDef(graph_def);
  2835. Status ret = model_parser.OptimizeConstNodes4CustomOp(&graph_def);
  2836. EXPECT_EQ(ret, SUCCESS);
  2837. }
  2838. TEST_F(STestTensorflowParser, OptimizeConstNodes4CustomOp_success)
  2839. {
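// OptimizeConstNodes4CustomOp returns SUCCESS and keeps all six FusedBatchNormGradV3 inputs, both before and after registering the BatchNormGrad custom op.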
  2840. GraphDef graph;
  2841. auto bn = AddNode(graph, "FusedBatchNormV3", "FusedBatchNormV3_0");
  2842. auto bn_grad = AddNode(graph, "FusedBatchNormGradV3", "FusedBatchNormGradV3_0");
  2843. AddInput(bn, bn_grad, 0);
  2844. AddInput(bn, bn_grad, 1);
  2845. AddInput(bn, bn_grad, 2);
  2846. AddInput(bn, bn_grad, 3);
  2847. AddInput(bn, bn_grad, 5);
  2848. AddInput(bn, bn_grad, 5);
  2849. GraphDef* graphDef = &graph;
  2850. int before_bn_grad_input_size = bn_grad->input_size();
  2851. ASSERT_EQ(before_bn_grad_input_size, 6);
  2852. ModelParserFactory* factory = ModelParserFactory::Instance();
  2853. shared_ptr<domi::ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2854. ge::TensorFlowModelParser tensorflow_parser;
  2855. Status ret = tensorflow_parser.OptimizeConstNodes4CustomOp(graphDef);
  2856. int after_bn_grad_input_size = bn_grad->input_size();
  2857. ASSERT_EQ(after_bn_grad_input_size, 6);
  2858. ASSERT_EQ(ret, domi::SUCCESS);
  2859. REGISTER_CUSTOM_OP("BatchNormGrad")
  2860. .FrameworkType(domi::TENSORFLOW)
  2861. .OriginOpType({"FusedBatchNormGradV3", "FusedBatchNormGradV2", "FusedBatchNormGrad"})
  2862. .ParseParamsFn(AutoMappingFn)
  2863. .DelInputWithOriginalType(5, "FusedBatchNormGradV3")
  2864. .ImplyType(ImplyType::TVM);
  2865. register_tbe_op();
  2866. ret = tensorflow_parser.OptimizeConstNodes4CustomOp(graphDef);
  2867. after_bn_grad_input_size = bn_grad->input_size();
  2868. ASSERT_EQ(after_bn_grad_input_size, 6);
  2869. ASSERT_EQ(ret, domi::SUCCESS);
  2870. }
  2871. TEST_F(STestTensorflowParser, tensorflow_ParseOpParams_test)
  2872. {
  2873. TensorFlowModelParser model_parser;
  2874. tensorflow::NodeDef *node_def = initNodeDef();
  2875. node_def->set_name("Pooling");
  2876. node_def->set_op("Pooling");
  2877. ge::OpDescPtr op = std::make_shared<ge::OpDesc>();
  2878. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  2879. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("Pooling");
  2880. Status ret = model_parser.ParseOpParams(node_def, op, op_parser);
  2881. EXPECT_EQ(ret, FAILED);
  2882. node_def->set_name("TensorArrayWrite");
  2883. node_def->set_op("TensorArrayWriteV3");
  2884. op_parser = factory->CreateOpParser("TensorArrayWrite");
  2885. ret = model_parser.ParseOpParams(node_def, op, op_parser);
  2886. EXPECT_EQ(ret, SUCCESS);
  2887. delete node_def;
  2888. }
  2889. TEST_F(STestTensorflowParser, tensorflow_AddFusionInnerNodeDef_test)
  2890. {
  2891. TensorFlowModelParser model_parser;
  2892. ge::ComputeGraphPtr compute_graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2893. tensorflow::GraphDef *graphDef = new (std::nothrow) tensorflow::GraphDef();
  2894. ScopePassManager pass_manager;
  2895. std::shared_ptr<ScopeGraph> scope_graph = pass_manager.BuildScopeGraph(graphDef);
  2896. std::vector<std::string> op_node_name_list = {"Const", "placeholder0"};
  2897. FusionScopesResult *fusion_scope_rlt = new (std::nothrow) FusionScopesResult();
  2898. fusion_scope_rlt->Init();
  2899. fusion_scope_rlt->SetName("FusionCustom");
  2900. auto &impl_scope_graph = scope_graph->impl_;
  2901. std::string scope_name = fusion_scope_rlt->Name();
  2902. impl_scope_graph->fusion_results_.insert(std::make_pair(scope_name, fusion_scope_rlt));
  2903. std::string fusion_op_name = "FusionCustom";
  2904. GenOriginNodeDef(&model_parser, op_node_name_list);
  2905. GenFusionScopesResult(scope_graph, fusion_scope_rlt, fusion_op_name);
  2906. Status ret = model_parser.AddFusionInnerNodeDef(scope_graph, fusion_op_name, op_node_name_list);
  2907. EXPECT_EQ(ret, INTERNAL_ERROR);
  2908. delete graphDef;
  2909. }
  2910. TEST_F(STestTensorflowParser, Scope_pass_test)
  2911. {
  2912. ScopePassManager passmanager;
  2913. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2914. shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  2915. EXPECT_NE(nullptr, scope_graph);
  2916. unique_ptr<ScopeBasePass> pass;
  2917. pass.reset(new ScopeTestPass());
  2918. EXPECT_EQ(domi::SUCCESS, passmanager.AddPass(pass));
  2919. scope_graph = passmanager.BuildScopeGraph(graph);
  2920. EXPECT_NE(nullptr, scope_graph);
  2921. delete graph;
  2922. }
  2923. TEST_F(STestTensorflowParser, operator_attr_set_and_get)
  2924. {
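// Round-trip each ParserOperator attribute type, then convert to and from OpDesc, including the nullptr OpDesc failure paths.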
  2925. TestOperator test_operator;
  2926. test_operator.Name("test_op");
  2927. EXPECT_EQ("test_op" , test_operator.GetName());
  2928. test_operator.Input(test_operator, 0);
  2929. test_operator.Input(test_operator, 1);
  2930. test_operator.GetOpAttrs();
  2931. int64_t pad = 1;
  2932. test_operator.Attr("pad", pad);
  2933. EXPECT_EQ(pad , test_operator.GetIntAttr("pad"));
  2934. bool bool_value = true;
  2935. test_operator.Attr("bool_value", bool_value);
  2936. EXPECT_EQ(bool_value , test_operator.GetBoolAttr("bool_value"));
2937. float float_value = 1.0f;
  2938. test_operator.Attr("float_value", float_value);
  2939. EXPECT_EQ(float_value , test_operator.GetFloatAttr("float_value"));
  2940. std::string str_value = "test_string";
  2941. test_operator.Attr("str_value", str_value);
  2942. EXPECT_EQ(str_value , test_operator.GetStringAttr("str_value"));
  2943. BoolTuple boollist_value{true, false};
  2944. test_operator.Attr("boollist_value", boollist_value);
  2945. BoolTuple get_boollist_value = test_operator.GetBoolTupleAttr("boollist_value");
  2946. EXPECT_EQ(boollist_value[0] , get_boollist_value[0]);
  2947. StringTuple strlist_value{"a", "b"};
  2948. test_operator.Attr("strlist_value", strlist_value);
  2949. StringTuple get_strlist_value = test_operator.GetStringTupleAttr("strlist_value");
  2950. EXPECT_EQ(strlist_value[0] , get_strlist_value[0]);
  2951. int64_t num = 1;
  2952. IntTuple intlist{num, num};
  2953. test_operator.Attr("intlist", intlist);
  2954. IntTuple get_intlist = test_operator.GetIntTupleAttr("intlist");
  2955. EXPECT_EQ(intlist[0] , get_intlist[0]);
  2956. FloatTuple floatlist{1.1, 1.1};
  2957. test_operator.Attr("floatlist", floatlist);
  2958. FloatTuple get_floatlist = test_operator.GetFloatTupleAttr("floatlist");
  2959. EXPECT_EQ(floatlist[0] , get_floatlist[0]);
  2960. ge::OpDescPtr op_desc = std::make_shared<ge::OpDesc>();
  2961. ParserOperator *op = &test_operator;
  2962. Status ret = ConvertToOpDesc(*op, op_desc);
  2963. EXPECT_EQ(domi::SUCCESS , ret);
  2964. TestOperator test_operator_1;
  2965. ParserOperator *op_convert = &test_operator_1;
  2966. ret = ConvertFromOpDesc(op_desc, *op_convert);
  2967. EXPECT_EQ(domi::SUCCESS , ret);
  2968. op_desc = nullptr;
  2969. ret = ConvertFromOpDesc(op_desc, *op_convert);
  2970. EXPECT_EQ(FAILED , ret);
  2971. ret = ConvertToOpDesc(*op, op_desc);
  2972. EXPECT_EQ(FAILED, ret);
  2973. }
  2974. TEST_F(STestTensorflowParser, success_frameworkop_get)
  2975. {
  2976. FrameworkOpOperator *frameworkOp=new FrameworkOpOperator();
  2977. int64_t index = 1;
  2978. std::string opdef_string = "tensorflow_parser";
  2979. frameworkOp->GetFrameworkType();
  2980. frameworkOp->GetNodeDefPkg();
  2981. frameworkOp->FuncDefPkg("func");
  2982. frameworkOp->Index(index);
  2983. frameworkOp->TfOpDef(opdef_string);
  2984. EXPECT_EQ(SUCCESS, SUCCESS);
  2985. delete frameworkOp;
  2986. }
  2987. TEST_F(STestTensorflowParser, op_set_get_success)
  2988. {
  2989. ConstantOperator op;
  2990. vector<int64_t> v;
  2991. op.VectorAttr("key", v);
  2992. op.GetDType();
  2993. }
  2994. TEST_F(STestTensorflowParser, success_argop_get)
  2995. {
  2996. ArgOpOperator *argOp=new ArgOpOperator();
  2997. int64_t index = 1;
  2998. argOp->Index(index);
  2999. argOp->GetIndex();
  3000. EXPECT_EQ(domi::SUCCESS, SUCCESS);
  3001. delete argOp;
  3002. }
  3003. TEST_F(STestTensorflowParser, success_operator)
  3004. {
  3005. ParserOperator tfOperator;
  3006. ParserOperator in_op;
  3007. uint32_t index = 0;
  3008. std::string type = "add";
  3009. std::string key = "Add";
  3010. std::vector<int64_t> value;
  3011. int64_t tmp = 0;
  3012. value.emplace_back(tmp);
  3013. tfOperator.Input(in_op, index);
  3014. tfOperator.Type(type);
  3015. tfOperator.AttrVector(key, value);
  3016. }
  3017. TEST_F(STestTensorflowParser, success_shapen_get)
  3018. {
  3019. ShapeNOperator *shapen =new ShapeNOperator();
  3020. shapen->GetInType();
  3021. shapen->GetInType();
  3022. shapen->GetOutType();
  3023. EXPECT_EQ(domi::SUCCESS, domi::SUCCESS);
  3024. delete shapen;
  3025. }
  3026. TEST_F(STestTensorflowParser, success_VarIsInitializedOpOperator_get)
  3027. {
  3028. VarIsInitializedOpOperator op;
  3029. op.Name("x");
  3030. std::vector<int64_t> value;
  3031. op.VectorAttr("key", value);
  3032. }
  3033. TEST_F(STestTensorflowParser, success_variable_op_get)
  3034. {
  3035. VariableOperator op;
  3036. uint32_t mem_type = 1;
  3037. op.Name("x");
  3038. std::vector<int64_t> value;
  3039. op.Placement("shared_name");
  3040. op.MemType(mem_type);
  3041. }
  3042. TEST_F(STestTensorflowParser, param_success_get)
  3043. {
  3044. FillOperator* fillOp=new FillOperator();
  3045. fillOp->GetDataType();
  3046. fillOp->GetAlpha();
  3047. fillOp->GetBeta();
  3048. EXPECT_EQ(domi::SUCCESS, domi::SUCCESS);
  3049. delete fillOp;
  3050. }
TEST_F(STestTensorflowParser, tensorflow_Message2Operator_ParseOperatorAttrs_test)
{
  Message2Operator mess2Op;
  tensorflow::NodeDef *node_def = initNodeDef();
  int depth = 6;
  ge::OpDescPtr op_desc = std::make_shared<ge::OpDesc>();
  ge::Operator ops = ge::OpDescUtils::CreateOperatorFromOpDesc(op_desc);
  Status ret = mess2Op.ParseOperatorAttrs(node_def, depth, ops);
  EXPECT_EQ(ret, FAILED);
  depth = 4;
  ret = mess2Op.ParseOperatorAttrs(node_def, depth, ops);
  EXPECT_EQ(ret, SUCCESS);
}
TEST_F(STestTensorflowParser, tensorflow_Pb2Json_RepeatedEnum2Json_test)
{
  Pb2Json toJson;
  ProtobufEnumValueDescriptor *enum_value_desc = new google::protobuf::EnumValueDescriptor();
  bool enum2str = true;
  Json json;
  ProtobufFieldDescriptor *field = nullptr;
  toJson.RepeatedEnum2Json(enum_value_desc, enum2str, json);
  toJson.Enum2Json(enum_value_desc, field, enum2str, json);
  enum2str = false;
  toJson.RepeatedEnum2Json(enum_value_desc, enum2str, json);
  delete enum_value_desc;
}
TEST_F(STestTensorflowParser, tensorflow_Pb2Json_TypeBytes2String_test)
{
  Pb2Json toJson;
  std::string field_name = "offset";
  std::string type_bytes = "offset";
  toJson.TypeBytes2String(field_name, type_bytes);
  field_name = "test";
  toJson.TypeBytes2String(field_name, type_bytes);
}
TEST_F(STestTensorflowParser, tensorflow_Pb2Json_RepeatedMessage2Json_test)
{
  Pb2Json toJson;
  tensorflow::NodeDef *node_def = initNodeDef();
  ProtobufFieldDescriptor *field = new google::protobuf::FieldDescriptor();
  ProtobufReflection *reflection = nullptr;
  set<string> black_fields;
  black_fields.emplace("offset");
  Json json;
  bool enum2str = true;
  toJson.RepeatedMessage2Json((*node_def), field, reflection, black_fields, json, enum2str);
  delete field;
}
TEST_F(STestTensorflowParser, tensorflow_Pb2Json_OneField2Json_test)
{
  Pb2Json toJson;
  tensorflow::NodeDef *node_def = initNodeDef();
  ProtobufFieldDescriptor *field = new google::protobuf::FieldDescriptor();
  ProtobufReflection *reflection = nullptr;
  set<string> black_fields;
  black_fields.emplace("offset");
  Json json;
  bool enum2str = true;
  Message2Operator mess2Op;
  int depth = 4;
  ge::OpDescPtr op_desc = std::make_shared<ge::OpDesc>("FusionCustom", "FusionCustom");
  ge::Operator ops = ge::OpDescUtils::CreateOperatorFromOpDesc(op_desc);
  field->CppTypeName(google::protobuf::FieldDescriptor::CPPTYPE_ENUM);
  mess2Op.ParseField(reflection, node_def, field, depth, ops);
  toJson.OneField2Json((*node_def), field, reflection, black_fields, json, enum2str, 1);
  toJson.OneField2Json((*node_def), field, reflection, black_fields, json, enum2str, 5);
  delete field;
}
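// ProtoFileParser: combine/record/write calls fail for non-existent paths and succeed once the real origin_models/caffe.proto is supplied.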
TEST_F(STestTensorflowParser, input_proto_real_path_success) {
  const char *caffe_proto_path = "./caffe/caffe.proto";
  const char *custom_proto_path = "./caffe/custom.proto";
  ProtoFileParser proto_file_parser;
  string fusion_proto_file;
  auto ret = proto_file_parser.CombineProtoFile(caffe_proto_path, custom_proto_path, fusion_proto_file);
  EXPECT_EQ(ret, FAILED);
  ret = proto_file_parser.RecordProtoMessage(caffe_proto_path);
  EXPECT_EQ(ret, FAILED);
  ret = proto_file_parser.WriteProtoFile(caffe_proto_path, custom_proto_path);
  EXPECT_EQ(ret, FAILED);
  std::cout << __FILE__ << std::endl;
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  std::string proto_file = caseDir + "/origin_models/caffe.proto";
  caffe_proto_path = proto_file.c_str();
  ret = proto_file_parser.CombineProtoFile(caffe_proto_path, caffe_proto_path, fusion_proto_file);
  EXPECT_EQ(ret, SUCCESS);
  ret = proto_file_parser.WriteProtoFile(caffe_proto_path, custom_proto_path);
  EXPECT_EQ(ret, FAILED);
  std::string dest_line = "test";
  ret = proto_file_parser.FindConflictLine(custom_proto_path, 0, dest_line);
  EXPECT_EQ(ret, FAILED);
  std::map<int, std::pair<string, string>> identifier_op_map;
  std::map<std::string, std::pair<int, string>> op_identifier_map;
  ret = proto_file_parser.ParseProtoFile(custom_proto_path, identifier_op_map, op_identifier_map);
  EXPECT_EQ(ret, FAILED);
  proto_file_parser.GetFusionProtoFile();
  std::ofstream write_tmp;
  ret = proto_file_parser.AddCustomAndConflictMessage(custom_proto_path, write_tmp);
  EXPECT_EQ(ret, FAILED);
}
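// PreChecker end-to-end flow: register ops (including a duplicated name), run name/type checks, attach causes, save the report, then clear and refresh the recorded error message.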
TEST_F(STestTensorflowParser, all_success)
{
  PreChecker::OpId id1 = (void*)(intptr_t)1;
  PreChecker::OpId id2 = (void*)(intptr_t)2;
  PreChecker::OpId id3 = (void*)(intptr_t)3;
  PreChecker::OpId id4 = (void*)(intptr_t)4;
  PreChecker &checker = PreChecker::Instance();
  EXPECT_EQ(checker.AddOp(id1, "name1", "type1"), SUCCESS);
  EXPECT_EQ(checker.AddOp(id2, "name2", "type2"), SUCCESS);
  EXPECT_EQ(checker.AddOp(id3, "name1", "type3"), SUCCESS);
  EXPECT_EQ(checker.AddOp(id4, "name4", ge::parser::DETECTIONOUTPUT), SUCCESS);
  EXPECT_EQ(checker.CheckName(id1), SUCCESS);
  EXPECT_EQ(checker.CheckName(id2), SUCCESS);
  EXPECT_EQ(checker.CheckName(id3), SUCCESS);
  EXPECT_EQ(checker.CheckName(id4), SUCCESS);
  EXPECT_EQ(checker.CheckType(id1), SUCCESS);
  EXPECT_EQ(checker.CheckType(id2), SUCCESS);
  EXPECT_EQ(checker.CheckType(id3), SUCCESS);
  EXPECT_EQ(checker.CheckType(id4), SUCCESS);
  EXPECT_EQ(checker.AddCause(id1, PreChecker::ErrorCode::OK, "msg"), SUCCESS);
  EXPECT_EQ(checker.AddCause(id1, PreChecker::ErrorCode::PARAM_INVALID, "msg"), domi::SUCCESS);
  PreChecker::Cause cause;
  cause.code = PreChecker::ErrorCode::TYPE_AMBIGUOUS;
  cause.message = "msg";
  EXPECT_EQ(checker.AddCause(id1, cause), SUCCESS);
  EXPECT_EQ(checker.HasError(), true);
  EXPECT_EQ(checker.Save("check_result.json"), SUCCESS);
  std::string msg = "msg";
  Status ret = checker.Clear(id1, msg);
  EXPECT_EQ(ret, SUCCESS);
  checker.Clear();
  checker.RefreshErrorMessageByName("name1", PreChecker::ErrorCode::PARAM_INVALID, "node repeated in");
}
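// TBEPluginLoader: so-name handling, parser .so lookup and custom op path resolution driven by the ASCEND_OPP_PATH environment variable.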
TEST_F(STestTensorflowParser, tensorflow_tbe_tfplugin_loader_test)
{
  TBEPluginLoader pluginLoad;
  vector<string> fileList = {};
  string caffeParserPath = "";
  string full_name = "dabc";
  string caffe_parser_so_suff = "abc";
  pluginLoad.ProcessSoFullName(fileList, caffeParserPath, full_name, caffe_parser_so_suff);
  ASSERT_EQ(caffeParserPath, full_name);
  pluginLoad.ClearHandles_();
  std::cout << __FILE__ << std::endl;
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  std::string proto_file = caseDir + "/origin_models/";
  std::string path = proto_file;
  std::string caffe_parser_path = path;
  pluginLoad.FindParserSo(path, fileList, caffe_parser_path);
  setenv("ASCEND_OPP_PATH", "aaa", 1);
  std::string customop_path = "";
  pluginLoad.GetCustomOpPath(customop_path);
  ASSERT_EQ(customop_path, "aaa/framework/custom/:aaa/framework/built-in/tensorflow/");
  Status ret = pluginLoad.Finalize();
  EXPECT_EQ(ret, SUCCESS);
}
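// DataOpParser: initialize 5D input/output tensors and parse the shape of a Data op.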
TEST_F(STestTensorflowParser, tensorflow_data_op_parser_test)
{
  std::vector<int64_t> shape = {1, 1, 224, 224};
  ge::GeTensorDesc tensor_desc;
  DataOpParser opParser;
  Status ret = opParser.Init5DInputTensor(shape, tensor_desc);
  EXPECT_EQ(ret, SUCCESS);
  ret = opParser.Init5DOutputTensor(shape, tensor_desc);
  EXPECT_EQ(ret, SUCCESS);
  ge::OpDescPtr op = std::make_shared<ge::OpDesc>();
  ret = opParser.ParseShape(shape, op);
}
TEST_F(STestTensorflowParser, read_proto_from_mem_test)
{
  tensorflow::NodeDef *node_def = initNodeDef();
  const char *data = nullptr;
  int size = 3;
  bool ret = parser::ReadProtoFromMem(data, size, node_def);
  EXPECT_EQ(false, ret);
  data = "not file";
  ret = parser::ReadProtoFromMem(data, size, node_def);
  EXPECT_EQ(false, ret);
}
TEST_F(STestTensorflowParser, tensorflow_GetOriginalType_test)
{
  ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  ge::OpDescPtr op = std::make_shared<ge::OpDesc>("fusionCustom", parser::FRAMEWORKOP);
  ge::NodePtr node = std::make_shared<ge::Node>(op, graph);
  string type = parser::FRAMEWORKOP;
  Status ret = parser::GetOriginalType(node, type);
  EXPECT_EQ(ret, INTERNAL_ERROR);
}
TEST_F(STestTensorflowParser, tensorflow_ReadBytesFromBinaryFile_test)
{
  const char *file_name = nullptr;
  char *buffer = nullptr;
  int length = 1;
  bool ret = parser::ReadBytesFromBinaryFile(file_name, &buffer, length);
  EXPECT_EQ(ret, false);
  file_name = "./caffe.proto";
  ret = parser::ReadBytesFromBinaryFile(file_name, &buffer, length);
  EXPECT_EQ(ret, false);
  std::cout << __FILE__ << std::endl;
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  std::string proto_file = caseDir + "/origin_models/caffe.proto";
  file_name = proto_file.c_str();
  ret = parser::ReadBytesFromBinaryFile(file_name, &buffer, length);
  EXPECT_EQ(ret, true);
  char path[4096 + 1] = { 0 };
  memset(path, 'a', 4096);
  std::string realPath = parser::RealPath(path);
  EXPECT_EQ(realPath, "");
  const char *real_path = nullptr;
  realPath = parser::RealPath(real_path);
  EXPECT_EQ(realPath, "");
}
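// AclGraphParserUtil: fp16 input/output node options are rejected unless the layout flags are valid boolean strings.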
TEST_F(STestTensorflowParser, tensorflow_AclGraphParserUtil_ParseAclInputFp16Nodes_test)
{
  AclGraphParserUtil parserUtil;
  ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  std::string input_fp16_nodes = "Add";
  std::string is_input_adjust_hw_layout = "is_input_adjust_hw_layout";
  Status ret = parserUtil.ParseAclInputFp16Nodes(graph, input_fp16_nodes, is_input_adjust_hw_layout);
  EXPECT_EQ(ret, PARAM_INVALID);
  is_input_adjust_hw_layout = "true";
  ret = parserUtil.ParseAclInputFp16Nodes(graph, input_fp16_nodes, is_input_adjust_hw_layout);
  EXPECT_EQ(ret, PARAM_INVALID);
  vector<string> adjust_fp16_format_vec = {"true", "false"};
  uint32_t index = 1;
  ge::OpDescPtr op_desc = std::make_shared<ge::OpDesc>();
  parserUtil.AddAttrsForInputNodes(adjust_fp16_format_vec, input_fp16_nodes, index, op_desc);
  std::string is_output_fp16 = "is_output_fp16";
  ret = parserUtil.ParseAclOutputFp16NodesFormat(is_output_fp16);
  EXPECT_EQ(ret, PARAM_INVALID);
  is_output_fp16 = "false";
  ret = parserUtil.ParseAclOutputFp16NodesFormat(is_output_fp16);
  EXPECT_EQ(ret, SUCCESS);
  is_output_fp16 = "true";
  ret = parserUtil.ParseAclOutputFp16NodesFormat(is_output_fp16);
  EXPECT_EQ(ret, SUCCESS);
}
TEST_F(STestTensorflowParser, tensorflow_ModelSaver_test)
{
  const char *file_path = nullptr;
  const Json model = {{"a", "b"}};
  Status ret = ge::parser::ModelSaver::SaveJsonToFile(file_path, model);
  EXPECT_EQ(ret, FAILED);
  file_path = "./origin_models/";
  ret = ge::parser::ModelSaver::SaveJsonToFile(file_path, model);
  EXPECT_EQ(ret, FAILED);
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  std::string proto_file = caseDir + "/origin_models/caffe.proto";
  file_path = proto_file.c_str();
  ret = ge::parser::ModelSaver::SaveJsonToFile(file_path, model);
  char path[4096 + 1] = { 0 };
  memset(path, 'a', 4096);
  EXPECT_EQ(-1, ge::parser::ModelSaver::CreateDirectory(path));
  EXPECT_EQ(-1, ge::parser::ModelSaver::CheckPath(path));
}
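// Parser factories return null parsers for the reserved framework type and unknown op types.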
TEST_F(STestTensorflowParser, create_weights_parser_failed)
{
  WeightsParserFactory *factory = WeightsParserFactory::Instance();
  shared_ptr<WeightsParser> weight_parser = factory->CreateWeightsParser(FRAMEWORK_RESERVED);
  ASSERT_TRUE(NULL == weight_parser);
  ModelParserFactory *modelFactory = ModelParserFactory::Instance();
  shared_ptr<ModelParser> model_parser = modelFactory->CreateModelParser(FRAMEWORK_RESERVED);
  ASSERT_TRUE(NULL == model_parser);
  std::shared_ptr<OpParserFactory> parserFactory = OpParserFactory::Instance(domi::FrameworkType::CAFFE);
  std::shared_ptr<OpParser> fusion_op_parser = parserFactory->CreateFusionOpParser(ge::parser::DATA);
  ASSERT_TRUE(NULL == fusion_op_parser);
  std::shared_ptr<OpParser> op_parser = parserFactory->CreateOpParser("10");
  ASSERT_TRUE(NULL == op_parser);
}
TEST_F(STestTensorflowParser, custom_parser_adapter_register)
{
  using PARSER_CREATOR_FN = std::function<std::shared_ptr<OpParser>(void)>;
  PARSER_CREATOR_FN func = CustomParserAdapterRegistry::Instance()->GetCreateFunc(domi::TENSORFLOW);
  CustomParserAdapterRegistry::Instance()->Register(domi::TENSORFLOW, func);
  CustomParserAdapterRegistry::Instance()->Register(domi::TENSORFLOW, func);
  func = CustomParserAdapterRegistry::Instance()->GetCreateFunc(domi::FRAMEWORK_RESERVED);
  ASSERT_EQ(nullptr, func);
}
TEST_F(STestTensorflowParser, tensorflow_parser_api_test)
{
  std::map<std::string, std::string> options = {{"ge.runFlag", "1"}};
  Status ret = ParserInitialize(options);
  EXPECT_EQ(ret, SUCCESS);
  ret = ParserInitialize(options);
  EXPECT_EQ(ret, SUCCESS);
  ret = ParserFinalize();
  EXPECT_EQ(ret, SUCCESS);
  ret = ParserFinalize();
  EXPECT_EQ(ret, SUCCESS);
}
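// parser::fp16_t: exercise conversions, arithmetic and comparison operators, assignment from all integral/floating types, and the IsInf() boundary values 0x7C00/0xFC00.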
TEST_F(STestTensorflowParser, tensorflow_FP16_parser_test)
{
  parser::fp16_t fp16;
  fp16.ToDouble();
  fp16.ToInt8();
  fp16.ToUInt8();
  fp16.ToInt16();
  fp16.ToUInt16();
  fp16.ToInt32();
  fp16.ToUInt32();
  fp16.IsInf();
  fp16.operator+(fp16);
  fp16.operator-(fp16);
  fp16.operator*(fp16);
  fp16.operator/(fp16);
  fp16.operator+=(fp16);
  fp16.operator-=(fp16);
  fp16.operator*=(fp16);
  fp16.operator/=(fp16);
  fp16.operator==(fp16);
  fp16.operator!=(fp16);
  fp16.operator>(fp16);
  fp16.operator>=(fp16);
  fp16.operator<(fp16);
  fp16.operator<=(fp16);
  fp16.operator=(fp16);
  float f_val = 0.1;
  fp16.operator=(f_val);
  double d_val = 0.2;
  fp16.operator=(d_val);
  int8_t i_val = 1;
  fp16.operator=(i_val);
  uint8_t ui_val = 2;
  fp16.operator=(ui_val);
  int16_t i_vals = 1;
  fp16.operator=(i_vals);
  uint16_t ui16_val = 1;
  fp16.operator=(ui16_val);
  ui16_val = 0;
  fp16.operator=(ui16_val);
  ui16_val = 1;
  fp16.operator=(ui16_val);
  int32_t i32_val = 0;
  fp16.operator=(i32_val);
  i32_val = 1;
  fp16.operator=(i32_val);
  uint32_t ui32_val = 0;
  fp16.operator=(ui32_val);
  ui32_val = 1;
  fp16.operator=(ui32_val);
  float f_val1 = 2139095000.2;
  ge::parser::fp16_t fp16_1, fp16_2;
  fp16_1.operator=(fp16_2);
  fp16_1.operator=(f_val1);
  float f_val2 = 0.0000112;
  fp16_1.operator=(f_val2);
  float f_val3 = 0.0000000299;
  fp16_1.operator=(f_val3);
  float f_val4 = 0.00000000299;
  fp16_1.operator=(f_val4);
  uint32_t u_val1 = 4095;
  fp16_1.operator=(u_val1);
  uint16_t u16_val1 = 4095;
  fp16_1.operator=(u16_val1);
  int16_t int_val1 = 0;
  fp16_1.operator=(int_val1);
  int16_t int_val2 = -32767;
  fp16_1.operator=(int_val2);
  i_val = -0x7FFFFFFF;
  fp16_1.operator=(i_val);
  fp16.operator=(f_val1);
  float f = fp16;
  double d = fp16;
  int8_t int8 = fp16;
  uint8_t uint8 = fp16;
  uint16_t uint16 = fp16;
  int32_t int32 = fp16;
  uint32_t uint32 = fp16;
  int64_t int64 = fp16;
  uint64_t uint64 = fp16;
  (void)f;
  (void)d;
  (void)int8;
  (void)uint8;
  (void)uint16;
  (void)int32;
  (void)uint32;
  (void)int64;
  (void)uint64;
  parser::fp16_t val;
  val.val = 0x7C00;
  val.IsInf();
  val.val = 0xFC00;
  val.IsInf();
  parser::fp16_t fp16_3, fp16_4;
  fp16_3.val = 1;
  fp16_4.val = 2;
  fp16_4.operator/(fp16_3);
  fp16.val = 21504;
  int16_t int16 = fp16;
  int8 = fp16;
}
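// AclGraphParserUtil::AclParserInitialize fails without a framework type option and succeeds once ge::FRAMEWORK_TYPE is set.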
TEST_F(STestTensorflowParser, tensorflow_AclParserInitialize_test)
{
  AclGraphParserUtil parseUtil;
  std::map<std::string, std::string> options;
  Status ret = parseUtil.AclParserInitialize(options);
  EXPECT_EQ(ret, FAILED);
  options = {{ge::FRAMEWORK_TYPE, "2"}};
  ret = parseUtil.AclParserInitialize(options);
  EXPECT_EQ(ret, SUCCESS);
}
TEST_F(STestTensorflowParser, tensorflow_GetOutputLeaf_test)
{
  AclGraphParserUtil parseUtil;
  ge::ComputeGraphPtr compute_graph = build_graph(true);
  ge::NodePtr output_nodes_info = compute_graph->FindNode("Relu3");
  std::vector<std::pair<ge::NodePtr, int32_t>> output_nodes = {{output_nodes_info, 0}};
  ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>("default");
  ge::NodePtr node = AddNode(compute_graph, "K", parser::NETOUTPUT, 1, 1);
  Status ret = parseUtil.GetOutputLeaf(node, output_nodes);
  EXPECT_EQ(ret, FAILED);
}
TEST_F(STestTensorflowParser, graph_pass_error)
{
  ComputeGraphPtr graph = std::make_shared<ComputeGraph>("test");
  ErrorGraphPass pass;
  ge::parser::PassManager passManager;
  std::vector<std::pair<string, GraphPass *>> passes;
  passes.emplace_back("", &pass);
  Status status = passManager.Run(graph, passes);
  EXPECT_EQ(domi::FAILED, status);
}
TEST_F(STestTensorflowParser, parser_FindFmkNodeCluser_success)
{
  ComputeGraphPtr graph = std::make_shared<ComputeGraph>("FrameworkOp");
  ParserGraphOptimizer graphOptimizer(graph, domi::TENSORFLOW);
  ge::NodePtr node = AddNode(graph, "K", parser::FRAMEWORK_OP_TYPE, 1, 1);
  ge::NodePtr output_nodes_info = graph->FindNode("Relu3");
  std::unordered_map<string, vector<NodePtr>> node_cluser_Map({
      {"x", {node, output_nodes_info}},
  });
  Status ret = graphOptimizer.FindFmkNodeCluser(node_cluser_Map);
  EXPECT_EQ(ret, SUCCESS);
}
TEST_F(STestTensorflowParser, parser_RebuildOutputAnchors_test)
{
  ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  ParserGraphOptimizer graphOptimizer(subGraph, domi::TENSORFLOW);
  string inputNodeType = "DATA";
  MakeDagGraph(subGraph, inputNodeType);
  vector<ge::InDataAnchorPtr> in_anchor;
  vector<ge::OutDataAnchorPtr> out_anchor;
  for (ge::NodePtr node : subGraph->GetAllNodes()) {
    for (auto out : node->GetAllOutDataAnchors()) {
      for (auto in : node->GetAllInDataAnchors()) {
        if (in->GetPeerOutAnchor() != nullptr && in->GetPeerOutAnchor()->GetOwnerNode()->GetOpDesc()->GetType() == parser::DATA) {
          in_anchor.push_back(in);
        }
      }
      for (auto i : out->GetPeerInDataAnchors()) {
        if (i->GetOwnerNode()->GetOpDesc()->GetType() == parser::NETOUTPUT) {
          out_anchor.push_back(out);
        }
      }
    }
  }
  OpDescPtr fusion_op_desc = make_shared<ge::OpDesc>("FusionCustom", ge::parser::CONSTANT);
  Status ret = graphOptimizer.RebuildOutputAnchors(out_anchor, fusion_op_desc);
  EXPECT_EQ(domi::SUCCESS, ret);
  ret = graphOptimizer.RebuildInputAnchors(in_anchor, fusion_op_desc);
  EXPECT_EQ(domi::SUCCESS, ret);
}
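// ParserGraphOptimizer: anchor linking, fusion marking, graph update, fusion-node rebuild and node insertion on small hand-built graphs.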
TEST_F(STestTensorflowParser, parser_LinkInnerAnchor_test)
{
  ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  NodePtr node_a = AddNode(subGraph, "A", parser::NETOUTPUT, 1, 1);
  NodePtr node_b = AddNode(subGraph, "B", parser::NETOUTPUT, 1, 1);
  unordered_map<string, ge::NodePtr> node_map;
  node_map.insert(pair<string, ge::NodePtr>("A", node_a));
  node_map.insert(pair<string, ge::NodePtr>("B", node_b));
  ParserGraphOptimizer graphOptimizer(subGraph, domi::TENSORFLOW);
  graphOptimizer.LinkInnerAnchor(node_map);
}
TEST_F(STestTensorflowParser, parser_MarkForFusion_test)
{
  ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  ParserGraphOptimizer graphOptimizer(subGraph, domi::TENSORFLOW);
  ge::NodePtr node = AddNode(subGraph, "K", parser::FRAMEWORK_OP_TYPE, 1, 1);
  ge::NodePtr output_nodes_info = subGraph->FindNode("Relu3");
  std::unordered_map<string, vector<NodePtr>> node_cluser_Map({
      {"x", {node, output_nodes_info}},
  });
  Status ret = graphOptimizer.MarkForFusion(node_cluser_Map);
  EXPECT_EQ(ret, INTERNAL_ERROR);
}
TEST_F(STestTensorflowParser, parser_UpdateGraph_test)
{
  std::vector<NodePtr> nodes;
  ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  ParserGraphOptimizer graphOptimizer(subGraph, domi::TENSORFLOW);
  NodePtr node_a = AddNode(subGraph, "A", parser::NETOUTPUT, 1, 1);
  NodePtr node_b = AddNode(subGraph, "B", parser::NETOUTPUT, 1, 1);
  nodes.emplace_back(node_a);
  nodes.emplace_back(node_b);
  Status ret = graphOptimizer.UpdateGraph(nodes);
  EXPECT_EQ(ret, PARAM_INVALID);
}
TEST_F(STestTensorflowParser, parser_RebuildFusionNode_test)
{
  ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  ParserGraphOptimizer graphOptimizer(graph, domi::TENSORFLOW);
  string inputNodeType = "DATA";
  MakeDagGraph(graph, inputNodeType);
  vector<ge::InDataAnchorPtr> input_anchors;
  vector<ge::OutDataAnchorPtr> output_anchors;
  for (ge::NodePtr node : graph->GetAllNodes()) {
    for (auto out : node->GetAllOutDataAnchors()) {
      for (auto in : node->GetAllInDataAnchors()) {
        if (in->GetPeerOutAnchor() != nullptr && in->GetPeerOutAnchor()->GetOwnerNode()->GetOpDesc()->GetType() == parser::DATA) {
          input_anchors.push_back(in);
        }
      }
      for (auto i : out->GetPeerInDataAnchors()) {
        if (i->GetOwnerNode()->GetOpDesc()->GetType() == parser::NETOUTPUT) {
          output_anchors.push_back(out);
        }
      }
    }
  }
  map<ge::OutDataAnchorPtr, vector<ge::InDataAnchorPtr>> output_in_map;
  vector<ge::InControlAnchorPtr> input_control_anchors;
  vector<ge::OutControlAnchorPtr> output_control_anchors;
  ge::OpDescPtr op = std::make_shared<ge::OpDesc>("dpop_123", "FrameworkOp");
  ge::NodePtr fusion_node = std::make_shared<ge::Node>(op, graph);
  Status ret = graphOptimizer.RebuildFusionNode(input_anchors, output_anchors, output_in_map, input_control_anchors, output_control_anchors, fusion_node);
  EXPECT_EQ(ret, FAILED);
}
TEST_F(STestTensorflowParser, parser_InsertNode_test)
{
  std::vector<NodePtr> nodes;
  ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  ParserGraphOptimizer graphOptimizer(subGraph, domi::TENSORFLOW);
  auto merge_node = AddNode(subGraph, "Merge", parser::MERGE, 1, 2);
  auto node1 = AddNode(subGraph, "Op1", parser::RELU, 1, 1);
  auto node2 = AddNode(subGraph, "Op2", parser::CONVOLUTION, 1, 1);
  auto node3 = AddNode(subGraph, "Op3", parser::CONVOLUTION, 1, 1);
  nodes.emplace_back(merge_node);
  nodes.emplace_back(node1);
  nodes.emplace_back(node2);
  nodes.emplace_back(node3);
  vector<ge::InDataAnchorPtr> in_anchor;
  vector<ge::OutDataAnchorPtr> out_anchor;
  map<ge::OutDataAnchorPtr, vector<ge::InDataAnchorPtr>> output_in_map;
  vector<ge::InControlAnchorPtr> input_control_anchors;
  vector<ge::OutControlAnchorPtr> output_control_anchors;
  unordered_map<string, ge::NodePtr> node_map;
  node_map.insert(pair<string, ge::NodePtr>("A", merge_node));
  node_map.insert(pair<string, ge::NodePtr>("B", node1));
  node_map.insert(pair<string, ge::NodePtr>("C", node2));
  node_map.insert(pair<string, ge::NodePtr>("D", node3));
  Status ret = graphOptimizer.InsertNode(subGraph, nodes, in_anchor, out_anchor, output_in_map, input_control_anchors, output_control_anchors, node_map);
  EXPECT_EQ(ret, PARAM_INVALID);
}
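// TensorFlowModelParser helpers: GeStoi on a non-numeric index string, ConstOpNeedUpdate over a small nodedef_map_, and the scope-fusion input/output map update path.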
TEST_F(STestTensorflowParser, parser_GeStoi_test)
{
  TensorFlowModelParser model_parser;
  string input_node_name = "dynamic_rnn_node1";
  string index_str = "dynamic_rnn";
  int32_t index = 0;
  Status ret = model_parser.GeStoi(input_node_name, index_str, &index);
  EXPECT_EQ(ret, INTERNAL_ERROR);
}
TEST_F(STestTensorflowParser, parser_ConstOpNeedUpdate_test)
{
  ge::TensorFlowModelParser tensorflow_parser;
  NodeDef *op_node_def = new NodeDef();
  op_node_def->set_name("OP");
  op_node_def->add_input("OP/Input_1");
  op_node_def->set_op(TENSORFLOWF_NODE_OP_CONST);
  NodeDef *input_node = new NodeDef();
  input_node->set_op(TENSORFLOWF_NODE_OP_IDENTITY);
  input_node->add_input("OP/Input_1/Input_2");
  NodeDef *input_2 = new NodeDef();
  input_2->set_op(TENSORFLOWF_NODE_OP_IDENTITY);
  tensorflow_parser.nodedef_map_["OP"] = op_node_def;
  tensorflow_parser.nodedef_map_["OP/Input_1"] = input_node;
  tensorflow_parser.nodedef_map_["OP/Input_1/Input_2"] = input_2;
  std::string op_name = "OP/Input_1/Input_2";
  Status ret = tensorflow_parser.ConstOpNeedUpdate(op_name);
  EXPECT_EQ(ret, true);
  op_name = "OP";
  ret = tensorflow_parser.ConstOpNeedUpdate(op_name);
  EXPECT_EQ(ret, true);
  delete op_node_def;
  delete input_node;
  delete input_2;
}
TEST_F(STestTensorflowParser, parser_UppdateInputMap_test)
{
  ge::TensorFlowModelParser tensorflow_parser;
  ScopeFusionOpInfo info;
  ge::OpNodeContext normal_op_node_context;
  ge::OpNodeContext fusion_op_node_context;
  string fusion_op_name = "dropout";
  normal_op_node_context.input_map["dropout"].push_back({0, 0});
  normal_op_node_context.input_map["conv_conv5/BatchNorm/moving_variance"].push_back({0, 1});
  normal_op_node_context.output_map["dropout"].push_back({1, 0});
  normal_op_node_context.output_map["conv_conv5/BatchNorm/batchnorm/add/y"].push_back({-1, -1});
  tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  ScopePassManager passmanager;
  shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  NodeDef *node1 = graph->add_node();
  node1->set_name("dropout");
  node1->set_op(TENSORFLOWF_NODE_OP_IDENTITY);
  node1->add_input("conv_conv5/BatchNorm/moving_variance");
  node1->add_input("conv_conv5/BatchNorm/batchnorm/add/y");
  NodeDef *node2 = graph->add_node();
  node2->set_name("conv_conv5/BatchNorm/moving_variance");
  node2->set_op(TENSORFLOWF_NODE_OP_IDENTITY);
  NodeDef *node3 = graph->add_node();
  node3->set_name("conv_conv5/BatchNorm/batchnorm/add/y");
  node3->set_op(TENSORFLOWF_NODE_OP_IDENTITY);
  info.fusion_node_name = "conv_conv5/BatchNorm/batchnorm";
  info.fusion_op_type = parser::FUSIONBATCHNORM;
  info.node_name = "conv_conv5/BatchNorm/batchnorm/add";
  info.description = "";
  info.scope_pass = true;
  tensorflow_parser.nodedef_map_["dropout"] = node1;
  tensorflow_parser.nodedef_map_["conv_conv5/BatchNorm/moving_variance"] = node2;
  tensorflow_parser.nodedef_map_["conv_conv5/BatchNorm/batchnorm/add/y"] = node3;
  Status ret = tensorflow_parser.UppdateInputMap(scope_graph, info, fusion_op_node_context, normal_op_node_context);
  EXPECT_EQ(ret, domi::SUCCESS);
  ret = tensorflow_parser.UppdateOutputMap(scope_graph, info, fusion_op_node_context, normal_op_node_context);
  TensorFlowWeightsParser weights_parser;
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  std::string proto_file = caseDir + "/origin_models/tf_add.pb";
  const char *file = proto_file.c_str();
  ge::Graph graphs;
  Status weightsRet = weights_parser.Parse(file, graphs);
  EXPECT_EQ(weightsRet, SUCCESS);
  delete graph;
}
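// Parse test_getnext_dynamic_fusion.pbtxt and check the fused graph keeps three direct nodes; then verify AddDumpOriginName accumulates the original-name attribute across repeated calls.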
TEST_F(STestTensorflowParser, tensorflow_optimizer_fmk_fusion_op) {
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  const std::string root_proto = caseDir + "/origin_models/test_getnext_dynamic_fusion.pbtxt";
  domi::tensorflow::GraphDef graphDef;
  bool protoRet = parser::ReadProtoFromText(root_proto.c_str(), &graphDef);
  ASSERT_EQ(protoRet, true);
  TensorFlowModelParser tensorflow_parser;
  ge::ComputeGraphPtr root_graph = ge::parser::MakeShared<ge::ComputeGraph>("tmp_graph");
  Status ret = tensorflow_parser.ParseProto(reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph);
  EXPECT_EQ(ret, SUCCESS);
  EXPECT_EQ(root_graph->GetDirectNode().size(), 3);
}
TEST_F(STestTensorflowParser, AddDumpOriginName_test)
{
  GeTensorDesc scalar_tensor(GeShape(), ge::FORMAT_NCHW, ge::DT_FLOAT);
  ge::ComputeGraphPtr parent_graph = std::make_shared<ge::ComputeGraph>("parent_graph");
  ge::OpDescPtr parent = std::make_shared<ge::OpDesc>();
  parent->SetType("Foo");
  parent->SetName("foo");
  ge::NodePtr foo = parent_graph->AddNode(parent);
  ge::ComputeGraphPtr sub_graph = std::make_shared<ge::ComputeGraph>("sub_graph");
  auto child = std::make_shared<ge::OpDesc>();
  child->SetType("Bar");
  child->SetName("bar");
  ge::NodePtr bar = sub_graph->AddNode(child);
  AddDumpOriginName(foo, "f", sub_graph);
  std::vector<std::string> original_names;
  (void)ge::AttrUtils::GetListStr(bar->GetOpDesc(), ge::ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, original_names);
  EXPECT_EQ(original_names.size(), 1U);
  EXPECT_EQ(original_names[0], "foo/f/bar");
  (void)ge::AttrUtils::SetListStr(foo->GetOpDesc(), ge::ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, original_names);
  AddDumpOriginName(foo, "f", sub_graph);
  original_names.clear();
  (void)ge::AttrUtils::GetListStr(bar->GetOpDesc(), ge::ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, original_names);
  EXPECT_EQ(original_names.size(), 1U);
  EXPECT_EQ(original_names[0], "foo/f/bar/f/bar");
  original_names.push_back("abc");
  (void)ge::AttrUtils::SetListStr(foo->GetOpDesc(), ge::ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, original_names);
  AddDumpOriginName(foo, "f", sub_graph);
  original_names.clear();
  (void)ge::AttrUtils::GetListStr(bar->GetOpDesc(), ge::ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, original_names);
  EXPECT_EQ(original_names.size(), 2U);
  EXPECT_EQ(original_names[0], "foo/f/bar/f/bar/f/bar");
  EXPECT_EQ(original_names[1], "abc");
}
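// TBEPluginLoader vendor configuration: a valid load_priority line in vendors/config.ini is parsed into the vendor list and drives the op-proto/custom-op search paths; an empty or malformed config is rejected.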
TEST_F(STestTensorflowParser, test_plugin_manager_getopp_plugin_vendors_01) {
  std::string opp_path = __FILE__;
  opp_path = opp_path.substr(0, opp_path.rfind("/") + 1);
  setenv("ASCEND_OPP_PATH", opp_path.c_str(), 1);
  std::string path_vendors = opp_path + "vendors";
  std::string path_config = path_vendors + "/config.ini";
  system(("mkdir -p " + path_vendors).c_str());
  system(("echo 'load_priority=customize,mdc,lhisi' > " + path_config).c_str());
  std::vector<std::string> vendors;
  Status ret = TBEPluginLoader::GetOppPluginVendors(path_config, vendors);
  EXPECT_EQ(ret, SUCCESS);
  EXPECT_EQ(vendors[0], "customize");
  EXPECT_EQ(vendors[1], "mdc");
  EXPECT_EQ(vendors[2], "lhisi");
}
TEST_F(STestTensorflowParser, test_plugin_manager_getopp_plugin_vendors_02) {
  std::string opp_path = __FILE__;
  opp_path = opp_path.substr(0, opp_path.rfind("/") + 1);
  setenv("ASCEND_OPP_PATH", opp_path.c_str(), 1);
  std::string path_vendors = opp_path + "vendors";
  std::string path_config = path_vendors + "/config.ini";
  system(("mkdir -p " + path_vendors).c_str());
  system(("echo '' > " + path_config).c_str());
  std::vector<std::string> vendors;
  Status ret = TBEPluginLoader::GetOppPluginVendors(path_config, vendors);
  EXPECT_NE(ret, SUCCESS);
}
TEST_F(STestTensorflowParser, test_plugin_manager_getopp_plugin_vendors_03) {
  std::string opp_path = __FILE__;
  opp_path = opp_path.substr(0, opp_path.rfind("/") + 1);
  setenv("ASCEND_OPP_PATH", opp_path.c_str(), 1);
  std::string path_vendors = opp_path + "vendors";
  std::string path_config = path_vendors + "/config.ini";
  system(("mkdir -p " + path_vendors).c_str());
  system(("echo 'load_priority' > " + path_config).c_str());
  std::vector<std::string> vendors;
  Status ret = TBEPluginLoader::GetOppPluginVendors(path_config, vendors);
  EXPECT_NE(ret, SUCCESS);
}
TEST_F(STestTensorflowParser, test_plugin_manager_GetOpsProtoPath_01) {
  std::string opp_path = __FILE__;
  opp_path = opp_path.substr(0, opp_path.rfind("/") + 1);
  setenv("ASCEND_OPP_PATH", opp_path.c_str(), 1);
  std::string path_vendors = opp_path + "vendors";
  system(("rm -rf " + path_vendors).c_str());
  std::string opsproto_path;
  Status ret = TBEPluginLoader::GetOpsProtoPath(opsproto_path);
  EXPECT_EQ(ret, SUCCESS);
  EXPECT_EQ(opsproto_path,
            opp_path + "op_proto/custom/:" + opp_path + "op_proto/built-in/");
}
TEST_F(STestTensorflowParser, test_plugin_manager_GetOpsProtoPath_02) {
  std::string opp_path = __FILE__;
  opp_path = opp_path.substr(0, opp_path.rfind("/") + 1);
  setenv("ASCEND_OPP_PATH", opp_path.c_str(), 1);
  std::string path_vendors = opp_path + "vendors";
  std::string path_config = path_vendors + "/config.ini";
  system(("mkdir -p " + path_vendors).c_str());
  system(("echo 'load_priority=customize,mdc,lhisi' > " + path_config).c_str());
  std::string opsproto_path;
  Status ret = TBEPluginLoader::GetOpsProtoPath(opsproto_path);
  EXPECT_EQ(ret, SUCCESS);
  EXPECT_EQ(opsproto_path,
            path_vendors + "/customize/op_proto/:" +
            path_vendors + "/mdc/op_proto/:" +
            path_vendors + "/lhisi/op_proto/:" +
            opp_path + "built-in/op_proto/");
}
TEST_F(STestTensorflowParser, test_plugin_manager_GetCustomOpPath_01) {
  std::string opp_path = __FILE__;
  opp_path = opp_path.substr(0, opp_path.rfind("/") + 1);
  setenv("ASCEND_OPP_PATH", opp_path.c_str(), 1);
  std::string path_vendors = opp_path + "vendors";
  system(("rm -rf " + path_vendors).c_str());
  std::string customop_path;
  TBEPluginLoader::GetCustomOpPath(customop_path);
  EXPECT_EQ(customop_path.find(opp_path + "framework/custom/:" + opp_path + "framework/built-in/"), 0);
}
TEST_F(STestTensorflowParser, test_plugin_manager_GetCustomOpPath_02) {
  std::string opp_path = __FILE__;
  opp_path = opp_path.substr(0, opp_path.rfind("/") + 1);
  setenv("ASCEND_OPP_PATH", opp_path.c_str(), 1);
  std::string path_vendors = opp_path + "vendors";
  std::string path_config = path_vendors + "/config.ini";
  system(("mkdir -p " + path_vendors).c_str());
  system(("echo 'load_priority=customize,mdc,lhisi' > " + path_config).c_str());
  std::string customop_path;
  TBEPluginLoader::GetCustomOpPath(customop_path);
  EXPECT_EQ(customop_path.find(
                path_vendors + "/customize/framework/:" +
                path_vendors + "/mdc/framework/:" +
                path_vendors + "/lhisi/framework/:" +
                opp_path + "built-in/framework/"), 0);
}
}  // namespace ge