- // clang-format off
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(AdaptivePooling);
-
- namespace {
- size_t AdaptivePooling_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AdaptivePooling>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.shape));
- return val;
- }
- bool AdaptivePooling_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<AdaptivePooling>(),
- &&b_ = rhs_.cast_final_safe<AdaptivePooling>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.format != b_.format) return false;
- if (a_.shape != b_.shape) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> AdaptivePooling_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AdaptivePooling>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case AdaptivePooling::Mode::MAX:
- props_.emplace_back("mode", "MAX");
- break;
- case AdaptivePooling::Mode::AVERAGE:
- props_.emplace_back("mode", "AVERAGE");
- break;
- case AdaptivePooling::Mode::AVERAGE_COUNT_EXCLUDE_PADDING:
- props_.emplace_back("mode", "AVERAGE_COUNT_EXCLUDE_PADDING");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- switch (op_.format){
- case AdaptivePooling::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case AdaptivePooling::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case AdaptivePooling::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case AdaptivePooling::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case AdaptivePooling::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case AdaptivePooling::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case AdaptivePooling::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case AdaptivePooling::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case AdaptivePooling::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case AdaptivePooling::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case AdaptivePooling::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case AdaptivePooling::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case AdaptivePooling::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case AdaptivePooling::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case AdaptivePooling::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case AdaptivePooling::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case AdaptivePooling::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case AdaptivePooling::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- props_.emplace_back("shape", "{std::vector}");
- return props_;
- }
- std::string AdaptivePooling_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AdaptivePooling>();
- static_cast<void>(op_);
- return "AdaptivePooling";
- }
- } // anonymous namespace
- OP_TRAIT_REG(AdaptivePooling, AdaptivePooling)
- .hash(AdaptivePooling_hash_impl)
- .is_same_st(AdaptivePooling_is_same_st_impl)
- .props(AdaptivePooling_props_impl)
- .make_name(AdaptivePooling_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(AddAxis);
-
- namespace {
- size_t AddAxis_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AddAxis>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- return val;
- }
- bool AddAxis_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<AddAxis>(),
- &&b_ = rhs_.cast_final_safe<AddAxis>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> AddAxis_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AddAxis>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", "{std::vector}");
- return props_;
- }
- std::string AddAxis_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AddAxis>();
- static_cast<void>(op_);
- return "AddAxis";
- }
- } // anonymous namespace
- OP_TRAIT_REG(AddAxis, AddAxis)
- .hash(AddAxis_hash_impl)
- .is_same_st(AddAxis_is_same_st_impl)
- .props(AddAxis_props_impl)
- .make_name(AddAxis_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Argmax);
-
- namespace {
- size_t Argmax_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Argmax>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- return val;
- }
- bool Argmax_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Argmax>(),
- &&b_ = rhs_.cast_final_safe<Argmax>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Argmax_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Argmax>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- return props_;
- }
- std::string Argmax_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Argmax>();
- static_cast<void>(op_);
- return "Argmax";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Argmax, Argmax)
- .hash(Argmax_hash_impl)
- .is_same_st(Argmax_is_same_st_impl)
- .props(Argmax_props_impl)
- .make_name(Argmax_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Argmin);
-
- namespace {
- size_t Argmin_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Argmin>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- return val;
- }
- bool Argmin_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Argmin>(),
- &&b_ = rhs_.cast_final_safe<Argmin>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Argmin_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Argmin>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- return props_;
- }
- std::string Argmin_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Argmin>();
- static_cast<void>(op_);
- return "Argmin";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Argmin, Argmin)
- .hash(Argmin_hash_impl)
- .is_same_st(Argmin_is_same_st_impl)
- .props(Argmin_props_impl)
- .make_name(Argmin_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Argsort);
-
- namespace {
- size_t Argsort_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Argsort>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.order));
- return val;
- }
- bool Argsort_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Argsort>(),
- &&b_ = rhs_.cast_final_safe<Argsort>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.order != b_.order) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Argsort_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Argsort>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.order){
- case Argsort::Order::ASCENDING:
- props_.emplace_back("order", "ASCENDING");
- break;
- case Argsort::Order::DESCENDING:
- props_.emplace_back("order", "DESCENDING");
- break;
- default:
- props_.emplace_back("order", "INVALID");
- break;
- }
- return props_;
- }
- std::string Argsort_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Argsort>();
- static_cast<void>(op_);
- return "Argsort";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Argsort, Argsort)
- .hash(Argsort_hash_impl)
- .is_same_st(Argsort_is_same_st_impl)
- .props(Argsort_props_impl)
- .make_name(Argsort_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(AssertEqual);
-
- namespace {
- size_t AssertEqual_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AssertEqual>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.maxerr));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.verbose));
- return val;
- }
- bool AssertEqual_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<AssertEqual>(),
- &&b_ = rhs_.cast_final_safe<AssertEqual>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.maxerr != b_.maxerr) return false;
- if (a_.verbose != b_.verbose) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> AssertEqual_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AssertEqual>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("maxerr", std::to_string(op_.maxerr));
- props_.emplace_back("verbose", std::to_string(op_.verbose));
- return props_;
- }
- std::string AssertEqual_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AssertEqual>();
- static_cast<void>(op_);
- return "AssertEqual";
- }
- } // anonymous namespace
- OP_TRAIT_REG(AssertEqual, AssertEqual)
- .hash(AssertEqual_hash_impl)
- .is_same_st(AssertEqual_is_same_st_impl)
- .props(AssertEqual_props_impl)
- .make_name(AssertEqual_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(AtlasRuntime);
-
- namespace {
- size_t AtlasRuntime_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AtlasRuntime>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.buf));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.buf_size));
- return val;
- }
- bool AtlasRuntime_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<AtlasRuntime>(),
- &&b_ = rhs_.cast_final_safe<AtlasRuntime>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.buf != b_.buf) return false;
- if (a_.buf_size != b_.buf_size) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> AtlasRuntime_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AtlasRuntime>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("buf", op_.buf);
- props_.emplace_back("buf_size", std::to_string(op_.buf_size));
- return props_;
- }
- std::string AtlasRuntime_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AtlasRuntime>();
- static_cast<void>(op_);
- return "AtlasRuntime";
- }
- } // anonymous namespace
- OP_TRAIT_REG(AtlasRuntime, AtlasRuntime)
- .hash(AtlasRuntime_hash_impl)
- .is_same_st(AtlasRuntime_is_same_st_impl)
- .props(AtlasRuntime_props_impl)
- .make_name(AtlasRuntime_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Barrier);
-
- namespace {
- size_t Barrier_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Barrier>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.comp_node));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.nr_outputs));
- return val;
- }
- bool Barrier_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Barrier>(),
- &&b_ = rhs_.cast_final_safe<Barrier>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.comp_node != b_.comp_node) return false;
- if (a_.nr_outputs != b_.nr_outputs) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Barrier_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Barrier>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("comp_node", op_.comp_node.to_string());
- props_.emplace_back("nr_outputs", std::to_string(op_.nr_outputs));
- return props_;
- }
- std::string Barrier_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Barrier>();
- static_cast<void>(op_);
- return "Barrier";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Barrier, Barrier)
- .hash(Barrier_hash_impl)
- .is_same_st(Barrier_is_same_st_impl)
- .props(Barrier_props_impl)
- .make_name(Barrier_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(BatchConvBias);
-
- namespace {
- size_t BatchConvBias_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchConvBias>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.nonlineMode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.sparse));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.compute_mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dtype.handle()));
- return val;
- }
- bool BatchConvBias_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<BatchConvBias>(),
- &&b_ = rhs_.cast_final_safe<BatchConvBias>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.nonlineMode != b_.nonlineMode) return false;
- if (a_.mode != b_.mode) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.sparse != b_.sparse) return false;
- if (a_.format != b_.format) return false;
- if (a_.compute_mode != b_.compute_mode) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- if (a_.dtype != b_.dtype) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> BatchConvBias_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchConvBias>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.nonlineMode){
- case BatchConvBias::NonlineMode::IDENTITY:
- props_.emplace_back("nonlineMode", "IDENTITY");
- break;
- case BatchConvBias::NonlineMode::RELU:
- props_.emplace_back("nonlineMode", "RELU");
- break;
- case BatchConvBias::NonlineMode::SIGMOID:
- props_.emplace_back("nonlineMode", "SIGMOID");
- break;
- case BatchConvBias::NonlineMode::H_SWISH:
- props_.emplace_back("nonlineMode", "H_SWISH");
- break;
- default:
- props_.emplace_back("nonlineMode", "INVALID");
- break;
- }
- switch (op_.mode){
- case BatchConvBias::Mode::CROSS_CORRELATION:
- props_.emplace_back("mode", "CROSS_CORRELATION");
- break;
- case BatchConvBias::Mode::CONVOLUTION:
- props_.emplace_back("mode", "CONVOLUTION");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- switch (op_.sparse){
- case BatchConvBias::Sparse::DENSE:
- props_.emplace_back("sparse", "DENSE");
- break;
- case BatchConvBias::Sparse::GROUP:
- props_.emplace_back("sparse", "GROUP");
- break;
- default:
- props_.emplace_back("sparse", "INVALID");
- break;
- }
- switch (op_.format){
- case BatchConvBias::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case BatchConvBias::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case BatchConvBias::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case BatchConvBias::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case BatchConvBias::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case BatchConvBias::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case BatchConvBias::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case BatchConvBias::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case BatchConvBias::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case BatchConvBias::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case BatchConvBias::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case BatchConvBias::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case BatchConvBias::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case BatchConvBias::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case BatchConvBias::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case BatchConvBias::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case BatchConvBias::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case BatchConvBias::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.compute_mode){
- case BatchConvBias::ComputeMode::DEFAULT:
- props_.emplace_back("compute_mode", "DEFAULT");
- break;
- case BatchConvBias::ComputeMode::FLOAT32:
- props_.emplace_back("compute_mode", "FLOAT32");
- break;
- default:
- props_.emplace_back("compute_mode", "INVALID");
- break;
- }
- switch (op_.strategy){
- case BatchConvBias::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case BatchConvBias::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case BatchConvBias::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case BatchConvBias::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- props_.emplace_back("dtype", op_.dtype.name());
- return props_;
- }
- std::string BatchConvBias_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchConvBias>();
- static_cast<void>(op_);
- return "BatchConvBias";
- }
- } // anonymous namespace
- OP_TRAIT_REG(BatchConvBias, BatchConvBias)
- .hash(BatchConvBias_hash_impl)
- .is_same_st(BatchConvBias_is_same_st_impl)
- .props(BatchConvBias_props_impl)
- .make_name(BatchConvBias_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(BatchNorm);
-
- namespace {
- size_t BatchNorm_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchNorm>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.param_dim));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.fwd_mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.epsilon));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.avg_factor));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.scale));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.bias));
- return val;
- }
- bool BatchNorm_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<BatchNorm>(),
- &&b_ = rhs_.cast_final_safe<BatchNorm>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.param_dim != b_.param_dim) return false;
- if (a_.fwd_mode != b_.fwd_mode) return false;
- if (a_.epsilon != b_.epsilon) return false;
- if (a_.avg_factor != b_.avg_factor) return false;
- if (a_.scale != b_.scale) return false;
- if (a_.bias != b_.bias) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> BatchNorm_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchNorm>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.param_dim){
- case BatchNorm::ParamDim::DIM_11HW:
- props_.emplace_back("param_dim", "DIM_11HW");
- break;
- case BatchNorm::ParamDim::DIM_1CHW:
- props_.emplace_back("param_dim", "DIM_1CHW");
- break;
- case BatchNorm::ParamDim::DIM_1C11:
- props_.emplace_back("param_dim", "DIM_1C11");
- break;
- case BatchNorm::ParamDim::DIM_111C:
- props_.emplace_back("param_dim", "DIM_111C");
- break;
- default:
- props_.emplace_back("param_dim", "INVALID");
- break;
- }
- switch (op_.fwd_mode){
- case BatchNorm::FwdMode::TRAINING:
- props_.emplace_back("fwd_mode", "TRAINING");
- break;
- case BatchNorm::FwdMode::INFERENCE:
- props_.emplace_back("fwd_mode", "INFERENCE");
- break;
- default:
- props_.emplace_back("fwd_mode", "INVALID");
- break;
- }
- props_.emplace_back("epsilon", std::to_string(op_.epsilon));
- props_.emplace_back("avg_factor", std::to_string(op_.avg_factor));
- props_.emplace_back("scale", std::to_string(op_.scale));
- props_.emplace_back("bias", std::to_string(op_.bias));
- return props_;
- }
- std::string BatchNorm_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchNorm>();
- static_cast<void>(op_);
- return "BatchNorm";
- }
- } // anonymous namespace
- OP_TRAIT_REG(BatchNorm, BatchNorm)
- .hash(BatchNorm_hash_impl)
- .is_same_st(BatchNorm_is_same_st_impl)
- .props(BatchNorm_props_impl)
- .make_name(BatchNorm_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(BatchNormBackward);
-
- namespace {
- size_t BatchNormBackward_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchNormBackward>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.param_dim));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.fwd_mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.epsilon));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.avg_factor));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.scale));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.bias));
- return val;
- }
- bool BatchNormBackward_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<BatchNormBackward>(),
- &&b_ = rhs_.cast_final_safe<BatchNormBackward>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.param_dim != b_.param_dim) return false;
- if (a_.fwd_mode != b_.fwd_mode) return false;
- if (a_.epsilon != b_.epsilon) return false;
- if (a_.avg_factor != b_.avg_factor) return false;
- if (a_.scale != b_.scale) return false;
- if (a_.bias != b_.bias) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> BatchNormBackward_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchNormBackward>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.param_dim){
- case BatchNormBackward::ParamDim::DIM_11HW:
- props_.emplace_back("param_dim", "DIM_11HW");
- break;
- case BatchNormBackward::ParamDim::DIM_1CHW:
- props_.emplace_back("param_dim", "DIM_1CHW");
- break;
- case BatchNormBackward::ParamDim::DIM_1C11:
- props_.emplace_back("param_dim", "DIM_1C11");
- break;
- case BatchNormBackward::ParamDim::DIM_111C:
- props_.emplace_back("param_dim", "DIM_111C");
- break;
- default:
- props_.emplace_back("param_dim", "INVALID");
- break;
- }
- switch (op_.fwd_mode){
- case BatchNormBackward::FwdMode::TRAINING:
- props_.emplace_back("fwd_mode", "TRAINING");
- break;
- case BatchNormBackward::FwdMode::INFERENCE:
- props_.emplace_back("fwd_mode", "INFERENCE");
- break;
- default:
- props_.emplace_back("fwd_mode", "INVALID");
- break;
- }
- props_.emplace_back("epsilon", std::to_string(op_.epsilon));
- props_.emplace_back("avg_factor", std::to_string(op_.avg_factor));
- props_.emplace_back("scale", std::to_string(op_.scale));
- props_.emplace_back("bias", std::to_string(op_.bias));
- return props_;
- }
- std::string BatchNormBackward_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchNormBackward>();
- static_cast<void>(op_);
- return "BatchNormBackward";
- }
- } // anonymous namespace
- OP_TRAIT_REG(BatchNormBackward, BatchNormBackward)
- .hash(BatchNormBackward_hash_impl)
- .is_same_st(BatchNormBackward_is_same_st_impl)
- .props(BatchNormBackward_props_impl)
- .make_name(BatchNormBackward_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(BatchedIncrMeshIndexing);
-
- namespace {
- size_t BatchedIncrMeshIndexing_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedIncrMeshIndexing>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool BatchedIncrMeshIndexing_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<BatchedIncrMeshIndexing>(),
- &&b_ = rhs_.cast_final_safe<BatchedIncrMeshIndexing>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> BatchedIncrMeshIndexing_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedIncrMeshIndexing>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string BatchedIncrMeshIndexing_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedIncrMeshIndexing>();
- static_cast<void>(op_);
- return "BatchedIncrMeshIndexing";
- }
- } // anonymous namespace
- OP_TRAIT_REG(BatchedIncrMeshIndexing, BatchedIncrMeshIndexing)
- .hash(BatchedIncrMeshIndexing_hash_impl)
- .is_same_st(BatchedIncrMeshIndexing_is_same_st_impl)
- .props(BatchedIncrMeshIndexing_props_impl)
- .make_name(BatchedIncrMeshIndexing_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(BatchedMatrixMul);
-
- namespace {
- size_t BatchedMatrixMul_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedMatrixMul>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.transposeA));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.transposeB));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.compute_mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dimA));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dimB));
- return val;
- }
- bool BatchedMatrixMul_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<BatchedMatrixMul>(),
- &&b_ = rhs_.cast_final_safe<BatchedMatrixMul>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.transposeA != b_.transposeA) return false;
- if (a_.transposeB != b_.transposeB) return false;
- if (a_.compute_mode != b_.compute_mode) return false;
- if (a_.format != b_.format) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- if (a_.dimA != b_.dimA) return false;
- if (a_.dimB != b_.dimB) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> BatchedMatrixMul_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedMatrixMul>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("transposeA", std::to_string(op_.transposeA));
- props_.emplace_back("transposeB", std::to_string(op_.transposeB));
- switch (op_.compute_mode){
- case BatchedMatrixMul::ComputeMode::DEFAULT:
- props_.emplace_back("compute_mode", "DEFAULT");
- break;
- case BatchedMatrixMul::ComputeMode::FLOAT32:
- props_.emplace_back("compute_mode", "FLOAT32");
- break;
- default:
- props_.emplace_back("compute_mode", "INVALID");
- break;
- }
- switch (op_.format){
- case BatchedMatrixMul::Format::DEFAULT:
- props_.emplace_back("format", "DEFAULT");
- break;
- case BatchedMatrixMul::Format::MK4:
- props_.emplace_back("format", "MK4");
- break;
- case BatchedMatrixMul::Format::MK8:
- props_.emplace_back("format", "MK8");
- break;
- case BatchedMatrixMul::Format::MK4_DOT:
- props_.emplace_back("format", "MK4_DOT");
- break;
- case BatchedMatrixMul::Format::N32K4_DOT:
- props_.emplace_back("format", "N32K4_DOT");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.strategy){
- case BatchedMatrixMul::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case BatchedMatrixMul::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case BatchedMatrixMul::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case BatchedMatrixMul::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- props_.emplace_back("dimA", std::to_string(op_.dimA));
- props_.emplace_back("dimB", std::to_string(op_.dimB));
- return props_;
- }
- std::string BatchedMatrixMul_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedMatrixMul>();
- static_cast<void>(op_);
- return "BatchedMatrixMul";
- }
- } // anonymous namespace
- OP_TRAIT_REG(BatchedMatrixMul, BatchedMatrixMul)
- .hash(BatchedMatrixMul_hash_impl)
- .is_same_st(BatchedMatrixMul_is_same_st_impl)
- .props(BatchedMatrixMul_props_impl)
- .make_name(BatchedMatrixMul_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(BatchedMeshIndexing);
-
- namespace {
- size_t BatchedMeshIndexing_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedMeshIndexing>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool BatchedMeshIndexing_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<BatchedMeshIndexing>(),
- &&b_ = rhs_.cast_final_safe<BatchedMeshIndexing>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> BatchedMeshIndexing_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedMeshIndexing>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string BatchedMeshIndexing_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedMeshIndexing>();
- static_cast<void>(op_);
- return "BatchedMeshIndexing";
- }
- } // anonymous namespace
- OP_TRAIT_REG(BatchedMeshIndexing, BatchedMeshIndexing)
- .hash(BatchedMeshIndexing_hash_impl)
- .is_same_st(BatchedMeshIndexing_is_same_st_impl)
- .props(BatchedMeshIndexing_props_impl)
- .make_name(BatchedMeshIndexing_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(BatchedSetMeshIndexing);
-
- namespace {
- size_t BatchedSetMeshIndexing_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedSetMeshIndexing>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool BatchedSetMeshIndexing_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<BatchedSetMeshIndexing>(),
- &&b_ = rhs_.cast_final_safe<BatchedSetMeshIndexing>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> BatchedSetMeshIndexing_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedSetMeshIndexing>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string BatchedSetMeshIndexing_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedSetMeshIndexing>();
- static_cast<void>(op_);
- return "BatchedSetMeshIndexing";
- }
- } // anonymous namespace
- OP_TRAIT_REG(BatchedSetMeshIndexing, BatchedSetMeshIndexing)
- .hash(BatchedSetMeshIndexing_hash_impl)
- .is_same_st(BatchedSetMeshIndexing_is_same_st_impl)
- .props(BatchedSetMeshIndexing_props_impl)
- .make_name(BatchedSetMeshIndexing_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(BetaRNG);
-
- namespace {
- size_t BetaRNG_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BetaRNG>();
- static_cast<void>(op_);
-
- return mgb::hash_pair_combine(
- mgb::hash(op_.dyn_typeinfo()),
- mgb::hash(op_.handle)
- );
- }
- bool BetaRNG_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<BetaRNG>(),
- &&b_ = rhs_.cast_final_safe<BetaRNG>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return a_.handle == b_.handle;
- }
- std::vector<std::pair<const char*, std::string>> BetaRNG_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BetaRNG>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("seed", std::to_string(op_.seed));
- props_.emplace_back("handle", std::to_string(op_.handle));
- return props_;
- }
- std::string BetaRNG_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BetaRNG>();
- static_cast<void>(op_);
- return "BetaRNG";
- }
- } // anonymous namespace
- OP_TRAIT_REG(BetaRNG, BetaRNG)
- .hash(BetaRNG_hash_impl)
- .is_same_st(BetaRNG_is_same_st_impl)
- .props(BetaRNG_props_impl)
- .make_name(BetaRNG_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Borrow);
-
- namespace {
- size_t Borrow_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Borrow>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.comp_node));
- return val;
- }
- bool Borrow_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Borrow>(),
- &&b_ = rhs_.cast_final_safe<Borrow>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.comp_node != b_.comp_node) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Borrow_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Borrow>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("comp_node", op_.comp_node.to_string());
- return props_;
- }
- std::string Borrow_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Borrow>();
- static_cast<void>(op_);
- return "Borrow";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Borrow, Borrow)
- .hash(Borrow_hash_impl)
- .is_same_st(Borrow_is_same_st_impl)
- .props(Borrow_props_impl)
- .make_name(Borrow_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Broadcast);
-
- namespace {
- size_t Broadcast_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Broadcast>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.shape));
- return val;
- }
- bool Broadcast_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Broadcast>(),
- &&b_ = rhs_.cast_final_safe<Broadcast>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.shape != b_.shape) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Broadcast_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Broadcast>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("shape", "{std::vector}");
- return props_;
- }
- std::string Broadcast_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Broadcast>();
- static_cast<void>(op_);
- return "Broadcast";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Broadcast, Broadcast)
- .hash(Broadcast_hash_impl)
- .is_same_st(Broadcast_is_same_st_impl)
- .props(Broadcast_props_impl)
- .make_name(Broadcast_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(CambriconRuntime);
-
- namespace {
- size_t CambriconRuntime_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CambriconRuntime>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.buf));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.buf_size));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.symbol));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.tensor_dim_mutable));
- return val;
- }
- bool CambriconRuntime_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<CambriconRuntime>(),
- &&b_ = rhs_.cast_final_safe<CambriconRuntime>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.buf != b_.buf) return false;
- if (a_.buf_size != b_.buf_size) return false;
- if (a_.symbol != b_.symbol) return false;
- if (a_.tensor_dim_mutable != b_.tensor_dim_mutable) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> CambriconRuntime_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CambriconRuntime>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("buf", op_.buf);
- props_.emplace_back("buf_size", std::to_string(op_.buf_size));
- props_.emplace_back("symbol", op_.symbol);
- props_.emplace_back("tensor_dim_mutable", std::to_string(op_.tensor_dim_mutable));
- return props_;
- }
- std::string CambriconRuntime_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CambriconRuntime>();
- static_cast<void>(op_);
- return "CambriconRuntime";
- }
- } // anonymous namespace
- OP_TRAIT_REG(CambriconRuntime, CambriconRuntime)
- .hash(CambriconRuntime_hash_impl)
- .is_same_st(CambriconRuntime_is_same_st_impl)
- .props(CambriconRuntime_props_impl)
- .make_name(CambriconRuntime_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(CheckNonFinite);
-
- namespace {
- size_t CheckNonFinite_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CheckNonFinite>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.scale));
- return val;
- }
- bool CheckNonFinite_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<CheckNonFinite>(),
- &&b_ = rhs_.cast_final_safe<CheckNonFinite>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.scale != b_.scale) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> CheckNonFinite_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CheckNonFinite>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("scale", std::to_string(op_.scale));
- return props_;
- }
- std::string CheckNonFinite_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CheckNonFinite>();
- static_cast<void>(op_);
- return "CheckNonFinite";
- }
- } // anonymous namespace
- OP_TRAIT_REG(CheckNonFinite, CheckNonFinite)
- .hash(CheckNonFinite_hash_impl)
- .is_same_st(CheckNonFinite_is_same_st_impl)
- .props(CheckNonFinite_props_impl)
- .make_name(CheckNonFinite_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(CollectiveComm);
-
- namespace {
- size_t CollectiveComm_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CollectiveComm>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.key));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.nr_devices));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.rank));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.is_root));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.local_grad));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.addr));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.port));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dtype.handle()));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.backend));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.comp_node));
- return val;
- }
- bool CollectiveComm_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<CollectiveComm>(),
- &&b_ = rhs_.cast_final_safe<CollectiveComm>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.key != b_.key) return false;
- if (a_.nr_devices != b_.nr_devices) return false;
- if (a_.rank != b_.rank) return false;
- if (a_.is_root != b_.is_root) return false;
- if (a_.local_grad != b_.local_grad) return false;
- if (a_.addr != b_.addr) return false;
- if (a_.port != b_.port) return false;
- if (a_.dtype != b_.dtype) return false;
- if (a_.backend != b_.backend) return false;
- if (a_.comp_node != b_.comp_node) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> CollectiveComm_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CollectiveComm>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case CollectiveComm::Mode::REDUCE_SUM:
- props_.emplace_back("mode", "REDUCE_SUM");
- break;
- case CollectiveComm::Mode::BROADCAST:
- props_.emplace_back("mode", "BROADCAST");
- break;
- case CollectiveComm::Mode::ALL_GATHER:
- props_.emplace_back("mode", "ALL_GATHER");
- break;
- case CollectiveComm::Mode::REDUCE_SCATTER_SUM:
- props_.emplace_back("mode", "REDUCE_SCATTER_SUM");
- break;
- case CollectiveComm::Mode::ALL_REDUCE_SUM:
- props_.emplace_back("mode", "ALL_REDUCE_SUM");
- break;
- case CollectiveComm::Mode::ALL_REDUCE_MAX:
- props_.emplace_back("mode", "ALL_REDUCE_MAX");
- break;
- case CollectiveComm::Mode::ALL_REDUCE_MIN:
- props_.emplace_back("mode", "ALL_REDUCE_MIN");
- break;
- case CollectiveComm::Mode::ALL_REDUCE_PROD:
- props_.emplace_back("mode", "ALL_REDUCE_PROD");
- break;
- case CollectiveComm::Mode::GATHER:
- props_.emplace_back("mode", "GATHER");
- break;
- case CollectiveComm::Mode::SCATTER:
- props_.emplace_back("mode", "SCATTER");
- break;
- case CollectiveComm::Mode::ALL_TO_ALL:
- props_.emplace_back("mode", "ALL_TO_ALL");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("key", op_.key);
- props_.emplace_back("nr_devices", std::to_string(op_.nr_devices));
- props_.emplace_back("rank", std::to_string(op_.rank));
- props_.emplace_back("is_root", std::to_string(op_.is_root));
- props_.emplace_back("local_grad", std::to_string(op_.local_grad));
- props_.emplace_back("addr", op_.addr);
- props_.emplace_back("port", std::to_string(op_.port));
- props_.emplace_back("dtype", op_.dtype.name());
- props_.emplace_back("backend", op_.backend);
- props_.emplace_back("comp_node", op_.comp_node);
- return props_;
- }
- std::string CollectiveComm_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CollectiveComm>();
- static_cast<void>(op_);
- return "CollectiveComm";
- }
- } // anonymous namespace
- OP_TRAIT_REG(CollectiveComm, CollectiveComm)
- .hash(CollectiveComm_hash_impl)
- .is_same_st(CollectiveComm_is_same_st_impl)
- .props(CollectiveComm_props_impl)
- .make_name(CollectiveComm_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Concat);
-
- namespace {
- size_t Concat_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Concat>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.comp_node));
- return val;
- }
- bool Concat_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Concat>(),
- &&b_ = rhs_.cast_final_safe<Concat>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- if (a_.comp_node != b_.comp_node) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Concat_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Concat>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- props_.emplace_back("comp_node", op_.comp_node.to_string());
- return props_;
- }
- std::string Concat_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Concat>();
- static_cast<void>(op_);
- return "Concat";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Concat, Concat)
- .hash(Concat_hash_impl)
- .is_same_st(Concat_is_same_st_impl)
- .props(Concat_props_impl)
- .make_name(Concat_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(CondTake);
-
- namespace {
- size_t CondTake_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CondTake>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- return val;
- }
- bool CondTake_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<CondTake>(),
- &&b_ = rhs_.cast_final_safe<CondTake>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return true;
- }
- std::vector<std::pair<const char*, std::string>> CondTake_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CondTake>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- return props_;
- }
- std::string CondTake_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CondTake>();
- static_cast<void>(op_);
- return "CondTake";
- }
- } // anonymous namespace
- OP_TRAIT_REG(CondTake, CondTake)
- .hash(CondTake_hash_impl)
- .is_same_st(CondTake_is_same_st_impl)
- .props(CondTake_props_impl)
- .make_name(CondTake_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(ConvBias);
-
- namespace {
- size_t ConvBias_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ConvBias>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.nonlineMode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.sparse));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.compute_mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dtype.handle()));
- return val;
- }
- bool ConvBias_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<ConvBias>(),
- &&b_ = rhs_.cast_final_safe<ConvBias>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.nonlineMode != b_.nonlineMode) return false;
- if (a_.mode != b_.mode) return false;
- if (a_.sparse != b_.sparse) return false;
- if (a_.format != b_.format) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.compute_mode != b_.compute_mode) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- if (a_.dtype != b_.dtype) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> ConvBias_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ConvBias>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.nonlineMode){
- case ConvBias::NonlineMode::IDENTITY:
- props_.emplace_back("nonlineMode", "IDENTITY");
- break;
- case ConvBias::NonlineMode::RELU:
- props_.emplace_back("nonlineMode", "RELU");
- break;
- case ConvBias::NonlineMode::SIGMOID:
- props_.emplace_back("nonlineMode", "SIGMOID");
- break;
- case ConvBias::NonlineMode::H_SWISH:
- props_.emplace_back("nonlineMode", "H_SWISH");
- break;
- default:
- props_.emplace_back("nonlineMode", "INVALID");
- break;
- }
- switch (op_.mode){
- case ConvBias::Mode::CROSS_CORRELATION:
- props_.emplace_back("mode", "CROSS_CORRELATION");
- break;
- case ConvBias::Mode::CONVOLUTION:
- props_.emplace_back("mode", "CONVOLUTION");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- switch (op_.sparse){
- case ConvBias::Sparse::DENSE:
- props_.emplace_back("sparse", "DENSE");
- break;
- case ConvBias::Sparse::GROUP:
- props_.emplace_back("sparse", "GROUP");
- break;
- default:
- props_.emplace_back("sparse", "INVALID");
- break;
- }
- switch (op_.format){
- case ConvBias::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case ConvBias::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case ConvBias::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case ConvBias::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case ConvBias::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case ConvBias::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case ConvBias::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case ConvBias::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case ConvBias::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case ConvBias::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case ConvBias::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case ConvBias::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case ConvBias::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case ConvBias::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case ConvBias::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case ConvBias::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case ConvBias::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case ConvBias::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- switch (op_.compute_mode){
- case ConvBias::ComputeMode::DEFAULT:
- props_.emplace_back("compute_mode", "DEFAULT");
- break;
- case ConvBias::ComputeMode::FLOAT32:
- props_.emplace_back("compute_mode", "FLOAT32");
- break;
- default:
- props_.emplace_back("compute_mode", "INVALID");
- break;
- }
- switch (op_.strategy){
- case ConvBias::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case ConvBias::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case ConvBias::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case ConvBias::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- props_.emplace_back("dtype", op_.dtype.name());
- return props_;
- }
- std::string ConvBias_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ConvBias>();
- static_cast<void>(op_);
- return "ConvBias";
- }
- } // anonymous namespace
- OP_TRAIT_REG(ConvBias, ConvBias)
- .hash(ConvBias_hash_impl)
- .is_same_st(ConvBias_is_same_st_impl)
- .props(ConvBias_props_impl)
- .make_name(ConvBias_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Convolution);
-
- namespace {
- size_t Convolution_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Convolution>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.sparse));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.compute_mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- return val;
- }
- bool Convolution_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Convolution>(),
- &&b_ = rhs_.cast_final_safe<Convolution>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.sparse != b_.sparse) return false;
- if (a_.format != b_.format) return false;
- if (a_.compute_mode != b_.compute_mode) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Convolution_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Convolution>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case Convolution::Mode::CROSS_CORRELATION:
- props_.emplace_back("mode", "CROSS_CORRELATION");
- break;
- case Convolution::Mode::CONVOLUTION:
- props_.emplace_back("mode", "CONVOLUTION");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- switch (op_.sparse){
- case Convolution::Sparse::DENSE:
- props_.emplace_back("sparse", "DENSE");
- break;
- case Convolution::Sparse::GROUP:
- props_.emplace_back("sparse", "GROUP");
- break;
- default:
- props_.emplace_back("sparse", "INVALID");
- break;
- }
- switch (op_.format){
- case Convolution::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case Convolution::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case Convolution::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case Convolution::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case Convolution::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case Convolution::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case Convolution::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case Convolution::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case Convolution::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case Convolution::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case Convolution::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case Convolution::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case Convolution::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case Convolution::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case Convolution::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case Convolution::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case Convolution::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case Convolution::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.compute_mode){
- case Convolution::ComputeMode::DEFAULT:
- props_.emplace_back("compute_mode", "DEFAULT");
- break;
- case Convolution::ComputeMode::FLOAT32:
- props_.emplace_back("compute_mode", "FLOAT32");
- break;
- default:
- props_.emplace_back("compute_mode", "INVALID");
- break;
- }
- switch (op_.strategy){
- case Convolution::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case Convolution::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case Convolution::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case Convolution::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- return props_;
- }
- std::string Convolution_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Convolution>();
- static_cast<void>(op_);
- return "Convolution";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Convolution, Convolution)
- .hash(Convolution_hash_impl)
- .is_same_st(Convolution_is_same_st_impl)
- .props(Convolution_props_impl)
- .make_name(Convolution_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Convolution3D);
-
- namespace {
- size_t Convolution3D_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Convolution3D>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_d));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_d));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_d));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.sparse));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.data_type));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- return val;
- }
- bool Convolution3D_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Convolution3D>(),
- &&b_ = rhs_.cast_final_safe<Convolution3D>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.pad_d != b_.pad_d) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_d != b_.stride_d) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_d != b_.dilate_d) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.sparse != b_.sparse) return false;
- if (a_.data_type != b_.data_type) return false;
- if (a_.format != b_.format) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Convolution3D_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Convolution3D>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case Convolution3D::Mode::CROSS_CORRELATION:
- props_.emplace_back("mode", "CROSS_CORRELATION");
- break;
- case Convolution3D::Mode::CONVOLUTION:
- props_.emplace_back("mode", "CONVOLUTION");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("pad_d", std::to_string(op_.pad_d));
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_d", std::to_string(op_.stride_d));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_d", std::to_string(op_.dilate_d));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- switch (op_.sparse){
- case Convolution3D::Sparse::DENSE:
- props_.emplace_back("sparse", "DENSE");
- break;
- case Convolution3D::Sparse::GROUP:
- props_.emplace_back("sparse", "GROUP");
- break;
- default:
- props_.emplace_back("sparse", "INVALID");
- break;
- }
- switch (op_.data_type){
- case Convolution3D::DataType::FLOAT:
- props_.emplace_back("data_type", "FLOAT");
- break;
- case Convolution3D::DataType::FLOAT_IO16xC32:
- props_.emplace_back("data_type", "FLOAT_IO16xC32");
- break;
- default:
- props_.emplace_back("data_type", "INVALID");
- break;
- }
- switch (op_.format){
- case Convolution3D::Format::NCDHW:
- props_.emplace_back("format", "NCDHW");
- break;
- case Convolution3D::Format::NDHWC:
- props_.emplace_back("format", "NDHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.strategy){
- case Convolution3D::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case Convolution3D::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case Convolution3D::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case Convolution3D::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- return props_;
- }
- std::string Convolution3D_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Convolution3D>();
- static_cast<void>(op_);
- return "Convolution3D";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Convolution3D, Convolution3D)
- .hash(Convolution3D_hash_impl)
- .is_same_st(Convolution3D_is_same_st_impl)
- .props(Convolution3D_props_impl)
- .make_name(Convolution3D_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Convolution3DBackwardData);
-
- namespace {
- size_t Convolution3DBackwardData_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Convolution3DBackwardData>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_d));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_d));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_d));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.sparse));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.data_type));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- return val;
- }
- bool Convolution3DBackwardData_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Convolution3DBackwardData>(),
- &&b_ = rhs_.cast_final_safe<Convolution3DBackwardData>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.pad_d != b_.pad_d) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_d != b_.stride_d) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_d != b_.dilate_d) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.sparse != b_.sparse) return false;
- if (a_.data_type != b_.data_type) return false;
- if (a_.format != b_.format) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Convolution3DBackwardData_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Convolution3DBackwardData>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case Convolution3DBackwardData::Mode::CROSS_CORRELATION:
- props_.emplace_back("mode", "CROSS_CORRELATION");
- break;
- case Convolution3DBackwardData::Mode::CONVOLUTION:
- props_.emplace_back("mode", "CONVOLUTION");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("pad_d", std::to_string(op_.pad_d));
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_d", std::to_string(op_.stride_d));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_d", std::to_string(op_.dilate_d));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- switch (op_.sparse){
- case Convolution3DBackwardData::Sparse::DENSE:
- props_.emplace_back("sparse", "DENSE");
- break;
- case Convolution3DBackwardData::Sparse::GROUP:
- props_.emplace_back("sparse", "GROUP");
- break;
- default:
- props_.emplace_back("sparse", "INVALID");
- break;
- }
- switch (op_.data_type){
- case Convolution3DBackwardData::DataType::FLOAT:
- props_.emplace_back("data_type", "FLOAT");
- break;
- case Convolution3DBackwardData::DataType::FLOAT_IO16xC32:
- props_.emplace_back("data_type", "FLOAT_IO16xC32");
- break;
- default:
- props_.emplace_back("data_type", "INVALID");
- break;
- }
- switch (op_.format){
- case Convolution3DBackwardData::Format::NCDHW:
- props_.emplace_back("format", "NCDHW");
- break;
- case Convolution3DBackwardData::Format::NDHWC:
- props_.emplace_back("format", "NDHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.strategy){
- case Convolution3DBackwardData::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case Convolution3DBackwardData::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case Convolution3DBackwardData::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case Convolution3DBackwardData::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- return props_;
- }
- std::string Convolution3DBackwardData_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Convolution3DBackwardData>();
- static_cast<void>(op_);
- return "Convolution3DBackwardData";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Convolution3DBackwardData, Convolution3DBackwardData)
- .hash(Convolution3DBackwardData_hash_impl)
- .is_same_st(Convolution3DBackwardData_is_same_st_impl)
- .props(Convolution3DBackwardData_props_impl)
- .make_name(Convolution3DBackwardData_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(ConvolutionBackwardData);
-
- namespace {
- size_t ConvolutionBackwardData_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ConvolutionBackwardData>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.sparse));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.compute_mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dtype.handle()));
- return val;
- }
- bool ConvolutionBackwardData_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<ConvolutionBackwardData>(),
- &&b_ = rhs_.cast_final_safe<ConvolutionBackwardData>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.sparse != b_.sparse) return false;
- if (a_.format != b_.format) return false;
- if (a_.compute_mode != b_.compute_mode) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- if (a_.dtype != b_.dtype) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> ConvolutionBackwardData_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ConvolutionBackwardData>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case ConvolutionBackwardData::Mode::CROSS_CORRELATION:
- props_.emplace_back("mode", "CROSS_CORRELATION");
- break;
- case ConvolutionBackwardData::Mode::CONVOLUTION:
- props_.emplace_back("mode", "CONVOLUTION");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- switch (op_.sparse){
- case ConvolutionBackwardData::Sparse::DENSE:
- props_.emplace_back("sparse", "DENSE");
- break;
- case ConvolutionBackwardData::Sparse::GROUP:
- props_.emplace_back("sparse", "GROUP");
- break;
- default:
- props_.emplace_back("sparse", "INVALID");
- break;
- }
- switch (op_.format){
- case ConvolutionBackwardData::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case ConvolutionBackwardData::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case ConvolutionBackwardData::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case ConvolutionBackwardData::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case ConvolutionBackwardData::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case ConvolutionBackwardData::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case ConvolutionBackwardData::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case ConvolutionBackwardData::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case ConvolutionBackwardData::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case ConvolutionBackwardData::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case ConvolutionBackwardData::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case ConvolutionBackwardData::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case ConvolutionBackwardData::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case ConvolutionBackwardData::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case ConvolutionBackwardData::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case ConvolutionBackwardData::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case ConvolutionBackwardData::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case ConvolutionBackwardData::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.compute_mode){
- case ConvolutionBackwardData::ComputeMode::DEFAULT:
- props_.emplace_back("compute_mode", "DEFAULT");
- break;
- case ConvolutionBackwardData::ComputeMode::FLOAT32:
- props_.emplace_back("compute_mode", "FLOAT32");
- break;
- default:
- props_.emplace_back("compute_mode", "INVALID");
- break;
- }
- switch (op_.strategy){
- case ConvolutionBackwardData::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case ConvolutionBackwardData::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case ConvolutionBackwardData::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case ConvolutionBackwardData::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- props_.emplace_back("dtype", op_.dtype.name());
- return props_;
- }
- std::string ConvolutionBackwardData_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ConvolutionBackwardData>();
- static_cast<void>(op_);
- return "ConvolutionBackwardData";
- }
- } // anonymous namespace
- OP_TRAIT_REG(ConvolutionBackwardData, ConvolutionBackwardData)
- .hash(ConvolutionBackwardData_hash_impl)
- .is_same_st(ConvolutionBackwardData_is_same_st_impl)
- .props(ConvolutionBackwardData_props_impl)
- .make_name(ConvolutionBackwardData_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Copy);
-
- namespace {
- size_t Copy_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Copy>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.comp_node));
- return val;
- }
- bool Copy_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Copy>(),
- &&b_ = rhs_.cast_final_safe<Copy>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.comp_node != b_.comp_node) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Copy_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Copy>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("comp_node", op_.comp_node.to_string());
- return props_;
- }
- std::string Copy_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Copy>();
- static_cast<void>(op_);
- return "Copy";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Copy, Copy)
- .hash(Copy_hash_impl)
- .is_same_st(Copy_is_same_st_impl)
- .props(Copy_props_impl)
- .make_name(Copy_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Correlation);
-
- namespace {
- size_t Correlation_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Correlation>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.kernel_size));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.max_displacement));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride1));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride2));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_size));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.is_multiply));
- return val;
- }
- bool Correlation_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Correlation>(),
- &&b_ = rhs_.cast_final_safe<Correlation>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.format != b_.format) return false;
- if (a_.kernel_size != b_.kernel_size) return false;
- if (a_.max_displacement != b_.max_displacement) return false;
- if (a_.stride1 != b_.stride1) return false;
- if (a_.stride2 != b_.stride2) return false;
- if (a_.pad_size != b_.pad_size) return false;
- if (a_.is_multiply != b_.is_multiply) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Correlation_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Correlation>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.format){
- case Correlation::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case Correlation::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case Correlation::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case Correlation::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case Correlation::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case Correlation::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case Correlation::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case Correlation::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case Correlation::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case Correlation::Format::NCHW_WINOGRAD:
- props_.emplace_back("format", "NCHW_WINOGRAD");
- break;
- case Correlation::Format::NCHW88_WINOGRAD:
- props_.emplace_back("format", "NCHW88_WINOGRAD");
- break;
- case Correlation::Format::NCHW44_WINOGRAD:
- props_.emplace_back("format", "NCHW44_WINOGRAD");
- break;
- case Correlation::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case Correlation::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case Correlation::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case Correlation::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case Correlation::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case Correlation::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case Correlation::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case Correlation::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- props_.emplace_back("kernel_size", std::to_string(op_.kernel_size));
- props_.emplace_back("max_displacement", std::to_string(op_.max_displacement));
- props_.emplace_back("stride1", std::to_string(op_.stride1));
- props_.emplace_back("stride2", std::to_string(op_.stride2));
- props_.emplace_back("pad_size", std::to_string(op_.pad_size));
- props_.emplace_back("is_multiply", std::to_string(op_.is_multiply));
- return props_;
- }
- std::string Correlation_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Correlation>();
- static_cast<void>(op_);
- return "Correlation";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Correlation, Correlation)
- .hash(Correlation_hash_impl)
- .is_same_st(Correlation_is_same_st_impl)
- .props(Correlation_props_impl)
- .make_name(Correlation_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Cumsum);
-
- namespace {
- size_t Cumsum_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Cumsum>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.exclusive));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.reverse));
- return val;
- }
- bool Cumsum_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Cumsum>(),
- &&b_ = rhs_.cast_final_safe<Cumsum>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- if (a_.exclusive != b_.exclusive) return false;
- if (a_.reverse != b_.reverse) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Cumsum_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Cumsum>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- props_.emplace_back("exclusive", std::to_string(op_.exclusive));
- props_.emplace_back("reverse", std::to_string(op_.reverse));
- return props_;
- }
- std::string Cumsum_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Cumsum>();
- static_cast<void>(op_);
- return "Cumsum";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Cumsum, Cumsum)
- .hash(Cumsum_hash_impl)
- .is_same_st(Cumsum_is_same_st_impl)
- .props(Cumsum_props_impl)
- .make_name(Cumsum_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(CvtColor);
-
- namespace {
- size_t CvtColor_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CvtColor>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- return val;
- }
- bool CvtColor_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<CvtColor>(),
- &&b_ = rhs_.cast_final_safe<CvtColor>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> CvtColor_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CvtColor>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case CvtColor::Mode::RGB2GRAY:
- props_.emplace_back("mode", "RGB2GRAY");
- break;
- case CvtColor::Mode::RGB2YUV:
- props_.emplace_back("mode", "RGB2YUV");
- break;
- case CvtColor::Mode::YUV2RGB:
- props_.emplace_back("mode", "YUV2RGB");
- break;
- case CvtColor::Mode::GRAY2RGB:
- props_.emplace_back("mode", "GRAY2RGB");
- break;
- case CvtColor::Mode::RGBA2RGB:
- props_.emplace_back("mode", "RGBA2RGB");
- break;
- case CvtColor::Mode::RGBA2BGR:
- props_.emplace_back("mode", "RGBA2BGR");
- break;
- case CvtColor::Mode::RGBA2GRAY:
- props_.emplace_back("mode", "RGBA2GRAY");
- break;
- case CvtColor::Mode::RGB2BGR:
- props_.emplace_back("mode", "RGB2BGR");
- break;
- case CvtColor::Mode::BGR2GRAY:
- props_.emplace_back("mode", "BGR2GRAY");
- break;
- case CvtColor::Mode::BGR2RGB:
- props_.emplace_back("mode", "BGR2RGB");
- break;
- case CvtColor::Mode::YUV2GRAY_NV21:
- props_.emplace_back("mode", "YUV2GRAY_NV21");
- break;
- case CvtColor::Mode::YUV2RGB_NV21:
- props_.emplace_back("mode", "YUV2RGB_NV21");
- break;
- case CvtColor::Mode::YUV2BGR_NV21:
- props_.emplace_back("mode", "YUV2BGR_NV21");
- break;
- case CvtColor::Mode::YUV2GRAY_NV12:
- props_.emplace_back("mode", "YUV2GRAY_NV12");
- break;
- case CvtColor::Mode::YUV2RGB_NV12:
- props_.emplace_back("mode", "YUV2RGB_NV12");
- break;
- case CvtColor::Mode::YUV2BGR_NV12:
- props_.emplace_back("mode", "YUV2BGR_NV12");
- break;
- case CvtColor::Mode::YUV2GRAY_YV12:
- props_.emplace_back("mode", "YUV2GRAY_YV12");
- break;
- case CvtColor::Mode::YUV2RGB_YV12:
- props_.emplace_back("mode", "YUV2RGB_YV12");
- break;
- case CvtColor::Mode::YUV2BGR_YV12:
- props_.emplace_back("mode", "YUV2BGR_YV12");
- break;
- case CvtColor::Mode::YUV2GRAY_YU12:
- props_.emplace_back("mode", "YUV2GRAY_YU12");
- break;
- case CvtColor::Mode::YUV2RGB_YU12:
- props_.emplace_back("mode", "YUV2RGB_YU12");
- break;
- case CvtColor::Mode::YUV2BGR_YU12:
- props_.emplace_back("mode", "YUV2BGR_YU12");
- break;
- case CvtColor::Mode::YCrCb2RGB:
- props_.emplace_back("mode", "YCrCb2RGB");
- break;
- case CvtColor::Mode::YCrCb2BGR:
- props_.emplace_back("mode", "YCrCb2BGR");
- break;
- case CvtColor::Mode::BT601_YUV2RGB_NV21:
- props_.emplace_back("mode", "BT601_YUV2RGB_NV21");
- break;
- case CvtColor::Mode::BT601_YUV2BGR_NV21:
- props_.emplace_back("mode", "BT601_YUV2BGR_NV21");
- break;
- case CvtColor::Mode::BT601_YUV2RGB_NV12:
- props_.emplace_back("mode", "BT601_YUV2RGB_NV12");
- break;
- case CvtColor::Mode::BT601_YUV2BGR_NV12:
- props_.emplace_back("mode", "BT601_YUV2BGR_NV12");
- break;
- case CvtColor::Mode::BT601_YUV2RGB_YV12:
- props_.emplace_back("mode", "BT601_YUV2RGB_YV12");
- break;
- case CvtColor::Mode::BT601_YUV2BGR_YV12:
- props_.emplace_back("mode", "BT601_YUV2BGR_YV12");
- break;
- case CvtColor::Mode::BT601_YUV2RGB_YU12:
- props_.emplace_back("mode", "BT601_YUV2RGB_YU12");
- break;
- case CvtColor::Mode::BT601_YUV2BGR_YU12:
- props_.emplace_back("mode", "BT601_YUV2BGR_YU12");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- return props_;
- }
- std::string CvtColor_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CvtColor>();
- static_cast<void>(op_);
- return "CvtColor";
- }
- } // anonymous namespace
- OP_TRAIT_REG(CvtColor, CvtColor)
- .hash(CvtColor_hash_impl)
- .is_same_st(CvtColor_is_same_st_impl)
- .props(CvtColor_props_impl)
- .make_name(CvtColor_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(DeformableConv);
-
- namespace {
- size_t DeformableConv_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<DeformableConv>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.sparse));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.compute_mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- return val;
- }
- bool DeformableConv_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<DeformableConv>(),
- &&b_ = rhs_.cast_final_safe<DeformableConv>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.sparse != b_.sparse) return false;
- if (a_.format != b_.format) return false;
- if (a_.compute_mode != b_.compute_mode) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> DeformableConv_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<DeformableConv>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case DeformableConv::Mode::CROSS_CORRELATION:
- props_.emplace_back("mode", "CROSS_CORRELATION");
- break;
- case DeformableConv::Mode::CONVOLUTION:
- props_.emplace_back("mode", "CONVOLUTION");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- switch (op_.sparse){
- case DeformableConv::Sparse::DENSE:
- props_.emplace_back("sparse", "DENSE");
- break;
- case DeformableConv::Sparse::GROUP:
- props_.emplace_back("sparse", "GROUP");
- break;
- default:
- props_.emplace_back("sparse", "INVALID");
- break;
- }
- switch (op_.format){
- case DeformableConv::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case DeformableConv::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case DeformableConv::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case DeformableConv::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case DeformableConv::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case DeformableConv::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case DeformableConv::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case DeformableConv::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case DeformableConv::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case DeformableConv::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case DeformableConv::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case DeformableConv::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case DeformableConv::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case DeformableConv::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case DeformableConv::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case DeformableConv::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case DeformableConv::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case DeformableConv::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.compute_mode){
- case DeformableConv::ComputeMode::DEFAULT:
- props_.emplace_back("compute_mode", "DEFAULT");
- break;
- case DeformableConv::ComputeMode::FLOAT32:
- props_.emplace_back("compute_mode", "FLOAT32");
- break;
- default:
- props_.emplace_back("compute_mode", "INVALID");
- break;
- }
- switch (op_.strategy){
- case DeformableConv::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case DeformableConv::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case DeformableConv::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case DeformableConv::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- return props_;
- }
- std::string DeformableConv_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<DeformableConv>();
- static_cast<void>(op_);
- return "DeformableConv";
- }
- } // anonymous namespace
- OP_TRAIT_REG(DeformableConv, DeformableConv)
- .hash(DeformableConv_hash_impl)
- .is_same_st(DeformableConv_is_same_st_impl)
- .props(DeformableConv_props_impl)
- .make_name(DeformableConv_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(DeformablePSROIPooling);
-
- namespace {
- size_t DeformablePSROIPooling_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<DeformablePSROIPooling>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.no_trans));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.spatial_scale));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.trans_std));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pooled_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pooled_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.part_size));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.sample_per_part));
- return val;
- }
- bool DeformablePSROIPooling_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<DeformablePSROIPooling>(),
- &&b_ = rhs_.cast_final_safe<DeformablePSROIPooling>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.no_trans != b_.no_trans) return false;
- if (a_.spatial_scale != b_.spatial_scale) return false;
- if (a_.trans_std != b_.trans_std) return false;
- if (a_.pooled_h != b_.pooled_h) return false;
- if (a_.pooled_w != b_.pooled_w) return false;
- if (a_.part_size != b_.part_size) return false;
- if (a_.sample_per_part != b_.sample_per_part) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> DeformablePSROIPooling_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<DeformablePSROIPooling>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("no_trans", std::to_string(op_.no_trans));
- props_.emplace_back("spatial_scale", std::to_string(op_.spatial_scale));
- props_.emplace_back("trans_std", std::to_string(op_.trans_std));
- props_.emplace_back("pooled_h", std::to_string(op_.pooled_h));
- props_.emplace_back("pooled_w", std::to_string(op_.pooled_w));
- props_.emplace_back("part_size", std::to_string(op_.part_size));
- props_.emplace_back("sample_per_part", std::to_string(op_.sample_per_part));
- return props_;
- }
- std::string DeformablePSROIPooling_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<DeformablePSROIPooling>();
- static_cast<void>(op_);
- return "DeformablePSROIPooling";
- }
- } // anonymous namespace
- OP_TRAIT_REG(DeformablePSROIPooling, DeformablePSROIPooling)
- .hash(DeformablePSROIPooling_hash_impl)
- .is_same_st(DeformablePSROIPooling_is_same_st_impl)
- .props(DeformablePSROIPooling_props_impl)
- .make_name(DeformablePSROIPooling_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Diag);
-
- namespace {
- size_t Diag_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Diag>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.k));
- return val;
- }
- bool Diag_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Diag>(),
- &&b_ = rhs_.cast_final_safe<Diag>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.k != b_.k) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Diag_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Diag>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("k", std::to_string(op_.k));
- return props_;
- }
- std::string Diag_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Diag>();
- static_cast<void>(op_);
- return "Diag";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Diag, Diag)
- .hash(Diag_hash_impl)
- .is_same_st(Diag_is_same_st_impl)
- .props(Diag_props_impl)
- .make_name(Diag_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Dimshuffle);
-
- namespace {
- size_t Dimshuffle_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Dimshuffle>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pattern));
- return val;
- }
- bool Dimshuffle_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Dimshuffle>(),
- &&b_ = rhs_.cast_final_safe<Dimshuffle>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.pattern != b_.pattern) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Dimshuffle_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Dimshuffle>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("pattern", "{std::vector}");
- return props_;
- }
- std::string Dimshuffle_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Dimshuffle>();
- static_cast<void>(op_);
- return "Dimshuffle";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Dimshuffle, Dimshuffle)
- .hash(Dimshuffle_hash_impl)
- .is_same_st(Dimshuffle_is_same_st_impl)
- .props(Dimshuffle_props_impl)
- .make_name(Dimshuffle_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Dot);
-
- namespace {
- size_t Dot_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Dot>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- return val;
- }
- bool Dot_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Dot>(),
- &&b_ = rhs_.cast_final_safe<Dot>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Dot_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Dot>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- return props_;
- }
- std::string Dot_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Dot>();
- static_cast<void>(op_);
- return "Dot";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Dot, Dot)
- .hash(Dot_hash_impl)
- .is_same_st(Dot_is_same_st_impl)
- .props(Dot_props_impl)
- .make_name(Dot_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Dropout);
-
- namespace {
- size_t Dropout_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Dropout>();
- static_cast<void>(op_);
-
- return mgb::hash_pair_combine(
- mgb::hash(op_.dyn_typeinfo()),
- mgb::hash_pair_combine(
- mgb::hash(op_.drop_prob),
- mgb::hash(op_.handle))
- );
- }
- bool Dropout_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Dropout>(),
- &&b_ = rhs_.cast_final_safe<Dropout>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return a_.handle == b_.handle && a_.drop_prob == b_.drop_prob;
- }
- std::vector<std::pair<const char*, std::string>> Dropout_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Dropout>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("drop_prob", std::to_string(op_.drop_prob));
- props_.emplace_back("seed", std::to_string(op_.seed));
- props_.emplace_back("handle", std::to_string(op_.handle));
- return props_;
- }
- std::string Dropout_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Dropout>();
- static_cast<void>(op_);
- return "Dropout";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Dropout, Dropout)
- .hash(Dropout_hash_impl)
- .is_same_st(Dropout_is_same_st_impl)
- .props(Dropout_props_impl)
- .make_name(Dropout_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Elemwise);
-
- namespace {
- size_t Elemwise_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Elemwise>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- return val;
- }
- bool Elemwise_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Elemwise>(),
- &&b_ = rhs_.cast_final_safe<Elemwise>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Elemwise_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Elemwise>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case Elemwise::Mode::RELU:
- props_.emplace_back("mode", "RELU");
- break;
- case Elemwise::Mode::ABS:
- props_.emplace_back("mode", "ABS");
- break;
- case Elemwise::Mode::ACOS:
- props_.emplace_back("mode", "ACOS");
- break;
- case Elemwise::Mode::ASIN:
- props_.emplace_back("mode", "ASIN");
- break;
- case Elemwise::Mode::CEIL:
- props_.emplace_back("mode", "CEIL");
- break;
- case Elemwise::Mode::COS:
- props_.emplace_back("mode", "COS");
- break;
- case Elemwise::Mode::EXP:
- props_.emplace_back("mode", "EXP");
- break;
- case Elemwise::Mode::EXPM1:
- props_.emplace_back("mode", "EXPM1");
- break;
- case Elemwise::Mode::FLOOR:
- props_.emplace_back("mode", "FLOOR");
- break;
- case Elemwise::Mode::LOG:
- props_.emplace_back("mode", "LOG");
- break;
- case Elemwise::Mode::LOG1P:
- props_.emplace_back("mode", "LOG1P");
- break;
- case Elemwise::Mode::NEGATE:
- props_.emplace_back("mode", "NEGATE");
- break;
- case Elemwise::Mode::SIGMOID:
- props_.emplace_back("mode", "SIGMOID");
- break;
- case Elemwise::Mode::SIN:
- props_.emplace_back("mode", "SIN");
- break;
- case Elemwise::Mode::TANH:
- props_.emplace_back("mode", "TANH");
- break;
- case Elemwise::Mode::ABS_GRAD:
- props_.emplace_back("mode", "ABS_GRAD");
- break;
- case Elemwise::Mode::ADD:
- props_.emplace_back("mode", "ADD");
- break;
- case Elemwise::Mode::FLOOR_DIV:
- props_.emplace_back("mode", "FLOOR_DIV");
- break;
- case Elemwise::Mode::MAX:
- props_.emplace_back("mode", "MAX");
- break;
- case Elemwise::Mode::MIN:
- props_.emplace_back("mode", "MIN");
- break;
- case Elemwise::Mode::MOD:
- props_.emplace_back("mode", "MOD");
- break;
- case Elemwise::Mode::MUL:
- props_.emplace_back("mode", "MUL");
- break;
- case Elemwise::Mode::POW:
- props_.emplace_back("mode", "POW");
- break;
- case Elemwise::Mode::SIGMOID_GRAD:
- props_.emplace_back("mode", "SIGMOID_GRAD");
- break;
- case Elemwise::Mode::SUB:
- props_.emplace_back("mode", "SUB");
- break;
- case Elemwise::Mode::SWITCH_GT0:
- props_.emplace_back("mode", "SWITCH_GT0");
- break;
- case Elemwise::Mode::TANH_GRAD:
- props_.emplace_back("mode", "TANH_GRAD");
- break;
- case Elemwise::Mode::TRUE_DIV:
- props_.emplace_back("mode", "TRUE_DIV");
- break;
- case Elemwise::Mode::LOG_SUM_EXP:
- props_.emplace_back("mode", "LOG_SUM_EXP");
- break;
- case Elemwise::Mode::LT:
- props_.emplace_back("mode", "LT");
- break;
- case Elemwise::Mode::LEQ:
- props_.emplace_back("mode", "LEQ");
- break;
- case Elemwise::Mode::EQ:
- props_.emplace_back("mode", "EQ");
- break;
- case Elemwise::Mode::SHL:
- props_.emplace_back("mode", "SHL");
- break;
- case Elemwise::Mode::SHR:
- props_.emplace_back("mode", "SHR");
- break;
- case Elemwise::Mode::COND_LEQ_MOV:
- props_.emplace_back("mode", "COND_LEQ_MOV");
- break;
- case Elemwise::Mode::FUSE_MUL_ADD3:
- props_.emplace_back("mode", "FUSE_MUL_ADD3");
- break;
- case Elemwise::Mode::FUSE_MUL_ADD4:
- props_.emplace_back("mode", "FUSE_MUL_ADD4");
- break;
- case Elemwise::Mode::FUSE_ADD_RELU:
- props_.emplace_back("mode", "FUSE_ADD_RELU");
- break;
- case Elemwise::Mode::FUSE_ADD_SIGMOID:
- props_.emplace_back("mode", "FUSE_ADD_SIGMOID");
- break;
- case Elemwise::Mode::FUSE_ADD_TANH:
- props_.emplace_back("mode", "FUSE_ADD_TANH");
- break;
- case Elemwise::Mode::FAST_TANH:
- props_.emplace_back("mode", "FAST_TANH");
- break;
- case Elemwise::Mode::FAST_TANH_GRAD:
- props_.emplace_back("mode", "FAST_TANH_GRAD");
- break;
- case Elemwise::Mode::ROUND:
- props_.emplace_back("mode", "ROUND");
- break;
- case Elemwise::Mode::RMULH:
- props_.emplace_back("mode", "RMULH");
- break;
- case Elemwise::Mode::ATAN2:
- props_.emplace_back("mode", "ATAN2");
- break;
- case Elemwise::Mode::ERF:
- props_.emplace_back("mode", "ERF");
- break;
- case Elemwise::Mode::ERFINV:
- props_.emplace_back("mode", "ERFINV");
- break;
- case Elemwise::Mode::ERFC:
- props_.emplace_back("mode", "ERFC");
- break;
- case Elemwise::Mode::ERFCINV:
- props_.emplace_back("mode", "ERFCINV");
- break;
- case Elemwise::Mode::H_SWISH:
- props_.emplace_back("mode", "H_SWISH");
- break;
- case Elemwise::Mode::H_SWISH_GRAD:
- props_.emplace_back("mode", "H_SWISH_GRAD");
- break;
- case Elemwise::Mode::FUSE_ADD_H_SWISH:
- props_.emplace_back("mode", "FUSE_ADD_H_SWISH");
- break;
- case Elemwise::Mode::NOT:
- props_.emplace_back("mode", "NOT");
- break;
- case Elemwise::Mode::AND:
- props_.emplace_back("mode", "AND");
- break;
- case Elemwise::Mode::OR:
- props_.emplace_back("mode", "OR");
- break;
- case Elemwise::Mode::XOR:
- props_.emplace_back("mode", "XOR");
- break;
- case Elemwise::Mode::SILU:
- props_.emplace_back("mode", "SILU");
- break;
- case Elemwise::Mode::SILU_GRAD:
- props_.emplace_back("mode", "SILU_GRAD");
- break;
- case Elemwise::Mode::GELU:
- props_.emplace_back("mode", "GELU");
- break;
- case Elemwise::Mode::GELU_GRAD:
- props_.emplace_back("mode", "GELU_GRAD");
- break;
- case Elemwise::Mode::COND_LT_MOV:
- props_.emplace_back("mode", "COND_LT_MOV");
- break;
- case Elemwise::Mode::NEQ:
- props_.emplace_back("mode", "NEQ");
- break;
- case Elemwise::Mode::ISNAN:
- props_.emplace_back("mode", "ISNAN");
- break;
- case Elemwise::Mode::ISINF:
- props_.emplace_back("mode", "ISINF");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- return props_;
- }
- std::string Elemwise_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Elemwise>();
- static_cast<void>(op_);
-
- return to_string(op_.mode);
- }
- } // anonymous namespace
- OP_TRAIT_REG(Elemwise, Elemwise)
- .hash(Elemwise_hash_impl)
- .is_same_st(Elemwise_is_same_st_impl)
- .props(Elemwise_props_impl)
- .make_name(Elemwise_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(ElemwiseMultiType);
-
- namespace {
- size_t ElemwiseMultiType_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ElemwiseMultiType>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dtype.handle()));
- return val;
- }
- bool ElemwiseMultiType_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<ElemwiseMultiType>(),
- &&b_ = rhs_.cast_final_safe<ElemwiseMultiType>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.dtype != b_.dtype) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> ElemwiseMultiType_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ElemwiseMultiType>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case ElemwiseMultiType::Mode::FUSE_MUL_ADD3_INT16x32x32x32:
- props_.emplace_back("mode", "FUSE_MUL_ADD3_INT16x32x32x32");
- break;
- case ElemwiseMultiType::Mode::FUSE_MUL_ADD3_IXxF32xF32xI8:
- props_.emplace_back("mode", "FUSE_MUL_ADD3_IXxF32xF32xI8");
- break;
- case ElemwiseMultiType::Mode::ROUND_SHR_SATURATE_IXxI8xI8:
- props_.emplace_back("mode", "ROUND_SHR_SATURATE_IXxI8xI8");
- break;
- case ElemwiseMultiType::Mode::FUSE_ADD_RMULH_ROUND_SHR_SATURATE_INT16x16x16x8:
- props_.emplace_back("mode", "FUSE_ADD_RMULH_ROUND_SHR_SATURATE_INT16x16x16x8");
- break;
- case ElemwiseMultiType::Mode::FUSE_ADD_RMULH_ROUND_SHR_SATURATE_INT32x32x32x8:
- props_.emplace_back("mode", "FUSE_ADD_RMULH_ROUND_SHR_SATURATE_INT32x32x32x8");
- break;
- case ElemwiseMultiType::Mode::ROUND_SHR_SATURATE_IXxI8xI16:
- props_.emplace_back("mode", "ROUND_SHR_SATURATE_IXxI8xI16");
- break;
- case ElemwiseMultiType::Mode::QADD:
- props_.emplace_back("mode", "QADD");
- break;
- case ElemwiseMultiType::Mode::QFUSE_ADD_RELU:
- props_.emplace_back("mode", "QFUSE_ADD_RELU");
- break;
- case ElemwiseMultiType::Mode::QMUL:
- props_.emplace_back("mode", "QMUL");
- break;
- case ElemwiseMultiType::Mode::QMIN:
- props_.emplace_back("mode", "QMIN");
- break;
- case ElemwiseMultiType::Mode::QMAX:
- props_.emplace_back("mode", "QMAX");
- break;
- case ElemwiseMultiType::Mode::QSUB:
- props_.emplace_back("mode", "QSUB");
- break;
- case ElemwiseMultiType::Mode::QTRUE_DIV:
- props_.emplace_back("mode", "QTRUE_DIV");
- break;
- case ElemwiseMultiType::Mode::QFUSE_ADD_SIGMOID:
- props_.emplace_back("mode", "QFUSE_ADD_SIGMOID");
- break;
- case ElemwiseMultiType::Mode::QFUSE_ADD_TANH:
- props_.emplace_back("mode", "QFUSE_ADD_TANH");
- break;
- case ElemwiseMultiType::Mode::QRELU:
- props_.emplace_back("mode", "QRELU");
- break;
- case ElemwiseMultiType::Mode::QABS:
- props_.emplace_back("mode", "QABS");
- break;
- case ElemwiseMultiType::Mode::QSIGMOID:
- props_.emplace_back("mode", "QSIGMOID");
- break;
- case ElemwiseMultiType::Mode::QEXP:
- props_.emplace_back("mode", "QEXP");
- break;
- case ElemwiseMultiType::Mode::QTANH:
- props_.emplace_back("mode", "QTANH");
- break;
- case ElemwiseMultiType::Mode::QFUSE_MUL_ADD3:
- props_.emplace_back("mode", "QFUSE_MUL_ADD3");
- break;
- case ElemwiseMultiType::Mode::QFAST_TANH:
- props_.emplace_back("mode", "QFAST_TANH");
- break;
- case ElemwiseMultiType::Mode::QNEGATE:
- props_.emplace_back("mode", "QNEGATE");
- break;
- case ElemwiseMultiType::Mode::QACOS:
- props_.emplace_back("mode", "QACOS");
- break;
- case ElemwiseMultiType::Mode::QASIN:
- props_.emplace_back("mode", "QASIN");
- break;
- case ElemwiseMultiType::Mode::QCEIL:
- props_.emplace_back("mode", "QCEIL");
- break;
- case ElemwiseMultiType::Mode::QCOS:
- props_.emplace_back("mode", "QCOS");
- break;
- case ElemwiseMultiType::Mode::QEXPM1:
- props_.emplace_back("mode", "QEXPM1");
- break;
- case ElemwiseMultiType::Mode::QFLOOR:
- props_.emplace_back("mode", "QFLOOR");
- break;
- case ElemwiseMultiType::Mode::QLOG:
- props_.emplace_back("mode", "QLOG");
- break;
- case ElemwiseMultiType::Mode::QLOG1P:
- props_.emplace_back("mode", "QLOG1P");
- break;
- case ElemwiseMultiType::Mode::QSIN:
- props_.emplace_back("mode", "QSIN");
- break;
- case ElemwiseMultiType::Mode::QROUND:
- props_.emplace_back("mode", "QROUND");
- break;
- case ElemwiseMultiType::Mode::QERF:
- props_.emplace_back("mode", "QERF");
- break;
- case ElemwiseMultiType::Mode::QERFINV:
- props_.emplace_back("mode", "QERFINV");
- break;
- case ElemwiseMultiType::Mode::QERFC:
- props_.emplace_back("mode", "QERFC");
- break;
- case ElemwiseMultiType::Mode::QERFCINV:
- props_.emplace_back("mode", "QERFCINV");
- break;
- case ElemwiseMultiType::Mode::QABS_GRAD:
- props_.emplace_back("mode", "QABS_GRAD");
- break;
- case ElemwiseMultiType::Mode::QFLOOR_DIV:
- props_.emplace_back("mode", "QFLOOR_DIV");
- break;
- case ElemwiseMultiType::Mode::QMOD:
- props_.emplace_back("mode", "QMOD");
- break;
- case ElemwiseMultiType::Mode::QSIGMOID_GRAD:
- props_.emplace_back("mode", "QSIGMOID_GRAD");
- break;
- case ElemwiseMultiType::Mode::QSWITCH_GT0:
- props_.emplace_back("mode", "QSWITCH_GT0");
- break;
- case ElemwiseMultiType::Mode::QTANH_GRAD:
- props_.emplace_back("mode", "QTANH_GRAD");
- break;
- case ElemwiseMultiType::Mode::QLT:
- props_.emplace_back("mode", "QLT");
- break;
- case ElemwiseMultiType::Mode::QLEQ:
- props_.emplace_back("mode", "QLEQ");
- break;
- case ElemwiseMultiType::Mode::QEQ:
- props_.emplace_back("mode", "QEQ");
- break;
- case ElemwiseMultiType::Mode::QPOW:
- props_.emplace_back("mode", "QPOW");
- break;
- case ElemwiseMultiType::Mode::QLOG_SUM_EXP:
- props_.emplace_back("mode", "QLOG_SUM_EXP");
- break;
- case ElemwiseMultiType::Mode::QFAST_TANH_GRAD:
- props_.emplace_back("mode", "QFAST_TANH_GRAD");
- break;
- case ElemwiseMultiType::Mode::QATAN2:
- props_.emplace_back("mode", "QATAN2");
- break;
- case ElemwiseMultiType::Mode::QCOND_LEQ_MOV:
- props_.emplace_back("mode", "QCOND_LEQ_MOV");
- break;
- case ElemwiseMultiType::Mode::QH_SWISH:
- props_.emplace_back("mode", "QH_SWISH");
- break;
- case ElemwiseMultiType::Mode::QFUSE_ADD_H_SWISH:
- props_.emplace_back("mode", "QFUSE_ADD_H_SWISH");
- break;
- case ElemwiseMultiType::Mode::QH_SWISH_GRAD:
- props_.emplace_back("mode", "QH_SWISH_GRAD");
- break;
- case ElemwiseMultiType::Mode::FUSE_MUL_ADD3_INT16xF32xF32xF32:
- props_.emplace_back("mode", "FUSE_MUL_ADD3_INT16xF32xF32xF32");
- break;
- case ElemwiseMultiType::Mode::MUL_INT16xF32xF32:
- props_.emplace_back("mode", "MUL_INT16xF32xF32");
- break;
- case ElemwiseMultiType::Mode::FUSE_MUL_ADD3_UINT8xF32xF32xF32:
- props_.emplace_back("mode", "FUSE_MUL_ADD3_UINT8xF32xF32xF32");
- break;
- case ElemwiseMultiType::Mode::QCOND_LT_MOV:
- props_.emplace_back("mode", "QCOND_LT_MOV");
- break;
- case ElemwiseMultiType::Mode::EQ:
- props_.emplace_back("mode", "EQ");
- break;
- case ElemwiseMultiType::Mode::NEQ:
- props_.emplace_back("mode", "NEQ");
- break;
- case ElemwiseMultiType::Mode::LT:
- props_.emplace_back("mode", "LT");
- break;
- case ElemwiseMultiType::Mode::LEQ:
- props_.emplace_back("mode", "LEQ");
- break;
- case ElemwiseMultiType::Mode::ISNAN:
- props_.emplace_back("mode", "ISNAN");
- break;
- case ElemwiseMultiType::Mode::ISINF:
- props_.emplace_back("mode", "ISINF");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("dtype", op_.dtype.name());
- return props_;
- }
- std::string ElemwiseMultiType_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ElemwiseMultiType>();
- static_cast<void>(op_);
-
- return to_string(op_.mode);
- }
- } // anonymous namespace
- OP_TRAIT_REG(ElemwiseMultiType, ElemwiseMultiType)
- .hash(ElemwiseMultiType_hash_impl)
- .is_same_st(ElemwiseMultiType_is_same_st_impl)
- .props(ElemwiseMultiType_props_impl)
- .make_name(ElemwiseMultiType_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(ExternOpr);
-
- namespace {
- size_t ExternOpr_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ExternOpr>();
- static_cast<void>(op_);
-
- return mgb::hash_pair_combine(
- mgb::hash(op_.dyn_typeinfo()),
- mgb::hash_pair_combine(
- mgb::hash(op_.name),
- mgb::hash(op_.data))
- );
- }
- bool ExternOpr_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<ExternOpr>(),
- &&b_ = rhs_.cast_final_safe<ExternOpr>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.output_shapes != b_.output_shapes) return false;
- if (a_.name != b_.name) return false;
- if (a_.data != b_.data) return false;
- if (a_.data_len != b_.data_len) return false;
- if (a_.output_dtypes != b_.output_dtypes) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> ExternOpr_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ExternOpr>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("output_shapes", "{std::vector}");
- props_.emplace_back("name", op_.name);
- props_.emplace_back("data", op_.data);
- props_.emplace_back("data_len", std::to_string(op_.data_len));
- props_.emplace_back("output_dtypes", "{std::vector}");
- return props_;
- }
- std::string ExternOpr_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ExternOpr>();
- static_cast<void>(op_);
- return "ExternOpr";
- }
- } // anonymous namespace
- OP_TRAIT_REG(ExternOpr, ExternOpr)
- .hash(ExternOpr_hash_impl)
- .is_same_st(ExternOpr_is_same_st_impl)
- .props(ExternOpr_props_impl)
- .make_name(ExternOpr_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Eye);
-
- namespace {
- size_t Eye_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Eye>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.k));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dtype.handle()));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.comp_node));
- return val;
- }
- bool Eye_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Eye>(),
- &&b_ = rhs_.cast_final_safe<Eye>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.k != b_.k) return false;
- if (a_.dtype != b_.dtype) return false;
- if (a_.comp_node != b_.comp_node) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Eye_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Eye>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("k", std::to_string(op_.k));
- props_.emplace_back("dtype", op_.dtype.name());
- props_.emplace_back("comp_node", op_.comp_node.to_string());
- return props_;
- }
- std::string Eye_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Eye>();
- static_cast<void>(op_);
- return "Eye";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Eye, Eye)
- .hash(Eye_hash_impl)
- .is_same_st(Eye_is_same_st_impl)
- .props(Eye_props_impl)
- .make_name(Eye_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(FakeQuant);
-
- namespace {
- size_t FakeQuant_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<FakeQuant>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.qmin));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.qmax));
- return val;
- }
- bool FakeQuant_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<FakeQuant>(),
- &&b_ = rhs_.cast_final_safe<FakeQuant>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.qmin != b_.qmin) return false;
- if (a_.qmax != b_.qmax) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> FakeQuant_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<FakeQuant>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("qmin", std::to_string(op_.qmin));
- props_.emplace_back("qmax", std::to_string(op_.qmax));
- return props_;
- }
- std::string FakeQuant_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<FakeQuant>();
- static_cast<void>(op_);
- return "FakeQuant";
- }
- } // anonymous namespace
- OP_TRAIT_REG(FakeQuant, FakeQuant)
- .hash(FakeQuant_hash_impl)
- .is_same_st(FakeQuant_is_same_st_impl)
- .props(FakeQuant_props_impl)
- .make_name(FakeQuant_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(FastpathCopy);
-
- namespace {
- size_t FastpathCopy_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<FastpathCopy>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- return val;
- }
- bool FastpathCopy_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<FastpathCopy>(),
- &&b_ = rhs_.cast_final_safe<FastpathCopy>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return true;
- }
- std::vector<std::pair<const char*, std::string>> FastpathCopy_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<FastpathCopy>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- return props_;
- }
- std::string FastpathCopy_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<FastpathCopy>();
- static_cast<void>(op_);
- return "FastpathCopy";
- }
- } // anonymous namespace
- OP_TRAIT_REG(FastpathCopy, FastpathCopy)
- .hash(FastpathCopy_hash_impl)
- .is_same_st(FastpathCopy_is_same_st_impl)
- .props(FastpathCopy_props_impl)
- .make_name(FastpathCopy_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(GammaRNG);
-
- namespace {
- size_t GammaRNG_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GammaRNG>();
- static_cast<void>(op_);
-
- return mgb::hash_pair_combine(
- mgb::hash(op_.dyn_typeinfo()),
- mgb::hash(op_.handle)
- );
- }
- bool GammaRNG_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<GammaRNG>(),
- &&b_ = rhs_.cast_final_safe<GammaRNG>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return a_.handle == b_.handle;
- }
- std::vector<std::pair<const char*, std::string>> GammaRNG_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GammaRNG>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("seed", std::to_string(op_.seed));
- props_.emplace_back("handle", std::to_string(op_.handle));
- return props_;
- }
- std::string GammaRNG_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GammaRNG>();
- static_cast<void>(op_);
- return "GammaRNG";
- }
- } // anonymous namespace
- OP_TRAIT_REG(GammaRNG, GammaRNG)
- .hash(GammaRNG_hash_impl)
- .is_same_st(GammaRNG_is_same_st_impl)
- .props(GammaRNG_props_impl)
- .make_name(GammaRNG_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(GaussianRNG);
-
- namespace {
- size_t GaussianRNG_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GaussianRNG>();
- static_cast<void>(op_);
-
- return mgb::hash_pair_combine(
- mgb::hash(op_.dyn_typeinfo()),
- mgb::hash_pair_combine(
- mgb::hash(op_.handle),
- mgb::hash_pair_combine(
- mgb::hash(op_.mean),
- mgb::hash_pair_combine(
- mgb::hash(op_.std),
- mgb::hash(op_.dtype.enumv())
- )
- )
- )
- );
- }
- bool GaussianRNG_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<GaussianRNG>(),
- &&b_ = rhs_.cast_final_safe<GaussianRNG>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return a_.handle == b_.handle && a_.mean == b_.mean && a_.std == b_.std && a_.dtype == b_.dtype;
- }
- std::vector<std::pair<const char*, std::string>> GaussianRNG_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GaussianRNG>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("seed", std::to_string(op_.seed));
- props_.emplace_back("mean", std::to_string(op_.mean));
- props_.emplace_back("std", std::to_string(op_.std));
- props_.emplace_back("dtype", op_.dtype.name());
- props_.emplace_back("handle", std::to_string(op_.handle));
- return props_;
- }
- std::string GaussianRNG_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GaussianRNG>();
- static_cast<void>(op_);
- return "GaussianRNG";
- }
- } // anonymous namespace
- OP_TRAIT_REG(GaussianRNG, GaussianRNG)
- .hash(GaussianRNG_hash_impl)
- .is_same_st(GaussianRNG_is_same_st_impl)
- .props(GaussianRNG_props_impl)
- .make_name(GaussianRNG_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(GetVarShape);
-
- namespace {
- size_t GetVarShape_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GetVarShape>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- return val;
- }
- bool GetVarShape_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<GetVarShape>(),
- &&b_ = rhs_.cast_final_safe<GetVarShape>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> GetVarShape_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GetVarShape>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- return props_;
- }
- std::string GetVarShape_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GetVarShape>();
- static_cast<void>(op_);
- return "GetVarShape";
- }
- } // anonymous namespace
- OP_TRAIT_REG(GetVarShape, GetVarShape)
- .hash(GetVarShape_hash_impl)
- .is_same_st(GetVarShape_is_same_st_impl)
- .props(GetVarShape_props_impl)
- .make_name(GetVarShape_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(GroupLocal);
-
- namespace {
- size_t GroupLocal_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GroupLocal>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.sparse));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.compute_mode));
- return val;
- }
- bool GroupLocal_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<GroupLocal>(),
- &&b_ = rhs_.cast_final_safe<GroupLocal>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.sparse != b_.sparse) return false;
- if (a_.format != b_.format) return false;
- if (a_.compute_mode != b_.compute_mode) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> GroupLocal_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GroupLocal>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case GroupLocal::Mode::CROSS_CORRELATION:
- props_.emplace_back("mode", "CROSS_CORRELATION");
- break;
- case GroupLocal::Mode::CONVOLUTION:
- props_.emplace_back("mode", "CONVOLUTION");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- switch (op_.sparse){
- case GroupLocal::Sparse::DENSE:
- props_.emplace_back("sparse", "DENSE");
- break;
- case GroupLocal::Sparse::GROUP:
- props_.emplace_back("sparse", "GROUP");
- break;
- default:
- props_.emplace_back("sparse", "INVALID");
- break;
- }
- switch (op_.format){
- case GroupLocal::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case GroupLocal::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case GroupLocal::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case GroupLocal::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case GroupLocal::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case GroupLocal::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case GroupLocal::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case GroupLocal::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case GroupLocal::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case GroupLocal::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case GroupLocal::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case GroupLocal::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case GroupLocal::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case GroupLocal::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case GroupLocal::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case GroupLocal::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case GroupLocal::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case GroupLocal::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.compute_mode){
- case GroupLocal::ComputeMode::DEFAULT:
- props_.emplace_back("compute_mode", "DEFAULT");
- break;
- case GroupLocal::ComputeMode::FLOAT32:
- props_.emplace_back("compute_mode", "FLOAT32");
- break;
- default:
- props_.emplace_back("compute_mode", "INVALID");
- break;
- }
- return props_;
- }
- std::string GroupLocal_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GroupLocal>();
- static_cast<void>(op_);
- return "GroupLocal";
- }
- } // anonymous namespace
- OP_TRAIT_REG(GroupLocal, GroupLocal)
- .hash(GroupLocal_hash_impl)
- .is_same_st(GroupLocal_is_same_st_impl)
- .props(GroupLocal_props_impl)
- .make_name(GroupLocal_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Identity);
-
- namespace {
- size_t Identity_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Identity>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- return val;
- }
- bool Identity_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Identity>(),
- &&b_ = rhs_.cast_final_safe<Identity>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Identity_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Identity>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- return props_;
- }
- std::string Identity_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Identity>();
- static_cast<void>(op_);
- return "Identity";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Identity, Identity)
- .hash(Identity_hash_impl)
- .is_same_st(Identity_is_same_st_impl)
- .props(Identity_props_impl)
- .make_name(Identity_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Images2Neibs);
-
- namespace {
- size_t Images2Neibs_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Images2Neibs>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.window_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.window_w));
- return val;
- }
- bool Images2Neibs_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Images2Neibs>(),
- &&b_ = rhs_.cast_final_safe<Images2Neibs>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.window_h != b_.window_h) return false;
- if (a_.window_w != b_.window_w) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Images2Neibs_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Images2Neibs>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- props_.emplace_back("window_h", std::to_string(op_.window_h));
- props_.emplace_back("window_w", std::to_string(op_.window_w));
- return props_;
- }
- std::string Images2Neibs_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Images2Neibs>();
- static_cast<void>(op_);
- return "Images2Neibs";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Images2Neibs, Images2Neibs)
- .hash(Images2Neibs_hash_impl)
- .is_same_st(Images2Neibs_is_same_st_impl)
- .props(Images2Neibs_props_impl)
- .make_name(Images2Neibs_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(IncrMeshIndexing);
-
- namespace {
- size_t IncrMeshIndexing_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IncrMeshIndexing>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool IncrMeshIndexing_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<IncrMeshIndexing>(),
- &&b_ = rhs_.cast_final_safe<IncrMeshIndexing>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> IncrMeshIndexing_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IncrMeshIndexing>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string IncrMeshIndexing_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IncrMeshIndexing>();
- static_cast<void>(op_);
- return "IncrMeshIndexing";
- }
- } // anonymous namespace
- OP_TRAIT_REG(IncrMeshIndexing, IncrMeshIndexing)
- .hash(IncrMeshIndexing_hash_impl)
- .is_same_st(IncrMeshIndexing_is_same_st_impl)
- .props(IncrMeshIndexing_props_impl)
- .make_name(IncrMeshIndexing_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(IncrSubtensor);
-
- namespace {
- size_t IncrSubtensor_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IncrSubtensor>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool IncrSubtensor_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<IncrSubtensor>(),
- &&b_ = rhs_.cast_final_safe<IncrSubtensor>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> IncrSubtensor_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IncrSubtensor>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string IncrSubtensor_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IncrSubtensor>();
- static_cast<void>(op_);
- return "IncrSubtensor";
- }
- } // anonymous namespace
- OP_TRAIT_REG(IncrSubtensor, IncrSubtensor)
- .hash(IncrSubtensor_hash_impl)
- .is_same_st(IncrSubtensor_is_same_st_impl)
- .props(IncrSubtensor_props_impl)
- .make_name(IncrSubtensor_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(IndexingIncrMultiAxisVec);
-
- namespace {
- size_t IndexingIncrMultiAxisVec_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingIncrMultiAxisVec>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool IndexingIncrMultiAxisVec_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<IndexingIncrMultiAxisVec>(),
- &&b_ = rhs_.cast_final_safe<IndexingIncrMultiAxisVec>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> IndexingIncrMultiAxisVec_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingIncrMultiAxisVec>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string IndexingIncrMultiAxisVec_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingIncrMultiAxisVec>();
- static_cast<void>(op_);
- return "IndexingIncrMultiAxisVec";
- }
- } // anonymous namespace
- OP_TRAIT_REG(IndexingIncrMultiAxisVec, IndexingIncrMultiAxisVec)
- .hash(IndexingIncrMultiAxisVec_hash_impl)
- .is_same_st(IndexingIncrMultiAxisVec_is_same_st_impl)
- .props(IndexingIncrMultiAxisVec_props_impl)
- .make_name(IndexingIncrMultiAxisVec_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(IndexingMultiAxisVec);
-
- namespace {
- size_t IndexingMultiAxisVec_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingMultiAxisVec>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool IndexingMultiAxisVec_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<IndexingMultiAxisVec>(),
- &&b_ = rhs_.cast_final_safe<IndexingMultiAxisVec>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> IndexingMultiAxisVec_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingMultiAxisVec>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string IndexingMultiAxisVec_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingMultiAxisVec>();
- static_cast<void>(op_);
- return "IndexingMultiAxisVec";
- }
- } // anonymous namespace
- OP_TRAIT_REG(IndexingMultiAxisVec, IndexingMultiAxisVec)
- .hash(IndexingMultiAxisVec_hash_impl)
- .is_same_st(IndexingMultiAxisVec_is_same_st_impl)
- .props(IndexingMultiAxisVec_props_impl)
- .make_name(IndexingMultiAxisVec_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(IndexingOneHot);
-
- namespace {
- size_t IndexingOneHot_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingOneHot>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.ndim));
- return val;
- }
- bool IndexingOneHot_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<IndexingOneHot>(),
- &&b_ = rhs_.cast_final_safe<IndexingOneHot>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- if (a_.ndim != b_.ndim) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> IndexingOneHot_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingOneHot>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- props_.emplace_back("ndim", std::to_string(op_.ndim));
- return props_;
- }
- std::string IndexingOneHot_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingOneHot>();
- static_cast<void>(op_);
- return "IndexingOneHot";
- }
- } // anonymous namespace
- OP_TRAIT_REG(IndexingOneHot, IndexingOneHot)
- .hash(IndexingOneHot_hash_impl)
- .is_same_st(IndexingOneHot_is_same_st_impl)
- .props(IndexingOneHot_props_impl)
- .make_name(IndexingOneHot_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(IndexingSetMultiAxisVec);
-
- namespace {
- size_t IndexingSetMultiAxisVec_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingSetMultiAxisVec>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool IndexingSetMultiAxisVec_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<IndexingSetMultiAxisVec>(),
- &&b_ = rhs_.cast_final_safe<IndexingSetMultiAxisVec>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> IndexingSetMultiAxisVec_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingSetMultiAxisVec>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string IndexingSetMultiAxisVec_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingSetMultiAxisVec>();
- static_cast<void>(op_);
- return "IndexingSetMultiAxisVec";
- }
- } // anonymous namespace
- OP_TRAIT_REG(IndexingSetMultiAxisVec, IndexingSetMultiAxisVec)
- .hash(IndexingSetMultiAxisVec_hash_impl)
- .is_same_st(IndexingSetMultiAxisVec_is_same_st_impl)
- .props(IndexingSetMultiAxisVec_props_impl)
- .make_name(IndexingSetMultiAxisVec_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(IndexingSetOneHot);
-
- namespace {
- size_t IndexingSetOneHot_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingSetOneHot>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.ndim));
- return val;
- }
- bool IndexingSetOneHot_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<IndexingSetOneHot>(),
- &&b_ = rhs_.cast_final_safe<IndexingSetOneHot>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- if (a_.ndim != b_.ndim) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> IndexingSetOneHot_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingSetOneHot>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- props_.emplace_back("ndim", std::to_string(op_.ndim));
- return props_;
- }
- std::string IndexingSetOneHot_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingSetOneHot>();
- static_cast<void>(op_);
- return "IndexingSetOneHot";
- }
- } // anonymous namespace
- OP_TRAIT_REG(IndexingSetOneHot, IndexingSetOneHot)
- .hash(IndexingSetOneHot_hash_impl)
- .is_same_st(IndexingSetOneHot_is_same_st_impl)
- .props(IndexingSetOneHot_props_impl)
- .make_name(IndexingSetOneHot_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(InplaceAdd);
-
- namespace {
- size_t InplaceAdd_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<InplaceAdd>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- return val;
- }
- bool InplaceAdd_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<InplaceAdd>(),
- &&b_ = rhs_.cast_final_safe<InplaceAdd>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return true;
- }
- std::vector<std::pair<const char*, std::string>> InplaceAdd_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<InplaceAdd>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- return props_;
- }
- std::string InplaceAdd_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<InplaceAdd>();
- static_cast<void>(op_);
- return "InplaceAdd";
- }
- } // anonymous namespace
- OP_TRAIT_REG(InplaceAdd, InplaceAdd)
- .hash(InplaceAdd_hash_impl)
- .is_same_st(InplaceAdd_is_same_st_impl)
- .props(InplaceAdd_props_impl)
- .make_name(InplaceAdd_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(LAMBUpdate);
-
- namespace {
- size_t LAMBUpdate_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LAMBUpdate>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.beta_1));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.beta_2));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.step));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.lr));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.weight_decay));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.eps));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.bias_correction));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.always_adapt));
- return val;
- }
- bool LAMBUpdate_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<LAMBUpdate>(),
- &&b_ = rhs_.cast_final_safe<LAMBUpdate>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.beta_1 != b_.beta_1) return false;
- if (a_.beta_2 != b_.beta_2) return false;
- if (a_.step != b_.step) return false;
- if (a_.lr != b_.lr) return false;
- if (a_.weight_decay != b_.weight_decay) return false;
- if (a_.eps != b_.eps) return false;
- if (a_.bias_correction != b_.bias_correction) return false;
- if (a_.always_adapt != b_.always_adapt) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> LAMBUpdate_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LAMBUpdate>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("beta_1", std::to_string(op_.beta_1));
- props_.emplace_back("beta_2", std::to_string(op_.beta_2));
- props_.emplace_back("step", std::to_string(op_.step));
- props_.emplace_back("lr", std::to_string(op_.lr));
- props_.emplace_back("weight_decay", std::to_string(op_.weight_decay));
- props_.emplace_back("eps", std::to_string(op_.eps));
- props_.emplace_back("bias_correction", std::to_string(op_.bias_correction));
- props_.emplace_back("always_adapt", std::to_string(op_.always_adapt));
- return props_;
- }
- std::string LAMBUpdate_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LAMBUpdate>();
- static_cast<void>(op_);
- return "LAMBUpdate";
- }
- } // anonymous namespace
- OP_TRAIT_REG(LAMBUpdate, LAMBUpdate)
- .hash(LAMBUpdate_hash_impl)
- .is_same_st(LAMBUpdate_is_same_st_impl)
- .props(LAMBUpdate_props_impl)
- .make_name(LAMBUpdate_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(LRN);
-
- namespace {
- size_t LRN_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LRN>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.n));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.k));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.alpha));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.beta));
- return val;
- }
- bool LRN_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<LRN>(),
- &&b_ = rhs_.cast_final_safe<LRN>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.n != b_.n) return false;
- if (a_.k != b_.k) return false;
- if (a_.alpha != b_.alpha) return false;
- if (a_.beta != b_.beta) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> LRN_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LRN>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("n", std::to_string(op_.n));
- props_.emplace_back("k", std::to_string(op_.k));
- props_.emplace_back("alpha", std::to_string(op_.alpha));
- props_.emplace_back("beta", std::to_string(op_.beta));
- return props_;
- }
- std::string LRN_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LRN>();
- static_cast<void>(op_);
- return "LRN";
- }
- } // anonymous namespace
- OP_TRAIT_REG(LRN, LRN)
- .hash(LRN_hash_impl)
- .is_same_st(LRN_is_same_st_impl)
- .props(LRN_props_impl)
- .make_name(LRN_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(LSQ);
-
- namespace {
- size_t LSQ_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LSQ>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.qmin));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.qmax));
- return val;
- }
- bool LSQ_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<LSQ>(),
- &&b_ = rhs_.cast_final_safe<LSQ>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.qmin != b_.qmin) return false;
- if (a_.qmax != b_.qmax) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> LSQ_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LSQ>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("qmin", std::to_string(op_.qmin));
- props_.emplace_back("qmax", std::to_string(op_.qmax));
- return props_;
- }
- std::string LSQ_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LSQ>();
- static_cast<void>(op_);
- return "LSQ";
- }
- } // anonymous namespace
- OP_TRAIT_REG(LSQ, LSQ)
- .hash(LSQ_hash_impl)
- .is_same_st(LSQ_is_same_st_impl)
- .props(LSQ_props_impl)
- .make_name(LSQ_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(LSTM);
-
- namespace {
- size_t LSTM_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LSTM>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.num_layers));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.bidirectional));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.bias));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.hidden_size));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.proj_size));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dropout));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.fwd_mode));
- return val;
- }
- bool LSTM_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<LSTM>(),
- &&b_ = rhs_.cast_final_safe<LSTM>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.num_layers != b_.num_layers) return false;
- if (a_.bidirectional != b_.bidirectional) return false;
- if (a_.bias != b_.bias) return false;
- if (a_.hidden_size != b_.hidden_size) return false;
- if (a_.proj_size != b_.proj_size) return false;
- if (a_.dropout != b_.dropout) return false;
- if (a_.fwd_mode != b_.fwd_mode) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> LSTM_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LSTM>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("num_layers", std::to_string(op_.num_layers));
- props_.emplace_back("bidirectional", std::to_string(op_.bidirectional));
- props_.emplace_back("bias", std::to_string(op_.bias));
- props_.emplace_back("hidden_size", std::to_string(op_.hidden_size));
- props_.emplace_back("proj_size", std::to_string(op_.proj_size));
- props_.emplace_back("dropout", std::to_string(op_.dropout));
- switch (op_.fwd_mode){
- case LSTM::FwdMode::TRAINING:
- props_.emplace_back("fwd_mode", "TRAINING");
- break;
- case LSTM::FwdMode::INFERENCE:
- props_.emplace_back("fwd_mode", "INFERENCE");
- break;
- default:
- props_.emplace_back("fwd_mode", "INVALID");
- break;
- }
- return props_;
- }
- std::string LSTM_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LSTM>();
- static_cast<void>(op_);
- return "LSTM";
- }
- } // anonymous namespace
- OP_TRAIT_REG(LSTM, LSTM)
- .hash(LSTM_hash_impl)
- .is_same_st(LSTM_is_same_st_impl)
- .props(LSTM_props_impl)
- .make_name(LSTM_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(LSTMCell);
-
- namespace {
- size_t LSTMCell_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LSTMCell>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- return val;
- }
- bool LSTMCell_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<LSTMCell>(),
- &&b_ = rhs_.cast_final_safe<LSTMCell>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return true;
- }
- std::vector<std::pair<const char*, std::string>> LSTMCell_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LSTMCell>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- return props_;
- }
- std::string LSTMCell_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LSTMCell>();
- static_cast<void>(op_);
- return "LSTMCell";
- }
- } // anonymous namespace
- OP_TRAIT_REG(LSTMCell, LSTMCell)
- .hash(LSTMCell_hash_impl)
- .is_same_st(LSTMCell_is_same_st_impl)
- .props(LSTMCell_props_impl)
- .make_name(LSTMCell_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(LayerNorm);
-
- namespace {
- size_t LayerNorm_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LayerNorm>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.affine));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.eps));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.normalized_dim));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.normalized_size));
- return val;
- }
- bool LayerNorm_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<LayerNorm>(),
- &&b_ = rhs_.cast_final_safe<LayerNorm>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.affine != b_.affine) return false;
- if (a_.eps != b_.eps) return false;
- if (a_.normalized_dim != b_.normalized_dim) return false;
- if (a_.normalized_size != b_.normalized_size) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> LayerNorm_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LayerNorm>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("affine", std::to_string(op_.affine));
- props_.emplace_back("eps", std::to_string(op_.eps));
- props_.emplace_back("normalized_dim", std::to_string(op_.normalized_dim));
- props_.emplace_back("normalized_size", std::to_string(op_.normalized_size));
- return props_;
- }
- std::string LayerNorm_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LayerNorm>();
- static_cast<void>(op_);
- return "LayerNorm";
- }
- } // anonymous namespace
- OP_TRAIT_REG(LayerNorm, LayerNorm)
- .hash(LayerNorm_hash_impl)
- .is_same_st(LayerNorm_is_same_st_impl)
- .props(LayerNorm_props_impl)
- .make_name(LayerNorm_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Linspace);
-
- namespace {
- size_t Linspace_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Linspace>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.endpoint));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.comp_node));
- return val;
- }
- bool Linspace_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Linspace>(),
- &&b_ = rhs_.cast_final_safe<Linspace>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.endpoint != b_.endpoint) return false;
- if (a_.comp_node != b_.comp_node) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Linspace_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Linspace>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("endpoint", std::to_string(op_.endpoint));
- props_.emplace_back("comp_node", op_.comp_node.to_string());
- return props_;
- }
- std::string Linspace_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Linspace>();
- static_cast<void>(op_);
- return "Linspace";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Linspace, Linspace)
- .hash(Linspace_hash_impl)
- .is_same_st(Linspace_is_same_st_impl)
- .props(Linspace_props_impl)
- .make_name(Linspace_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(MagicMindRuntime);
-
- namespace {
- size_t MagicMindRuntime_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MagicMindRuntime>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.buf));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.buf_size));
- return val;
- }
- bool MagicMindRuntime_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<MagicMindRuntime>(),
- &&b_ = rhs_.cast_final_safe<MagicMindRuntime>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.buf != b_.buf) return false;
- if (a_.buf_size != b_.buf_size) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> MagicMindRuntime_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MagicMindRuntime>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("buf", op_.buf);
- props_.emplace_back("buf_size", std::to_string(op_.buf_size));
- return props_;
- }
- std::string MagicMindRuntime_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MagicMindRuntime>();
- static_cast<void>(op_);
- return "MagicMindRuntime";
- }
- } // anonymous namespace
- OP_TRAIT_REG(MagicMindRuntime, MagicMindRuntime)
- .hash(MagicMindRuntime_hash_impl)
- .is_same_st(MagicMindRuntime_is_same_st_impl)
- .props(MagicMindRuntime_props_impl)
- .make_name(MagicMindRuntime_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(MatrixInverse);
-
- namespace {
- size_t MatrixInverse_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MatrixInverse>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- return val;
- }
- bool MatrixInverse_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<MatrixInverse>(),
- &&b_ = rhs_.cast_final_safe<MatrixInverse>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return true;
- }
- std::vector<std::pair<const char*, std::string>> MatrixInverse_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MatrixInverse>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- return props_;
- }
- std::string MatrixInverse_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MatrixInverse>();
- static_cast<void>(op_);
- return "MatrixInverse";
- }
- } // anonymous namespace
- OP_TRAIT_REG(MatrixInverse, MatrixInverse)
- .hash(MatrixInverse_hash_impl)
- .is_same_st(MatrixInverse_is_same_st_impl)
- .props(MatrixInverse_props_impl)
- .make_name(MatrixInverse_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(MatrixMul);
-
- namespace {
- size_t MatrixMul_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MatrixMul>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.transposeA));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.transposeB));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.compute_mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dimA));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dimB));
- return val;
- }
- bool MatrixMul_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<MatrixMul>(),
- &&b_ = rhs_.cast_final_safe<MatrixMul>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.transposeA != b_.transposeA) return false;
- if (a_.transposeB != b_.transposeB) return false;
- if (a_.compute_mode != b_.compute_mode) return false;
- if (a_.format != b_.format) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- if (a_.dimA != b_.dimA) return false;
- if (a_.dimB != b_.dimB) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> MatrixMul_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MatrixMul>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("transposeA", std::to_string(op_.transposeA));
- props_.emplace_back("transposeB", std::to_string(op_.transposeB));
- switch (op_.compute_mode){
- case MatrixMul::ComputeMode::DEFAULT:
- props_.emplace_back("compute_mode", "DEFAULT");
- break;
- case MatrixMul::ComputeMode::FLOAT32:
- props_.emplace_back("compute_mode", "FLOAT32");
- break;
- default:
- props_.emplace_back("compute_mode", "INVALID");
- break;
- }
- switch (op_.format){
- case MatrixMul::Format::DEFAULT:
- props_.emplace_back("format", "DEFAULT");
- break;
- case MatrixMul::Format::MK4:
- props_.emplace_back("format", "MK4");
- break;
- case MatrixMul::Format::MK8:
- props_.emplace_back("format", "MK8");
- break;
- case MatrixMul::Format::MK4_DOT:
- props_.emplace_back("format", "MK4_DOT");
- break;
- case MatrixMul::Format::N32K4_DOT:
- props_.emplace_back("format", "N32K4_DOT");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.strategy){
- case MatrixMul::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case MatrixMul::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case MatrixMul::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case MatrixMul::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- props_.emplace_back("dimA", std::to_string(op_.dimA));
- props_.emplace_back("dimB", std::to_string(op_.dimB));
- return props_;
- }
- std::string MatrixMul_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MatrixMul>();
- static_cast<void>(op_);
- return "MatrixMul";
- }
- } // anonymous namespace
- OP_TRAIT_REG(MatrixMul, MatrixMul)
- .hash(MatrixMul_hash_impl)
- .is_same_st(MatrixMul_is_same_st_impl)
- .props(MatrixMul_props_impl)
- .make_name(MatrixMul_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(MeshGrid);
-
- namespace {
- size_t MeshGrid_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MeshGrid>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.indexing));
- return val;
- }
- bool MeshGrid_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<MeshGrid>(),
- &&b_ = rhs_.cast_final_safe<MeshGrid>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.indexing != b_.indexing) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> MeshGrid_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MeshGrid>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("indexing", op_.indexing);
- return props_;
- }
- std::string MeshGrid_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MeshGrid>();
- static_cast<void>(op_);
- return "MeshGrid";
- }
- } // anonymous namespace
- OP_TRAIT_REG(MeshGrid, MeshGrid)
- .hash(MeshGrid_hash_impl)
- .is_same_st(MeshGrid_is_same_st_impl)
- .props(MeshGrid_props_impl)
- .make_name(MeshGrid_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(MeshIndexing);
-
- namespace {
- size_t MeshIndexing_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MeshIndexing>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool MeshIndexing_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<MeshIndexing>(),
- &&b_ = rhs_.cast_final_safe<MeshIndexing>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> MeshIndexing_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MeshIndexing>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string MeshIndexing_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MeshIndexing>();
- static_cast<void>(op_);
- return "MeshIndexing";
- }
- } // anonymous namespace
- OP_TRAIT_REG(MeshIndexing, MeshIndexing)
- .hash(MeshIndexing_hash_impl)
- .is_same_st(MeshIndexing_is_same_st_impl)
- .props(MeshIndexing_props_impl)
- .make_name(MeshIndexing_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(NMSKeep);
-
- namespace {
- size_t NMSKeep_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<NMSKeep>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.iou_thresh));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.max_output));
- return val;
- }
- bool NMSKeep_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<NMSKeep>(),
- &&b_ = rhs_.cast_final_safe<NMSKeep>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.iou_thresh != b_.iou_thresh) return false;
- if (a_.max_output != b_.max_output) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> NMSKeep_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<NMSKeep>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("iou_thresh", std::to_string(op_.iou_thresh));
- props_.emplace_back("max_output", std::to_string(op_.max_output));
- return props_;
- }
- std::string NMSKeep_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<NMSKeep>();
- static_cast<void>(op_);
- return "NMSKeep";
- }
- } // anonymous namespace
- OP_TRAIT_REG(NMSKeep, NMSKeep)
- .hash(NMSKeep_hash_impl)
- .is_same_st(NMSKeep_is_same_st_impl)
- .props(NMSKeep_props_impl)
- .make_name(NMSKeep_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(NvOf);
-
- namespace {
- size_t NvOf_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<NvOf>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.precision));
- return val;
- }
- bool NvOf_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<NvOf>(),
- &&b_ = rhs_.cast_final_safe<NvOf>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.precision != b_.precision) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> NvOf_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<NvOf>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("precision", std::to_string(op_.precision));
- return props_;
- }
- std::string NvOf_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<NvOf>();
- static_cast<void>(op_);
- return "NvOf";
- }
- } // anonymous namespace
- OP_TRAIT_REG(NvOf, NvOf)
- .hash(NvOf_hash_impl)
- .is_same_st(NvOf_is_same_st_impl)
- .props(NvOf_props_impl)
- .make_name(NvOf_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Padding);
-
- namespace {
- size_t Padding_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Padding>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.front_offset_dim0));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.front_offset_dim1));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.front_offset_dim2));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.front_offset_dim3));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.front_offset_dim4));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.front_offset_dim5));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.front_offset_dim6));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.back_offset_dim0));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.back_offset_dim1));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.back_offset_dim2));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.back_offset_dim3));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.back_offset_dim4));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.back_offset_dim5));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.back_offset_dim6));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.padding_val));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.padding_mode));
- return val;
- }
- bool Padding_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Padding>(),
- &&b_ = rhs_.cast_final_safe<Padding>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.front_offset_dim0 != b_.front_offset_dim0) return false;
- if (a_.front_offset_dim1 != b_.front_offset_dim1) return false;
- if (a_.front_offset_dim2 != b_.front_offset_dim2) return false;
- if (a_.front_offset_dim3 != b_.front_offset_dim3) return false;
- if (a_.front_offset_dim4 != b_.front_offset_dim4) return false;
- if (a_.front_offset_dim5 != b_.front_offset_dim5) return false;
- if (a_.front_offset_dim6 != b_.front_offset_dim6) return false;
- if (a_.back_offset_dim0 != b_.back_offset_dim0) return false;
- if (a_.back_offset_dim1 != b_.back_offset_dim1) return false;
- if (a_.back_offset_dim2 != b_.back_offset_dim2) return false;
- if (a_.back_offset_dim3 != b_.back_offset_dim3) return false;
- if (a_.back_offset_dim4 != b_.back_offset_dim4) return false;
- if (a_.back_offset_dim5 != b_.back_offset_dim5) return false;
- if (a_.back_offset_dim6 != b_.back_offset_dim6) return false;
- if (a_.padding_val != b_.padding_val) return false;
- if (a_.padding_mode != b_.padding_mode) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Padding_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Padding>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("front_offset_dim0", std::to_string(op_.front_offset_dim0));
- props_.emplace_back("front_offset_dim1", std::to_string(op_.front_offset_dim1));
- props_.emplace_back("front_offset_dim2", std::to_string(op_.front_offset_dim2));
- props_.emplace_back("front_offset_dim3", std::to_string(op_.front_offset_dim3));
- props_.emplace_back("front_offset_dim4", std::to_string(op_.front_offset_dim4));
- props_.emplace_back("front_offset_dim5", std::to_string(op_.front_offset_dim5));
- props_.emplace_back("front_offset_dim6", std::to_string(op_.front_offset_dim6));
- props_.emplace_back("back_offset_dim0", std::to_string(op_.back_offset_dim0));
- props_.emplace_back("back_offset_dim1", std::to_string(op_.back_offset_dim1));
- props_.emplace_back("back_offset_dim2", std::to_string(op_.back_offset_dim2));
- props_.emplace_back("back_offset_dim3", std::to_string(op_.back_offset_dim3));
- props_.emplace_back("back_offset_dim4", std::to_string(op_.back_offset_dim4));
- props_.emplace_back("back_offset_dim5", std::to_string(op_.back_offset_dim5));
- props_.emplace_back("back_offset_dim6", std::to_string(op_.back_offset_dim6));
- props_.emplace_back("padding_val", std::to_string(op_.padding_val));
- switch (op_.padding_mode){
- case Padding::PaddingMode::REPLICATE:
- props_.emplace_back("padding_mode", "REPLICATE");
- break;
- case Padding::PaddingMode::REFLECT:
- props_.emplace_back("padding_mode", "REFLECT");
- break;
- case Padding::PaddingMode::CONSTANT:
- props_.emplace_back("padding_mode", "CONSTANT");
- break;
- default:
- props_.emplace_back("padding_mode", "INVALID");
- break;
- }
- return props_;
- }
- std::string Padding_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Padding>();
- static_cast<void>(op_);
- return "Padding";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Padding, Padding)
- .hash(Padding_hash_impl)
- .is_same_st(Padding_is_same_st_impl)
- .props(Padding_props_impl)
- .make_name(Padding_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(ParamPackConcat);
-
- namespace {
- size_t ParamPackConcat_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ParamPackConcat>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.offsets));
- return val;
- }
- bool ParamPackConcat_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<ParamPackConcat>(),
- &&b_ = rhs_.cast_final_safe<ParamPackConcat>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.offsets != b_.offsets) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> ParamPackConcat_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ParamPackConcat>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("offsets", "{std::vector}");
- return props_;
- }
- std::string ParamPackConcat_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ParamPackConcat>();
- static_cast<void>(op_);
- return "ParamPackConcat";
- }
- } // anonymous namespace
- OP_TRAIT_REG(ParamPackConcat, ParamPackConcat)
- .hash(ParamPackConcat_hash_impl)
- .is_same_st(ParamPackConcat_is_same_st_impl)
- .props(ParamPackConcat_props_impl)
- .make_name(ParamPackConcat_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(ParamPackSplit);
-
- namespace {
- size_t ParamPackSplit_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ParamPackSplit>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.offsets));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.shapes));
- return val;
- }
- bool ParamPackSplit_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<ParamPackSplit>(),
- &&b_ = rhs_.cast_final_safe<ParamPackSplit>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.offsets != b_.offsets) return false;
- if (a_.shapes != b_.shapes) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> ParamPackSplit_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ParamPackSplit>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("offsets", "{std::vector}");
- props_.emplace_back("shapes", "{std::vector}");
- return props_;
- }
- std::string ParamPackSplit_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ParamPackSplit>();
- static_cast<void>(op_);
- return "ParamPackSplit";
- }
- } // anonymous namespace
- OP_TRAIT_REG(ParamPackSplit, ParamPackSplit)
- .hash(ParamPackSplit_hash_impl)
- .is_same_st(ParamPackSplit_is_same_st_impl)
- .props(ParamPackSplit_props_impl)
- .make_name(ParamPackSplit_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(PermutationRNG);
-
- namespace {
- size_t PermutationRNG_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PermutationRNG>();
- static_cast<void>(op_);
- return mgb::hash_pair_combine(
- mgb::hash(op_.dyn_typeinfo()),
- mgb::hash_pair_combine(
- mgb::hash(op_.handle),
- mgb::hash(op_.dtype.enumv())
- )
- );
- }
- bool PermutationRNG_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<PermutationRNG>(),
- &&b_ = rhs_.cast_final_safe<PermutationRNG>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return a_.handle == b_.handle && a_.dtype == b_.dtype;
- }
- std::vector<std::pair<const char*, std::string>> PermutationRNG_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PermutationRNG>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("seed", std::to_string(op_.seed));
- props_.emplace_back("dtype", op_.dtype.name());
- props_.emplace_back("handle", std::to_string(op_.handle));
- return props_;
- }
- std::string PermutationRNG_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PermutationRNG>();
- static_cast<void>(op_);
- return "PermutationRNG";
- }
- } // anonymous namespace
- OP_TRAIT_REG(PermutationRNG, PermutationRNG)
- .hash(PermutationRNG_hash_impl)
- .is_same_st(PermutationRNG_is_same_st_impl)
- .props(PermutationRNG_props_impl)
- .make_name(PermutationRNG_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(PixelShuffle);
-
- namespace {
- size_t PixelShuffle_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PixelShuffle>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.factor));
- return val;
- }
- bool PixelShuffle_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<PixelShuffle>(),
- &&b_ = rhs_.cast_final_safe<PixelShuffle>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.factor != b_.factor) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> PixelShuffle_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PixelShuffle>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("factor", std::to_string(op_.factor));
- return props_;
- }
- std::string PixelShuffle_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PixelShuffle>();
- static_cast<void>(op_);
- return "PixelShuffle";
- }
- } // anonymous namespace
- OP_TRAIT_REG(PixelShuffle, PixelShuffle)
- .hash(PixelShuffle_hash_impl)
- .is_same_st(PixelShuffle_is_same_st_impl)
- .props(PixelShuffle_props_impl)
- .make_name(PixelShuffle_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(PixelShuffleBackward);
-
- namespace {
- size_t PixelShuffleBackward_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PixelShuffleBackward>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.factor));
- return val;
- }
- bool PixelShuffleBackward_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<PixelShuffleBackward>(),
- &&b_ = rhs_.cast_final_safe<PixelShuffleBackward>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.factor != b_.factor) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> PixelShuffleBackward_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PixelShuffleBackward>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("factor", std::to_string(op_.factor));
- return props_;
- }
- std::string PixelShuffleBackward_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PixelShuffleBackward>();
- static_cast<void>(op_);
- return "PixelShuffleBackward";
- }
- } // anonymous namespace
- OP_TRAIT_REG(PixelShuffleBackward, PixelShuffleBackward)
- .hash(PixelShuffleBackward_hash_impl)
- .is_same_st(PixelShuffleBackward_is_same_st_impl)
- .props(PixelShuffleBackward_props_impl)
- .make_name(PixelShuffleBackward_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(PoissonRNG);
-
- namespace {
- size_t PoissonRNG_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PoissonRNG>();
- static_cast<void>(op_);
- return mgb::hash_pair_combine(
- mgb::hash(op_.dyn_typeinfo()),
- mgb::hash(op_.handle)
- );
- }
- bool PoissonRNG_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<PoissonRNG>(),
- &&b_ = rhs_.cast_final_safe<PoissonRNG>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return a_.handle == b_.handle;
- }
- std::vector<std::pair<const char*, std::string>> PoissonRNG_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PoissonRNG>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("seed", std::to_string(op_.seed));
- props_.emplace_back("handle", std::to_string(op_.handle));
- return props_;
- }
- std::string PoissonRNG_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PoissonRNG>();
- static_cast<void>(op_);
- return "PoissonRNG";
- }
- } // anonymous namespace
- OP_TRAIT_REG(PoissonRNG, PoissonRNG)
- .hash(PoissonRNG_hash_impl)
- .is_same_st(PoissonRNG_is_same_st_impl)
- .props(PoissonRNG_props_impl)
- .make_name(PoissonRNG_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Pooling);
-
- namespace {
- size_t Pooling_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Pooling>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.window_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.window_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- return val;
- }
- bool Pooling_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Pooling>(),
- &&b_ = rhs_.cast_final_safe<Pooling>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.window_h != b_.window_h) return false;
- if (a_.window_w != b_.window_w) return false;
- if (a_.format != b_.format) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Pooling_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Pooling>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case Pooling::Mode::MAX:
- props_.emplace_back("mode", "MAX");
- break;
- case Pooling::Mode::AVERAGE:
- props_.emplace_back("mode", "AVERAGE");
- break;
- case Pooling::Mode::AVERAGE_COUNT_EXCLUDE_PADDING:
- props_.emplace_back("mode", "AVERAGE_COUNT_EXCLUDE_PADDING");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("window_h", std::to_string(op_.window_h));
- props_.emplace_back("window_w", std::to_string(op_.window_w));
- switch (op_.format){
- case Pooling::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case Pooling::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case Pooling::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case Pooling::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case Pooling::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case Pooling::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case Pooling::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case Pooling::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case Pooling::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case Pooling::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case Pooling::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case Pooling::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case Pooling::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case Pooling::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case Pooling::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case Pooling::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case Pooling::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case Pooling::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.strategy){
- case Pooling::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case Pooling::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case Pooling::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case Pooling::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- return props_;
- }
- std::string Pooling_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Pooling>();
- static_cast<void>(op_);
- return "Pooling";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Pooling, Pooling)
- .hash(Pooling_hash_impl)
- .is_same_st(Pooling_is_same_st_impl)
- .props(Pooling_props_impl)
- .make_name(Pooling_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(RNN);
-
- namespace {
- size_t RNN_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RNN>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.num_layers));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.bidirectional));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.bias));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.hidden_size));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dropout));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.nonlineMode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.fwd_mode));
- return val;
- }
- bool RNN_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<RNN>(),
- &&b_ = rhs_.cast_final_safe<RNN>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.num_layers != b_.num_layers) return false;
- if (a_.bidirectional != b_.bidirectional) return false;
- if (a_.bias != b_.bias) return false;
- if (a_.hidden_size != b_.hidden_size) return false;
- if (a_.dropout != b_.dropout) return false;
- if (a_.nonlineMode != b_.nonlineMode) return false;
- if (a_.fwd_mode != b_.fwd_mode) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> RNN_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RNN>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("num_layers", std::to_string(op_.num_layers));
- props_.emplace_back("bidirectional", std::to_string(op_.bidirectional));
- props_.emplace_back("bias", std::to_string(op_.bias));
- props_.emplace_back("hidden_size", std::to_string(op_.hidden_size));
- props_.emplace_back("dropout", std::to_string(op_.dropout));
- switch (op_.nonlineMode){
- case RNN::NonlineMode::IDENTITY:
- props_.emplace_back("nonlineMode", "IDENTITY");
- break;
- case RNN::NonlineMode::RELU:
- props_.emplace_back("nonlineMode", "RELU");
- break;
- case RNN::NonlineMode::TANH:
- props_.emplace_back("nonlineMode", "TANH");
- break;
- default:
- props_.emplace_back("nonlineMode", "INVALID");
- break;
- }
- switch (op_.fwd_mode){
- case RNN::FwdMode::TRAINING:
- props_.emplace_back("fwd_mode", "TRAINING");
- break;
- case RNN::FwdMode::INFERENCE:
- props_.emplace_back("fwd_mode", "INFERENCE");
- break;
- default:
- props_.emplace_back("fwd_mode", "INVALID");
- break;
- }
- return props_;
- }
- std::string RNN_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RNN>();
- static_cast<void>(op_);
- return "RNN";
- }
- } // anonymous namespace
- OP_TRAIT_REG(RNN, RNN)
- .hash(RNN_hash_impl)
- .is_same_st(RNN_is_same_st_impl)
- .props(RNN_props_impl)
- .make_name(RNN_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(RNNCell);
-
- namespace {
- size_t RNNCell_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RNNCell>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.nonlineMode));
- return val;
- }
- bool RNNCell_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<RNNCell>(),
- &&b_ = rhs_.cast_final_safe<RNNCell>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.nonlineMode != b_.nonlineMode) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> RNNCell_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RNNCell>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.nonlineMode){
- case RNNCell::NonlineMode::IDENTITY:
- props_.emplace_back("nonlineMode", "IDENTITY");
- break;
- case RNNCell::NonlineMode::RELU:
- props_.emplace_back("nonlineMode", "RELU");
- break;
- case RNNCell::NonlineMode::TANH:
- props_.emplace_back("nonlineMode", "TANH");
- break;
- default:
- props_.emplace_back("nonlineMode", "INVALID");
- break;
- }
- return props_;
- }
- std::string RNNCell_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RNNCell>();
- static_cast<void>(op_);
- return "RNNCell";
- }
- } // anonymous namespace
- OP_TRAIT_REG(RNNCell, RNNCell)
- .hash(RNNCell_hash_impl)
- .is_same_st(RNNCell_is_same_st_impl)
- .props(RNNCell_props_impl)
- .make_name(RNNCell_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(ROIAlign);
-
- namespace {
- size_t ROIAlign_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ROIAlign>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.spatial_scale));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.offset));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pooled_height));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pooled_width));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.sample_height));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.sample_width));
- return val;
- }
- bool ROIAlign_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<ROIAlign>(),
- &&b_ = rhs_.cast_final_safe<ROIAlign>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.format != b_.format) return false;
- if (a_.spatial_scale != b_.spatial_scale) return false;
- if (a_.offset != b_.offset) return false;
- if (a_.pooled_height != b_.pooled_height) return false;
- if (a_.pooled_width != b_.pooled_width) return false;
- if (a_.sample_height != b_.sample_height) return false;
- if (a_.sample_width != b_.sample_width) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> ROIAlign_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ROIAlign>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case ROIAlign::Mode::MAX:
- props_.emplace_back("mode", "MAX");
- break;
- case ROIAlign::Mode::AVERAGE:
- props_.emplace_back("mode", "AVERAGE");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- switch (op_.format){
- case ROIAlign::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case ROIAlign::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case ROIAlign::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case ROIAlign::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case ROIAlign::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case ROIAlign::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case ROIAlign::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case ROIAlign::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case ROIAlign::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case ROIAlign::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case ROIAlign::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case ROIAlign::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case ROIAlign::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case ROIAlign::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case ROIAlign::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case ROIAlign::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case ROIAlign::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case ROIAlign::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- props_.emplace_back("spatial_scale", std::to_string(op_.spatial_scale));
- props_.emplace_back("offset", std::to_string(op_.offset));
- props_.emplace_back("pooled_height", std::to_string(op_.pooled_height));
- props_.emplace_back("pooled_width", std::to_string(op_.pooled_width));
- props_.emplace_back("sample_height", std::to_string(op_.sample_height));
- props_.emplace_back("sample_width", std::to_string(op_.sample_width));
- return props_;
- }
- std::string ROIAlign_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ROIAlign>();
- static_cast<void>(op_);
- return "ROIAlign";
- }
- } // anonymous namespace
- OP_TRAIT_REG(ROIAlign, ROIAlign)
- .hash(ROIAlign_hash_impl)
- .is_same_st(ROIAlign_is_same_st_impl)
- .props(ROIAlign_props_impl)
- .make_name(ROIAlign_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(ROIPooling);
-
- namespace {
- size_t ROIPooling_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ROIPooling>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.scale));
- return val;
- }
- bool ROIPooling_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<ROIPooling>(),
- &&b_ = rhs_.cast_final_safe<ROIPooling>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.scale != b_.scale) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> ROIPooling_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ROIPooling>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case ROIPooling::Mode::MAX:
- props_.emplace_back("mode", "MAX");
- break;
- case ROIPooling::Mode::AVERAGE:
- props_.emplace_back("mode", "AVERAGE");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("scale", std::to_string(op_.scale));
- return props_;
- }
- std::string ROIPooling_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ROIPooling>();
- static_cast<void>(op_);
- return "ROIPooling";
- }
- } // anonymous namespace
- OP_TRAIT_REG(ROIPooling, ROIPooling)
- .hash(ROIPooling_hash_impl)
- .is_same_st(ROIPooling_is_same_st_impl)
- .props(ROIPooling_props_impl)
- .make_name(ROIPooling_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Reduce);
-
- namespace {
- size_t Reduce_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Reduce>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.data_type));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.keepdim));
- return val;
- }
- bool Reduce_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Reduce>(),
- &&b_ = rhs_.cast_final_safe<Reduce>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.axis != b_.axis) return false;
- if (a_.data_type != b_.data_type) return false;
- if (a_.keepdim != b_.keepdim) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Reduce_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Reduce>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case Reduce::Mode::SUM:
- props_.emplace_back("mode", "SUM");
- break;
- case Reduce::Mode::SUM_SQR:
- props_.emplace_back("mode", "SUM_SQR");
- break;
- case Reduce::Mode::PRODUCT:
- props_.emplace_back("mode", "PRODUCT");
- break;
- case Reduce::Mode::MIN:
- props_.emplace_back("mode", "MIN");
- break;
- case Reduce::Mode::MAX:
- props_.emplace_back("mode", "MAX");
- break;
- case Reduce::Mode::MEAN:
- props_.emplace_back("mode", "MEAN");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("axis", std::to_string(op_.axis));
- switch (op_.data_type){
- case Reduce::DataType::DEFAULT:
- props_.emplace_back("data_type", "DEFAULT");
- break;
- case Reduce::DataType::FLOAT_IO16xC32:
- props_.emplace_back("data_type", "FLOAT_IO16xC32");
- break;
- case Reduce::DataType::FLOAT_O32xC32:
- props_.emplace_back("data_type", "FLOAT_O32xC32");
- break;
- case Reduce::DataType::FLOAT_O16xC32:
- props_.emplace_back("data_type", "FLOAT_O16xC32");
- break;
- case Reduce::DataType::QUINT_I8xO32:
- props_.emplace_back("data_type", "QUINT_I8xO32");
- break;
- case Reduce::DataType::QINT_I8xO32:
- props_.emplace_back("data_type", "QINT_I8xO32");
- break;
- default:
- props_.emplace_back("data_type", "INVALID");
- break;
- }
- props_.emplace_back("keepdim", std::to_string(op_.keepdim));
- return props_;
- }
- std::string Reduce_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Reduce>();
- static_cast<void>(op_);
- return "Reduce";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Reduce, Reduce)
- .hash(Reduce_hash_impl)
- .is_same_st(Reduce_is_same_st_impl)
- .props(Reduce_props_impl)
- .make_name(Reduce_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(RegionRestrictedConvolution);
-
- namespace {
- size_t RegionRestrictedConvolution_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RegionRestrictedConvolution>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.sparse));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.compute_mode));
- return val;
- }
- bool RegionRestrictedConvolution_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<RegionRestrictedConvolution>(),
- &&b_ = rhs_.cast_final_safe<RegionRestrictedConvolution>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.sparse != b_.sparse) return false;
- if (a_.format != b_.format) return false;
- if (a_.compute_mode != b_.compute_mode) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> RegionRestrictedConvolution_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RegionRestrictedConvolution>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case RegionRestrictedConvolution::Mode::CROSS_CORRELATION:
- props_.emplace_back("mode", "CROSS_CORRELATION");
- break;
- case RegionRestrictedConvolution::Mode::CONVOLUTION:
- props_.emplace_back("mode", "CONVOLUTION");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- switch (op_.sparse){
- case RegionRestrictedConvolution::Sparse::DENSE:
- props_.emplace_back("sparse", "DENSE");
- break;
- case RegionRestrictedConvolution::Sparse::GROUP:
- props_.emplace_back("sparse", "GROUP");
- break;
- default:
- props_.emplace_back("sparse", "INVALID");
- break;
- }
- switch (op_.format){
- case RegionRestrictedConvolution::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case RegionRestrictedConvolution::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case RegionRestrictedConvolution::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case RegionRestrictedConvolution::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case RegionRestrictedConvolution::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case RegionRestrictedConvolution::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case RegionRestrictedConvolution::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case RegionRestrictedConvolution::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case RegionRestrictedConvolution::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case RegionRestrictedConvolution::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case RegionRestrictedConvolution::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case RegionRestrictedConvolution::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case RegionRestrictedConvolution::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case RegionRestrictedConvolution::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case RegionRestrictedConvolution::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case RegionRestrictedConvolution::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case RegionRestrictedConvolution::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case RegionRestrictedConvolution::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.compute_mode){
- case RegionRestrictedConvolution::ComputeMode::DEFAULT:
- props_.emplace_back("compute_mode", "DEFAULT");
- break;
- case RegionRestrictedConvolution::ComputeMode::FLOAT32:
- props_.emplace_back("compute_mode", "FLOAT32");
- break;
- default:
- props_.emplace_back("compute_mode", "INVALID");
- break;
- }
- return props_;
- }
- std::string RegionRestrictedConvolution_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RegionRestrictedConvolution>();
- static_cast<void>(op_);
- return "RegionRestrictedConvolution";
- }
- } // anonymous namespace
- OP_TRAIT_REG(RegionRestrictedConvolution, RegionRestrictedConvolution)
- .hash(RegionRestrictedConvolution_hash_impl)
- .is_same_st(RegionRestrictedConvolution_is_same_st_impl)
- .props(RegionRestrictedConvolution_props_impl)
- .make_name(RegionRestrictedConvolution_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(RegionRestrictedConvolutionBackwardData);
-
- namespace {
- size_t RegionRestrictedConvolutionBackwardData_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RegionRestrictedConvolutionBackwardData>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.sparse));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.compute_mode));
- return val;
- }
- bool RegionRestrictedConvolutionBackwardData_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<RegionRestrictedConvolutionBackwardData>(),
- &&b_ = rhs_.cast_final_safe<RegionRestrictedConvolutionBackwardData>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.sparse != b_.sparse) return false;
- if (a_.format != b_.format) return false;
- if (a_.compute_mode != b_.compute_mode) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> RegionRestrictedConvolutionBackwardData_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RegionRestrictedConvolutionBackwardData>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case RegionRestrictedConvolutionBackwardData::Mode::CROSS_CORRELATION:
- props_.emplace_back("mode", "CROSS_CORRELATION");
- break;
- case RegionRestrictedConvolutionBackwardData::Mode::CONVOLUTION:
- props_.emplace_back("mode", "CONVOLUTION");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- switch (op_.sparse){
- case RegionRestrictedConvolutionBackwardData::Sparse::DENSE:
- props_.emplace_back("sparse", "DENSE");
- break;
- case RegionRestrictedConvolutionBackwardData::Sparse::GROUP:
- props_.emplace_back("sparse", "GROUP");
- break;
- default:
- props_.emplace_back("sparse", "INVALID");
- break;
- }
- switch (op_.format){
- case RegionRestrictedConvolutionBackwardData::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case RegionRestrictedConvolutionBackwardData::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case RegionRestrictedConvolutionBackwardData::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case RegionRestrictedConvolutionBackwardData::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case RegionRestrictedConvolutionBackwardData::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case RegionRestrictedConvolutionBackwardData::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case RegionRestrictedConvolutionBackwardData::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case RegionRestrictedConvolutionBackwardData::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case RegionRestrictedConvolutionBackwardData::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case RegionRestrictedConvolutionBackwardData::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case RegionRestrictedConvolutionBackwardData::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case RegionRestrictedConvolutionBackwardData::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case RegionRestrictedConvolutionBackwardData::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case RegionRestrictedConvolutionBackwardData::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case RegionRestrictedConvolutionBackwardData::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case RegionRestrictedConvolutionBackwardData::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case RegionRestrictedConvolutionBackwardData::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case RegionRestrictedConvolutionBackwardData::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.compute_mode){
- case RegionRestrictedConvolutionBackwardData::ComputeMode::DEFAULT:
- props_.emplace_back("compute_mode", "DEFAULT");
- break;
- case RegionRestrictedConvolutionBackwardData::ComputeMode::FLOAT32:
- props_.emplace_back("compute_mode", "FLOAT32");
- break;
- default:
- props_.emplace_back("compute_mode", "INVALID");
- break;
- }
- return props_;
- }
- std::string RegionRestrictedConvolutionBackwardData_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RegionRestrictedConvolutionBackwardData>();
- static_cast<void>(op_);
- return "RegionRestrictedConvolutionBackwardData";
- }
- } // anonymous namespace
- OP_TRAIT_REG(RegionRestrictedConvolutionBackwardData, RegionRestrictedConvolutionBackwardData)
- .hash(RegionRestrictedConvolutionBackwardData_hash_impl)
- .is_same_st(RegionRestrictedConvolutionBackwardData_is_same_st_impl)
- .props(RegionRestrictedConvolutionBackwardData_props_impl)
- .make_name(RegionRestrictedConvolutionBackwardData_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Remap);
-
- namespace {
- size_t Remap_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Remap>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.imode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.border_type));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.scalar));
- return val;
- }
- bool Remap_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Remap>(),
- &&b_ = rhs_.cast_final_safe<Remap>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.imode != b_.imode) return false;
- if (a_.border_type != b_.border_type) return false;
- if (a_.format != b_.format) return false;
- if (a_.scalar != b_.scalar) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Remap_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Remap>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.imode){
- case Remap::InterpolationMode::NEAREST:
- props_.emplace_back("imode", "NEAREST");
- break;
- case Remap::InterpolationMode::LINEAR:
- props_.emplace_back("imode", "LINEAR");
- break;
- case Remap::InterpolationMode::AREA:
- props_.emplace_back("imode", "AREA");
- break;
- case Remap::InterpolationMode::CUBIC:
- props_.emplace_back("imode", "CUBIC");
- break;
- case Remap::InterpolationMode::LANCZOS4:
- props_.emplace_back("imode", "LANCZOS4");
- break;
- default:
- props_.emplace_back("imode", "INVALID");
- break;
- }
- switch (op_.border_type){
- case Remap::BorderMode::REPLICATE:
- props_.emplace_back("border_type", "REPLICATE");
- break;
- case Remap::BorderMode::REFLECT:
- props_.emplace_back("border_type", "REFLECT");
- break;
- case Remap::BorderMode::REFLECT_101:
- props_.emplace_back("border_type", "REFLECT_101");
- break;
- case Remap::BorderMode::WRAP:
- props_.emplace_back("border_type", "WRAP");
- break;
- case Remap::BorderMode::CONSTANT:
- props_.emplace_back("border_type", "CONSTANT");
- break;
- case Remap::BorderMode::TRANSPARENT:
- props_.emplace_back("border_type", "TRANSPARENT");
- break;
- case Remap::BorderMode::ISOLATED:
- props_.emplace_back("border_type", "ISOLATED");
- break;
- default:
- props_.emplace_back("border_type", "INVALID");
- break;
- }
- switch (op_.format){
- case Remap::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case Remap::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case Remap::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case Remap::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case Remap::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case Remap::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case Remap::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case Remap::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case Remap::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case Remap::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case Remap::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case Remap::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case Remap::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case Remap::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case Remap::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case Remap::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case Remap::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case Remap::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- props_.emplace_back("scalar", std::to_string(op_.scalar));
- return props_;
- }
- std::string Remap_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Remap>();
- static_cast<void>(op_);
- return "Remap";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Remap, Remap)
- .hash(Remap_hash_impl)
- .is_same_st(Remap_is_same_st_impl)
- .props(Remap_props_impl)
- .make_name(Remap_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(RemoteRecv);
-
- namespace {
- size_t RemoteRecv_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RemoteRecv>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.key));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.addr));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.port));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.rank_from));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.cn));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.shape));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dtype.handle()));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.backend));
- return val;
- }
- bool RemoteRecv_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<RemoteRecv>(),
- &&b_ = rhs_.cast_final_safe<RemoteRecv>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.key != b_.key) return false;
- if (a_.addr != b_.addr) return false;
- if (a_.port != b_.port) return false;
- if (a_.rank_from != b_.rank_from) return false;
- if (a_.cn != b_.cn) return false;
- if (a_.shape != b_.shape) return false;
- if (a_.dtype != b_.dtype) return false;
- if (a_.backend != b_.backend) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> RemoteRecv_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RemoteRecv>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("key", op_.key);
- props_.emplace_back("addr", op_.addr);
- props_.emplace_back("port", std::to_string(op_.port));
- props_.emplace_back("rank_from", std::to_string(op_.rank_from));
- props_.emplace_back("cn", op_.cn.to_string());
- props_.emplace_back("shape", "{std::vector}");
- props_.emplace_back("dtype", op_.dtype.name());
- props_.emplace_back("backend", op_.backend);
- return props_;
- }
- std::string RemoteRecv_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RemoteRecv>();
- static_cast<void>(op_);
- return "RemoteRecv";
- }
- } // anonymous namespace
- OP_TRAIT_REG(RemoteRecv, RemoteRecv)
- .hash(RemoteRecv_hash_impl)
- .is_same_st(RemoteRecv_is_same_st_impl)
- .props(RemoteRecv_props_impl)
- .make_name(RemoteRecv_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(RemoteSend);
-
- namespace {
- size_t RemoteSend_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RemoteSend>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.key));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.addr));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.port));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.rank_to));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.backend));
- return val;
- }
- bool RemoteSend_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<RemoteSend>(),
- &&b_ = rhs_.cast_final_safe<RemoteSend>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.key != b_.key) return false;
- if (a_.addr != b_.addr) return false;
- if (a_.port != b_.port) return false;
- if (a_.rank_to != b_.rank_to) return false;
- if (a_.backend != b_.backend) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> RemoteSend_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RemoteSend>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("key", op_.key);
- props_.emplace_back("addr", op_.addr);
- props_.emplace_back("port", std::to_string(op_.port));
- props_.emplace_back("rank_to", std::to_string(op_.rank_to));
- props_.emplace_back("backend", op_.backend);
- return props_;
- }
- std::string RemoteSend_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RemoteSend>();
- static_cast<void>(op_);
- return "RemoteSend";
- }
- } // anonymous namespace
- OP_TRAIT_REG(RemoteSend, RemoteSend)
- .hash(RemoteSend_hash_impl)
- .is_same_st(RemoteSend_is_same_st_impl)
- .props(RemoteSend_props_impl)
- .make_name(RemoteSend_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(RemoveAxis);
-
- namespace {
- size_t RemoveAxis_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RemoveAxis>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- return val;
- }
- bool RemoveAxis_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<RemoveAxis>(),
- &&b_ = rhs_.cast_final_safe<RemoveAxis>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> RemoveAxis_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RemoveAxis>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", "{std::vector}");
- return props_;
- }
- std::string RemoveAxis_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RemoveAxis>();
- static_cast<void>(op_);
- return "RemoveAxis";
- }
- } // anonymous namespace
- OP_TRAIT_REG(RemoveAxis, RemoveAxis)
- .hash(RemoveAxis_hash_impl)
- .is_same_st(RemoveAxis_is_same_st_impl)
- .props(RemoveAxis_props_impl)
- .make_name(RemoveAxis_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Reshape);
-
- namespace {
- size_t Reshape_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Reshape>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.shape));
- return val;
- }
- bool Reshape_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Reshape>(),
- &&b_ = rhs_.cast_final_safe<Reshape>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- if (a_.shape != b_.shape) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Reshape_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Reshape>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- props_.emplace_back("shape", "{std::vector}");
- return props_;
- }
- std::string Reshape_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Reshape>();
- static_cast<void>(op_);
- return "Reshape";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Reshape, Reshape)
- .hash(Reshape_hash_impl)
- .is_same_st(Reshape_is_same_st_impl)
- .props(Reshape_props_impl)
- .make_name(Reshape_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Resize);
-
- namespace {
- size_t Resize_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Resize>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.imode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- return val;
- }
- bool Resize_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Resize>(),
- &&b_ = rhs_.cast_final_safe<Resize>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.imode != b_.imode) return false;
- if (a_.format != b_.format) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Resize_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Resize>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.imode){
- case Resize::InterpolationMode::NEAREST:
- props_.emplace_back("imode", "NEAREST");
- break;
- case Resize::InterpolationMode::LINEAR:
- props_.emplace_back("imode", "LINEAR");
- break;
- case Resize::InterpolationMode::AREA:
- props_.emplace_back("imode", "AREA");
- break;
- case Resize::InterpolationMode::CUBIC:
- props_.emplace_back("imode", "CUBIC");
- break;
- case Resize::InterpolationMode::LANCZOS4:
- props_.emplace_back("imode", "LANCZOS4");
- break;
- default:
- props_.emplace_back("imode", "INVALID");
- break;
- }
- switch (op_.format){
- case Resize::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case Resize::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case Resize::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case Resize::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case Resize::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case Resize::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case Resize::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case Resize::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case Resize::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case Resize::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case Resize::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case Resize::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case Resize::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case Resize::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case Resize::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case Resize::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case Resize::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case Resize::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- return props_;
- }
- std::string Resize_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Resize>();
- static_cast<void>(op_);
- return "Resize";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Resize, Resize)
- .hash(Resize_hash_impl)
- .is_same_st(Resize_is_same_st_impl)
- .props(Resize_props_impl)
- .make_name(Resize_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(SVD);
-
- namespace {
- size_t SVD_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SVD>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.full_matrices));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.compute_uv));
- return val;
- }
- bool SVD_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<SVD>(),
- &&b_ = rhs_.cast_final_safe<SVD>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.full_matrices != b_.full_matrices) return false;
- if (a_.compute_uv != b_.compute_uv) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> SVD_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SVD>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("full_matrices", std::to_string(op_.full_matrices));
- props_.emplace_back("compute_uv", std::to_string(op_.compute_uv));
- return props_;
- }
- std::string SVD_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SVD>();
- static_cast<void>(op_);
- return "SVD";
- }
- } // anonymous namespace
- OP_TRAIT_REG(SVD, SVD)
- .hash(SVD_hash_impl)
- .is_same_st(SVD_is_same_st_impl)
- .props(SVD_props_impl)
- .make_name(SVD_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(SetMeshIndexing);
-
- namespace {
- size_t SetMeshIndexing_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SetMeshIndexing>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool SetMeshIndexing_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<SetMeshIndexing>(),
- &&b_ = rhs_.cast_final_safe<SetMeshIndexing>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> SetMeshIndexing_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SetMeshIndexing>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string SetMeshIndexing_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SetMeshIndexing>();
- static_cast<void>(op_);
- return "SetMeshIndexing";
- }
- } // anonymous namespace
- OP_TRAIT_REG(SetMeshIndexing, SetMeshIndexing)
- .hash(SetMeshIndexing_hash_impl)
- .is_same_st(SetMeshIndexing_is_same_st_impl)
- .props(SetMeshIndexing_props_impl)
- .make_name(SetMeshIndexing_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(SetSubtensor);
-
- namespace {
- size_t SetSubtensor_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SetSubtensor>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool SetSubtensor_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<SetSubtensor>(),
- &&b_ = rhs_.cast_final_safe<SetSubtensor>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> SetSubtensor_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SetSubtensor>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string SetSubtensor_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SetSubtensor>();
- static_cast<void>(op_);
- return "SetSubtensor";
- }
- } // anonymous namespace
- OP_TRAIT_REG(SetSubtensor, SetSubtensor)
- .hash(SetSubtensor_hash_impl)
- .is_same_st(SetSubtensor_is_same_st_impl)
- .props(SetSubtensor_props_impl)
- .make_name(SetSubtensor_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(ShuffleRNG);
-
- namespace {
- size_t ShuffleRNG_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ShuffleRNG>();
- static_cast<void>(op_);
-
- return mgb::hash_pair_combine(
- mgb::hash(op_.dyn_typeinfo()),
- mgb::hash(op_.handle)
- );
- }
- bool ShuffleRNG_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<ShuffleRNG>(),
- &&b_ = rhs_.cast_final_safe<ShuffleRNG>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return a_.handle == b_.handle;
- }
- std::vector<std::pair<const char*, std::string>> ShuffleRNG_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ShuffleRNG>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("seed", std::to_string(op_.seed));
- props_.emplace_back("handle", std::to_string(op_.handle));
- return props_;
- }
- std::string ShuffleRNG_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ShuffleRNG>();
- static_cast<void>(op_);
- return "ShuffleRNG";
- }
- } // anonymous namespace
- OP_TRAIT_REG(ShuffleRNG, ShuffleRNG)
- .hash(ShuffleRNG_hash_impl)
- .is_same_st(ShuffleRNG_is_same_st_impl)
- .props(ShuffleRNG_props_impl)
- .make_name(ShuffleRNG_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(SlidingWindowTranspose);
-
- namespace {
- size_t SlidingWindowTranspose_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SlidingWindowTranspose>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.out_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.out_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.window_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.window_w));
- return val;
- }
- bool SlidingWindowTranspose_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<SlidingWindowTranspose>(),
- &&b_ = rhs_.cast_final_safe<SlidingWindowTranspose>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.out_h != b_.out_h) return false;
- if (a_.out_w != b_.out_w) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.window_h != b_.window_h) return false;
- if (a_.window_w != b_.window_w) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> SlidingWindowTranspose_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SlidingWindowTranspose>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("out_h", std::to_string(op_.out_h));
- props_.emplace_back("out_w", std::to_string(op_.out_w));
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- props_.emplace_back("window_h", std::to_string(op_.window_h));
- props_.emplace_back("window_w", std::to_string(op_.window_w));
- return props_;
- }
- std::string SlidingWindowTranspose_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SlidingWindowTranspose>();
- static_cast<void>(op_);
- return "SlidingWindowTranspose";
- }
- } // anonymous namespace
- OP_TRAIT_REG(SlidingWindowTranspose, SlidingWindowTranspose)
- .hash(SlidingWindowTranspose_hash_impl)
- .is_same_st(SlidingWindowTranspose_is_same_st_impl)
- .props(SlidingWindowTranspose_props_impl)
- .make_name(SlidingWindowTranspose_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Softmax);
-
- namespace {
- size_t Softmax_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Softmax>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- return val;
- }
- bool Softmax_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Softmax>(),
- &&b_ = rhs_.cast_final_safe<Softmax>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Softmax_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Softmax>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- return props_;
- }
- std::string Softmax_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Softmax>();
- static_cast<void>(op_);
- return "Softmax";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Softmax, Softmax)
- .hash(Softmax_hash_impl)
- .is_same_st(Softmax_is_same_st_impl)
- .props(Softmax_props_impl)
- .make_name(Softmax_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Split);
-
- namespace {
- size_t Split_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Split>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.nsections));
- return val;
- }
- bool Split_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Split>(),
- &&b_ = rhs_.cast_final_safe<Split>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- if (a_.nsections != b_.nsections) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Split_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Split>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- props_.emplace_back("nsections", std::to_string(op_.nsections));
- return props_;
- }
- std::string Split_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Split>();
- static_cast<void>(op_);
- return "Split";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Split, Split)
- .hash(Split_hash_impl)
- .is_same_st(Split_is_same_st_impl)
- .props(Split_props_impl)
- .make_name(Split_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Subtensor);
-
- namespace {
- size_t Subtensor_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Subtensor>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool Subtensor_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Subtensor>(),
- &&b_ = rhs_.cast_final_safe<Subtensor>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Subtensor_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Subtensor>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string Subtensor_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Subtensor>();
- static_cast<void>(op_);
- return "Subtensor";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Subtensor, Subtensor)
- .hash(Subtensor_hash_impl)
- .is_same_st(Subtensor_is_same_st_impl)
- .props(Subtensor_props_impl)
- .make_name(Subtensor_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(TQT);
-
- namespace {
- size_t TQT_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TQT>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.qmin));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.qmax));
- return val;
- }
- bool TQT_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<TQT>(),
- &&b_ = rhs_.cast_final_safe<TQT>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.qmin != b_.qmin) return false;
- if (a_.qmax != b_.qmax) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> TQT_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TQT>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("qmin", std::to_string(op_.qmin));
- props_.emplace_back("qmax", std::to_string(op_.qmax));
- return props_;
- }
- std::string TQT_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TQT>();
- static_cast<void>(op_);
- return "TQT";
- }
- } // anonymous namespace
- OP_TRAIT_REG(TQT, TQT)
- .hash(TQT_hash_impl)
- .is_same_st(TQT_is_same_st_impl)
- .props(TQT_props_impl)
- .make_name(TQT_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(TensorRTRuntime);
-
- namespace {
- size_t TensorRTRuntime_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TensorRTRuntime>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.buf));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.buf_size));
- return val;
- }
- bool TensorRTRuntime_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<TensorRTRuntime>(),
- &&b_ = rhs_.cast_final_safe<TensorRTRuntime>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.buf != b_.buf) return false;
- if (a_.buf_size != b_.buf_size) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> TensorRTRuntime_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TensorRTRuntime>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("buf", op_.buf);
- props_.emplace_back("buf_size", std::to_string(op_.buf_size));
- return props_;
- }
- std::string TensorRTRuntime_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TensorRTRuntime>();
- static_cast<void>(op_);
- return "TensorRTRuntime";
- }
- } // anonymous namespace
- OP_TRAIT_REG(TensorRTRuntime, TensorRTRuntime)
- .hash(TensorRTRuntime_hash_impl)
- .is_same_st(TensorRTRuntime_is_same_st_impl)
- .props(TensorRTRuntime_props_impl)
- .make_name(TensorRTRuntime_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(TopK);
-
- namespace {
- size_t TopK_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TopK>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- return val;
- }
- bool TopK_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<TopK>(),
- &&b_ = rhs_.cast_final_safe<TopK>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> TopK_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TopK>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case TopK::Mode::KTH_ONLY:
- props_.emplace_back("mode", "KTH_ONLY");
- break;
- case TopK::Mode::VALUE_IDX_NOSORT:
- props_.emplace_back("mode", "VALUE_IDX_NOSORT");
- break;
- case TopK::Mode::VALUE_IDX_SORTED:
- props_.emplace_back("mode", "VALUE_IDX_SORTED");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- return props_;
- }
- std::string TopK_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TopK>();
- static_cast<void>(op_);
- return "TopK";
- }
- } // anonymous namespace
- OP_TRAIT_REG(TopK, TopK)
- .hash(TopK_hash_impl)
- .is_same_st(TopK_is_same_st_impl)
- .props(TopK_props_impl)
- .make_name(TopK_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(TypeCvt);
-
- namespace {
- size_t TypeCvt_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TypeCvt>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dtype.handle()));
- return val;
- }
- bool TypeCvt_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<TypeCvt>(),
- &&b_ = rhs_.cast_final_safe<TypeCvt>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.dtype != b_.dtype) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> TypeCvt_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TypeCvt>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("dtype", op_.dtype.name());
- return props_;
- }
- std::string TypeCvt_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TypeCvt>();
- static_cast<void>(op_);
- return "TypeCvt";
- }
- } // anonymous namespace
- OP_TRAIT_REG(TypeCvt, TypeCvt)
- .hash(TypeCvt_hash_impl)
- .is_same_st(TypeCvt_is_same_st_impl)
- .props(TypeCvt_props_impl)
- .make_name(TypeCvt_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(UniformRNG);
-
- namespace {
- size_t UniformRNG_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<UniformRNG>();
- static_cast<void>(op_);
-
- return mgb::hash_pair_combine(
- mgb::hash(op_.dyn_typeinfo()),
- mgb::hash_pair_combine(
- mgb::hash(op_.handle),
- mgb::hash(op_.dtype.enumv())
- )
- );
- }
- bool UniformRNG_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<UniformRNG>(),
- &&b_ = rhs_.cast_final_safe<UniformRNG>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return a_.handle == b_.handle && a_.dtype == b_.dtype;
- }
- std::vector<std::pair<const char*, std::string>> UniformRNG_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<UniformRNG>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("seed", std::to_string(op_.seed));
- props_.emplace_back("dtype", op_.dtype.name());
- props_.emplace_back("handle", std::to_string(op_.handle));
- return props_;
- }
- std::string UniformRNG_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<UniformRNG>();
- static_cast<void>(op_);
- return "UniformRNG";
- }
- } // anonymous namespace
- OP_TRAIT_REG(UniformRNG, UniformRNG)
- .hash(UniformRNG_hash_impl)
- .is_same_st(UniformRNG_is_same_st_impl)
- .props(UniformRNG_props_impl)
- .make_name(UniformRNG_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(WarpAffine);
-
- namespace {
- size_t WarpAffine_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<WarpAffine>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.imode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.border_mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.border_val));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- return val;
- }
- bool WarpAffine_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<WarpAffine>(),
- &&b_ = rhs_.cast_final_safe<WarpAffine>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.imode != b_.imode) return false;
- if (a_.border_mode != b_.border_mode) return false;
- if (a_.border_val != b_.border_val) return false;
- if (a_.format != b_.format) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> WarpAffine_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<WarpAffine>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.imode){
- case WarpAffine::InterpolationMode::NEAREST:
- props_.emplace_back("imode", "NEAREST");
- break;
- case WarpAffine::InterpolationMode::LINEAR:
- props_.emplace_back("imode", "LINEAR");
- break;
- case WarpAffine::InterpolationMode::AREA:
- props_.emplace_back("imode", "AREA");
- break;
- case WarpAffine::InterpolationMode::CUBIC:
- props_.emplace_back("imode", "CUBIC");
- break;
- case WarpAffine::InterpolationMode::LANCZOS4:
- props_.emplace_back("imode", "LANCZOS4");
- break;
- default:
- props_.emplace_back("imode", "INVALID");
- break;
- }
- switch (op_.border_mode){
- case WarpAffine::BorderMode::REPLICATE:
- props_.emplace_back("border_mode", "REPLICATE");
- break;
- case WarpAffine::BorderMode::REFLECT:
- props_.emplace_back("border_mode", "REFLECT");
- break;
- case WarpAffine::BorderMode::REFLECT_101:
- props_.emplace_back("border_mode", "REFLECT_101");
- break;
- case WarpAffine::BorderMode::WRAP:
- props_.emplace_back("border_mode", "WRAP");
- break;
- case WarpAffine::BorderMode::CONSTANT:
- props_.emplace_back("border_mode", "CONSTANT");
- break;
- case WarpAffine::BorderMode::TRANSPARENT:
- props_.emplace_back("border_mode", "TRANSPARENT");
- break;
- case WarpAffine::BorderMode::ISOLATED:
- props_.emplace_back("border_mode", "ISOLATED");
- break;
- default:
- props_.emplace_back("border_mode", "INVALID");
- break;
- }
- props_.emplace_back("border_val", std::to_string(op_.border_val));
- switch (op_.format){
- case WarpAffine::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case WarpAffine::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case WarpAffine::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case WarpAffine::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case WarpAffine::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case WarpAffine::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case WarpAffine::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case WarpAffine::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case WarpAffine::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case WarpAffine::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case WarpAffine::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case WarpAffine::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case WarpAffine::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case WarpAffine::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case WarpAffine::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case WarpAffine::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case WarpAffine::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case WarpAffine::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- return props_;
- }
- std::string WarpAffine_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<WarpAffine>();
- static_cast<void>(op_);
- return "WarpAffine";
- }
- } // anonymous namespace
- OP_TRAIT_REG(WarpAffine, WarpAffine)
- .hash(WarpAffine_hash_impl)
- .is_same_st(WarpAffine_is_same_st_impl)
- .props(WarpAffine_props_impl)
- .make_name(WarpAffine_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(WarpPerspective);
-
- namespace {
- size_t WarpPerspective_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<WarpPerspective>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.imode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.bmode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.border_val));
- return val;
- }
- bool WarpPerspective_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<WarpPerspective>(),
- &&b_ = rhs_.cast_final_safe<WarpPerspective>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.imode != b_.imode) return false;
- if (a_.bmode != b_.bmode) return false;
- if (a_.format != b_.format) return false;
- if (a_.border_val != b_.border_val) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> WarpPerspective_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<WarpPerspective>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.imode){
- case WarpPerspective::InterpolationMode::NEAREST:
- props_.emplace_back("imode", "NEAREST");
- break;
- case WarpPerspective::InterpolationMode::LINEAR:
- props_.emplace_back("imode", "LINEAR");
- break;
- case WarpPerspective::InterpolationMode::AREA:
- props_.emplace_back("imode", "AREA");
- break;
- case WarpPerspective::InterpolationMode::CUBIC:
- props_.emplace_back("imode", "CUBIC");
- break;
- case WarpPerspective::InterpolationMode::LANCZOS4:
- props_.emplace_back("imode", "LANCZOS4");
- break;
- default:
- props_.emplace_back("imode", "INVALID");
- break;
- }
- switch (op_.bmode){
- case WarpPerspective::BorderMode::REPLICATE:
- props_.emplace_back("bmode", "REPLICATE");
- break;
- case WarpPerspective::BorderMode::REFLECT:
- props_.emplace_back("bmode", "REFLECT");
- break;
- case WarpPerspective::BorderMode::REFLECT_101:
- props_.emplace_back("bmode", "REFLECT_101");
- break;
- case WarpPerspective::BorderMode::WRAP:
- props_.emplace_back("bmode", "WRAP");
- break;
- case WarpPerspective::BorderMode::CONSTANT:
- props_.emplace_back("bmode", "CONSTANT");
- break;
- case WarpPerspective::BorderMode::TRANSPARENT:
- props_.emplace_back("bmode", "TRANSPARENT");
- break;
- case WarpPerspective::BorderMode::ISOLATED:
- props_.emplace_back("bmode", "ISOLATED");
- break;
- default:
- props_.emplace_back("bmode", "INVALID");
- break;
- }
- switch (op_.format){
- case WarpPerspective::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case WarpPerspective::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case WarpPerspective::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case WarpPerspective::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case WarpPerspective::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case WarpPerspective::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case WarpPerspective::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case WarpPerspective::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case WarpPerspective::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case WarpPerspective::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case WarpPerspective::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case WarpPerspective::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case WarpPerspective::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case WarpPerspective::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case WarpPerspective::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case WarpPerspective::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case WarpPerspective::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case WarpPerspective::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- props_.emplace_back("border_val", std::to_string(op_.border_val));
- return props_;
- }
- std::string WarpPerspective_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<WarpPerspective>();
- static_cast<void>(op_);
- return "WarpPerspective";
- }
- } // anonymous namespace
- OP_TRAIT_REG(WarpPerspective, WarpPerspective)
- .hash(WarpPerspective_hash_impl)
- .is_same_st(WarpPerspective_is_same_st_impl)
- .props(WarpPerspective_props_impl)
- .make_name(WarpPerspective_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(WarpPerspectiveBackwardData);
-
- namespace {
- size_t WarpPerspectiveBackwardData_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<WarpPerspectiveBackwardData>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.imode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.bmode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.border_val));
- return val;
- }
- bool WarpPerspectiveBackwardData_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<WarpPerspectiveBackwardData>(),
- &&b_ = rhs_.cast_final_safe<WarpPerspectiveBackwardData>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.imode != b_.imode) return false;
- if (a_.bmode != b_.bmode) return false;
- if (a_.format != b_.format) return false;
- if (a_.border_val != b_.border_val) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> WarpPerspectiveBackwardData_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<WarpPerspectiveBackwardData>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.imode){
- case WarpPerspectiveBackwardData::InterpolationMode::NEAREST:
- props_.emplace_back("imode", "NEAREST");
- break;
- case WarpPerspectiveBackwardData::InterpolationMode::LINEAR:
- props_.emplace_back("imode", "LINEAR");
- break;
- case WarpPerspectiveBackwardData::InterpolationMode::AREA:
- props_.emplace_back("imode", "AREA");
- break;
- case WarpPerspectiveBackwardData::InterpolationMode::CUBIC:
- props_.emplace_back("imode", "CUBIC");
- break;
- case WarpPerspectiveBackwardData::InterpolationMode::LANCZOS4:
- props_.emplace_back("imode", "LANCZOS4");
- break;
- default:
- props_.emplace_back("imode", "INVALID");
- break;
- }
- switch (op_.bmode){
- case WarpPerspectiveBackwardData::BorderMode::REPLICATE:
- props_.emplace_back("bmode", "REPLICATE");
- break;
- case WarpPerspectiveBackwardData::BorderMode::REFLECT:
- props_.emplace_back("bmode", "REFLECT");
- break;
- case WarpPerspectiveBackwardData::BorderMode::REFLECT_101:
- props_.emplace_back("bmode", "REFLECT_101");
- break;
- case WarpPerspectiveBackwardData::BorderMode::WRAP:
- props_.emplace_back("bmode", "WRAP");
- break;
- case WarpPerspectiveBackwardData::BorderMode::CONSTANT:
- props_.emplace_back("bmode", "CONSTANT");
- break;
- case WarpPerspectiveBackwardData::BorderMode::TRANSPARENT:
- props_.emplace_back("bmode", "TRANSPARENT");
- break;
- case WarpPerspectiveBackwardData::BorderMode::ISOLATED:
- props_.emplace_back("bmode", "ISOLATED");
- break;
- default:
- props_.emplace_back("bmode", "INVALID");
- break;
- }
- switch (op_.format){
- case WarpPerspectiveBackwardData::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case WarpPerspectiveBackwardData::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case WarpPerspectiveBackwardData::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case WarpPerspectiveBackwardData::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case WarpPerspectiveBackwardData::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case WarpPerspectiveBackwardData::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case WarpPerspectiveBackwardData::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case WarpPerspectiveBackwardData::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case WarpPerspectiveBackwardData::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case WarpPerspectiveBackwardData::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case WarpPerspectiveBackwardData::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case WarpPerspectiveBackwardData::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case WarpPerspectiveBackwardData::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case WarpPerspectiveBackwardData::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case WarpPerspectiveBackwardData::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case WarpPerspectiveBackwardData::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case WarpPerspectiveBackwardData::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case WarpPerspectiveBackwardData::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- props_.emplace_back("border_val", std::to_string(op_.border_val));
- return props_;
- }
- std::string WarpPerspectiveBackwardData_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<WarpPerspectiveBackwardData>();
- static_cast<void>(op_);
- return "WarpPerspectiveBackwardData";
- }
- } // anonymous namespace
- OP_TRAIT_REG(WarpPerspectiveBackwardData, WarpPerspectiveBackwardData)
- .hash(WarpPerspectiveBackwardData_hash_impl)
- .is_same_st(WarpPerspectiveBackwardData_is_same_st_impl)
- .props(WarpPerspectiveBackwardData_props_impl)
- .make_name(WarpPerspectiveBackwardData_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(WarpPerspectiveBackwardMat);
-
- namespace {
- size_t WarpPerspectiveBackwardMat_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<WarpPerspectiveBackwardMat>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.imode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.bmode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.border_val));
- return val;
- }
- bool WarpPerspectiveBackwardMat_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<WarpPerspectiveBackwardMat>(),
- &&b_ = rhs_.cast_final_safe<WarpPerspectiveBackwardMat>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.imode != b_.imode) return false;
- if (a_.bmode != b_.bmode) return false;
- if (a_.format != b_.format) return false;
- if (a_.border_val != b_.border_val) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> WarpPerspectiveBackwardMat_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<WarpPerspectiveBackwardMat>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.imode){
- case WarpPerspectiveBackwardMat::InterpolationMode::NEAREST:
- props_.emplace_back("imode", "NEAREST");
- break;
- case WarpPerspectiveBackwardMat::InterpolationMode::LINEAR:
- props_.emplace_back("imode", "LINEAR");
- break;
- case WarpPerspectiveBackwardMat::InterpolationMode::AREA:
- props_.emplace_back("imode", "AREA");
- break;
- case WarpPerspectiveBackwardMat::InterpolationMode::CUBIC:
- props_.emplace_back("imode", "CUBIC");
- break;
- case WarpPerspectiveBackwardMat::InterpolationMode::LANCZOS4:
- props_.emplace_back("imode", "LANCZOS4");
- break;
- default:
- props_.emplace_back("imode", "INVALID");
- break;
- }
- switch (op_.bmode){
- case WarpPerspectiveBackwardMat::BorderMode::REPLICATE:
- props_.emplace_back("bmode", "REPLICATE");
- break;
- case WarpPerspectiveBackwardMat::BorderMode::REFLECT:
- props_.emplace_back("bmode", "REFLECT");
- break;
- case WarpPerspectiveBackwardMat::BorderMode::REFLECT_101:
- props_.emplace_back("bmode", "REFLECT_101");
- break;
- case WarpPerspectiveBackwardMat::BorderMode::WRAP:
- props_.emplace_back("bmode", "WRAP");
- break;
- case WarpPerspectiveBackwardMat::BorderMode::CONSTANT:
- props_.emplace_back("bmode", "CONSTANT");
- break;
- case WarpPerspectiveBackwardMat::BorderMode::TRANSPARENT:
- props_.emplace_back("bmode", "TRANSPARENT");
- break;
- case WarpPerspectiveBackwardMat::BorderMode::ISOLATED:
- props_.emplace_back("bmode", "ISOLATED");
- break;
- default:
- props_.emplace_back("bmode", "INVALID");
- break;
- }
- switch (op_.format){
- case WarpPerspectiveBackwardMat::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case WarpPerspectiveBackwardMat::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case WarpPerspectiveBackwardMat::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case WarpPerspectiveBackwardMat::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case WarpPerspectiveBackwardMat::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case WarpPerspectiveBackwardMat::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case WarpPerspectiveBackwardMat::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case WarpPerspectiveBackwardMat::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case WarpPerspectiveBackwardMat::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case WarpPerspectiveBackwardMat::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case WarpPerspectiveBackwardMat::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case WarpPerspectiveBackwardMat::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case WarpPerspectiveBackwardMat::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case WarpPerspectiveBackwardMat::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case WarpPerspectiveBackwardMat::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case WarpPerspectiveBackwardMat::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case WarpPerspectiveBackwardMat::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case WarpPerspectiveBackwardMat::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- props_.emplace_back("border_val", std::to_string(op_.border_val));
- return props_;
- }
- std::string WarpPerspectiveBackwardMat_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<WarpPerspectiveBackwardMat>();
- static_cast<void>(op_);
- return "WarpPerspectiveBackwardMat";
- }
- } // anonymous namespace
- OP_TRAIT_REG(WarpPerspectiveBackwardMat, WarpPerspectiveBackwardMat)
- .hash(WarpPerspectiveBackwardMat_hash_impl)
- .is_same_st(WarpPerspectiveBackwardMat_is_same_st_impl)
- .props(WarpPerspectiveBackwardMat_props_impl)
- .make_name(WarpPerspectiveBackwardMat_make_name_impl);
-
- // clang-format on