- // clang-format off
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(AdaptivePooling);
-
- namespace {
- size_t AdaptivePooling_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AdaptivePooling>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.shape));
- return val;
- }
- bool AdaptivePooling_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<AdaptivePooling>(),
- &&b_ = rhs_.cast_final_safe<AdaptivePooling>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.format != b_.format) return false;
- if (a_.shape != b_.shape) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> AdaptivePooling_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AdaptivePooling>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case AdaptivePooling::Mode::MAX:
- props_.emplace_back("mode", "MAX");
- break;
- case AdaptivePooling::Mode::AVERAGE:
- props_.emplace_back("mode", "AVERAGE");
- break;
- case AdaptivePooling::Mode::AVERAGE_COUNT_EXCLUDE_PADDING:
- props_.emplace_back("mode", "AVERAGE_COUNT_EXCLUDE_PADDING");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- switch (op_.format){
- case AdaptivePooling::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case AdaptivePooling::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case AdaptivePooling::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case AdaptivePooling::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case AdaptivePooling::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case AdaptivePooling::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case AdaptivePooling::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case AdaptivePooling::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case AdaptivePooling::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case AdaptivePooling::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case AdaptivePooling::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case AdaptivePooling::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case AdaptivePooling::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case AdaptivePooling::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case AdaptivePooling::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case AdaptivePooling::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case AdaptivePooling::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case AdaptivePooling::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- props_.emplace_back("shape", "{std::vector}");
- return props_;
- }
- std::string AdaptivePooling_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AdaptivePooling>();
- static_cast<void>(op_);
- return "AdaptivePooling";
- }
- } // anonymous namespace
- OP_TRAIT_REG(AdaptivePooling, AdaptivePooling)
- .hash(AdaptivePooling_hash_impl)
- .is_same_st(AdaptivePooling_is_same_st_impl)
- .props(AdaptivePooling_props_impl)
- .make_name(AdaptivePooling_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(AddAxis);
-
- namespace {
- size_t AddAxis_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AddAxis>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- return val;
- }
- bool AddAxis_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<AddAxis>(),
- &&b_ = rhs_.cast_final_safe<AddAxis>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> AddAxis_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AddAxis>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", "{std::vector}");
- return props_;
- }
- std::string AddAxis_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AddAxis>();
- static_cast<void>(op_);
- return "AddAxis";
- }
- } // anonymous namespace
- OP_TRAIT_REG(AddAxis, AddAxis)
- .hash(AddAxis_hash_impl)
- .is_same_st(AddAxis_is_same_st_impl)
- .props(AddAxis_props_impl)
- .make_name(AddAxis_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Argmax);
-
- namespace {
- size_t Argmax_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Argmax>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- return val;
- }
- bool Argmax_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Argmax>(),
- &&b_ = rhs_.cast_final_safe<Argmax>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Argmax_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Argmax>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- return props_;
- }
- std::string Argmax_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Argmax>();
- static_cast<void>(op_);
- return "Argmax";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Argmax, Argmax)
- .hash(Argmax_hash_impl)
- .is_same_st(Argmax_is_same_st_impl)
- .props(Argmax_props_impl)
- .make_name(Argmax_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Argmin);
-
- namespace {
- size_t Argmin_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Argmin>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- return val;
- }
- bool Argmin_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Argmin>(),
- &&b_ = rhs_.cast_final_safe<Argmin>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Argmin_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Argmin>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- return props_;
- }
- std::string Argmin_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Argmin>();
- static_cast<void>(op_);
- return "Argmin";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Argmin, Argmin)
- .hash(Argmin_hash_impl)
- .is_same_st(Argmin_is_same_st_impl)
- .props(Argmin_props_impl)
- .make_name(Argmin_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Argsort);
-
- namespace {
- size_t Argsort_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Argsort>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.order));
- return val;
- }
- bool Argsort_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Argsort>(),
- &&b_ = rhs_.cast_final_safe<Argsort>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.order != b_.order) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Argsort_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Argsort>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.order){
- case Argsort::Order::ASCENDING:
- props_.emplace_back("order", "ASCENDING");
- break;
- case Argsort::Order::DESCENDING:
- props_.emplace_back("order", "DESCENDING");
- break;
- default:
- props_.emplace_back("order", "INVALID");
- break;
- }
- return props_;
- }
- std::string Argsort_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Argsort>();
- static_cast<void>(op_);
- return "Argsort";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Argsort, Argsort)
- .hash(Argsort_hash_impl)
- .is_same_st(Argsort_is_same_st_impl)
- .props(Argsort_props_impl)
- .make_name(Argsort_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(AssertEqual);
-
- namespace {
- size_t AssertEqual_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AssertEqual>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.maxerr));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.verbose));
- return val;
- }
- bool AssertEqual_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<AssertEqual>(),
- &&b_ = rhs_.cast_final_safe<AssertEqual>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.maxerr != b_.maxerr) return false;
- if (a_.verbose != b_.verbose) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> AssertEqual_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AssertEqual>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("maxerr", std::to_string(op_.maxerr));
- props_.emplace_back("verbose", std::to_string(op_.verbose));
- return props_;
- }
- std::string AssertEqual_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AssertEqual>();
- static_cast<void>(op_);
- return "AssertEqual";
- }
- } // anonymous namespace
- OP_TRAIT_REG(AssertEqual, AssertEqual)
- .hash(AssertEqual_hash_impl)
- .is_same_st(AssertEqual_is_same_st_impl)
- .props(AssertEqual_props_impl)
- .make_name(AssertEqual_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(AtlasRuntime);
-
- namespace {
- size_t AtlasRuntime_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AtlasRuntime>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.buf));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.buf_size));
- return val;
- }
- bool AtlasRuntime_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<AtlasRuntime>(),
- &&b_ = rhs_.cast_final_safe<AtlasRuntime>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.buf != b_.buf) return false;
- if (a_.buf_size != b_.buf_size) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> AtlasRuntime_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AtlasRuntime>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("buf", op_.buf);
- props_.emplace_back("buf_size", std::to_string(op_.buf_size));
- return props_;
- }
- std::string AtlasRuntime_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<AtlasRuntime>();
- static_cast<void>(op_);
- return "AtlasRuntime";
- }
- } // anonymous namespace
- OP_TRAIT_REG(AtlasRuntime, AtlasRuntime)
- .hash(AtlasRuntime_hash_impl)
- .is_same_st(AtlasRuntime_is_same_st_impl)
- .props(AtlasRuntime_props_impl)
- .make_name(AtlasRuntime_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Barrier);
-
- namespace {
- size_t Barrier_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Barrier>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.comp_node));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.nr_outputs));
- return val;
- }
- bool Barrier_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Barrier>(),
- &&b_ = rhs_.cast_final_safe<Barrier>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.comp_node != b_.comp_node) return false;
- if (a_.nr_outputs != b_.nr_outputs) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Barrier_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Barrier>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("comp_node", op_.comp_node.to_string());
- props_.emplace_back("nr_outputs", std::to_string(op_.nr_outputs));
- return props_;
- }
- std::string Barrier_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Barrier>();
- static_cast<void>(op_);
- return "Barrier";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Barrier, Barrier)
- .hash(Barrier_hash_impl)
- .is_same_st(Barrier_is_same_st_impl)
- .props(Barrier_props_impl)
- .make_name(Barrier_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(BatchConvBias);
-
- namespace {
- size_t BatchConvBias_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchConvBias>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.nonlineMode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.sparse));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.compute_mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dtype.handle()));
- return val;
- }
- bool BatchConvBias_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<BatchConvBias>(),
- &&b_ = rhs_.cast_final_safe<BatchConvBias>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.nonlineMode != b_.nonlineMode) return false;
- if (a_.mode != b_.mode) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.sparse != b_.sparse) return false;
- if (a_.format != b_.format) return false;
- if (a_.compute_mode != b_.compute_mode) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- if (a_.dtype != b_.dtype) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> BatchConvBias_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchConvBias>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.nonlineMode){
- case BatchConvBias::NonlineMode::IDENTITY:
- props_.emplace_back("nonlineMode", "IDENTITY");
- break;
- case BatchConvBias::NonlineMode::RELU:
- props_.emplace_back("nonlineMode", "RELU");
- break;
- case BatchConvBias::NonlineMode::SIGMOID:
- props_.emplace_back("nonlineMode", "SIGMOID");
- break;
- case BatchConvBias::NonlineMode::H_SWISH:
- props_.emplace_back("nonlineMode", "H_SWISH");
- break;
- default:
- props_.emplace_back("nonlineMode", "INVALID");
- break;
- }
- switch (op_.mode){
- case BatchConvBias::Mode::CROSS_CORRELATION:
- props_.emplace_back("mode", "CROSS_CORRELATION");
- break;
- case BatchConvBias::Mode::CONVOLUTION:
- props_.emplace_back("mode", "CONVOLUTION");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- switch (op_.sparse){
- case BatchConvBias::Sparse::DENSE:
- props_.emplace_back("sparse", "DENSE");
- break;
- case BatchConvBias::Sparse::GROUP:
- props_.emplace_back("sparse", "GROUP");
- break;
- default:
- props_.emplace_back("sparse", "INVALID");
- break;
- }
- switch (op_.format){
- case BatchConvBias::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case BatchConvBias::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case BatchConvBias::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case BatchConvBias::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case BatchConvBias::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case BatchConvBias::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case BatchConvBias::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case BatchConvBias::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case BatchConvBias::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case BatchConvBias::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case BatchConvBias::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case BatchConvBias::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case BatchConvBias::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case BatchConvBias::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case BatchConvBias::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case BatchConvBias::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case BatchConvBias::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case BatchConvBias::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.compute_mode){
- case BatchConvBias::ComputeMode::DEFAULT:
- props_.emplace_back("compute_mode", "DEFAULT");
- break;
- case BatchConvBias::ComputeMode::FLOAT32:
- props_.emplace_back("compute_mode", "FLOAT32");
- break;
- default:
- props_.emplace_back("compute_mode", "INVALID");
- break;
- }
- switch (op_.strategy){
- case BatchConvBias::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case BatchConvBias::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case BatchConvBias::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case BatchConvBias::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- props_.emplace_back("dtype", op_.dtype.name());
- return props_;
- }
- std::string BatchConvBias_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchConvBias>();
- static_cast<void>(op_);
- return "BatchConvBias";
- }
- } // anonymous namespace
- OP_TRAIT_REG(BatchConvBias, BatchConvBias)
- .hash(BatchConvBias_hash_impl)
- .is_same_st(BatchConvBias_is_same_st_impl)
- .props(BatchConvBias_props_impl)
- .make_name(BatchConvBias_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(BatchNorm);
-
- namespace {
- size_t BatchNorm_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchNorm>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.param_dim));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.fwd_mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.epsilon));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.avg_factor));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.scale));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.bias));
- return val;
- }
- bool BatchNorm_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<BatchNorm>(),
- &&b_ = rhs_.cast_final_safe<BatchNorm>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.param_dim != b_.param_dim) return false;
- if (a_.fwd_mode != b_.fwd_mode) return false;
- if (a_.epsilon != b_.epsilon) return false;
- if (a_.avg_factor != b_.avg_factor) return false;
- if (a_.scale != b_.scale) return false;
- if (a_.bias != b_.bias) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> BatchNorm_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchNorm>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.param_dim){
- case BatchNorm::ParamDim::DIM_11HW:
- props_.emplace_back("param_dim", "DIM_11HW");
- break;
- case BatchNorm::ParamDim::DIM_1CHW:
- props_.emplace_back("param_dim", "DIM_1CHW");
- break;
- case BatchNorm::ParamDim::DIM_1C11:
- props_.emplace_back("param_dim", "DIM_1C11");
- break;
- case BatchNorm::ParamDim::DIM_111C:
- props_.emplace_back("param_dim", "DIM_111C");
- break;
- default:
- props_.emplace_back("param_dim", "INVALID");
- break;
- }
- switch (op_.fwd_mode){
- case BatchNorm::FwdMode::TRAINING:
- props_.emplace_back("fwd_mode", "TRAINING");
- break;
- case BatchNorm::FwdMode::INFERENCE:
- props_.emplace_back("fwd_mode", "INFERENCE");
- break;
- default:
- props_.emplace_back("fwd_mode", "INVALID");
- break;
- }
- props_.emplace_back("epsilon", std::to_string(op_.epsilon));
- props_.emplace_back("avg_factor", std::to_string(op_.avg_factor));
- props_.emplace_back("scale", std::to_string(op_.scale));
- props_.emplace_back("bias", std::to_string(op_.bias));
- return props_;
- }
- std::string BatchNorm_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchNorm>();
- static_cast<void>(op_);
- return "BatchNorm";
- }
- } // anonymous namespace
- OP_TRAIT_REG(BatchNorm, BatchNorm)
- .hash(BatchNorm_hash_impl)
- .is_same_st(BatchNorm_is_same_st_impl)
- .props(BatchNorm_props_impl)
- .make_name(BatchNorm_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(BatchNormBackward);
-
- namespace {
- size_t BatchNormBackward_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchNormBackward>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.param_dim));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.fwd_mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.epsilon));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.avg_factor));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.scale));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.bias));
- return val;
- }
- bool BatchNormBackward_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<BatchNormBackward>(),
- &&b_ = rhs_.cast_final_safe<BatchNormBackward>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.param_dim != b_.param_dim) return false;
- if (a_.fwd_mode != b_.fwd_mode) return false;
- if (a_.epsilon != b_.epsilon) return false;
- if (a_.avg_factor != b_.avg_factor) return false;
- if (a_.scale != b_.scale) return false;
- if (a_.bias != b_.bias) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> BatchNormBackward_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchNormBackward>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.param_dim){
- case BatchNormBackward::ParamDim::DIM_11HW:
- props_.emplace_back("param_dim", "DIM_11HW");
- break;
- case BatchNormBackward::ParamDim::DIM_1CHW:
- props_.emplace_back("param_dim", "DIM_1CHW");
- break;
- case BatchNormBackward::ParamDim::DIM_1C11:
- props_.emplace_back("param_dim", "DIM_1C11");
- break;
- case BatchNormBackward::ParamDim::DIM_111C:
- props_.emplace_back("param_dim", "DIM_111C");
- break;
- default:
- props_.emplace_back("param_dim", "INVALID");
- break;
- }
- switch (op_.fwd_mode){
- case BatchNormBackward::FwdMode::TRAINING:
- props_.emplace_back("fwd_mode", "TRAINING");
- break;
- case BatchNormBackward::FwdMode::INFERENCE:
- props_.emplace_back("fwd_mode", "INFERENCE");
- break;
- default:
- props_.emplace_back("fwd_mode", "INVALID");
- break;
- }
- props_.emplace_back("epsilon", std::to_string(op_.epsilon));
- props_.emplace_back("avg_factor", std::to_string(op_.avg_factor));
- props_.emplace_back("scale", std::to_string(op_.scale));
- props_.emplace_back("bias", std::to_string(op_.bias));
- return props_;
- }
- std::string BatchNormBackward_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchNormBackward>();
- static_cast<void>(op_);
- return "BatchNormBackward";
- }
- } // anonymous namespace
- OP_TRAIT_REG(BatchNormBackward, BatchNormBackward)
- .hash(BatchNormBackward_hash_impl)
- .is_same_st(BatchNormBackward_is_same_st_impl)
- .props(BatchNormBackward_props_impl)
- .make_name(BatchNormBackward_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(BatchedIncrMeshIndexing);
-
- namespace {
- size_t BatchedIncrMeshIndexing_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedIncrMeshIndexing>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool BatchedIncrMeshIndexing_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<BatchedIncrMeshIndexing>(),
- &&b_ = rhs_.cast_final_safe<BatchedIncrMeshIndexing>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> BatchedIncrMeshIndexing_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedIncrMeshIndexing>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string BatchedIncrMeshIndexing_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedIncrMeshIndexing>();
- static_cast<void>(op_);
- return "BatchedIncrMeshIndexing";
- }
- } // anonymous namespace
- OP_TRAIT_REG(BatchedIncrMeshIndexing, BatchedIncrMeshIndexing)
- .hash(BatchedIncrMeshIndexing_hash_impl)
- .is_same_st(BatchedIncrMeshIndexing_is_same_st_impl)
- .props(BatchedIncrMeshIndexing_props_impl)
- .make_name(BatchedIncrMeshIndexing_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(BatchedMatrixMul);
-
- namespace {
- size_t BatchedMatrixMul_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedMatrixMul>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.transposeA));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.transposeB));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.compute_mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dimA));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dimB));
- return val;
- }
- bool BatchedMatrixMul_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<BatchedMatrixMul>(),
- &&b_ = rhs_.cast_final_safe<BatchedMatrixMul>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.transposeA != b_.transposeA) return false;
- if (a_.transposeB != b_.transposeB) return false;
- if (a_.compute_mode != b_.compute_mode) return false;
- if (a_.format != b_.format) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- if (a_.dimA != b_.dimA) return false;
- if (a_.dimB != b_.dimB) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> BatchedMatrixMul_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedMatrixMul>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("transposeA", std::to_string(op_.transposeA));
- props_.emplace_back("transposeB", std::to_string(op_.transposeB));
- switch (op_.compute_mode){
- case BatchedMatrixMul::ComputeMode::DEFAULT:
- props_.emplace_back("compute_mode", "DEFAULT");
- break;
- case BatchedMatrixMul::ComputeMode::FLOAT32:
- props_.emplace_back("compute_mode", "FLOAT32");
- break;
- default:
- props_.emplace_back("compute_mode", "INVALID");
- break;
- }
- switch (op_.format){
- case BatchedMatrixMul::Format::DEFAULT:
- props_.emplace_back("format", "DEFAULT");
- break;
- case BatchedMatrixMul::Format::MK4:
- props_.emplace_back("format", "MK4");
- break;
- case BatchedMatrixMul::Format::MK8:
- props_.emplace_back("format", "MK8");
- break;
- case BatchedMatrixMul::Format::MK4_DOT:
- props_.emplace_back("format", "MK4_DOT");
- break;
- case BatchedMatrixMul::Format::N32K4_DOT:
- props_.emplace_back("format", "N32K4_DOT");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.strategy){
- case BatchedMatrixMul::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case BatchedMatrixMul::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case BatchedMatrixMul::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case BatchedMatrixMul::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- props_.emplace_back("dimA", std::to_string(op_.dimA));
- props_.emplace_back("dimB", std::to_string(op_.dimB));
- return props_;
- }
- std::string BatchedMatrixMul_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedMatrixMul>();
- static_cast<void>(op_);
- return "BatchedMatrixMul";
- }
- } // anonymous namespace
- OP_TRAIT_REG(BatchedMatrixMul, BatchedMatrixMul)
- .hash(BatchedMatrixMul_hash_impl)
- .is_same_st(BatchedMatrixMul_is_same_st_impl)
- .props(BatchedMatrixMul_props_impl)
- .make_name(BatchedMatrixMul_make_name_impl);
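
The registrations in this file all follow the same four-trait shape that the BatchedMatrixMul block above shows in full: hash_impl folds the dynamic type info and every parameter into one value (enums via mgb::enumhash, everything else via mgb::hash / mgb::hash_pair_combine), is_same_st_impl compares exactly the same fields, props_impl renders each field as a (name, string) pair, and make_name_impl returns the op name. The sketch below is illustrative only, not MegEngine API: HypotheticalMatMul and combine_hash are made-up names, and it uses plain std::hash to mirror the contract that equal ops must hash equally.

// Standalone sketch of the hash / equality / props contract used by the
// generated trait impls in this file. Illustrative names, no MegEngine headers.
#include <cstddef>
#include <cstdint>
#include <functional>
#include <string>
#include <utility>
#include <vector>

namespace sketch {

// Assumed stand-in for mgb::hash_pair_combine: mixes a new hash into a seed.
inline size_t combine_hash(size_t seed, size_t v) {
    return seed ^ (v + 0x9e3779b9u + (seed << 6) + (seed >> 2));
}

struct HypotheticalMatMul {
    bool transposeA = false;
    bool transposeB = false;
    uint32_t dimA = 2;
    uint32_t dimB = 2;
};

// The hash folds every identity-relevant field, in a fixed order.
inline size_t hash(const HypotheticalMatMul& op) {
    size_t val = std::hash<std::string>{}("HypotheticalMatMul"); // analogue of dyn_typeinfo()
    val = combine_hash(val, std::hash<bool>{}(op.transposeA));
    val = combine_hash(val, std::hash<bool>{}(op.transposeB));
    val = combine_hash(val, std::hash<uint32_t>{}(op.dimA));
    val = combine_hash(val, std::hash<uint32_t>{}(op.dimB));
    return val;
}

// Equality checks exactly the same fields, so equal ops always hash equally.
inline bool is_same(const HypotheticalMatMul& a, const HypotheticalMatMul& b) {
    return a.transposeA == b.transposeA && a.transposeB == b.transposeB &&
           a.dimA == b.dimA && a.dimB == b.dimB;
}

// Props renders each field as a (name, value-string) pair, like *_props_impl above.
inline std::vector<std::pair<const char*, std::string>> props(const HypotheticalMatMul& op) {
    return {{"transposeA", std::to_string(op.transposeA)},
            {"transposeB", std::to_string(op.transposeB)},
            {"dimA", std::to_string(op.dimA)},
            {"dimB", std::to_string(op.dimB)}};
}

} // namespace sketch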
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(BatchedMeshIndexing);
-
- namespace {
- size_t BatchedMeshIndexing_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedMeshIndexing>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool BatchedMeshIndexing_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<BatchedMeshIndexing>(),
- &&b_ = rhs_.cast_final_safe<BatchedMeshIndexing>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> BatchedMeshIndexing_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedMeshIndexing>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string BatchedMeshIndexing_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedMeshIndexing>();
- static_cast<void>(op_);
- return "BatchedMeshIndexing";
- }
- } // anonymous namespace
- OP_TRAIT_REG(BatchedMeshIndexing, BatchedMeshIndexing)
- .hash(BatchedMeshIndexing_hash_impl)
- .is_same_st(BatchedMeshIndexing_is_same_st_impl)
- .props(BatchedMeshIndexing_props_impl)
- .make_name(BatchedMeshIndexing_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(BatchedSetMeshIndexing);
-
- namespace {
- size_t BatchedSetMeshIndexing_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedSetMeshIndexing>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool BatchedSetMeshIndexing_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<BatchedSetMeshIndexing>(),
- &&b_ = rhs_.cast_final_safe<BatchedSetMeshIndexing>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> BatchedSetMeshIndexing_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedSetMeshIndexing>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string BatchedSetMeshIndexing_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BatchedSetMeshIndexing>();
- static_cast<void>(op_);
- return "BatchedSetMeshIndexing";
- }
- } // anonymous namespace
- OP_TRAIT_REG(BatchedSetMeshIndexing, BatchedSetMeshIndexing)
- .hash(BatchedSetMeshIndexing_hash_impl)
- .is_same_st(BatchedSetMeshIndexing_is_same_st_impl)
- .props(BatchedSetMeshIndexing_props_impl)
- .make_name(BatchedSetMeshIndexing_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(BetaRNG);
-
- namespace {
- size_t BetaRNG_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BetaRNG>();
- static_cast<void>(op_);
-
- return mgb::hash_pair_combine(
- mgb::hash(op_.dyn_typeinfo()),
- mgb::hash(op_.handle)
- );
- }
- bool BetaRNG_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<BetaRNG>(),
- &&b_ = rhs_.cast_final_safe<BetaRNG>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return a_.handle == b_.handle;
- }
- std::vector<std::pair<const char*, std::string>> BetaRNG_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BetaRNG>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("seed", std::to_string(op_.seed));
- props_.emplace_back("handle", std::to_string(op_.handle));
- return props_;
- }
- std::string BetaRNG_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<BetaRNG>();
- static_cast<void>(op_);
- return "BetaRNG";
- }
- } // anonymous namespace
- OP_TRAIT_REG(BetaRNG, BetaRNG)
- .hash(BetaRNG_hash_impl)
- .is_same_st(BetaRNG_is_same_st_impl)
- .props(BetaRNG_props_impl)
- .make_name(BetaRNG_make_name_impl);
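
One deliberate asymmetry worth noting: for the RNG op above, only handle participates in BetaRNG_hash_impl and BetaRNG_is_same_st_impl, while seed is reported only through props. A minimal sketch of that pattern, under the same illustrative conventions as the earlier sketch (HypotheticalRNG is a made-up name, not MegEngine API):

#include <cstddef>
#include <cstdint>
#include <functional>
#include <string>
#include <utility>
#include <vector>

struct HypotheticalRNG {
    uint64_t seed = 0;  // shown in props, ignored for identity
    size_t handle = 0;  // the only field that defines op identity
};

// Equality and hash both look at handle alone, so two ops with different
// seeds but the same handle are treated as the same op.
inline bool is_same(const HypotheticalRNG& a, const HypotheticalRNG& b) {
    return a.handle == b.handle;
}

inline size_t hash(const HypotheticalRNG& op) {
    return std::hash<size_t>{}(op.handle);
}

// Props still reports both fields.
inline std::vector<std::pair<const char*, std::string>> props(const HypotheticalRNG& op) {
    return {{"seed", std::to_string(op.seed)},
            {"handle", std::to_string(op.handle)}};
}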
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Borrow);
-
- namespace {
- size_t Borrow_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Borrow>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.comp_node));
- return val;
- }
- bool Borrow_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Borrow>(),
- &&b_ = rhs_.cast_final_safe<Borrow>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.comp_node != b_.comp_node) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Borrow_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Borrow>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("comp_node", op_.comp_node.to_string());
- return props_;
- }
- std::string Borrow_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Borrow>();
- static_cast<void>(op_);
- return "Borrow";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Borrow, Borrow)
- .hash(Borrow_hash_impl)
- .is_same_st(Borrow_is_same_st_impl)
- .props(Borrow_props_impl)
- .make_name(Borrow_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Broadcast);
-
- namespace {
- size_t Broadcast_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Broadcast>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.shape));
- return val;
- }
- bool Broadcast_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Broadcast>(),
- &&b_ = rhs_.cast_final_safe<Broadcast>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.shape != b_.shape) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Broadcast_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Broadcast>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("shape", "{std::vector}");
- return props_;
- }
- std::string Broadcast_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Broadcast>();
- static_cast<void>(op_);
- return "Broadcast";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Broadcast, Broadcast)
- .hash(Broadcast_hash_impl)
- .is_same_st(Broadcast_is_same_st_impl)
- .props(Broadcast_props_impl)
- .make_name(Broadcast_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(CambriconRuntime);
-
- namespace {
- size_t CambriconRuntime_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CambriconRuntime>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.buf));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.buf_size));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.symbol));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.tensor_dim_mutable));
- return val;
- }
- bool CambriconRuntime_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<CambriconRuntime>(),
- &&b_ = rhs_.cast_final_safe<CambriconRuntime>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.buf != b_.buf) return false;
- if (a_.buf_size != b_.buf_size) return false;
- if (a_.symbol != b_.symbol) return false;
- if (a_.tensor_dim_mutable != b_.tensor_dim_mutable) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> CambriconRuntime_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CambriconRuntime>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("buf", op_.buf);
- props_.emplace_back("buf_size", std::to_string(op_.buf_size));
- props_.emplace_back("symbol", op_.symbol);
- props_.emplace_back("tensor_dim_mutable", std::to_string(op_.tensor_dim_mutable));
- return props_;
- }
- std::string CambriconRuntime_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CambriconRuntime>();
- static_cast<void>(op_);
- return "CambriconRuntime";
- }
- } // anonymous namespace
- OP_TRAIT_REG(CambriconRuntime, CambriconRuntime)
- .hash(CambriconRuntime_hash_impl)
- .is_same_st(CambriconRuntime_is_same_st_impl)
- .props(CambriconRuntime_props_impl)
- .make_name(CambriconRuntime_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(CheckNonFinite);
-
- namespace {
- size_t CheckNonFinite_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CheckNonFinite>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.scale));
- return val;
- }
- bool CheckNonFinite_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<CheckNonFinite>(),
- &&b_ = rhs_.cast_final_safe<CheckNonFinite>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.scale != b_.scale) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> CheckNonFinite_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CheckNonFinite>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("scale", std::to_string(op_.scale));
- return props_;
- }
- std::string CheckNonFinite_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CheckNonFinite>();
- static_cast<void>(op_);
- return "CheckNonFinite";
- }
- } // anonymous namespace
- OP_TRAIT_REG(CheckNonFinite, CheckNonFinite)
- .hash(CheckNonFinite_hash_impl)
- .is_same_st(CheckNonFinite_is_same_st_impl)
- .props(CheckNonFinite_props_impl)
- .make_name(CheckNonFinite_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(CollectiveComm);
-
- namespace {
- size_t CollectiveComm_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CollectiveComm>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.key));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.nr_devices));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.rank));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.is_root));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.local_grad));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.addr));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.port));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dtype.handle()));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.backend));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.comp_node));
- return val;
- }
- bool CollectiveComm_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<CollectiveComm>(),
- &&b_ = rhs_.cast_final_safe<CollectiveComm>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.key != b_.key) return false;
- if (a_.nr_devices != b_.nr_devices) return false;
- if (a_.rank != b_.rank) return false;
- if (a_.is_root != b_.is_root) return false;
- if (a_.local_grad != b_.local_grad) return false;
- if (a_.addr != b_.addr) return false;
- if (a_.port != b_.port) return false;
- if (a_.dtype != b_.dtype) return false;
- if (a_.backend != b_.backend) return false;
- if (a_.comp_node != b_.comp_node) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> CollectiveComm_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CollectiveComm>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case CollectiveComm::Mode::REDUCE_SUM:
- props_.emplace_back("mode", "REDUCE_SUM");
- break;
- case CollectiveComm::Mode::BROADCAST:
- props_.emplace_back("mode", "BROADCAST");
- break;
- case CollectiveComm::Mode::ALL_GATHER:
- props_.emplace_back("mode", "ALL_GATHER");
- break;
- case CollectiveComm::Mode::REDUCE_SCATTER_SUM:
- props_.emplace_back("mode", "REDUCE_SCATTER_SUM");
- break;
- case CollectiveComm::Mode::ALL_REDUCE_SUM:
- props_.emplace_back("mode", "ALL_REDUCE_SUM");
- break;
- case CollectiveComm::Mode::ALL_REDUCE_MAX:
- props_.emplace_back("mode", "ALL_REDUCE_MAX");
- break;
- case CollectiveComm::Mode::ALL_REDUCE_MIN:
- props_.emplace_back("mode", "ALL_REDUCE_MIN");
- break;
- case CollectiveComm::Mode::ALL_REDUCE_PROD:
- props_.emplace_back("mode", "ALL_REDUCE_PROD");
- break;
- case CollectiveComm::Mode::GATHER:
- props_.emplace_back("mode", "GATHER");
- break;
- case CollectiveComm::Mode::SCATTER:
- props_.emplace_back("mode", "SCATTER");
- break;
- case CollectiveComm::Mode::ALL_TO_ALL:
- props_.emplace_back("mode", "ALL_TO_ALL");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("key", op_.key);
- props_.emplace_back("nr_devices", std::to_string(op_.nr_devices));
- props_.emplace_back("rank", std::to_string(op_.rank));
- props_.emplace_back("is_root", std::to_string(op_.is_root));
- props_.emplace_back("local_grad", std::to_string(op_.local_grad));
- props_.emplace_back("addr", op_.addr);
- props_.emplace_back("port", std::to_string(op_.port));
- props_.emplace_back("dtype", op_.dtype.name());
- props_.emplace_back("backend", op_.backend);
- props_.emplace_back("comp_node", op_.comp_node);
- return props_;
- }
- std::string CollectiveComm_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CollectiveComm>();
- static_cast<void>(op_);
- return "CollectiveComm";
- }
- } // anonymous namespace
- OP_TRAIT_REG(CollectiveComm, CollectiveComm)
- .hash(CollectiveComm_hash_impl)
- .is_same_st(CollectiveComm_is_same_st_impl)
- .props(CollectiveComm_props_impl)
- .make_name(CollectiveComm_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Concat);
-
- namespace {
- size_t Concat_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Concat>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.comp_node));
- return val;
- }
- bool Concat_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Concat>(),
- &&b_ = rhs_.cast_final_safe<Concat>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- if (a_.comp_node != b_.comp_node) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Concat_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Concat>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- props_.emplace_back("comp_node", op_.comp_node.to_string());
- return props_;
- }
- std::string Concat_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Concat>();
- static_cast<void>(op_);
- return "Concat";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Concat, Concat)
- .hash(Concat_hash_impl)
- .is_same_st(Concat_is_same_st_impl)
- .props(Concat_props_impl)
- .make_name(Concat_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(CondTake);
-
- namespace {
- size_t CondTake_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CondTake>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- return val;
- }
- bool CondTake_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<CondTake>(),
- &&b_ = rhs_.cast_final_safe<CondTake>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return true;
- }
- std::vector<std::pair<const char*, std::string>> CondTake_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CondTake>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- return props_;
- }
- std::string CondTake_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CondTake>();
- static_cast<void>(op_);
- return "CondTake";
- }
- } // anonymous namespace
- OP_TRAIT_REG(CondTake, CondTake)
- .hash(CondTake_hash_impl)
- .is_same_st(CondTake_is_same_st_impl)
- .props(CondTake_props_impl)
- .make_name(CondTake_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(ConvBias);
-
- namespace {
- size_t ConvBias_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ConvBias>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.nonlineMode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.sparse));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.compute_mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dtype.handle()));
- return val;
- }
- bool ConvBias_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<ConvBias>(),
- &&b_ = rhs_.cast_final_safe<ConvBias>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.nonlineMode != b_.nonlineMode) return false;
- if (a_.mode != b_.mode) return false;
- if (a_.sparse != b_.sparse) return false;
- if (a_.format != b_.format) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.compute_mode != b_.compute_mode) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- if (a_.dtype != b_.dtype) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> ConvBias_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ConvBias>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.nonlineMode){
- case ConvBias::NonlineMode::IDENTITY:
- props_.emplace_back("nonlineMode", "IDENTITY");
- break;
- case ConvBias::NonlineMode::RELU:
- props_.emplace_back("nonlineMode", "RELU");
- break;
- case ConvBias::NonlineMode::SIGMOID:
- props_.emplace_back("nonlineMode", "SIGMOID");
- break;
- case ConvBias::NonlineMode::H_SWISH:
- props_.emplace_back("nonlineMode", "H_SWISH");
- break;
- default:
- props_.emplace_back("nonlineMode", "INVALID");
- break;
- }
- switch (op_.mode){
- case ConvBias::Mode::CROSS_CORRELATION:
- props_.emplace_back("mode", "CROSS_CORRELATION");
- break;
- case ConvBias::Mode::CONVOLUTION:
- props_.emplace_back("mode", "CONVOLUTION");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- switch (op_.sparse){
- case ConvBias::Sparse::DENSE:
- props_.emplace_back("sparse", "DENSE");
- break;
- case ConvBias::Sparse::GROUP:
- props_.emplace_back("sparse", "GROUP");
- break;
- default:
- props_.emplace_back("sparse", "INVALID");
- break;
- }
- switch (op_.format){
- case ConvBias::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case ConvBias::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case ConvBias::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case ConvBias::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case ConvBias::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case ConvBias::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case ConvBias::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case ConvBias::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case ConvBias::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case ConvBias::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case ConvBias::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case ConvBias::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case ConvBias::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case ConvBias::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case ConvBias::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case ConvBias::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case ConvBias::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case ConvBias::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- switch (op_.compute_mode){
- case ConvBias::ComputeMode::DEFAULT:
- props_.emplace_back("compute_mode", "DEFAULT");
- break;
- case ConvBias::ComputeMode::FLOAT32:
- props_.emplace_back("compute_mode", "FLOAT32");
- break;
- default:
- props_.emplace_back("compute_mode", "INVALID");
- break;
- }
- switch (op_.strategy){
- case ConvBias::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case ConvBias::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case ConvBias::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case ConvBias::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- props_.emplace_back("dtype", op_.dtype.name());
- return props_;
- }
- std::string ConvBias_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ConvBias>();
- static_cast<void>(op_);
- return "ConvBias";
- }
- } // anonymous namespace
- OP_TRAIT_REG(ConvBias, ConvBias)
- .hash(ConvBias_hash_impl)
- .is_same_st(ConvBias_is_same_st_impl)
- .props(ConvBias_props_impl)
- .make_name(ConvBias_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Convolution);
-
- namespace {
- size_t Convolution_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Convolution>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.sparse));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.compute_mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- return val;
- }
- bool Convolution_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Convolution>(),
- &&b_ = rhs_.cast_final_safe<Convolution>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.sparse != b_.sparse) return false;
- if (a_.format != b_.format) return false;
- if (a_.compute_mode != b_.compute_mode) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Convolution_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Convolution>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case Convolution::Mode::CROSS_CORRELATION:
- props_.emplace_back("mode", "CROSS_CORRELATION");
- break;
- case Convolution::Mode::CONVOLUTION:
- props_.emplace_back("mode", "CONVOLUTION");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- switch (op_.sparse){
- case Convolution::Sparse::DENSE:
- props_.emplace_back("sparse", "DENSE");
- break;
- case Convolution::Sparse::GROUP:
- props_.emplace_back("sparse", "GROUP");
- break;
- default:
- props_.emplace_back("sparse", "INVALID");
- break;
- }
- switch (op_.format){
- case Convolution::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case Convolution::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case Convolution::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case Convolution::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case Convolution::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case Convolution::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case Convolution::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case Convolution::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case Convolution::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case Convolution::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case Convolution::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case Convolution::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case Convolution::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case Convolution::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case Convolution::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case Convolution::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case Convolution::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case Convolution::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.compute_mode){
- case Convolution::ComputeMode::DEFAULT:
- props_.emplace_back("compute_mode", "DEFAULT");
- break;
- case Convolution::ComputeMode::FLOAT32:
- props_.emplace_back("compute_mode", "FLOAT32");
- break;
- default:
- props_.emplace_back("compute_mode", "INVALID");
- break;
- }
- switch (op_.strategy){
- case Convolution::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case Convolution::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case Convolution::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case Convolution::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- return props_;
- }
- std::string Convolution_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Convolution>();
- static_cast<void>(op_);
- return "Convolution";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Convolution, Convolution)
- .hash(Convolution_hash_impl)
- .is_same_st(Convolution_is_same_st_impl)
- .props(Convolution_props_impl)
- .make_name(Convolution_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Convolution3D);
-
- namespace {
- size_t Convolution3D_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Convolution3D>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_d));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_d));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_d));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.sparse));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.data_type));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- return val;
- }
- bool Convolution3D_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Convolution3D>(),
- &&b_ = rhs_.cast_final_safe<Convolution3D>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.pad_d != b_.pad_d) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_d != b_.stride_d) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_d != b_.dilate_d) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.sparse != b_.sparse) return false;
- if (a_.data_type != b_.data_type) return false;
- if (a_.format != b_.format) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Convolution3D_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Convolution3D>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case Convolution3D::Mode::CROSS_CORRELATION:
- props_.emplace_back("mode", "CROSS_CORRELATION");
- break;
- case Convolution3D::Mode::CONVOLUTION:
- props_.emplace_back("mode", "CONVOLUTION");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("pad_d", std::to_string(op_.pad_d));
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_d", std::to_string(op_.stride_d));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_d", std::to_string(op_.dilate_d));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- switch (op_.sparse){
- case Convolution3D::Sparse::DENSE:
- props_.emplace_back("sparse", "DENSE");
- break;
- case Convolution3D::Sparse::GROUP:
- props_.emplace_back("sparse", "GROUP");
- break;
- default:
- props_.emplace_back("sparse", "INVALID");
- break;
- }
- switch (op_.data_type){
- case Convolution3D::DataType::FLOAT:
- props_.emplace_back("data_type", "FLOAT");
- break;
- case Convolution3D::DataType::FLOAT_IO16xC32:
- props_.emplace_back("data_type", "FLOAT_IO16xC32");
- break;
- default:
- props_.emplace_back("data_type", "INVALID");
- break;
- }
- switch (op_.format){
- case Convolution3D::Format::NCDHW:
- props_.emplace_back("format", "NCDHW");
- break;
- case Convolution3D::Format::NDHWC:
- props_.emplace_back("format", "NDHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.strategy){
- case Convolution3D::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case Convolution3D::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case Convolution3D::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case Convolution3D::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- return props_;
- }
- std::string Convolution3D_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Convolution3D>();
- static_cast<void>(op_);
- return "Convolution3D";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Convolution3D, Convolution3D)
- .hash(Convolution3D_hash_impl)
- .is_same_st(Convolution3D_is_same_st_impl)
- .props(Convolution3D_props_impl)
- .make_name(Convolution3D_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Convolution3DBackwardData);
-
- namespace {
- size_t Convolution3DBackwardData_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Convolution3DBackwardData>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_d));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_d));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_d));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.sparse));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.data_type));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- return val;
- }
- bool Convolution3DBackwardData_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Convolution3DBackwardData>(),
- &&b_ = rhs_.cast_final_safe<Convolution3DBackwardData>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.pad_d != b_.pad_d) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_d != b_.stride_d) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_d != b_.dilate_d) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.sparse != b_.sparse) return false;
- if (a_.data_type != b_.data_type) return false;
- if (a_.format != b_.format) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Convolution3DBackwardData_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Convolution3DBackwardData>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case Convolution3DBackwardData::Mode::CROSS_CORRELATION:
- props_.emplace_back("mode", "CROSS_CORRELATION");
- break;
- case Convolution3DBackwardData::Mode::CONVOLUTION:
- props_.emplace_back("mode", "CONVOLUTION");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("pad_d", std::to_string(op_.pad_d));
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_d", std::to_string(op_.stride_d));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_d", std::to_string(op_.dilate_d));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- switch (op_.sparse){
- case Convolution3DBackwardData::Sparse::DENSE:
- props_.emplace_back("sparse", "DENSE");
- break;
- case Convolution3DBackwardData::Sparse::GROUP:
- props_.emplace_back("sparse", "GROUP");
- break;
- default:
- props_.emplace_back("sparse", "INVALID");
- break;
- }
- switch (op_.data_type){
- case Convolution3DBackwardData::DataType::FLOAT:
- props_.emplace_back("data_type", "FLOAT");
- break;
- case Convolution3DBackwardData::DataType::FLOAT_IO16xC32:
- props_.emplace_back("data_type", "FLOAT_IO16xC32");
- break;
- default:
- props_.emplace_back("data_type", "INVALID");
- break;
- }
- switch (op_.format){
- case Convolution3DBackwardData::Format::NCDHW:
- props_.emplace_back("format", "NCDHW");
- break;
- case Convolution3DBackwardData::Format::NDHWC:
- props_.emplace_back("format", "NDHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.strategy){
- case Convolution3DBackwardData::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case Convolution3DBackwardData::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case Convolution3DBackwardData::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case Convolution3DBackwardData::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- return props_;
- }
- std::string Convolution3DBackwardData_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Convolution3DBackwardData>();
- static_cast<void>(op_);
- return "Convolution3DBackwardData";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Convolution3DBackwardData, Convolution3DBackwardData)
- .hash(Convolution3DBackwardData_hash_impl)
- .is_same_st(Convolution3DBackwardData_is_same_st_impl)
- .props(Convolution3DBackwardData_props_impl)
- .make_name(Convolution3DBackwardData_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(ConvolutionBackwardData);
-
- namespace {
- size_t ConvolutionBackwardData_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ConvolutionBackwardData>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.sparse));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.compute_mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dtype.handle()));
- return val;
- }
- bool ConvolutionBackwardData_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<ConvolutionBackwardData>(),
- &&b_ = rhs_.cast_final_safe<ConvolutionBackwardData>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.sparse != b_.sparse) return false;
- if (a_.format != b_.format) return false;
- if (a_.compute_mode != b_.compute_mode) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- if (a_.dtype != b_.dtype) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> ConvolutionBackwardData_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ConvolutionBackwardData>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case ConvolutionBackwardData::Mode::CROSS_CORRELATION:
- props_.emplace_back("mode", "CROSS_CORRELATION");
- break;
- case ConvolutionBackwardData::Mode::CONVOLUTION:
- props_.emplace_back("mode", "CONVOLUTION");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- switch (op_.sparse){
- case ConvolutionBackwardData::Sparse::DENSE:
- props_.emplace_back("sparse", "DENSE");
- break;
- case ConvolutionBackwardData::Sparse::GROUP:
- props_.emplace_back("sparse", "GROUP");
- break;
- default:
- props_.emplace_back("sparse", "INVALID");
- break;
- }
- switch (op_.format){
- case ConvolutionBackwardData::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case ConvolutionBackwardData::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case ConvolutionBackwardData::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case ConvolutionBackwardData::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case ConvolutionBackwardData::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case ConvolutionBackwardData::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case ConvolutionBackwardData::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case ConvolutionBackwardData::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case ConvolutionBackwardData::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case ConvolutionBackwardData::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case ConvolutionBackwardData::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case ConvolutionBackwardData::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case ConvolutionBackwardData::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case ConvolutionBackwardData::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case ConvolutionBackwardData::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case ConvolutionBackwardData::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case ConvolutionBackwardData::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case ConvolutionBackwardData::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.compute_mode){
- case ConvolutionBackwardData::ComputeMode::DEFAULT:
- props_.emplace_back("compute_mode", "DEFAULT");
- break;
- case ConvolutionBackwardData::ComputeMode::FLOAT32:
- props_.emplace_back("compute_mode", "FLOAT32");
- break;
- default:
- props_.emplace_back("compute_mode", "INVALID");
- break;
- }
- switch (op_.strategy){
- case ConvolutionBackwardData::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case ConvolutionBackwardData::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case ConvolutionBackwardData::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case ConvolutionBackwardData::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- props_.emplace_back("dtype", op_.dtype.name());
- return props_;
- }
- std::string ConvolutionBackwardData_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ConvolutionBackwardData>();
- static_cast<void>(op_);
- return "ConvolutionBackwardData";
- }
- } // anonymous namespace
- OP_TRAIT_REG(ConvolutionBackwardData, ConvolutionBackwardData)
- .hash(ConvolutionBackwardData_hash_impl)
- .is_same_st(ConvolutionBackwardData_is_same_st_impl)
- .props(ConvolutionBackwardData_props_impl)
- .make_name(ConvolutionBackwardData_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Copy);
-
- namespace {
- size_t Copy_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Copy>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.comp_node));
- return val;
- }
- bool Copy_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Copy>(),
- &&b_ = rhs_.cast_final_safe<Copy>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.comp_node != b_.comp_node) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Copy_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Copy>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("comp_node", op_.comp_node.to_string());
- return props_;
- }
- std::string Copy_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Copy>();
- static_cast<void>(op_);
- return "Copy";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Copy, Copy)
- .hash(Copy_hash_impl)
- .is_same_st(Copy_is_same_st_impl)
- .props(Copy_props_impl)
- .make_name(Copy_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Correlation);
-
- namespace {
- size_t Correlation_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Correlation>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.kernel_size));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.max_displacement));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride1));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride2));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_size));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.is_multiply));
- return val;
- }
- bool Correlation_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Correlation>(),
- &&b_ = rhs_.cast_final_safe<Correlation>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.format != b_.format) return false;
- if (a_.kernel_size != b_.kernel_size) return false;
- if (a_.max_displacement != b_.max_displacement) return false;
- if (a_.stride1 != b_.stride1) return false;
- if (a_.stride2 != b_.stride2) return false;
- if (a_.pad_size != b_.pad_size) return false;
- if (a_.is_multiply != b_.is_multiply) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Correlation_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Correlation>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.format){
- case Correlation::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case Correlation::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case Correlation::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case Correlation::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case Correlation::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case Correlation::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case Correlation::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case Correlation::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case Correlation::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case Correlation::Format::NCHW_WINOGRAD:
- props_.emplace_back("format", "NCHW_WINOGRAD");
- break;
- case Correlation::Format::NCHW88_WINOGRAD:
- props_.emplace_back("format", "NCHW88_WINOGRAD");
- break;
- case Correlation::Format::NCHW44_WINOGRAD:
- props_.emplace_back("format", "NCHW44_WINOGRAD");
- break;
- case Correlation::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case Correlation::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case Correlation::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case Correlation::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case Correlation::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case Correlation::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case Correlation::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case Correlation::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- props_.emplace_back("kernel_size", std::to_string(op_.kernel_size));
- props_.emplace_back("max_displacement", std::to_string(op_.max_displacement));
- props_.emplace_back("stride1", std::to_string(op_.stride1));
- props_.emplace_back("stride2", std::to_string(op_.stride2));
- props_.emplace_back("pad_size", std::to_string(op_.pad_size));
- props_.emplace_back("is_multiply", std::to_string(op_.is_multiply));
- return props_;
- }
- std::string Correlation_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Correlation>();
- static_cast<void>(op_);
- return "Correlation";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Correlation, Correlation)
- .hash(Correlation_hash_impl)
- .is_same_st(Correlation_is_same_st_impl)
- .props(Correlation_props_impl)
- .make_name(Correlation_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Cumsum);
-
- namespace {
- size_t Cumsum_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Cumsum>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.exclusive));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.reverse));
- return val;
- }
- bool Cumsum_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Cumsum>(),
- &&b_ = rhs_.cast_final_safe<Cumsum>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- if (a_.exclusive != b_.exclusive) return false;
- if (a_.reverse != b_.reverse) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Cumsum_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Cumsum>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- props_.emplace_back("exclusive", std::to_string(op_.exclusive));
- props_.emplace_back("reverse", std::to_string(op_.reverse));
- return props_;
- }
- std::string Cumsum_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Cumsum>();
- static_cast<void>(op_);
- return "Cumsum";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Cumsum, Cumsum)
- .hash(Cumsum_hash_impl)
- .is_same_st(Cumsum_is_same_st_impl)
- .props(Cumsum_props_impl)
- .make_name(Cumsum_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(CvtColor);
-
- namespace {
- size_t CvtColor_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CvtColor>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- return val;
- }
- bool CvtColor_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<CvtColor>(),
- &&b_ = rhs_.cast_final_safe<CvtColor>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> CvtColor_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CvtColor>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case CvtColor::Mode::RGB2GRAY:
- props_.emplace_back("mode", "RGB2GRAY");
- break;
- case CvtColor::Mode::RGB2YUV:
- props_.emplace_back("mode", "RGB2YUV");
- break;
- case CvtColor::Mode::YUV2RGB:
- props_.emplace_back("mode", "YUV2RGB");
- break;
- case CvtColor::Mode::GRAY2RGB:
- props_.emplace_back("mode", "GRAY2RGB");
- break;
- case CvtColor::Mode::RGBA2RGB:
- props_.emplace_back("mode", "RGBA2RGB");
- break;
- case CvtColor::Mode::RGBA2BGR:
- props_.emplace_back("mode", "RGBA2BGR");
- break;
- case CvtColor::Mode::RGBA2GRAY:
- props_.emplace_back("mode", "RGBA2GRAY");
- break;
- case CvtColor::Mode::RGB2BGR:
- props_.emplace_back("mode", "RGB2BGR");
- break;
- case CvtColor::Mode::BGR2GRAY:
- props_.emplace_back("mode", "BGR2GRAY");
- break;
- case CvtColor::Mode::BGR2RGB:
- props_.emplace_back("mode", "BGR2RGB");
- break;
- case CvtColor::Mode::YUV2GRAY_NV21:
- props_.emplace_back("mode", "YUV2GRAY_NV21");
- break;
- case CvtColor::Mode::YUV2RGB_NV21:
- props_.emplace_back("mode", "YUV2RGB_NV21");
- break;
- case CvtColor::Mode::YUV2BGR_NV21:
- props_.emplace_back("mode", "YUV2BGR_NV21");
- break;
- case CvtColor::Mode::YUV2GRAY_NV12:
- props_.emplace_back("mode", "YUV2GRAY_NV12");
- break;
- case CvtColor::Mode::YUV2RGB_NV12:
- props_.emplace_back("mode", "YUV2RGB_NV12");
- break;
- case CvtColor::Mode::YUV2BGR_NV12:
- props_.emplace_back("mode", "YUV2BGR_NV12");
- break;
- case CvtColor::Mode::YUV2GRAY_YV12:
- props_.emplace_back("mode", "YUV2GRAY_YV12");
- break;
- case CvtColor::Mode::YUV2RGB_YV12:
- props_.emplace_back("mode", "YUV2RGB_YV12");
- break;
- case CvtColor::Mode::YUV2BGR_YV12:
- props_.emplace_back("mode", "YUV2BGR_YV12");
- break;
- case CvtColor::Mode::YUV2GRAY_YU12:
- props_.emplace_back("mode", "YUV2GRAY_YU12");
- break;
- case CvtColor::Mode::YUV2RGB_YU12:
- props_.emplace_back("mode", "YUV2RGB_YU12");
- break;
- case CvtColor::Mode::YUV2BGR_YU12:
- props_.emplace_back("mode", "YUV2BGR_YU12");
- break;
- case CvtColor::Mode::YCrCb2RGB:
- props_.emplace_back("mode", "YCrCb2RGB");
- break;
- case CvtColor::Mode::YCrCb2BGR:
- props_.emplace_back("mode", "YCrCb2BGR");
- break;
- case CvtColor::Mode::BT601_YUV2RGB_NV21:
- props_.emplace_back("mode", "BT601_YUV2RGB_NV21");
- break;
- case CvtColor::Mode::BT601_YUV2BGR_NV21:
- props_.emplace_back("mode", "BT601_YUV2BGR_NV21");
- break;
- case CvtColor::Mode::BT601_YUV2RGB_NV12:
- props_.emplace_back("mode", "BT601_YUV2RGB_NV12");
- break;
- case CvtColor::Mode::BT601_YUV2BGR_NV12:
- props_.emplace_back("mode", "BT601_YUV2BGR_NV12");
- break;
- case CvtColor::Mode::BT601_YUV2RGB_YV12:
- props_.emplace_back("mode", "BT601_YUV2RGB_YV12");
- break;
- case CvtColor::Mode::BT601_YUV2BGR_YV12:
- props_.emplace_back("mode", "BT601_YUV2BGR_YV12");
- break;
- case CvtColor::Mode::BT601_YUV2RGB_YU12:
- props_.emplace_back("mode", "BT601_YUV2RGB_YU12");
- break;
- case CvtColor::Mode::BT601_YUV2BGR_YU12:
- props_.emplace_back("mode", "BT601_YUV2BGR_YU12");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- return props_;
- }
- std::string CvtColor_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<CvtColor>();
- static_cast<void>(op_);
- return "CvtColor";
- }
- } // anonymous namespace
- OP_TRAIT_REG(CvtColor, CvtColor)
- .hash(CvtColor_hash_impl)
- .is_same_st(CvtColor_is_same_st_impl)
- .props(CvtColor_props_impl)
- .make_name(CvtColor_make_name_impl);
-
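- // DeformableConv folds the full convolution parameter set (mode, padding,
- // strides, dilation, sparse/group layout, format, compute mode, algorithm
- // strategy and workspace limit) into both the hash and the equality check,
- // so two instances dedupe only when every field matches.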
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(DeformableConv);
-
- namespace {
- size_t DeformableConv_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<DeformableConv>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.sparse));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.compute_mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- return val;
- }
- bool DeformableConv_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<DeformableConv>(),
- &&b_ = rhs_.cast_final_safe<DeformableConv>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.sparse != b_.sparse) return false;
- if (a_.format != b_.format) return false;
- if (a_.compute_mode != b_.compute_mode) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> DeformableConv_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<DeformableConv>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case DeformableConv::Mode::CROSS_CORRELATION:
- props_.emplace_back("mode", "CROSS_CORRELATION");
- break;
- case DeformableConv::Mode::CONVOLUTION:
- props_.emplace_back("mode", "CONVOLUTION");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- switch (op_.sparse){
- case DeformableConv::Sparse::DENSE:
- props_.emplace_back("sparse", "DENSE");
- break;
- case DeformableConv::Sparse::GROUP:
- props_.emplace_back("sparse", "GROUP");
- break;
- default:
- props_.emplace_back("sparse", "INVALID");
- break;
- }
- switch (op_.format){
- case DeformableConv::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case DeformableConv::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case DeformableConv::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case DeformableConv::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case DeformableConv::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case DeformableConv::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case DeformableConv::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case DeformableConv::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case DeformableConv::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case DeformableConv::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case DeformableConv::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case DeformableConv::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case DeformableConv::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case DeformableConv::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case DeformableConv::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case DeformableConv::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case DeformableConv::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case DeformableConv::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.compute_mode){
- case DeformableConv::ComputeMode::DEFAULT:
- props_.emplace_back("compute_mode", "DEFAULT");
- break;
- case DeformableConv::ComputeMode::FLOAT32:
- props_.emplace_back("compute_mode", "FLOAT32");
- break;
- default:
- props_.emplace_back("compute_mode", "INVALID");
- break;
- }
- switch (op_.strategy){
- case DeformableConv::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case DeformableConv::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case DeformableConv::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case DeformableConv::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- return props_;
- }
- std::string DeformableConv_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<DeformableConv>();
- static_cast<void>(op_);
- return "DeformableConv";
- }
- } // anonymous namespace
- OP_TRAIT_REG(DeformableConv, DeformableConv)
- .hash(DeformableConv_hash_impl)
- .is_same_st(DeformableConv_is_same_st_impl)
- .props(DeformableConv_props_impl)
- .make_name(DeformableConv_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(DeformablePSROIPooling);
-
- namespace {
- size_t DeformablePSROIPooling_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<DeformablePSROIPooling>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.no_trans));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.spatial_scale));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.trans_std));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pooled_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pooled_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.part_size));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.sample_per_part));
- return val;
- }
- bool DeformablePSROIPooling_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<DeformablePSROIPooling>(),
- &&b_ = rhs_.cast_final_safe<DeformablePSROIPooling>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.no_trans != b_.no_trans) return false;
- if (a_.spatial_scale != b_.spatial_scale) return false;
- if (a_.trans_std != b_.trans_std) return false;
- if (a_.pooled_h != b_.pooled_h) return false;
- if (a_.pooled_w != b_.pooled_w) return false;
- if (a_.part_size != b_.part_size) return false;
- if (a_.sample_per_part != b_.sample_per_part) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> DeformablePSROIPooling_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<DeformablePSROIPooling>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("no_trans", std::to_string(op_.no_trans));
- props_.emplace_back("spatial_scale", std::to_string(op_.spatial_scale));
- props_.emplace_back("trans_std", std::to_string(op_.trans_std));
- props_.emplace_back("pooled_h", std::to_string(op_.pooled_h));
- props_.emplace_back("pooled_w", std::to_string(op_.pooled_w));
- props_.emplace_back("part_size", std::to_string(op_.part_size));
- props_.emplace_back("sample_per_part", std::to_string(op_.sample_per_part));
- return props_;
- }
- std::string DeformablePSROIPooling_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<DeformablePSROIPooling>();
- static_cast<void>(op_);
- return "DeformablePSROIPooling";
- }
- } // anonymous namespace
- OP_TRAIT_REG(DeformablePSROIPooling, DeformablePSROIPooling)
- .hash(DeformablePSROIPooling_hash_impl)
- .is_same_st(DeformablePSROIPooling_is_same_st_impl)
- .props(DeformablePSROIPooling_props_impl)
- .make_name(DeformablePSROIPooling_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Diag);
-
- namespace {
- size_t Diag_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Diag>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.k));
- return val;
- }
- bool Diag_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Diag>(),
- &&b_ = rhs_.cast_final_safe<Diag>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.k != b_.k) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Diag_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Diag>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("k", std::to_string(op_.k));
- return props_;
- }
- std::string Diag_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Diag>();
- static_cast<void>(op_);
- return "Diag";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Diag, Diag)
- .hash(Diag_hash_impl)
- .is_same_st(Diag_is_same_st_impl)
- .props(Diag_props_impl)
- .make_name(Diag_make_name_impl);
-
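- // Vector-valued attributes such as Dimshuffle's pattern are hashed via
- // mgb::hash on the container, but props only reports the "{std::vector}"
- // placeholder rather than expanding the elements.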
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Dimshuffle);
-
- namespace {
- size_t Dimshuffle_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Dimshuffle>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pattern));
- return val;
- }
- bool Dimshuffle_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Dimshuffle>(),
- &&b_ = rhs_.cast_final_safe<Dimshuffle>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.pattern != b_.pattern) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Dimshuffle_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Dimshuffle>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("pattern", "{std::vector}");
- return props_;
- }
- std::string Dimshuffle_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Dimshuffle>();
- static_cast<void>(op_);
- return "Dimshuffle";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Dimshuffle, Dimshuffle)
- .hash(Dimshuffle_hash_impl)
- .is_same_st(Dimshuffle_is_same_st_impl)
- .props(Dimshuffle_props_impl)
- .make_name(Dimshuffle_make_name_impl);
-
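- // Parameterless ops (Dot here, and FastpathCopy, Identity and InplaceAdd
- // further down) hash only their type info and always compare equal.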
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Dot);
-
- namespace {
- size_t Dot_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Dot>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- return val;
- }
- bool Dot_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Dot>(),
- &&b_ = rhs_.cast_final_safe<Dot>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Dot_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Dot>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- return props_;
- }
- std::string Dot_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Dot>();
- static_cast<void>(op_);
- return "Dot";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Dot, Dot)
- .hash(Dot_hash_impl)
- .is_same_st(Dot_is_same_st_impl)
- .props(Dot_props_impl)
- .make_name(Dot_make_name_impl);
-
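- // The RNG-backed ops (Dropout here, GammaRNG and GaussianRNG below) key the
- // hash and equality on the RNG handle plus the distribution parameters; the
- // seed is surfaced in props for inspection but left out of the comparison,
- // presumably because the handle already identifies the RNG state.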
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Dropout);
-
- namespace {
- size_t Dropout_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Dropout>();
- static_cast<void>(op_);
-
- return mgb::hash_pair_combine(
- mgb::hash(op_.dyn_typeinfo()),
- mgb::hash_pair_combine(
- mgb::hash(op_.drop_prob),
- mgb::hash(op_.handle))
- );
- }
- bool Dropout_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Dropout>(),
- &&b_ = rhs_.cast_final_safe<Dropout>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return a_.handle == b_.handle && a_.drop_prob == b_.drop_prob;
- }
- std::vector<std::pair<const char*, std::string>> Dropout_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Dropout>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("drop_prob", std::to_string(op_.drop_prob));
- props_.emplace_back("seed", std::to_string(op_.seed));
- props_.emplace_back("handle", std::to_string(op_.handle));
- return props_;
- }
- std::string Dropout_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Dropout>();
- static_cast<void>(op_);
- return "Dropout";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Dropout, Dropout)
- .hash(Dropout_hash_impl)
- .is_same_st(Dropout_is_same_st_impl)
- .props(Dropout_props_impl)
- .make_name(Dropout_make_name_impl);
-
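- // Elemwise reports its mode as a string in props (falling back to "INVALID"
- // for unknown values) and, unlike most ops here, make_name returns the
- // stringified mode instead of a fixed "Elemwise" literal, so the displayed
- // op name reflects the concrete mode.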
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Elemwise);
-
- namespace {
- size_t Elemwise_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Elemwise>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- return val;
- }
- bool Elemwise_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Elemwise>(),
- &&b_ = rhs_.cast_final_safe<Elemwise>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Elemwise_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Elemwise>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case Elemwise::Mode::RELU:
- props_.emplace_back("mode", "RELU");
- break;
- case Elemwise::Mode::ABS:
- props_.emplace_back("mode", "ABS");
- break;
- case Elemwise::Mode::ACOS:
- props_.emplace_back("mode", "ACOS");
- break;
- case Elemwise::Mode::ASIN:
- props_.emplace_back("mode", "ASIN");
- break;
- case Elemwise::Mode::CEIL:
- props_.emplace_back("mode", "CEIL");
- break;
- case Elemwise::Mode::COS:
- props_.emplace_back("mode", "COS");
- break;
- case Elemwise::Mode::EXP:
- props_.emplace_back("mode", "EXP");
- break;
- case Elemwise::Mode::EXPM1:
- props_.emplace_back("mode", "EXPM1");
- break;
- case Elemwise::Mode::FLOOR:
- props_.emplace_back("mode", "FLOOR");
- break;
- case Elemwise::Mode::LOG:
- props_.emplace_back("mode", "LOG");
- break;
- case Elemwise::Mode::LOG1P:
- props_.emplace_back("mode", "LOG1P");
- break;
- case Elemwise::Mode::NEGATE:
- props_.emplace_back("mode", "NEGATE");
- break;
- case Elemwise::Mode::SIGMOID:
- props_.emplace_back("mode", "SIGMOID");
- break;
- case Elemwise::Mode::SIN:
- props_.emplace_back("mode", "SIN");
- break;
- case Elemwise::Mode::TANH:
- props_.emplace_back("mode", "TANH");
- break;
- case Elemwise::Mode::ABS_GRAD:
- props_.emplace_back("mode", "ABS_GRAD");
- break;
- case Elemwise::Mode::ADD:
- props_.emplace_back("mode", "ADD");
- break;
- case Elemwise::Mode::FLOOR_DIV:
- props_.emplace_back("mode", "FLOOR_DIV");
- break;
- case Elemwise::Mode::MAX:
- props_.emplace_back("mode", "MAX");
- break;
- case Elemwise::Mode::MIN:
- props_.emplace_back("mode", "MIN");
- break;
- case Elemwise::Mode::MOD:
- props_.emplace_back("mode", "MOD");
- break;
- case Elemwise::Mode::MUL:
- props_.emplace_back("mode", "MUL");
- break;
- case Elemwise::Mode::POW:
- props_.emplace_back("mode", "POW");
- break;
- case Elemwise::Mode::SIGMOID_GRAD:
- props_.emplace_back("mode", "SIGMOID_GRAD");
- break;
- case Elemwise::Mode::SUB:
- props_.emplace_back("mode", "SUB");
- break;
- case Elemwise::Mode::SWITCH_GT0:
- props_.emplace_back("mode", "SWITCH_GT0");
- break;
- case Elemwise::Mode::TANH_GRAD:
- props_.emplace_back("mode", "TANH_GRAD");
- break;
- case Elemwise::Mode::TRUE_DIV:
- props_.emplace_back("mode", "TRUE_DIV");
- break;
- case Elemwise::Mode::LOG_SUM_EXP:
- props_.emplace_back("mode", "LOG_SUM_EXP");
- break;
- case Elemwise::Mode::LT:
- props_.emplace_back("mode", "LT");
- break;
- case Elemwise::Mode::LEQ:
- props_.emplace_back("mode", "LEQ");
- break;
- case Elemwise::Mode::EQ:
- props_.emplace_back("mode", "EQ");
- break;
- case Elemwise::Mode::SHL:
- props_.emplace_back("mode", "SHL");
- break;
- case Elemwise::Mode::SHR:
- props_.emplace_back("mode", "SHR");
- break;
- case Elemwise::Mode::COND_LEQ_MOV:
- props_.emplace_back("mode", "COND_LEQ_MOV");
- break;
- case Elemwise::Mode::FUSE_MUL_ADD3:
- props_.emplace_back("mode", "FUSE_MUL_ADD3");
- break;
- case Elemwise::Mode::FUSE_MUL_ADD4:
- props_.emplace_back("mode", "FUSE_MUL_ADD4");
- break;
- case Elemwise::Mode::FUSE_ADD_RELU:
- props_.emplace_back("mode", "FUSE_ADD_RELU");
- break;
- case Elemwise::Mode::FUSE_ADD_SIGMOID:
- props_.emplace_back("mode", "FUSE_ADD_SIGMOID");
- break;
- case Elemwise::Mode::FUSE_ADD_TANH:
- props_.emplace_back("mode", "FUSE_ADD_TANH");
- break;
- case Elemwise::Mode::FAST_TANH:
- props_.emplace_back("mode", "FAST_TANH");
- break;
- case Elemwise::Mode::FAST_TANH_GRAD:
- props_.emplace_back("mode", "FAST_TANH_GRAD");
- break;
- case Elemwise::Mode::ROUND:
- props_.emplace_back("mode", "ROUND");
- break;
- case Elemwise::Mode::RMULH:
- props_.emplace_back("mode", "RMULH");
- break;
- case Elemwise::Mode::ATAN2:
- props_.emplace_back("mode", "ATAN2");
- break;
- case Elemwise::Mode::ERF:
- props_.emplace_back("mode", "ERF");
- break;
- case Elemwise::Mode::ERFINV:
- props_.emplace_back("mode", "ERFINV");
- break;
- case Elemwise::Mode::ERFC:
- props_.emplace_back("mode", "ERFC");
- break;
- case Elemwise::Mode::ERFCINV:
- props_.emplace_back("mode", "ERFCINV");
- break;
- case Elemwise::Mode::H_SWISH:
- props_.emplace_back("mode", "H_SWISH");
- break;
- case Elemwise::Mode::H_SWISH_GRAD:
- props_.emplace_back("mode", "H_SWISH_GRAD");
- break;
- case Elemwise::Mode::FUSE_ADD_H_SWISH:
- props_.emplace_back("mode", "FUSE_ADD_H_SWISH");
- break;
- case Elemwise::Mode::NOT:
- props_.emplace_back("mode", "NOT");
- break;
- case Elemwise::Mode::AND:
- props_.emplace_back("mode", "AND");
- break;
- case Elemwise::Mode::OR:
- props_.emplace_back("mode", "OR");
- break;
- case Elemwise::Mode::XOR:
- props_.emplace_back("mode", "XOR");
- break;
- case Elemwise::Mode::SILU:
- props_.emplace_back("mode", "SILU");
- break;
- case Elemwise::Mode::SILU_GRAD:
- props_.emplace_back("mode", "SILU_GRAD");
- break;
- case Elemwise::Mode::GELU:
- props_.emplace_back("mode", "GELU");
- break;
- case Elemwise::Mode::GELU_GRAD:
- props_.emplace_back("mode", "GELU_GRAD");
- break;
- case Elemwise::Mode::COND_LT_MOV:
- props_.emplace_back("mode", "COND_LT_MOV");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- return props_;
- }
- std::string Elemwise_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Elemwise>();
- static_cast<void>(op_);
-
- return to_string(op_.mode);
- }
- } // anonymous namespace
- OP_TRAIT_REG(Elemwise, Elemwise)
- .hash(Elemwise_hash_impl)
- .is_same_st(Elemwise_is_same_st_impl)
- .props(Elemwise_props_impl)
- .make_name(Elemwise_make_name_impl);
-
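- // ElemwiseMultiType extends the Elemwise pattern with an output dtype: the
- // hash mixes in dtype.handle(), and both equality and props include the
- // dtype alongside the mode.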
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(ElemwiseMultiType);
-
- namespace {
- size_t ElemwiseMultiType_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ElemwiseMultiType>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dtype.handle()));
- return val;
- }
- bool ElemwiseMultiType_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<ElemwiseMultiType>(),
- &&b_ = rhs_.cast_final_safe<ElemwiseMultiType>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.dtype != b_.dtype) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> ElemwiseMultiType_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ElemwiseMultiType>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case ElemwiseMultiType::Mode::FUSE_MUL_ADD3_INT16x32x32x32:
- props_.emplace_back("mode", "FUSE_MUL_ADD3_INT16x32x32x32");
- break;
- case ElemwiseMultiType::Mode::FUSE_MUL_ADD3_IXxF32xF32xI8:
- props_.emplace_back("mode", "FUSE_MUL_ADD3_IXxF32xF32xI8");
- break;
- case ElemwiseMultiType::Mode::ROUND_SHR_SATURATE_IXxI8xI8:
- props_.emplace_back("mode", "ROUND_SHR_SATURATE_IXxI8xI8");
- break;
- case ElemwiseMultiType::Mode::FUSE_ADD_RMULH_ROUND_SHR_SATURATE_INT16x16x16x8:
- props_.emplace_back("mode", "FUSE_ADD_RMULH_ROUND_SHR_SATURATE_INT16x16x16x8");
- break;
- case ElemwiseMultiType::Mode::FUSE_ADD_RMULH_ROUND_SHR_SATURATE_INT32x32x32x8:
- props_.emplace_back("mode", "FUSE_ADD_RMULH_ROUND_SHR_SATURATE_INT32x32x32x8");
- break;
- case ElemwiseMultiType::Mode::ROUND_SHR_SATURATE_IXxI8xI16:
- props_.emplace_back("mode", "ROUND_SHR_SATURATE_IXxI8xI16");
- break;
- case ElemwiseMultiType::Mode::QADD:
- props_.emplace_back("mode", "QADD");
- break;
- case ElemwiseMultiType::Mode::QFUSE_ADD_RELU:
- props_.emplace_back("mode", "QFUSE_ADD_RELU");
- break;
- case ElemwiseMultiType::Mode::QMUL:
- props_.emplace_back("mode", "QMUL");
- break;
- case ElemwiseMultiType::Mode::QMIN:
- props_.emplace_back("mode", "QMIN");
- break;
- case ElemwiseMultiType::Mode::QMAX:
- props_.emplace_back("mode", "QMAX");
- break;
- case ElemwiseMultiType::Mode::QSUB:
- props_.emplace_back("mode", "QSUB");
- break;
- case ElemwiseMultiType::Mode::QTRUE_DIV:
- props_.emplace_back("mode", "QTRUE_DIV");
- break;
- case ElemwiseMultiType::Mode::QFUSE_ADD_SIGMOID:
- props_.emplace_back("mode", "QFUSE_ADD_SIGMOID");
- break;
- case ElemwiseMultiType::Mode::QFUSE_ADD_TANH:
- props_.emplace_back("mode", "QFUSE_ADD_TANH");
- break;
- case ElemwiseMultiType::Mode::QRELU:
- props_.emplace_back("mode", "QRELU");
- break;
- case ElemwiseMultiType::Mode::QABS:
- props_.emplace_back("mode", "QABS");
- break;
- case ElemwiseMultiType::Mode::QSIGMOID:
- props_.emplace_back("mode", "QSIGMOID");
- break;
- case ElemwiseMultiType::Mode::QEXP:
- props_.emplace_back("mode", "QEXP");
- break;
- case ElemwiseMultiType::Mode::QTANH:
- props_.emplace_back("mode", "QTANH");
- break;
- case ElemwiseMultiType::Mode::QFUSE_MUL_ADD3:
- props_.emplace_back("mode", "QFUSE_MUL_ADD3");
- break;
- case ElemwiseMultiType::Mode::QFAST_TANH:
- props_.emplace_back("mode", "QFAST_TANH");
- break;
- case ElemwiseMultiType::Mode::QNEGATE:
- props_.emplace_back("mode", "QNEGATE");
- break;
- case ElemwiseMultiType::Mode::QACOS:
- props_.emplace_back("mode", "QACOS");
- break;
- case ElemwiseMultiType::Mode::QASIN:
- props_.emplace_back("mode", "QASIN");
- break;
- case ElemwiseMultiType::Mode::QCEIL:
- props_.emplace_back("mode", "QCEIL");
- break;
- case ElemwiseMultiType::Mode::QCOS:
- props_.emplace_back("mode", "QCOS");
- break;
- case ElemwiseMultiType::Mode::QEXPM1:
- props_.emplace_back("mode", "QEXPM1");
- break;
- case ElemwiseMultiType::Mode::QFLOOR:
- props_.emplace_back("mode", "QFLOOR");
- break;
- case ElemwiseMultiType::Mode::QLOG:
- props_.emplace_back("mode", "QLOG");
- break;
- case ElemwiseMultiType::Mode::QLOG1P:
- props_.emplace_back("mode", "QLOG1P");
- break;
- case ElemwiseMultiType::Mode::QSIN:
- props_.emplace_back("mode", "QSIN");
- break;
- case ElemwiseMultiType::Mode::QROUND:
- props_.emplace_back("mode", "QROUND");
- break;
- case ElemwiseMultiType::Mode::QERF:
- props_.emplace_back("mode", "QERF");
- break;
- case ElemwiseMultiType::Mode::QERFINV:
- props_.emplace_back("mode", "QERFINV");
- break;
- case ElemwiseMultiType::Mode::QERFC:
- props_.emplace_back("mode", "QERFC");
- break;
- case ElemwiseMultiType::Mode::QERFCINV:
- props_.emplace_back("mode", "QERFCINV");
- break;
- case ElemwiseMultiType::Mode::QABS_GRAD:
- props_.emplace_back("mode", "QABS_GRAD");
- break;
- case ElemwiseMultiType::Mode::QFLOOR_DIV:
- props_.emplace_back("mode", "QFLOOR_DIV");
- break;
- case ElemwiseMultiType::Mode::QMOD:
- props_.emplace_back("mode", "QMOD");
- break;
- case ElemwiseMultiType::Mode::QSIGMOID_GRAD:
- props_.emplace_back("mode", "QSIGMOID_GRAD");
- break;
- case ElemwiseMultiType::Mode::QSWITCH_GT0:
- props_.emplace_back("mode", "QSWITCH_GT0");
- break;
- case ElemwiseMultiType::Mode::QTANH_GRAD:
- props_.emplace_back("mode", "QTANH_GRAD");
- break;
- case ElemwiseMultiType::Mode::QLT:
- props_.emplace_back("mode", "QLT");
- break;
- case ElemwiseMultiType::Mode::QLEQ:
- props_.emplace_back("mode", "QLEQ");
- break;
- case ElemwiseMultiType::Mode::QEQ:
- props_.emplace_back("mode", "QEQ");
- break;
- case ElemwiseMultiType::Mode::QPOW:
- props_.emplace_back("mode", "QPOW");
- break;
- case ElemwiseMultiType::Mode::QLOG_SUM_EXP:
- props_.emplace_back("mode", "QLOG_SUM_EXP");
- break;
- case ElemwiseMultiType::Mode::QFAST_TANH_GRAD:
- props_.emplace_back("mode", "QFAST_TANH_GRAD");
- break;
- case ElemwiseMultiType::Mode::QATAN2:
- props_.emplace_back("mode", "QATAN2");
- break;
- case ElemwiseMultiType::Mode::QCOND_LEQ_MOV:
- props_.emplace_back("mode", "QCOND_LEQ_MOV");
- break;
- case ElemwiseMultiType::Mode::QH_SWISH:
- props_.emplace_back("mode", "QH_SWISH");
- break;
- case ElemwiseMultiType::Mode::QFUSE_ADD_H_SWISH:
- props_.emplace_back("mode", "QFUSE_ADD_H_SWISH");
- break;
- case ElemwiseMultiType::Mode::QH_SWISH_GRAD:
- props_.emplace_back("mode", "QH_SWISH_GRAD");
- break;
- case ElemwiseMultiType::Mode::FUSE_MUL_ADD3_INT16xF32xF32xF32:
- props_.emplace_back("mode", "FUSE_MUL_ADD3_INT16xF32xF32xF32");
- break;
- case ElemwiseMultiType::Mode::MUL_INT16xF32xF32:
- props_.emplace_back("mode", "MUL_INT16xF32xF32");
- break;
- case ElemwiseMultiType::Mode::FUSE_MUL_ADD3_UINT8xF32xF32xF32:
- props_.emplace_back("mode", "FUSE_MUL_ADD3_UINT8xF32xF32xF32");
- break;
- case ElemwiseMultiType::Mode::QCOND_LT_MOV:
- props_.emplace_back("mode", "QCOND_LT_MOV");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("dtype", op_.dtype.name());
- return props_;
- }
- std::string ElemwiseMultiType_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ElemwiseMultiType>();
- static_cast<void>(op_);
-
- return to_string(op_.mode);
- }
- } // anonymous namespace
- OP_TRAIT_REG(ElemwiseMultiType, ElemwiseMultiType)
- .hash(ElemwiseMultiType_hash_impl)
- .is_same_st(ElemwiseMultiType_is_same_st_impl)
- .props(ElemwiseMultiType_props_impl)
- .make_name(ElemwiseMultiType_make_name_impl);
-
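- // Note the asymmetry for ExternOpr: the hash only combines name and data,
- // while equality also checks output_shapes, data_len and output_dtypes.
- // Hashing a subset of the compared fields is still correct, just coarser.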
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(ExternOpr);
-
- namespace {
- size_t ExternOpr_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ExternOpr>();
- static_cast<void>(op_);
-
- return mgb::hash_pair_combine(
- mgb::hash(op_.dyn_typeinfo()),
- mgb::hash_pair_combine(
- mgb::hash(op_.name),
- mgb::hash(op_.data))
- );
- }
- bool ExternOpr_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<ExternOpr>(),
- &&b_ = rhs_.cast_final_safe<ExternOpr>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.output_shapes != b_.output_shapes) return false;
- if (a_.name != b_.name) return false;
- if (a_.data != b_.data) return false;
- if (a_.data_len != b_.data_len) return false;
- if (a_.output_dtypes != b_.output_dtypes) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> ExternOpr_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ExternOpr>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("output_shapes", "{std::vector}");
- props_.emplace_back("name", op_.name);
- props_.emplace_back("data", op_.data);
- props_.emplace_back("data_len", std::to_string(op_.data_len));
- props_.emplace_back("output_dtypes", "{std::vector}");
- return props_;
- }
- std::string ExternOpr_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ExternOpr>();
- static_cast<void>(op_);
- return "ExternOpr";
- }
- } // anonymous namespace
- OP_TRAIT_REG(ExternOpr, ExternOpr)
- .hash(ExternOpr_hash_impl)
- .is_same_st(ExternOpr_is_same_st_impl)
- .props(ExternOpr_props_impl)
- .make_name(ExternOpr_make_name_impl);
-
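- // Eye includes the diagonal offset k, the element dtype and the compute node
- // in both hash and equality, since all three determine the produced tensor.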
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Eye);
-
- namespace {
- size_t Eye_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Eye>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.k));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dtype.handle()));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.comp_node));
- return val;
- }
- bool Eye_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Eye>(),
- &&b_ = rhs_.cast_final_safe<Eye>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.k != b_.k) return false;
- if (a_.dtype != b_.dtype) return false;
- if (a_.comp_node != b_.comp_node) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Eye_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Eye>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("k", std::to_string(op_.k));
- props_.emplace_back("dtype", op_.dtype.name());
- props_.emplace_back("comp_node", op_.comp_node.to_string());
- return props_;
- }
- std::string Eye_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Eye>();
- static_cast<void>(op_);
- return "Eye";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Eye, Eye)
- .hash(Eye_hash_impl)
- .is_same_st(Eye_is_same_st_impl)
- .props(Eye_props_impl)
- .make_name(Eye_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(FakeQuant);
-
- namespace {
- size_t FakeQuant_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<FakeQuant>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.qmin));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.qmax));
- return val;
- }
- bool FakeQuant_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<FakeQuant>(),
- &&b_ = rhs_.cast_final_safe<FakeQuant>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.qmin != b_.qmin) return false;
- if (a_.qmax != b_.qmax) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> FakeQuant_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<FakeQuant>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("qmin", std::to_string(op_.qmin));
- props_.emplace_back("qmax", std::to_string(op_.qmax));
- return props_;
- }
- std::string FakeQuant_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<FakeQuant>();
- static_cast<void>(op_);
- return "FakeQuant";
- }
- } // anonymous namespace
- OP_TRAIT_REG(FakeQuant, FakeQuant)
- .hash(FakeQuant_hash_impl)
- .is_same_st(FakeQuant_is_same_st_impl)
- .props(FakeQuant_props_impl)
- .make_name(FakeQuant_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(FastpathCopy);
-
- namespace {
- size_t FastpathCopy_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<FastpathCopy>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- return val;
- }
- bool FastpathCopy_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<FastpathCopy>(),
- &&b_ = rhs_.cast_final_safe<FastpathCopy>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return true;
- }
- std::vector<std::pair<const char*, std::string>> FastpathCopy_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<FastpathCopy>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- return props_;
- }
- std::string FastpathCopy_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<FastpathCopy>();
- static_cast<void>(op_);
- return "FastpathCopy";
- }
- } // anonymous namespace
- OP_TRAIT_REG(FastpathCopy, FastpathCopy)
- .hash(FastpathCopy_hash_impl)
- .is_same_st(FastpathCopy_is_same_st_impl)
- .props(FastpathCopy_props_impl)
- .make_name(FastpathCopy_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(GammaRNG);
-
- namespace {
- size_t GammaRNG_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GammaRNG>();
- static_cast<void>(op_);
-
- return mgb::hash_pair_combine(
- mgb::hash(op_.dyn_typeinfo()),
- mgb::hash(op_.handle)
- );
- }
- bool GammaRNG_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<GammaRNG>(),
- &&b_ = rhs_.cast_final_safe<GammaRNG>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return a_.handle == b_.handle;
- }
- std::vector<std::pair<const char*, std::string>> GammaRNG_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GammaRNG>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("seed", std::to_string(op_.seed));
- props_.emplace_back("handle", std::to_string(op_.handle));
- return props_;
- }
- std::string GammaRNG_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GammaRNG>();
- static_cast<void>(op_);
- return "GammaRNG";
- }
- } // anonymous namespace
- OP_TRAIT_REG(GammaRNG, GammaRNG)
- .hash(GammaRNG_hash_impl)
- .is_same_st(GammaRNG_is_same_st_impl)
- .props(GammaRNG_props_impl)
- .make_name(GammaRNG_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(GaussianRNG);
-
- namespace {
- size_t GaussianRNG_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GaussianRNG>();
- static_cast<void>(op_);
-
- return mgb::hash_pair_combine(
- mgb::hash(op_.dyn_typeinfo()),
- mgb::hash_pair_combine(
- mgb::hash(op_.handle),
- mgb::hash_pair_combine(
- mgb::hash(op_.mean),
- mgb::hash_pair_combine(
- mgb::hash(op_.std),
- mgb::hash(op_.dtype.enumv())
- )
- )
- )
- );
- }
- bool GaussianRNG_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<GaussianRNG>(),
- &&b_ = rhs_.cast_final_safe<GaussianRNG>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return a_.handle == b_.handle && a_.mean == b_.mean && a_.std == b_.std && a_.dtype == b_.dtype;
- }
- std::vector<std::pair<const char*, std::string>> GaussianRNG_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GaussianRNG>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("seed", std::to_string(op_.seed));
- props_.emplace_back("mean", std::to_string(op_.mean));
- props_.emplace_back("std", std::to_string(op_.std));
- props_.emplace_back("dtype", op_.dtype.name());
- props_.emplace_back("handle", std::to_string(op_.handle));
- return props_;
- }
- std::string GaussianRNG_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GaussianRNG>();
- static_cast<void>(op_);
- return "GaussianRNG";
- }
- } // anonymous namespace
- OP_TRAIT_REG(GaussianRNG, GaussianRNG)
- .hash(GaussianRNG_hash_impl)
- .is_same_st(GaussianRNG_is_same_st_impl)
- .props(GaussianRNG_props_impl)
- .make_name(GaussianRNG_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(GetVarShape);
-
- namespace {
- size_t GetVarShape_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GetVarShape>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- return val;
- }
- bool GetVarShape_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<GetVarShape>(),
- &&b_ = rhs_.cast_final_safe<GetVarShape>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> GetVarShape_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GetVarShape>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- return props_;
- }
- std::string GetVarShape_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GetVarShape>();
- static_cast<void>(op_);
- return "GetVarShape";
- }
- } // anonymous namespace
- OP_TRAIT_REG(GetVarShape, GetVarShape)
- .hash(GetVarShape_hash_impl)
- .is_same_st(GetVarShape_is_same_st_impl)
- .props(GetVarShape_props_impl)
- .make_name(GetVarShape_make_name_impl);
-
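- // GroupLocal reuses the convolution-style parameter set but, unlike
- // DeformableConv above, has no strategy or workspace_limit field to hash or
- // compare.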
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(GroupLocal);
-
- namespace {
- size_t GroupLocal_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GroupLocal>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.sparse));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.compute_mode));
- return val;
- }
- bool GroupLocal_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<GroupLocal>(),
- &&b_ = rhs_.cast_final_safe<GroupLocal>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.sparse != b_.sparse) return false;
- if (a_.format != b_.format) return false;
- if (a_.compute_mode != b_.compute_mode) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> GroupLocal_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GroupLocal>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case GroupLocal::Mode::CROSS_CORRELATION:
- props_.emplace_back("mode", "CROSS_CORRELATION");
- break;
- case GroupLocal::Mode::CONVOLUTION:
- props_.emplace_back("mode", "CONVOLUTION");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- switch (op_.sparse){
- case GroupLocal::Sparse::DENSE:
- props_.emplace_back("sparse", "DENSE");
- break;
- case GroupLocal::Sparse::GROUP:
- props_.emplace_back("sparse", "GROUP");
- break;
- default:
- props_.emplace_back("sparse", "INVALID");
- break;
- }
- switch (op_.format){
- case GroupLocal::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case GroupLocal::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case GroupLocal::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case GroupLocal::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case GroupLocal::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case GroupLocal::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case GroupLocal::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case GroupLocal::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case GroupLocal::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case GroupLocal::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case GroupLocal::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case GroupLocal::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case GroupLocal::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case GroupLocal::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case GroupLocal::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case GroupLocal::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case GroupLocal::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case GroupLocal::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.compute_mode){
- case GroupLocal::ComputeMode::DEFAULT:
- props_.emplace_back("compute_mode", "DEFAULT");
- break;
- case GroupLocal::ComputeMode::FLOAT32:
- props_.emplace_back("compute_mode", "FLOAT32");
- break;
- default:
- props_.emplace_back("compute_mode", "INVALID");
- break;
- }
- return props_;
- }
- std::string GroupLocal_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<GroupLocal>();
- static_cast<void>(op_);
- return "GroupLocal";
- }
- } // anonymous namespace
- OP_TRAIT_REG(GroupLocal, GroupLocal)
- .hash(GroupLocal_hash_impl)
- .is_same_st(GroupLocal_is_same_st_impl)
- .props(GroupLocal_props_impl)
- .make_name(GroupLocal_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Identity);
-
- namespace {
- size_t Identity_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Identity>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- return val;
- }
- bool Identity_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Identity>(),
- &&b_ = rhs_.cast_final_safe<Identity>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Identity_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Identity>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- return props_;
- }
- std::string Identity_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Identity>();
- static_cast<void>(op_);
- return "Identity";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Identity, Identity)
- .hash(Identity_hash_impl)
- .is_same_st(Identity_is_same_st_impl)
- .props(Identity_props_impl)
- .make_name(Identity_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Images2Neibs);
-
- namespace {
- size_t Images2Neibs_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Images2Neibs>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.window_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.window_w));
- return val;
- }
- bool Images2Neibs_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Images2Neibs>(),
- &&b_ = rhs_.cast_final_safe<Images2Neibs>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.window_h != b_.window_h) return false;
- if (a_.window_w != b_.window_w) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Images2Neibs_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Images2Neibs>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- props_.emplace_back("window_h", std::to_string(op_.window_h));
- props_.emplace_back("window_w", std::to_string(op_.window_w));
- return props_;
- }
- std::string Images2Neibs_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Images2Neibs>();
- static_cast<void>(op_);
- return "Images2Neibs";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Images2Neibs, Images2Neibs)
- .hash(Images2Neibs_hash_impl)
- .is_same_st(Images2Neibs_is_same_st_impl)
- .props(Images2Neibs_props_impl)
- .make_name(Images2Neibs_make_name_impl);
-
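- // The indexing family (IncrMeshIndexing, IncrSubtensor and the
- // Indexing*MultiAxisVec ops below) all share one shape: hash and compare the
- // items vector, and report it in props as the "{std::vector}" placeholder.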
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(IncrMeshIndexing);
-
- namespace {
- size_t IncrMeshIndexing_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IncrMeshIndexing>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool IncrMeshIndexing_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<IncrMeshIndexing>(),
- &&b_ = rhs_.cast_final_safe<IncrMeshIndexing>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> IncrMeshIndexing_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IncrMeshIndexing>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string IncrMeshIndexing_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IncrMeshIndexing>();
- static_cast<void>(op_);
- return "IncrMeshIndexing";
- }
- } // anonymous namespace
- OP_TRAIT_REG(IncrMeshIndexing, IncrMeshIndexing)
- .hash(IncrMeshIndexing_hash_impl)
- .is_same_st(IncrMeshIndexing_is_same_st_impl)
- .props(IncrMeshIndexing_props_impl)
- .make_name(IncrMeshIndexing_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(IncrSubtensor);
-
- namespace {
- size_t IncrSubtensor_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IncrSubtensor>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool IncrSubtensor_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<IncrSubtensor>(),
- &&b_ = rhs_.cast_final_safe<IncrSubtensor>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> IncrSubtensor_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IncrSubtensor>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string IncrSubtensor_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IncrSubtensor>();
- static_cast<void>(op_);
- return "IncrSubtensor";
- }
- } // anonymous namespace
- OP_TRAIT_REG(IncrSubtensor, IncrSubtensor)
- .hash(IncrSubtensor_hash_impl)
- .is_same_st(IncrSubtensor_is_same_st_impl)
- .props(IncrSubtensor_props_impl)
- .make_name(IncrSubtensor_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(IndexingIncrMultiAxisVec);
-
- namespace {
- size_t IndexingIncrMultiAxisVec_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingIncrMultiAxisVec>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool IndexingIncrMultiAxisVec_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<IndexingIncrMultiAxisVec>(),
- &&b_ = rhs_.cast_final_safe<IndexingIncrMultiAxisVec>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> IndexingIncrMultiAxisVec_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingIncrMultiAxisVec>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string IndexingIncrMultiAxisVec_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingIncrMultiAxisVec>();
- static_cast<void>(op_);
- return "IndexingIncrMultiAxisVec";
- }
- } // anonymous namespace
- OP_TRAIT_REG(IndexingIncrMultiAxisVec, IndexingIncrMultiAxisVec)
- .hash(IndexingIncrMultiAxisVec_hash_impl)
- .is_same_st(IndexingIncrMultiAxisVec_is_same_st_impl)
- .props(IndexingIncrMultiAxisVec_props_impl)
- .make_name(IndexingIncrMultiAxisVec_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(IndexingMultiAxisVec);
-
- namespace {
- size_t IndexingMultiAxisVec_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingMultiAxisVec>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool IndexingMultiAxisVec_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<IndexingMultiAxisVec>(),
- &&b_ = rhs_.cast_final_safe<IndexingMultiAxisVec>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> IndexingMultiAxisVec_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingMultiAxisVec>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string IndexingMultiAxisVec_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingMultiAxisVec>();
- static_cast<void>(op_);
- return "IndexingMultiAxisVec";
- }
- } // anonymous namespace
- OP_TRAIT_REG(IndexingMultiAxisVec, IndexingMultiAxisVec)
- .hash(IndexingMultiAxisVec_hash_impl)
- .is_same_st(IndexingMultiAxisVec_is_same_st_impl)
- .props(IndexingMultiAxisVec_props_impl)
- .make_name(IndexingMultiAxisVec_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(IndexingOneHot);
-
- namespace {
- size_t IndexingOneHot_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingOneHot>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.ndim));
- return val;
- }
- bool IndexingOneHot_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<IndexingOneHot>(),
- &&b_ = rhs_.cast_final_safe<IndexingOneHot>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- if (a_.ndim != b_.ndim) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> IndexingOneHot_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingOneHot>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- props_.emplace_back("ndim", std::to_string(op_.ndim));
- return props_;
- }
- std::string IndexingOneHot_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingOneHot>();
- static_cast<void>(op_);
- return "IndexingOneHot";
- }
- } // anonymous namespace
- OP_TRAIT_REG(IndexingOneHot, IndexingOneHot)
- .hash(IndexingOneHot_hash_impl)
- .is_same_st(IndexingOneHot_is_same_st_impl)
- .props(IndexingOneHot_props_impl)
- .make_name(IndexingOneHot_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(IndexingSetMultiAxisVec);
-
- namespace {
- size_t IndexingSetMultiAxisVec_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingSetMultiAxisVec>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool IndexingSetMultiAxisVec_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<IndexingSetMultiAxisVec>(),
- &&b_ = rhs_.cast_final_safe<IndexingSetMultiAxisVec>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> IndexingSetMultiAxisVec_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingSetMultiAxisVec>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string IndexingSetMultiAxisVec_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingSetMultiAxisVec>();
- static_cast<void>(op_);
- return "IndexingSetMultiAxisVec";
- }
- } // anonymous namespace
- OP_TRAIT_REG(IndexingSetMultiAxisVec, IndexingSetMultiAxisVec)
- .hash(IndexingSetMultiAxisVec_hash_impl)
- .is_same_st(IndexingSetMultiAxisVec_is_same_st_impl)
- .props(IndexingSetMultiAxisVec_props_impl)
- .make_name(IndexingSetMultiAxisVec_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(IndexingSetOneHot);
-
- namespace {
- size_t IndexingSetOneHot_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingSetOneHot>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.ndim));
- return val;
- }
- bool IndexingSetOneHot_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<IndexingSetOneHot>(),
- &&b_ = rhs_.cast_final_safe<IndexingSetOneHot>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- if (a_.ndim != b_.ndim) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> IndexingSetOneHot_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingSetOneHot>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- props_.emplace_back("ndim", std::to_string(op_.ndim));
- return props_;
- }
- std::string IndexingSetOneHot_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<IndexingSetOneHot>();
- static_cast<void>(op_);
- return "IndexingSetOneHot";
- }
- } // anonymous namespace
- OP_TRAIT_REG(IndexingSetOneHot, IndexingSetOneHot)
- .hash(IndexingSetOneHot_hash_impl)
- .is_same_st(IndexingSetOneHot_is_same_st_impl)
- .props(IndexingSetOneHot_props_impl)
- .make_name(IndexingSetOneHot_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(InplaceAdd);
-
- namespace {
- size_t InplaceAdd_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<InplaceAdd>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- return val;
- }
- bool InplaceAdd_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<InplaceAdd>(),
- &&b_ = rhs_.cast_final_safe<InplaceAdd>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return true;
- }
- std::vector<std::pair<const char*, std::string>> InplaceAdd_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<InplaceAdd>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- return props_;
- }
- std::string InplaceAdd_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<InplaceAdd>();
- static_cast<void>(op_);
- return "InplaceAdd";
- }
- } // anonymous namespace
- OP_TRAIT_REG(InplaceAdd, InplaceAdd)
- .hash(InplaceAdd_hash_impl)
- .is_same_st(InplaceAdd_is_same_st_impl)
- .props(InplaceAdd_props_impl)
- .make_name(InplaceAdd_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(LAMBUpdate);
-
- namespace {
- size_t LAMBUpdate_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LAMBUpdate>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.beta_1));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.beta_2));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.step));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.lr));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.weight_decay));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.eps));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.bias_correction));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.always_adapt));
- return val;
- }
- bool LAMBUpdate_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<LAMBUpdate>(),
- &&b_ = rhs_.cast_final_safe<LAMBUpdate>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.beta_1 != b_.beta_1) return false;
- if (a_.beta_2 != b_.beta_2) return false;
- if (a_.step != b_.step) return false;
- if (a_.lr != b_.lr) return false;
- if (a_.weight_decay != b_.weight_decay) return false;
- if (a_.eps != b_.eps) return false;
- if (a_.bias_correction != b_.bias_correction) return false;
- if (a_.always_adapt != b_.always_adapt) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> LAMBUpdate_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LAMBUpdate>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("beta_1", std::to_string(op_.beta_1));
- props_.emplace_back("beta_2", std::to_string(op_.beta_2));
- props_.emplace_back("step", std::to_string(op_.step));
- props_.emplace_back("lr", std::to_string(op_.lr));
- props_.emplace_back("weight_decay", std::to_string(op_.weight_decay));
- props_.emplace_back("eps", std::to_string(op_.eps));
- props_.emplace_back("bias_correction", std::to_string(op_.bias_correction));
- props_.emplace_back("always_adapt", std::to_string(op_.always_adapt));
- return props_;
- }
- std::string LAMBUpdate_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LAMBUpdate>();
- static_cast<void>(op_);
- return "LAMBUpdate";
- }
- } // anonymous namespace
- OP_TRAIT_REG(LAMBUpdate, LAMBUpdate)
- .hash(LAMBUpdate_hash_impl)
- .is_same_st(LAMBUpdate_is_same_st_impl)
- .props(LAMBUpdate_props_impl)
- .make_name(LAMBUpdate_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(LRN);
-
- namespace {
- size_t LRN_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LRN>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.n));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.k));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.alpha));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.beta));
- return val;
- }
- bool LRN_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<LRN>(),
- &&b_ = rhs_.cast_final_safe<LRN>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.n != b_.n) return false;
- if (a_.k != b_.k) return false;
- if (a_.alpha != b_.alpha) return false;
- if (a_.beta != b_.beta) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> LRN_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LRN>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("n", std::to_string(op_.n));
- props_.emplace_back("k", std::to_string(op_.k));
- props_.emplace_back("alpha", std::to_string(op_.alpha));
- props_.emplace_back("beta", std::to_string(op_.beta));
- return props_;
- }
- std::string LRN_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LRN>();
- static_cast<void>(op_);
- return "LRN";
- }
- } // anonymous namespace
- OP_TRAIT_REG(LRN, LRN)
- .hash(LRN_hash_impl)
- .is_same_st(LRN_is_same_st_impl)
- .props(LRN_props_impl)
- .make_name(LRN_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(LSQ);
-
- namespace {
- size_t LSQ_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LSQ>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.qmin));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.qmax));
- return val;
- }
- bool LSQ_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<LSQ>(),
- &&b_ = rhs_.cast_final_safe<LSQ>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.qmin != b_.qmin) return false;
- if (a_.qmax != b_.qmax) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> LSQ_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LSQ>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("qmin", std::to_string(op_.qmin));
- props_.emplace_back("qmax", std::to_string(op_.qmax));
- return props_;
- }
- std::string LSQ_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LSQ>();
- static_cast<void>(op_);
- return "LSQ";
- }
- } // anonymous namespace
- OP_TRAIT_REG(LSQ, LSQ)
- .hash(LSQ_hash_impl)
- .is_same_st(LSQ_is_same_st_impl)
- .props(LSQ_props_impl)
- .make_name(LSQ_make_name_impl);
-
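- // Enum-valued parameters (fwd_mode, compute_mode, format, strategy, ...) are hashed
- // via mgb::enumhash and rendered in props through a switch that maps each enumerator
- // to its name, falling back to "INVALID" for unrecognized values.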
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(LSTM);
-
- namespace {
- size_t LSTM_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LSTM>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.num_layers));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.bidirectional));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.bias));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.hidden_size));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.proj_size));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dropout));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.fwd_mode));
- return val;
- }
- bool LSTM_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<LSTM>(),
- &&b_ = rhs_.cast_final_safe<LSTM>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.num_layers != b_.num_layers) return false;
- if (a_.bidirectional != b_.bidirectional) return false;
- if (a_.bias != b_.bias) return false;
- if (a_.hidden_size != b_.hidden_size) return false;
- if (a_.proj_size != b_.proj_size) return false;
- if (a_.dropout != b_.dropout) return false;
- if (a_.fwd_mode != b_.fwd_mode) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> LSTM_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LSTM>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("num_layers", std::to_string(op_.num_layers));
- props_.emplace_back("bidirectional", std::to_string(op_.bidirectional));
- props_.emplace_back("bias", std::to_string(op_.bias));
- props_.emplace_back("hidden_size", std::to_string(op_.hidden_size));
- props_.emplace_back("proj_size", std::to_string(op_.proj_size));
- props_.emplace_back("dropout", std::to_string(op_.dropout));
- switch (op_.fwd_mode){
- case LSTM::FwdMode::TRAINING:
- props_.emplace_back("fwd_mode", "TRAINING");
- break;
- case LSTM::FwdMode::INFERENCE:
- props_.emplace_back("fwd_mode", "INFERENCE");
- break;
- default:
- props_.emplace_back("fwd_mode", "INVALID");
- break;
- }
- return props_;
- }
- std::string LSTM_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LSTM>();
- static_cast<void>(op_);
- return "LSTM";
- }
- } // anonymous namespace
- OP_TRAIT_REG(LSTM, LSTM)
- .hash(LSTM_hash_impl)
- .is_same_st(LSTM_is_same_st_impl)
- .props(LSTM_props_impl)
- .make_name(LSTM_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(LSTMCell);
-
- namespace {
- size_t LSTMCell_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LSTMCell>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- return val;
- }
- bool LSTMCell_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<LSTMCell>(),
- &&b_ = rhs_.cast_final_safe<LSTMCell>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return true;
- }
- std::vector<std::pair<const char*, std::string>> LSTMCell_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LSTMCell>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- return props_;
- }
- std::string LSTMCell_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LSTMCell>();
- static_cast<void>(op_);
- return "LSTMCell";
- }
- } // anonymous namespace
- OP_TRAIT_REG(LSTMCell, LSTMCell)
- .hash(LSTMCell_hash_impl)
- .is_same_st(LSTMCell_is_same_st_impl)
- .props(LSTMCell_props_impl)
- .make_name(LSTMCell_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(LayerNorm);
-
- namespace {
- size_t LayerNorm_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LayerNorm>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.affine));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.eps));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.normalized_dim));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.normalized_size));
- return val;
- }
- bool LayerNorm_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<LayerNorm>(),
- &&b_ = rhs_.cast_final_safe<LayerNorm>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.affine != b_.affine) return false;
- if (a_.eps != b_.eps) return false;
- if (a_.normalized_dim != b_.normalized_dim) return false;
- if (a_.normalized_size != b_.normalized_size) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> LayerNorm_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LayerNorm>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("affine", std::to_string(op_.affine));
- props_.emplace_back("eps", std::to_string(op_.eps));
- props_.emplace_back("normalized_dim", std::to_string(op_.normalized_dim));
- props_.emplace_back("normalized_size", std::to_string(op_.normalized_size));
- return props_;
- }
- std::string LayerNorm_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<LayerNorm>();
- static_cast<void>(op_);
- return "LayerNorm";
- }
- } // anonymous namespace
- OP_TRAIT_REG(LayerNorm, LayerNorm)
- .hash(LayerNorm_hash_impl)
- .is_same_st(LayerNorm_is_same_st_impl)
- .props(LayerNorm_props_impl)
- .make_name(LayerNorm_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Linspace);
-
- namespace {
- size_t Linspace_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Linspace>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.endpoint));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.comp_node));
- return val;
- }
- bool Linspace_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Linspace>(),
- &&b_ = rhs_.cast_final_safe<Linspace>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.endpoint != b_.endpoint) return false;
- if (a_.comp_node != b_.comp_node) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Linspace_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Linspace>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("endpoint", std::to_string(op_.endpoint));
- props_.emplace_back("comp_node", op_.comp_node.to_string());
- return props_;
- }
- std::string Linspace_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Linspace>();
- static_cast<void>(op_);
- return "Linspace";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Linspace, Linspace)
- .hash(Linspace_hash_impl)
- .is_same_st(Linspace_is_same_st_impl)
- .props(Linspace_props_impl)
- .make_name(Linspace_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(MagicMindRuntime);
-
- namespace {
- size_t MagicMindRuntime_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MagicMindRuntime>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.buf));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.buf_size));
- return val;
- }
- bool MagicMindRuntime_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<MagicMindRuntime>(),
- &&b_ = rhs_.cast_final_safe<MagicMindRuntime>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.buf != b_.buf) return false;
- if (a_.buf_size != b_.buf_size) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> MagicMindRuntime_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MagicMindRuntime>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("buf", op_.buf);
- props_.emplace_back("buf_size", std::to_string(op_.buf_size));
- return props_;
- }
- std::string MagicMindRuntime_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MagicMindRuntime>();
- static_cast<void>(op_);
- return "MagicMindRuntime";
- }
- } // anonymous namespace
- OP_TRAIT_REG(MagicMindRuntime, MagicMindRuntime)
- .hash(MagicMindRuntime_hash_impl)
- .is_same_st(MagicMindRuntime_is_same_st_impl)
- .props(MagicMindRuntime_props_impl)
- .make_name(MagicMindRuntime_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(MatrixInverse);
-
- namespace {
- size_t MatrixInverse_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MatrixInverse>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- return val;
- }
- bool MatrixInverse_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<MatrixInverse>(),
- &&b_ = rhs_.cast_final_safe<MatrixInverse>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return true;
- }
- std::vector<std::pair<const char*, std::string>> MatrixInverse_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MatrixInverse>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- return props_;
- }
- std::string MatrixInverse_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MatrixInverse>();
- static_cast<void>(op_);
- return "MatrixInverse";
- }
- } // anonymous namespace
- OP_TRAIT_REG(MatrixInverse, MatrixInverse)
- .hash(MatrixInverse_hash_impl)
- .is_same_st(MatrixInverse_is_same_st_impl)
- .props(MatrixInverse_props_impl)
- .make_name(MatrixInverse_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(MatrixMul);
-
- namespace {
- size_t MatrixMul_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MatrixMul>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.transposeA));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.transposeB));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.compute_mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dimA));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dimB));
- return val;
- }
- bool MatrixMul_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<MatrixMul>(),
- &&b_ = rhs_.cast_final_safe<MatrixMul>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.transposeA != b_.transposeA) return false;
- if (a_.transposeB != b_.transposeB) return false;
- if (a_.compute_mode != b_.compute_mode) return false;
- if (a_.format != b_.format) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- if (a_.dimA != b_.dimA) return false;
- if (a_.dimB != b_.dimB) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> MatrixMul_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MatrixMul>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("transposeA", std::to_string(op_.transposeA));
- props_.emplace_back("transposeB", std::to_string(op_.transposeB));
- switch (op_.compute_mode){
- case MatrixMul::ComputeMode::DEFAULT:
- props_.emplace_back("compute_mode", "DEFAULT");
- break;
- case MatrixMul::ComputeMode::FLOAT32:
- props_.emplace_back("compute_mode", "FLOAT32");
- break;
- default:
- props_.emplace_back("compute_mode", "INVALID");
- break;
- }
- switch (op_.format){
- case MatrixMul::Format::DEFAULT:
- props_.emplace_back("format", "DEFAULT");
- break;
- case MatrixMul::Format::MK4:
- props_.emplace_back("format", "MK4");
- break;
- case MatrixMul::Format::MK8:
- props_.emplace_back("format", "MK8");
- break;
- case MatrixMul::Format::MK4_DOT:
- props_.emplace_back("format", "MK4_DOT");
- break;
- case MatrixMul::Format::N32K4_DOT:
- props_.emplace_back("format", "N32K4_DOT");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.strategy){
- case MatrixMul::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case MatrixMul::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case MatrixMul::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case MatrixMul::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- props_.emplace_back("dimA", std::to_string(op_.dimA));
- props_.emplace_back("dimB", std::to_string(op_.dimB));
- return props_;
- }
- std::string MatrixMul_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MatrixMul>();
- static_cast<void>(op_);
- return "MatrixMul";
- }
- } // anonymous namespace
- OP_TRAIT_REG(MatrixMul, MatrixMul)
- .hash(MatrixMul_hash_impl)
- .is_same_st(MatrixMul_is_same_st_impl)
- .props(MatrixMul_props_impl)
- .make_name(MatrixMul_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(MeshIndexing);
-
- namespace {
- size_t MeshIndexing_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MeshIndexing>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool MeshIndexing_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<MeshIndexing>(),
- &&b_ = rhs_.cast_final_safe<MeshIndexing>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> MeshIndexing_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MeshIndexing>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string MeshIndexing_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<MeshIndexing>();
- static_cast<void>(op_);
- return "MeshIndexing";
- }
- } // anonymous namespace
- OP_TRAIT_REG(MeshIndexing, MeshIndexing)
- .hash(MeshIndexing_hash_impl)
- .is_same_st(MeshIndexing_is_same_st_impl)
- .props(MeshIndexing_props_impl)
- .make_name(MeshIndexing_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(NMSKeep);
-
- namespace {
- size_t NMSKeep_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<NMSKeep>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.iou_thresh));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.max_output));
- return val;
- }
- bool NMSKeep_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<NMSKeep>(),
- &&b_ = rhs_.cast_final_safe<NMSKeep>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.iou_thresh != b_.iou_thresh) return false;
- if (a_.max_output != b_.max_output) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> NMSKeep_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<NMSKeep>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("iou_thresh", std::to_string(op_.iou_thresh));
- props_.emplace_back("max_output", std::to_string(op_.max_output));
- return props_;
- }
- std::string NMSKeep_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<NMSKeep>();
- static_cast<void>(op_);
- return "NMSKeep";
- }
- } // anonymous namespace
- OP_TRAIT_REG(NMSKeep, NMSKeep)
- .hash(NMSKeep_hash_impl)
- .is_same_st(NMSKeep_is_same_st_impl)
- .props(NMSKeep_props_impl)
- .make_name(NMSKeep_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(NvOf);
-
- namespace {
- size_t NvOf_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<NvOf>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.precision));
- return val;
- }
- bool NvOf_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<NvOf>(),
- &&b_ = rhs_.cast_final_safe<NvOf>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.precision != b_.precision) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> NvOf_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<NvOf>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("precision", std::to_string(op_.precision));
- return props_;
- }
- std::string NvOf_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<NvOf>();
- static_cast<void>(op_);
- return "NvOf";
- }
- } // anonymous namespace
- OP_TRAIT_REG(NvOf, NvOf)
- .hash(NvOf_hash_impl)
- .is_same_st(NvOf_is_same_st_impl)
- .props(NvOf_props_impl)
- .make_name(NvOf_make_name_impl);
-
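- // Padding stores its seven front and seven back offsets as individual scalar fields;
- // each one participates in the hash, the equality test, and the props listing.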
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Padding);
-
- namespace {
- size_t Padding_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Padding>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.front_offset_dim0));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.front_offset_dim1));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.front_offset_dim2));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.front_offset_dim3));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.front_offset_dim4));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.front_offset_dim5));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.front_offset_dim6));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.back_offset_dim0));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.back_offset_dim1));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.back_offset_dim2));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.back_offset_dim3));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.back_offset_dim4));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.back_offset_dim5));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.back_offset_dim6));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.padding_val));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.padding_mode));
- return val;
- }
- bool Padding_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Padding>(),
- &&b_ = rhs_.cast_final_safe<Padding>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.front_offset_dim0 != b_.front_offset_dim0) return false;
- if (a_.front_offset_dim1 != b_.front_offset_dim1) return false;
- if (a_.front_offset_dim2 != b_.front_offset_dim2) return false;
- if (a_.front_offset_dim3 != b_.front_offset_dim3) return false;
- if (a_.front_offset_dim4 != b_.front_offset_dim4) return false;
- if (a_.front_offset_dim5 != b_.front_offset_dim5) return false;
- if (a_.front_offset_dim6 != b_.front_offset_dim6) return false;
- if (a_.back_offset_dim0 != b_.back_offset_dim0) return false;
- if (a_.back_offset_dim1 != b_.back_offset_dim1) return false;
- if (a_.back_offset_dim2 != b_.back_offset_dim2) return false;
- if (a_.back_offset_dim3 != b_.back_offset_dim3) return false;
- if (a_.back_offset_dim4 != b_.back_offset_dim4) return false;
- if (a_.back_offset_dim5 != b_.back_offset_dim5) return false;
- if (a_.back_offset_dim6 != b_.back_offset_dim6) return false;
- if (a_.padding_val != b_.padding_val) return false;
- if (a_.padding_mode != b_.padding_mode) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Padding_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Padding>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("front_offset_dim0", std::to_string(op_.front_offset_dim0));
- props_.emplace_back("front_offset_dim1", std::to_string(op_.front_offset_dim1));
- props_.emplace_back("front_offset_dim2", std::to_string(op_.front_offset_dim2));
- props_.emplace_back("front_offset_dim3", std::to_string(op_.front_offset_dim3));
- props_.emplace_back("front_offset_dim4", std::to_string(op_.front_offset_dim4));
- props_.emplace_back("front_offset_dim5", std::to_string(op_.front_offset_dim5));
- props_.emplace_back("front_offset_dim6", std::to_string(op_.front_offset_dim6));
- props_.emplace_back("back_offset_dim0", std::to_string(op_.back_offset_dim0));
- props_.emplace_back("back_offset_dim1", std::to_string(op_.back_offset_dim1));
- props_.emplace_back("back_offset_dim2", std::to_string(op_.back_offset_dim2));
- props_.emplace_back("back_offset_dim3", std::to_string(op_.back_offset_dim3));
- props_.emplace_back("back_offset_dim4", std::to_string(op_.back_offset_dim4));
- props_.emplace_back("back_offset_dim5", std::to_string(op_.back_offset_dim5));
- props_.emplace_back("back_offset_dim6", std::to_string(op_.back_offset_dim6));
- props_.emplace_back("padding_val", std::to_string(op_.padding_val));
- switch (op_.padding_mode){
- case Padding::PaddingMode::REPLICATE:
- props_.emplace_back("padding_mode", "REPLICATE");
- break;
- case Padding::PaddingMode::REFLECT:
- props_.emplace_back("padding_mode", "REFLECT");
- break;
- case Padding::PaddingMode::CONSTANT:
- props_.emplace_back("padding_mode", "CONSTANT");
- break;
- default:
- props_.emplace_back("padding_mode", "INVALID");
- break;
- }
- return props_;
- }
- std::string Padding_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Padding>();
- static_cast<void>(op_);
- return "Padding";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Padding, Padding)
- .hash(Padding_hash_impl)
- .is_same_st(Padding_is_same_st_impl)
- .props(Padding_props_impl)
- .make_name(Padding_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(ParamPackConcat);
-
- namespace {
- size_t ParamPackConcat_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ParamPackConcat>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.offsets));
- return val;
- }
- bool ParamPackConcat_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<ParamPackConcat>(),
- &&b_ = rhs_.cast_final_safe<ParamPackConcat>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.offsets != b_.offsets) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> ParamPackConcat_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ParamPackConcat>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("offsets", "{std::vector}");
- return props_;
- }
- std::string ParamPackConcat_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ParamPackConcat>();
- static_cast<void>(op_);
- return "ParamPackConcat";
- }
- } // anonymous namespace
- OP_TRAIT_REG(ParamPackConcat, ParamPackConcat)
- .hash(ParamPackConcat_hash_impl)
- .is_same_st(ParamPackConcat_is_same_st_impl)
- .props(ParamPackConcat_props_impl)
- .make_name(ParamPackConcat_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(ParamPackSplit);
-
- namespace {
- size_t ParamPackSplit_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ParamPackSplit>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.offsets));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.shapes));
- return val;
- }
- bool ParamPackSplit_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<ParamPackSplit>(),
- &&b_ = rhs_.cast_final_safe<ParamPackSplit>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.offsets != b_.offsets) return false;
- if (a_.shapes != b_.shapes) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> ParamPackSplit_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ParamPackSplit>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("offsets", "{std::vector}");
- props_.emplace_back("shapes", "{std::vector}");
- return props_;
- }
- std::string ParamPackSplit_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ParamPackSplit>();
- static_cast<void>(op_);
- return "ParamPackSplit";
- }
- } // anonymous namespace
- OP_TRAIT_REG(ParamPackSplit, ParamPackSplit)
- .hash(ParamPackSplit_hash_impl)
- .is_same_st(ParamPackSplit_is_same_st_impl)
- .props(ParamPackSplit_props_impl)
- .make_name(ParamPackSplit_make_name_impl);
-
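- // RNG ops (PermutationRNG here, PoissonRNG below) key hashing and equality on the RNG
- // handle (plus dtype for PermutationRNG); the seed is reported in props but is not
- // folded into hash or equality.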
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(PermutationRNG);
-
- namespace {
- size_t PermutationRNG_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PermutationRNG>();
- static_cast<void>(op_);
-
- return mgb::hash_pair_combine(
- mgb::hash(op_.dyn_typeinfo()),
- mgb::hash_pair_combine(
- mgb::hash(op_.handle),
- mgb::hash(op_.dtype.enumv())
- )
- );
- }
- bool PermutationRNG_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<PermutationRNG>(),
- &&b_ = rhs_.cast_final_safe<PermutationRNG>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return a_.handle == b_.handle && a_.dtype == b_.dtype;
- }
- std::vector<std::pair<const char*, std::string>> PermutationRNG_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PermutationRNG>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("seed", std::to_string(op_.seed));
- props_.emplace_back("dtype", op_.dtype.name());
- props_.emplace_back("handle", std::to_string(op_.handle));
- return props_;
- }
- std::string PermutationRNG_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PermutationRNG>();
- static_cast<void>(op_);
- return "PermutationRNG";
- }
- } // anonymous namespace
- OP_TRAIT_REG(PermutationRNG, PermutationRNG)
- .hash(PermutationRNG_hash_impl)
- .is_same_st(PermutationRNG_is_same_st_impl)
- .props(PermutationRNG_props_impl)
- .make_name(PermutationRNG_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(PixelShuffle);
-
- namespace {
- size_t PixelShuffle_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PixelShuffle>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.factor));
- return val;
- }
- bool PixelShuffle_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<PixelShuffle>(),
- &&b_ = rhs_.cast_final_safe<PixelShuffle>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.factor != b_.factor) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> PixelShuffle_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PixelShuffle>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("factor", std::to_string(op_.factor));
- return props_;
- }
- std::string PixelShuffle_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PixelShuffle>();
- static_cast<void>(op_);
- return "PixelShuffle";
- }
- } // anonymous namespace
- OP_TRAIT_REG(PixelShuffle, PixelShuffle)
- .hash(PixelShuffle_hash_impl)
- .is_same_st(PixelShuffle_is_same_st_impl)
- .props(PixelShuffle_props_impl)
- .make_name(PixelShuffle_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(PixelShuffleBackward);
-
- namespace {
- size_t PixelShuffleBackward_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PixelShuffleBackward>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.factor));
- return val;
- }
- bool PixelShuffleBackward_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<PixelShuffleBackward>(),
- &&b_ = rhs_.cast_final_safe<PixelShuffleBackward>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.factor != b_.factor) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> PixelShuffleBackward_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PixelShuffleBackward>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("factor", std::to_string(op_.factor));
- return props_;
- }
- std::string PixelShuffleBackward_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PixelShuffleBackward>();
- static_cast<void>(op_);
- return "PixelShuffleBackward";
- }
- } // anonymous namespace
- OP_TRAIT_REG(PixelShuffleBackward, PixelShuffleBackward)
- .hash(PixelShuffleBackward_hash_impl)
- .is_same_st(PixelShuffleBackward_is_same_st_impl)
- .props(PixelShuffleBackward_props_impl)
- .make_name(PixelShuffleBackward_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(PoissonRNG);
-
- namespace {
- size_t PoissonRNG_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PoissonRNG>();
- static_cast<void>(op_);
-
- return mgb::hash_pair_combine(
- mgb::hash(op_.dyn_typeinfo()),
- mgb::hash(op_.handle)
- );
- }
- bool PoissonRNG_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<PoissonRNG>(),
- &&b_ = rhs_.cast_final_safe<PoissonRNG>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return a_.handle == b_.handle;
- }
- std::vector<std::pair<const char*, std::string>> PoissonRNG_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PoissonRNG>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("seed", std::to_string(op_.seed));
- props_.emplace_back("handle", std::to_string(op_.handle));
- return props_;
- }
- std::string PoissonRNG_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<PoissonRNG>();
- static_cast<void>(op_);
- return "PoissonRNG";
- }
- } // anonymous namespace
- OP_TRAIT_REG(PoissonRNG, PoissonRNG)
- .hash(PoissonRNG_hash_impl)
- .is_same_st(PoissonRNG_is_same_st_impl)
- .props(PoissonRNG_props_impl)
- .make_name(PoissonRNG_make_name_impl);
-
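- // Execution-tuning parameters such as `strategy` and `workspace_limit` are treated
- // like any other field: two defs that differ only in these values hash differently
- // and compare unequal.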
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Pooling);
-
- namespace {
- size_t Pooling_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Pooling>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.window_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.window_w));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.strategy));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.workspace_limit));
- return val;
- }
- bool Pooling_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Pooling>(),
- &&b_ = rhs_.cast_final_safe<Pooling>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.window_h != b_.window_h) return false;
- if (a_.window_w != b_.window_w) return false;
- if (a_.format != b_.format) return false;
- if (a_.strategy != b_.strategy) return false;
- if (a_.workspace_limit != b_.workspace_limit) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Pooling_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Pooling>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case Pooling::Mode::MAX:
- props_.emplace_back("mode", "MAX");
- break;
- case Pooling::Mode::AVERAGE:
- props_.emplace_back("mode", "AVERAGE");
- break;
- case Pooling::Mode::AVERAGE_COUNT_EXCLUDE_PADDING:
- props_.emplace_back("mode", "AVERAGE_COUNT_EXCLUDE_PADDING");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("window_h", std::to_string(op_.window_h));
- props_.emplace_back("window_w", std::to_string(op_.window_w));
- switch (op_.format){
- case Pooling::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case Pooling::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case Pooling::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case Pooling::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case Pooling::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case Pooling::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case Pooling::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case Pooling::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case Pooling::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case Pooling::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case Pooling::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case Pooling::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case Pooling::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case Pooling::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case Pooling::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case Pooling::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case Pooling::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case Pooling::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- switch (op_.strategy){
- case Pooling::Strategy::HEURISTIC:
- props_.emplace_back("strategy", "HEURISTIC");
- break;
- case Pooling::Strategy::PROFILE:
- props_.emplace_back("strategy", "PROFILE");
- break;
- case Pooling::Strategy::REPRODUCIBLE:
- props_.emplace_back("strategy", "REPRODUCIBLE");
- break;
- case Pooling::Strategy::OPTIMIZED:
- props_.emplace_back("strategy", "OPTIMIZED");
- break;
- default:
- props_.emplace_back("strategy", "INVALID");
- break;
- }
- props_.emplace_back("workspace_limit", std::to_string(op_.workspace_limit));
- return props_;
- }
- std::string Pooling_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Pooling>();
- static_cast<void>(op_);
- return "Pooling";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Pooling, Pooling)
- .hash(Pooling_hash_impl)
- .is_same_st(Pooling_is_same_st_impl)
- .props(Pooling_props_impl)
- .make_name(Pooling_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(RNN);
-
- namespace {
- size_t RNN_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RNN>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.num_layers));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.bidirectional));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.bias));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.hidden_size));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dropout));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.nonlineMode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.fwd_mode));
- return val;
- }
- bool RNN_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<RNN>(),
- &&b_ = rhs_.cast_final_safe<RNN>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.num_layers != b_.num_layers) return false;
- if (a_.bidirectional != b_.bidirectional) return false;
- if (a_.bias != b_.bias) return false;
- if (a_.hidden_size != b_.hidden_size) return false;
- if (a_.dropout != b_.dropout) return false;
- if (a_.nonlineMode != b_.nonlineMode) return false;
- if (a_.fwd_mode != b_.fwd_mode) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> RNN_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RNN>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("num_layers", std::to_string(op_.num_layers));
- props_.emplace_back("bidirectional", std::to_string(op_.bidirectional));
- props_.emplace_back("bias", std::to_string(op_.bias));
- props_.emplace_back("hidden_size", std::to_string(op_.hidden_size));
- props_.emplace_back("dropout", std::to_string(op_.dropout));
- switch (op_.nonlineMode){
- case RNN::NonlineMode::IDENTITY:
- props_.emplace_back("nonlineMode", "IDENTITY");
- break;
- case RNN::NonlineMode::RELU:
- props_.emplace_back("nonlineMode", "RELU");
- break;
- case RNN::NonlineMode::TANH:
- props_.emplace_back("nonlineMode", "TANH");
- break;
- default:
- props_.emplace_back("nonlineMode", "INVALID");
- break;
- }
- switch (op_.fwd_mode){
- case RNN::FwdMode::TRAINING:
- props_.emplace_back("fwd_mode", "TRAINING");
- break;
- case RNN::FwdMode::INFERENCE:
- props_.emplace_back("fwd_mode", "INFERENCE");
- break;
- default:
- props_.emplace_back("fwd_mode", "INVALID");
- break;
- }
- return props_;
- }
- std::string RNN_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RNN>();
- static_cast<void>(op_);
- return "RNN";
- }
- } // anonymous namespace
- OP_TRAIT_REG(RNN, RNN)
- .hash(RNN_hash_impl)
- .is_same_st(RNN_is_same_st_impl)
- .props(RNN_props_impl)
- .make_name(RNN_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(RNNCell);
-
- namespace {
- size_t RNNCell_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RNNCell>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.nonlineMode));
- return val;
- }
- bool RNNCell_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<RNNCell>(),
- &&b_ = rhs_.cast_final_safe<RNNCell>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.nonlineMode != b_.nonlineMode) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> RNNCell_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RNNCell>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.nonlineMode){
- case RNNCell::NonlineMode::IDENTITY:
- props_.emplace_back("nonlineMode", "IDENTITY");
- break;
- case RNNCell::NonlineMode::RELU:
- props_.emplace_back("nonlineMode", "RELU");
- break;
- case RNNCell::NonlineMode::TANH:
- props_.emplace_back("nonlineMode", "TANH");
- break;
- default:
- props_.emplace_back("nonlineMode", "INVALID");
- break;
- }
- return props_;
- }
- std::string RNNCell_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RNNCell>();
- static_cast<void>(op_);
- return "RNNCell";
- }
- } // anonymous namespace
- OP_TRAIT_REG(RNNCell, RNNCell)
- .hash(RNNCell_hash_impl)
- .is_same_st(RNNCell_is_same_st_impl)
- .props(RNNCell_props_impl)
- .make_name(RNNCell_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(ROIAlign);
-
- namespace {
- size_t ROIAlign_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ROIAlign>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.spatial_scale));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.offset));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pooled_height));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pooled_width));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.sample_height));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.sample_width));
- return val;
- }
- bool ROIAlign_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<ROIAlign>(),
- &&b_ = rhs_.cast_final_safe<ROIAlign>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.format != b_.format) return false;
- if (a_.spatial_scale != b_.spatial_scale) return false;
- if (a_.offset != b_.offset) return false;
- if (a_.pooled_height != b_.pooled_height) return false;
- if (a_.pooled_width != b_.pooled_width) return false;
- if (a_.sample_height != b_.sample_height) return false;
- if (a_.sample_width != b_.sample_width) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> ROIAlign_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ROIAlign>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case ROIAlign::Mode::MAX:
- props_.emplace_back("mode", "MAX");
- break;
- case ROIAlign::Mode::AVERAGE:
- props_.emplace_back("mode", "AVERAGE");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- switch (op_.format){
- case ROIAlign::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case ROIAlign::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case ROIAlign::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case ROIAlign::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case ROIAlign::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case ROIAlign::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case ROIAlign::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case ROIAlign::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case ROIAlign::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case ROIAlign::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case ROIAlign::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case ROIAlign::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case ROIAlign::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case ROIAlign::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case ROIAlign::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case ROIAlign::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case ROIAlign::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case ROIAlign::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- props_.emplace_back("spatial_scale", std::to_string(op_.spatial_scale));
- props_.emplace_back("offset", std::to_string(op_.offset));
- props_.emplace_back("pooled_height", std::to_string(op_.pooled_height));
- props_.emplace_back("pooled_width", std::to_string(op_.pooled_width));
- props_.emplace_back("sample_height", std::to_string(op_.sample_height));
- props_.emplace_back("sample_width", std::to_string(op_.sample_width));
- return props_;
- }
- std::string ROIAlign_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ROIAlign>();
- static_cast<void>(op_);
- return "ROIAlign";
- }
- } // anonymous namespace
- OP_TRAIT_REG(ROIAlign, ROIAlign)
- .hash(ROIAlign_hash_impl)
- .is_same_st(ROIAlign_is_same_st_impl)
- .props(ROIAlign_props_impl)
- .make_name(ROIAlign_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(ROIPooling);
-
- namespace {
- size_t ROIPooling_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ROIPooling>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.scale));
- return val;
- }
- bool ROIPooling_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<ROIPooling>(),
- &&b_ = rhs_.cast_final_safe<ROIPooling>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.scale != b_.scale) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> ROIPooling_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ROIPooling>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case ROIPooling::Mode::MAX:
- props_.emplace_back("mode", "MAX");
- break;
- case ROIPooling::Mode::AVERAGE:
- props_.emplace_back("mode", "AVERAGE");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("scale", std::to_string(op_.scale));
- return props_;
- }
- std::string ROIPooling_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ROIPooling>();
- static_cast<void>(op_);
- return "ROIPooling";
- }
- } // anonymous namespace
- OP_TRAIT_REG(ROIPooling, ROIPooling)
- .hash(ROIPooling_hash_impl)
- .is_same_st(ROIPooling_is_same_st_impl)
- .props(ROIPooling_props_impl)
- .make_name(ROIPooling_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Reduce);
-
- namespace {
- size_t Reduce_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Reduce>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.data_type));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.keepdim));
- return val;
- }
- bool Reduce_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Reduce>(),
- &&b_ = rhs_.cast_final_safe<Reduce>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- if (a_.axis != b_.axis) return false;
- if (a_.data_type != b_.data_type) return false;
- if (a_.keepdim != b_.keepdim) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Reduce_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Reduce>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case Reduce::Mode::SUM:
- props_.emplace_back("mode", "SUM");
- break;
- case Reduce::Mode::SUM_SQR:
- props_.emplace_back("mode", "SUM_SQR");
- break;
- case Reduce::Mode::PRODUCT:
- props_.emplace_back("mode", "PRODUCT");
- break;
- case Reduce::Mode::MIN:
- props_.emplace_back("mode", "MIN");
- break;
- case Reduce::Mode::MAX:
- props_.emplace_back("mode", "MAX");
- break;
- case Reduce::Mode::MEAN:
- props_.emplace_back("mode", "MEAN");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- props_.emplace_back("axis", std::to_string(op_.axis));
- switch (op_.data_type){
- case Reduce::DataType::DEFAULT:
- props_.emplace_back("data_type", "DEFAULT");
- break;
- case Reduce::DataType::FLOAT_IO16xC32:
- props_.emplace_back("data_type", "FLOAT_IO16xC32");
- break;
- case Reduce::DataType::FLOAT_O32xC32:
- props_.emplace_back("data_type", "FLOAT_O32xC32");
- break;
- case Reduce::DataType::FLOAT_O16xC32:
- props_.emplace_back("data_type", "FLOAT_O16xC32");
- break;
- case Reduce::DataType::QUINT_I8xO32:
- props_.emplace_back("data_type", "QUINT_I8xO32");
- break;
- case Reduce::DataType::QINT_I8xO32:
- props_.emplace_back("data_type", "QINT_I8xO32");
- break;
- default:
- props_.emplace_back("data_type", "INVALID");
- break;
- }
- props_.emplace_back("keepdim", std::to_string(op_.keepdim));
- return props_;
- }
- std::string Reduce_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Reduce>();
- static_cast<void>(op_);
- return "Reduce";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Reduce, Reduce)
- .hash(Reduce_hash_impl)
- .is_same_st(Reduce_is_same_st_impl)
- .props(Reduce_props_impl)
- .make_name(Reduce_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Remap);
-
- namespace {
- size_t Remap_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Remap>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.imode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.border_type));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.scalar));
- return val;
- }
- bool Remap_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Remap>(),
- &&b_ = rhs_.cast_final_safe<Remap>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.imode != b_.imode) return false;
- if (a_.border_type != b_.border_type) return false;
- if (a_.format != b_.format) return false;
- if (a_.scalar != b_.scalar) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Remap_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Remap>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.imode){
- case Remap::InterpolationMode::NEAREST:
- props_.emplace_back("imode", "NEAREST");
- break;
- case Remap::InterpolationMode::LINEAR:
- props_.emplace_back("imode", "LINEAR");
- break;
- case Remap::InterpolationMode::AREA:
- props_.emplace_back("imode", "AREA");
- break;
- case Remap::InterpolationMode::CUBIC:
- props_.emplace_back("imode", "CUBIC");
- break;
- case Remap::InterpolationMode::LANCZOS4:
- props_.emplace_back("imode", "LANCZOS4");
- break;
- default:
- props_.emplace_back("imode", "INVALID");
- break;
- }
- switch (op_.border_type){
- case Remap::BorderMode::REPLICATE:
- props_.emplace_back("border_type", "REPLICATE");
- break;
- case Remap::BorderMode::REFLECT:
- props_.emplace_back("border_type", "REFLECT");
- break;
- case Remap::BorderMode::REFLECT_101:
- props_.emplace_back("border_type", "REFLECT_101");
- break;
- case Remap::BorderMode::WRAP:
- props_.emplace_back("border_type", "WRAP");
- break;
- case Remap::BorderMode::CONSTANT:
- props_.emplace_back("border_type", "CONSTANT");
- break;
- case Remap::BorderMode::TRANSPARENT:
- props_.emplace_back("border_type", "TRANSPARENT");
- break;
- case Remap::BorderMode::ISOLATED:
- props_.emplace_back("border_type", "ISOLATED");
- break;
- default:
- props_.emplace_back("border_type", "INVALID");
- break;
- }
- switch (op_.format){
- case Remap::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case Remap::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case Remap::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case Remap::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case Remap::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case Remap::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case Remap::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case Remap::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case Remap::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case Remap::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case Remap::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case Remap::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case Remap::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case Remap::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case Remap::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case Remap::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case Remap::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case Remap::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- props_.emplace_back("scalar", std::to_string(op_.scalar));
- return props_;
- }
- std::string Remap_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Remap>();
- static_cast<void>(op_);
- return "Remap";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Remap, Remap)
- .hash(Remap_hash_impl)
- .is_same_st(Remap_is_same_st_impl)
- .props(Remap_props_impl)
- .make_name(Remap_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(RemoteRecv);
-
- namespace {
- size_t RemoteRecv_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RemoteRecv>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.key));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.addr));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.port));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.rank_from));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.cn));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.shape));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dtype.handle()));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.backend));
- return val;
- }
- bool RemoteRecv_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<RemoteRecv>(),
- &&b_ = rhs_.cast_final_safe<RemoteRecv>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.key != b_.key) return false;
- if (a_.addr != b_.addr) return false;
- if (a_.port != b_.port) return false;
- if (a_.rank_from != b_.rank_from) return false;
- if (a_.cn != b_.cn) return false;
- if (a_.shape != b_.shape) return false;
- if (a_.dtype != b_.dtype) return false;
- if (a_.backend != b_.backend) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> RemoteRecv_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RemoteRecv>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("key", op_.key);
- props_.emplace_back("addr", op_.addr);
- props_.emplace_back("port", std::to_string(op_.port));
- props_.emplace_back("rank_from", std::to_string(op_.rank_from));
- props_.emplace_back("cn", op_.cn.to_string());
- props_.emplace_back("shape", "{std::vector}");
- props_.emplace_back("dtype", op_.dtype.name());
- props_.emplace_back("backend", op_.backend);
- return props_;
- }
- std::string RemoteRecv_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RemoteRecv>();
- static_cast<void>(op_);
- return "RemoteRecv";
- }
- } // anonymous namespace
- OP_TRAIT_REG(RemoteRecv, RemoteRecv)
- .hash(RemoteRecv_hash_impl)
- .is_same_st(RemoteRecv_is_same_st_impl)
- .props(RemoteRecv_props_impl)
- .make_name(RemoteRecv_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(RemoteSend);
-
- namespace {
- size_t RemoteSend_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RemoteSend>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.key));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.addr));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.port));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.rank_to));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.backend));
- return val;
- }
- bool RemoteSend_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<RemoteSend>(),
- &&b_ = rhs_.cast_final_safe<RemoteSend>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.key != b_.key) return false;
- if (a_.addr != b_.addr) return false;
- if (a_.port != b_.port) return false;
- if (a_.rank_to != b_.rank_to) return false;
- if (a_.backend != b_.backend) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> RemoteSend_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RemoteSend>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("key", op_.key);
- props_.emplace_back("addr", op_.addr);
- props_.emplace_back("port", std::to_string(op_.port));
- props_.emplace_back("rank_to", std::to_string(op_.rank_to));
- props_.emplace_back("backend", op_.backend);
- return props_;
- }
- std::string RemoteSend_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RemoteSend>();
- static_cast<void>(op_);
- return "RemoteSend";
- }
- } // anonymous namespace
- OP_TRAIT_REG(RemoteSend, RemoteSend)
- .hash(RemoteSend_hash_impl)
- .is_same_st(RemoteSend_is_same_st_impl)
- .props(RemoteSend_props_impl)
- .make_name(RemoteSend_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(RemoveAxis);
-
- namespace {
- size_t RemoveAxis_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RemoveAxis>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- return val;
- }
- bool RemoveAxis_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<RemoveAxis>(),
- &&b_ = rhs_.cast_final_safe<RemoveAxis>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> RemoveAxis_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RemoveAxis>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", "{std::vector}");
- return props_;
- }
- std::string RemoveAxis_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<RemoveAxis>();
- static_cast<void>(op_);
- return "RemoveAxis";
- }
- } // anonymous namespace
- OP_TRAIT_REG(RemoveAxis, RemoveAxis)
- .hash(RemoveAxis_hash_impl)
- .is_same_st(RemoveAxis_is_same_st_impl)
- .props(RemoveAxis_props_impl)
- .make_name(RemoveAxis_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Reshape);
-
- namespace {
- size_t Reshape_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Reshape>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.shape));
- return val;
- }
- bool Reshape_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Reshape>(),
- &&b_ = rhs_.cast_final_safe<Reshape>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- if (a_.shape != b_.shape) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Reshape_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Reshape>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- props_.emplace_back("shape", "{std::vector}");
- return props_;
- }
- std::string Reshape_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Reshape>();
- static_cast<void>(op_);
- return "Reshape";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Reshape, Reshape)
- .hash(Reshape_hash_impl)
- .is_same_st(Reshape_is_same_st_impl)
- .props(Reshape_props_impl)
- .make_name(Reshape_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Resize);
-
- namespace {
- size_t Resize_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Resize>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.imode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- return val;
- }
- bool Resize_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Resize>(),
- &&b_ = rhs_.cast_final_safe<Resize>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.imode != b_.imode) return false;
- if (a_.format != b_.format) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Resize_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Resize>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.imode){
- case Resize::InterpolationMode::NEAREST:
- props_.emplace_back("imode", "NEAREST");
- break;
- case Resize::InterpolationMode::LINEAR:
- props_.emplace_back("imode", "LINEAR");
- break;
- case Resize::InterpolationMode::AREA:
- props_.emplace_back("imode", "AREA");
- break;
- case Resize::InterpolationMode::CUBIC:
- props_.emplace_back("imode", "CUBIC");
- break;
- case Resize::InterpolationMode::LANCZOS4:
- props_.emplace_back("imode", "LANCZOS4");
- break;
- default:
- props_.emplace_back("imode", "INVALID");
- break;
- }
- switch (op_.format){
- case Resize::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case Resize::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case Resize::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case Resize::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case Resize::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case Resize::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case Resize::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case Resize::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case Resize::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case Resize::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case Resize::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case Resize::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case Resize::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case Resize::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case Resize::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case Resize::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case Resize::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case Resize::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- return props_;
- }
- std::string Resize_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Resize>();
- static_cast<void>(op_);
- return "Resize";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Resize, Resize)
- .hash(Resize_hash_impl)
- .is_same_st(Resize_is_same_st_impl)
- .props(Resize_props_impl)
- .make_name(Resize_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(SVD);
-
- namespace {
- size_t SVD_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SVD>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.full_matrices));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.compute_uv));
- return val;
- }
- bool SVD_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<SVD>(),
- &&b_ = rhs_.cast_final_safe<SVD>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.full_matrices != b_.full_matrices) return false;
- if (a_.compute_uv != b_.compute_uv) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> SVD_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SVD>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("full_matrices", std::to_string(op_.full_matrices));
- props_.emplace_back("compute_uv", std::to_string(op_.compute_uv));
- return props_;
- }
- std::string SVD_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SVD>();
- static_cast<void>(op_);
- return "SVD";
- }
- } // anonymous namespace
- OP_TRAIT_REG(SVD, SVD)
- .hash(SVD_hash_impl)
- .is_same_st(SVD_is_same_st_impl)
- .props(SVD_props_impl)
- .make_name(SVD_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(SetMeshIndexing);
-
- namespace {
- size_t SetMeshIndexing_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SetMeshIndexing>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool SetMeshIndexing_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<SetMeshIndexing>(),
- &&b_ = rhs_.cast_final_safe<SetMeshIndexing>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> SetMeshIndexing_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SetMeshIndexing>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string SetMeshIndexing_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SetMeshIndexing>();
- static_cast<void>(op_);
- return "SetMeshIndexing";
- }
- } // anonymous namespace
- OP_TRAIT_REG(SetMeshIndexing, SetMeshIndexing)
- .hash(SetMeshIndexing_hash_impl)
- .is_same_st(SetMeshIndexing_is_same_st_impl)
- .props(SetMeshIndexing_props_impl)
- .make_name(SetMeshIndexing_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(SetSubtensor);
-
- namespace {
- size_t SetSubtensor_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SetSubtensor>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool SetSubtensor_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<SetSubtensor>(),
- &&b_ = rhs_.cast_final_safe<SetSubtensor>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> SetSubtensor_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SetSubtensor>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string SetSubtensor_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SetSubtensor>();
- static_cast<void>(op_);
- return "SetSubtensor";
- }
- } // anonymous namespace
- OP_TRAIT_REG(SetSubtensor, SetSubtensor)
- .hash(SetSubtensor_hash_impl)
- .is_same_st(SetSubtensor_is_same_st_impl)
- .props(SetSubtensor_props_impl)
- .make_name(SetSubtensor_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(ShuffleRNG);
-
- namespace {
- size_t ShuffleRNG_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ShuffleRNG>();
- static_cast<void>(op_);
-
- return mgb::hash_pair_combine(
- mgb::hash(op_.dyn_typeinfo()),
- mgb::hash(op_.handle)
- );
- }
- bool ShuffleRNG_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<ShuffleRNG>(),
- &&b_ = rhs_.cast_final_safe<ShuffleRNG>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return a_.handle == b_.handle;
- }
- std::vector<std::pair<const char*, std::string>> ShuffleRNG_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ShuffleRNG>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("seed", std::to_string(op_.seed));
- props_.emplace_back("handle", std::to_string(op_.handle));
- return props_;
- }
- std::string ShuffleRNG_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<ShuffleRNG>();
- static_cast<void>(op_);
- return "ShuffleRNG";
- }
- } // anonymous namespace
- OP_TRAIT_REG(ShuffleRNG, ShuffleRNG)
- .hash(ShuffleRNG_hash_impl)
- .is_same_st(ShuffleRNG_is_same_st_impl)
- .props(ShuffleRNG_props_impl)
- .make_name(ShuffleRNG_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(SlidingWindowTranspose);
-
- namespace {
- size_t SlidingWindowTranspose_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SlidingWindowTranspose>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.out_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.out_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.pad_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.stride_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dilate_w));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.window_h));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.window_w));
- return val;
- }
- bool SlidingWindowTranspose_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<SlidingWindowTranspose>(),
- &&b_ = rhs_.cast_final_safe<SlidingWindowTranspose>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.out_h != b_.out_h) return false;
- if (a_.out_w != b_.out_w) return false;
- if (a_.pad_h != b_.pad_h) return false;
- if (a_.pad_w != b_.pad_w) return false;
- if (a_.stride_h != b_.stride_h) return false;
- if (a_.stride_w != b_.stride_w) return false;
- if (a_.dilate_h != b_.dilate_h) return false;
- if (a_.dilate_w != b_.dilate_w) return false;
- if (a_.window_h != b_.window_h) return false;
- if (a_.window_w != b_.window_w) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> SlidingWindowTranspose_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SlidingWindowTranspose>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("out_h", std::to_string(op_.out_h));
- props_.emplace_back("out_w", std::to_string(op_.out_w));
- props_.emplace_back("pad_h", std::to_string(op_.pad_h));
- props_.emplace_back("pad_w", std::to_string(op_.pad_w));
- props_.emplace_back("stride_h", std::to_string(op_.stride_h));
- props_.emplace_back("stride_w", std::to_string(op_.stride_w));
- props_.emplace_back("dilate_h", std::to_string(op_.dilate_h));
- props_.emplace_back("dilate_w", std::to_string(op_.dilate_w));
- props_.emplace_back("window_h", std::to_string(op_.window_h));
- props_.emplace_back("window_w", std::to_string(op_.window_w));
- return props_;
- }
- std::string SlidingWindowTranspose_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<SlidingWindowTranspose>();
- static_cast<void>(op_);
- return "SlidingWindowTranspose";
- }
- } // anonymous namespace
- OP_TRAIT_REG(SlidingWindowTranspose, SlidingWindowTranspose)
- .hash(SlidingWindowTranspose_hash_impl)
- .is_same_st(SlidingWindowTranspose_is_same_st_impl)
- .props(SlidingWindowTranspose_props_impl)
- .make_name(SlidingWindowTranspose_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Softmax);
-
- namespace {
- size_t Softmax_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Softmax>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- return val;
- }
- bool Softmax_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Softmax>(),
- &&b_ = rhs_.cast_final_safe<Softmax>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Softmax_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Softmax>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- return props_;
- }
- std::string Softmax_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Softmax>();
- static_cast<void>(op_);
- return "Softmax";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Softmax, Softmax)
- .hash(Softmax_hash_impl)
- .is_same_st(Softmax_is_same_st_impl)
- .props(Softmax_props_impl)
- .make_name(Softmax_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Split);
-
- namespace {
- size_t Split_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Split>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.axis));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.nsections));
- return val;
- }
- bool Split_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Split>(),
- &&b_ = rhs_.cast_final_safe<Split>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.axis != b_.axis) return false;
- if (a_.nsections != b_.nsections) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Split_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Split>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("axis", std::to_string(op_.axis));
- props_.emplace_back("nsections", std::to_string(op_.nsections));
- return props_;
- }
- std::string Split_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Split>();
- static_cast<void>(op_);
- return "Split";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Split, Split)
- .hash(Split_hash_impl)
- .is_same_st(Split_is_same_st_impl)
- .props(Split_props_impl)
- .make_name(Split_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(Subtensor);
-
- namespace {
- size_t Subtensor_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Subtensor>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.items));
- return val;
- }
- bool Subtensor_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<Subtensor>(),
- &&b_ = rhs_.cast_final_safe<Subtensor>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.items != b_.items) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> Subtensor_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Subtensor>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("items", "{std::vector}");
- return props_;
- }
- std::string Subtensor_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<Subtensor>();
- static_cast<void>(op_);
- return "Subtensor";
- }
- } // anonymous namespace
- OP_TRAIT_REG(Subtensor, Subtensor)
- .hash(Subtensor_hash_impl)
- .is_same_st(Subtensor_is_same_st_impl)
- .props(Subtensor_props_impl)
- .make_name(Subtensor_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(TQT);
-
- namespace {
- size_t TQT_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TQT>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.qmin));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.qmax));
- return val;
- }
- bool TQT_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<TQT>(),
- &&b_ = rhs_.cast_final_safe<TQT>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.qmin != b_.qmin) return false;
- if (a_.qmax != b_.qmax) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> TQT_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TQT>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("qmin", std::to_string(op_.qmin));
- props_.emplace_back("qmax", std::to_string(op_.qmax));
- return props_;
- }
- std::string TQT_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TQT>();
- static_cast<void>(op_);
- return "TQT";
- }
- } // anonymous namespace
- OP_TRAIT_REG(TQT, TQT)
- .hash(TQT_hash_impl)
- .is_same_st(TQT_is_same_st_impl)
- .props(TQT_props_impl)
- .make_name(TQT_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(TensorRTRuntime);
-
- namespace {
- size_t TensorRTRuntime_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TensorRTRuntime>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.buf));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.buf_size));
- return val;
- }
- bool TensorRTRuntime_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<TensorRTRuntime>(),
- &&b_ = rhs_.cast_final_safe<TensorRTRuntime>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.buf != b_.buf) return false;
- if (a_.buf_size != b_.buf_size) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> TensorRTRuntime_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TensorRTRuntime>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("buf", op_.buf);
- props_.emplace_back("buf_size", std::to_string(op_.buf_size));
- return props_;
- }
- std::string TensorRTRuntime_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TensorRTRuntime>();
- static_cast<void>(op_);
- return "TensorRTRuntime";
- }
- } // anonymous namespace
- OP_TRAIT_REG(TensorRTRuntime, TensorRTRuntime)
- .hash(TensorRTRuntime_hash_impl)
- .is_same_st(TensorRTRuntime_is_same_st_impl)
- .props(TensorRTRuntime_props_impl)
- .make_name(TensorRTRuntime_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(TopK);
-
- namespace {
- size_t TopK_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TopK>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.mode));
- return val;
- }
- bool TopK_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<TopK>(),
- &&b_ = rhs_.cast_final_safe<TopK>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.mode != b_.mode) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> TopK_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TopK>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.mode){
- case TopK::Mode::KTH_ONLY:
- props_.emplace_back("mode", "KTH_ONLY");
- break;
- case TopK::Mode::VALUE_IDX_NOSORT:
- props_.emplace_back("mode", "VALUE_IDX_NOSORT");
- break;
- case TopK::Mode::VALUE_IDX_SORTED:
- props_.emplace_back("mode", "VALUE_IDX_SORTED");
- break;
- default:
- props_.emplace_back("mode", "INVALID");
- break;
- }
- return props_;
- }
- std::string TopK_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TopK>();
- static_cast<void>(op_);
- return "TopK";
- }
- } // anonymous namespace
- OP_TRAIT_REG(TopK, TopK)
- .hash(TopK_hash_impl)
- .is_same_st(TopK_is_same_st_impl)
- .props(TopK_props_impl)
- .make_name(TopK_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(TypeCvt);
-
- namespace {
- size_t TypeCvt_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TypeCvt>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::hash(op_.dtype.handle()));
- return val;
- }
- bool TypeCvt_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<TypeCvt>(),
- &&b_ = rhs_.cast_final_safe<TypeCvt>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.dtype != b_.dtype) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> TypeCvt_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TypeCvt>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("dtype", op_.dtype.name());
- return props_;
- }
- std::string TypeCvt_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<TypeCvt>();
- static_cast<void>(op_);
- return "TypeCvt";
- }
- } // anonymous namespace
- OP_TRAIT_REG(TypeCvt, TypeCvt)
- .hash(TypeCvt_hash_impl)
- .is_same_st(TypeCvt_is_same_st_impl)
- .props(TypeCvt_props_impl)
- .make_name(TypeCvt_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(UniformRNG);
-
- namespace {
- size_t UniformRNG_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<UniformRNG>();
- static_cast<void>(op_);
-
- return mgb::hash_pair_combine(
- mgb::hash(op_.dyn_typeinfo()),
- mgb::hash_pair_combine(
- mgb::hash(op_.handle),
- mgb::hash(op_.dtype.enumv())
- )
- );
- }
- bool UniformRNG_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<UniformRNG>(),
- &&b_ = rhs_.cast_final_safe<UniformRNG>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- return a_.handle == b_.handle && a_.dtype == b_.dtype;
- }
- std::vector<std::pair<const char*, std::string>> UniformRNG_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<UniformRNG>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- props_.emplace_back("seed", std::to_string(op_.seed));
- props_.emplace_back("dtype", op_.dtype.name());
- props_.emplace_back("handle", std::to_string(op_.handle));
- return props_;
- }
- std::string UniformRNG_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<UniformRNG>();
- static_cast<void>(op_);
- return "UniformRNG";
- }
- } // anonymous namespace
- OP_TRAIT_REG(UniformRNG, UniformRNG)
- .hash(UniformRNG_hash_impl)
- .is_same_st(UniformRNG_is_same_st_impl)
- .props(UniformRNG_props_impl)
- .make_name(UniformRNG_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(WarpAffine);
-
- namespace {
- size_t WarpAffine_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<WarpAffine>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.imode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.border_mode));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.border_val));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- return val;
- }
- bool WarpAffine_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<WarpAffine>(),
- &&b_ = rhs_.cast_final_safe<WarpAffine>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.imode != b_.imode) return false;
- if (a_.border_mode != b_.border_mode) return false;
- if (a_.border_val != b_.border_val) return false;
- if (a_.format != b_.format) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> WarpAffine_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<WarpAffine>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.imode){
- case WarpAffine::InterpolationMode::NEAREST:
- props_.emplace_back("imode", "NEAREST");
- break;
- case WarpAffine::InterpolationMode::LINEAR:
- props_.emplace_back("imode", "LINEAR");
- break;
- case WarpAffine::InterpolationMode::AREA:
- props_.emplace_back("imode", "AREA");
- break;
- case WarpAffine::InterpolationMode::CUBIC:
- props_.emplace_back("imode", "CUBIC");
- break;
- case WarpAffine::InterpolationMode::LANCZOS4:
- props_.emplace_back("imode", "LANCZOS4");
- break;
- default:
- props_.emplace_back("imode", "INVALID");
- break;
- }
- switch (op_.border_mode){
- case WarpAffine::BorderMode::REPLICATE:
- props_.emplace_back("border_mode", "REPLICATE");
- break;
- case WarpAffine::BorderMode::REFLECT:
- props_.emplace_back("border_mode", "REFLECT");
- break;
- case WarpAffine::BorderMode::REFLECT_101:
- props_.emplace_back("border_mode", "REFLECT_101");
- break;
- case WarpAffine::BorderMode::WRAP:
- props_.emplace_back("border_mode", "WRAP");
- break;
- case WarpAffine::BorderMode::CONSTANT:
- props_.emplace_back("border_mode", "CONSTANT");
- break;
- case WarpAffine::BorderMode::TRANSPARENT:
- props_.emplace_back("border_mode", "TRANSPARENT");
- break;
- case WarpAffine::BorderMode::ISOLATED:
- props_.emplace_back("border_mode", "ISOLATED");
- break;
- default:
- props_.emplace_back("border_mode", "INVALID");
- break;
- }
- props_.emplace_back("border_val", std::to_string(op_.border_val));
- switch (op_.format){
- case WarpAffine::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case WarpAffine::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case WarpAffine::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case WarpAffine::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case WarpAffine::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case WarpAffine::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case WarpAffine::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case WarpAffine::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case WarpAffine::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case WarpAffine::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case WarpAffine::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case WarpAffine::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case WarpAffine::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case WarpAffine::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case WarpAffine::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case WarpAffine::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case WarpAffine::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case WarpAffine::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- return props_;
- }
- std::string WarpAffine_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<WarpAffine>();
- static_cast<void>(op_);
- return "WarpAffine";
- }
- } // anonymous namespace
- OP_TRAIT_REG(WarpAffine, WarpAffine)
- .hash(WarpAffine_hash_impl)
- .is_same_st(WarpAffine_is_same_st_impl)
- .props(WarpAffine_props_impl)
- .make_name(WarpAffine_make_name_impl);
-
- MGB_DYN_TYPE_OBJ_FINAL_IMPL(WarpPerspective);
-
- namespace {
- size_t WarpPerspective_hash_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<WarpPerspective>();
- static_cast<void>(op_);
- size_t val = mgb::hash(op_.dyn_typeinfo());
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.imode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.bmode));
- val = mgb::hash_pair_combine(val, mgb::enumhash()(op_.format));
- val = mgb::hash_pair_combine(val, mgb::hash(op_.border_val));
- return val;
- }
- bool WarpPerspective_is_same_st_impl(const OpDef& lhs_, const OpDef& rhs_) {
- auto &&a_ = lhs_.cast_final_safe<WarpPerspective>(),
- &&b_ = rhs_.cast_final_safe<WarpPerspective>();
- static_cast<void>(a_);
- static_cast<void>(b_);
- if (a_.imode != b_.imode) return false;
- if (a_.bmode != b_.bmode) return false;
- if (a_.format != b_.format) return false;
- if (a_.border_val != b_.border_val) return false;
- return true;
- }
- std::vector<std::pair<const char*, std::string>> WarpPerspective_props_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<WarpPerspective>();
- static_cast<void>(op_);
- std::vector<std::pair<const char*, std::string>> props_;
- switch (op_.imode){
- case WarpPerspective::InterpolationMode::NEAREST:
- props_.emplace_back("imode", "NEAREST");
- break;
- case WarpPerspective::InterpolationMode::LINEAR:
- props_.emplace_back("imode", "LINEAR");
- break;
- case WarpPerspective::InterpolationMode::AREA:
- props_.emplace_back("imode", "AREA");
- break;
- case WarpPerspective::InterpolationMode::CUBIC:
- props_.emplace_back("imode", "CUBIC");
- break;
- case WarpPerspective::InterpolationMode::LANCZOS4:
- props_.emplace_back("imode", "LANCZOS4");
- break;
- default:
- props_.emplace_back("imode", "INVALID");
- break;
- }
- switch (op_.bmode){
- case WarpPerspective::BorderMode::REPLICATE:
- props_.emplace_back("bmode", "REPLICATE");
- break;
- case WarpPerspective::BorderMode::REFLECT:
- props_.emplace_back("bmode", "REFLECT");
- break;
- case WarpPerspective::BorderMode::REFLECT_101:
- props_.emplace_back("bmode", "REFLECT_101");
- break;
- case WarpPerspective::BorderMode::WRAP:
- props_.emplace_back("bmode", "WRAP");
- break;
- case WarpPerspective::BorderMode::CONSTANT:
- props_.emplace_back("bmode", "CONSTANT");
- break;
- case WarpPerspective::BorderMode::TRANSPARENT:
- props_.emplace_back("bmode", "TRANSPARENT");
- break;
- case WarpPerspective::BorderMode::ISOLATED:
- props_.emplace_back("bmode", "ISOLATED");
- break;
- default:
- props_.emplace_back("bmode", "INVALID");
- break;
- }
- switch (op_.format){
- case WarpPerspective::Format::NCHW:
- props_.emplace_back("format", "NCHW");
- break;
- case WarpPerspective::Format::NHWC:
- props_.emplace_back("format", "NHWC");
- break;
- case WarpPerspective::Format::NHWCD4:
- props_.emplace_back("format", "NHWCD4");
- break;
- case WarpPerspective::Format::NCHW4:
- props_.emplace_back("format", "NCHW4");
- break;
- case WarpPerspective::Format::NCHW8:
- props_.emplace_back("format", "NCHW8");
- break;
- case WarpPerspective::Format::NCHW32:
- props_.emplace_back("format", "NCHW32");
- break;
- case WarpPerspective::Format::NCHW88:
- props_.emplace_back("format", "NCHW88");
- break;
- case WarpPerspective::Format::NCHW44:
- props_.emplace_back("format", "NCHW44");
- break;
- case WarpPerspective::Format::NCHW44_DOT:
- props_.emplace_back("format", "NCHW44_DOT");
- break;
- case WarpPerspective::Format::NCHW4_NCHW32:
- props_.emplace_back("format", "NCHW4_NCHW32");
- break;
- case WarpPerspective::Format::NCHW32_NCHW4:
- props_.emplace_back("format", "NCHW32_NCHW4");
- break;
- case WarpPerspective::Format::NCHW4_NCHW:
- props_.emplace_back("format", "NCHW4_NCHW");
- break;
- case WarpPerspective::Format::NHWC_NCHW:
- props_.emplace_back("format", "NHWC_NCHW");
- break;
- case WarpPerspective::Format::NHWC_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NHWC_NCHW4_IC_SMALL");
- break;
- case WarpPerspective::Format::NCHW_NCHW4_IC_SMALL:
- props_.emplace_back("format", "NCHW_NCHW4_IC_SMALL");
- break;
- case WarpPerspective::Format::CHWN4:
- props_.emplace_back("format", "CHWN4");
- break;
- case WarpPerspective::Format::NCHW64:
- props_.emplace_back("format", "NCHW64");
- break;
- case WarpPerspective::Format::NCHW4_NHWC:
- props_.emplace_back("format", "NCHW4_NHWC");
- break;
- default:
- props_.emplace_back("format", "INVALID");
- break;
- }
- props_.emplace_back("border_val", std::to_string(op_.border_val));
- return props_;
- }
- std::string WarpPerspective_make_name_impl(const OpDef& def_) {
- auto&& op_ = def_.cast_final_safe<WarpPerspective>();
- static_cast<void>(op_);
- return "WarpPerspective";
- }
- } // anonymous namespace
- OP_TRAIT_REG(WarpPerspective, WarpPerspective)
- .hash(WarpPerspective_hash_impl)
- .is_same_st(WarpPerspective_is_same_st_impl)
- .props(WarpPerspective_props_impl)
- .make_name(WarpPerspective_make_name_impl);
-
- // clang-format on
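
For reference, every registration above follows the same generated shape: a hash impl that folds each field into the typeinfo hash with mgb::hash_pair_combine, a field-by-field is_same_st impl, a props list of stringified fields, and a fixed make_name. The standalone sketch below only mirrors that folding/equality discipline outside MegEngine, using std::hash and a made-up ExampleOp struct; both the struct and the hash_combine helper are hypothetical illustrations, not part of the generated file above.

#include <cstddef>
#include <functional>
#include <string>

// Hypothetical op with two fields, standing in for a generated OpDef subclass.
struct ExampleOp {
    int axis;
    std::string name;
};

// Plays the role of mgb::hash_pair_combine here: mix a new field hash into the accumulator.
inline size_t hash_combine(size_t seed, size_t v) {
    return seed ^ (v + 0x9e3779b9u + (seed << 6) + (seed >> 2));
}

// Mirrors the *_hash_impl pattern: start from a per-type seed, then fold in each field.
size_t ExampleOp_hash_impl(const ExampleOp& op) {
    size_t val = std::hash<std::string>{}("ExampleOp");  // stands in for dyn_typeinfo()
    val = hash_combine(val, std::hash<int>{}(op.axis));
    val = hash_combine(val, std::hash<std::string>{}(op.name));
    return val;
}

// Mirrors the *_is_same_st_impl pattern: field-by-field equality.
bool ExampleOp_is_same_st_impl(const ExampleOp& a, const ExampleOp& b) {
    if (a.axis != b.axis) return false;
    if (a.name != b.name) return false;
    return true;
}

The actual implementations above rely on the mgb hashing helpers and OpDef::cast_final_safe; this sketch only illustrates the hashing and equality convention they encode.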