ge25519_nielsadd2.S 159 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702703704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511
661167116811691170117111721173117411751176117711781179118011811182118311841185118611871188118911901191119211931194119511961197119811991200120112021203120412051206120712081209121012111212121312141215121612171218121912201221122212231224122512261227122812291230123112321233123412351236123712381239124012411242124312441245124612471248124912501251125212531254125512561257125812591260126112621263126412651266126712681269127012711272127312741275127612771278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542
055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172217321742175217621772178217921802181218221832184218521862187218821892190219121922193219421952196219721982199220022012202220322042205220622072208220922102211221222132214221522162217221822192220222122222223222422252226222722282229223022312232223322342235223622372238223922402241224222432244224522462247224822492250225122522253225422552256225722582259226022612262226322642265226622672268226922702271227222732274227522762277227822792280228122822283228422852286228722882289229022912292229322942295229622972298229923002301230223032304230523062307230823092310231123122313231423152316231723182319232023212322232323242325232623272328232923302331233223332334233523362337233823392340234123422343234423452346234723482349235023512352235323542355235623572358235923602361236223632364236523662367236823692370237123722373237423752376237723782379238023812382238323842385238623872388238923902391239223932394239523962397239823992400240124022403240424052406240724082409241024112412241324142415241624172418241924202421242224232424242524262427242824292430243124322433243424352436243724382439244024412442244324442445244624472448244924502451245224532454245524562457245824592460246124622463246424652466246724682469247024712472247324742475247624772478247924802481248224832484248524862487248824892490249124922493249424952496249724982499250025012502250325042505250625072508250925102511251225132514251525162517251825192520252125222523252425252526252725282529253025312532253325342535253625372538253925402541254225432544254525462547254825492550255125522553255425552556255725582559256025612562256325642565256625672568256925702571257225732574257525762577257825792580258125822583258425852586258725882589259025912592259325942595259625972598259926002601260226032604260526062607260826092610261126122613261426152616261726182619262026212622262326242625262626272628262926302631263226332634263526362637263826392640264126422643264426452646264726482649265026512652265326542655265626572658265926602661266226632664266526662667266826692670267126722673267426752676267726782679268026812682268326842685268626872688268926902691269226932694269526962697269826992700270127022703270427052706270727082709271027112712271327142715271627172718271927202721272227232724272527262727272827292730273127322733273427352736273727382739274027412742274327442745274627472748274927502751275227532754275527562757275827592760276127622763276427652766276727682769277027712772277327742775277627772778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943
294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327732783279328032813282328332843285328632873288328932903291329232933294329532963297329832993300330133023303330433053306330733083309331033113312331333143315331633173318331933203321332233233324332533263327332833293330333133323333333433353336333733383339334033413342334333443345334633473348334933503351335233533354335533563357335833593360336133623363336433653366336733683369337033713372337333743375337633773378337933803381338233833384338533863387338833893390339133923393339433953396339733983399340034013402340334043405340634073408340934103411341234133414341534163417341834193420342134223423342434253426342734283429343034313432343334343435343634373438343934403441344234433444344534463447344834493450345134523453345434553456345734583459346034613462346334643465346634673468346934703471347234733474347534763477347834793480348134823483348434853486348734883489349034913492349334943495349634973498349935003501350235033504350535063507350835093510351135123513351435153516351735183519352035213522352335243525352635273528352935303531353235333534353535363537353835393540354135423543354435453546354735483549355035513552355335543555355635573558355935603561356235633564356535663567356835693570357135723573357435753576357735783579358035813582358335843585358635873588358935903591359235933594359535963597359835993600360136023603360436053606360736083609361036113612361336143615361636173618361936203621362236233624362536263627362836293630363136323633363436353636363736383639364036413642364336443645364636473648364936503651365236533654365536563657365836593660366136623663366436653666366736683669367036713672367336743675367636773678367936803681368236833684368536863687368836893690369136923693369436953696369736983699370037013702370337043705370637073708370937103711371237133714371537163717371837193720372137223723372437253726372737283729373037313732373337343735373637373738373937403741374237433744374537463747374837493750375137523753375437553756375737583759376037613762376337643765376637673768376937703771377237733774377537763777377837793780378137823783378437853786378737883789379037913792379337943795379637973798379938003801380238033804380538063807380838093810381138123813381438153816381738183819382038213822382338243825382638273828382938303831383
238333834383538363837383838393840384138423843384438453846384738483849385038513852385338543855385638573858385938603861386238633864386538663867386838693870387138723873387438753876387738783879388038813882388338843885388638873888388938903891389238933894389538963897389838993900390139023903390439053906390739083909391039113912391339143915391639173918391939203921392239233924392539263927392839293930393139323933393439353936393739383939394039413942394339443945394639473948394939503951395239533954395539563957395839593960396139623963396439653966396739683969397039713972397339743975397639773978397939803981398239833984398539863987398839893990399139923993399439953996399739983999400040014002400340044005400640074008400940104011401240134014401540164017401840194020402140224023402440254026402740284029403040314032403340344035403640374038403940404041404240434044404540464047404840494050405140524053405440554056405740584059406040614062406340644065406640674068406940704071407240734074407540764077407840794080408140824083408440854086408740884089409040914092409340944095409640974098409941004101410241034104410541064107410841094110411141124113411441154116411741184119412041214122412341244125412641274128412941304131413241334134413541364137413841394140414141424143414441454146414741484149415041514152415341544155415641574158415941604161416241634164416541664167416841694170417141724173417441754176417741784179418041814182418341844185418641874188418941904191419241934194419541964197419841994200420142024203420442054206420742084209421042114212421342144215421642174218421942204221422242234224422542264227422842294230423142324233423442354236423742384239424042414242424342444245424642474248424942504251425242534254425542564257425842594260426142624263426442654266426742684269427042714272427342744275427642774278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047
214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477747784779478047814782478347844785478647874788478947904791479247934794479547964797479847994800480148024803480448054806480748084809481048114812481348144815481648174818481948204821482248234824482548264827482848294830483148324833483448354836483748384839484048414842484348444845484648474848484948504851485248534854485548564857485848594860486148624863486448654866486748684869487048714872487348744875487648774878487948804881488248834884488548864887488848894890489148924893489448954896489748984899490049014902490349044905490649074908490949104911491249134914491549164917491849194920492149224923492449254926492749284929493049314932493349344935493649374938493949404941494249434944494549464947494849494950495149524953495449554956495749584959496049614962496349644965496649674968496949704971497249734974497549764977497849794980498149824983498449854986498749884989499049914992499349944995499649974998499950005001500250035004500550065007500850095010501150125013501450155016501750185019502050215022502350245025502650275028502950305031503250335034503550365037503850395040504150425043504450455046504750485049505050515052505350545055505650575058505950605061506250635064506550665067506850695070507150725073507450755076507750785079508050815082508350845085508650875088508950905091509250935094509550965097509850995100510151025103510451055106510751085109511051115112511351145115511651175118511951205121512251235124512551265127512851295130513151325133513451355136513751385139514051415142514351445145514651475148514951505151515251535154515551565157515851595160516151625163516451655166516751685169517051715172517351745175517651775178517951805181518251835184518551865187518851895190519151925193519451955196519751985199520052015202520352045205520652075208520952105211521252135214521552165217521852195220522152225223522452255226522752285229523052315232523352345235523652375238523952405241524252435244524552465247524852495250525152525253525452555256525752585259526052615262526352645265526652675268526952705271527252735274527552765277527852795280528152825283528452855286528752885289529052915292529352945295529652975298529953005301530253035304530553065307530853095310531153125313531453155316531753185319532053215322532353245325532653275328532953305331533253335334533553365337533853395340534153425343534453455346534753485349535053515352535353545355535653575358535953605361536253635364536553665367536853695370537153725373537453755376537753785379538053815382538353845385538653875388538953905391539253935394539553965397539853995400540154025403540454055406540754085409541054115412541354145415541654175418541954205421542254235424542554265427542854295430543154325433543454355436543754385439544054415442544354445445544654475448544954505451545254535454545554565457545854595460546154625463546454655466546754685469547054715472547354745475547654775478547954805481548254835484548554865487548854895490549154925493549454955496549754985499550055015502550355045505550655075508550955105511551255135514551555165517551855195520552155225523552455255526552755285529553055315532553355345535553655375538553955405541554255435544554555465547554855495550555155525553555455555556555755585559556055615562556355645565556655675568556955705571557255735574557555765577557855795580558155825583558455855586558755885589559055915592559355945595559655975598559956005601560256035604560556065607560856095
61056115612561356145615561656175618561956205621562256235624562556265627562856295630563156325633563456355636563756385639564056415642564356445645564656475648564956505651565256535654565556565657565856595660566156625663566456655666566756685669567056715672567356745675567656775678567956805681568256835684568556865687568856895690569156925693569456955696569756985699570057015702570357045705570657075708570957105711571257135714571557165717571857195720572157225723572457255726572757285729573057315732573357345735573657375738573957405741574257435744574557465747574857495750575157525753575457555756575757585759576057615762576357645765576657675768576957705771577257735774577557765777577857795780578157825783578457855786578757885789579057915792579357945795579657975798579958005801580258035804580558065807580858095810581158125813581458155816581758185819582058215822582358245825582658275828582958305831583258335834583558365837583858395840584158425843584458455846584758485849585058515852585358545855585658575858585958605861586258635864586558665867586858695870587158725873587458755876587758785879588058815882588358845885588658875888588958905891589258935894589558965897589858995900590159025903590459055906590759085909591059115912591359145915591659175918591959205921592259235924592559265927592859295930593159325933593459355936593759385939594059415942594359445945594659475948594959505951595259535954595559565957595859595960596159625963596459655966596759685969597059715972597359745975597659775978597959805981598259835984598559865987598859895990599159925993599459955996599759985999600060016002600360046005600660076008600960106011601260136014601560166017601860196020602160226023602460256026602760286029603060316032603360346035603660376038603960406041604260436044604560466047604860496050605160526053605460556056605760586059606060616062606360646065606660676068606960706071607260736074607560766077607860796080608160826083608460856086608760886089609060916092609360946095609660976098609961006101610261036104610561066107610861096110611161126113611461156116611761186119612061216122612361246125612661276128612961306131613261336134613561366137613861396140614161426143614461456146614761486149615061516152
  1. # qhasm: int64 rp
  2. # qhasm: int64 qp
  3. # qhasm: input rp
  4. # qhasm: input qp
  5. # qhasm: int64 caller1
  6. # qhasm: int64 caller2
  7. # qhasm: int64 caller3
  8. # qhasm: int64 caller4
  9. # qhasm: int64 caller5
  10. # qhasm: int64 caller6
  11. # qhasm: int64 caller7
  12. # qhasm: caller caller1
  13. # qhasm: caller caller2
  14. # qhasm: caller caller3
  15. # qhasm: caller caller4
  16. # qhasm: caller caller5
  17. # qhasm: caller caller6
  18. # qhasm: caller caller7
  19. # qhasm: stack64 caller1_stack
  20. # qhasm: stack64 caller2_stack
  21. # qhasm: stack64 caller3_stack
  22. # qhasm: stack64 caller4_stack
  23. # qhasm: stack64 caller5_stack
  24. # qhasm: stack64 caller6_stack
  25. # qhasm: stack64 caller7_stack
  26. # qhasm: int64 a0
  27. # qhasm: int64 a1
  28. # qhasm: int64 a2
  29. # qhasm: int64 a3
  30. # qhasm: int64 a4
  31. # qhasm: stack64 a0_stack
  32. # qhasm: stack64 a1_stack
  33. # qhasm: stack64 a2_stack
  34. # qhasm: stack64 a3_stack
  35. # qhasm: stack64 a4_stack
  36. # qhasm: int64 b0
  37. # qhasm: int64 b1
  38. # qhasm: int64 b2
  39. # qhasm: int64 b3
  40. # qhasm: int64 b4
  41. # qhasm: stack64 b0_stack
  42. # qhasm: stack64 b1_stack
  43. # qhasm: stack64 b2_stack
  44. # qhasm: stack64 b3_stack
  45. # qhasm: stack64 b4_stack
  46. # qhasm: int64 c0
  47. # qhasm: int64 c1
  48. # qhasm: int64 c2
  49. # qhasm: int64 c3
  50. # qhasm: int64 c4
  51. # qhasm: stack64 c0_stack
  52. # qhasm: stack64 c1_stack
  53. # qhasm: stack64 c2_stack
  54. # qhasm: stack64 c3_stack
  55. # qhasm: stack64 c4_stack
  56. # qhasm: int64 d0
  57. # qhasm: int64 d1
  58. # qhasm: int64 d2
  59. # qhasm: int64 d3
  60. # qhasm: int64 d4
  61. # qhasm: stack64 d0_stack
  62. # qhasm: stack64 d1_stack
  63. # qhasm: stack64 d2_stack
  64. # qhasm: stack64 d3_stack
  65. # qhasm: stack64 d4_stack
  66. # qhasm: int64 e0
  67. # qhasm: int64 e1
  68. # qhasm: int64 e2
  69. # qhasm: int64 e3
  70. # qhasm: int64 e4
  71. # qhasm: stack64 e0_stack
  72. # qhasm: stack64 e1_stack
  73. # qhasm: stack64 e2_stack
  74. # qhasm: stack64 e3_stack
  75. # qhasm: stack64 e4_stack
  76. # qhasm: int64 f0
  77. # qhasm: int64 f1
  78. # qhasm: int64 f2
  79. # qhasm: int64 f3
  80. # qhasm: int64 f4
  81. # qhasm: stack64 f0_stack
  82. # qhasm: stack64 f1_stack
  83. # qhasm: stack64 f2_stack
  84. # qhasm: stack64 f3_stack
  85. # qhasm: stack64 f4_stack
  86. # qhasm: int64 g0
  87. # qhasm: int64 g1
  88. # qhasm: int64 g2
  89. # qhasm: int64 g3
  90. # qhasm: int64 g4
  91. # qhasm: stack64 g0_stack
  92. # qhasm: stack64 g1_stack
  93. # qhasm: stack64 g2_stack
  94. # qhasm: stack64 g3_stack
  95. # qhasm: stack64 g4_stack
  96. # qhasm: int64 h0
  97. # qhasm: int64 h1
  98. # qhasm: int64 h2
  99. # qhasm: int64 h3
  100. # qhasm: int64 h4
  101. # qhasm: stack64 h0_stack
  102. # qhasm: stack64 h1_stack
  103. # qhasm: stack64 h2_stack
  104. # qhasm: stack64 h3_stack
  105. # qhasm: stack64 h4_stack
  106. # qhasm: int64 qt0
  107. # qhasm: int64 qt1
  108. # qhasm: int64 qt2
  109. # qhasm: int64 qt3
  110. # qhasm: int64 qt4
  111. # qhasm: stack64 qt0_stack
  112. # qhasm: stack64 qt1_stack
  113. # qhasm: stack64 qt2_stack
  114. # qhasm: stack64 qt3_stack
  115. # qhasm: stack64 qt4_stack
  116. # qhasm: int64 t10
  117. # qhasm: int64 t11
  118. # qhasm: int64 t12
  119. # qhasm: int64 t13
  120. # qhasm: int64 t14
  121. # qhasm: stack64 t10_stack
  122. # qhasm: stack64 t11_stack
  123. # qhasm: stack64 t12_stack
  124. # qhasm: stack64 t13_stack
  125. # qhasm: stack64 t14_stack
  126. # qhasm: int64 t20
  127. # qhasm: int64 t21
  128. # qhasm: int64 t22
  129. # qhasm: int64 t23
  130. # qhasm: int64 t24
  131. # qhasm: stack64 t20_stack
  132. # qhasm: stack64 t21_stack
  133. # qhasm: stack64 t22_stack
  134. # qhasm: stack64 t23_stack
  135. # qhasm: stack64 t24_stack
  136. # qhasm: int64 rx0
  137. # qhasm: int64 rx1
  138. # qhasm: int64 rx2
  139. # qhasm: int64 rx3
  140. # qhasm: int64 rx4
  141. # qhasm: int64 ry0
  142. # qhasm: int64 ry1
  143. # qhasm: int64 ry2
  144. # qhasm: int64 ry3
  145. # qhasm: int64 ry4
  146. # qhasm: int64 rz0
  147. # qhasm: int64 rz1
  148. # qhasm: int64 rz2
  149. # qhasm: int64 rz3
  150. # qhasm: int64 rz4
  151. # qhasm: int64 rt0
  152. # qhasm: int64 rt1
  153. # qhasm: int64 rt2
  154. # qhasm: int64 rt3
  155. # qhasm: int64 rt4
  156. # qhasm: int64 mulr01
  157. # qhasm: int64 mulr11
  158. # qhasm: int64 mulr21
  159. # qhasm: int64 mulr31
  160. # qhasm: int64 mulr41
  161. # qhasm: int64 mulrax
  162. # qhasm: int64 mulrdx
  163. # qhasm: int64 mult
  164. # qhasm: int64 mulredmask
  165. # qhasm: stack64 mulx219_stack
  166. # qhasm: stack64 mulx319_stack
  167. # qhasm: stack64 mulx419_stack
  168. # qhasm: enter CRYPTO_NAMESPACE(batch_ge25519_nielsadd2)
  169. .text
  170. .p2align 5
  171. .globl _CRYPTO_NAMESPACE(batch_ge25519_nielsadd2)
  172. .globl CRYPTO_NAMESPACE(batch_ge25519_nielsadd2)
  173. _CRYPTO_NAMESPACE(batch_ge25519_nielsadd2):
  174. CRYPTO_NAMESPACE(batch_ge25519_nielsadd2):
  175. mov %rsp,%r11
  176. and $31,%r11
  177. add $256,%r11
  178. sub %r11,%rsp
  179. # qhasm: caller1_stack = caller1
  180. # asm 1: movq <caller1=int64#9,>caller1_stack=stack64#1
  181. # asm 2: movq <caller1=%r11,>caller1_stack=0(%rsp)
  182. movq %r11,0(%rsp)
  183. # qhasm: caller2_stack = caller2
  184. # asm 1: movq <caller2=int64#10,>caller2_stack=stack64#2
  185. # asm 2: movq <caller2=%r12,>caller2_stack=8(%rsp)
  186. movq %r12,8(%rsp)
  187. # qhasm: caller3_stack = caller3
  188. # asm 1: movq <caller3=int64#11,>caller3_stack=stack64#3
  189. # asm 2: movq <caller3=%r13,>caller3_stack=16(%rsp)
  190. movq %r13,16(%rsp)
  191. # qhasm: caller4_stack = caller4
  192. # asm 1: movq <caller4=int64#12,>caller4_stack=stack64#4
  193. # asm 2: movq <caller4=%r14,>caller4_stack=24(%rsp)
  194. movq %r14,24(%rsp)
  195. # qhasm: caller5_stack = caller5
  196. # asm 1: movq <caller5=int64#13,>caller5_stack=stack64#5
  197. # asm 2: movq <caller5=%r15,>caller5_stack=32(%rsp)
  198. movq %r15,32(%rsp)
  199. # qhasm: caller6_stack = caller6
  200. # asm 1: movq <caller6=int64#14,>caller6_stack=stack64#6
  201. # asm 2: movq <caller6=%rbx,>caller6_stack=40(%rsp)
  202. movq %rbx,40(%rsp)
  203. # qhasm: caller7_stack = caller7
  204. # asm 1: movq <caller7=int64#15,>caller7_stack=stack64#7
  205. # asm 2: movq <caller7=%rbp,>caller7_stack=48(%rsp)
  206. movq %rbp,48(%rsp)
  207. # qhasm: a0 = *(uint64 *)(rp + 40)
  208. # asm 1: movq 40(<rp=int64#1),>a0=int64#3
  209. # asm 2: movq 40(<rp=%rdi),>a0=%rdx
  210. movq 40(%rdi),%rdx
  211. # qhasm: a1 = *(uint64 *)(rp + 48)
  212. # asm 1: movq 48(<rp=int64#1),>a1=int64#4
  213. # asm 2: movq 48(<rp=%rdi),>a1=%rcx
  214. movq 48(%rdi),%rcx
  215. # qhasm: a2 = *(uint64 *)(rp + 56)
  216. # asm 1: movq 56(<rp=int64#1),>a2=int64#5
  217. # asm 2: movq 56(<rp=%rdi),>a2=%r8
  218. movq 56(%rdi),%r8
  219. # qhasm: a3 = *(uint64 *)(rp + 64)
  220. # asm 1: movq 64(<rp=int64#1),>a3=int64#6
  221. # asm 2: movq 64(<rp=%rdi),>a3=%r9
  222. movq 64(%rdi),%r9
  223. # qhasm: a4 = *(uint64 *)(rp + 72)
  224. # asm 1: movq 72(<rp=int64#1),>a4=int64#7
  225. # asm 2: movq 72(<rp=%rdi),>a4=%rax
  226. movq 72(%rdi),%rax
  227. # qhasm: b0 = a0
  228. # asm 1: mov <a0=int64#3,>b0=int64#8
  229. # asm 2: mov <a0=%rdx,>b0=%r10
  230. mov %rdx,%r10
  231. # qhasm: b1 = a1
  232. # asm 1: mov <a1=int64#4,>b1=int64#9
  233. # asm 2: mov <a1=%rcx,>b1=%r11
  234. mov %rcx,%r11
  235. # qhasm: b2 = a2
  236. # asm 1: mov <a2=int64#5,>b2=int64#10
  237. # asm 2: mov <a2=%r8,>b2=%r12
  238. mov %r8,%r12
  239. # qhasm: b3 = a3
  240. # asm 1: mov <a3=int64#6,>b3=int64#11
  241. # asm 2: mov <a3=%r9,>b3=%r13
  242. mov %r9,%r13
  243. # qhasm: b4 = a4
  244. # asm 1: mov <a4=int64#7,>b4=int64#12
  245. # asm 2: mov <a4=%rax,>b4=%r14
  246. mov %rax,%r14
  247. # qhasm: a0 += *(uint64 *) &CRYPTO_NAMESPACE(batch_2P0)
  248. # asm 1: add CRYPTO_NAMESPACE(batch_2P0),<a0=int64#3
  249. # asm 2: add CRYPTO_NAMESPACE(batch_2P0),<a0=%rdx
  250. add CRYPTO_NAMESPACE(batch_2P0)(%rip),%rdx
  251. # qhasm: a1 += *(uint64 *) &CRYPTO_NAMESPACE(batch_2P1234)
  252. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<a1=int64#4
  253. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<a1=%rcx
  254. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%rcx
  255. # qhasm: a2 += *(uint64 *) &CRYPTO_NAMESPACE(batch_2P1234)
  256. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<a2=int64#5
  257. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<a2=%r8
  258. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%r8
  259. # qhasm: a3 += *(uint64 *) &CRYPTO_NAMESPACE(batch_2P1234)
  260. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<a3=int64#6
  261. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<a3=%r9
  262. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%r9
  263. # qhasm: a4 += *(uint64 *) &CRYPTO_NAMESPACE(batch_2P1234)
  264. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<a4=int64#7
  265. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<a4=%rax
  266. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%rax
  267. # qhasm: b0 += *(uint64 *) (rp + 0)
  268. # asm 1: addq 0(<rp=int64#1),<b0=int64#8
  269. # asm 2: addq 0(<rp=%rdi),<b0=%r10
  270. addq 0(%rdi),%r10
  271. # qhasm: b1 += *(uint64 *) (rp + 8)
  272. # asm 1: addq 8(<rp=int64#1),<b1=int64#9
  273. # asm 2: addq 8(<rp=%rdi),<b1=%r11
  274. addq 8(%rdi),%r11
  275. # qhasm: b2 += *(uint64 *) (rp + 16)
  276. # asm 1: addq 16(<rp=int64#1),<b2=int64#10
  277. # asm 2: addq 16(<rp=%rdi),<b2=%r12
  278. addq 16(%rdi),%r12
  279. # qhasm: b3 += *(uint64 *) (rp + 24)
  280. # asm 1: addq 24(<rp=int64#1),<b3=int64#11
  281. # asm 2: addq 24(<rp=%rdi),<b3=%r13
  282. addq 24(%rdi),%r13
  283. # qhasm: b4 += *(uint64 *) (rp + 32)
  284. # asm 1: addq 32(<rp=int64#1),<b4=int64#12
  285. # asm 2: addq 32(<rp=%rdi),<b4=%r14
  286. addq 32(%rdi),%r14
  287. # qhasm: a0 -= *(uint64 *) (rp + 0)
  288. # asm 1: subq 0(<rp=int64#1),<a0=int64#3
  289. # asm 2: subq 0(<rp=%rdi),<a0=%rdx
  290. subq 0(%rdi),%rdx
  291. # qhasm: a1 -= *(uint64 *) (rp + 8)
  292. # asm 1: subq 8(<rp=int64#1),<a1=int64#4
  293. # asm 2: subq 8(<rp=%rdi),<a1=%rcx
  294. subq 8(%rdi),%rcx
  295. # qhasm: a2 -= *(uint64 *) (rp + 16)
  296. # asm 1: subq 16(<rp=int64#1),<a2=int64#5
  297. # asm 2: subq 16(<rp=%rdi),<a2=%r8
  298. subq 16(%rdi),%r8
  299. # qhasm: a3 -= *(uint64 *) (rp + 24)
  300. # asm 1: subq 24(<rp=int64#1),<a3=int64#6
  301. # asm 2: subq 24(<rp=%rdi),<a3=%r9
  302. subq 24(%rdi),%r9
  303. # qhasm: a4 -= *(uint64 *) (rp + 32)
  304. # asm 1: subq 32(<rp=int64#1),<a4=int64#7
  305. # asm 2: subq 32(<rp=%rdi),<a4=%rax
  306. subq 32(%rdi),%rax
  307. # qhasm: a0_stack = a0
  308. # asm 1: movq <a0=int64#3,>a0_stack=stack64#8
  309. # asm 2: movq <a0=%rdx,>a0_stack=56(%rsp)
  310. movq %rdx,56(%rsp)
  311. # qhasm: a1_stack = a1
  312. # asm 1: movq <a1=int64#4,>a1_stack=stack64#9
  313. # asm 2: movq <a1=%rcx,>a1_stack=64(%rsp)
  314. movq %rcx,64(%rsp)
  315. # qhasm: a2_stack = a2
  316. # asm 1: movq <a2=int64#5,>a2_stack=stack64#10
  317. # asm 2: movq <a2=%r8,>a2_stack=72(%rsp)
  318. movq %r8,72(%rsp)
  319. # qhasm: a3_stack = a3
  320. # asm 1: movq <a3=int64#6,>a3_stack=stack64#11
  321. # asm 2: movq <a3=%r9,>a3_stack=80(%rsp)
  322. movq %r9,80(%rsp)
  323. # qhasm: a4_stack = a4
  324. # asm 1: movq <a4=int64#7,>a4_stack=stack64#12
  325. # asm 2: movq <a4=%rax,>a4_stack=88(%rsp)
  326. movq %rax,88(%rsp)
  327. # qhasm: b0_stack = b0
  328. # asm 1: movq <b0=int64#8,>b0_stack=stack64#13
  329. # asm 2: movq <b0=%r10,>b0_stack=96(%rsp)
  330. movq %r10,96(%rsp)
  331. # qhasm: b1_stack = b1
  332. # asm 1: movq <b1=int64#9,>b1_stack=stack64#14
  333. # asm 2: movq <b1=%r11,>b1_stack=104(%rsp)
  334. movq %r11,104(%rsp)
  335. # qhasm: b2_stack = b2
  336. # asm 1: movq <b2=int64#10,>b2_stack=stack64#15
  337. # asm 2: movq <b2=%r12,>b2_stack=112(%rsp)
  338. movq %r12,112(%rsp)
  339. # qhasm: b3_stack = b3
  340. # asm 1: movq <b3=int64#11,>b3_stack=stack64#16
  341. # asm 2: movq <b3=%r13,>b3_stack=120(%rsp)
  342. movq %r13,120(%rsp)
  343. # qhasm: b4_stack = b4
  344. # asm 1: movq <b4=int64#12,>b4_stack=stack64#17
  345. # asm 2: movq <b4=%r14,>b4_stack=128(%rsp)
  346. movq %r14,128(%rsp)
  347. # qhasm: mulrax = a3_stack
  348. # asm 1: movq <a3_stack=stack64#11,>mulrax=int64#3
  349. # asm 2: movq <a3_stack=80(%rsp),>mulrax=%rdx
  350. movq 80(%rsp),%rdx
  351. # qhasm: mulrax *= 19
  352. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  353. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  354. imulq $19,%rdx,%rax
  355. # qhasm: mulx319_stack = mulrax
  356. # asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#18
  357. # asm 2: movq <mulrax=%rax,>mulx319_stack=136(%rsp)
  358. movq %rax,136(%rsp)
  359. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 16)
  360. # asm 1: mulq 16(<qp=int64#2)
  361. # asm 2: mulq 16(<qp=%rsi)
  362. mulq 16(%rsi)
  363. # qhasm: a0 = mulrax
  364. # asm 1: mov <mulrax=int64#7,>a0=int64#4
  365. # asm 2: mov <mulrax=%rax,>a0=%rcx
  366. mov %rax,%rcx
  367. # qhasm: mulr01 = mulrdx
  368. # asm 1: mov <mulrdx=int64#3,>mulr01=int64#5
  369. # asm 2: mov <mulrdx=%rdx,>mulr01=%r8
  370. mov %rdx,%r8
  371. # qhasm: mulrax = a4_stack
  372. # asm 1: movq <a4_stack=stack64#12,>mulrax=int64#3
  373. # asm 2: movq <a4_stack=88(%rsp),>mulrax=%rdx
  374. movq 88(%rsp),%rdx
  375. # qhasm: mulrax *= 19
  376. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  377. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  378. imulq $19,%rdx,%rax
  379. # qhasm: mulx419_stack = mulrax
  380. # asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#19
  381. # asm 2: movq <mulrax=%rax,>mulx419_stack=144(%rsp)
  382. movq %rax,144(%rsp)
  383. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 8)
  384. # asm 1: mulq 8(<qp=int64#2)
  385. # asm 2: mulq 8(<qp=%rsi)
  386. mulq 8(%rsi)
  387. # qhasm: carry? a0 += mulrax
  388. # asm 1: add <mulrax=int64#7,<a0=int64#4
  389. # asm 2: add <mulrax=%rax,<a0=%rcx
  390. add %rax,%rcx
  391. # qhasm: mulr01 += mulrdx + carry
  392. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  393. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  394. adc %rdx,%r8
  395. # qhasm: mulrax = a0_stack
  396. # asm 1: movq <a0_stack=stack64#8,>mulrax=int64#7
  397. # asm 2: movq <a0_stack=56(%rsp),>mulrax=%rax
  398. movq 56(%rsp),%rax
  399. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 0)
  400. # asm 1: mulq 0(<qp=int64#2)
  401. # asm 2: mulq 0(<qp=%rsi)
  402. mulq 0(%rsi)
  403. # qhasm: carry? a0 += mulrax
  404. # asm 1: add <mulrax=int64#7,<a0=int64#4
  405. # asm 2: add <mulrax=%rax,<a0=%rcx
  406. add %rax,%rcx
  407. # qhasm: mulr01 += mulrdx + carry
  408. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  409. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  410. adc %rdx,%r8
  411. # qhasm: mulrax = a0_stack
  412. # asm 1: movq <a0_stack=stack64#8,>mulrax=int64#7
  413. # asm 2: movq <a0_stack=56(%rsp),>mulrax=%rax
  414. movq 56(%rsp),%rax
  415. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 8)
  416. # asm 1: mulq 8(<qp=int64#2)
  417. # asm 2: mulq 8(<qp=%rsi)
  418. mulq 8(%rsi)
  419. # qhasm: a1 = mulrax
  420. # asm 1: mov <mulrax=int64#7,>a1=int64#6
  421. # asm 2: mov <mulrax=%rax,>a1=%r9
  422. mov %rax,%r9
  423. # qhasm: mulr11 = mulrdx
  424. # asm 1: mov <mulrdx=int64#3,>mulr11=int64#8
  425. # asm 2: mov <mulrdx=%rdx,>mulr11=%r10
  426. mov %rdx,%r10
  427. # qhasm: mulrax = a0_stack
  428. # asm 1: movq <a0_stack=stack64#8,>mulrax=int64#7
  429. # asm 2: movq <a0_stack=56(%rsp),>mulrax=%rax
  430. movq 56(%rsp),%rax
  431. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 16)
  432. # asm 1: mulq 16(<qp=int64#2)
  433. # asm 2: mulq 16(<qp=%rsi)
  434. mulq 16(%rsi)
  435. # qhasm: a2 = mulrax
  436. # asm 1: mov <mulrax=int64#7,>a2=int64#9
  437. # asm 2: mov <mulrax=%rax,>a2=%r11
  438. mov %rax,%r11
  439. # qhasm: mulr21 = mulrdx
  440. # asm 1: mov <mulrdx=int64#3,>mulr21=int64#10
  441. # asm 2: mov <mulrdx=%rdx,>mulr21=%r12
  442. mov %rdx,%r12
  443. # qhasm: mulrax = a0_stack
  444. # asm 1: movq <a0_stack=stack64#8,>mulrax=int64#7
  445. # asm 2: movq <a0_stack=56(%rsp),>mulrax=%rax
  446. movq 56(%rsp),%rax
  447. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 24)
  448. # asm 1: mulq 24(<qp=int64#2)
  449. # asm 2: mulq 24(<qp=%rsi)
  450. mulq 24(%rsi)
  451. # qhasm: a3 = mulrax
  452. # asm 1: mov <mulrax=int64#7,>a3=int64#11
  453. # asm 2: mov <mulrax=%rax,>a3=%r13
  454. mov %rax,%r13
  455. # qhasm: mulr31 = mulrdx
  456. # asm 1: mov <mulrdx=int64#3,>mulr31=int64#12
  457. # asm 2: mov <mulrdx=%rdx,>mulr31=%r14
  458. mov %rdx,%r14
  459. # qhasm: mulrax = a0_stack
  460. # asm 1: movq <a0_stack=stack64#8,>mulrax=int64#7
  461. # asm 2: movq <a0_stack=56(%rsp),>mulrax=%rax
  462. movq 56(%rsp),%rax
  463. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 32)
  464. # asm 1: mulq 32(<qp=int64#2)
  465. # asm 2: mulq 32(<qp=%rsi)
  466. mulq 32(%rsi)
  467. # qhasm: a4 = mulrax
  468. # asm 1: mov <mulrax=int64#7,>a4=int64#13
  469. # asm 2: mov <mulrax=%rax,>a4=%r15
  470. mov %rax,%r15
  471. # qhasm: mulr41 = mulrdx
  472. # asm 1: mov <mulrdx=int64#3,>mulr41=int64#14
  473. # asm 2: mov <mulrdx=%rdx,>mulr41=%rbx
  474. mov %rdx,%rbx
  475. # qhasm: mulrax = a1_stack
  476. # asm 1: movq <a1_stack=stack64#9,>mulrax=int64#7
  477. # asm 2: movq <a1_stack=64(%rsp),>mulrax=%rax
  478. movq 64(%rsp),%rax
  479. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 0)
  480. # asm 1: mulq 0(<qp=int64#2)
  481. # asm 2: mulq 0(<qp=%rsi)
  482. mulq 0(%rsi)
  483. # qhasm: carry? a1 += mulrax
  484. # asm 1: add <mulrax=int64#7,<a1=int64#6
  485. # asm 2: add <mulrax=%rax,<a1=%r9
  486. add %rax,%r9
  487. # qhasm: mulr11 += mulrdx + carry
  488. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  489. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  490. adc %rdx,%r10
  491. # qhasm: mulrax = a1_stack
  492. # asm 1: movq <a1_stack=stack64#9,>mulrax=int64#7
  493. # asm 2: movq <a1_stack=64(%rsp),>mulrax=%rax
  494. movq 64(%rsp),%rax
  495. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 8)
  496. # asm 1: mulq 8(<qp=int64#2)
  497. # asm 2: mulq 8(<qp=%rsi)
  498. mulq 8(%rsi)
  499. # qhasm: carry? a2 += mulrax
  500. # asm 1: add <mulrax=int64#7,<a2=int64#9
  501. # asm 2: add <mulrax=%rax,<a2=%r11
  502. add %rax,%r11
  503. # qhasm: mulr21 += mulrdx + carry
  504. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  505. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  506. adc %rdx,%r12
  507. # qhasm: mulrax = a1_stack
  508. # asm 1: movq <a1_stack=stack64#9,>mulrax=int64#7
  509. # asm 2: movq <a1_stack=64(%rsp),>mulrax=%rax
  510. movq 64(%rsp),%rax
  511. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 16)
  512. # asm 1: mulq 16(<qp=int64#2)
  513. # asm 2: mulq 16(<qp=%rsi)
  514. mulq 16(%rsi)
  515. # qhasm: carry? a3 += mulrax
  516. # asm 1: add <mulrax=int64#7,<a3=int64#11
  517. # asm 2: add <mulrax=%rax,<a3=%r13
  518. add %rax,%r13
  519. # qhasm: mulr31 += mulrdx + carry
  520. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  521. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  522. adc %rdx,%r14
  523. # qhasm: mulrax = a1_stack
  524. # asm 1: movq <a1_stack=stack64#9,>mulrax=int64#7
  525. # asm 2: movq <a1_stack=64(%rsp),>mulrax=%rax
  526. movq 64(%rsp),%rax
  527. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 24)
  528. # asm 1: mulq 24(<qp=int64#2)
  529. # asm 2: mulq 24(<qp=%rsi)
  530. mulq 24(%rsi)
  531. # qhasm: carry? a4 += mulrax
  532. # asm 1: add <mulrax=int64#7,<a4=int64#13
  533. # asm 2: add <mulrax=%rax,<a4=%r15
  534. add %rax,%r15
  535. # qhasm: mulr41 += mulrdx + carry
  536. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  537. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  538. adc %rdx,%rbx
  539. # qhasm: mulrax = a1_stack
  540. # asm 1: movq <a1_stack=stack64#9,>mulrax=int64#3
  541. # asm 2: movq <a1_stack=64(%rsp),>mulrax=%rdx
  542. movq 64(%rsp),%rdx
  543. # qhasm: mulrax *= 19
  544. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  545. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  546. imulq $19,%rdx,%rax
  547. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 32)
  548. # asm 1: mulq 32(<qp=int64#2)
  549. # asm 2: mulq 32(<qp=%rsi)
  550. mulq 32(%rsi)
  551. # qhasm: carry? a0 += mulrax
  552. # asm 1: add <mulrax=int64#7,<a0=int64#4
  553. # asm 2: add <mulrax=%rax,<a0=%rcx
  554. add %rax,%rcx
  555. # qhasm: mulr01 += mulrdx + carry
  556. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  557. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  558. adc %rdx,%r8
  559. # qhasm: mulrax = a2_stack
  560. # asm 1: movq <a2_stack=stack64#10,>mulrax=int64#7
  561. # asm 2: movq <a2_stack=72(%rsp),>mulrax=%rax
  562. movq 72(%rsp),%rax
  563. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 0)
  564. # asm 1: mulq 0(<qp=int64#2)
  565. # asm 2: mulq 0(<qp=%rsi)
  566. mulq 0(%rsi)
  567. # qhasm: carry? a2 += mulrax
  568. # asm 1: add <mulrax=int64#7,<a2=int64#9
  569. # asm 2: add <mulrax=%rax,<a2=%r11
  570. add %rax,%r11
  571. # qhasm: mulr21 += mulrdx + carry
  572. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  573. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  574. adc %rdx,%r12
  575. # qhasm: mulrax = a2_stack
  576. # asm 1: movq <a2_stack=stack64#10,>mulrax=int64#7
  577. # asm 2: movq <a2_stack=72(%rsp),>mulrax=%rax
  578. movq 72(%rsp),%rax
  579. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 8)
  580. # asm 1: mulq 8(<qp=int64#2)
  581. # asm 2: mulq 8(<qp=%rsi)
  582. mulq 8(%rsi)
  583. # qhasm: carry? a3 += mulrax
  584. # asm 1: add <mulrax=int64#7,<a3=int64#11
  585. # asm 2: add <mulrax=%rax,<a3=%r13
  586. add %rax,%r13
  587. # qhasm: mulr31 += mulrdx + carry
  588. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  589. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  590. adc %rdx,%r14
  591. # qhasm: mulrax = a2_stack
  592. # asm 1: movq <a2_stack=stack64#10,>mulrax=int64#7
  593. # asm 2: movq <a2_stack=72(%rsp),>mulrax=%rax
  594. movq 72(%rsp),%rax
  595. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 16)
  596. # asm 1: mulq 16(<qp=int64#2)
  597. # asm 2: mulq 16(<qp=%rsi)
  598. mulq 16(%rsi)
  599. # qhasm: carry? a4 += mulrax
  600. # asm 1: add <mulrax=int64#7,<a4=int64#13
  601. # asm 2: add <mulrax=%rax,<a4=%r15
  602. add %rax,%r15
  603. # qhasm: mulr41 += mulrdx + carry
  604. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  605. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  606. adc %rdx,%rbx
  607. # qhasm: mulrax = a2_stack
  608. # asm 1: movq <a2_stack=stack64#10,>mulrax=int64#3
  609. # asm 2: movq <a2_stack=72(%rsp),>mulrax=%rdx
  610. movq 72(%rsp),%rdx
  611. # qhasm: mulrax *= 19
  612. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  613. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  614. imulq $19,%rdx,%rax
  615. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 24)
  616. # asm 1: mulq 24(<qp=int64#2)
  617. # asm 2: mulq 24(<qp=%rsi)
  618. mulq 24(%rsi)
  619. # qhasm: carry? a0 += mulrax
  620. # asm 1: add <mulrax=int64#7,<a0=int64#4
  621. # asm 2: add <mulrax=%rax,<a0=%rcx
  622. add %rax,%rcx
  623. # qhasm: mulr01 += mulrdx + carry
  624. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  625. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  626. adc %rdx,%r8
  627. # qhasm: mulrax = a2_stack
  628. # asm 1: movq <a2_stack=stack64#10,>mulrax=int64#3
  629. # asm 2: movq <a2_stack=72(%rsp),>mulrax=%rdx
  630. movq 72(%rsp),%rdx
  631. # qhasm: mulrax *= 19
  632. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  633. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  634. imulq $19,%rdx,%rax
  635. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 32)
  636. # asm 1: mulq 32(<qp=int64#2)
  637. # asm 2: mulq 32(<qp=%rsi)
  638. mulq 32(%rsi)
  639. # qhasm: carry? a1 += mulrax
  640. # asm 1: add <mulrax=int64#7,<a1=int64#6
  641. # asm 2: add <mulrax=%rax,<a1=%r9
  642. add %rax,%r9
  643. # qhasm: mulr11 += mulrdx + carry
  644. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  645. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  646. adc %rdx,%r10
  647. # qhasm: mulrax = a3_stack
  648. # asm 1: movq <a3_stack=stack64#11,>mulrax=int64#7
  649. # asm 2: movq <a3_stack=80(%rsp),>mulrax=%rax
  650. movq 80(%rsp),%rax
  651. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 0)
  652. # asm 1: mulq 0(<qp=int64#2)
  653. # asm 2: mulq 0(<qp=%rsi)
  654. mulq 0(%rsi)
  655. # qhasm: carry? a3 += mulrax
  656. # asm 1: add <mulrax=int64#7,<a3=int64#11
  657. # asm 2: add <mulrax=%rax,<a3=%r13
  658. add %rax,%r13
  659. # qhasm: mulr31 += mulrdx + carry
  660. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  661. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  662. adc %rdx,%r14
  663. # qhasm: mulrax = a3_stack
  664. # asm 1: movq <a3_stack=stack64#11,>mulrax=int64#7
  665. # asm 2: movq <a3_stack=80(%rsp),>mulrax=%rax
  666. movq 80(%rsp),%rax
  667. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 8)
  668. # asm 1: mulq 8(<qp=int64#2)
  669. # asm 2: mulq 8(<qp=%rsi)
  670. mulq 8(%rsi)
  671. # qhasm: carry? a4 += mulrax
  672. # asm 1: add <mulrax=int64#7,<a4=int64#13
  673. # asm 2: add <mulrax=%rax,<a4=%r15
  674. add %rax,%r15
  675. # qhasm: mulr41 += mulrdx + carry
  676. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  677. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  678. adc %rdx,%rbx
  679. # qhasm: mulrax = mulx319_stack
  680. # asm 1: movq <mulx319_stack=stack64#18,>mulrax=int64#7
  681. # asm 2: movq <mulx319_stack=136(%rsp),>mulrax=%rax
  682. movq 136(%rsp),%rax
  683. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 24)
  684. # asm 1: mulq 24(<qp=int64#2)
  685. # asm 2: mulq 24(<qp=%rsi)
  686. mulq 24(%rsi)
  687. # qhasm: carry? a1 += mulrax
  688. # asm 1: add <mulrax=int64#7,<a1=int64#6
  689. # asm 2: add <mulrax=%rax,<a1=%r9
  690. add %rax,%r9
  691. # qhasm: mulr11 += mulrdx + carry
  692. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  693. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  694. adc %rdx,%r10
  695. # qhasm: mulrax = mulx319_stack
  696. # asm 1: movq <mulx319_stack=stack64#18,>mulrax=int64#7
  697. # asm 2: movq <mulx319_stack=136(%rsp),>mulrax=%rax
  698. movq 136(%rsp),%rax
  699. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 32)
  700. # asm 1: mulq 32(<qp=int64#2)
  701. # asm 2: mulq 32(<qp=%rsi)
  702. mulq 32(%rsi)
  703. # qhasm: carry? a2 += mulrax
  704. # asm 1: add <mulrax=int64#7,<a2=int64#9
  705. # asm 2: add <mulrax=%rax,<a2=%r11
  706. add %rax,%r11
  707. # qhasm: mulr21 += mulrdx + carry
  708. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  709. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  710. adc %rdx,%r12
  711. # qhasm: mulrax = a4_stack
  712. # asm 1: movq <a4_stack=stack64#12,>mulrax=int64#7
  713. # asm 2: movq <a4_stack=88(%rsp),>mulrax=%rax
  714. movq 88(%rsp),%rax
  715. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 0)
  716. # asm 1: mulq 0(<qp=int64#2)
  717. # asm 2: mulq 0(<qp=%rsi)
  718. mulq 0(%rsi)
  719. # qhasm: carry? a4 += mulrax
  720. # asm 1: add <mulrax=int64#7,<a4=int64#13
  721. # asm 2: add <mulrax=%rax,<a4=%r15
  722. add %rax,%r15
  723. # qhasm: mulr41 += mulrdx + carry
  724. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  725. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  726. adc %rdx,%rbx
  727. # qhasm: mulrax = mulx419_stack
  728. # asm 1: movq <mulx419_stack=stack64#19,>mulrax=int64#7
  729. # asm 2: movq <mulx419_stack=144(%rsp),>mulrax=%rax
  730. movq 144(%rsp),%rax
  731. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 16)
  732. # asm 1: mulq 16(<qp=int64#2)
  733. # asm 2: mulq 16(<qp=%rsi)
  734. mulq 16(%rsi)
  735. # qhasm: carry? a1 += mulrax
  736. # asm 1: add <mulrax=int64#7,<a1=int64#6
  737. # asm 2: add <mulrax=%rax,<a1=%r9
  738. add %rax,%r9
  739. # qhasm: mulr11 += mulrdx + carry
  740. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  741. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  742. adc %rdx,%r10
  743. # qhasm: mulrax = mulx419_stack
  744. # asm 1: movq <mulx419_stack=stack64#19,>mulrax=int64#7
  745. # asm 2: movq <mulx419_stack=144(%rsp),>mulrax=%rax
  746. movq 144(%rsp),%rax
  747. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 24)
  748. # asm 1: mulq 24(<qp=int64#2)
  749. # asm 2: mulq 24(<qp=%rsi)
  750. mulq 24(%rsi)
  751. # qhasm: carry? a2 += mulrax
  752. # asm 1: add <mulrax=int64#7,<a2=int64#9
  753. # asm 2: add <mulrax=%rax,<a2=%r11
  754. add %rax,%r11
  755. # qhasm: mulr21 += mulrdx + carry
  756. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  757. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  758. adc %rdx,%r12
  759. # qhasm: mulrax = mulx419_stack
  760. # asm 1: movq <mulx419_stack=stack64#19,>mulrax=int64#7
  761. # asm 2: movq <mulx419_stack=144(%rsp),>mulrax=%rax
  762. movq 144(%rsp),%rax
  763. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 32)
  764. # asm 1: mulq 32(<qp=int64#2)
  765. # asm 2: mulq 32(<qp=%rsi)
  766. mulq 32(%rsi)
  767. # qhasm: carry? a3 += mulrax
  768. # asm 1: add <mulrax=int64#7,<a3=int64#11
  769. # asm 2: add <mulrax=%rax,<a3=%r13
  770. add %rax,%r13
  771. # qhasm: mulr31 += mulrdx + carry
  772. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  773. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  774. adc %rdx,%r14
  775. # qhasm: mulredmask = *(uint64 *) &CRYPTO_NAMESPACE(batch_REDMASK51)
  776. # asm 1: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=int64#3
  777. # asm 2: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=%rdx
  778. movq CRYPTO_NAMESPACE(batch_REDMASK51)(%rip),%rdx
  779. # qhasm: mulr01 = (mulr01.a0) << 13
  780. # asm 1: shld $13,<a0=int64#4,<mulr01=int64#5
  781. # asm 2: shld $13,<a0=%rcx,<mulr01=%r8
  782. shld $13,%rcx,%r8
  783. # qhasm: a0 &= mulredmask
  784. # asm 1: and <mulredmask=int64#3,<a0=int64#4
  785. # asm 2: and <mulredmask=%rdx,<a0=%rcx
  786. and %rdx,%rcx
  787. # qhasm: mulr11 = (mulr11.a1) << 13
  788. # asm 1: shld $13,<a1=int64#6,<mulr11=int64#8
  789. # asm 2: shld $13,<a1=%r9,<mulr11=%r10
  790. shld $13,%r9,%r10
  791. # qhasm: a1 &= mulredmask
  792. # asm 1: and <mulredmask=int64#3,<a1=int64#6
  793. # asm 2: and <mulredmask=%rdx,<a1=%r9
  794. and %rdx,%r9
  795. # qhasm: a1 += mulr01
  796. # asm 1: add <mulr01=int64#5,<a1=int64#6
  797. # asm 2: add <mulr01=%r8,<a1=%r9
  798. add %r8,%r9
  799. # qhasm: mulr21 = (mulr21.a2) << 13
  800. # asm 1: shld $13,<a2=int64#9,<mulr21=int64#10
  801. # asm 2: shld $13,<a2=%r11,<mulr21=%r12
  802. shld $13,%r11,%r12
  803. # qhasm: a2 &= mulredmask
  804. # asm 1: and <mulredmask=int64#3,<a2=int64#9
  805. # asm 2: and <mulredmask=%rdx,<a2=%r11
  806. and %rdx,%r11
  807. # qhasm: a2 += mulr11
  808. # asm 1: add <mulr11=int64#8,<a2=int64#9
  809. # asm 2: add <mulr11=%r10,<a2=%r11
  810. add %r10,%r11
  811. # qhasm: mulr31 = (mulr31.a3) << 13
  812. # asm 1: shld $13,<a3=int64#11,<mulr31=int64#12
  813. # asm 2: shld $13,<a3=%r13,<mulr31=%r14
  814. shld $13,%r13,%r14
  815. # qhasm: a3 &= mulredmask
  816. # asm 1: and <mulredmask=int64#3,<a3=int64#11
  817. # asm 2: and <mulredmask=%rdx,<a3=%r13
  818. and %rdx,%r13
  819. # qhasm: a3 += mulr21
  820. # asm 1: add <mulr21=int64#10,<a3=int64#11
  821. # asm 2: add <mulr21=%r12,<a3=%r13
  822. add %r12,%r13
  823. # qhasm: mulr41 = (mulr41.a4) << 13
  824. # asm 1: shld $13,<a4=int64#13,<mulr41=int64#14
  825. # asm 2: shld $13,<a4=%r15,<mulr41=%rbx
  826. shld $13,%r15,%rbx
  827. # qhasm: a4 &= mulredmask
  828. # asm 1: and <mulredmask=int64#3,<a4=int64#13
  829. # asm 2: and <mulredmask=%rdx,<a4=%r15
  830. and %rdx,%r15
  831. # qhasm: a4 += mulr31
  832. # asm 1: add <mulr31=int64#12,<a4=int64#13
  833. # asm 2: add <mulr31=%r14,<a4=%r15
  834. add %r14,%r15
  835. # qhasm: mulr41 = mulr41 * 19
  836. # asm 1: imulq $19,<mulr41=int64#14,>mulr41=int64#5
  837. # asm 2: imulq $19,<mulr41=%rbx,>mulr41=%r8
  838. imulq $19,%rbx,%r8
  839. # qhasm: a0 += mulr41
  840. # asm 1: add <mulr41=int64#5,<a0=int64#4
  841. # asm 2: add <mulr41=%r8,<a0=%rcx
  842. add %r8,%rcx
  843. # qhasm: mult = a0
  844. # asm 1: mov <a0=int64#4,>mult=int64#5
  845. # asm 2: mov <a0=%rcx,>mult=%r8
  846. mov %rcx,%r8
  847. # qhasm: (uint64) mult >>= 51
  848. # asm 1: shr $51,<mult=int64#5
  849. # asm 2: shr $51,<mult=%r8
  850. shr $51,%r8
  851. # qhasm: mult += a1
  852. # asm 1: add <a1=int64#6,<mult=int64#5
  853. # asm 2: add <a1=%r9,<mult=%r8
  854. add %r9,%r8
  855. # qhasm: a1 = mult
  856. # asm 1: mov <mult=int64#5,>a1=int64#6
  857. # asm 2: mov <mult=%r8,>a1=%r9
  858. mov %r8,%r9
  859. # qhasm: (uint64) mult >>= 51
  860. # asm 1: shr $51,<mult=int64#5
  861. # asm 2: shr $51,<mult=%r8
  862. shr $51,%r8
  863. # qhasm: a0 &= mulredmask
  864. # asm 1: and <mulredmask=int64#3,<a0=int64#4
  865. # asm 2: and <mulredmask=%rdx,<a0=%rcx
  866. and %rdx,%rcx
  867. # qhasm: mult += a2
  868. # asm 1: add <a2=int64#9,<mult=int64#5
  869. # asm 2: add <a2=%r11,<mult=%r8
  870. add %r11,%r8
  871. # qhasm: a2 = mult
  872. # asm 1: mov <mult=int64#5,>a2=int64#7
  873. # asm 2: mov <mult=%r8,>a2=%rax
  874. mov %r8,%rax
  875. # qhasm: (uint64) mult >>= 51
  876. # asm 1: shr $51,<mult=int64#5
  877. # asm 2: shr $51,<mult=%r8
  878. shr $51,%r8
  879. # qhasm: a1 &= mulredmask
  880. # asm 1: and <mulredmask=int64#3,<a1=int64#6
  881. # asm 2: and <mulredmask=%rdx,<a1=%r9
  882. and %rdx,%r9
  883. # qhasm: mult += a3
  884. # asm 1: add <a3=int64#11,<mult=int64#5
  885. # asm 2: add <a3=%r13,<mult=%r8
  886. add %r13,%r8
  887. # qhasm: a3 = mult
  888. # asm 1: mov <mult=int64#5,>a3=int64#8
  889. # asm 2: mov <mult=%r8,>a3=%r10
  890. mov %r8,%r10
  891. # qhasm: (uint64) mult >>= 51
  892. # asm 1: shr $51,<mult=int64#5
  893. # asm 2: shr $51,<mult=%r8
  894. shr $51,%r8
  895. # qhasm: a2 &= mulredmask
  896. # asm 1: and <mulredmask=int64#3,<a2=int64#7
  897. # asm 2: and <mulredmask=%rdx,<a2=%rax
  898. and %rdx,%rax
  899. # qhasm: mult += a4
  900. # asm 1: add <a4=int64#13,<mult=int64#5
  901. # asm 2: add <a4=%r15,<mult=%r8
  902. add %r15,%r8
  903. # qhasm: a4 = mult
  904. # asm 1: mov <mult=int64#5,>a4=int64#9
  905. # asm 2: mov <mult=%r8,>a4=%r11
  906. mov %r8,%r11
  907. # qhasm: (uint64) mult >>= 51
  908. # asm 1: shr $51,<mult=int64#5
  909. # asm 2: shr $51,<mult=%r8
  910. shr $51,%r8
  911. # qhasm: a3 &= mulredmask
  912. # asm 1: and <mulredmask=int64#3,<a3=int64#8
  913. # asm 2: and <mulredmask=%rdx,<a3=%r10
  914. and %rdx,%r10
  915. # qhasm: mult *= 19
  916. # asm 1: imulq $19,<mult=int64#5,>mult=int64#5
  917. # asm 2: imulq $19,<mult=%r8,>mult=%r8
  918. imulq $19,%r8,%r8
  919. # qhasm: a0 += mult
  920. # asm 1: add <mult=int64#5,<a0=int64#4
  921. # asm 2: add <mult=%r8,<a0=%rcx
  922. add %r8,%rcx
  923. # qhasm: a4 &= mulredmask
  924. # asm 1: and <mulredmask=int64#3,<a4=int64#9
  925. # asm 2: and <mulredmask=%rdx,<a4=%r11
  926. and %rdx,%r11
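# a0..a4 are carry-reduced again; they are spilled to 56(%rsp)..88(%rsp) so the
# registers can be reused for the next multiplication, and are reloaded further
# down when the sum and difference with the second product are formed.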
  927. # qhasm: a0_stack = a0
  928. # asm 1: movq <a0=int64#4,>a0_stack=stack64#8
  929. # asm 2: movq <a0=%rcx,>a0_stack=56(%rsp)
  930. movq %rcx,56(%rsp)
  931. # qhasm: a1_stack = a1
  932. # asm 1: movq <a1=int64#6,>a1_stack=stack64#9
  933. # asm 2: movq <a1=%r9,>a1_stack=64(%rsp)
  934. movq %r9,64(%rsp)
  935. # qhasm: a2_stack = a2
  936. # asm 1: movq <a2=int64#7,>a2_stack=stack64#10
  937. # asm 2: movq <a2=%rax,>a2_stack=72(%rsp)
  938. movq %rax,72(%rsp)
  939. # qhasm: a3_stack = a3
  940. # asm 1: movq <a3=int64#8,>a3_stack=stack64#11
  941. # asm 2: movq <a3=%r10,>a3_stack=80(%rsp)
  942. movq %r10,80(%rsp)
  943. # qhasm: a4_stack = a4
  944. # asm 1: movq <a4=int64#9,>a4_stack=stack64#12
  945. # asm 2: movq <a4=%r11,>a4_stack=88(%rsp)
  946. movq %r11,88(%rsp)
  947. # qhasm: mulrax = b3_stack
  948. # asm 1: movq <b3_stack=stack64#16,>mulrax=int64#3
  949. # asm 2: movq <b3_stack=120(%rsp),>mulrax=%rdx
  950. movq 120(%rsp),%rdx
  951. # qhasm: mulrax *= 19
  952. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  953. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  954. imulq $19,%rdx,%rax
  955. # qhasm: mulx319_stack = mulrax
  956. # asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#18
  957. # asm 2: movq <mulrax=%rax,>mulx319_stack=136(%rsp)
  958. movq %rax,136(%rsp)
  959. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 56)
  960. # asm 1: mulq 56(<qp=int64#2)
  961. # asm 2: mulq 56(<qp=%rsi)
  962. mulq 56(%rsi)
  963. # qhasm: e0 = mulrax
  964. # asm 1: mov <mulrax=int64#7,>e0=int64#4
  965. # asm 2: mov <mulrax=%rax,>e0=%rcx
  966. mov %rax,%rcx
  967. # qhasm: mulr01 = mulrdx
  968. # asm 1: mov <mulrdx=int64#3,>mulr01=int64#5
  969. # asm 2: mov <mulrdx=%rdx,>mulr01=%r8
  970. mov %rdx,%r8
  971. # qhasm: mulrax = b4_stack
  972. # asm 1: movq <b4_stack=stack64#17,>mulrax=int64#3
  973. # asm 2: movq <b4_stack=128(%rsp),>mulrax=%rdx
  974. movq 128(%rsp),%rdx
  975. # qhasm: mulrax *= 19
  976. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  977. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  978. imulq $19,%rdx,%rax
  979. # qhasm: mulx419_stack = mulrax
  980. # asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#19
  981. # asm 2: movq <mulrax=%rax,>mulx419_stack=144(%rsp)
  982. movq %rax,144(%rsp)
  983. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 48)
  984. # asm 1: mulq 48(<qp=int64#2)
  985. # asm 2: mulq 48(<qp=%rsi)
  986. mulq 48(%rsi)
  987. # qhasm: carry? e0 += mulrax
  988. # asm 1: add <mulrax=int64#7,<e0=int64#4
  989. # asm 2: add <mulrax=%rax,<e0=%rcx
  990. add %rax,%rcx
  991. # qhasm: mulr01 += mulrdx + carry
  992. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  993. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  994. adc %rdx,%r8
  995. # qhasm: mulrax = b0_stack
  996. # asm 1: movq <b0_stack=stack64#13,>mulrax=int64#7
  997. # asm 2: movq <b0_stack=96(%rsp),>mulrax=%rax
  998. movq 96(%rsp),%rax
  999. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 40)
  1000. # asm 1: mulq 40(<qp=int64#2)
  1001. # asm 2: mulq 40(<qp=%rsi)
  1002. mulq 40(%rsi)
  1003. # qhasm: carry? e0 += mulrax
  1004. # asm 1: add <mulrax=int64#7,<e0=int64#4
  1005. # asm 2: add <mulrax=%rax,<e0=%rcx
  1006. add %rax,%rcx
  1007. # qhasm: mulr01 += mulrdx + carry
  1008. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  1009. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  1010. adc %rdx,%r8
  1011. # qhasm: mulrax = b0_stack
  1012. # asm 1: movq <b0_stack=stack64#13,>mulrax=int64#7
  1013. # asm 2: movq <b0_stack=96(%rsp),>mulrax=%rax
  1014. movq 96(%rsp),%rax
  1015. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 48)
  1016. # asm 1: mulq 48(<qp=int64#2)
  1017. # asm 2: mulq 48(<qp=%rsi)
  1018. mulq 48(%rsi)
  1019. # qhasm: e1 = mulrax
  1020. # asm 1: mov <mulrax=int64#7,>e1=int64#6
  1021. # asm 2: mov <mulrax=%rax,>e1=%r9
  1022. mov %rax,%r9
  1023. # qhasm: mulr11 = mulrdx
  1024. # asm 1: mov <mulrdx=int64#3,>mulr11=int64#8
  1025. # asm 2: mov <mulrdx=%rdx,>mulr11=%r10
  1026. mov %rdx,%r10
  1027. # qhasm: mulrax = b0_stack
  1028. # asm 1: movq <b0_stack=stack64#13,>mulrax=int64#7
  1029. # asm 2: movq <b0_stack=96(%rsp),>mulrax=%rax
  1030. movq 96(%rsp),%rax
  1031. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 56)
  1032. # asm 1: mulq 56(<qp=int64#2)
  1033. # asm 2: mulq 56(<qp=%rsi)
  1034. mulq 56(%rsi)
  1035. # qhasm: e2 = mulrax
  1036. # asm 1: mov <mulrax=int64#7,>e2=int64#9
  1037. # asm 2: mov <mulrax=%rax,>e2=%r11
  1038. mov %rax,%r11
  1039. # qhasm: mulr21 = mulrdx
  1040. # asm 1: mov <mulrdx=int64#3,>mulr21=int64#10
  1041. # asm 2: mov <mulrdx=%rdx,>mulr21=%r12
  1042. mov %rdx,%r12
  1043. # qhasm: mulrax = b0_stack
  1044. # asm 1: movq <b0_stack=stack64#13,>mulrax=int64#7
  1045. # asm 2: movq <b0_stack=96(%rsp),>mulrax=%rax
  1046. movq 96(%rsp),%rax
  1047. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 64)
  1048. # asm 1: mulq 64(<qp=int64#2)
  1049. # asm 2: mulq 64(<qp=%rsi)
  1050. mulq 64(%rsi)
  1051. # qhasm: e3 = mulrax
  1052. # asm 1: mov <mulrax=int64#7,>e3=int64#11
  1053. # asm 2: mov <mulrax=%rax,>e3=%r13
  1054. mov %rax,%r13
  1055. # qhasm: mulr31 = mulrdx
  1056. # asm 1: mov <mulrdx=int64#3,>mulr31=int64#12
  1057. # asm 2: mov <mulrdx=%rdx,>mulr31=%r14
  1058. mov %rdx,%r14
  1059. # qhasm: mulrax = b0_stack
  1060. # asm 1: movq <b0_stack=stack64#13,>mulrax=int64#7
  1061. # asm 2: movq <b0_stack=96(%rsp),>mulrax=%rax
  1062. movq 96(%rsp),%rax
  1063. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 72)
  1064. # asm 1: mulq 72(<qp=int64#2)
  1065. # asm 2: mulq 72(<qp=%rsi)
  1066. mulq 72(%rsi)
  1067. # qhasm: e4 = mulrax
  1068. # asm 1: mov <mulrax=int64#7,>e4=int64#13
  1069. # asm 2: mov <mulrax=%rax,>e4=%r15
  1070. mov %rax,%r15
  1071. # qhasm: mulr41 = mulrdx
  1072. # asm 1: mov <mulrdx=int64#3,>mulr41=int64#14
  1073. # asm 2: mov <mulrdx=%rdx,>mulr41=%rbx
  1074. mov %rdx,%rbx
  1075. # qhasm: mulrax = b1_stack
  1076. # asm 1: movq <b1_stack=stack64#14,>mulrax=int64#7
  1077. # asm 2: movq <b1_stack=104(%rsp),>mulrax=%rax
  1078. movq 104(%rsp),%rax
  1079. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 40)
  1080. # asm 1: mulq 40(<qp=int64#2)
  1081. # asm 2: mulq 40(<qp=%rsi)
  1082. mulq 40(%rsi)
  1083. # qhasm: carry? e1 += mulrax
  1084. # asm 1: add <mulrax=int64#7,<e1=int64#6
  1085. # asm 2: add <mulrax=%rax,<e1=%r9
  1086. add %rax,%r9
  1087. # qhasm: mulr11 += mulrdx + carry
  1088. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  1089. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  1090. adc %rdx,%r10
  1091. # qhasm: mulrax = b1_stack
  1092. # asm 1: movq <b1_stack=stack64#14,>mulrax=int64#7
  1093. # asm 2: movq <b1_stack=104(%rsp),>mulrax=%rax
  1094. movq 104(%rsp),%rax
  1095. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 48)
  1096. # asm 1: mulq 48(<qp=int64#2)
  1097. # asm 2: mulq 48(<qp=%rsi)
  1098. mulq 48(%rsi)
  1099. # qhasm: carry? e2 += mulrax
  1100. # asm 1: add <mulrax=int64#7,<e2=int64#9
  1101. # asm 2: add <mulrax=%rax,<e2=%r11
  1102. add %rax,%r11
  1103. # qhasm: mulr21 += mulrdx + carry
  1104. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  1105. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  1106. adc %rdx,%r12
  1107. # qhasm: mulrax = b1_stack
  1108. # asm 1: movq <b1_stack=stack64#14,>mulrax=int64#7
  1109. # asm 2: movq <b1_stack=104(%rsp),>mulrax=%rax
  1110. movq 104(%rsp),%rax
  1111. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 56)
  1112. # asm 1: mulq 56(<qp=int64#2)
  1113. # asm 2: mulq 56(<qp=%rsi)
  1114. mulq 56(%rsi)
  1115. # qhasm: carry? e3 += mulrax
  1116. # asm 1: add <mulrax=int64#7,<e3=int64#11
  1117. # asm 2: add <mulrax=%rax,<e3=%r13
  1118. add %rax,%r13
  1119. # qhasm: mulr31 += mulrdx + carry
  1120. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  1121. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  1122. adc %rdx,%r14
  1123. # qhasm: mulrax = b1_stack
  1124. # asm 1: movq <b1_stack=stack64#14,>mulrax=int64#7
  1125. # asm 2: movq <b1_stack=104(%rsp),>mulrax=%rax
  1126. movq 104(%rsp),%rax
  1127. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 64)
  1128. # asm 1: mulq 64(<qp=int64#2)
  1129. # asm 2: mulq 64(<qp=%rsi)
  1130. mulq 64(%rsi)
  1131. # qhasm: carry? e4 += mulrax
  1132. # asm 1: add <mulrax=int64#7,<e4=int64#13
  1133. # asm 2: add <mulrax=%rax,<e4=%r15
  1134. add %rax,%r15
  1135. # qhasm: mulr41 += mulrdx + carry
  1136. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  1137. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  1138. adc %rdx,%rbx
  1139. # qhasm: mulrax = b1_stack
  1140. # asm 1: movq <b1_stack=stack64#14,>mulrax=int64#3
  1141. # asm 2: movq <b1_stack=104(%rsp),>mulrax=%rdx
  1142. movq 104(%rsp),%rdx
  1143. # qhasm: mulrax *= 19
  1144. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1145. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1146. imulq $19,%rdx,%rax
  1147. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 72)
  1148. # asm 1: mulq 72(<qp=int64#2)
  1149. # asm 2: mulq 72(<qp=%rsi)
  1150. mulq 72(%rsi)
  1151. # qhasm: carry? e0 += mulrax
  1152. # asm 1: add <mulrax=int64#7,<e0=int64#4
  1153. # asm 2: add <mulrax=%rax,<e0=%rcx
  1154. add %rax,%rcx
  1155. # qhasm: mulr01 += mulrdx + carry
  1156. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  1157. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  1158. adc %rdx,%r8
  1159. # qhasm: mulrax = b2_stack
  1160. # asm 1: movq <b2_stack=stack64#15,>mulrax=int64#7
  1161. # asm 2: movq <b2_stack=112(%rsp),>mulrax=%rax
  1162. movq 112(%rsp),%rax
  1163. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 40)
  1164. # asm 1: mulq 40(<qp=int64#2)
  1165. # asm 2: mulq 40(<qp=%rsi)
  1166. mulq 40(%rsi)
  1167. # qhasm: carry? e2 += mulrax
  1168. # asm 1: add <mulrax=int64#7,<e2=int64#9
  1169. # asm 2: add <mulrax=%rax,<e2=%r11
  1170. add %rax,%r11
  1171. # qhasm: mulr21 += mulrdx + carry
  1172. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  1173. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  1174. adc %rdx,%r12
  1175. # qhasm: mulrax = b2_stack
  1176. # asm 1: movq <b2_stack=stack64#15,>mulrax=int64#7
  1177. # asm 2: movq <b2_stack=112(%rsp),>mulrax=%rax
  1178. movq 112(%rsp),%rax
  1179. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 48)
  1180. # asm 1: mulq 48(<qp=int64#2)
  1181. # asm 2: mulq 48(<qp=%rsi)
  1182. mulq 48(%rsi)
  1183. # qhasm: carry? e3 += mulrax
  1184. # asm 1: add <mulrax=int64#7,<e3=int64#11
  1185. # asm 2: add <mulrax=%rax,<e3=%r13
  1186. add %rax,%r13
  1187. # qhasm: mulr31 += mulrdx + carry
  1188. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  1189. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  1190. adc %rdx,%r14
  1191. # qhasm: mulrax = b2_stack
  1192. # asm 1: movq <b2_stack=stack64#15,>mulrax=int64#7
  1193. # asm 2: movq <b2_stack=112(%rsp),>mulrax=%rax
  1194. movq 112(%rsp),%rax
  1195. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 56)
  1196. # asm 1: mulq 56(<qp=int64#2)
  1197. # asm 2: mulq 56(<qp=%rsi)
  1198. mulq 56(%rsi)
  1199. # qhasm: carry? e4 += mulrax
  1200. # asm 1: add <mulrax=int64#7,<e4=int64#13
  1201. # asm 2: add <mulrax=%rax,<e4=%r15
  1202. add %rax,%r15
  1203. # qhasm: mulr41 += mulrdx + carry
  1204. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  1205. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  1206. adc %rdx,%rbx
  1207. # qhasm: mulrax = b2_stack
  1208. # asm 1: movq <b2_stack=stack64#15,>mulrax=int64#3
  1209. # asm 2: movq <b2_stack=112(%rsp),>mulrax=%rdx
  1210. movq 112(%rsp),%rdx
  1211. # qhasm: mulrax *= 19
  1212. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1213. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1214. imulq $19,%rdx,%rax
  1215. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 64)
  1216. # asm 1: mulq 64(<qp=int64#2)
  1217. # asm 2: mulq 64(<qp=%rsi)
  1218. mulq 64(%rsi)
  1219. # qhasm: carry? e0 += mulrax
  1220. # asm 1: add <mulrax=int64#7,<e0=int64#4
  1221. # asm 2: add <mulrax=%rax,<e0=%rcx
  1222. add %rax,%rcx
  1223. # qhasm: mulr01 += mulrdx + carry
  1224. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  1225. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  1226. adc %rdx,%r8
  1227. # qhasm: mulrax = b2_stack
  1228. # asm 1: movq <b2_stack=stack64#15,>mulrax=int64#3
  1229. # asm 2: movq <b2_stack=112(%rsp),>mulrax=%rdx
  1230. movq 112(%rsp),%rdx
  1231. # qhasm: mulrax *= 19
  1232. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1233. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1234. imulq $19,%rdx,%rax
  1235. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 72)
  1236. # asm 1: mulq 72(<qp=int64#2)
  1237. # asm 2: mulq 72(<qp=%rsi)
  1238. mulq 72(%rsi)
  1239. # qhasm: carry? e1 += mulrax
  1240. # asm 1: add <mulrax=int64#7,<e1=int64#6
  1241. # asm 2: add <mulrax=%rax,<e1=%r9
  1242. add %rax,%r9
  1243. # qhasm: mulr11 += mulrdx + carry
  1244. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  1245. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  1246. adc %rdx,%r10
  1247. # qhasm: mulrax = b3_stack
  1248. # asm 1: movq <b3_stack=stack64#16,>mulrax=int64#7
  1249. # asm 2: movq <b3_stack=120(%rsp),>mulrax=%rax
  1250. movq 120(%rsp),%rax
  1251. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 40)
  1252. # asm 1: mulq 40(<qp=int64#2)
  1253. # asm 2: mulq 40(<qp=%rsi)
  1254. mulq 40(%rsi)
  1255. # qhasm: carry? e3 += mulrax
  1256. # asm 1: add <mulrax=int64#7,<e3=int64#11
  1257. # asm 2: add <mulrax=%rax,<e3=%r13
  1258. add %rax,%r13
  1259. # qhasm: mulr31 += mulrdx + carry
  1260. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  1261. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  1262. adc %rdx,%r14
  1263. # qhasm: mulrax = b3_stack
  1264. # asm 1: movq <b3_stack=stack64#16,>mulrax=int64#7
  1265. # asm 2: movq <b3_stack=120(%rsp),>mulrax=%rax
  1266. movq 120(%rsp),%rax
  1267. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 48)
  1268. # asm 1: mulq 48(<qp=int64#2)
  1269. # asm 2: mulq 48(<qp=%rsi)
  1270. mulq 48(%rsi)
  1271. # qhasm: carry? e4 += mulrax
  1272. # asm 1: add <mulrax=int64#7,<e4=int64#13
  1273. # asm 2: add <mulrax=%rax,<e4=%r15
  1274. add %rax,%r15
  1275. # qhasm: mulr41 += mulrdx + carry
  1276. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  1277. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  1278. adc %rdx,%rbx
  1279. # qhasm: mulrax = mulx319_stack
  1280. # asm 1: movq <mulx319_stack=stack64#18,>mulrax=int64#7
  1281. # asm 2: movq <mulx319_stack=136(%rsp),>mulrax=%rax
  1282. movq 136(%rsp),%rax
  1283. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 64)
  1284. # asm 1: mulq 64(<qp=int64#2)
  1285. # asm 2: mulq 64(<qp=%rsi)
  1286. mulq 64(%rsi)
  1287. # qhasm: carry? e1 += mulrax
  1288. # asm 1: add <mulrax=int64#7,<e1=int64#6
  1289. # asm 2: add <mulrax=%rax,<e1=%r9
  1290. add %rax,%r9
  1291. # qhasm: mulr11 += mulrdx + carry
  1292. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  1293. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  1294. adc %rdx,%r10
  1295. # qhasm: mulrax = mulx319_stack
  1296. # asm 1: movq <mulx319_stack=stack64#18,>mulrax=int64#7
  1297. # asm 2: movq <mulx319_stack=136(%rsp),>mulrax=%rax
  1298. movq 136(%rsp),%rax
  1299. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 72)
  1300. # asm 1: mulq 72(<qp=int64#2)
  1301. # asm 2: mulq 72(<qp=%rsi)
  1302. mulq 72(%rsi)
  1303. # qhasm: carry? e2 += mulrax
  1304. # asm 1: add <mulrax=int64#7,<e2=int64#9
  1305. # asm 2: add <mulrax=%rax,<e2=%r11
  1306. add %rax,%r11
  1307. # qhasm: mulr21 += mulrdx + carry
  1308. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  1309. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  1310. adc %rdx,%r12
  1311. # qhasm: mulrax = b4_stack
  1312. # asm 1: movq <b4_stack=stack64#17,>mulrax=int64#7
  1313. # asm 2: movq <b4_stack=128(%rsp),>mulrax=%rax
  1314. movq 128(%rsp),%rax
  1315. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 40)
  1316. # asm 1: mulq 40(<qp=int64#2)
  1317. # asm 2: mulq 40(<qp=%rsi)
  1318. mulq 40(%rsi)
  1319. # qhasm: carry? e4 += mulrax
  1320. # asm 1: add <mulrax=int64#7,<e4=int64#13
  1321. # asm 2: add <mulrax=%rax,<e4=%r15
  1322. add %rax,%r15
  1323. # qhasm: mulr41 += mulrdx + carry
  1324. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  1325. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  1326. adc %rdx,%rbx
  1327. # qhasm: mulrax = mulx419_stack
  1328. # asm 1: movq <mulx419_stack=stack64#19,>mulrax=int64#7
  1329. # asm 2: movq <mulx419_stack=144(%rsp),>mulrax=%rax
  1330. movq 144(%rsp),%rax
  1331. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 56)
  1332. # asm 1: mulq 56(<qp=int64#2)
  1333. # asm 2: mulq 56(<qp=%rsi)
  1334. mulq 56(%rsi)
  1335. # qhasm: carry? e1 += mulrax
  1336. # asm 1: add <mulrax=int64#7,<e1=int64#6
  1337. # asm 2: add <mulrax=%rax,<e1=%r9
  1338. add %rax,%r9
  1339. # qhasm: mulr11 += mulrdx + carry
  1340. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  1341. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  1342. adc %rdx,%r10
  1343. # qhasm: mulrax = mulx419_stack
  1344. # asm 1: movq <mulx419_stack=stack64#19,>mulrax=int64#7
  1345. # asm 2: movq <mulx419_stack=144(%rsp),>mulrax=%rax
  1346. movq 144(%rsp),%rax
  1347. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 64)
  1348. # asm 1: mulq 64(<qp=int64#2)
  1349. # asm 2: mulq 64(<qp=%rsi)
  1350. mulq 64(%rsi)
  1351. # qhasm: carry? e2 += mulrax
  1352. # asm 1: add <mulrax=int64#7,<e2=int64#9
  1353. # asm 2: add <mulrax=%rax,<e2=%r11
  1354. add %rax,%r11
  1355. # qhasm: mulr21 += mulrdx + carry
  1356. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  1357. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  1358. adc %rdx,%r12
  1359. # qhasm: mulrax = mulx419_stack
  1360. # asm 1: movq <mulx419_stack=stack64#19,>mulrax=int64#7
  1361. # asm 2: movq <mulx419_stack=144(%rsp),>mulrax=%rax
  1362. movq 144(%rsp),%rax
  1363. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 72)
  1364. # asm 1: mulq 72(<qp=int64#2)
  1365. # asm 2: mulq 72(<qp=%rsi)
  1366. mulq 72(%rsi)
  1367. # qhasm: carry? e3 += mulrax
  1368. # asm 1: add <mulrax=int64#7,<e3=int64#11
  1369. # asm 2: add <mulrax=%rax,<e3=%r13
  1370. add %rax,%r13
  1371. # qhasm: mulr31 += mulrdx + carry
  1372. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  1373. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  1374. adc %rdx,%r14
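# All 25 partial products of the second multiplication have been accumulated;
# the same reduce-and-carry sequence as above now brings e0..e4 back down to
# 51-bit limbs.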
  1375. # qhasm: mulredmask = *(uint64 *) &CRYPTO_NAMESPACE(batch_REDMASK51)
  1376. # asm 1: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=int64#3
  1377. # asm 2: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=%rdx
  1378. movq CRYPTO_NAMESPACE(batch_REDMASK51)(%rip),%rdx
  1379. # qhasm: mulr01 = (mulr01.e0) << 13
  1380. # asm 1: shld $13,<e0=int64#4,<mulr01=int64#5
  1381. # asm 2: shld $13,<e0=%rcx,<mulr01=%r8
  1382. shld $13,%rcx,%r8
  1383. # qhasm: e0 &= mulredmask
  1384. # asm 1: and <mulredmask=int64#3,<e0=int64#4
  1385. # asm 2: and <mulredmask=%rdx,<e0=%rcx
  1386. and %rdx,%rcx
  1387. # qhasm: mulr11 = (mulr11.e1) << 13
  1388. # asm 1: shld $13,<e1=int64#6,<mulr11=int64#8
  1389. # asm 2: shld $13,<e1=%r9,<mulr11=%r10
  1390. shld $13,%r9,%r10
  1391. # qhasm: e1 &= mulredmask
  1392. # asm 1: and <mulredmask=int64#3,<e1=int64#6
  1393. # asm 2: and <mulredmask=%rdx,<e1=%r9
  1394. and %rdx,%r9
  1395. # qhasm: e1 += mulr01
  1396. # asm 1: add <mulr01=int64#5,<e1=int64#6
  1397. # asm 2: add <mulr01=%r8,<e1=%r9
  1398. add %r8,%r9
  1399. # qhasm: mulr21 = (mulr21.e2) << 13
  1400. # asm 1: shld $13,<e2=int64#9,<mulr21=int64#10
  1401. # asm 2: shld $13,<e2=%r11,<mulr21=%r12
  1402. shld $13,%r11,%r12
  1403. # qhasm: e2 &= mulredmask
  1404. # asm 1: and <mulredmask=int64#3,<e2=int64#9
  1405. # asm 2: and <mulredmask=%rdx,<e2=%r11
  1406. and %rdx,%r11
  1407. # qhasm: e2 += mulr11
  1408. # asm 1: add <mulr11=int64#8,<e2=int64#9
  1409. # asm 2: add <mulr11=%r10,<e2=%r11
  1410. add %r10,%r11
  1411. # qhasm: mulr31 = (mulr31.e3) << 13
  1412. # asm 1: shld $13,<e3=int64#11,<mulr31=int64#12
  1413. # asm 2: shld $13,<e3=%r13,<mulr31=%r14
  1414. shld $13,%r13,%r14
  1415. # qhasm: e3 &= mulredmask
  1416. # asm 1: and <mulredmask=int64#3,<e3=int64#11
  1417. # asm 2: and <mulredmask=%rdx,<e3=%r13
  1418. and %rdx,%r13
  1419. # qhasm: e3 += mulr21
  1420. # asm 1: add <mulr21=int64#10,<e3=int64#11
  1421. # asm 2: add <mulr21=%r12,<e3=%r13
  1422. add %r12,%r13
  1423. # qhasm: mulr41 = (mulr41.e4) << 13
  1424. # asm 1: shld $13,<e4=int64#13,<mulr41=int64#14
  1425. # asm 2: shld $13,<e4=%r15,<mulr41=%rbx
  1426. shld $13,%r15,%rbx
  1427. # qhasm: e4 &= mulredmask
  1428. # asm 1: and <mulredmask=int64#3,<e4=int64#13
  1429. # asm 2: and <mulredmask=%rdx,<e4=%r15
  1430. and %rdx,%r15
  1431. # qhasm: e4 += mulr31
  1432. # asm 1: add <mulr31=int64#12,<e4=int64#13
  1433. # asm 2: add <mulr31=%r14,<e4=%r15
  1434. add %r14,%r15
  1435. # qhasm: mulr41 = mulr41 * 19
  1436. # asm 1: imulq $19,<mulr41=int64#14,>mulr41=int64#5
  1437. # asm 2: imulq $19,<mulr41=%rbx,>mulr41=%r8
  1438. imulq $19,%rbx,%r8
  1439. # qhasm: e0 += mulr41
  1440. # asm 1: add <mulr41=int64#5,<e0=int64#4
  1441. # asm 2: add <mulr41=%r8,<e0=%rcx
  1442. add %r8,%rcx
  1443. # qhasm: mult = e0
  1444. # asm 1: mov <e0=int64#4,>mult=int64#5
  1445. # asm 2: mov <e0=%rcx,>mult=%r8
  1446. mov %rcx,%r8
  1447. # qhasm: (uint64) mult >>= 51
  1448. # asm 1: shr $51,<mult=int64#5
  1449. # asm 2: shr $51,<mult=%r8
  1450. shr $51,%r8
  1451. # qhasm: mult += e1
  1452. # asm 1: add <e1=int64#6,<mult=int64#5
  1453. # asm 2: add <e1=%r9,<mult=%r8
  1454. add %r9,%r8
  1455. # qhasm: e1 = mult
  1456. # asm 1: mov <mult=int64#5,>e1=int64#6
  1457. # asm 2: mov <mult=%r8,>e1=%r9
  1458. mov %r8,%r9
  1459. # qhasm: (uint64) mult >>= 51
  1460. # asm 1: shr $51,<mult=int64#5
  1461. # asm 2: shr $51,<mult=%r8
  1462. shr $51,%r8
  1463. # qhasm: e0 &= mulredmask
  1464. # asm 1: and <mulredmask=int64#3,<e0=int64#4
  1465. # asm 2: and <mulredmask=%rdx,<e0=%rcx
  1466. and %rdx,%rcx
  1467. # qhasm: mult += e2
  1468. # asm 1: add <e2=int64#9,<mult=int64#5
  1469. # asm 2: add <e2=%r11,<mult=%r8
  1470. add %r11,%r8
  1471. # qhasm: e2 = mult
  1472. # asm 1: mov <mult=int64#5,>e2=int64#7
  1473. # asm 2: mov <mult=%r8,>e2=%rax
  1474. mov %r8,%rax
  1475. # qhasm: (uint64) mult >>= 51
  1476. # asm 1: shr $51,<mult=int64#5
  1477. # asm 2: shr $51,<mult=%r8
  1478. shr $51,%r8
  1479. # qhasm: e1 &= mulredmask
  1480. # asm 1: and <mulredmask=int64#3,<e1=int64#6
  1481. # asm 2: and <mulredmask=%rdx,<e1=%r9
  1482. and %rdx,%r9
  1483. # qhasm: mult += e3
  1484. # asm 1: add <e3=int64#11,<mult=int64#5
  1485. # asm 2: add <e3=%r13,<mult=%r8
  1486. add %r13,%r8
  1487. # qhasm: e3 = mult
  1488. # asm 1: mov <mult=int64#5,>e3=int64#8
  1489. # asm 2: mov <mult=%r8,>e3=%r10
  1490. mov %r8,%r10
  1491. # qhasm: (uint64) mult >>= 51
  1492. # asm 1: shr $51,<mult=int64#5
  1493. # asm 2: shr $51,<mult=%r8
  1494. shr $51,%r8
  1495. # qhasm: e2 &= mulredmask
  1496. # asm 1: and <mulredmask=int64#3,<e2=int64#7
  1497. # asm 2: and <mulredmask=%rdx,<e2=%rax
  1498. and %rdx,%rax
  1499. # qhasm: mult += e4
  1500. # asm 1: add <e4=int64#13,<mult=int64#5
  1501. # asm 2: add <e4=%r15,<mult=%r8
  1502. add %r15,%r8
  1503. # qhasm: e4 = mult
  1504. # asm 1: mov <mult=int64#5,>e4=int64#9
  1505. # asm 2: mov <mult=%r8,>e4=%r11
  1506. mov %r8,%r11
  1507. # qhasm: (uint64) mult >>= 51
  1508. # asm 1: shr $51,<mult=int64#5
  1509. # asm 2: shr $51,<mult=%r8
  1510. shr $51,%r8
  1511. # qhasm: e3 &= mulredmask
  1512. # asm 1: and <mulredmask=int64#3,<e3=int64#8
  1513. # asm 2: and <mulredmask=%rdx,<e3=%r10
  1514. and %rdx,%r10
  1515. # qhasm: mult *= 19
  1516. # asm 1: imulq $19,<mult=int64#5,>mult=int64#5
  1517. # asm 2: imulq $19,<mult=%r8,>mult=%r8
  1518. imulq $19,%r8,%r8
  1519. # qhasm: e0 += mult
  1520. # asm 1: add <mult=int64#5,<e0=int64#4
  1521. # asm 2: add <mult=%r8,<e0=%rcx
  1522. add %r8,%rcx
  1523. # qhasm: e4 &= mulredmask
  1524. # asm 1: and <mulredmask=int64#3,<e4=int64#9
  1525. # asm 2: and <mulredmask=%rdx,<e4=%r11
  1526. and %rdx,%r11
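# e0..e4 now hold the reduced second product.  The code next forms both the
# limb-wise sum h = e + a and the difference e - a, where a is the first
# product spilled to 56..88(%rsp) above.  Before the subtraction, 2*p is added
# limb-wise to e (batch_2P0 for limb 0, batch_2P1234 for limbs 1..4) so that no
# limb can go negative; adding a multiple of p leaves the value unchanged mod p.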
  1527. # qhasm: h0 = e0
  1528. # asm 1: mov <e0=int64#4,>h0=int64#3
  1529. # asm 2: mov <e0=%rcx,>h0=%rdx
  1530. mov %rcx,%rdx
  1531. # qhasm: h1 = e1
  1532. # asm 1: mov <e1=int64#6,>h1=int64#5
  1533. # asm 2: mov <e1=%r9,>h1=%r8
  1534. mov %r9,%r8
  1535. # qhasm: h2 = e2
  1536. # asm 1: mov <e2=int64#7,>h2=int64#10
  1537. # asm 2: mov <e2=%rax,>h2=%r12
  1538. mov %rax,%r12
  1539. # qhasm: h3 = e3
  1540. # asm 1: mov <e3=int64#8,>h3=int64#11
  1541. # asm 2: mov <e3=%r10,>h3=%r13
  1542. mov %r10,%r13
  1543. # qhasm: h4 = e4
  1544. # asm 1: mov <e4=int64#9,>h4=int64#12
  1545. # asm 2: mov <e4=%r11,>h4=%r14
  1546. mov %r11,%r14
  1547. # qhasm: e0 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P0)
  1548. # asm 1: add CRYPTO_NAMESPACE(batch_2P0),<e0=int64#4
  1549. # asm 2: add CRYPTO_NAMESPACE(batch_2P0),<e0=%rcx
  1550. add CRYPTO_NAMESPACE(batch_2P0)(%rip),%rcx
  1551. # qhasm: e1 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P1234)
  1552. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<e1=int64#6
  1553. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<e1=%r9
  1554. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%r9
  1555. # qhasm: e2 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P1234)
  1556. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<e2=int64#7
  1557. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<e2=%rax
  1558. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%rax
  1559. # qhasm: e3 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P1234)
  1560. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<e3=int64#8
  1561. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<e3=%r10
  1562. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%r10
  1563. # qhasm: e4 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P1234)
  1564. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<e4=int64#9
  1565. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<e4=%r11
  1566. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%r11
  1567. # qhasm: h0 += a0_stack
  1568. # asm 1: addq <a0_stack=stack64#8,<h0=int64#3
  1569. # asm 2: addq <a0_stack=56(%rsp),<h0=%rdx
  1570. addq 56(%rsp),%rdx
  1571. # qhasm: h1 += a1_stack
  1572. # asm 1: addq <a1_stack=stack64#9,<h1=int64#5
  1573. # asm 2: addq <a1_stack=64(%rsp),<h1=%r8
  1574. addq 64(%rsp),%r8
  1575. # qhasm: h2 += a2_stack
  1576. # asm 1: addq <a2_stack=stack64#10,<h2=int64#10
  1577. # asm 2: addq <a2_stack=72(%rsp),<h2=%r12
  1578. addq 72(%rsp),%r12
  1579. # qhasm: h3 += a3_stack
  1580. # asm 1: addq <a3_stack=stack64#11,<h3=int64#11
  1581. # asm 2: addq <a3_stack=80(%rsp),<h3=%r13
  1582. addq 80(%rsp),%r13
  1583. # qhasm: h4 += a4_stack
  1584. # asm 1: addq <a4_stack=stack64#12,<h4=int64#12
  1585. # asm 2: addq <a4_stack=88(%rsp),<h4=%r14
  1586. addq 88(%rsp),%r14
  1587. # qhasm: e0 -= a0_stack
  1588. # asm 1: subq <a0_stack=stack64#8,<e0=int64#4
  1589. # asm 2: subq <a0_stack=56(%rsp),<e0=%rcx
  1590. subq 56(%rsp),%rcx
  1591. # qhasm: e1 -= a1_stack
  1592. # asm 1: subq <a1_stack=stack64#9,<e1=int64#6
  1593. # asm 2: subq <a1_stack=64(%rsp),<e1=%r9
  1594. subq 64(%rsp),%r9
  1595. # qhasm: e2 -= a2_stack
  1596. # asm 1: subq <a2_stack=stack64#10,<e2=int64#7
  1597. # asm 2: subq <a2_stack=72(%rsp),<e2=%rax
  1598. subq 72(%rsp),%rax
  1599. # qhasm: e3 -= a3_stack
  1600. # asm 1: subq <a3_stack=stack64#11,<e3=int64#8
  1601. # asm 2: subq <a3_stack=80(%rsp),<e3=%r10
  1602. subq 80(%rsp),%r10
  1603. # qhasm: e4 -= a4_stack
  1604. # asm 1: subq <a4_stack=stack64#12,<e4=int64#9
  1605. # asm 2: subq <a4_stack=88(%rsp),<e4=%r11
  1606. subq 88(%rsp),%r11
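# h and e are written back over the stack slots that held a (56..88(%rsp)) and
# b (96..128(%rsp)); both of those inputs are dead from here on.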
  1607. # qhasm: h0_stack = h0
  1608. # asm 1: movq <h0=int64#3,>h0_stack=stack64#8
  1609. # asm 2: movq <h0=%rdx,>h0_stack=56(%rsp)
  1610. movq %rdx,56(%rsp)
  1611. # qhasm: h1_stack = h1
  1612. # asm 1: movq <h1=int64#5,>h1_stack=stack64#9
  1613. # asm 2: movq <h1=%r8,>h1_stack=64(%rsp)
  1614. movq %r8,64(%rsp)
  1615. # qhasm: h2_stack = h2
  1616. # asm 1: movq <h2=int64#10,>h2_stack=stack64#10
  1617. # asm 2: movq <h2=%r12,>h2_stack=72(%rsp)
  1618. movq %r12,72(%rsp)
  1619. # qhasm: h3_stack = h3
  1620. # asm 1: movq <h3=int64#11,>h3_stack=stack64#11
  1621. # asm 2: movq <h3=%r13,>h3_stack=80(%rsp)
  1622. movq %r13,80(%rsp)
  1623. # qhasm: h4_stack = h4
  1624. # asm 1: movq <h4=int64#12,>h4_stack=stack64#12
  1625. # asm 2: movq <h4=%r14,>h4_stack=88(%rsp)
  1626. movq %r14,88(%rsp)
  1627. # qhasm: e0_stack = e0
  1628. # asm 1: movq <e0=int64#4,>e0_stack=stack64#13
  1629. # asm 2: movq <e0=%rcx,>e0_stack=96(%rsp)
  1630. movq %rcx,96(%rsp)
  1631. # qhasm: e1_stack = e1
  1632. # asm 1: movq <e1=int64#6,>e1_stack=stack64#14
  1633. # asm 2: movq <e1=%r9,>e1_stack=104(%rsp)
  1634. movq %r9,104(%rsp)
  1635. # qhasm: e2_stack = e2
  1636. # asm 1: movq <e2=int64#7,>e2_stack=stack64#15
  1637. # asm 2: movq <e2=%rax,>e2_stack=112(%rsp)
  1638. movq %rax,112(%rsp)
  1639. # qhasm: e3_stack = e3
  1640. # asm 1: movq <e3=int64#8,>e3_stack=stack64#16
  1641. # asm 2: movq <e3=%r10,>e3_stack=120(%rsp)
  1642. movq %r10,120(%rsp)
  1643. # qhasm: e4_stack = e4
  1644. # asm 1: movq <e4=int64#9,>e4_stack=stack64#17
  1645. # asm 2: movq <e4=%r11,>e4_stack=128(%rsp)
  1646. movq %r11,128(%rsp)
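# Third product: the five limbs at rp+120..rp+152 are multiplied by the five
# limbs at qp+80..qp+112, with the same 19-scaling trick for the wrapping
# partial products (19 times the limbs at rp+144 and rp+152 are parked in
# mulx319_stack / mulx419_stack).  The columns accumulate in c0..c4 /
# mulr01..mulr41.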
  1647. # qhasm: mulrax = *(uint64 *)(rp + 144)
  1648. # asm 1: movq 144(<rp=int64#1),>mulrax=int64#3
  1649. # asm 2: movq 144(<rp=%rdi),>mulrax=%rdx
  1650. movq 144(%rdi),%rdx
  1651. # qhasm: mulrax *= 19
  1652. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1653. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1654. imulq $19,%rdx,%rax
  1655. # qhasm: mulx319_stack = mulrax
  1656. # asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#18
  1657. # asm 2: movq <mulrax=%rax,>mulx319_stack=136(%rsp)
  1658. movq %rax,136(%rsp)
  1659. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 96)
  1660. # asm 1: mulq 96(<qp=int64#2)
  1661. # asm 2: mulq 96(<qp=%rsi)
  1662. mulq 96(%rsi)
  1663. # qhasm: c0 = mulrax
  1664. # asm 1: mov <mulrax=int64#7,>c0=int64#4
  1665. # asm 2: mov <mulrax=%rax,>c0=%rcx
  1666. mov %rax,%rcx
  1667. # qhasm: mulr01 = mulrdx
  1668. # asm 1: mov <mulrdx=int64#3,>mulr01=int64#5
  1669. # asm 2: mov <mulrdx=%rdx,>mulr01=%r8
  1670. mov %rdx,%r8
  1671. # qhasm: mulrax = *(uint64 *)(rp + 152)
  1672. # asm 1: movq 152(<rp=int64#1),>mulrax=int64#3
  1673. # asm 2: movq 152(<rp=%rdi),>mulrax=%rdx
  1674. movq 152(%rdi),%rdx
  1675. # qhasm: mulrax *= 19
  1676. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1677. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1678. imulq $19,%rdx,%rax
  1679. # qhasm: mulx419_stack = mulrax
  1680. # asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#19
  1681. # asm 2: movq <mulrax=%rax,>mulx419_stack=144(%rsp)
  1682. movq %rax,144(%rsp)
  1683. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 88)
  1684. # asm 1: mulq 88(<qp=int64#2)
  1685. # asm 2: mulq 88(<qp=%rsi)
  1686. mulq 88(%rsi)
  1687. # qhasm: carry? c0 += mulrax
  1688. # asm 1: add <mulrax=int64#7,<c0=int64#4
  1689. # asm 2: add <mulrax=%rax,<c0=%rcx
  1690. add %rax,%rcx
  1691. # qhasm: mulr01 += mulrdx + carry
  1692. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  1693. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  1694. adc %rdx,%r8
  1695. # qhasm: mulrax = *(uint64 *)(rp + 120)
  1696. # asm 1: movq 120(<rp=int64#1),>mulrax=int64#7
  1697. # asm 2: movq 120(<rp=%rdi),>mulrax=%rax
  1698. movq 120(%rdi),%rax
  1699. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 80)
  1700. # asm 1: mulq 80(<qp=int64#2)
  1701. # asm 2: mulq 80(<qp=%rsi)
  1702. mulq 80(%rsi)
  1703. # qhasm: carry? c0 += mulrax
  1704. # asm 1: add <mulrax=int64#7,<c0=int64#4
  1705. # asm 2: add <mulrax=%rax,<c0=%rcx
  1706. add %rax,%rcx
  1707. # qhasm: mulr01 += mulrdx + carry
  1708. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  1709. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  1710. adc %rdx,%r8
  1711. # qhasm: mulrax = *(uint64 *)(rp + 120)
  1712. # asm 1: movq 120(<rp=int64#1),>mulrax=int64#7
  1713. # asm 2: movq 120(<rp=%rdi),>mulrax=%rax
  1714. movq 120(%rdi),%rax
  1715. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 88)
  1716. # asm 1: mulq 88(<qp=int64#2)
  1717. # asm 2: mulq 88(<qp=%rsi)
  1718. mulq 88(%rsi)
  1719. # qhasm: c1 = mulrax
  1720. # asm 1: mov <mulrax=int64#7,>c1=int64#6
  1721. # asm 2: mov <mulrax=%rax,>c1=%r9
  1722. mov %rax,%r9
  1723. # qhasm: mulr11 = mulrdx
  1724. # asm 1: mov <mulrdx=int64#3,>mulr11=int64#8
  1725. # asm 2: mov <mulrdx=%rdx,>mulr11=%r10
  1726. mov %rdx,%r10
  1727. # qhasm: mulrax = *(uint64 *)(rp + 120)
  1728. # asm 1: movq 120(<rp=int64#1),>mulrax=int64#7
  1729. # asm 2: movq 120(<rp=%rdi),>mulrax=%rax
  1730. movq 120(%rdi),%rax
  1731. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 96)
  1732. # asm 1: mulq 96(<qp=int64#2)
  1733. # asm 2: mulq 96(<qp=%rsi)
  1734. mulq 96(%rsi)
  1735. # qhasm: c2 = mulrax
  1736. # asm 1: mov <mulrax=int64#7,>c2=int64#9
  1737. # asm 2: mov <mulrax=%rax,>c2=%r11
  1738. mov %rax,%r11
  1739. # qhasm: mulr21 = mulrdx
  1740. # asm 1: mov <mulrdx=int64#3,>mulr21=int64#10
  1741. # asm 2: mov <mulrdx=%rdx,>mulr21=%r12
  1742. mov %rdx,%r12
  1743. # qhasm: mulrax = *(uint64 *)(rp + 120)
  1744. # asm 1: movq 120(<rp=int64#1),>mulrax=int64#7
  1745. # asm 2: movq 120(<rp=%rdi),>mulrax=%rax
  1746. movq 120(%rdi),%rax
  1747. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 104)
  1748. # asm 1: mulq 104(<qp=int64#2)
  1749. # asm 2: mulq 104(<qp=%rsi)
  1750. mulq 104(%rsi)
  1751. # qhasm: c3 = mulrax
  1752. # asm 1: mov <mulrax=int64#7,>c3=int64#11
  1753. # asm 2: mov <mulrax=%rax,>c3=%r13
  1754. mov %rax,%r13
  1755. # qhasm: mulr31 = mulrdx
  1756. # asm 1: mov <mulrdx=int64#3,>mulr31=int64#12
  1757. # asm 2: mov <mulrdx=%rdx,>mulr31=%r14
  1758. mov %rdx,%r14
  1759. # qhasm: mulrax = *(uint64 *)(rp + 120)
  1760. # asm 1: movq 120(<rp=int64#1),>mulrax=int64#7
  1761. # asm 2: movq 120(<rp=%rdi),>mulrax=%rax
  1762. movq 120(%rdi),%rax
  1763. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 112)
  1764. # asm 1: mulq 112(<qp=int64#2)
  1765. # asm 2: mulq 112(<qp=%rsi)
  1766. mulq 112(%rsi)
  1767. # qhasm: c4 = mulrax
  1768. # asm 1: mov <mulrax=int64#7,>c4=int64#13
  1769. # asm 2: mov <mulrax=%rax,>c4=%r15
  1770. mov %rax,%r15
  1771. # qhasm: mulr41 = mulrdx
  1772. # asm 1: mov <mulrdx=int64#3,>mulr41=int64#14
  1773. # asm 2: mov <mulrdx=%rdx,>mulr41=%rbx
  1774. mov %rdx,%rbx
  1775. # qhasm: mulrax = *(uint64 *)(rp + 128)
  1776. # asm 1: movq 128(<rp=int64#1),>mulrax=int64#7
  1777. # asm 2: movq 128(<rp=%rdi),>mulrax=%rax
  1778. movq 128(%rdi),%rax
  1779. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 80)
  1780. # asm 1: mulq 80(<qp=int64#2)
  1781. # asm 2: mulq 80(<qp=%rsi)
  1782. mulq 80(%rsi)
  1783. # qhasm: carry? c1 += mulrax
  1784. # asm 1: add <mulrax=int64#7,<c1=int64#6
  1785. # asm 2: add <mulrax=%rax,<c1=%r9
  1786. add %rax,%r9
  1787. # qhasm: mulr11 += mulrdx + carry
  1788. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  1789. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  1790. adc %rdx,%r10
  1791. # qhasm: mulrax = *(uint64 *)(rp + 128)
  1792. # asm 1: movq 128(<rp=int64#1),>mulrax=int64#7
  1793. # asm 2: movq 128(<rp=%rdi),>mulrax=%rax
  1794. movq 128(%rdi),%rax
  1795. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 88)
  1796. # asm 1: mulq 88(<qp=int64#2)
  1797. # asm 2: mulq 88(<qp=%rsi)
  1798. mulq 88(%rsi)
  1799. # qhasm: carry? c2 += mulrax
  1800. # asm 1: add <mulrax=int64#7,<c2=int64#9
  1801. # asm 2: add <mulrax=%rax,<c2=%r11
  1802. add %rax,%r11
  1803. # qhasm: mulr21 += mulrdx + carry
  1804. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  1805. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  1806. adc %rdx,%r12
  1807. # qhasm: mulrax = *(uint64 *)(rp + 128)
  1808. # asm 1: movq 128(<rp=int64#1),>mulrax=int64#7
  1809. # asm 2: movq 128(<rp=%rdi),>mulrax=%rax
  1810. movq 128(%rdi),%rax
  1811. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 96)
  1812. # asm 1: mulq 96(<qp=int64#2)
  1813. # asm 2: mulq 96(<qp=%rsi)
  1814. mulq 96(%rsi)
  1815. # qhasm: carry? c3 += mulrax
  1816. # asm 1: add <mulrax=int64#7,<c3=int64#11
  1817. # asm 2: add <mulrax=%rax,<c3=%r13
  1818. add %rax,%r13
  1819. # qhasm: mulr31 += mulrdx + carry
  1820. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  1821. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  1822. adc %rdx,%r14
  1823. # qhasm: mulrax = *(uint64 *)(rp + 128)
  1824. # asm 1: movq 128(<rp=int64#1),>mulrax=int64#7
  1825. # asm 2: movq 128(<rp=%rdi),>mulrax=%rax
  1826. movq 128(%rdi),%rax
  1827. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 104)
  1828. # asm 1: mulq 104(<qp=int64#2)
  1829. # asm 2: mulq 104(<qp=%rsi)
  1830. mulq 104(%rsi)
  1831. # qhasm: carry? c4 += mulrax
  1832. # asm 1: add <mulrax=int64#7,<c4=int64#13
  1833. # asm 2: add <mulrax=%rax,<c4=%r15
  1834. add %rax,%r15
  1835. # qhasm: mulr41 += mulrdx + carry
  1836. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  1837. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  1838. adc %rdx,%rbx
  1839. # qhasm: mulrax = *(uint64 *)(rp + 128)
  1840. # asm 1: movq 128(<rp=int64#1),>mulrax=int64#3
  1841. # asm 2: movq 128(<rp=%rdi),>mulrax=%rdx
  1842. movq 128(%rdi),%rdx
  1843. # qhasm: mulrax *= 19
  1844. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1845. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1846. imulq $19,%rdx,%rax
  1847. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 112)
  1848. # asm 1: mulq 112(<qp=int64#2)
  1849. # asm 2: mulq 112(<qp=%rsi)
  1850. mulq 112(%rsi)
  1851. # qhasm: carry? c0 += mulrax
  1852. # asm 1: add <mulrax=int64#7,<c0=int64#4
  1853. # asm 2: add <mulrax=%rax,<c0=%rcx
  1854. add %rax,%rcx
  1855. # qhasm: mulr01 += mulrdx + carry
  1856. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  1857. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  1858. adc %rdx,%r8
  1859. # qhasm: mulrax = *(uint64 *)(rp + 136)
  1860. # asm 1: movq 136(<rp=int64#1),>mulrax=int64#7
  1861. # asm 2: movq 136(<rp=%rdi),>mulrax=%rax
  1862. movq 136(%rdi),%rax
  1863. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 80)
  1864. # asm 1: mulq 80(<qp=int64#2)
  1865. # asm 2: mulq 80(<qp=%rsi)
  1866. mulq 80(%rsi)
  1867. # qhasm: carry? c2 += mulrax
  1868. # asm 1: add <mulrax=int64#7,<c2=int64#9
  1869. # asm 2: add <mulrax=%rax,<c2=%r11
  1870. add %rax,%r11
  1871. # qhasm: mulr21 += mulrdx + carry
  1872. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  1873. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  1874. adc %rdx,%r12
  1875. # qhasm: mulrax = *(uint64 *)(rp + 136)
  1876. # asm 1: movq 136(<rp=int64#1),>mulrax=int64#7
  1877. # asm 2: movq 136(<rp=%rdi),>mulrax=%rax
  1878. movq 136(%rdi),%rax
  1879. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 88)
  1880. # asm 1: mulq 88(<qp=int64#2)
  1881. # asm 2: mulq 88(<qp=%rsi)
  1882. mulq 88(%rsi)
  1883. # qhasm: carry? c3 += mulrax
  1884. # asm 1: add <mulrax=int64#7,<c3=int64#11
  1885. # asm 2: add <mulrax=%rax,<c3=%r13
  1886. add %rax,%r13
  1887. # qhasm: mulr31 += mulrdx + carry
  1888. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  1889. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  1890. adc %rdx,%r14
  1891. # qhasm: mulrax = *(uint64 *)(rp + 136)
  1892. # asm 1: movq 136(<rp=int64#1),>mulrax=int64#7
  1893. # asm 2: movq 136(<rp=%rdi),>mulrax=%rax
  1894. movq 136(%rdi),%rax
  1895. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 96)
  1896. # asm 1: mulq 96(<qp=int64#2)
  1897. # asm 2: mulq 96(<qp=%rsi)
  1898. mulq 96(%rsi)
  1899. # qhasm: carry? c4 += mulrax
  1900. # asm 1: add <mulrax=int64#7,<c4=int64#13
  1901. # asm 2: add <mulrax=%rax,<c4=%r15
  1902. add %rax,%r15
  1903. # qhasm: mulr41 += mulrdx + carry
  1904. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  1905. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  1906. adc %rdx,%rbx
  1907. # qhasm: mulrax = *(uint64 *)(rp + 136)
  1908. # asm 1: movq 136(<rp=int64#1),>mulrax=int64#3
  1909. # asm 2: movq 136(<rp=%rdi),>mulrax=%rdx
  1910. movq 136(%rdi),%rdx
  1911. # qhasm: mulrax *= 19
  1912. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1913. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1914. imulq $19,%rdx,%rax
  1915. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 104)
  1916. # asm 1: mulq 104(<qp=int64#2)
  1917. # asm 2: mulq 104(<qp=%rsi)
  1918. mulq 104(%rsi)
  1919. # qhasm: carry? c0 += mulrax
  1920. # asm 1: add <mulrax=int64#7,<c0=int64#4
  1921. # asm 2: add <mulrax=%rax,<c0=%rcx
  1922. add %rax,%rcx
  1923. # qhasm: mulr01 += mulrdx + carry
  1924. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  1925. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  1926. adc %rdx,%r8
  1927. # qhasm: mulrax = *(uint64 *)(rp + 136)
  1928. # asm 1: movq 136(<rp=int64#1),>mulrax=int64#3
  1929. # asm 2: movq 136(<rp=%rdi),>mulrax=%rdx
  1930. movq 136(%rdi),%rdx
  1931. # qhasm: mulrax *= 19
  1932. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1933. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1934. imulq $19,%rdx,%rax
  1935. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 112)
  1936. # asm 1: mulq 112(<qp=int64#2)
  1937. # asm 2: mulq 112(<qp=%rsi)
  1938. mulq 112(%rsi)
  1939. # qhasm: carry? c1 += mulrax
  1940. # asm 1: add <mulrax=int64#7,<c1=int64#6
  1941. # asm 2: add <mulrax=%rax,<c1=%r9
  1942. add %rax,%r9
  1943. # qhasm: mulr11 += mulrdx + carry
  1944. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  1945. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  1946. adc %rdx,%r10
  1947. # qhasm: mulrax = *(uint64 *)(rp + 144)
  1948. # asm 1: movq 144(<rp=int64#1),>mulrax=int64#7
  1949. # asm 2: movq 144(<rp=%rdi),>mulrax=%rax
  1950. movq 144(%rdi),%rax
  1951. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 80)
  1952. # asm 1: mulq 80(<qp=int64#2)
  1953. # asm 2: mulq 80(<qp=%rsi)
  1954. mulq 80(%rsi)
  1955. # qhasm: carry? c3 += mulrax
  1956. # asm 1: add <mulrax=int64#7,<c3=int64#11
  1957. # asm 2: add <mulrax=%rax,<c3=%r13
  1958. add %rax,%r13
  1959. # qhasm: mulr31 += mulrdx + carry
  1960. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  1961. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  1962. adc %rdx,%r14
  1963. # qhasm: mulrax = *(uint64 *)(rp + 144)
  1964. # asm 1: movq 144(<rp=int64#1),>mulrax=int64#7
  1965. # asm 2: movq 144(<rp=%rdi),>mulrax=%rax
  1966. movq 144(%rdi),%rax
  1967. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 88)
  1968. # asm 1: mulq 88(<qp=int64#2)
  1969. # asm 2: mulq 88(<qp=%rsi)
  1970. mulq 88(%rsi)
  1971. # qhasm: carry? c4 += mulrax
  1972. # asm 1: add <mulrax=int64#7,<c4=int64#13
  1973. # asm 2: add <mulrax=%rax,<c4=%r15
  1974. add %rax,%r15
  1975. # qhasm: mulr41 += mulrdx + carry
  1976. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  1977. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  1978. adc %rdx,%rbx
  1979. # qhasm: mulrax = mulx319_stack
  1980. # asm 1: movq <mulx319_stack=stack64#18,>mulrax=int64#7
  1981. # asm 2: movq <mulx319_stack=136(%rsp),>mulrax=%rax
  1982. movq 136(%rsp),%rax
  1983. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 104)
  1984. # asm 1: mulq 104(<qp=int64#2)
  1985. # asm 2: mulq 104(<qp=%rsi)
  1986. mulq 104(%rsi)
  1987. # qhasm: carry? c1 += mulrax
  1988. # asm 1: add <mulrax=int64#7,<c1=int64#6
  1989. # asm 2: add <mulrax=%rax,<c1=%r9
  1990. add %rax,%r9
  1991. # qhasm: mulr11 += mulrdx + carry
  1992. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  1993. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  1994. adc %rdx,%r10
  1995. # qhasm: mulrax = mulx319_stack
  1996. # asm 1: movq <mulx319_stack=stack64#18,>mulrax=int64#7
  1997. # asm 2: movq <mulx319_stack=136(%rsp),>mulrax=%rax
  1998. movq 136(%rsp),%rax
  1999. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 112)
  2000. # asm 1: mulq 112(<qp=int64#2)
  2001. # asm 2: mulq 112(<qp=%rsi)
  2002. mulq 112(%rsi)
  2003. # qhasm: carry? c2 += mulrax
  2004. # asm 1: add <mulrax=int64#7,<c2=int64#9
  2005. # asm 2: add <mulrax=%rax,<c2=%r11
  2006. add %rax,%r11
  2007. # qhasm: mulr21 += mulrdx + carry
  2008. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  2009. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  2010. adc %rdx,%r12
  2011. # qhasm: mulrax = *(uint64 *)(rp + 152)
  2012. # asm 1: movq 152(<rp=int64#1),>mulrax=int64#7
  2013. # asm 2: movq 152(<rp=%rdi),>mulrax=%rax
  2014. movq 152(%rdi),%rax
  2015. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 80)
  2016. # asm 1: mulq 80(<qp=int64#2)
  2017. # asm 2: mulq 80(<qp=%rsi)
  2018. mulq 80(%rsi)
  2019. # qhasm: carry? c4 += mulrax
  2020. # asm 1: add <mulrax=int64#7,<c4=int64#13
  2021. # asm 2: add <mulrax=%rax,<c4=%r15
  2022. add %rax,%r15
  2023. # qhasm: mulr41 += mulrdx + carry
  2024. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  2025. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  2026. adc %rdx,%rbx
  2027. # qhasm: mulrax = mulx419_stack
  2028. # asm 1: movq <mulx419_stack=stack64#19,>mulrax=int64#7
  2029. # asm 2: movq <mulx419_stack=144(%rsp),>mulrax=%rax
  2030. movq 144(%rsp),%rax
  2031. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 96)
  2032. # asm 1: mulq 96(<qp=int64#2)
  2033. # asm 2: mulq 96(<qp=%rsi)
  2034. mulq 96(%rsi)
  2035. # qhasm: carry? c1 += mulrax
  2036. # asm 1: add <mulrax=int64#7,<c1=int64#6
  2037. # asm 2: add <mulrax=%rax,<c1=%r9
  2038. add %rax,%r9
  2039. # qhasm: mulr11 += mulrdx + carry
  2040. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  2041. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  2042. adc %rdx,%r10
  2043. # qhasm: mulrax = mulx419_stack
  2044. # asm 1: movq <mulx419_stack=stack64#19,>mulrax=int64#7
  2045. # asm 2: movq <mulx419_stack=144(%rsp),>mulrax=%rax
  2046. movq 144(%rsp),%rax
  2047. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 104)
  2048. # asm 1: mulq 104(<qp=int64#2)
  2049. # asm 2: mulq 104(<qp=%rsi)
  2050. mulq 104(%rsi)
  2051. # qhasm: carry? c2 += mulrax
  2052. # asm 1: add <mulrax=int64#7,<c2=int64#9
  2053. # asm 2: add <mulrax=%rax,<c2=%r11
  2054. add %rax,%r11
  2055. # qhasm: mulr21 += mulrdx + carry
  2056. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  2057. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  2058. adc %rdx,%r12
  2059. # qhasm: mulrax = mulx419_stack
  2060. # asm 1: movq <mulx419_stack=stack64#19,>mulrax=int64#7
  2061. # asm 2: movq <mulx419_stack=144(%rsp),>mulrax=%rax
  2062. movq 144(%rsp),%rax
  2063. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 112)
  2064. # asm 1: mulq 112(<qp=int64#2)
  2065. # asm 2: mulq 112(<qp=%rsi)
  2066. mulq 112(%rsi)
  2067. # qhasm: carry? c3 += mulrax
  2068. # asm 1: add <mulrax=int64#7,<c3=int64#11
  2069. # asm 2: add <mulrax=%rax,<c3=%r13
  2070. add %rax,%r13
  2071. # qhasm: mulr31 += mulrdx + carry
  2072. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  2073. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  2074. adc %rdx,%r14
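# The third product is fully accumulated and is reduced exactly as before.
# Note that REDMASK51 is now loaded into %rsi, overwriting the qp pointer,
# which is not read again in this stretch of code.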
  2075. # qhasm: mulredmask = *(uint64 *) &CRYPTO_NAMESPACE(batch_REDMASK51)
  2076. # asm 1: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=int64#2
  2077. # asm 2: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=%rsi
  2078. movq CRYPTO_NAMESPACE(batch_REDMASK51)(%rip),%rsi
  2079. # qhasm: mulr01 = (mulr01.c0) << 13
  2080. # asm 1: shld $13,<c0=int64#4,<mulr01=int64#5
  2081. # asm 2: shld $13,<c0=%rcx,<mulr01=%r8
  2082. shld $13,%rcx,%r8
  2083. # qhasm: c0 &= mulredmask
  2084. # asm 1: and <mulredmask=int64#2,<c0=int64#4
  2085. # asm 2: and <mulredmask=%rsi,<c0=%rcx
  2086. and %rsi,%rcx
  2087. # qhasm: mulr11 = (mulr11.c1) << 13
  2088. # asm 1: shld $13,<c1=int64#6,<mulr11=int64#8
  2089. # asm 2: shld $13,<c1=%r9,<mulr11=%r10
  2090. shld $13,%r9,%r10
  2091. # qhasm: c1 &= mulredmask
  2092. # asm 1: and <mulredmask=int64#2,<c1=int64#6
  2093. # asm 2: and <mulredmask=%rsi,<c1=%r9
  2094. and %rsi,%r9
  2095. # qhasm: c1 += mulr01
  2096. # asm 1: add <mulr01=int64#5,<c1=int64#6
  2097. # asm 2: add <mulr01=%r8,<c1=%r9
  2098. add %r8,%r9
  2099. # qhasm: mulr21 = (mulr21.c2) << 13
  2100. # asm 1: shld $13,<c2=int64#9,<mulr21=int64#10
  2101. # asm 2: shld $13,<c2=%r11,<mulr21=%r12
  2102. shld $13,%r11,%r12
  2103. # qhasm: c2 &= mulredmask
  2104. # asm 1: and <mulredmask=int64#2,<c2=int64#9
  2105. # asm 2: and <mulredmask=%rsi,<c2=%r11
  2106. and %rsi,%r11
  2107. # qhasm: c2 += mulr11
  2108. # asm 1: add <mulr11=int64#8,<c2=int64#9
  2109. # asm 2: add <mulr11=%r10,<c2=%r11
  2110. add %r10,%r11
  2111. # qhasm: mulr31 = (mulr31.c3) << 13
  2112. # asm 1: shld $13,<c3=int64#11,<mulr31=int64#12
  2113. # asm 2: shld $13,<c3=%r13,<mulr31=%r14
  2114. shld $13,%r13,%r14
  2115. # qhasm: c3 &= mulredmask
  2116. # asm 1: and <mulredmask=int64#2,<c3=int64#11
  2117. # asm 2: and <mulredmask=%rsi,<c3=%r13
  2118. and %rsi,%r13
  2119. # qhasm: c3 += mulr21
  2120. # asm 1: add <mulr21=int64#10,<c3=int64#11
  2121. # asm 2: add <mulr21=%r12,<c3=%r13
  2122. add %r12,%r13
  2123. # qhasm: mulr41 = (mulr41.c4) << 13
  2124. # asm 1: shld $13,<c4=int64#13,<mulr41=int64#14
  2125. # asm 2: shld $13,<c4=%r15,<mulr41=%rbx
  2126. shld $13,%r15,%rbx
  2127. # qhasm: c4 &= mulredmask
  2128. # asm 1: and <mulredmask=int64#2,<c4=int64#13
  2129. # asm 2: and <mulredmask=%rsi,<c4=%r15
  2130. and %rsi,%r15
  2131. # qhasm: c4 += mulr31
  2132. # asm 1: add <mulr31=int64#12,<c4=int64#13
  2133. # asm 2: add <mulr31=%r14,<c4=%r15
  2134. add %r14,%r15
  2135. # qhasm: mulr41 = mulr41 * 19
  2136. # asm 1: imulq $19,<mulr41=int64#14,>mulr41=int64#3
  2137. # asm 2: imulq $19,<mulr41=%rbx,>mulr41=%rdx
  2138. imulq $19,%rbx,%rdx
  2139. # qhasm: c0 += mulr41
  2140. # asm 1: add <mulr41=int64#3,<c0=int64#4
  2141. # asm 2: add <mulr41=%rdx,<c0=%rcx
  2142. add %rdx,%rcx
  2143. # qhasm: mult = c0
  2144. # asm 1: mov <c0=int64#4,>mult=int64#3
  2145. # asm 2: mov <c0=%rcx,>mult=%rdx
  2146. mov %rcx,%rdx
  2147. # qhasm: (uint64) mult >>= 51
  2148. # asm 1: shr $51,<mult=int64#3
  2149. # asm 2: shr $51,<mult=%rdx
  2150. shr $51,%rdx
  2151. # qhasm: mult += c1
  2152. # asm 1: add <c1=int64#6,<mult=int64#3
  2153. # asm 2: add <c1=%r9,<mult=%rdx
  2154. add %r9,%rdx
  2155. # qhasm: c1 = mult
  2156. # asm 1: mov <mult=int64#3,>c1=int64#5
  2157. # asm 2: mov <mult=%rdx,>c1=%r8
  2158. mov %rdx,%r8
  2159. # qhasm: (uint64) mult >>= 51
  2160. # asm 1: shr $51,<mult=int64#3
  2161. # asm 2: shr $51,<mult=%rdx
  2162. shr $51,%rdx
  2163. # qhasm: c0 &= mulredmask
  2164. # asm 1: and <mulredmask=int64#2,<c0=int64#4
  2165. # asm 2: and <mulredmask=%rsi,<c0=%rcx
  2166. and %rsi,%rcx
  2167. # qhasm: mult += c2
  2168. # asm 1: add <c2=int64#9,<mult=int64#3
  2169. # asm 2: add <c2=%r11,<mult=%rdx
  2170. add %r11,%rdx
  2171. # qhasm: c2 = mult
  2172. # asm 1: mov <mult=int64#3,>c2=int64#6
  2173. # asm 2: mov <mult=%rdx,>c2=%r9
  2174. mov %rdx,%r9
  2175. # qhasm: (uint64) mult >>= 51
  2176. # asm 1: shr $51,<mult=int64#3
  2177. # asm 2: shr $51,<mult=%rdx
  2178. shr $51,%rdx
  2179. # qhasm: c1 &= mulredmask
  2180. # asm 1: and <mulredmask=int64#2,<c1=int64#5
  2181. # asm 2: and <mulredmask=%rsi,<c1=%r8
  2182. and %rsi,%r8
  2183. # qhasm: mult += c3
  2184. # asm 1: add <c3=int64#11,<mult=int64#3
  2185. # asm 2: add <c3=%r13,<mult=%rdx
  2186. add %r13,%rdx
  2187. # qhasm: c3 = mult
  2188. # asm 1: mov <mult=int64#3,>c3=int64#7
  2189. # asm 2: mov <mult=%rdx,>c3=%rax
  2190. mov %rdx,%rax
  2191. # qhasm: (uint64) mult >>= 51
  2192. # asm 1: shr $51,<mult=int64#3
  2193. # asm 2: shr $51,<mult=%rdx
  2194. shr $51,%rdx
  2195. # qhasm: c2 &= mulredmask
  2196. # asm 1: and <mulredmask=int64#2,<c2=int64#6
  2197. # asm 2: and <mulredmask=%rsi,<c2=%r9
  2198. and %rsi,%r9
  2199. # qhasm: mult += c4
  2200. # asm 1: add <c4=int64#13,<mult=int64#3
  2201. # asm 2: add <c4=%r15,<mult=%rdx
  2202. add %r15,%rdx
  2203. # qhasm: c4 = mult
  2204. # asm 1: mov <mult=int64#3,>c4=int64#8
  2205. # asm 2: mov <mult=%rdx,>c4=%r10
  2206. mov %rdx,%r10
  2207. # qhasm: (uint64) mult >>= 51
  2208. # asm 1: shr $51,<mult=int64#3
  2209. # asm 2: shr $51,<mult=%rdx
  2210. shr $51,%rdx
  2211. # qhasm: c3 &= mulredmask
  2212. # asm 1: and <mulredmask=int64#2,<c3=int64#7
  2213. # asm 2: and <mulredmask=%rsi,<c3=%rax
  2214. and %rsi,%rax
  2215. # qhasm: mult *= 19
  2216. # asm 1: imulq $19,<mult=int64#3,>mult=int64#3
  2217. # asm 2: imulq $19,<mult=%rdx,>mult=%rdx
  2218. imulq $19,%rdx,%rdx
  2219. # qhasm: c0 += mult
  2220. # asm 1: add <mult=int64#3,<c0=int64#4
  2221. # asm 2: add <mult=%rdx,<c0=%rcx
  2222. add %rdx,%rcx
  2223. # qhasm: c4 &= mulredmask
  2224. # asm 1: and <mulredmask=int64#2,<c4=int64#8
  2225. # asm 2: and <mulredmask=%rsi,<c4=%r10
  2226. and %rsi,%r10
  2227. # qhasm: c0_stack = c0
  2228. # asm 1: movq <c0=int64#4,>c0_stack=stack64#18
  2229. # asm 2: movq <c0=%rcx,>c0_stack=136(%rsp)
  2230. movq %rcx,136(%rsp)
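# Final stage of this block: load the five limbs at rp+80..rp+112, double them
# (each limb at most doubles, so this cannot overflow 64 bits), and combine
# them with the c product both ways: g = doubled limbs + c, and
# f = doubled limbs + 2*p - c, the 2*p bias again preventing negative limbs.
# Only c0 has to be staged through the stack (c0_stack); c1..c4 are still live
# in registers.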
  2231. # qhasm: f0 = *(uint64 *)(rp + 80)
  2232. # asm 1: movq 80(<rp=int64#1),>f0=int64#2
  2233. # asm 2: movq 80(<rp=%rdi),>f0=%rsi
  2234. movq 80(%rdi),%rsi
  2235. # qhasm: f1 = *(uint64 *)(rp + 88)
  2236. # asm 1: movq 88(<rp=int64#1),>f1=int64#3
  2237. # asm 2: movq 88(<rp=%rdi),>f1=%rdx
  2238. movq 88(%rdi),%rdx
  2239. # qhasm: f2 = *(uint64 *)(rp + 96)
  2240. # asm 1: movq 96(<rp=int64#1),>f2=int64#4
  2241. # asm 2: movq 96(<rp=%rdi),>f2=%rcx
  2242. movq 96(%rdi),%rcx
  2243. # qhasm: f3 = *(uint64 *)(rp + 104)
  2244. # asm 1: movq 104(<rp=int64#1),>f3=int64#9
  2245. # asm 2: movq 104(<rp=%rdi),>f3=%r11
  2246. movq 104(%rdi),%r11
  2247. # qhasm: f4 = *(uint64 *)(rp + 112)
  2248. # asm 1: movq 112(<rp=int64#1),>f4=int64#10
  2249. # asm 2: movq 112(<rp=%rdi),>f4=%r12
  2250. movq 112(%rdi),%r12
  2251. # qhasm: f0 += f0
  2252. # asm 1: add <f0=int64#2,<f0=int64#2
  2253. # asm 2: add <f0=%rsi,<f0=%rsi
  2254. add %rsi,%rsi
  2255. # qhasm: f1 += f1
  2256. # asm 1: add <f1=int64#3,<f1=int64#3
  2257. # asm 2: add <f1=%rdx,<f1=%rdx
  2258. add %rdx,%rdx
  2259. # qhasm: f2 += f2
  2260. # asm 1: add <f2=int64#4,<f2=int64#4
  2261. # asm 2: add <f2=%rcx,<f2=%rcx
  2262. add %rcx,%rcx
  2263. # qhasm: f3 += f3
  2264. # asm 1: add <f3=int64#9,<f3=int64#9
  2265. # asm 2: add <f3=%r11,<f3=%r11
  2266. add %r11,%r11
  2267. # qhasm: f4 += f4
  2268. # asm 1: add <f4=int64#10,<f4=int64#10
  2269. # asm 2: add <f4=%r12,<f4=%r12
  2270. add %r12,%r12
  2271. # qhasm: g0 = f0
  2272. # asm 1: mov <f0=int64#2,>g0=int64#11
  2273. # asm 2: mov <f0=%rsi,>g0=%r13
  2274. mov %rsi,%r13
  2275. # qhasm: g1 = f1
  2276. # asm 1: mov <f1=int64#3,>g1=int64#12
  2277. # asm 2: mov <f1=%rdx,>g1=%r14
  2278. mov %rdx,%r14
  2279. # qhasm: g2 = f2
  2280. # asm 1: mov <f2=int64#4,>g2=int64#13
  2281. # asm 2: mov <f2=%rcx,>g2=%r15
  2282. mov %rcx,%r15
  2283. # qhasm: g3 = f3
  2284. # asm 1: mov <f3=int64#9,>g3=int64#14
  2285. # asm 2: mov <f3=%r11,>g3=%rbx
  2286. mov %r11,%rbx
  2287. # qhasm: g4 = f4
  2288. # asm 1: mov <f4=int64#10,>g4=int64#15
  2289. # asm 2: mov <f4=%r12,>g4=%rbp
  2290. mov %r12,%rbp
  2291. # qhasm: f0 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P0)
  2292. # asm 1: add CRYPTO_NAMESPACE(batch_2P0),<f0=int64#2
  2293. # asm 2: add CRYPTO_NAMESPACE(batch_2P0),<f0=%rsi
  2294. add CRYPTO_NAMESPACE(batch_2P0)(%rip),%rsi
  2295. # qhasm: f1 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P1234)
  2296. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<f1=int64#3
  2297. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<f1=%rdx
  2298. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%rdx
  2299. # qhasm: f2 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P1234)
  2300. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<f2=int64#4
  2301. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<f2=%rcx
  2302. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%rcx
  2303. # qhasm: f3 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P1234)
  2304. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<f3=int64#9
  2305. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<f3=%r11
  2306. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%r11
  2307. # qhasm: f4 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P1234)
  2308. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<f4=int64#10
  2309. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<f4=%r12
  2310. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%r12
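# NOTE (annotation): batch_2P0/batch_2P1234 hold 2*p limb-wise
# (p = 2^255-19), so f now carries 2*Z + 2*p.  Adding a multiple of p does
# not change the value mod p; it only guarantees that the subtraction of
# c0..c4 below cannot go negative.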
  2311. # qhasm: g0 += c0_stack
  2312. # asm 1: addq <c0_stack=stack64#18,<g0=int64#11
  2313. # asm 2: addq <c0_stack=136(%rsp),<g0=%r13
  2314. addq 136(%rsp),%r13
  2315. # qhasm: g1 += c1
  2316. # asm 1: add <c1=int64#5,<g1=int64#12
  2317. # asm 2: add <c1=%r8,<g1=%r14
  2318. add %r8,%r14
  2319. # qhasm: g2 += c2
  2320. # asm 1: add <c2=int64#6,<g2=int64#13
  2321. # asm 2: add <c2=%r9,<g2=%r15
  2322. add %r9,%r15
  2323. # qhasm: g3 += c3
  2324. # asm 1: add <c3=int64#7,<g3=int64#14
  2325. # asm 2: add <c3=%rax,<g3=%rbx
  2326. add %rax,%rbx
  2327. # qhasm: g4 += c4
  2328. # asm 1: add <c4=int64#8,<g4=int64#15
  2329. # asm 2: add <c4=%r10,<g4=%rbp
  2330. add %r10,%rbp
  2331. # qhasm: f0 -= c0_stack
  2332. # asm 1: subq <c0_stack=stack64#18,<f0=int64#2
  2333. # asm 2: subq <c0_stack=136(%rsp),<f0=%rsi
  2334. subq 136(%rsp),%rsi
  2335. # qhasm: f1 -= c1
  2336. # asm 1: sub <c1=int64#5,<f1=int64#3
  2337. # asm 2: sub <c1=%r8,<f1=%rdx
  2338. sub %r8,%rdx
  2339. # qhasm: f2 -= c2
  2340. # asm 1: sub <c2=int64#6,<f2=int64#4
  2341. # asm 2: sub <c2=%r9,<f2=%rcx
  2342. sub %r9,%rcx
  2343. # qhasm: f3 -= c3
  2344. # asm 1: sub <c3=int64#7,<f3=int64#9
  2345. # asm 2: sub <c3=%rax,<f3=%r11
  2346. sub %rax,%r11
  2347. # qhasm: f4 -= c4
  2348. # asm 1: sub <c4=int64#8,<f4=int64#10
  2349. # asm 2: sub <c4=%r10,<f4=%r12
  2350. sub %r10,%r12
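# NOTE (annotation): at this point g = 2*Z + c and f = 2*Z + 2*p - c, both
# nonnegative and still congruent to the intended values mod p; these are
# the two intermediate field elements consumed by the multiplications below.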
  2351. # qhasm: g0_stack = g0
  2352. # asm 1: movq <g0=int64#11,>g0_stack=stack64#18
  2353. # asm 2: movq <g0=%r13,>g0_stack=136(%rsp)
  2354. movq %r13,136(%rsp)
  2355. # qhasm: g1_stack = g1
  2356. # asm 1: movq <g1=int64#12,>g1_stack=stack64#19
  2357. # asm 2: movq <g1=%r14,>g1_stack=144(%rsp)
  2358. movq %r14,144(%rsp)
  2359. # qhasm: g2_stack = g2
  2360. # asm 1: movq <g2=int64#13,>g2_stack=stack64#20
  2361. # asm 2: movq <g2=%r15,>g2_stack=152(%rsp)
  2362. movq %r15,152(%rsp)
  2363. # qhasm: g3_stack = g3
  2364. # asm 1: movq <g3=int64#14,>g3_stack=stack64#21
  2365. # asm 2: movq <g3=%rbx,>g3_stack=160(%rsp)
  2366. movq %rbx,160(%rsp)
  2367. # qhasm: g4_stack = g4
  2368. # asm 1: movq <g4=int64#15,>g4_stack=stack64#22
  2369. # asm 2: movq <g4=%rbp,>g4_stack=168(%rsp)
  2370. movq %rbp,168(%rsp)
  2371. # qhasm: f0_stack = f0
  2372. # asm 1: movq <f0=int64#2,>f0_stack=stack64#23
  2373. # asm 2: movq <f0=%rsi,>f0_stack=176(%rsp)
  2374. movq %rsi,176(%rsp)
  2375. # qhasm: f1_stack = f1
  2376. # asm 1: movq <f1=int64#3,>f1_stack=stack64#24
  2377. # asm 2: movq <f1=%rdx,>f1_stack=184(%rsp)
  2378. movq %rdx,184(%rsp)
  2379. # qhasm: f2_stack = f2
  2380. # asm 1: movq <f2=int64#4,>f2_stack=stack64#25
  2381. # asm 2: movq <f2=%rcx,>f2_stack=192(%rsp)
  2382. movq %rcx,192(%rsp)
  2383. # qhasm: f3_stack = f3
  2384. # asm 1: movq <f3=int64#9,>f3_stack=stack64#26
  2385. # asm 2: movq <f3=%r11,>f3_stack=200(%rsp)
  2386. movq %r11,200(%rsp)
  2387. # qhasm: f4_stack = f4
  2388. # asm 1: movq <f4=int64#10,>f4_stack=stack64#27
  2389. # asm 2: movq <f4=%r12,>f4_stack=208(%rsp)
  2390. movq %r12,208(%rsp)
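# NOTE (annotation): f and g are spilled to the stack because the schoolbook
# field multiplications that follow need nearly every general-purpose
# register.  Three such multiplications appear next: e*f (stored to
# rp+0..32), h*g (stored to rp+40..72), and g*f (begun further below).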
  2391. # qhasm: mulrax = e3_stack
  2392. # asm 1: movq <e3_stack=stack64#16,>mulrax=int64#2
  2393. # asm 2: movq <e3_stack=120(%rsp),>mulrax=%rsi
  2394. movq 120(%rsp),%rsi
  2395. # qhasm: mulrax *= 19
  2396. # asm 1: imulq $19,<mulrax=int64#2,>mulrax=int64#7
  2397. # asm 2: imulq $19,<mulrax=%rsi,>mulrax=%rax
  2398. imulq $19,%rsi,%rax
  2399. # qhasm: mulx319_stack = mulrax
  2400. # asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#28
  2401. # asm 2: movq <mulrax=%rax,>mulx319_stack=216(%rsp)
  2402. movq %rax,216(%rsp)
  2403. # qhasm: (uint128) mulrdx mulrax = mulrax * f2_stack
  2404. # asm 1: mulq <f2_stack=stack64#25
  2405. # asm 2: mulq <f2_stack=192(%rsp)
  2406. mulq 192(%rsp)
  2407. # qhasm: rx0 = mulrax
  2408. # asm 1: mov <mulrax=int64#7,>rx0=int64#2
  2409. # asm 2: mov <mulrax=%rax,>rx0=%rsi
  2410. mov %rax,%rsi
  2411. # qhasm: mulr01 = mulrdx
  2412. # asm 1: mov <mulrdx=int64#3,>mulr01=int64#4
  2413. # asm 2: mov <mulrdx=%rdx,>mulr01=%rcx
  2414. mov %rdx,%rcx
  2415. # qhasm: mulrax = e4_stack
  2416. # asm 1: movq <e4_stack=stack64#17,>mulrax=int64#3
  2417. # asm 2: movq <e4_stack=128(%rsp),>mulrax=%rdx
  2418. movq 128(%rsp),%rdx
  2419. # qhasm: mulrax *= 19
  2420. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  2421. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  2422. imulq $19,%rdx,%rax
  2423. # qhasm: mulx419_stack = mulrax
  2424. # asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#29
  2425. # asm 2: movq <mulrax=%rax,>mulx419_stack=224(%rsp)
  2426. movq %rax,224(%rsp)
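# NOTE (annotation): 19*e3 and 19*e4 are precomputed and spilled
# (mulx319_stack, mulx419_stack).  Partial products whose limb indices sum
# to 5 or more wrap around modulo 2^255-19 and therefore pick up a factor
# of 19; each of these precomputed values is reused several times below.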
  2427. # qhasm: (uint128) mulrdx mulrax = mulrax * f1_stack
  2428. # asm 1: mulq <f1_stack=stack64#24
  2429. # asm 2: mulq <f1_stack=184(%rsp)
  2430. mulq 184(%rsp)
  2431. # qhasm: carry? rx0 += mulrax
  2432. # asm 1: add <mulrax=int64#7,<rx0=int64#2
  2433. # asm 2: add <mulrax=%rax,<rx0=%rsi
  2434. add %rax,%rsi
  2435. # qhasm: mulr01 += mulrdx + carry
  2436. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  2437. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  2438. adc %rdx,%rcx
  2439. # qhasm: mulrax = e0_stack
  2440. # asm 1: movq <e0_stack=stack64#13,>mulrax=int64#7
  2441. # asm 2: movq <e0_stack=96(%rsp),>mulrax=%rax
  2442. movq 96(%rsp),%rax
  2443. # qhasm: (uint128) mulrdx mulrax = mulrax * f0_stack
  2444. # asm 1: mulq <f0_stack=stack64#23
  2445. # asm 2: mulq <f0_stack=176(%rsp)
  2446. mulq 176(%rsp)
  2447. # qhasm: carry? rx0 += mulrax
  2448. # asm 1: add <mulrax=int64#7,<rx0=int64#2
  2449. # asm 2: add <mulrax=%rax,<rx0=%rsi
  2450. add %rax,%rsi
  2451. # qhasm: mulr01 += mulrdx + carry
  2452. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  2453. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  2454. adc %rdx,%rcx
  2455. # qhasm: mulrax = e0_stack
  2456. # asm 1: movq <e0_stack=stack64#13,>mulrax=int64#7
  2457. # asm 2: movq <e0_stack=96(%rsp),>mulrax=%rax
  2458. movq 96(%rsp),%rax
  2459. # qhasm: (uint128) mulrdx mulrax = mulrax * f1_stack
  2460. # asm 1: mulq <f1_stack=stack64#24
  2461. # asm 2: mulq <f1_stack=184(%rsp)
  2462. mulq 184(%rsp)
  2463. # qhasm: rx1 = mulrax
  2464. # asm 1: mov <mulrax=int64#7,>rx1=int64#5
  2465. # asm 2: mov <mulrax=%rax,>rx1=%r8
  2466. mov %rax,%r8
  2467. # qhasm: mulr11 = mulrdx
  2468. # asm 1: mov <mulrdx=int64#3,>mulr11=int64#6
  2469. # asm 2: mov <mulrdx=%rdx,>mulr11=%r9
  2470. mov %rdx,%r9
  2471. # qhasm: mulrax = e0_stack
  2472. # asm 1: movq <e0_stack=stack64#13,>mulrax=int64#7
  2473. # asm 2: movq <e0_stack=96(%rsp),>mulrax=%rax
  2474. movq 96(%rsp),%rax
  2475. # qhasm: (uint128) mulrdx mulrax = mulrax * f2_stack
  2476. # asm 1: mulq <f2_stack=stack64#25
  2477. # asm 2: mulq <f2_stack=192(%rsp)
  2478. mulq 192(%rsp)
  2479. # qhasm: rx2 = mulrax
  2480. # asm 1: mov <mulrax=int64#7,>rx2=int64#8
  2481. # asm 2: mov <mulrax=%rax,>rx2=%r10
  2482. mov %rax,%r10
  2483. # qhasm: mulr21 = mulrdx
  2484. # asm 1: mov <mulrdx=int64#3,>mulr21=int64#9
  2485. # asm 2: mov <mulrdx=%rdx,>mulr21=%r11
  2486. mov %rdx,%r11
  2487. # qhasm: mulrax = e0_stack
  2488. # asm 1: movq <e0_stack=stack64#13,>mulrax=int64#7
  2489. # asm 2: movq <e0_stack=96(%rsp),>mulrax=%rax
  2490. movq 96(%rsp),%rax
  2491. # qhasm: (uint128) mulrdx mulrax = mulrax * f3_stack
  2492. # asm 1: mulq <f3_stack=stack64#26
  2493. # asm 2: mulq <f3_stack=200(%rsp)
  2494. mulq 200(%rsp)
  2495. # qhasm: rx3 = mulrax
  2496. # asm 1: mov <mulrax=int64#7,>rx3=int64#10
  2497. # asm 2: mov <mulrax=%rax,>rx3=%r12
  2498. mov %rax,%r12
  2499. # qhasm: mulr31 = mulrdx
  2500. # asm 1: mov <mulrdx=int64#3,>mulr31=int64#11
  2501. # asm 2: mov <mulrdx=%rdx,>mulr31=%r13
  2502. mov %rdx,%r13
  2503. # qhasm: mulrax = e0_stack
  2504. # asm 1: movq <e0_stack=stack64#13,>mulrax=int64#7
  2505. # asm 2: movq <e0_stack=96(%rsp),>mulrax=%rax
  2506. movq 96(%rsp),%rax
  2507. # qhasm: (uint128) mulrdx mulrax = mulrax * f4_stack
  2508. # asm 1: mulq <f4_stack=stack64#27
  2509. # asm 2: mulq <f4_stack=208(%rsp)
  2510. mulq 208(%rsp)
  2511. # qhasm: rx4 = mulrax
  2512. # asm 1: mov <mulrax=int64#7,>rx4=int64#12
  2513. # asm 2: mov <mulrax=%rax,>rx4=%r14
  2514. mov %rax,%r14
  2515. # qhasm: mulr41 = mulrdx
  2516. # asm 1: mov <mulrdx=int64#3,>mulr41=int64#13
  2517. # asm 2: mov <mulrdx=%rdx,>mulr41=%r15
  2518. mov %rdx,%r15
  2519. # qhasm: mulrax = e1_stack
  2520. # asm 1: movq <e1_stack=stack64#14,>mulrax=int64#7
  2521. # asm 2: movq <e1_stack=104(%rsp),>mulrax=%rax
  2522. movq 104(%rsp),%rax
  2523. # qhasm: (uint128) mulrdx mulrax = mulrax * f0_stack
  2524. # asm 1: mulq <f0_stack=stack64#23
  2525. # asm 2: mulq <f0_stack=176(%rsp)
  2526. mulq 176(%rsp)
  2527. # qhasm: carry? rx1 += mulrax
  2528. # asm 1: add <mulrax=int64#7,<rx1=int64#5
  2529. # asm 2: add <mulrax=%rax,<rx1=%r8
  2530. add %rax,%r8
  2531. # qhasm: mulr11 += mulrdx + carry
  2532. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  2533. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  2534. adc %rdx,%r9
  2535. # qhasm: mulrax = e1_stack
  2536. # asm 1: movq <e1_stack=stack64#14,>mulrax=int64#7
  2537. # asm 2: movq <e1_stack=104(%rsp),>mulrax=%rax
  2538. movq 104(%rsp),%rax
  2539. # qhasm: (uint128) mulrdx mulrax = mulrax * f1_stack
  2540. # asm 1: mulq <f1_stack=stack64#24
  2541. # asm 2: mulq <f1_stack=184(%rsp)
  2542. mulq 184(%rsp)
  2543. # qhasm: carry? rx2 += mulrax
  2544. # asm 1: add <mulrax=int64#7,<rx2=int64#8
  2545. # asm 2: add <mulrax=%rax,<rx2=%r10
  2546. add %rax,%r10
  2547. # qhasm: mulr21 += mulrdx + carry
  2548. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  2549. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  2550. adc %rdx,%r11
  2551. # qhasm: mulrax = e1_stack
  2552. # asm 1: movq <e1_stack=stack64#14,>mulrax=int64#7
  2553. # asm 2: movq <e1_stack=104(%rsp),>mulrax=%rax
  2554. movq 104(%rsp),%rax
  2555. # qhasm: (uint128) mulrdx mulrax = mulrax * f2_stack
  2556. # asm 1: mulq <f2_stack=stack64#25
  2557. # asm 2: mulq <f2_stack=192(%rsp)
  2558. mulq 192(%rsp)
  2559. # qhasm: carry? rx3 += mulrax
  2560. # asm 1: add <mulrax=int64#7,<rx3=int64#10
  2561. # asm 2: add <mulrax=%rax,<rx3=%r12
  2562. add %rax,%r12
  2563. # qhasm: mulr31 += mulrdx + carry
  2564. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  2565. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  2566. adc %rdx,%r13
  2567. # qhasm: mulrax = e1_stack
  2568. # asm 1: movq <e1_stack=stack64#14,>mulrax=int64#7
  2569. # asm 2: movq <e1_stack=104(%rsp),>mulrax=%rax
  2570. movq 104(%rsp),%rax
  2571. # qhasm: (uint128) mulrdx mulrax = mulrax * f3_stack
  2572. # asm 1: mulq <f3_stack=stack64#26
  2573. # asm 2: mulq <f3_stack=200(%rsp)
  2574. mulq 200(%rsp)
  2575. # qhasm: carry? rx4 += mulrax
  2576. # asm 1: add <mulrax=int64#7,<rx4=int64#12
  2577. # asm 2: add <mulrax=%rax,<rx4=%r14
  2578. add %rax,%r14
  2579. # qhasm: mulr41 += mulrdx + carry
  2580. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  2581. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  2582. adc %rdx,%r15
  2583. # qhasm: mulrax = e1_stack
  2584. # asm 1: movq <e1_stack=stack64#14,>mulrax=int64#3
  2585. # asm 2: movq <e1_stack=104(%rsp),>mulrax=%rdx
  2586. movq 104(%rsp),%rdx
  2587. # qhasm: mulrax *= 19
  2588. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  2589. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  2590. imulq $19,%rdx,%rax
  2591. # qhasm: (uint128) mulrdx mulrax = mulrax * f4_stack
  2592. # asm 1: mulq <f4_stack=stack64#27
  2593. # asm 2: mulq <f4_stack=208(%rsp)
  2594. mulq 208(%rsp)
  2595. # qhasm: carry? rx0 += mulrax
  2596. # asm 1: add <mulrax=int64#7,<rx0=int64#2
  2597. # asm 2: add <mulrax=%rax,<rx0=%rsi
  2598. add %rax,%rsi
  2599. # qhasm: mulr01 += mulrdx + carry
  2600. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  2601. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  2602. adc %rdx,%rcx
  2603. # qhasm: mulrax = e2_stack
  2604. # asm 1: movq <e2_stack=stack64#15,>mulrax=int64#7
  2605. # asm 2: movq <e2_stack=112(%rsp),>mulrax=%rax
  2606. movq 112(%rsp),%rax
  2607. # qhasm: (uint128) mulrdx mulrax = mulrax * f0_stack
  2608. # asm 1: mulq <f0_stack=stack64#23
  2609. # asm 2: mulq <f0_stack=176(%rsp)
  2610. mulq 176(%rsp)
  2611. # qhasm: carry? rx2 += mulrax
  2612. # asm 1: add <mulrax=int64#7,<rx2=int64#8
  2613. # asm 2: add <mulrax=%rax,<rx2=%r10
  2614. add %rax,%r10
  2615. # qhasm: mulr21 += mulrdx + carry
  2616. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  2617. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  2618. adc %rdx,%r11
  2619. # qhasm: mulrax = e2_stack
  2620. # asm 1: movq <e2_stack=stack64#15,>mulrax=int64#7
  2621. # asm 2: movq <e2_stack=112(%rsp),>mulrax=%rax
  2622. movq 112(%rsp),%rax
  2623. # qhasm: (uint128) mulrdx mulrax = mulrax * f1_stack
  2624. # asm 1: mulq <f1_stack=stack64#24
  2625. # asm 2: mulq <f1_stack=184(%rsp)
  2626. mulq 184(%rsp)
  2627. # qhasm: carry? rx3 += mulrax
  2628. # asm 1: add <mulrax=int64#7,<rx3=int64#10
  2629. # asm 2: add <mulrax=%rax,<rx3=%r12
  2630. add %rax,%r12
  2631. # qhasm: mulr31 += mulrdx + carry
  2632. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  2633. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  2634. adc %rdx,%r13
  2635. # qhasm: mulrax = e2_stack
  2636. # asm 1: movq <e2_stack=stack64#15,>mulrax=int64#7
  2637. # asm 2: movq <e2_stack=112(%rsp),>mulrax=%rax
  2638. movq 112(%rsp),%rax
  2639. # qhasm: (uint128) mulrdx mulrax = mulrax * f2_stack
  2640. # asm 1: mulq <f2_stack=stack64#25
  2641. # asm 2: mulq <f2_stack=192(%rsp)
  2642. mulq 192(%rsp)
  2643. # qhasm: carry? rx4 += mulrax
  2644. # asm 1: add <mulrax=int64#7,<rx4=int64#12
  2645. # asm 2: add <mulrax=%rax,<rx4=%r14
  2646. add %rax,%r14
  2647. # qhasm: mulr41 += mulrdx + carry
  2648. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  2649. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  2650. adc %rdx,%r15
  2651. # qhasm: mulrax = e2_stack
  2652. # asm 1: movq <e2_stack=stack64#15,>mulrax=int64#3
  2653. # asm 2: movq <e2_stack=112(%rsp),>mulrax=%rdx
  2654. movq 112(%rsp),%rdx
  2655. # qhasm: mulrax *= 19
  2656. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  2657. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  2658. imulq $19,%rdx,%rax
  2659. # qhasm: (uint128) mulrdx mulrax = mulrax * f3_stack
  2660. # asm 1: mulq <f3_stack=stack64#26
  2661. # asm 2: mulq <f3_stack=200(%rsp)
  2662. mulq 200(%rsp)
  2663. # qhasm: carry? rx0 += mulrax
  2664. # asm 1: add <mulrax=int64#7,<rx0=int64#2
  2665. # asm 2: add <mulrax=%rax,<rx0=%rsi
  2666. add %rax,%rsi
  2667. # qhasm: mulr01 += mulrdx + carry
  2668. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  2669. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  2670. adc %rdx,%rcx
  2671. # qhasm: mulrax = e2_stack
  2672. # asm 1: movq <e2_stack=stack64#15,>mulrax=int64#3
  2673. # asm 2: movq <e2_stack=112(%rsp),>mulrax=%rdx
  2674. movq 112(%rsp),%rdx
  2675. # qhasm: mulrax *= 19
  2676. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  2677. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  2678. imulq $19,%rdx,%rax
  2679. # qhasm: (uint128) mulrdx mulrax = mulrax * f4_stack
  2680. # asm 1: mulq <f4_stack=stack64#27
  2681. # asm 2: mulq <f4_stack=208(%rsp)
  2682. mulq 208(%rsp)
  2683. # qhasm: carry? rx1 += mulrax
  2684. # asm 1: add <mulrax=int64#7,<rx1=int64#5
  2685. # asm 2: add <mulrax=%rax,<rx1=%r8
  2686. add %rax,%r8
  2687. # qhasm: mulr11 += mulrdx + carry
  2688. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  2689. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  2690. adc %rdx,%r9
  2691. # qhasm: mulrax = e3_stack
  2692. # asm 1: movq <e3_stack=stack64#16,>mulrax=int64#7
  2693. # asm 2: movq <e3_stack=120(%rsp),>mulrax=%rax
  2694. movq 120(%rsp),%rax
  2695. # qhasm: (uint128) mulrdx mulrax = mulrax * f0_stack
  2696. # asm 1: mulq <f0_stack=stack64#23
  2697. # asm 2: mulq <f0_stack=176(%rsp)
  2698. mulq 176(%rsp)
  2699. # qhasm: carry? rx3 += mulrax
  2700. # asm 1: add <mulrax=int64#7,<rx3=int64#10
  2701. # asm 2: add <mulrax=%rax,<rx3=%r12
  2702. add %rax,%r12
  2703. # qhasm: mulr31 += mulrdx + carry
  2704. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  2705. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  2706. adc %rdx,%r13
  2707. # qhasm: mulrax = e3_stack
  2708. # asm 1: movq <e3_stack=stack64#16,>mulrax=int64#7
  2709. # asm 2: movq <e3_stack=120(%rsp),>mulrax=%rax
  2710. movq 120(%rsp),%rax
  2711. # qhasm: (uint128) mulrdx mulrax = mulrax * f1_stack
  2712. # asm 1: mulq <f1_stack=stack64#24
  2713. # asm 2: mulq <f1_stack=184(%rsp)
  2714. mulq 184(%rsp)
  2715. # qhasm: carry? rx4 += mulrax
  2716. # asm 1: add <mulrax=int64#7,<rx4=int64#12
  2717. # asm 2: add <mulrax=%rax,<rx4=%r14
  2718. add %rax,%r14
  2719. # qhasm: mulr41 += mulrdx + carry
  2720. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  2721. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  2722. adc %rdx,%r15
  2723. # qhasm: mulrax = mulx319_stack
  2724. # asm 1: movq <mulx319_stack=stack64#28,>mulrax=int64#7
  2725. # asm 2: movq <mulx319_stack=216(%rsp),>mulrax=%rax
  2726. movq 216(%rsp),%rax
  2727. # qhasm: (uint128) mulrdx mulrax = mulrax * f3_stack
  2728. # asm 1: mulq <f3_stack=stack64#26
  2729. # asm 2: mulq <f3_stack=200(%rsp)
  2730. mulq 200(%rsp)
  2731. # qhasm: carry? rx1 += mulrax
  2732. # asm 1: add <mulrax=int64#7,<rx1=int64#5
  2733. # asm 2: add <mulrax=%rax,<rx1=%r8
  2734. add %rax,%r8
  2735. # qhasm: mulr11 += mulrdx + carry
  2736. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  2737. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  2738. adc %rdx,%r9
  2739. # qhasm: mulrax = mulx319_stack
  2740. # asm 1: movq <mulx319_stack=stack64#28,>mulrax=int64#7
  2741. # asm 2: movq <mulx319_stack=216(%rsp),>mulrax=%rax
  2742. movq 216(%rsp),%rax
  2743. # qhasm: (uint128) mulrdx mulrax = mulrax * f4_stack
  2744. # asm 1: mulq <f4_stack=stack64#27
  2745. # asm 2: mulq <f4_stack=208(%rsp)
  2746. mulq 208(%rsp)
  2747. # qhasm: carry? rx2 += mulrax
  2748. # asm 1: add <mulrax=int64#7,<rx2=int64#8
  2749. # asm 2: add <mulrax=%rax,<rx2=%r10
  2750. add %rax,%r10
  2751. # qhasm: mulr21 += mulrdx + carry
  2752. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  2753. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  2754. adc %rdx,%r11
  2755. # qhasm: mulrax = e4_stack
  2756. # asm 1: movq <e4_stack=stack64#17,>mulrax=int64#7
  2757. # asm 2: movq <e4_stack=128(%rsp),>mulrax=%rax
  2758. movq 128(%rsp),%rax
  2759. # qhasm: (uint128) mulrdx mulrax = mulrax * f0_stack
  2760. # asm 1: mulq <f0_stack=stack64#23
  2761. # asm 2: mulq <f0_stack=176(%rsp)
  2762. mulq 176(%rsp)
  2763. # qhasm: carry? rx4 += mulrax
  2764. # asm 1: add <mulrax=int64#7,<rx4=int64#12
  2765. # asm 2: add <mulrax=%rax,<rx4=%r14
  2766. add %rax,%r14
  2767. # qhasm: mulr41 += mulrdx + carry
  2768. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  2769. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  2770. adc %rdx,%r15
  2771. # qhasm: mulrax = mulx419_stack
  2772. # asm 1: movq <mulx419_stack=stack64#29,>mulrax=int64#7
  2773. # asm 2: movq <mulx419_stack=224(%rsp),>mulrax=%rax
  2774. movq 224(%rsp),%rax
  2775. # qhasm: (uint128) mulrdx mulrax = mulrax * f2_stack
  2776. # asm 1: mulq <f2_stack=stack64#25
  2777. # asm 2: mulq <f2_stack=192(%rsp)
  2778. mulq 192(%rsp)
  2779. # qhasm: carry? rx1 += mulrax
  2780. # asm 1: add <mulrax=int64#7,<rx1=int64#5
  2781. # asm 2: add <mulrax=%rax,<rx1=%r8
  2782. add %rax,%r8
  2783. # qhasm: mulr11 += mulrdx + carry
  2784. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  2785. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  2786. adc %rdx,%r9
  2787. # qhasm: mulrax = mulx419_stack
  2788. # asm 1: movq <mulx419_stack=stack64#29,>mulrax=int64#7
  2789. # asm 2: movq <mulx419_stack=224(%rsp),>mulrax=%rax
  2790. movq 224(%rsp),%rax
  2791. # qhasm: (uint128) mulrdx mulrax = mulrax * f3_stack
  2792. # asm 1: mulq <f3_stack=stack64#26
  2793. # asm 2: mulq <f3_stack=200(%rsp)
  2794. mulq 200(%rsp)
  2795. # qhasm: carry? rx2 += mulrax
  2796. # asm 1: add <mulrax=int64#7,<rx2=int64#8
  2797. # asm 2: add <mulrax=%rax,<rx2=%r10
  2798. add %rax,%r10
  2799. # qhasm: mulr21 += mulrdx + carry
  2800. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  2801. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  2802. adc %rdx,%r11
  2803. # qhasm: mulrax = mulx419_stack
  2804. # asm 1: movq <mulx419_stack=stack64#29,>mulrax=int64#7
  2805. # asm 2: movq <mulx419_stack=224(%rsp),>mulrax=%rax
  2806. movq 224(%rsp),%rax
  2807. # qhasm: (uint128) mulrdx mulrax = mulrax * f4_stack
  2808. # asm 1: mulq <f4_stack=stack64#27
  2809. # asm 2: mulq <f4_stack=208(%rsp)
  2810. mulq 208(%rsp)
  2811. # qhasm: carry? rx3 += mulrax
  2812. # asm 1: add <mulrax=int64#7,<rx3=int64#10
  2813. # asm 2: add <mulrax=%rax,<rx3=%r12
  2814. add %rax,%r12
  2815. # qhasm: mulr31 += mulrdx + carry
  2816. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  2817. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  2818. adc %rdx,%r13
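# NOTE (annotation): all 25 partial products of the 5x5-limb multiplication
# have now been accumulated.  Each pair (rx_k, mulr_k1) is a 128-bit sum of
# the products e_i*f_j with i+j = k, plus 19 times those with i+j = k+5.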
  2819. # qhasm: mulredmask = *(uint64 *) &CRYPTO_NAMESPACE(batch_REDMASK51)
  2820. # asm 1: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=int64#3
  2821. # asm 2: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=%rdx
  2822. movq CRYPTO_NAMESPACE(batch_REDMASK51)(%rip),%rdx
  2823. # qhasm: mulr01 = (mulr01.rx0) << 13
  2824. # asm 1: shld $13,<rx0=int64#2,<mulr01=int64#4
  2825. # asm 2: shld $13,<rx0=%rsi,<mulr01=%rcx
  2826. shld $13,%rsi,%rcx
  2827. # qhasm: rx0 &= mulredmask
  2828. # asm 1: and <mulredmask=int64#3,<rx0=int64#2
  2829. # asm 2: and <mulredmask=%rdx,<rx0=%rsi
  2830. and %rdx,%rsi
  2831. # qhasm: mulr11 = (mulr11.rx1) << 13
  2832. # asm 1: shld $13,<rx1=int64#5,<mulr11=int64#6
  2833. # asm 2: shld $13,<rx1=%r8,<mulr11=%r9
  2834. shld $13,%r8,%r9
  2835. # qhasm: rx1 &= mulredmask
  2836. # asm 1: and <mulredmask=int64#3,<rx1=int64#5
  2837. # asm 2: and <mulredmask=%rdx,<rx1=%r8
  2838. and %rdx,%r8
  2839. # qhasm: rx1 += mulr01
  2840. # asm 1: add <mulr01=int64#4,<rx1=int64#5
  2841. # asm 2: add <mulr01=%rcx,<rx1=%r8
  2842. add %rcx,%r8
  2843. # qhasm: mulr21 = (mulr21.rx2) << 13
  2844. # asm 1: shld $13,<rx2=int64#8,<mulr21=int64#9
  2845. # asm 2: shld $13,<rx2=%r10,<mulr21=%r11
  2846. shld $13,%r10,%r11
  2847. # qhasm: rx2 &= mulredmask
  2848. # asm 1: and <mulredmask=int64#3,<rx2=int64#8
  2849. # asm 2: and <mulredmask=%rdx,<rx2=%r10
  2850. and %rdx,%r10
  2851. # qhasm: rx2 += mulr11
  2852. # asm 1: add <mulr11=int64#6,<rx2=int64#8
  2853. # asm 2: add <mulr11=%r9,<rx2=%r10
  2854. add %r9,%r10
  2855. # qhasm: mulr31 = (mulr31.rx3) << 13
  2856. # asm 1: shld $13,<rx3=int64#10,<mulr31=int64#11
  2857. # asm 2: shld $13,<rx3=%r12,<mulr31=%r13
  2858. shld $13,%r12,%r13
  2859. # qhasm: rx3 &= mulredmask
  2860. # asm 1: and <mulredmask=int64#3,<rx3=int64#10
  2861. # asm 2: and <mulredmask=%rdx,<rx3=%r12
  2862. and %rdx,%r12
  2863. # qhasm: rx3 += mulr21
  2864. # asm 1: add <mulr21=int64#9,<rx3=int64#10
  2865. # asm 2: add <mulr21=%r11,<rx3=%r12
  2866. add %r11,%r12
  2867. # qhasm: mulr41 = (mulr41.rx4) << 13
  2868. # asm 1: shld $13,<rx4=int64#12,<mulr41=int64#13
  2869. # asm 2: shld $13,<rx4=%r14,<mulr41=%r15
  2870. shld $13,%r14,%r15
  2871. # qhasm: rx4 &= mulredmask
  2872. # asm 1: and <mulredmask=int64#3,<rx4=int64#12
  2873. # asm 2: and <mulredmask=%rdx,<rx4=%r14
  2874. and %rdx,%r14
  2875. # qhasm: rx4 += mulr31
  2876. # asm 1: add <mulr31=int64#11,<rx4=int64#12
  2877. # asm 2: add <mulr31=%r13,<rx4=%r14
  2878. add %r13,%r14
  2879. # qhasm: mulr41 = mulr41 * 19
  2880. # asm 1: imulq $19,<mulr41=int64#13,>mulr41=int64#4
  2881. # asm 2: imulq $19,<mulr41=%r15,>mulr41=%rcx
  2882. imulq $19,%r15,%rcx
  2883. # qhasm: rx0 += mulr41
  2884. # asm 1: add <mulr41=int64#4,<rx0=int64#2
  2885. # asm 2: add <mulr41=%rcx,<rx0=%rsi
  2886. add %rcx,%rsi
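# NOTE (annotation): the reduction above splits each 128-bit accumulator
# into a low 51-bit part and a high part.  Because the accumulator fits well
# below 2^115, "(hi:lo) >> 51" can be computed with shld $13 (hi shifted up
# by 13, filled with the top 13 bits of lo), and the low part is
# lo & REDMASK51.  The high part is added into the next limb, and the top
# overflow (mulr41) is folded into limb 0 with weight 19.  The carry chain
# that follows squeezes each limb back to roughly 51 bits.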
  2887. # qhasm: mult = rx0
  2888. # asm 1: mov <rx0=int64#2,>mult=int64#4
  2889. # asm 2: mov <rx0=%rsi,>mult=%rcx
  2890. mov %rsi,%rcx
  2891. # qhasm: (uint64) mult >>= 51
  2892. # asm 1: shr $51,<mult=int64#4
  2893. # asm 2: shr $51,<mult=%rcx
  2894. shr $51,%rcx
  2895. # qhasm: mult += rx1
  2896. # asm 1: add <rx1=int64#5,<mult=int64#4
  2897. # asm 2: add <rx1=%r8,<mult=%rcx
  2898. add %r8,%rcx
  2899. # qhasm: rx1 = mult
  2900. # asm 1: mov <mult=int64#4,>rx1=int64#5
  2901. # asm 2: mov <mult=%rcx,>rx1=%r8
  2902. mov %rcx,%r8
  2903. # qhasm: (uint64) mult >>= 51
  2904. # asm 1: shr $51,<mult=int64#4
  2905. # asm 2: shr $51,<mult=%rcx
  2906. shr $51,%rcx
  2907. # qhasm: rx0 &= mulredmask
  2908. # asm 1: and <mulredmask=int64#3,<rx0=int64#2
  2909. # asm 2: and <mulredmask=%rdx,<rx0=%rsi
  2910. and %rdx,%rsi
  2911. # qhasm: mult += rx2
  2912. # asm 1: add <rx2=int64#8,<mult=int64#4
  2913. # asm 2: add <rx2=%r10,<mult=%rcx
  2914. add %r10,%rcx
  2915. # qhasm: rx2 = mult
  2916. # asm 1: mov <mult=int64#4,>rx2=int64#6
  2917. # asm 2: mov <mult=%rcx,>rx2=%r9
  2918. mov %rcx,%r9
  2919. # qhasm: (uint64) mult >>= 51
  2920. # asm 1: shr $51,<mult=int64#4
  2921. # asm 2: shr $51,<mult=%rcx
  2922. shr $51,%rcx
  2923. # qhasm: rx1 &= mulredmask
  2924. # asm 1: and <mulredmask=int64#3,<rx1=int64#5
  2925. # asm 2: and <mulredmask=%rdx,<rx1=%r8
  2926. and %rdx,%r8
  2927. # qhasm: mult += rx3
  2928. # asm 1: add <rx3=int64#10,<mult=int64#4
  2929. # asm 2: add <rx3=%r12,<mult=%rcx
  2930. add %r12,%rcx
  2931. # qhasm: rx3 = mult
  2932. # asm 1: mov <mult=int64#4,>rx3=int64#7
  2933. # asm 2: mov <mult=%rcx,>rx3=%rax
  2934. mov %rcx,%rax
  2935. # qhasm: (uint64) mult >>= 51
  2936. # asm 1: shr $51,<mult=int64#4
  2937. # asm 2: shr $51,<mult=%rcx
  2938. shr $51,%rcx
  2939. # qhasm: rx2 &= mulredmask
  2940. # asm 1: and <mulredmask=int64#3,<rx2=int64#6
  2941. # asm 2: and <mulredmask=%rdx,<rx2=%r9
  2942. and %rdx,%r9
  2943. # qhasm: mult += rx4
  2944. # asm 1: add <rx4=int64#12,<mult=int64#4
  2945. # asm 2: add <rx4=%r14,<mult=%rcx
  2946. add %r14,%rcx
  2947. # qhasm: rx4 = mult
  2948. # asm 1: mov <mult=int64#4,>rx4=int64#8
  2949. # asm 2: mov <mult=%rcx,>rx4=%r10
  2950. mov %rcx,%r10
  2951. # qhasm: (uint64) mult >>= 51
  2952. # asm 1: shr $51,<mult=int64#4
  2953. # asm 2: shr $51,<mult=%rcx
  2954. shr $51,%rcx
  2955. # qhasm: rx3 &= mulredmask
  2956. # asm 1: and <mulredmask=int64#3,<rx3=int64#7
  2957. # asm 2: and <mulredmask=%rdx,<rx3=%rax
  2958. and %rdx,%rax
  2959. # qhasm: mult *= 19
  2960. # asm 1: imulq $19,<mult=int64#4,>mult=int64#4
  2961. # asm 2: imulq $19,<mult=%rcx,>mult=%rcx
  2962. imulq $19,%rcx,%rcx
  2963. # qhasm: rx0 += mult
  2964. # asm 1: add <mult=int64#4,<rx0=int64#2
  2965. # asm 2: add <mult=%rcx,<rx0=%rsi
  2966. add %rcx,%rsi
  2967. # qhasm: rx4 &= mulredmask
  2968. # asm 1: and <mulredmask=int64#3,<rx4=int64#8
  2969. # asm 2: and <mulredmask=%rdx,<rx4=%r10
  2970. and %rdx,%r10
  2971. # qhasm: *(uint64 *)(rp + 0) = rx0
  2972. # asm 1: movq <rx0=int64#2,0(<rp=int64#1)
  2973. # asm 2: movq <rx0=%rsi,0(<rp=%rdi)
  2974. movq %rsi,0(%rdi)
  2975. # qhasm: *(uint64 *)(rp + 8) = rx1
  2976. # asm 1: movq <rx1=int64#5,8(<rp=int64#1)
  2977. # asm 2: movq <rx1=%r8,8(<rp=%rdi)
  2978. movq %r8,8(%rdi)
  2979. # qhasm: *(uint64 *)(rp + 16) = rx2
  2980. # asm 1: movq <rx2=int64#6,16(<rp=int64#1)
  2981. # asm 2: movq <rx2=%r9,16(<rp=%rdi)
  2982. movq %r9,16(%rdi)
  2983. # qhasm: *(uint64 *)(rp + 24) = rx3
  2984. # asm 1: movq <rx3=int64#7,24(<rp=int64#1)
  2985. # asm 2: movq <rx3=%rax,24(<rp=%rdi)
  2986. movq %rax,24(%rdi)
  2987. # qhasm: *(uint64 *)(rp + 32) = rx4
  2988. # asm 1: movq <rx4=int64#8,32(<rp=int64#1)
  2989. # asm 2: movq <rx4=%r10,32(<rp=%rdi)
  2990. movq %r10,32(%rdi)
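# NOTE (annotation): the reduced product e*f has been written to rp+0..32,
# the X coordinate slot in the p3 layout.  The code below repeats the same
# multiply-and-reduce pattern with h and g, writing the result to rp+40..72,
# the Y coordinate slot.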
  2991. # qhasm: mulrax = h3_stack
  2992. # asm 1: movq <h3_stack=stack64#11,>mulrax=int64#2
  2993. # asm 2: movq <h3_stack=80(%rsp),>mulrax=%rsi
  2994. movq 80(%rsp),%rsi
  2995. # qhasm: mulrax *= 19
  2996. # asm 1: imulq $19,<mulrax=int64#2,>mulrax=int64#7
  2997. # asm 2: imulq $19,<mulrax=%rsi,>mulrax=%rax
  2998. imulq $19,%rsi,%rax
  2999. # qhasm: mulx319_stack = mulrax
  3000. # asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#28
  3001. # asm 2: movq <mulrax=%rax,>mulx319_stack=216(%rsp)
  3002. movq %rax,216(%rsp)
  3003. # qhasm: (uint128) mulrdx mulrax = mulrax * g2_stack
  3004. # asm 1: mulq <g2_stack=stack64#20
  3005. # asm 2: mulq <g2_stack=152(%rsp)
  3006. mulq 152(%rsp)
  3007. # qhasm: ry0 = mulrax
  3008. # asm 1: mov <mulrax=int64#7,>ry0=int64#2
  3009. # asm 2: mov <mulrax=%rax,>ry0=%rsi
  3010. mov %rax,%rsi
  3011. # qhasm: mulr01 = mulrdx
  3012. # asm 1: mov <mulrdx=int64#3,>mulr01=int64#4
  3013. # asm 2: mov <mulrdx=%rdx,>mulr01=%rcx
  3014. mov %rdx,%rcx
  3015. # qhasm: mulrax = h4_stack
  3016. # asm 1: movq <h4_stack=stack64#12,>mulrax=int64#3
  3017. # asm 2: movq <h4_stack=88(%rsp),>mulrax=%rdx
  3018. movq 88(%rsp),%rdx
  3019. # qhasm: mulrax *= 19
  3020. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  3021. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  3022. imulq $19,%rdx,%rax
  3023. # qhasm: mulx419_stack = mulrax
  3024. # asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#29
  3025. # asm 2: movq <mulrax=%rax,>mulx419_stack=224(%rsp)
  3026. movq %rax,224(%rsp)
  3027. # qhasm: (uint128) mulrdx mulrax = mulrax * g1_stack
  3028. # asm 1: mulq <g1_stack=stack64#19
  3029. # asm 2: mulq <g1_stack=144(%rsp)
  3030. mulq 144(%rsp)
  3031. # qhasm: carry? ry0 += mulrax
  3032. # asm 1: add <mulrax=int64#7,<ry0=int64#2
  3033. # asm 2: add <mulrax=%rax,<ry0=%rsi
  3034. add %rax,%rsi
  3035. # qhasm: mulr01 += mulrdx + carry
  3036. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  3037. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  3038. adc %rdx,%rcx
  3039. # qhasm: mulrax = h0_stack
  3040. # asm 1: movq <h0_stack=stack64#8,>mulrax=int64#7
  3041. # asm 2: movq <h0_stack=56(%rsp),>mulrax=%rax
  3042. movq 56(%rsp),%rax
  3043. # qhasm: (uint128) mulrdx mulrax = mulrax * g0_stack
  3044. # asm 1: mulq <g0_stack=stack64#18
  3045. # asm 2: mulq <g0_stack=136(%rsp)
  3046. mulq 136(%rsp)
  3047. # qhasm: carry? ry0 += mulrax
  3048. # asm 1: add <mulrax=int64#7,<ry0=int64#2
  3049. # asm 2: add <mulrax=%rax,<ry0=%rsi
  3050. add %rax,%rsi
  3051. # qhasm: mulr01 += mulrdx + carry
  3052. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  3053. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  3054. adc %rdx,%rcx
  3055. # qhasm: mulrax = h0_stack
  3056. # asm 1: movq <h0_stack=stack64#8,>mulrax=int64#7
  3057. # asm 2: movq <h0_stack=56(%rsp),>mulrax=%rax
  3058. movq 56(%rsp),%rax
  3059. # qhasm: (uint128) mulrdx mulrax = mulrax * g1_stack
  3060. # asm 1: mulq <g1_stack=stack64#19
  3061. # asm 2: mulq <g1_stack=144(%rsp)
  3062. mulq 144(%rsp)
  3063. # qhasm: ry1 = mulrax
  3064. # asm 1: mov <mulrax=int64#7,>ry1=int64#5
  3065. # asm 2: mov <mulrax=%rax,>ry1=%r8
  3066. mov %rax,%r8
  3067. # qhasm: mulr11 = mulrdx
  3068. # asm 1: mov <mulrdx=int64#3,>mulr11=int64#6
  3069. # asm 2: mov <mulrdx=%rdx,>mulr11=%r9
  3070. mov %rdx,%r9
  3071. # qhasm: mulrax = h0_stack
  3072. # asm 1: movq <h0_stack=stack64#8,>mulrax=int64#7
  3073. # asm 2: movq <h0_stack=56(%rsp),>mulrax=%rax
  3074. movq 56(%rsp),%rax
  3075. # qhasm: (uint128) mulrdx mulrax = mulrax * g2_stack
  3076. # asm 1: mulq <g2_stack=stack64#20
  3077. # asm 2: mulq <g2_stack=152(%rsp)
  3078. mulq 152(%rsp)
  3079. # qhasm: ry2 = mulrax
  3080. # asm 1: mov <mulrax=int64#7,>ry2=int64#8
  3081. # asm 2: mov <mulrax=%rax,>ry2=%r10
  3082. mov %rax,%r10
  3083. # qhasm: mulr21 = mulrdx
  3084. # asm 1: mov <mulrdx=int64#3,>mulr21=int64#9
  3085. # asm 2: mov <mulrdx=%rdx,>mulr21=%r11
  3086. mov %rdx,%r11
  3087. # qhasm: mulrax = h0_stack
  3088. # asm 1: movq <h0_stack=stack64#8,>mulrax=int64#7
  3089. # asm 2: movq <h0_stack=56(%rsp),>mulrax=%rax
  3090. movq 56(%rsp),%rax
  3091. # qhasm: (uint128) mulrdx mulrax = mulrax * g3_stack
  3092. # asm 1: mulq <g3_stack=stack64#21
  3093. # asm 2: mulq <g3_stack=160(%rsp)
  3094. mulq 160(%rsp)
  3095. # qhasm: ry3 = mulrax
  3096. # asm 1: mov <mulrax=int64#7,>ry3=int64#10
  3097. # asm 2: mov <mulrax=%rax,>ry3=%r12
  3098. mov %rax,%r12
  3099. # qhasm: mulr31 = mulrdx
  3100. # asm 1: mov <mulrdx=int64#3,>mulr31=int64#11
  3101. # asm 2: mov <mulrdx=%rdx,>mulr31=%r13
  3102. mov %rdx,%r13
  3103. # qhasm: mulrax = h0_stack
  3104. # asm 1: movq <h0_stack=stack64#8,>mulrax=int64#7
  3105. # asm 2: movq <h0_stack=56(%rsp),>mulrax=%rax
  3106. movq 56(%rsp),%rax
  3107. # qhasm: (uint128) mulrdx mulrax = mulrax * g4_stack
  3108. # asm 1: mulq <g4_stack=stack64#22
  3109. # asm 2: mulq <g4_stack=168(%rsp)
  3110. mulq 168(%rsp)
  3111. # qhasm: ry4 = mulrax
  3112. # asm 1: mov <mulrax=int64#7,>ry4=int64#12
  3113. # asm 2: mov <mulrax=%rax,>ry4=%r14
  3114. mov %rax,%r14
  3115. # qhasm: mulr41 = mulrdx
  3116. # asm 1: mov <mulrdx=int64#3,>mulr41=int64#13
  3117. # asm 2: mov <mulrdx=%rdx,>mulr41=%r15
  3118. mov %rdx,%r15
  3119. # qhasm: mulrax = h1_stack
  3120. # asm 1: movq <h1_stack=stack64#9,>mulrax=int64#7
  3121. # asm 2: movq <h1_stack=64(%rsp),>mulrax=%rax
  3122. movq 64(%rsp),%rax
  3123. # qhasm: (uint128) mulrdx mulrax = mulrax * g0_stack
  3124. # asm 1: mulq <g0_stack=stack64#18
  3125. # asm 2: mulq <g0_stack=136(%rsp)
  3126. mulq 136(%rsp)
  3127. # qhasm: carry? ry1 += mulrax
  3128. # asm 1: add <mulrax=int64#7,<ry1=int64#5
  3129. # asm 2: add <mulrax=%rax,<ry1=%r8
  3130. add %rax,%r8
  3131. # qhasm: mulr11 += mulrdx + carry
  3132. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  3133. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  3134. adc %rdx,%r9
  3135. # qhasm: mulrax = h1_stack
  3136. # asm 1: movq <h1_stack=stack64#9,>mulrax=int64#7
  3137. # asm 2: movq <h1_stack=64(%rsp),>mulrax=%rax
  3138. movq 64(%rsp),%rax
  3139. # qhasm: (uint128) mulrdx mulrax = mulrax * g1_stack
  3140. # asm 1: mulq <g1_stack=stack64#19
  3141. # asm 2: mulq <g1_stack=144(%rsp)
  3142. mulq 144(%rsp)
  3143. # qhasm: carry? ry2 += mulrax
  3144. # asm 1: add <mulrax=int64#7,<ry2=int64#8
  3145. # asm 2: add <mulrax=%rax,<ry2=%r10
  3146. add %rax,%r10
  3147. # qhasm: mulr21 += mulrdx + carry
  3148. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  3149. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  3150. adc %rdx,%r11
  3151. # qhasm: mulrax = h1_stack
  3152. # asm 1: movq <h1_stack=stack64#9,>mulrax=int64#7
  3153. # asm 2: movq <h1_stack=64(%rsp),>mulrax=%rax
  3154. movq 64(%rsp),%rax
  3155. # qhasm: (uint128) mulrdx mulrax = mulrax * g2_stack
  3156. # asm 1: mulq <g2_stack=stack64#20
  3157. # asm 2: mulq <g2_stack=152(%rsp)
  3158. mulq 152(%rsp)
  3159. # qhasm: carry? ry3 += mulrax
  3160. # asm 1: add <mulrax=int64#7,<ry3=int64#10
  3161. # asm 2: add <mulrax=%rax,<ry3=%r12
  3162. add %rax,%r12
  3163. # qhasm: mulr31 += mulrdx + carry
  3164. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  3165. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  3166. adc %rdx,%r13
  3167. # qhasm: mulrax = h1_stack
  3168. # asm 1: movq <h1_stack=stack64#9,>mulrax=int64#7
  3169. # asm 2: movq <h1_stack=64(%rsp),>mulrax=%rax
  3170. movq 64(%rsp),%rax
  3171. # qhasm: (uint128) mulrdx mulrax = mulrax * g3_stack
  3172. # asm 1: mulq <g3_stack=stack64#21
  3173. # asm 2: mulq <g3_stack=160(%rsp)
  3174. mulq 160(%rsp)
  3175. # qhasm: carry? ry4 += mulrax
  3176. # asm 1: add <mulrax=int64#7,<ry4=int64#12
  3177. # asm 2: add <mulrax=%rax,<ry4=%r14
  3178. add %rax,%r14
  3179. # qhasm: mulr41 += mulrdx + carry
  3180. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  3181. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  3182. adc %rdx,%r15
  3183. # qhasm: mulrax = h1_stack
  3184. # asm 1: movq <h1_stack=stack64#9,>mulrax=int64#3
  3185. # asm 2: movq <h1_stack=64(%rsp),>mulrax=%rdx
  3186. movq 64(%rsp),%rdx
  3187. # qhasm: mulrax *= 19
  3188. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  3189. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  3190. imulq $19,%rdx,%rax
  3191. # qhasm: (uint128) mulrdx mulrax = mulrax * g4_stack
  3192. # asm 1: mulq <g4_stack=stack64#22
  3193. # asm 2: mulq <g4_stack=168(%rsp)
  3194. mulq 168(%rsp)
  3195. # qhasm: carry? ry0 += mulrax
  3196. # asm 1: add <mulrax=int64#7,<ry0=int64#2
  3197. # asm 2: add <mulrax=%rax,<ry0=%rsi
  3198. add %rax,%rsi
  3199. # qhasm: mulr01 += mulrdx + carry
  3200. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  3201. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  3202. adc %rdx,%rcx
  3203. # qhasm: mulrax = h2_stack
  3204. # asm 1: movq <h2_stack=stack64#10,>mulrax=int64#7
  3205. # asm 2: movq <h2_stack=72(%rsp),>mulrax=%rax
  3206. movq 72(%rsp),%rax
  3207. # qhasm: (uint128) mulrdx mulrax = mulrax * g0_stack
  3208. # asm 1: mulq <g0_stack=stack64#18
  3209. # asm 2: mulq <g0_stack=136(%rsp)
  3210. mulq 136(%rsp)
  3211. # qhasm: carry? ry2 += mulrax
  3212. # asm 1: add <mulrax=int64#7,<ry2=int64#8
  3213. # asm 2: add <mulrax=%rax,<ry2=%r10
  3214. add %rax,%r10
  3215. # qhasm: mulr21 += mulrdx + carry
  3216. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  3217. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  3218. adc %rdx,%r11
  3219. # qhasm: mulrax = h2_stack
  3220. # asm 1: movq <h2_stack=stack64#10,>mulrax=int64#7
  3221. # asm 2: movq <h2_stack=72(%rsp),>mulrax=%rax
  3222. movq 72(%rsp),%rax
  3223. # qhasm: (uint128) mulrdx mulrax = mulrax * g1_stack
  3224. # asm 1: mulq <g1_stack=stack64#19
  3225. # asm 2: mulq <g1_stack=144(%rsp)
  3226. mulq 144(%rsp)
  3227. # qhasm: carry? ry3 += mulrax
  3228. # asm 1: add <mulrax=int64#7,<ry3=int64#10
  3229. # asm 2: add <mulrax=%rax,<ry3=%r12
  3230. add %rax,%r12
  3231. # qhasm: mulr31 += mulrdx + carry
  3232. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  3233. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  3234. adc %rdx,%r13
  3235. # qhasm: mulrax = h2_stack
  3236. # asm 1: movq <h2_stack=stack64#10,>mulrax=int64#7
  3237. # asm 2: movq <h2_stack=72(%rsp),>mulrax=%rax
  3238. movq 72(%rsp),%rax
  3239. # qhasm: (uint128) mulrdx mulrax = mulrax * g2_stack
  3240. # asm 1: mulq <g2_stack=stack64#20
  3241. # asm 2: mulq <g2_stack=152(%rsp)
  3242. mulq 152(%rsp)
  3243. # qhasm: carry? ry4 += mulrax
  3244. # asm 1: add <mulrax=int64#7,<ry4=int64#12
  3245. # asm 2: add <mulrax=%rax,<ry4=%r14
  3246. add %rax,%r14
  3247. # qhasm: mulr41 += mulrdx + carry
  3248. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  3249. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  3250. adc %rdx,%r15
  3251. # qhasm: mulrax = h2_stack
  3252. # asm 1: movq <h2_stack=stack64#10,>mulrax=int64#3
  3253. # asm 2: movq <h2_stack=72(%rsp),>mulrax=%rdx
  3254. movq 72(%rsp),%rdx
  3255. # qhasm: mulrax *= 19
  3256. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  3257. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  3258. imulq $19,%rdx,%rax
  3259. # qhasm: (uint128) mulrdx mulrax = mulrax * g3_stack
  3260. # asm 1: mulq <g3_stack=stack64#21
  3261. # asm 2: mulq <g3_stack=160(%rsp)
  3262. mulq 160(%rsp)
  3263. # qhasm: carry? ry0 += mulrax
  3264. # asm 1: add <mulrax=int64#7,<ry0=int64#2
  3265. # asm 2: add <mulrax=%rax,<ry0=%rsi
  3266. add %rax,%rsi
  3267. # qhasm: mulr01 += mulrdx + carry
  3268. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  3269. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  3270. adc %rdx,%rcx
  3271. # qhasm: mulrax = h2_stack
  3272. # asm 1: movq <h2_stack=stack64#10,>mulrax=int64#3
  3273. # asm 2: movq <h2_stack=72(%rsp),>mulrax=%rdx
  3274. movq 72(%rsp),%rdx
  3275. # qhasm: mulrax *= 19
  3276. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  3277. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  3278. imulq $19,%rdx,%rax
  3279. # qhasm: (uint128) mulrdx mulrax = mulrax * g4_stack
  3280. # asm 1: mulq <g4_stack=stack64#22
  3281. # asm 2: mulq <g4_stack=168(%rsp)
  3282. mulq 168(%rsp)
  3283. # qhasm: carry? ry1 += mulrax
  3284. # asm 1: add <mulrax=int64#7,<ry1=int64#5
  3285. # asm 2: add <mulrax=%rax,<ry1=%r8
  3286. add %rax,%r8
  3287. # qhasm: mulr11 += mulrdx + carry
  3288. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  3289. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  3290. adc %rdx,%r9
  3291. # qhasm: mulrax = h3_stack
  3292. # asm 1: movq <h3_stack=stack64#11,>mulrax=int64#7
  3293. # asm 2: movq <h3_stack=80(%rsp),>mulrax=%rax
  3294. movq 80(%rsp),%rax
  3295. # qhasm: (uint128) mulrdx mulrax = mulrax * g0_stack
  3296. # asm 1: mulq <g0_stack=stack64#18
  3297. # asm 2: mulq <g0_stack=136(%rsp)
  3298. mulq 136(%rsp)
  3299. # qhasm: carry? ry3 += mulrax
  3300. # asm 1: add <mulrax=int64#7,<ry3=int64#10
  3301. # asm 2: add <mulrax=%rax,<ry3=%r12
  3302. add %rax,%r12
  3303. # qhasm: mulr31 += mulrdx + carry
  3304. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  3305. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  3306. adc %rdx,%r13
  3307. # qhasm: mulrax = h3_stack
  3308. # asm 1: movq <h3_stack=stack64#11,>mulrax=int64#7
  3309. # asm 2: movq <h3_stack=80(%rsp),>mulrax=%rax
  3310. movq 80(%rsp),%rax
  3311. # qhasm: (uint128) mulrdx mulrax = mulrax * g1_stack
  3312. # asm 1: mulq <g1_stack=stack64#19
  3313. # asm 2: mulq <g1_stack=144(%rsp)
  3314. mulq 144(%rsp)
  3315. # qhasm: carry? ry4 += mulrax
  3316. # asm 1: add <mulrax=int64#7,<ry4=int64#12
  3317. # asm 2: add <mulrax=%rax,<ry4=%r14
  3318. add %rax,%r14
  3319. # qhasm: mulr41 += mulrdx + carry
  3320. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  3321. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  3322. adc %rdx,%r15
  3323. # qhasm: mulrax = mulx319_stack
  3324. # asm 1: movq <mulx319_stack=stack64#28,>mulrax=int64#7
  3325. # asm 2: movq <mulx319_stack=216(%rsp),>mulrax=%rax
  3326. movq 216(%rsp),%rax
  3327. # qhasm: (uint128) mulrdx mulrax = mulrax * g3_stack
  3328. # asm 1: mulq <g3_stack=stack64#21
  3329. # asm 2: mulq <g3_stack=160(%rsp)
  3330. mulq 160(%rsp)
  3331. # qhasm: carry? ry1 += mulrax
  3332. # asm 1: add <mulrax=int64#7,<ry1=int64#5
  3333. # asm 2: add <mulrax=%rax,<ry1=%r8
  3334. add %rax,%r8
  3335. # qhasm: mulr11 += mulrdx + carry
  3336. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  3337. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  3338. adc %rdx,%r9
  3339. # qhasm: mulrax = mulx319_stack
  3340. # asm 1: movq <mulx319_stack=stack64#28,>mulrax=int64#7
  3341. # asm 2: movq <mulx319_stack=216(%rsp),>mulrax=%rax
  3342. movq 216(%rsp),%rax
  3343. # qhasm: (uint128) mulrdx mulrax = mulrax * g4_stack
  3344. # asm 1: mulq <g4_stack=stack64#22
  3345. # asm 2: mulq <g4_stack=168(%rsp)
  3346. mulq 168(%rsp)
  3347. # qhasm: carry? ry2 += mulrax
  3348. # asm 1: add <mulrax=int64#7,<ry2=int64#8
  3349. # asm 2: add <mulrax=%rax,<ry2=%r10
  3350. add %rax,%r10
  3351. # qhasm: mulr21 += mulrdx + carry
  3352. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  3353. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  3354. adc %rdx,%r11
  3355. # qhasm: mulrax = h4_stack
  3356. # asm 1: movq <h4_stack=stack64#12,>mulrax=int64#7
  3357. # asm 2: movq <h4_stack=88(%rsp),>mulrax=%rax
  3358. movq 88(%rsp),%rax
  3359. # qhasm: (uint128) mulrdx mulrax = mulrax * g0_stack
  3360. # asm 1: mulq <g0_stack=stack64#18
  3361. # asm 2: mulq <g0_stack=136(%rsp)
  3362. mulq 136(%rsp)
  3363. # qhasm: carry? ry4 += mulrax
  3364. # asm 1: add <mulrax=int64#7,<ry4=int64#12
  3365. # asm 2: add <mulrax=%rax,<ry4=%r14
  3366. add %rax,%r14
  3367. # qhasm: mulr41 += mulrdx + carry
  3368. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  3369. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  3370. adc %rdx,%r15
  3371. # qhasm: mulrax = mulx419_stack
  3372. # asm 1: movq <mulx419_stack=stack64#29,>mulrax=int64#7
  3373. # asm 2: movq <mulx419_stack=224(%rsp),>mulrax=%rax
  3374. movq 224(%rsp),%rax
  3375. # qhasm: (uint128) mulrdx mulrax = mulrax * g2_stack
  3376. # asm 1: mulq <g2_stack=stack64#20
  3377. # asm 2: mulq <g2_stack=152(%rsp)
  3378. mulq 152(%rsp)
  3379. # qhasm: carry? ry1 += mulrax
  3380. # asm 1: add <mulrax=int64#7,<ry1=int64#5
  3381. # asm 2: add <mulrax=%rax,<ry1=%r8
  3382. add %rax,%r8
  3383. # qhasm: mulr11 += mulrdx + carry
  3384. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  3385. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  3386. adc %rdx,%r9
  3387. # qhasm: mulrax = mulx419_stack
  3388. # asm 1: movq <mulx419_stack=stack64#29,>mulrax=int64#7
  3389. # asm 2: movq <mulx419_stack=224(%rsp),>mulrax=%rax
  3390. movq 224(%rsp),%rax
  3391. # qhasm: (uint128) mulrdx mulrax = mulrax * g3_stack
  3392. # asm 1: mulq <g3_stack=stack64#21
  3393. # asm 2: mulq <g3_stack=160(%rsp)
  3394. mulq 160(%rsp)
  3395. # qhasm: carry? ry2 += mulrax
  3396. # asm 1: add <mulrax=int64#7,<ry2=int64#8
  3397. # asm 2: add <mulrax=%rax,<ry2=%r10
  3398. add %rax,%r10
  3399. # qhasm: mulr21 += mulrdx + carry
  3400. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  3401. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  3402. adc %rdx,%r11
  3403. # qhasm: mulrax = mulx419_stack
  3404. # asm 1: movq <mulx419_stack=stack64#29,>mulrax=int64#7
  3405. # asm 2: movq <mulx419_stack=224(%rsp),>mulrax=%rax
  3406. movq 224(%rsp),%rax
  3407. # qhasm: (uint128) mulrdx mulrax = mulrax * g4_stack
  3408. # asm 1: mulq <g4_stack=stack64#22
  3409. # asm 2: mulq <g4_stack=168(%rsp)
  3410. mulq 168(%rsp)
  3411. # qhasm: carry? ry3 += mulrax
  3412. # asm 1: add <mulrax=int64#7,<ry3=int64#10
  3413. # asm 2: add <mulrax=%rax,<ry3=%r12
  3414. add %rax,%r12
  3415. # qhasm: mulr31 += mulrdx + carry
  3416. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  3417. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  3418. adc %rdx,%r13
  3419. # qhasm: mulredmask = *(uint64 *) &CRYPTO_NAMESPACE(batch_REDMASK51)
  3420. # asm 1: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=int64#3
  3421. # asm 2: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=%rdx
  3422. movq CRYPTO_NAMESPACE(batch_REDMASK51)(%rip),%rdx
  3423. # qhasm: mulr01 = (mulr01.ry0) << 13
  3424. # asm 1: shld $13,<ry0=int64#2,<mulr01=int64#4
  3425. # asm 2: shld $13,<ry0=%rsi,<mulr01=%rcx
  3426. shld $13,%rsi,%rcx
  3427. # qhasm: ry0 &= mulredmask
  3428. # asm 1: and <mulredmask=int64#3,<ry0=int64#2
  3429. # asm 2: and <mulredmask=%rdx,<ry0=%rsi
  3430. and %rdx,%rsi
  3431. # qhasm: mulr11 = (mulr11.ry1) << 13
  3432. # asm 1: shld $13,<ry1=int64#5,<mulr11=int64#6
  3433. # asm 2: shld $13,<ry1=%r8,<mulr11=%r9
  3434. shld $13,%r8,%r9
  3435. # qhasm: ry1 &= mulredmask
  3436. # asm 1: and <mulredmask=int64#3,<ry1=int64#5
  3437. # asm 2: and <mulredmask=%rdx,<ry1=%r8
  3438. and %rdx,%r8
  3439. # qhasm: ry1 += mulr01
  3440. # asm 1: add <mulr01=int64#4,<ry1=int64#5
  3441. # asm 2: add <mulr01=%rcx,<ry1=%r8
  3442. add %rcx,%r8
  3443. # qhasm: mulr21 = (mulr21.ry2) << 13
  3444. # asm 1: shld $13,<ry2=int64#8,<mulr21=int64#9
  3445. # asm 2: shld $13,<ry2=%r10,<mulr21=%r11
  3446. shld $13,%r10,%r11
  3447. # qhasm: ry2 &= mulredmask
  3448. # asm 1: and <mulredmask=int64#3,<ry2=int64#8
  3449. # asm 2: and <mulredmask=%rdx,<ry2=%r10
  3450. and %rdx,%r10
  3451. # qhasm: ry2 += mulr11
  3452. # asm 1: add <mulr11=int64#6,<ry2=int64#8
  3453. # asm 2: add <mulr11=%r9,<ry2=%r10
  3454. add %r9,%r10
  3455. # qhasm: mulr31 = (mulr31.ry3) << 13
  3456. # asm 1: shld $13,<ry3=int64#10,<mulr31=int64#11
  3457. # asm 2: shld $13,<ry3=%r12,<mulr31=%r13
  3458. shld $13,%r12,%r13
  3459. # qhasm: ry3 &= mulredmask
  3460. # asm 1: and <mulredmask=int64#3,<ry3=int64#10
  3461. # asm 2: and <mulredmask=%rdx,<ry3=%r12
  3462. and %rdx,%r12
  3463. # qhasm: ry3 += mulr21
  3464. # asm 1: add <mulr21=int64#9,<ry3=int64#10
  3465. # asm 2: add <mulr21=%r11,<ry3=%r12
  3466. add %r11,%r12
  3467. # qhasm: mulr41 = (mulr41.ry4) << 13
  3468. # asm 1: shld $13,<ry4=int64#12,<mulr41=int64#13
  3469. # asm 2: shld $13,<ry4=%r14,<mulr41=%r15
  3470. shld $13,%r14,%r15
  3471. # qhasm: ry4 &= mulredmask
  3472. # asm 1: and <mulredmask=int64#3,<ry4=int64#12
  3473. # asm 2: and <mulredmask=%rdx,<ry4=%r14
  3474. and %rdx,%r14
  3475. # qhasm: ry4 += mulr31
  3476. # asm 1: add <mulr31=int64#11,<ry4=int64#12
  3477. # asm 2: add <mulr31=%r13,<ry4=%r14
  3478. add %r13,%r14
  3479. # qhasm: mulr41 = mulr41 * 19
  3480. # asm 1: imulq $19,<mulr41=int64#13,>mulr41=int64#4
  3481. # asm 2: imulq $19,<mulr41=%r15,>mulr41=%rcx
  3482. imulq $19,%r15,%rcx
  3483. # qhasm: ry0 += mulr41
  3484. # asm 1: add <mulr41=int64#4,<ry0=int64#2
  3485. # asm 2: add <mulr41=%rcx,<ry0=%rsi
  3486. add %rcx,%rsi
  3487. # qhasm: mult = ry0
  3488. # asm 1: mov <ry0=int64#2,>mult=int64#4
  3489. # asm 2: mov <ry0=%rsi,>mult=%rcx
  3490. mov %rsi,%rcx
  3491. # qhasm: (uint64) mult >>= 51
  3492. # asm 1: shr $51,<mult=int64#4
  3493. # asm 2: shr $51,<mult=%rcx
  3494. shr $51,%rcx
  3495. # qhasm: mult += ry1
  3496. # asm 1: add <ry1=int64#5,<mult=int64#4
  3497. # asm 2: add <ry1=%r8,<mult=%rcx
  3498. add %r8,%rcx
  3499. # qhasm: ry1 = mult
  3500. # asm 1: mov <mult=int64#4,>ry1=int64#5
  3501. # asm 2: mov <mult=%rcx,>ry1=%r8
  3502. mov %rcx,%r8
  3503. # qhasm: (uint64) mult >>= 51
  3504. # asm 1: shr $51,<mult=int64#4
  3505. # asm 2: shr $51,<mult=%rcx
  3506. shr $51,%rcx
  3507. # qhasm: ry0 &= mulredmask
  3508. # asm 1: and <mulredmask=int64#3,<ry0=int64#2
  3509. # asm 2: and <mulredmask=%rdx,<ry0=%rsi
  3510. and %rdx,%rsi
  3511. # qhasm: mult += ry2
  3512. # asm 1: add <ry2=int64#8,<mult=int64#4
  3513. # asm 2: add <ry2=%r10,<mult=%rcx
  3514. add %r10,%rcx
  3515. # qhasm: ry2 = mult
  3516. # asm 1: mov <mult=int64#4,>ry2=int64#6
  3517. # asm 2: mov <mult=%rcx,>ry2=%r9
  3518. mov %rcx,%r9
  3519. # qhasm: (uint64) mult >>= 51
  3520. # asm 1: shr $51,<mult=int64#4
  3521. # asm 2: shr $51,<mult=%rcx
  3522. shr $51,%rcx
  3523. # qhasm: ry1 &= mulredmask
  3524. # asm 1: and <mulredmask=int64#3,<ry1=int64#5
  3525. # asm 2: and <mulredmask=%rdx,<ry1=%r8
  3526. and %rdx,%r8
  3527. # qhasm: mult += ry3
  3528. # asm 1: add <ry3=int64#10,<mult=int64#4
  3529. # asm 2: add <ry3=%r12,<mult=%rcx
  3530. add %r12,%rcx
  3531. # qhasm: ry3 = mult
  3532. # asm 1: mov <mult=int64#4,>ry3=int64#7
  3533. # asm 2: mov <mult=%rcx,>ry3=%rax
  3534. mov %rcx,%rax
  3535. # qhasm: (uint64) mult >>= 51
  3536. # asm 1: shr $51,<mult=int64#4
  3537. # asm 2: shr $51,<mult=%rcx
  3538. shr $51,%rcx
  3539. # qhasm: ry2 &= mulredmask
  3540. # asm 1: and <mulredmask=int64#3,<ry2=int64#6
  3541. # asm 2: and <mulredmask=%rdx,<ry2=%r9
  3542. and %rdx,%r9
  3543. # qhasm: mult += ry4
  3544. # asm 1: add <ry4=int64#12,<mult=int64#4
  3545. # asm 2: add <ry4=%r14,<mult=%rcx
  3546. add %r14,%rcx
  3547. # qhasm: ry4 = mult
  3548. # asm 1: mov <mult=int64#4,>ry4=int64#8
  3549. # asm 2: mov <mult=%rcx,>ry4=%r10
  3550. mov %rcx,%r10
  3551. # qhasm: (uint64) mult >>= 51
  3552. # asm 1: shr $51,<mult=int64#4
  3553. # asm 2: shr $51,<mult=%rcx
  3554. shr $51,%rcx
  3555. # qhasm: ry3 &= mulredmask
  3556. # asm 1: and <mulredmask=int64#3,<ry3=int64#7
  3557. # asm 2: and <mulredmask=%rdx,<ry3=%rax
  3558. and %rdx,%rax
  3559. # qhasm: mult *= 19
  3560. # asm 1: imulq $19,<mult=int64#4,>mult=int64#4
  3561. # asm 2: imulq $19,<mult=%rcx,>mult=%rcx
  3562. imulq $19,%rcx,%rcx
  3563. # qhasm: ry0 += mult
  3564. # asm 1: add <mult=int64#4,<ry0=int64#2
  3565. # asm 2: add <mult=%rcx,<ry0=%rsi
  3566. add %rcx,%rsi
  3567. # qhasm: ry4 &= mulredmask
  3568. # asm 1: and <mulredmask=int64#3,<ry4=int64#8
  3569. # asm 2: and <mulredmask=%rdx,<ry4=%r10
  3570. and %rdx,%r10
  3571. # qhasm: *(uint64 *)(rp + 40) = ry0
  3572. # asm 1: movq <ry0=int64#2,40(<rp=int64#1)
  3573. # asm 2: movq <ry0=%rsi,40(<rp=%rdi)
  3574. movq %rsi,40(%rdi)
  3575. # qhasm: *(uint64 *)(rp + 48) = ry1
  3576. # asm 1: movq <ry1=int64#5,48(<rp=int64#1)
  3577. # asm 2: movq <ry1=%r8,48(<rp=%rdi)
  3578. movq %r8,48(%rdi)
  3579. # qhasm: *(uint64 *)(rp + 56) = ry2
  3580. # asm 1: movq <ry2=int64#6,56(<rp=int64#1)
  3581. # asm 2: movq <ry2=%r9,56(<rp=%rdi)
  3582. movq %r9,56(%rdi)
  3583. # qhasm: *(uint64 *)(rp + 64) = ry3
  3584. # asm 1: movq <ry3=int64#7,64(<rp=int64#1)
  3585. # asm 2: movq <ry3=%rax,64(<rp=%rdi)
  3586. movq %rax,64(%rdi)
  3587. # qhasm: *(uint64 *)(rp + 72) = ry4
  3588. # asm 1: movq <ry4=int64#8,72(<rp=int64#1)
  3589. # asm 2: movq <ry4=%r10,72(<rp=%rdi)
  3590. movq %r10,72(%rdi)
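# NOTE (annotation): the reduced product h*g has been written to rp+40..72.
# A third multiplication of the same shape, g*f, begins here.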
  3591. # qhasm: mulrax = g3_stack
  3592. # asm 1: movq <g3_stack=stack64#21,>mulrax=int64#2
  3593. # asm 2: movq <g3_stack=160(%rsp),>mulrax=%rsi
  3594. movq 160(%rsp),%rsi
  3595. # qhasm: mulrax *= 19
  3596. # asm 1: imulq $19,<mulrax=int64#2,>mulrax=int64#7
  3597. # asm 2: imulq $19,<mulrax=%rsi,>mulrax=%rax
  3598. imulq $19,%rsi,%rax
  3599. # qhasm: mulx319_stack = mulrax
  3600. # asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#28
  3601. # asm 2: movq <mulrax=%rax,>mulx319_stack=216(%rsp)
  3602. movq %rax,216(%rsp)
  3603. # qhasm: (uint128) mulrdx mulrax = mulrax * f2_stack
  3604. # asm 1: mulq <f2_stack=stack64#25
  3605. # asm 2: mulq <f2_stack=192(%rsp)
  3606. mulq 192(%rsp)
  3607. # qhasm: rz0 = mulrax
  3608. # asm 1: mov <mulrax=int64#7,>rz0=int64#2
  3609. # asm 2: mov <mulrax=%rax,>rz0=%rsi
  3610. mov %rax,%rsi
  3611. # qhasm: mulr01 = mulrdx
  3612. # asm 1: mov <mulrdx=int64#3,>mulr01=int64#4
  3613. # asm 2: mov <mulrdx=%rdx,>mulr01=%rcx
  3614. mov %rdx,%rcx
  3615. # qhasm: mulrax = g4_stack
  3616. # asm 1: movq <g4_stack=stack64#22,>mulrax=int64#3
  3617. # asm 2: movq <g4_stack=168(%rsp),>mulrax=%rdx
  3618. movq 168(%rsp),%rdx
  3619. # qhasm: mulrax *= 19
  3620. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  3621. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  3622. imulq $19,%rdx,%rax
  3623. # qhasm: mulx419_stack = mulrax
  3624. # asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#29
  3625. # asm 2: movq <mulrax=%rax,>mulx419_stack=224(%rsp)
  3626. movq %rax,224(%rsp)
  3627. # qhasm: (uint128) mulrdx mulrax = mulrax * f1_stack
  3628. # asm 1: mulq <f1_stack=stack64#24
  3629. # asm 2: mulq <f1_stack=184(%rsp)
  3630. mulq 184(%rsp)
  3631. # qhasm: carry? rz0 += mulrax
  3632. # asm 1: add <mulrax=int64#7,<rz0=int64#2
  3633. # asm 2: add <mulrax=%rax,<rz0=%rsi
  3634. add %rax,%rsi
  3635. # qhasm: mulr01 += mulrdx + carry
  3636. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  3637. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  3638. adc %rdx,%rcx
  3639. # qhasm: mulrax = g0_stack
  3640. # asm 1: movq <g0_stack=stack64#18,>mulrax=int64#7
  3641. # asm 2: movq <g0_stack=136(%rsp),>mulrax=%rax
  3642. movq 136(%rsp),%rax
  3643. # qhasm: (uint128) mulrdx mulrax = mulrax * f0_stack
  3644. # asm 1: mulq <f0_stack=stack64#23
  3645. # asm 2: mulq <f0_stack=176(%rsp)
  3646. mulq 176(%rsp)
  3647. # qhasm: carry? rz0 += mulrax
  3648. # asm 1: add <mulrax=int64#7,<rz0=int64#2
  3649. # asm 2: add <mulrax=%rax,<rz0=%rsi
  3650. add %rax,%rsi
  3651. # qhasm: mulr01 += mulrdx + carry
  3652. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  3653. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  3654. adc %rdx,%rcx
  3655. # qhasm: mulrax = g0_stack
  3656. # asm 1: movq <g0_stack=stack64#18,>mulrax=int64#7
  3657. # asm 2: movq <g0_stack=136(%rsp),>mulrax=%rax
  3658. movq 136(%rsp),%rax
  3659. # qhasm: (uint128) mulrdx mulrax = mulrax * f1_stack
  3660. # asm 1: mulq <f1_stack=stack64#24
  3661. # asm 2: mulq <f1_stack=184(%rsp)
  3662. mulq 184(%rsp)
  3663. # qhasm: rz1 = mulrax
  3664. # asm 1: mov <mulrax=int64#7,>rz1=int64#5
  3665. # asm 2: mov <mulrax=%rax,>rz1=%r8
  3666. mov %rax,%r8
  3667. # qhasm: mulr11 = mulrdx
  3668. # asm 1: mov <mulrdx=int64#3,>mulr11=int64#6
  3669. # asm 2: mov <mulrdx=%rdx,>mulr11=%r9
  3670. mov %rdx,%r9
  3671. # qhasm: mulrax = g0_stack
  3672. # asm 1: movq <g0_stack=stack64#18,>mulrax=int64#7
  3673. # asm 2: movq <g0_stack=136(%rsp),>mulrax=%rax
  3674. movq 136(%rsp),%rax
  3675. # qhasm: (uint128) mulrdx mulrax = mulrax * f2_stack
  3676. # asm 1: mulq <f2_stack=stack64#25
  3677. # asm 2: mulq <f2_stack=192(%rsp)
  3678. mulq 192(%rsp)
  3679. # qhasm: rz2 = mulrax
  3680. # asm 1: mov <mulrax=int64#7,>rz2=int64#8
  3681. # asm 2: mov <mulrax=%rax,>rz2=%r10
  3682. mov %rax,%r10
  3683. # qhasm: mulr21 = mulrdx
  3684. # asm 1: mov <mulrdx=int64#3,>mulr21=int64#9
  3685. # asm 2: mov <mulrdx=%rdx,>mulr21=%r11
  3686. mov %rdx,%r11
  3687. # qhasm: mulrax = g0_stack
  3688. # asm 1: movq <g0_stack=stack64#18,>mulrax=int64#7
  3689. # asm 2: movq <g0_stack=136(%rsp),>mulrax=%rax
  3690. movq 136(%rsp),%rax
  3691. # qhasm: (uint128) mulrdx mulrax = mulrax * f3_stack
  3692. # asm 1: mulq <f3_stack=stack64#26
  3693. # asm 2: mulq <f3_stack=200(%rsp)
  3694. mulq 200(%rsp)
  3695. # qhasm: rz3 = mulrax
  3696. # asm 1: mov <mulrax=int64#7,>rz3=int64#10
  3697. # asm 2: mov <mulrax=%rax,>rz3=%r12
  3698. mov %rax,%r12
  3699. # qhasm: mulr31 = mulrdx
  3700. # asm 1: mov <mulrdx=int64#3,>mulr31=int64#11
  3701. # asm 2: mov <mulrdx=%rdx,>mulr31=%r13
  3702. mov %rdx,%r13
  3703. # qhasm: mulrax = g0_stack
  3704. # asm 1: movq <g0_stack=stack64#18,>mulrax=int64#7
  3705. # asm 2: movq <g0_stack=136(%rsp),>mulrax=%rax
  3706. movq 136(%rsp),%rax
  3707. # qhasm: (uint128) mulrdx mulrax = mulrax * f4_stack
  3708. # asm 1: mulq <f4_stack=stack64#27
  3709. # asm 2: mulq <f4_stack=208(%rsp)
  3710. mulq 208(%rsp)
  3711. # qhasm: rz4 = mulrax
  3712. # asm 1: mov <mulrax=int64#7,>rz4=int64#12
  3713. # asm 2: mov <mulrax=%rax,>rz4=%r14
  3714. mov %rax,%r14
  3715. # qhasm: mulr41 = mulrdx
  3716. # asm 1: mov <mulrdx=int64#3,>mulr41=int64#13
  3717. # asm 2: mov <mulrdx=%rdx,>mulr41=%r15
  3718. mov %rdx,%r15
  3719. # qhasm: mulrax = g1_stack
  3720. # asm 1: movq <g1_stack=stack64#19,>mulrax=int64#7
  3721. # asm 2: movq <g1_stack=144(%rsp),>mulrax=%rax
  3722. movq 144(%rsp),%rax
  3723. # qhasm: (uint128) mulrdx mulrax = mulrax * f0_stack
  3724. # asm 1: mulq <f0_stack=stack64#23
  3725. # asm 2: mulq <f0_stack=176(%rsp)
  3726. mulq 176(%rsp)
  3727. # qhasm: carry? rz1 += mulrax
  3728. # asm 1: add <mulrax=int64#7,<rz1=int64#5
  3729. # asm 2: add <mulrax=%rax,<rz1=%r8
  3730. add %rax,%r8
  3731. # qhasm: mulr11 += mulrdx + carry
  3732. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  3733. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  3734. adc %rdx,%r9
  3735. # qhasm: mulrax = g1_stack
  3736. # asm 1: movq <g1_stack=stack64#19,>mulrax=int64#7
  3737. # asm 2: movq <g1_stack=144(%rsp),>mulrax=%rax
  3738. movq 144(%rsp),%rax
  3739. # qhasm: (uint128) mulrdx mulrax = mulrax * f1_stack
  3740. # asm 1: mulq <f1_stack=stack64#24
  3741. # asm 2: mulq <f1_stack=184(%rsp)
  3742. mulq 184(%rsp)
  3743. # qhasm: carry? rz2 += mulrax
  3744. # asm 1: add <mulrax=int64#7,<rz2=int64#8
  3745. # asm 2: add <mulrax=%rax,<rz2=%r10
  3746. add %rax,%r10
  3747. # qhasm: mulr21 += mulrdx + carry
  3748. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  3749. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  3750. adc %rdx,%r11
  3751. # qhasm: mulrax = g1_stack
  3752. # asm 1: movq <g1_stack=stack64#19,>mulrax=int64#7
  3753. # asm 2: movq <g1_stack=144(%rsp),>mulrax=%rax
  3754. movq 144(%rsp),%rax
  3755. # qhasm: (uint128) mulrdx mulrax = mulrax * f2_stack
  3756. # asm 1: mulq <f2_stack=stack64#25
  3757. # asm 2: mulq <f2_stack=192(%rsp)
  3758. mulq 192(%rsp)
  3759. # qhasm: carry? rz3 += mulrax
  3760. # asm 1: add <mulrax=int64#7,<rz3=int64#10
  3761. # asm 2: add <mulrax=%rax,<rz3=%r12
  3762. add %rax,%r12
  3763. # qhasm: mulr31 += mulrdx + carry
  3764. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  3765. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  3766. adc %rdx,%r13
  3767. # qhasm: mulrax = g1_stack
  3768. # asm 1: movq <g1_stack=stack64#19,>mulrax=int64#7
  3769. # asm 2: movq <g1_stack=144(%rsp),>mulrax=%rax
  3770. movq 144(%rsp),%rax
  3771. # qhasm: (uint128) mulrdx mulrax = mulrax * f3_stack
  3772. # asm 1: mulq <f3_stack=stack64#26
  3773. # asm 2: mulq <f3_stack=200(%rsp)
  3774. mulq 200(%rsp)
  3775. # qhasm: carry? rz4 += mulrax
  3776. # asm 1: add <mulrax=int64#7,<rz4=int64#12
  3777. # asm 2: add <mulrax=%rax,<rz4=%r14
  3778. add %rax,%r14
  3779. # qhasm: mulr41 += mulrdx + carry
  3780. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  3781. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  3782. adc %rdx,%r15
  3783. # qhasm: mulrax = g1_stack
  3784. # asm 1: movq <g1_stack=stack64#19,>mulrax=int64#3
  3785. # asm 2: movq <g1_stack=144(%rsp),>mulrax=%rdx
  3786. movq 144(%rsp),%rdx
  3787. # qhasm: mulrax *= 19
  3788. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  3789. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  3790. imulq $19,%rdx,%rax
  3791. # qhasm: (uint128) mulrdx mulrax = mulrax * f4_stack
  3792. # asm 1: mulq <f4_stack=stack64#27
  3793. # asm 2: mulq <f4_stack=208(%rsp)
  3794. mulq 208(%rsp)
  3795. # qhasm: carry? rz0 += mulrax
  3796. # asm 1: add <mulrax=int64#7,<rz0=int64#2
  3797. # asm 2: add <mulrax=%rax,<rz0=%rsi
  3798. add %rax,%rsi
  3799. # qhasm: mulr01 += mulrdx + carry
  3800. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  3801. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  3802. adc %rdx,%rcx
  3803. # qhasm: mulrax = g2_stack
  3804. # asm 1: movq <g2_stack=stack64#20,>mulrax=int64#7
  3805. # asm 2: movq <g2_stack=152(%rsp),>mulrax=%rax
  3806. movq 152(%rsp),%rax
  3807. # qhasm: (uint128) mulrdx mulrax = mulrax * f0_stack
  3808. # asm 1: mulq <f0_stack=stack64#23
  3809. # asm 2: mulq <f0_stack=176(%rsp)
  3810. mulq 176(%rsp)
  3811. # qhasm: carry? rz2 += mulrax
  3812. # asm 1: add <mulrax=int64#7,<rz2=int64#8
  3813. # asm 2: add <mulrax=%rax,<rz2=%r10
  3814. add %rax,%r10
  3815. # qhasm: mulr21 += mulrdx + carry
  3816. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  3817. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  3818. adc %rdx,%r11
  3819. # qhasm: mulrax = g2_stack
  3820. # asm 1: movq <g2_stack=stack64#20,>mulrax=int64#7
  3821. # asm 2: movq <g2_stack=152(%rsp),>mulrax=%rax
  3822. movq 152(%rsp),%rax
  3823. # qhasm: (uint128) mulrdx mulrax = mulrax * f1_stack
  3824. # asm 1: mulq <f1_stack=stack64#24
  3825. # asm 2: mulq <f1_stack=184(%rsp)
  3826. mulq 184(%rsp)
  3827. # qhasm: carry? rz3 += mulrax
  3828. # asm 1: add <mulrax=int64#7,<rz3=int64#10
  3829. # asm 2: add <mulrax=%rax,<rz3=%r12
  3830. add %rax,%r12
  3831. # qhasm: mulr31 += mulrdx + carry
  3832. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  3833. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  3834. adc %rdx,%r13
  3835. # qhasm: mulrax = g2_stack
  3836. # asm 1: movq <g2_stack=stack64#20,>mulrax=int64#7
  3837. # asm 2: movq <g2_stack=152(%rsp),>mulrax=%rax
  3838. movq 152(%rsp),%rax
  3839. # qhasm: (uint128) mulrdx mulrax = mulrax * f2_stack
  3840. # asm 1: mulq <f2_stack=stack64#25
  3841. # asm 2: mulq <f2_stack=192(%rsp)
  3842. mulq 192(%rsp)
  3843. # qhasm: carry? rz4 += mulrax
  3844. # asm 1: add <mulrax=int64#7,<rz4=int64#12
  3845. # asm 2: add <mulrax=%rax,<rz4=%r14
  3846. add %rax,%r14
  3847. # qhasm: mulr41 += mulrdx + carry
  3848. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  3849. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  3850. adc %rdx,%r15
  3851. # qhasm: mulrax = g2_stack
  3852. # asm 1: movq <g2_stack=stack64#20,>mulrax=int64#3
  3853. # asm 2: movq <g2_stack=152(%rsp),>mulrax=%rdx
  3854. movq 152(%rsp),%rdx
  3855. # qhasm: mulrax *= 19
  3856. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  3857. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  3858. imulq $19,%rdx,%rax
  3859. # qhasm: (uint128) mulrdx mulrax = mulrax * f3_stack
  3860. # asm 1: mulq <f3_stack=stack64#26
  3861. # asm 2: mulq <f3_stack=200(%rsp)
  3862. mulq 200(%rsp)
  3863. # qhasm: carry? rz0 += mulrax
  3864. # asm 1: add <mulrax=int64#7,<rz0=int64#2
  3865. # asm 2: add <mulrax=%rax,<rz0=%rsi
  3866. add %rax,%rsi
  3867. # qhasm: mulr01 += mulrdx + carry
  3868. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  3869. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  3870. adc %rdx,%rcx
  3871. # qhasm: mulrax = g2_stack
  3872. # asm 1: movq <g2_stack=stack64#20,>mulrax=int64#3
  3873. # asm 2: movq <g2_stack=152(%rsp),>mulrax=%rdx
  3874. movq 152(%rsp),%rdx
  3875. # qhasm: mulrax *= 19
  3876. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  3877. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  3878. imulq $19,%rdx,%rax
  3879. # qhasm: (uint128) mulrdx mulrax = mulrax * f4_stack
  3880. # asm 1: mulq <f4_stack=stack64#27
  3881. # asm 2: mulq <f4_stack=208(%rsp)
  3882. mulq 208(%rsp)
  3883. # qhasm: carry? rz1 += mulrax
  3884. # asm 1: add <mulrax=int64#7,<rz1=int64#5
  3885. # asm 2: add <mulrax=%rax,<rz1=%r8
  3886. add %rax,%r8
  3887. # qhasm: mulr11 += mulrdx + carry
  3888. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  3889. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  3890. adc %rdx,%r9
  3891. # qhasm: mulrax = g3_stack
  3892. # asm 1: movq <g3_stack=stack64#21,>mulrax=int64#7
  3893. # asm 2: movq <g3_stack=160(%rsp),>mulrax=%rax
  3894. movq 160(%rsp),%rax
  3895. # qhasm: (uint128) mulrdx mulrax = mulrax * f0_stack
  3896. # asm 1: mulq <f0_stack=stack64#23
  3897. # asm 2: mulq <f0_stack=176(%rsp)
  3898. mulq 176(%rsp)
  3899. # qhasm: carry? rz3 += mulrax
  3900. # asm 1: add <mulrax=int64#7,<rz3=int64#10
  3901. # asm 2: add <mulrax=%rax,<rz3=%r12
  3902. add %rax,%r12
  3903. # qhasm: mulr31 += mulrdx + carry
  3904. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  3905. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  3906. adc %rdx,%r13
  3907. # qhasm: mulrax = g3_stack
  3908. # asm 1: movq <g3_stack=stack64#21,>mulrax=int64#7
  3909. # asm 2: movq <g3_stack=160(%rsp),>mulrax=%rax
  3910. movq 160(%rsp),%rax
  3911. # qhasm: (uint128) mulrdx mulrax = mulrax * f1_stack
  3912. # asm 1: mulq <f1_stack=stack64#24
  3913. # asm 2: mulq <f1_stack=184(%rsp)
  3914. mulq 184(%rsp)
  3915. # qhasm: carry? rz4 += mulrax
  3916. # asm 1: add <mulrax=int64#7,<rz4=int64#12
  3917. # asm 2: add <mulrax=%rax,<rz4=%r14
  3918. add %rax,%r14
  3919. # qhasm: mulr41 += mulrdx + carry
  3920. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  3921. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  3922. adc %rdx,%r15
  3923. # qhasm: mulrax = mulx319_stack
  3924. # asm 1: movq <mulx319_stack=stack64#28,>mulrax=int64#7
  3925. # asm 2: movq <mulx319_stack=216(%rsp),>mulrax=%rax
  3926. movq 216(%rsp),%rax
  3927. # qhasm: (uint128) mulrdx mulrax = mulrax * f3_stack
  3928. # asm 1: mulq <f3_stack=stack64#26
  3929. # asm 2: mulq <f3_stack=200(%rsp)
  3930. mulq 200(%rsp)
  3931. # qhasm: carry? rz1 += mulrax
  3932. # asm 1: add <mulrax=int64#7,<rz1=int64#5
  3933. # asm 2: add <mulrax=%rax,<rz1=%r8
  3934. add %rax,%r8
  3935. # qhasm: mulr11 += mulrdx + carry
  3936. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  3937. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  3938. adc %rdx,%r9
  3939. # qhasm: mulrax = mulx319_stack
  3940. # asm 1: movq <mulx319_stack=stack64#28,>mulrax=int64#7
  3941. # asm 2: movq <mulx319_stack=216(%rsp),>mulrax=%rax
  3942. movq 216(%rsp),%rax
  3943. # qhasm: (uint128) mulrdx mulrax = mulrax * f4_stack
  3944. # asm 1: mulq <f4_stack=stack64#27
  3945. # asm 2: mulq <f4_stack=208(%rsp)
  3946. mulq 208(%rsp)
  3947. # qhasm: carry? rz2 += mulrax
  3948. # asm 1: add <mulrax=int64#7,<rz2=int64#8
  3949. # asm 2: add <mulrax=%rax,<rz2=%r10
  3950. add %rax,%r10
  3951. # qhasm: mulr21 += mulrdx + carry
  3952. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  3953. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  3954. adc %rdx,%r11
  3955. # qhasm: mulrax = g4_stack
  3956. # asm 1: movq <g4_stack=stack64#22,>mulrax=int64#7
  3957. # asm 2: movq <g4_stack=168(%rsp),>mulrax=%rax
  3958. movq 168(%rsp),%rax
  3959. # qhasm: (uint128) mulrdx mulrax = mulrax * f0_stack
  3960. # asm 1: mulq <f0_stack=stack64#23
  3961. # asm 2: mulq <f0_stack=176(%rsp)
  3962. mulq 176(%rsp)
  3963. # qhasm: carry? rz4 += mulrax
  3964. # asm 1: add <mulrax=int64#7,<rz4=int64#12
  3965. # asm 2: add <mulrax=%rax,<rz4=%r14
  3966. add %rax,%r14
  3967. # qhasm: mulr41 += mulrdx + carry
  3968. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  3969. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  3970. adc %rdx,%r15
  3971. # qhasm: mulrax = mulx419_stack
  3972. # asm 1: movq <mulx419_stack=stack64#29,>mulrax=int64#7
  3973. # asm 2: movq <mulx419_stack=224(%rsp),>mulrax=%rax
  3974. movq 224(%rsp),%rax
  3975. # qhasm: (uint128) mulrdx mulrax = mulrax * f2_stack
  3976. # asm 1: mulq <f2_stack=stack64#25
  3977. # asm 2: mulq <f2_stack=192(%rsp)
  3978. mulq 192(%rsp)
  3979. # qhasm: carry? rz1 += mulrax
  3980. # asm 1: add <mulrax=int64#7,<rz1=int64#5
  3981. # asm 2: add <mulrax=%rax,<rz1=%r8
  3982. add %rax,%r8
  3983. # qhasm: mulr11 += mulrdx + carry
  3984. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  3985. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  3986. adc %rdx,%r9
  3987. # qhasm: mulrax = mulx419_stack
  3988. # asm 1: movq <mulx419_stack=stack64#29,>mulrax=int64#7
  3989. # asm 2: movq <mulx419_stack=224(%rsp),>mulrax=%rax
  3990. movq 224(%rsp),%rax
  3991. # qhasm: (uint128) mulrdx mulrax = mulrax * f3_stack
  3992. # asm 1: mulq <f3_stack=stack64#26
  3993. # asm 2: mulq <f3_stack=200(%rsp)
  3994. mulq 200(%rsp)
  3995. # qhasm: carry? rz2 += mulrax
  3996. # asm 1: add <mulrax=int64#7,<rz2=int64#8
  3997. # asm 2: add <mulrax=%rax,<rz2=%r10
  3998. add %rax,%r10
  3999. # qhasm: mulr21 += mulrdx + carry
  4000. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  4001. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  4002. adc %rdx,%r11
  4003. # qhasm: mulrax = mulx419_stack
  4004. # asm 1: movq <mulx419_stack=stack64#29,>mulrax=int64#7
  4005. # asm 2: movq <mulx419_stack=224(%rsp),>mulrax=%rax
  4006. movq 224(%rsp),%rax
  4007. # qhasm: (uint128) mulrdx mulrax = mulrax * f4_stack
  4008. # asm 1: mulq <f4_stack=stack64#27
  4009. # asm 2: mulq <f4_stack=208(%rsp)
  4010. mulq 208(%rsp)
  4011. # qhasm: carry? rz3 += mulrax
  4012. # asm 1: add <mulrax=int64#7,<rz3=int64#10
  4013. # asm 2: add <mulrax=%rax,<rz3=%r12
  4014. add %rax,%r12
  4015. # qhasm: mulr31 += mulrdx + carry
  4016. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  4017. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  4018. adc %rdx,%r13
  4019. # qhasm: mulredmask = *(uint64 *) &CRYPTO_NAMESPACE(batch_REDMASK51)
  4020. # asm 1: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=int64#3
  4021. # asm 2: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=%rdx
  4022. movq CRYPTO_NAMESPACE(batch_REDMASK51)(%rip),%rdx
  4023. # qhasm: mulr01 = (mulr01.rz0) << 13
  4024. # asm 1: shld $13,<rz0=int64#2,<mulr01=int64#4
  4025. # asm 2: shld $13,<rz0=%rsi,<mulr01=%rcx
  4026. shld $13,%rsi,%rcx
  4027. # qhasm: rz0 &= mulredmask
  4028. # asm 1: and <mulredmask=int64#3,<rz0=int64#2
  4029. # asm 2: and <mulredmask=%rdx,<rz0=%rsi
  4030. and %rdx,%rsi
  4031. # qhasm: mulr11 = (mulr11.rz1) << 13
  4032. # asm 1: shld $13,<rz1=int64#5,<mulr11=int64#6
  4033. # asm 2: shld $13,<rz1=%r8,<mulr11=%r9
  4034. shld $13,%r8,%r9
  4035. # qhasm: rz1 &= mulredmask
  4036. # asm 1: and <mulredmask=int64#3,<rz1=int64#5
  4037. # asm 2: and <mulredmask=%rdx,<rz1=%r8
  4038. and %rdx,%r8
  4039. # qhasm: rz1 += mulr01
  4040. # asm 1: add <mulr01=int64#4,<rz1=int64#5
  4041. # asm 2: add <mulr01=%rcx,<rz1=%r8
  4042. add %rcx,%r8
  4043. # qhasm: mulr21 = (mulr21.rz2) << 13
  4044. # asm 1: shld $13,<rz2=int64#8,<mulr21=int64#9
  4045. # asm 2: shld $13,<rz2=%r10,<mulr21=%r11
  4046. shld $13,%r10,%r11
  4047. # qhasm: rz2 &= mulredmask
  4048. # asm 1: and <mulredmask=int64#3,<rz2=int64#8
  4049. # asm 2: and <mulredmask=%rdx,<rz2=%r10
  4050. and %rdx,%r10
  4051. # qhasm: rz2 += mulr11
  4052. # asm 1: add <mulr11=int64#6,<rz2=int64#8
  4053. # asm 2: add <mulr11=%r9,<rz2=%r10
  4054. add %r9,%r10
  4055. # qhasm: mulr31 = (mulr31.rz3) << 13
  4056. # asm 1: shld $13,<rz3=int64#10,<mulr31=int64#11
  4057. # asm 2: shld $13,<rz3=%r12,<mulr31=%r13
  4058. shld $13,%r12,%r13
  4059. # qhasm: rz3 &= mulredmask
  4060. # asm 1: and <mulredmask=int64#3,<rz3=int64#10
  4061. # asm 2: and <mulredmask=%rdx,<rz3=%r12
  4062. and %rdx,%r12
  4063. # qhasm: rz3 += mulr21
  4064. # asm 1: add <mulr21=int64#9,<rz3=int64#10
  4065. # asm 2: add <mulr21=%r11,<rz3=%r12
  4066. add %r11,%r12
  4067. # qhasm: mulr41 = (mulr41.rz4) << 13
  4068. # asm 1: shld $13,<rz4=int64#12,<mulr41=int64#13
  4069. # asm 2: shld $13,<rz4=%r14,<mulr41=%r15
  4070. shld $13,%r14,%r15
  4071. # qhasm: rz4 &= mulredmask
  4072. # asm 1: and <mulredmask=int64#3,<rz4=int64#12
  4073. # asm 2: and <mulredmask=%rdx,<rz4=%r14
  4074. and %rdx,%r14
  4075. # qhasm: rz4 += mulr31
  4076. # asm 1: add <mulr31=int64#11,<rz4=int64#12
  4077. # asm 2: add <mulr31=%r13,<rz4=%r14
  4078. add %r13,%r14
  4079. # qhasm: mulr41 = mulr41 * 19
  4080. # asm 1: imulq $19,<mulr41=int64#13,>mulr41=int64#4
  4081. # asm 2: imulq $19,<mulr41=%r15,>mulr41=%rcx
  4082. imulq $19,%r15,%rcx
  4083. # qhasm: rz0 += mulr41
  4084. # asm 1: add <mulr41=int64#4,<rz0=int64#2
  4085. # asm 2: add <mulr41=%rcx,<rz0=%rsi
  4086. add %rcx,%rsi
  4087. # qhasm: mult = rz0
  4088. # asm 1: mov <rz0=int64#2,>mult=int64#4
  4089. # asm 2: mov <rz0=%rsi,>mult=%rcx
  4090. mov %rsi,%rcx
  4091. # qhasm: (uint64) mult >>= 51
  4092. # asm 1: shr $51,<mult=int64#4
  4093. # asm 2: shr $51,<mult=%rcx
  4094. shr $51,%rcx
  4095. # qhasm: mult += rz1
  4096. # asm 1: add <rz1=int64#5,<mult=int64#4
  4097. # asm 2: add <rz1=%r8,<mult=%rcx
  4098. add %r8,%rcx
  4099. # qhasm: rz1 = mult
  4100. # asm 1: mov <mult=int64#4,>rz1=int64#5
  4101. # asm 2: mov <mult=%rcx,>rz1=%r8
  4102. mov %rcx,%r8
  4103. # qhasm: (uint64) mult >>= 51
  4104. # asm 1: shr $51,<mult=int64#4
  4105. # asm 2: shr $51,<mult=%rcx
  4106. shr $51,%rcx
  4107. # qhasm: rz0 &= mulredmask
  4108. # asm 1: and <mulredmask=int64#3,<rz0=int64#2
  4109. # asm 2: and <mulredmask=%rdx,<rz0=%rsi
  4110. and %rdx,%rsi
  4111. # qhasm: mult += rz2
  4112. # asm 1: add <rz2=int64#8,<mult=int64#4
  4113. # asm 2: add <rz2=%r10,<mult=%rcx
  4114. add %r10,%rcx
  4115. # qhasm: rz2 = mult
  4116. # asm 1: mov <mult=int64#4,>rz2=int64#6
  4117. # asm 2: mov <mult=%rcx,>rz2=%r9
  4118. mov %rcx,%r9
  4119. # qhasm: (uint64) mult >>= 51
  4120. # asm 1: shr $51,<mult=int64#4
  4121. # asm 2: shr $51,<mult=%rcx
  4122. shr $51,%rcx
  4123. # qhasm: rz1 &= mulredmask
  4124. # asm 1: and <mulredmask=int64#3,<rz1=int64#5
  4125. # asm 2: and <mulredmask=%rdx,<rz1=%r8
  4126. and %rdx,%r8
  4127. # qhasm: mult += rz3
  4128. # asm 1: add <rz3=int64#10,<mult=int64#4
  4129. # asm 2: add <rz3=%r12,<mult=%rcx
  4130. add %r12,%rcx
  4131. # qhasm: rz3 = mult
  4132. # asm 1: mov <mult=int64#4,>rz3=int64#7
  4133. # asm 2: mov <mult=%rcx,>rz3=%rax
  4134. mov %rcx,%rax
  4135. # qhasm: (uint64) mult >>= 51
  4136. # asm 1: shr $51,<mult=int64#4
  4137. # asm 2: shr $51,<mult=%rcx
  4138. shr $51,%rcx
  4139. # qhasm: rz2 &= mulredmask
  4140. # asm 1: and <mulredmask=int64#3,<rz2=int64#6
  4141. # asm 2: and <mulredmask=%rdx,<rz2=%r9
  4142. and %rdx,%r9
  4143. # qhasm: mult += rz4
  4144. # asm 1: add <rz4=int64#12,<mult=int64#4
  4145. # asm 2: add <rz4=%r14,<mult=%rcx
  4146. add %r14,%rcx
  4147. # qhasm: rz4 = mult
  4148. # asm 1: mov <mult=int64#4,>rz4=int64#8
  4149. # asm 2: mov <mult=%rcx,>rz4=%r10
  4150. mov %rcx,%r10
  4151. # qhasm: (uint64) mult >>= 51
  4152. # asm 1: shr $51,<mult=int64#4
  4153. # asm 2: shr $51,<mult=%rcx
  4154. shr $51,%rcx
  4155. # qhasm: rz3 &= mulredmask
  4156. # asm 1: and <mulredmask=int64#3,<rz3=int64#7
  4157. # asm 2: and <mulredmask=%rdx,<rz3=%rax
  4158. and %rdx,%rax
  4159. # qhasm: mult *= 19
  4160. # asm 1: imulq $19,<mult=int64#4,>mult=int64#4
  4161. # asm 2: imulq $19,<mult=%rcx,>mult=%rcx
  4162. imulq $19,%rcx,%rcx
  4163. # qhasm: rz0 += mult
  4164. # asm 1: add <mult=int64#4,<rz0=int64#2
  4165. # asm 2: add <mult=%rcx,<rz0=%rsi
  4166. add %rcx,%rsi
  4167. # qhasm: rz4 &= mulredmask
  4168. # asm 1: and <mulredmask=int64#3,<rz4=int64#8
  4169. # asm 2: and <mulredmask=%rdx,<rz4=%r10
  4170. and %rdx,%r10
  4171. # qhasm: *(uint64 *)(rp + 80) = rz0
  4172. # asm 1: movq <rz0=int64#2,80(<rp=int64#1)
  4173. # asm 2: movq <rz0=%rsi,80(<rp=%rdi)
  4174. movq %rsi,80(%rdi)
  4175. # qhasm: *(uint64 *)(rp + 88) = rz1
  4176. # asm 1: movq <rz1=int64#5,88(<rp=int64#1)
  4177. # asm 2: movq <rz1=%r8,88(<rp=%rdi)
  4178. movq %r8,88(%rdi)
  4179. # qhasm: *(uint64 *)(rp + 96) = rz2
  4180. # asm 1: movq <rz2=int64#6,96(<rp=int64#1)
  4181. # asm 2: movq <rz2=%r9,96(<rp=%rdi)
  4182. movq %r9,96(%rdi)
  4183. # qhasm: *(uint64 *)(rp + 104) = rz3
  4184. # asm 1: movq <rz3=int64#7,104(<rp=int64#1)
  4185. # asm 2: movq <rz3=%rax,104(<rp=%rdi)
  4186. movq %rax,104(%rdi)
  4187. # qhasm: *(uint64 *)(rp + 112) = rz4
  4188. # asm 1: movq <rz4=int64#8,112(<rp=int64#1)
  4189. # asm 2: movq <rz4=%r10,112(<rp=%rdi)
  4190. movq %r10,112(%rdi)
  4191. # qhasm: mulrax = e3_stack
  4192. # asm 1: movq <e3_stack=stack64#16,>mulrax=int64#2
  4193. # asm 2: movq <e3_stack=120(%rsp),>mulrax=%rsi
  4194. movq 120(%rsp),%rsi
  4195. # qhasm: mulrax *= 19
  4196. # asm 1: imulq $19,<mulrax=int64#2,>mulrax=int64#7
  4197. # asm 2: imulq $19,<mulrax=%rsi,>mulrax=%rax
  4198. imulq $19,%rsi,%rax
  4199. # qhasm: mulx319_stack = mulrax
  4200. # asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#18
  4201. # asm 2: movq <mulrax=%rax,>mulx319_stack=136(%rsp)
  4202. movq %rax,136(%rsp)
  4203. # qhasm: (uint128) mulrdx mulrax = mulrax * h2_stack
  4204. # asm 1: mulq <h2_stack=stack64#10
  4205. # asm 2: mulq <h2_stack=72(%rsp)
  4206. mulq 72(%rsp)
  4207. # qhasm: rt0 = mulrax
  4208. # asm 1: mov <mulrax=int64#7,>rt0=int64#2
  4209. # asm 2: mov <mulrax=%rax,>rt0=%rsi
  4210. mov %rax,%rsi
  4211. # qhasm: mulr01 = mulrdx
  4212. # asm 1: mov <mulrdx=int64#3,>mulr01=int64#4
  4213. # asm 2: mov <mulrdx=%rdx,>mulr01=%rcx
  4214. mov %rdx,%rcx
  4215. # qhasm: mulrax = e4_stack
  4216. # asm 1: movq <e4_stack=stack64#17,>mulrax=int64#3
  4217. # asm 2: movq <e4_stack=128(%rsp),>mulrax=%rdx
  4218. movq 128(%rsp),%rdx
  4219. # qhasm: mulrax *= 19
  4220. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  4221. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  4222. imulq $19,%rdx,%rax
  4223. # qhasm: mulx419_stack = mulrax
  4224. # asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#19
  4225. # asm 2: movq <mulrax=%rax,>mulx419_stack=144(%rsp)
  4226. movq %rax,144(%rsp)
  4227. # qhasm: (uint128) mulrdx mulrax = mulrax * h1_stack
  4228. # asm 1: mulq <h1_stack=stack64#9
  4229. # asm 2: mulq <h1_stack=64(%rsp)
  4230. mulq 64(%rsp)
  4231. # qhasm: carry? rt0 += mulrax
  4232. # asm 1: add <mulrax=int64#7,<rt0=int64#2
  4233. # asm 2: add <mulrax=%rax,<rt0=%rsi
  4234. add %rax,%rsi
  4235. # qhasm: mulr01 += mulrdx + carry
  4236. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  4237. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  4238. adc %rdx,%rcx
  4239. # qhasm: mulrax = e0_stack
  4240. # asm 1: movq <e0_stack=stack64#13,>mulrax=int64#7
  4241. # asm 2: movq <e0_stack=96(%rsp),>mulrax=%rax
  4242. movq 96(%rsp),%rax
  4243. # qhasm: (uint128) mulrdx mulrax = mulrax * h0_stack
  4244. # asm 1: mulq <h0_stack=stack64#8
  4245. # asm 2: mulq <h0_stack=56(%rsp)
  4246. mulq 56(%rsp)
  4247. # qhasm: carry? rt0 += mulrax
  4248. # asm 1: add <mulrax=int64#7,<rt0=int64#2
  4249. # asm 2: add <mulrax=%rax,<rt0=%rsi
  4250. add %rax,%rsi
  4251. # qhasm: mulr01 += mulrdx + carry
  4252. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  4253. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  4254. adc %rdx,%rcx
  4255. # qhasm: mulrax = e0_stack
  4256. # asm 1: movq <e0_stack=stack64#13,>mulrax=int64#7
  4257. # asm 2: movq <e0_stack=96(%rsp),>mulrax=%rax
  4258. movq 96(%rsp),%rax
  4259. # qhasm: (uint128) mulrdx mulrax = mulrax * h1_stack
  4260. # asm 1: mulq <h1_stack=stack64#9
  4261. # asm 2: mulq <h1_stack=64(%rsp)
  4262. mulq 64(%rsp)
  4263. # qhasm: rt1 = mulrax
  4264. # asm 1: mov <mulrax=int64#7,>rt1=int64#5
  4265. # asm 2: mov <mulrax=%rax,>rt1=%r8
  4266. mov %rax,%r8
  4267. # qhasm: mulr11 = mulrdx
  4268. # asm 1: mov <mulrdx=int64#3,>mulr11=int64#6
  4269. # asm 2: mov <mulrdx=%rdx,>mulr11=%r9
  4270. mov %rdx,%r9
  4271. # qhasm: mulrax = e0_stack
  4272. # asm 1: movq <e0_stack=stack64#13,>mulrax=int64#7
  4273. # asm 2: movq <e0_stack=96(%rsp),>mulrax=%rax
  4274. movq 96(%rsp),%rax
  4275. # qhasm: (uint128) mulrdx mulrax = mulrax * h2_stack
  4276. # asm 1: mulq <h2_stack=stack64#10
  4277. # asm 2: mulq <h2_stack=72(%rsp)
  4278. mulq 72(%rsp)
  4279. # qhasm: rt2 = mulrax
  4280. # asm 1: mov <mulrax=int64#7,>rt2=int64#8
  4281. # asm 2: mov <mulrax=%rax,>rt2=%r10
  4282. mov %rax,%r10
  4283. # qhasm: mulr21 = mulrdx
  4284. # asm 1: mov <mulrdx=int64#3,>mulr21=int64#9
  4285. # asm 2: mov <mulrdx=%rdx,>mulr21=%r11
  4286. mov %rdx,%r11
  4287. # qhasm: mulrax = e0_stack
  4288. # asm 1: movq <e0_stack=stack64#13,>mulrax=int64#7
  4289. # asm 2: movq <e0_stack=96(%rsp),>mulrax=%rax
  4290. movq 96(%rsp),%rax
  4291. # qhasm: (uint128) mulrdx mulrax = mulrax * h3_stack
  4292. # asm 1: mulq <h3_stack=stack64#11
  4293. # asm 2: mulq <h3_stack=80(%rsp)
  4294. mulq 80(%rsp)
  4295. # qhasm: rt3 = mulrax
  4296. # asm 1: mov <mulrax=int64#7,>rt3=int64#10
  4297. # asm 2: mov <mulrax=%rax,>rt3=%r12
  4298. mov %rax,%r12
  4299. # qhasm: mulr31 = mulrdx
  4300. # asm 1: mov <mulrdx=int64#3,>mulr31=int64#11
  4301. # asm 2: mov <mulrdx=%rdx,>mulr31=%r13
  4302. mov %rdx,%r13
  4303. # qhasm: mulrax = e0_stack
  4304. # asm 1: movq <e0_stack=stack64#13,>mulrax=int64#7
  4305. # asm 2: movq <e0_stack=96(%rsp),>mulrax=%rax
  4306. movq 96(%rsp),%rax
  4307. # qhasm: (uint128) mulrdx mulrax = mulrax * h4_stack
  4308. # asm 1: mulq <h4_stack=stack64#12
  4309. # asm 2: mulq <h4_stack=88(%rsp)
  4310. mulq 88(%rsp)
  4311. # qhasm: rt4 = mulrax
  4312. # asm 1: mov <mulrax=int64#7,>rt4=int64#12
  4313. # asm 2: mov <mulrax=%rax,>rt4=%r14
  4314. mov %rax,%r14
  4315. # qhasm: mulr41 = mulrdx
  4316. # asm 1: mov <mulrdx=int64#3,>mulr41=int64#13
  4317. # asm 2: mov <mulrdx=%rdx,>mulr41=%r15
  4318. mov %rdx,%r15
  4319. # qhasm: mulrax = e1_stack
  4320. # asm 1: movq <e1_stack=stack64#14,>mulrax=int64#7
  4321. # asm 2: movq <e1_stack=104(%rsp),>mulrax=%rax
  4322. movq 104(%rsp),%rax
  4323. # qhasm: (uint128) mulrdx mulrax = mulrax * h0_stack
  4324. # asm 1: mulq <h0_stack=stack64#8
  4325. # asm 2: mulq <h0_stack=56(%rsp)
  4326. mulq 56(%rsp)
  4327. # qhasm: carry? rt1 += mulrax
  4328. # asm 1: add <mulrax=int64#7,<rt1=int64#5
  4329. # asm 2: add <mulrax=%rax,<rt1=%r8
  4330. add %rax,%r8
  4331. # qhasm: mulr11 += mulrdx + carry
  4332. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  4333. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  4334. adc %rdx,%r9
  4335. # qhasm: mulrax = e1_stack
  4336. # asm 1: movq <e1_stack=stack64#14,>mulrax=int64#7
  4337. # asm 2: movq <e1_stack=104(%rsp),>mulrax=%rax
  4338. movq 104(%rsp),%rax
  4339. # qhasm: (uint128) mulrdx mulrax = mulrax * h1_stack
  4340. # asm 1: mulq <h1_stack=stack64#9
  4341. # asm 2: mulq <h1_stack=64(%rsp)
  4342. mulq 64(%rsp)
  4343. # qhasm: carry? rt2 += mulrax
  4344. # asm 1: add <mulrax=int64#7,<rt2=int64#8
  4345. # asm 2: add <mulrax=%rax,<rt2=%r10
  4346. add %rax,%r10
  4347. # qhasm: mulr21 += mulrdx + carry
  4348. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  4349. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  4350. adc %rdx,%r11
  4351. # qhasm: mulrax = e1_stack
  4352. # asm 1: movq <e1_stack=stack64#14,>mulrax=int64#7
  4353. # asm 2: movq <e1_stack=104(%rsp),>mulrax=%rax
  4354. movq 104(%rsp),%rax
  4355. # qhasm: (uint128) mulrdx mulrax = mulrax * h2_stack
  4356. # asm 1: mulq <h2_stack=stack64#10
  4357. # asm 2: mulq <h2_stack=72(%rsp)
  4358. mulq 72(%rsp)
  4359. # qhasm: carry? rt3 += mulrax
  4360. # asm 1: add <mulrax=int64#7,<rt3=int64#10
  4361. # asm 2: add <mulrax=%rax,<rt3=%r12
  4362. add %rax,%r12
  4363. # qhasm: mulr31 += mulrdx + carry
  4364. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  4365. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  4366. adc %rdx,%r13
  4367. # qhasm: mulrax = e1_stack
  4368. # asm 1: movq <e1_stack=stack64#14,>mulrax=int64#7
  4369. # asm 2: movq <e1_stack=104(%rsp),>mulrax=%rax
  4370. movq 104(%rsp),%rax
  4371. # qhasm: (uint128) mulrdx mulrax = mulrax * h3_stack
  4372. # asm 1: mulq <h3_stack=stack64#11
  4373. # asm 2: mulq <h3_stack=80(%rsp)
  4374. mulq 80(%rsp)
  4375. # qhasm: carry? rt4 += mulrax
  4376. # asm 1: add <mulrax=int64#7,<rt4=int64#12
  4377. # asm 2: add <mulrax=%rax,<rt4=%r14
  4378. add %rax,%r14
  4379. # qhasm: mulr41 += mulrdx + carry
  4380. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  4381. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  4382. adc %rdx,%r15
  4383. # qhasm: mulrax = e1_stack
  4384. # asm 1: movq <e1_stack=stack64#14,>mulrax=int64#3
  4385. # asm 2: movq <e1_stack=104(%rsp),>mulrax=%rdx
  4386. movq 104(%rsp),%rdx
  4387. # qhasm: mulrax *= 19
  4388. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  4389. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  4390. imulq $19,%rdx,%rax
  4391. # qhasm: (uint128) mulrdx mulrax = mulrax * h4_stack
  4392. # asm 1: mulq <h4_stack=stack64#12
  4393. # asm 2: mulq <h4_stack=88(%rsp)
  4394. mulq 88(%rsp)
  4395. # qhasm: carry? rt0 += mulrax
  4396. # asm 1: add <mulrax=int64#7,<rt0=int64#2
  4397. # asm 2: add <mulrax=%rax,<rt0=%rsi
  4398. add %rax,%rsi
  4399. # qhasm: mulr01 += mulrdx + carry
  4400. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  4401. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  4402. adc %rdx,%rcx
  4403. # qhasm: mulrax = e2_stack
  4404. # asm 1: movq <e2_stack=stack64#15,>mulrax=int64#7
  4405. # asm 2: movq <e2_stack=112(%rsp),>mulrax=%rax
  4406. movq 112(%rsp),%rax
  4407. # qhasm: (uint128) mulrdx mulrax = mulrax * h0_stack
  4408. # asm 1: mulq <h0_stack=stack64#8
  4409. # asm 2: mulq <h0_stack=56(%rsp)
  4410. mulq 56(%rsp)
  4411. # qhasm: carry? rt2 += mulrax
  4412. # asm 1: add <mulrax=int64#7,<rt2=int64#8
  4413. # asm 2: add <mulrax=%rax,<rt2=%r10
  4414. add %rax,%r10
  4415. # qhasm: mulr21 += mulrdx + carry
  4416. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  4417. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  4418. adc %rdx,%r11
  4419. # qhasm: mulrax = e2_stack
  4420. # asm 1: movq <e2_stack=stack64#15,>mulrax=int64#7
  4421. # asm 2: movq <e2_stack=112(%rsp),>mulrax=%rax
  4422. movq 112(%rsp),%rax
  4423. # qhasm: (uint128) mulrdx mulrax = mulrax * h1_stack
  4424. # asm 1: mulq <h1_stack=stack64#9
  4425. # asm 2: mulq <h1_stack=64(%rsp)
  4426. mulq 64(%rsp)
  4427. # qhasm: carry? rt3 += mulrax
  4428. # asm 1: add <mulrax=int64#7,<rt3=int64#10
  4429. # asm 2: add <mulrax=%rax,<rt3=%r12
  4430. add %rax,%r12
  4431. # qhasm: mulr31 += mulrdx + carry
  4432. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  4433. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  4434. adc %rdx,%r13
  4435. # qhasm: mulrax = e2_stack
  4436. # asm 1: movq <e2_stack=stack64#15,>mulrax=int64#7
  4437. # asm 2: movq <e2_stack=112(%rsp),>mulrax=%rax
  4438. movq 112(%rsp),%rax
  4439. # qhasm: (uint128) mulrdx mulrax = mulrax * h2_stack
  4440. # asm 1: mulq <h2_stack=stack64#10
  4441. # asm 2: mulq <h2_stack=72(%rsp)
  4442. mulq 72(%rsp)
  4443. # qhasm: carry? rt4 += mulrax
  4444. # asm 1: add <mulrax=int64#7,<rt4=int64#12
  4445. # asm 2: add <mulrax=%rax,<rt4=%r14
  4446. add %rax,%r14
  4447. # qhasm: mulr41 += mulrdx + carry
  4448. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  4449. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  4450. adc %rdx,%r15
  4451. # qhasm: mulrax = e2_stack
  4452. # asm 1: movq <e2_stack=stack64#15,>mulrax=int64#3
  4453. # asm 2: movq <e2_stack=112(%rsp),>mulrax=%rdx
  4454. movq 112(%rsp),%rdx
  4455. # qhasm: mulrax *= 19
  4456. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  4457. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  4458. imulq $19,%rdx,%rax
  4459. # qhasm: (uint128) mulrdx mulrax = mulrax * h3_stack
  4460. # asm 1: mulq <h3_stack=stack64#11
  4461. # asm 2: mulq <h3_stack=80(%rsp)
  4462. mulq 80(%rsp)
  4463. # qhasm: carry? rt0 += mulrax
  4464. # asm 1: add <mulrax=int64#7,<rt0=int64#2
  4465. # asm 2: add <mulrax=%rax,<rt0=%rsi
  4466. add %rax,%rsi
  4467. # qhasm: mulr01 += mulrdx + carry
  4468. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  4469. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  4470. adc %rdx,%rcx
  4471. # qhasm: mulrax = e2_stack
  4472. # asm 1: movq <e2_stack=stack64#15,>mulrax=int64#3
  4473. # asm 2: movq <e2_stack=112(%rsp),>mulrax=%rdx
  4474. movq 112(%rsp),%rdx
  4475. # qhasm: mulrax *= 19
  4476. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  4477. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  4478. imulq $19,%rdx,%rax
  4479. # qhasm: (uint128) mulrdx mulrax = mulrax * h4_stack
  4480. # asm 1: mulq <h4_stack=stack64#12
  4481. # asm 2: mulq <h4_stack=88(%rsp)
  4482. mulq 88(%rsp)
  4483. # qhasm: carry? rt1 += mulrax
  4484. # asm 1: add <mulrax=int64#7,<rt1=int64#5
  4485. # asm 2: add <mulrax=%rax,<rt1=%r8
  4486. add %rax,%r8
  4487. # qhasm: mulr11 += mulrdx + carry
  4488. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  4489. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  4490. adc %rdx,%r9
  4491. # qhasm: mulrax = e3_stack
  4492. # asm 1: movq <e3_stack=stack64#16,>mulrax=int64#7
  4493. # asm 2: movq <e3_stack=120(%rsp),>mulrax=%rax
  4494. movq 120(%rsp),%rax
  4495. # qhasm: (uint128) mulrdx mulrax = mulrax * h0_stack
  4496. # asm 1: mulq <h0_stack=stack64#8
  4497. # asm 2: mulq <h0_stack=56(%rsp)
  4498. mulq 56(%rsp)
  4499. # qhasm: carry? rt3 += mulrax
  4500. # asm 1: add <mulrax=int64#7,<rt3=int64#10
  4501. # asm 2: add <mulrax=%rax,<rt3=%r12
  4502. add %rax,%r12
  4503. # qhasm: mulr31 += mulrdx + carry
  4504. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  4505. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  4506. adc %rdx,%r13
  4507. # qhasm: mulrax = e3_stack
  4508. # asm 1: movq <e3_stack=stack64#16,>mulrax=int64#7
  4509. # asm 2: movq <e3_stack=120(%rsp),>mulrax=%rax
  4510. movq 120(%rsp),%rax
  4511. # qhasm: (uint128) mulrdx mulrax = mulrax * h1_stack
  4512. # asm 1: mulq <h1_stack=stack64#9
  4513. # asm 2: mulq <h1_stack=64(%rsp)
  4514. mulq 64(%rsp)
  4515. # qhasm: carry? rt4 += mulrax
  4516. # asm 1: add <mulrax=int64#7,<rt4=int64#12
  4517. # asm 2: add <mulrax=%rax,<rt4=%r14
  4518. add %rax,%r14
  4519. # qhasm: mulr41 += mulrdx + carry
  4520. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  4521. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  4522. adc %rdx,%r15
  4523. # qhasm: mulrax = mulx319_stack
  4524. # asm 1: movq <mulx319_stack=stack64#18,>mulrax=int64#7
  4525. # asm 2: movq <mulx319_stack=136(%rsp),>mulrax=%rax
  4526. movq 136(%rsp),%rax
  4527. # qhasm: (uint128) mulrdx mulrax = mulrax * h3_stack
  4528. # asm 1: mulq <h3_stack=stack64#11
  4529. # asm 2: mulq <h3_stack=80(%rsp)
  4530. mulq 80(%rsp)
  4531. # qhasm: carry? rt1 += mulrax
  4532. # asm 1: add <mulrax=int64#7,<rt1=int64#5
  4533. # asm 2: add <mulrax=%rax,<rt1=%r8
  4534. add %rax,%r8
  4535. # qhasm: mulr11 += mulrdx + carry
  4536. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  4537. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  4538. adc %rdx,%r9
  4539. # qhasm: mulrax = mulx319_stack
  4540. # asm 1: movq <mulx319_stack=stack64#18,>mulrax=int64#7
  4541. # asm 2: movq <mulx319_stack=136(%rsp),>mulrax=%rax
  4542. movq 136(%rsp),%rax
  4543. # qhasm: (uint128) mulrdx mulrax = mulrax * h4_stack
  4544. # asm 1: mulq <h4_stack=stack64#12
  4545. # asm 2: mulq <h4_stack=88(%rsp)
  4546. mulq 88(%rsp)
  4547. # qhasm: carry? rt2 += mulrax
  4548. # asm 1: add <mulrax=int64#7,<rt2=int64#8
  4549. # asm 2: add <mulrax=%rax,<rt2=%r10
  4550. add %rax,%r10
  4551. # qhasm: mulr21 += mulrdx + carry
  4552. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  4553. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  4554. adc %rdx,%r11
  4555. # qhasm: mulrax = e4_stack
  4556. # asm 1: movq <e4_stack=stack64#17,>mulrax=int64#7
  4557. # asm 2: movq <e4_stack=128(%rsp),>mulrax=%rax
  4558. movq 128(%rsp),%rax
  4559. # qhasm: (uint128) mulrdx mulrax = mulrax * h0_stack
  4560. # asm 1: mulq <h0_stack=stack64#8
  4561. # asm 2: mulq <h0_stack=56(%rsp)
  4562. mulq 56(%rsp)
  4563. # qhasm: carry? rt4 += mulrax
  4564. # asm 1: add <mulrax=int64#7,<rt4=int64#12
  4565. # asm 2: add <mulrax=%rax,<rt4=%r14
  4566. add %rax,%r14
  4567. # qhasm: mulr41 += mulrdx + carry
  4568. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  4569. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  4570. adc %rdx,%r15
  4571. # qhasm: mulrax = mulx419_stack
  4572. # asm 1: movq <mulx419_stack=stack64#19,>mulrax=int64#7
  4573. # asm 2: movq <mulx419_stack=144(%rsp),>mulrax=%rax
  4574. movq 144(%rsp),%rax
  4575. # qhasm: (uint128) mulrdx mulrax = mulrax * h2_stack
  4576. # asm 1: mulq <h2_stack=stack64#10
  4577. # asm 2: mulq <h2_stack=72(%rsp)
  4578. mulq 72(%rsp)
  4579. # qhasm: carry? rt1 += mulrax
  4580. # asm 1: add <mulrax=int64#7,<rt1=int64#5
  4581. # asm 2: add <mulrax=%rax,<rt1=%r8
  4582. add %rax,%r8
  4583. # qhasm: mulr11 += mulrdx + carry
  4584. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  4585. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  4586. adc %rdx,%r9
  4587. # qhasm: mulrax = mulx419_stack
  4588. # asm 1: movq <mulx419_stack=stack64#19,>mulrax=int64#7
  4589. # asm 2: movq <mulx419_stack=144(%rsp),>mulrax=%rax
  4590. movq 144(%rsp),%rax
  4591. # qhasm: (uint128) mulrdx mulrax = mulrax * h3_stack
  4592. # asm 1: mulq <h3_stack=stack64#11
  4593. # asm 2: mulq <h3_stack=80(%rsp)
  4594. mulq 80(%rsp)
  4595. # qhasm: carry? rt2 += mulrax
  4596. # asm 1: add <mulrax=int64#7,<rt2=int64#8
  4597. # asm 2: add <mulrax=%rax,<rt2=%r10
  4598. add %rax,%r10
  4599. # qhasm: mulr21 += mulrdx + carry
  4600. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  4601. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  4602. adc %rdx,%r11
  4603. # qhasm: mulrax = mulx419_stack
  4604. # asm 1: movq <mulx419_stack=stack64#19,>mulrax=int64#7
  4605. # asm 2: movq <mulx419_stack=144(%rsp),>mulrax=%rax
  4606. movq 144(%rsp),%rax
  4607. # qhasm: (uint128) mulrdx mulrax = mulrax * h4_stack
  4608. # asm 1: mulq <h4_stack=stack64#12
  4609. # asm 2: mulq <h4_stack=88(%rsp)
  4610. mulq 88(%rsp)
  4611. # qhasm: carry? rt3 += mulrax
  4612. # asm 1: add <mulrax=int64#7,<rt3=int64#10
  4613. # asm 2: add <mulrax=%rax,<rt3=%r12
  4614. add %rax,%r12
  4615. # qhasm: mulr31 += mulrdx + carry
  4616. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  4617. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  4618. adc %rdx,%r13
  4619. # qhasm: mulredmask = *(uint64 *) &CRYPTO_NAMESPACE(batch_REDMASK51)
  4620. # asm 1: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=int64#3
  4621. # asm 2: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=%rdx
  4622. movq CRYPTO_NAMESPACE(batch_REDMASK51)(%rip),%rdx
  4623. # qhasm: mulr01 = (mulr01.rt0) << 13
  4624. # asm 1: shld $13,<rt0=int64#2,<mulr01=int64#4
  4625. # asm 2: shld $13,<rt0=%rsi,<mulr01=%rcx
  4626. shld $13,%rsi,%rcx
  4627. # qhasm: rt0 &= mulredmask
  4628. # asm 1: and <mulredmask=int64#3,<rt0=int64#2
  4629. # asm 2: and <mulredmask=%rdx,<rt0=%rsi
  4630. and %rdx,%rsi
  4631. # qhasm: mulr11 = (mulr11.rt1) << 13
  4632. # asm 1: shld $13,<rt1=int64#5,<mulr11=int64#6
  4633. # asm 2: shld $13,<rt1=%r8,<mulr11=%r9
  4634. shld $13,%r8,%r9
  4635. # qhasm: rt1 &= mulredmask
  4636. # asm 1: and <mulredmask=int64#3,<rt1=int64#5
  4637. # asm 2: and <mulredmask=%rdx,<rt1=%r8
  4638. and %rdx,%r8
  4639. # qhasm: rt1 += mulr01
  4640. # asm 1: add <mulr01=int64#4,<rt1=int64#5
  4641. # asm 2: add <mulr01=%rcx,<rt1=%r8
  4642. add %rcx,%r8
  4643. # qhasm: mulr21 = (mulr21.rt2) << 13
  4644. # asm 1: shld $13,<rt2=int64#8,<mulr21=int64#9
  4645. # asm 2: shld $13,<rt2=%r10,<mulr21=%r11
  4646. shld $13,%r10,%r11
  4647. # qhasm: rt2 &= mulredmask
  4648. # asm 1: and <mulredmask=int64#3,<rt2=int64#8
  4649. # asm 2: and <mulredmask=%rdx,<rt2=%r10
  4650. and %rdx,%r10
  4651. # qhasm: rt2 += mulr11
  4652. # asm 1: add <mulr11=int64#6,<rt2=int64#8
  4653. # asm 2: add <mulr11=%r9,<rt2=%r10
  4654. add %r9,%r10
  4655. # qhasm: mulr31 = (mulr31.rt3) << 13
  4656. # asm 1: shld $13,<rt3=int64#10,<mulr31=int64#11
  4657. # asm 2: shld $13,<rt3=%r12,<mulr31=%r13
  4658. shld $13,%r12,%r13
  4659. # qhasm: rt3 &= mulredmask
  4660. # asm 1: and <mulredmask=int64#3,<rt3=int64#10
  4661. # asm 2: and <mulredmask=%rdx,<rt3=%r12
  4662. and %rdx,%r12
  4663. # qhasm: rt3 += mulr21
  4664. # asm 1: add <mulr21=int64#9,<rt3=int64#10
  4665. # asm 2: add <mulr21=%r11,<rt3=%r12
  4666. add %r11,%r12
  4667. # qhasm: mulr41 = (mulr41.rt4) << 13
  4668. # asm 1: shld $13,<rt4=int64#12,<mulr41=int64#13
  4669. # asm 2: shld $13,<rt4=%r14,<mulr41=%r15
  4670. shld $13,%r14,%r15
  4671. # qhasm: rt4 &= mulredmask
  4672. # asm 1: and <mulredmask=int64#3,<rt4=int64#12
  4673. # asm 2: and <mulredmask=%rdx,<rt4=%r14
  4674. and %rdx,%r14
  4675. # qhasm: rt4 += mulr31
  4676. # asm 1: add <mulr31=int64#11,<rt4=int64#12
  4677. # asm 2: add <mulr31=%r13,<rt4=%r14
  4678. add %r13,%r14
  4679. # qhasm: mulr41 = mulr41 * 19
  4680. # asm 1: imulq $19,<mulr41=int64#13,>mulr41=int64#4
  4681. # asm 2: imulq $19,<mulr41=%r15,>mulr41=%rcx
  4682. imulq $19,%r15,%rcx
  4683. # qhasm: rt0 += mulr41
  4684. # asm 1: add <mulr41=int64#4,<rt0=int64#2
  4685. # asm 2: add <mulr41=%rcx,<rt0=%rsi
  4686. add %rcx,%rsi
  4687. # qhasm: mult = rt0
  4688. # asm 1: mov <rt0=int64#2,>mult=int64#4
  4689. # asm 2: mov <rt0=%rsi,>mult=%rcx
  4690. mov %rsi,%rcx
  4691. # qhasm: (uint64) mult >>= 51
  4692. # asm 1: shr $51,<mult=int64#4
  4693. # asm 2: shr $51,<mult=%rcx
  4694. shr $51,%rcx
  4695. # qhasm: mult += rt1
  4696. # asm 1: add <rt1=int64#5,<mult=int64#4
  4697. # asm 2: add <rt1=%r8,<mult=%rcx
  4698. add %r8,%rcx
  4699. # qhasm: rt1 = mult
  4700. # asm 1: mov <mult=int64#4,>rt1=int64#5
  4701. # asm 2: mov <mult=%rcx,>rt1=%r8
  4702. mov %rcx,%r8
  4703. # qhasm: (uint64) mult >>= 51
  4704. # asm 1: shr $51,<mult=int64#4
  4705. # asm 2: shr $51,<mult=%rcx
  4706. shr $51,%rcx
  4707. # qhasm: rt0 &= mulredmask
  4708. # asm 1: and <mulredmask=int64#3,<rt0=int64#2
  4709. # asm 2: and <mulredmask=%rdx,<rt0=%rsi
  4710. and %rdx,%rsi
  4711. # qhasm: mult += rt2
  4712. # asm 1: add <rt2=int64#8,<mult=int64#4
  4713. # asm 2: add <rt2=%r10,<mult=%rcx
  4714. add %r10,%rcx
  4715. # qhasm: rt2 = mult
  4716. # asm 1: mov <mult=int64#4,>rt2=int64#6
  4717. # asm 2: mov <mult=%rcx,>rt2=%r9
  4718. mov %rcx,%r9
  4719. # qhasm: (uint64) mult >>= 51
  4720. # asm 1: shr $51,<mult=int64#4
  4721. # asm 2: shr $51,<mult=%rcx
  4722. shr $51,%rcx
  4723. # qhasm: rt1 &= mulredmask
  4724. # asm 1: and <mulredmask=int64#3,<rt1=int64#5
  4725. # asm 2: and <mulredmask=%rdx,<rt1=%r8
  4726. and %rdx,%r8
  4727. # qhasm: mult += rt3
  4728. # asm 1: add <rt3=int64#10,<mult=int64#4
  4729. # asm 2: add <rt3=%r12,<mult=%rcx
  4730. add %r12,%rcx
  4731. # qhasm: rt3 = mult
  4732. # asm 1: mov <mult=int64#4,>rt3=int64#7
  4733. # asm 2: mov <mult=%rcx,>rt3=%rax
  4734. mov %rcx,%rax
  4735. # qhasm: (uint64) mult >>= 51
  4736. # asm 1: shr $51,<mult=int64#4
  4737. # asm 2: shr $51,<mult=%rcx
  4738. shr $51,%rcx
  4739. # qhasm: rt2 &= mulredmask
  4740. # asm 1: and <mulredmask=int64#3,<rt2=int64#6
  4741. # asm 2: and <mulredmask=%rdx,<rt2=%r9
  4742. and %rdx,%r9
  4743. # qhasm: mult += rt4
  4744. # asm 1: add <rt4=int64#12,<mult=int64#4
  4745. # asm 2: add <rt4=%r14,<mult=%rcx
  4746. add %r14,%rcx
  4747. # qhasm: rt4 = mult
  4748. # asm 1: mov <mult=int64#4,>rt4=int64#8
  4749. # asm 2: mov <mult=%rcx,>rt4=%r10
  4750. mov %rcx,%r10
  4751. # qhasm: (uint64) mult >>= 51
  4752. # asm 1: shr $51,<mult=int64#4
  4753. # asm 2: shr $51,<mult=%rcx
  4754. shr $51,%rcx
  4755. # qhasm: rt3 &= mulredmask
  4756. # asm 1: and <mulredmask=int64#3,<rt3=int64#7
  4757. # asm 2: and <mulredmask=%rdx,<rt3=%rax
  4758. and %rdx,%rax
  4759. # qhasm: mult *= 19
  4760. # asm 1: imulq $19,<mult=int64#4,>mult=int64#4
  4761. # asm 2: imulq $19,<mult=%rcx,>mult=%rcx
  4762. imulq $19,%rcx,%rcx
  4763. # qhasm: rt0 += mult
  4764. # asm 1: add <mult=int64#4,<rt0=int64#2
  4765. # asm 2: add <mult=%rcx,<rt0=%rsi
  4766. add %rcx,%rsi
  4767. # qhasm: rt4 &= mulredmask
  4768. # asm 1: and <mulredmask=int64#3,<rt4=int64#8
  4769. # asm 2: and <mulredmask=%rdx,<rt4=%r10
  4770. and %rdx,%r10
  4771. # qhasm: *(uint64 *)(rp + 120) = rt0
  4772. # asm 1: movq <rt0=int64#2,120(<rp=int64#1)
  4773. # asm 2: movq <rt0=%rsi,120(<rp=%rdi)
  4774. movq %rsi,120(%rdi)
  4775. # qhasm: *(uint64 *)(rp + 128) = rt1
  4776. # asm 1: movq <rt1=int64#5,128(<rp=int64#1)
  4777. # asm 2: movq <rt1=%r8,128(<rp=%rdi)
  4778. movq %r8,128(%rdi)
  4779. # qhasm: *(uint64 *)(rp + 136) = rt2
  4780. # asm 1: movq <rt2=int64#6,136(<rp=int64#1)
  4781. # asm 2: movq <rt2=%r9,136(<rp=%rdi)
  4782. movq %r9,136(%rdi)
  4783. # qhasm: *(uint64 *)(rp + 144) = rt3
  4784. # asm 1: movq <rt3=int64#7,144(<rp=int64#1)
  4785. # asm 2: movq <rt3=%rax,144(<rp=%rdi)
  4786. movq %rax,144(%rdi)
  4787. # qhasm: *(uint64 *)(rp + 152) = rt4
  4788. # asm 1: movq <rt4=int64#8,152(<rp=int64#1)
  4789. # asm 2: movq <rt4=%r10,152(<rp=%rdi)
  4790. movq %r10,152(%rdi)
  4791. # qhasm: caller1 = caller1_stack
  4792. # asm 1: movq <caller1_stack=stack64#1,>caller1=int64#9
  4793. # asm 2: movq <caller1_stack=0(%rsp),>caller1=%r11
  4794. movq 0(%rsp),%r11
  4795. # qhasm: caller2 = caller2_stack
  4796. # asm 1: movq <caller2_stack=stack64#2,>caller2=int64#10
  4797. # asm 2: movq <caller2_stack=8(%rsp),>caller2=%r12
  4798. movq 8(%rsp),%r12
  4799. # qhasm: caller3 = caller3_stack
  4800. # asm 1: movq <caller3_stack=stack64#3,>caller3=int64#11
  4801. # asm 2: movq <caller3_stack=16(%rsp),>caller3=%r13
  4802. movq 16(%rsp),%r13
  4803. # qhasm: caller4 = caller4_stack
  4804. # asm 1: movq <caller4_stack=stack64#4,>caller4=int64#12
  4805. # asm 2: movq <caller4_stack=24(%rsp),>caller4=%r14
  4806. movq 24(%rsp),%r14
  4807. # qhasm: caller5 = caller5_stack
  4808. # asm 1: movq <caller5_stack=stack64#5,>caller5=int64#13
  4809. # asm 2: movq <caller5_stack=32(%rsp),>caller5=%r15
  4810. movq 32(%rsp),%r15
  4811. # qhasm: caller6 = caller6_stack
  4812. # asm 1: movq <caller6_stack=stack64#6,>caller6=int64#14
  4813. # asm 2: movq <caller6_stack=40(%rsp),>caller6=%rbx
  4814. movq 40(%rsp),%rbx
  4815. # qhasm: caller7 = caller7_stack
  4816. # asm 1: movq <caller7_stack=stack64#7,>caller7=int64#15
  4817. # asm 2: movq <caller7_stack=48(%rsp),>caller7=%rbp
  4818. movq 48(%rsp),%rbp
  4819. # qhasm: leave
  4820. add %r11,%rsp
  4821. mov %rdi,%rax
  4822. mov %rsi,%rdx
  4823. ret