ge25519_add_p1p1.S

  1. # qhasm: int64 rp
  2. # qhasm: int64 pp
  3. # qhasm: int64 qp
  4. # qhasm: input rp
  5. # qhasm: input pp
  6. # qhasm: input qp
  7. # qhasm: int64 caller1
  8. # qhasm: int64 caller2
  9. # qhasm: int64 caller3
  10. # qhasm: int64 caller4
  11. # qhasm: int64 caller5
  12. # qhasm: int64 caller6
  13. # qhasm: int64 caller7
  14. # qhasm: caller caller1
  15. # qhasm: caller caller2
  16. # qhasm: caller caller3
  17. # qhasm: caller caller4
  18. # qhasm: caller caller5
  19. # qhasm: caller caller6
  20. # qhasm: caller caller7
  21. # qhasm: stack64 caller1_stack
  22. # qhasm: stack64 caller2_stack
  23. # qhasm: stack64 caller3_stack
  24. # qhasm: stack64 caller4_stack
  25. # qhasm: stack64 caller5_stack
  26. # qhasm: stack64 caller6_stack
  27. # qhasm: stack64 caller7_stack
  28. # qhasm: int64 a0
  29. # qhasm: int64 a1
  30. # qhasm: int64 a2
  31. # qhasm: int64 a3
  32. # qhasm: int64 a4
  33. # qhasm: stack64 a0_stack
  34. # qhasm: stack64 a1_stack
  35. # qhasm: stack64 a2_stack
  36. # qhasm: stack64 a3_stack
  37. # qhasm: stack64 a4_stack
  38. # qhasm: int64 b0
  39. # qhasm: int64 b1
  40. # qhasm: int64 b2
  41. # qhasm: int64 b3
  42. # qhasm: int64 b4
  43. # qhasm: stack64 b0_stack
  44. # qhasm: stack64 b1_stack
  45. # qhasm: stack64 b2_stack
  46. # qhasm: stack64 b3_stack
  47. # qhasm: stack64 b4_stack
  48. # qhasm: int64 c0
  49. # qhasm: int64 c1
  50. # qhasm: int64 c2
  51. # qhasm: int64 c3
  52. # qhasm: int64 c4
  53. # qhasm: stack64 c0_stack
  54. # qhasm: stack64 c1_stack
  55. # qhasm: stack64 c2_stack
  56. # qhasm: stack64 c3_stack
  57. # qhasm: stack64 c4_stack
  58. # qhasm: int64 d0
  59. # qhasm: int64 d1
  60. # qhasm: int64 d2
  61. # qhasm: int64 d3
  62. # qhasm: int64 d4
  63. # qhasm: stack64 d0_stack
  64. # qhasm: stack64 d1_stack
  65. # qhasm: stack64 d2_stack
  66. # qhasm: stack64 d3_stack
  67. # qhasm: stack64 d4_stack
  68. # qhasm: int64 t10
  69. # qhasm: int64 t11
  70. # qhasm: int64 t12
  71. # qhasm: int64 t13
  72. # qhasm: int64 t14
  73. # qhasm: stack64 t10_stack
  74. # qhasm: stack64 t11_stack
  75. # qhasm: stack64 t12_stack
  76. # qhasm: stack64 t13_stack
  77. # qhasm: stack64 t14_stack
  78. # qhasm: int64 t20
  79. # qhasm: int64 t21
  80. # qhasm: int64 t22
  81. # qhasm: int64 t23
  82. # qhasm: int64 t24
  83. # qhasm: stack64 t20_stack
  84. # qhasm: stack64 t21_stack
  85. # qhasm: stack64 t22_stack
  86. # qhasm: stack64 t23_stack
  87. # qhasm: stack64 t24_stack
  88. # qhasm: int64 rx0
  89. # qhasm: int64 rx1
  90. # qhasm: int64 rx2
  91. # qhasm: int64 rx3
  92. # qhasm: int64 rx4
  93. # qhasm: int64 ry0
  94. # qhasm: int64 ry1
  95. # qhasm: int64 ry2
  96. # qhasm: int64 ry3
  97. # qhasm: int64 ry4
  98. # qhasm: int64 rz0
  99. # qhasm: int64 rz1
  100. # qhasm: int64 rz2
  101. # qhasm: int64 rz3
  102. # qhasm: int64 rz4
  103. # qhasm: int64 rt0
  104. # qhasm: int64 rt1
  105. # qhasm: int64 rt2
  106. # qhasm: int64 rt3
  107. # qhasm: int64 rt4
  108. # qhasm: int64 x0
  109. # qhasm: int64 x1
  110. # qhasm: int64 x2
  111. # qhasm: int64 x3
  112. # qhasm: int64 x4
  113. # qhasm: int64 mulr01
  114. # qhasm: int64 mulr11
  115. # qhasm: int64 mulr21
  116. # qhasm: int64 mulr31
  117. # qhasm: int64 mulr41
  118. # qhasm: int64 mulrax
  119. # qhasm: int64 mulrdx
  120. # qhasm: int64 mult
  121. # qhasm: int64 mulredmask
  122. # qhasm: stack64 mulx219_stack
  123. # qhasm: stack64 mulx319_stack
  124. # qhasm: stack64 mulx419_stack
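# ge25519_add_p1p1: add the extended-coordinate (X:Y:Z:T) point at pp to the
# point at qp and write the sum to rp in P1P1 form.  Field elements are
# radix-2^51: five 64-bit limbs of 51 bits each, reduced mod 2^255 - 19.
# As a sketch of the standard extended-coordinate addition (assumed here,
# not spelled out anywhere in this file):
#
#     A = (Y1 - X1) * (Y2 - X2)
#     B = (Y1 + X1) * (Y2 + X2)
#     C = T1 * 2d * T2
#     D = 2 * Z1 * Z2
#     P1P1 result:  X = B - A,  Y = B + A,  Z = D + C,  T = D - C
#
# The code below computes A first and then starts on B; the P1P1 form
# defers the final cross-multiplications.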
  125. # qhasm: enter CRYPTO_NAMESPACE(batch_ge25519_add_p1p1)
  126. .text
  127. .p2align 5
  128. .globl _CRYPTO_NAMESPACE(batch_ge25519_add_p1p1)
  129. .globl CRYPTO_NAMESPACE(batch_ge25519_add_p1p1)
  130. _CRYPTO_NAMESPACE(batch_ge25519_add_p1p1):
  131. CRYPTO_NAMESPACE(batch_ge25519_add_p1p1):
  132. mov %rsp,%r11
  133. and $31,%r11
  134. add $256,%r11
  135. sub %r11,%rsp
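# The four instructions above carve out a scratch frame: %r11 is set to
# (%rsp & 31) + 256, so after the subtraction the new %rsp is 32-byte
# aligned with at least 256 bytes of room for the stack64 spill slots
# declared above.  The caller1..caller7 slots are filled next with
# %r11-%r15, %rbx and %rbp.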
  136. # qhasm: caller1_stack = caller1
  137. # asm 1: movq <caller1=int64#9,>caller1_stack=stack64#1
  138. # asm 2: movq <caller1=%r11,>caller1_stack=0(%rsp)
  139. movq %r11,0(%rsp)
  140. # qhasm: caller2_stack = caller2
  141. # asm 1: movq <caller2=int64#10,>caller2_stack=stack64#2
  142. # asm 2: movq <caller2=%r12,>caller2_stack=8(%rsp)
  143. movq %r12,8(%rsp)
  144. # qhasm: caller3_stack = caller3
  145. # asm 1: movq <caller3=int64#11,>caller3_stack=stack64#3
  146. # asm 2: movq <caller3=%r13,>caller3_stack=16(%rsp)
  147. movq %r13,16(%rsp)
  148. # qhasm: caller4_stack = caller4
  149. # asm 1: movq <caller4=int64#12,>caller4_stack=stack64#4
  150. # asm 2: movq <caller4=%r14,>caller4_stack=24(%rsp)
  151. movq %r14,24(%rsp)
  152. # qhasm: caller5_stack = caller5
  153. # asm 1: movq <caller5=int64#13,>caller5_stack=stack64#5
  154. # asm 2: movq <caller5=%r15,>caller5_stack=32(%rsp)
  155. movq %r15,32(%rsp)
  156. # qhasm: caller6_stack = caller6
  157. # asm 1: movq <caller6=int64#14,>caller6_stack=stack64#6
  158. # asm 2: movq <caller6=%rbx,>caller6_stack=40(%rsp)
  159. movq %rbx,40(%rsp)
  160. # qhasm: caller7_stack = caller7
  161. # asm 1: movq <caller7=int64#15,>caller7_stack=stack64#7
  162. # asm 2: movq <caller7=%rbp,>caller7_stack=48(%rsp)
  163. movq %rbp,48(%rsp)
  164. # qhasm: qp = qp
  165. # asm 1: mov <qp=int64#3,>qp=int64#4
  166. # asm 2: mov <qp=%rdx,>qp=%rcx
  167. mov %rdx,%rcx
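# qp is moved from %rdx into %rcx, presumably because the widening mulq
# instructions below clobber %rdx:%rax, so %rdx cannot keep holding a pointer.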
  168. # qhasm: a0 = *(uint64 *)(pp + 40)
  169. # asm 1: movq 40(<pp=int64#2),>a0=int64#3
  170. # asm 2: movq 40(<pp=%rsi),>a0=%rdx
  171. movq 40(%rsi),%rdx
  172. # qhasm: a1 = *(uint64 *)(pp + 48)
  173. # asm 1: movq 48(<pp=int64#2),>a1=int64#5
  174. # asm 2: movq 48(<pp=%rsi),>a1=%r8
  175. movq 48(%rsi),%r8
  176. # qhasm: a2 = *(uint64 *)(pp + 56)
  177. # asm 1: movq 56(<pp=int64#2),>a2=int64#6
  178. # asm 2: movq 56(<pp=%rsi),>a2=%r9
  179. movq 56(%rsi),%r9
  180. # qhasm: a3 = *(uint64 *)(pp + 64)
  181. # asm 1: movq 64(<pp=int64#2),>a3=int64#7
  182. # asm 2: movq 64(<pp=%rsi),>a3=%rax
  183. movq 64(%rsi),%rax
  184. # qhasm: a4 = *(uint64 *)(pp + 72)
  185. # asm 1: movq 72(<pp=int64#2),>a4=int64#8
  186. # asm 2: movq 72(<pp=%rsi),>a4=%r10
  187. movq 72(%rsi),%r10
  188. # qhasm: b0 = a0
  189. # asm 1: mov <a0=int64#3,>b0=int64#9
  190. # asm 2: mov <a0=%rdx,>b0=%r11
  191. mov %rdx,%r11
  192. # qhasm: a0 += *(uint64 *) &CRYPTO_NAMESPACE(batch_2P0)
  193. # asm 1: add CRYPTO_NAMESPACE(batch_2P0),<a0=int64#3
  194. # asm 2: add CRYPTO_NAMESPACE(batch_2P0),<a0=%rdx
  195. add CRYPTO_NAMESPACE(batch_2P0)(%rip),%rdx
  196. # qhasm: x0 = *(uint64 *)(pp + 0)
  197. # asm 1: movq 0(<pp=int64#2),>x0=int64#10
  198. # asm 2: movq 0(<pp=%rsi),>x0=%r12
  199. movq 0(%rsi),%r12
  200. # qhasm: b0 += x0
  201. # asm 1: add <x0=int64#10,<b0=int64#9
  202. # asm 2: add <x0=%r12,<b0=%r11
  203. add %r12,%r11
  204. # qhasm: a0 -= x0
  205. # asm 1: sub <x0=int64#10,<a0=int64#3
  206. # asm 2: sub <x0=%r12,<a0=%rdx
  207. sub %r12,%rdx
  208. # qhasm: b1 = a1
  209. # asm 1: mov <a1=int64#5,>b1=int64#10
  210. # asm 2: mov <a1=%r8,>b1=%r12
  211. mov %r8,%r12
  212. # qhasm: a1 += *(uint64 *) &CRYPTO_NAMESPACE(batch_2P1234)
  213. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<a1=int64#5
  214. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<a1=%r8
  215. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%r8
  216. # qhasm: x1 = *(uint64 *)(pp + 8)
  217. # asm 1: movq 8(<pp=int64#2),>x1=int64#11
  218. # asm 2: movq 8(<pp=%rsi),>x1=%r13
  219. movq 8(%rsi),%r13
  220. # qhasm: b1 += x1
  221. # asm 1: add <x1=int64#11,<b1=int64#10
  222. # asm 2: add <x1=%r13,<b1=%r12
  223. add %r13,%r12
  224. # qhasm: a1 -= x1
  225. # asm 1: sub <x1=int64#11,<a1=int64#5
  226. # asm 2: sub <x1=%r13,<a1=%r8
  227. sub %r13,%r8
  228. # qhasm: b2 = a2
  229. # asm 1: mov <a2=int64#6,>b2=int64#11
  230. # asm 2: mov <a2=%r9,>b2=%r13
  231. mov %r9,%r13
  232. # qhasm: a2 += *(uint64 *) &CRYPTO_NAMESPACE(batch_2P1234)
  233. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<a2=int64#6
  234. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<a2=%r9
  235. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%r9
  236. # qhasm: x2 = *(uint64 *)(pp + 16)
  237. # asm 1: movq 16(<pp=int64#2),>x2=int64#12
  238. # asm 2: movq 16(<pp=%rsi),>x2=%r14
  239. movq 16(%rsi),%r14
  240. # qhasm: b2 += x2
  241. # asm 1: add <x2=int64#12,<b2=int64#11
  242. # asm 2: add <x2=%r14,<b2=%r13
  243. add %r14,%r13
  244. # qhasm: a2 -= x2
  245. # asm 1: sub <x2=int64#12,<a2=int64#6
  246. # asm 2: sub <x2=%r14,<a2=%r9
  247. sub %r14,%r9
  248. # qhasm: b3 = a3
  249. # asm 1: mov <a3=int64#7,>b3=int64#12
  250. # asm 2: mov <a3=%rax,>b3=%r14
  251. mov %rax,%r14
  252. # qhasm: a3 += *(uint64 *) &CRYPTO_NAMESPACE(batch_2P1234)
  253. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<a3=int64#7
  254. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<a3=%rax
  255. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%rax
  256. # qhasm: x3 = *(uint64 *)(pp + 24)
  257. # asm 1: movq 24(<pp=int64#2),>x3=int64#13
  258. # asm 2: movq 24(<pp=%rsi),>x3=%r15
  259. movq 24(%rsi),%r15
  260. # qhasm: b3 += x3
  261. # asm 1: add <x3=int64#13,<b3=int64#12
  262. # asm 2: add <x3=%r15,<b3=%r14
  263. add %r15,%r14
  264. # qhasm: a3 -= x3
  265. # asm 1: sub <x3=int64#13,<a3=int64#7
  266. # asm 2: sub <x3=%r15,<a3=%rax
  267. sub %r15,%rax
  268. # qhasm: b4 = a4
  269. # asm 1: mov <a4=int64#8,>b4=int64#13
  270. # asm 2: mov <a4=%r10,>b4=%r15
  271. mov %r10,%r15
  272. # qhasm: a4 += *(uint64 *) &CRYPTO_NAMESPACE(batch_2P1234)
  273. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<a4=int64#8
  274. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<a4=%r10
  275. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%r10
  276. # qhasm: x4 = *(uint64 *)(pp + 32)
  277. # asm 1: movq 32(<pp=int64#2),>x4=int64#14
  278. # asm 2: movq 32(<pp=%rsi),>x4=%rbx
  279. movq 32(%rsi),%rbx
  280. # qhasm: b4 += x4
  281. # asm 1: add <x4=int64#14,<b4=int64#13
  282. # asm 2: add <x4=%rbx,<b4=%r15
  283. add %rbx,%r15
  284. # qhasm: a4 -= x4
  285. # asm 1: sub <x4=int64#14,<a4=int64#8
  286. # asm 2: sub <x4=%rbx,<a4=%r10
  287. sub %rbx,%r10
  288. # qhasm: a0_stack = a0
  289. # asm 1: movq <a0=int64#3,>a0_stack=stack64#8
  290. # asm 2: movq <a0=%rdx,>a0_stack=56(%rsp)
  291. movq %rdx,56(%rsp)
  292. # qhasm: a1_stack = a1
  293. # asm 1: movq <a1=int64#5,>a1_stack=stack64#9
  294. # asm 2: movq <a1=%r8,>a1_stack=64(%rsp)
  295. movq %r8,64(%rsp)
  296. # qhasm: a2_stack = a2
  297. # asm 1: movq <a2=int64#6,>a2_stack=stack64#10
  298. # asm 2: movq <a2=%r9,>a2_stack=72(%rsp)
  299. movq %r9,72(%rsp)
  300. # qhasm: a3_stack = a3
  301. # asm 1: movq <a3=int64#7,>a3_stack=stack64#11
  302. # asm 2: movq <a3=%rax,>a3_stack=80(%rsp)
  303. movq %rax,80(%rsp)
  304. # qhasm: a4_stack = a4
  305. # asm 1: movq <a4=int64#8,>a4_stack=stack64#12
  306. # asm 2: movq <a4=%r10,>a4_stack=88(%rsp)
  307. movq %r10,88(%rsp)
  308. # qhasm: b0_stack = b0
  309. # asm 1: movq <b0=int64#9,>b0_stack=stack64#13
  310. # asm 2: movq <b0=%r11,>b0_stack=96(%rsp)
  311. movq %r11,96(%rsp)
  312. # qhasm: b1_stack = b1
  313. # asm 1: movq <b1=int64#10,>b1_stack=stack64#14
  314. # asm 2: movq <b1=%r12,>b1_stack=104(%rsp)
  315. movq %r12,104(%rsp)
  316. # qhasm: b2_stack = b2
  317. # asm 1: movq <b2=int64#11,>b2_stack=stack64#15
  318. # asm 2: movq <b2=%r13,>b2_stack=112(%rsp)
  319. movq %r13,112(%rsp)
  320. # qhasm: b3_stack = b3
  321. # asm 1: movq <b3=int64#12,>b3_stack=stack64#16
  322. # asm 2: movq <b3=%r14,>b3_stack=120(%rsp)
  323. movq %r14,120(%rsp)
  324. # qhasm: b4_stack = b4
  325. # asm 1: movq <b4=int64#13,>b4_stack=stack64#17
  326. # asm 2: movq <b4=%r15,>b4_stack=128(%rsp)
  327. movq %r15,128(%rsp)
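# At this point, assuming the usual ge25519 layout (x at offset 0, y at 40,
# z at 80, t at 120), a = Y1 - X1 and b = Y1 + X1 limb by limb.  The
# batch_2P0 / batch_2P1234 constants appear to add 2p (twice 2^255 - 19 in
# radix-2^51 form) before the subtraction so that no limb of a goes
# negative.  Both values are parked in 56..128(%rsp) while the registers
# are reused for the qp-side sums and differences.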
  328. # qhasm: t10 = *(uint64 *)(qp + 40)
  329. # asm 1: movq 40(<qp=int64#4),>t10=int64#3
  330. # asm 2: movq 40(<qp=%rcx),>t10=%rdx
  331. movq 40(%rcx),%rdx
  332. # qhasm: t11 = *(uint64 *)(qp + 48)
  333. # asm 1: movq 48(<qp=int64#4),>t11=int64#5
  334. # asm 2: movq 48(<qp=%rcx),>t11=%r8
  335. movq 48(%rcx),%r8
  336. # qhasm: t12 = *(uint64 *)(qp + 56)
  337. # asm 1: movq 56(<qp=int64#4),>t12=int64#6
  338. # asm 2: movq 56(<qp=%rcx),>t12=%r9
  339. movq 56(%rcx),%r9
  340. # qhasm: t13 = *(uint64 *)(qp + 64)
  341. # asm 1: movq 64(<qp=int64#4),>t13=int64#7
  342. # asm 2: movq 64(<qp=%rcx),>t13=%rax
  343. movq 64(%rcx),%rax
  344. # qhasm: t14 = *(uint64 *)(qp + 72)
  345. # asm 1: movq 72(<qp=int64#4),>t14=int64#8
  346. # asm 2: movq 72(<qp=%rcx),>t14=%r10
  347. movq 72(%rcx),%r10
  348. # qhasm: t20 = t10
  349. # asm 1: mov <t10=int64#3,>t20=int64#9
  350. # asm 2: mov <t10=%rdx,>t20=%r11
  351. mov %rdx,%r11
  352. # qhasm: t10 += *(uint64 *) &CRYPTO_NAMESPACE(batch_2P0)
  353. # asm 1: add CRYPTO_NAMESPACE(batch_2P0),<t10=int64#3
  354. # asm 2: add CRYPTO_NAMESPACE(batch_2P0),<t10=%rdx
  355. add CRYPTO_NAMESPACE(batch_2P0)(%rip),%rdx
  356. # qhasm: rx0 = *(uint64 *) (qp + 0)
  357. # asm 1: movq 0(<qp=int64#4),>rx0=int64#10
  358. # asm 2: movq 0(<qp=%rcx),>rx0=%r12
  359. movq 0(%rcx),%r12
  360. # qhasm: t20 += rx0
  361. # asm 1: add <rx0=int64#10,<t20=int64#9
  362. # asm 2: add <rx0=%r12,<t20=%r11
  363. add %r12,%r11
  364. # qhasm: t10 -= rx0
  365. # asm 1: sub <rx0=int64#10,<t10=int64#3
  366. # asm 2: sub <rx0=%r12,<t10=%rdx
  367. sub %r12,%rdx
  368. # qhasm: t21 = t11
  369. # asm 1: mov <t11=int64#5,>t21=int64#10
  370. # asm 2: mov <t11=%r8,>t21=%r12
  371. mov %r8,%r12
  372. # qhasm: t11 += *(uint64 *) &CRYPTO_NAMESPACE(batch_2P1234)
  373. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<t11=int64#5
  374. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<t11=%r8
  375. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%r8
  376. # qhasm: rx1 = *(uint64 *) (qp + 8)
  377. # asm 1: movq 8(<qp=int64#4),>rx1=int64#11
  378. # asm 2: movq 8(<qp=%rcx),>rx1=%r13
  379. movq 8(%rcx),%r13
  380. # qhasm: t21 += rx1
  381. # asm 1: add <rx1=int64#11,<t21=int64#10
  382. # asm 2: add <rx1=%r13,<t21=%r12
  383. add %r13,%r12
  384. # qhasm: t11 -= rx1
  385. # asm 1: sub <rx1=int64#11,<t11=int64#5
  386. # asm 2: sub <rx1=%r13,<t11=%r8
  387. sub %r13,%r8
  388. # qhasm: t22 = t12
  389. # asm 1: mov <t12=int64#6,>t22=int64#11
  390. # asm 2: mov <t12=%r9,>t22=%r13
  391. mov %r9,%r13
  392. # qhasm: t12 += *(uint64 *) &CRYPTO_NAMESPACE(batch_2P1234)
  393. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<t12=int64#6
  394. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<t12=%r9
  395. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%r9
  396. # qhasm: rx2 = *(uint64 *) (qp + 16)
  397. # asm 1: movq 16(<qp=int64#4),>rx2=int64#12
  398. # asm 2: movq 16(<qp=%rcx),>rx2=%r14
  399. movq 16(%rcx),%r14
  400. # qhasm: t22 += rx2
  401. # asm 1: add <rx2=int64#12,<t22=int64#11
  402. # asm 2: add <rx2=%r14,<t22=%r13
  403. add %r14,%r13
  404. # qhasm: t12 -= rx2
  405. # asm 1: sub <rx2=int64#12,<t12=int64#6
  406. # asm 2: sub <rx2=%r14,<t12=%r9
  407. sub %r14,%r9
  408. # qhasm: t23 = t13
  409. # asm 1: mov <t13=int64#7,>t23=int64#12
  410. # asm 2: mov <t13=%rax,>t23=%r14
  411. mov %rax,%r14
  412. # qhasm: t13 += *(uint64 *) &CRYPTO_NAMESPACE(batch_2P1234)
  413. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<t13=int64#7
  414. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<t13=%rax
  415. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%rax
  416. # qhasm: rx3 = *(uint64 *) (qp + 24)
  417. # asm 1: movq 24(<qp=int64#4),>rx3=int64#13
  418. # asm 2: movq 24(<qp=%rcx),>rx3=%r15
  419. movq 24(%rcx),%r15
  420. # qhasm: t23 += rx3
  421. # asm 1: add <rx3=int64#13,<t23=int64#12
  422. # asm 2: add <rx3=%r15,<t23=%r14
  423. add %r15,%r14
  424. # qhasm: t13 -= rx3
  425. # asm 1: sub <rx3=int64#13,<t13=int64#7
  426. # asm 2: sub <rx3=%r15,<t13=%rax
  427. sub %r15,%rax
  428. # qhasm: t24 = t14
  429. # asm 1: mov <t14=int64#8,>t24=int64#13
  430. # asm 2: mov <t14=%r10,>t24=%r15
  431. mov %r10,%r15
  432. # qhasm: t14 += *(uint64 *) &CRYPTO_NAMESPACE(batch_2P1234)
  433. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<t14=int64#8
  434. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<t14=%r10
  435. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%r10
  436. # qhasm: rx4 = *(uint64 *) (qp + 32)
  437. # asm 1: movq 32(<qp=int64#4),>rx4=int64#14
  438. # asm 2: movq 32(<qp=%rcx),>rx4=%rbx
  439. movq 32(%rcx),%rbx
  440. # qhasm: t24 += rx4
  441. # asm 1: add <rx4=int64#14,<t24=int64#13
  442. # asm 2: add <rx4=%rbx,<t24=%r15
  443. add %rbx,%r15
  444. # qhasm: t14 -= rx4
  445. # asm 1: sub <rx4=int64#14,<t14=int64#8
  446. # asm 2: sub <rx4=%rbx,<t14=%r10
  447. sub %rbx,%r10
  448. # qhasm: t10_stack = t10
  449. # asm 1: movq <t10=int64#3,>t10_stack=stack64#18
  450. # asm 2: movq <t10=%rdx,>t10_stack=136(%rsp)
  451. movq %rdx,136(%rsp)
  452. # qhasm: t11_stack = t11
  453. # asm 1: movq <t11=int64#5,>t11_stack=stack64#19
  454. # asm 2: movq <t11=%r8,>t11_stack=144(%rsp)
  455. movq %r8,144(%rsp)
  456. # qhasm: t12_stack = t12
  457. # asm 1: movq <t12=int64#6,>t12_stack=stack64#20
  458. # asm 2: movq <t12=%r9,>t12_stack=152(%rsp)
  459. movq %r9,152(%rsp)
  460. # qhasm: t13_stack = t13
  461. # asm 1: movq <t13=int64#7,>t13_stack=stack64#21
  462. # asm 2: movq <t13=%rax,>t13_stack=160(%rsp)
  463. movq %rax,160(%rsp)
  464. # qhasm: t14_stack = t14
  465. # asm 1: movq <t14=int64#8,>t14_stack=stack64#22
  466. # asm 2: movq <t14=%r10,>t14_stack=168(%rsp)
  467. movq %r10,168(%rsp)
  468. # qhasm: t20_stack = t20
  469. # asm 1: movq <t20=int64#9,>t20_stack=stack64#23
  470. # asm 2: movq <t20=%r11,>t20_stack=176(%rsp)
  471. movq %r11,176(%rsp)
  472. # qhasm: t21_stack = t21
  473. # asm 1: movq <t21=int64#10,>t21_stack=stack64#24
  474. # asm 2: movq <t21=%r12,>t21_stack=184(%rsp)
  475. movq %r12,184(%rsp)
  476. # qhasm: t22_stack = t22
  477. # asm 1: movq <t22=int64#11,>t22_stack=stack64#25
  478. # asm 2: movq <t22=%r13,>t22_stack=192(%rsp)
  479. movq %r13,192(%rsp)
  480. # qhasm: t23_stack = t23
  481. # asm 1: movq <t23=int64#12,>t23_stack=stack64#26
  482. # asm 2: movq <t23=%r14,>t23_stack=200(%rsp)
  483. movq %r14,200(%rsp)
  484. # qhasm: t24_stack = t24
  485. # asm 1: movq <t24=int64#13,>t24_stack=stack64#27
  486. # asm 2: movq <t24=%r15,>t24_stack=208(%rsp)
  487. movq %r15,208(%rsp)
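# Likewise t1 = Y2 - X2 (again offset by 2p) and t2 = Y2 + X2 for the point
# at qp, spilled to 136..208(%rsp).  The code now forms
#     a = a * t1 = (Y1 - X1) * (Y2 - X2)
# with a 5x5 schoolbook multiplication modulo 2^255 - 19: limbs that would
# land at 2^255 and above are pre-multiplied by 19 (the mulx319/mulx419
# values and the imulq $19 steps) so every partial product folds back into
# the five 51-bit columns.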
  488. # qhasm: mulrax = a3_stack
  489. # asm 1: movq <a3_stack=stack64#11,>mulrax=int64#3
  490. # asm 2: movq <a3_stack=80(%rsp),>mulrax=%rdx
  491. movq 80(%rsp),%rdx
  492. # qhasm: mulrax *= 19
  493. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  494. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  495. imulq $19,%rdx,%rax
  496. # qhasm: mulx319_stack = mulrax
  497. # asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#28
  498. # asm 2: movq <mulrax=%rax,>mulx319_stack=216(%rsp)
  499. movq %rax,216(%rsp)
  500. # qhasm: (uint128) mulrdx mulrax = mulrax * t12_stack
  501. # asm 1: mulq <t12_stack=stack64#20
  502. # asm 2: mulq <t12_stack=152(%rsp)
  503. mulq 152(%rsp)
  504. # qhasm: a0 = mulrax
  505. # asm 1: mov <mulrax=int64#7,>a0=int64#5
  506. # asm 2: mov <mulrax=%rax,>a0=%r8
  507. mov %rax,%r8
  508. # qhasm: mulr01 = mulrdx
  509. # asm 1: mov <mulrdx=int64#3,>mulr01=int64#6
  510. # asm 2: mov <mulrdx=%rdx,>mulr01=%r9
  511. mov %rdx,%r9
  512. # qhasm: mulrax = a4_stack
  513. # asm 1: movq <a4_stack=stack64#12,>mulrax=int64#3
  514. # asm 2: movq <a4_stack=88(%rsp),>mulrax=%rdx
  515. movq 88(%rsp),%rdx
  516. # qhasm: mulrax *= 19
  517. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  518. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  519. imulq $19,%rdx,%rax
  520. # qhasm: mulx419_stack = mulrax
  521. # asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#29
  522. # asm 2: movq <mulrax=%rax,>mulx419_stack=224(%rsp)
  523. movq %rax,224(%rsp)
  524. # qhasm: (uint128) mulrdx mulrax = mulrax * t11_stack
  525. # asm 1: mulq <t11_stack=stack64#19
  526. # asm 2: mulq <t11_stack=144(%rsp)
  527. mulq 144(%rsp)
  528. # qhasm: carry? a0 += mulrax
  529. # asm 1: add <mulrax=int64#7,<a0=int64#5
  530. # asm 2: add <mulrax=%rax,<a0=%r8
  531. add %rax,%r8
  532. # qhasm: mulr01 += mulrdx + carry
  533. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
  534. # asm 2: adc <mulrdx=%rdx,<mulr01=%r9
  535. adc %rdx,%r9
  536. # qhasm: mulrax = a0_stack
  537. # asm 1: movq <a0_stack=stack64#8,>mulrax=int64#7
  538. # asm 2: movq <a0_stack=56(%rsp),>mulrax=%rax
  539. movq 56(%rsp),%rax
  540. # qhasm: (uint128) mulrdx mulrax = mulrax * t10_stack
  541. # asm 1: mulq <t10_stack=stack64#18
  542. # asm 2: mulq <t10_stack=136(%rsp)
  543. mulq 136(%rsp)
  544. # qhasm: carry? a0 += mulrax
  545. # asm 1: add <mulrax=int64#7,<a0=int64#5
  546. # asm 2: add <mulrax=%rax,<a0=%r8
  547. add %rax,%r8
  548. # qhasm: mulr01 += mulrdx + carry
  549. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
  550. # asm 2: adc <mulrdx=%rdx,<mulr01=%r9
  551. adc %rdx,%r9
  552. # qhasm: mulrax = a0_stack
  553. # asm 1: movq <a0_stack=stack64#8,>mulrax=int64#7
  554. # asm 2: movq <a0_stack=56(%rsp),>mulrax=%rax
  555. movq 56(%rsp),%rax
  556. # qhasm: (uint128) mulrdx mulrax = mulrax * t11_stack
  557. # asm 1: mulq <t11_stack=stack64#19
  558. # asm 2: mulq <t11_stack=144(%rsp)
  559. mulq 144(%rsp)
  560. # qhasm: a1 = mulrax
  561. # asm 1: mov <mulrax=int64#7,>a1=int64#8
  562. # asm 2: mov <mulrax=%rax,>a1=%r10
  563. mov %rax,%r10
  564. # qhasm: mulr11 = mulrdx
  565. # asm 1: mov <mulrdx=int64#3,>mulr11=int64#9
  566. # asm 2: mov <mulrdx=%rdx,>mulr11=%r11
  567. mov %rdx,%r11
  568. # qhasm: mulrax = a0_stack
  569. # asm 1: movq <a0_stack=stack64#8,>mulrax=int64#7
  570. # asm 2: movq <a0_stack=56(%rsp),>mulrax=%rax
  571. movq 56(%rsp),%rax
  572. # qhasm: (uint128) mulrdx mulrax = mulrax * t12_stack
  573. # asm 1: mulq <t12_stack=stack64#20
  574. # asm 2: mulq <t12_stack=152(%rsp)
  575. mulq 152(%rsp)
  576. # qhasm: a2 = mulrax
  577. # asm 1: mov <mulrax=int64#7,>a2=int64#10
  578. # asm 2: mov <mulrax=%rax,>a2=%r12
  579. mov %rax,%r12
  580. # qhasm: mulr21 = mulrdx
  581. # asm 1: mov <mulrdx=int64#3,>mulr21=int64#11
  582. # asm 2: mov <mulrdx=%rdx,>mulr21=%r13
  583. mov %rdx,%r13
  584. # qhasm: mulrax = a0_stack
  585. # asm 1: movq <a0_stack=stack64#8,>mulrax=int64#7
  586. # asm 2: movq <a0_stack=56(%rsp),>mulrax=%rax
  587. movq 56(%rsp),%rax
  588. # qhasm: (uint128) mulrdx mulrax = mulrax * t13_stack
  589. # asm 1: mulq <t13_stack=stack64#21
  590. # asm 2: mulq <t13_stack=160(%rsp)
  591. mulq 160(%rsp)
  592. # qhasm: a3 = mulrax
  593. # asm 1: mov <mulrax=int64#7,>a3=int64#12
  594. # asm 2: mov <mulrax=%rax,>a3=%r14
  595. mov %rax,%r14
  596. # qhasm: mulr31 = mulrdx
  597. # asm 1: mov <mulrdx=int64#3,>mulr31=int64#13
  598. # asm 2: mov <mulrdx=%rdx,>mulr31=%r15
  599. mov %rdx,%r15
  600. # qhasm: mulrax = a0_stack
  601. # asm 1: movq <a0_stack=stack64#8,>mulrax=int64#7
  602. # asm 2: movq <a0_stack=56(%rsp),>mulrax=%rax
  603. movq 56(%rsp),%rax
  604. # qhasm: (uint128) mulrdx mulrax = mulrax * t14_stack
  605. # asm 1: mulq <t14_stack=stack64#22
  606. # asm 2: mulq <t14_stack=168(%rsp)
  607. mulq 168(%rsp)
  608. # qhasm: a4 = mulrax
  609. # asm 1: mov <mulrax=int64#7,>a4=int64#14
  610. # asm 2: mov <mulrax=%rax,>a4=%rbx
  611. mov %rax,%rbx
  612. # qhasm: mulr41 = mulrdx
  613. # asm 1: mov <mulrdx=int64#3,>mulr41=int64#15
  614. # asm 2: mov <mulrdx=%rdx,>mulr41=%rbp
  615. mov %rdx,%rbp
  616. # qhasm: mulrax = a1_stack
  617. # asm 1: movq <a1_stack=stack64#9,>mulrax=int64#7
  618. # asm 2: movq <a1_stack=64(%rsp),>mulrax=%rax
  619. movq 64(%rsp),%rax
  620. # qhasm: (uint128) mulrdx mulrax = mulrax * t10_stack
  621. # asm 1: mulq <t10_stack=stack64#18
  622. # asm 2: mulq <t10_stack=136(%rsp)
  623. mulq 136(%rsp)
  624. # qhasm: carry? a1 += mulrax
  625. # asm 1: add <mulrax=int64#7,<a1=int64#8
  626. # asm 2: add <mulrax=%rax,<a1=%r10
  627. add %rax,%r10
  628. # qhasm: mulr11 += mulrdx + carry
  629. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
  630. # asm 2: adc <mulrdx=%rdx,<mulr11=%r11
  631. adc %rdx,%r11
  632. # qhasm: mulrax = a1_stack
  633. # asm 1: movq <a1_stack=stack64#9,>mulrax=int64#7
  634. # asm 2: movq <a1_stack=64(%rsp),>mulrax=%rax
  635. movq 64(%rsp),%rax
  636. # qhasm: (uint128) mulrdx mulrax = mulrax * t11_stack
  637. # asm 1: mulq <t11_stack=stack64#19
  638. # asm 2: mulq <t11_stack=144(%rsp)
  639. mulq 144(%rsp)
  640. # qhasm: carry? a2 += mulrax
  641. # asm 1: add <mulrax=int64#7,<a2=int64#10
  642. # asm 2: add <mulrax=%rax,<a2=%r12
  643. add %rax,%r12
  644. # qhasm: mulr21 += mulrdx + carry
  645. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
  646. # asm 2: adc <mulrdx=%rdx,<mulr21=%r13
  647. adc %rdx,%r13
  648. # qhasm: mulrax = a1_stack
  649. # asm 1: movq <a1_stack=stack64#9,>mulrax=int64#7
  650. # asm 2: movq <a1_stack=64(%rsp),>mulrax=%rax
  651. movq 64(%rsp),%rax
  652. # qhasm: (uint128) mulrdx mulrax = mulrax * t12_stack
  653. # asm 1: mulq <t12_stack=stack64#20
  654. # asm 2: mulq <t12_stack=152(%rsp)
  655. mulq 152(%rsp)
  656. # qhasm: carry? a3 += mulrax
  657. # asm 1: add <mulrax=int64#7,<a3=int64#12
  658. # asm 2: add <mulrax=%rax,<a3=%r14
  659. add %rax,%r14
  660. # qhasm: mulr31 += mulrdx + carry
  661. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
  662. # asm 2: adc <mulrdx=%rdx,<mulr31=%r15
  663. adc %rdx,%r15
  664. # qhasm: mulrax = a1_stack
  665. # asm 1: movq <a1_stack=stack64#9,>mulrax=int64#7
  666. # asm 2: movq <a1_stack=64(%rsp),>mulrax=%rax
  667. movq 64(%rsp),%rax
  668. # qhasm: (uint128) mulrdx mulrax = mulrax * t13_stack
  669. # asm 1: mulq <t13_stack=stack64#21
  670. # asm 2: mulq <t13_stack=160(%rsp)
  671. mulq 160(%rsp)
  672. # qhasm: carry? a4 += mulrax
  673. # asm 1: add <mulrax=int64#7,<a4=int64#14
  674. # asm 2: add <mulrax=%rax,<a4=%rbx
  675. add %rax,%rbx
  676. # qhasm: mulr41 += mulrdx + carry
  677. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
  678. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
  679. adc %rdx,%rbp
  680. # qhasm: mulrax = a1_stack
  681. # asm 1: movq <a1_stack=stack64#9,>mulrax=int64#3
  682. # asm 2: movq <a1_stack=64(%rsp),>mulrax=%rdx
  683. movq 64(%rsp),%rdx
  684. # qhasm: mulrax *= 19
  685. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  686. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  687. imulq $19,%rdx,%rax
  688. # qhasm: (uint128) mulrdx mulrax = mulrax * t14_stack
  689. # asm 1: mulq <t14_stack=stack64#22
  690. # asm 2: mulq <t14_stack=168(%rsp)
  691. mulq 168(%rsp)
  692. # qhasm: carry? a0 += mulrax
  693. # asm 1: add <mulrax=int64#7,<a0=int64#5
  694. # asm 2: add <mulrax=%rax,<a0=%r8
  695. add %rax,%r8
  696. # qhasm: mulr01 += mulrdx + carry
  697. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
  698. # asm 2: adc <mulrdx=%rdx,<mulr01=%r9
  699. adc %rdx,%r9
  700. # qhasm: mulrax = a2_stack
  701. # asm 1: movq <a2_stack=stack64#10,>mulrax=int64#7
  702. # asm 2: movq <a2_stack=72(%rsp),>mulrax=%rax
  703. movq 72(%rsp),%rax
  704. # qhasm: (uint128) mulrdx mulrax = mulrax * t10_stack
  705. # asm 1: mulq <t10_stack=stack64#18
  706. # asm 2: mulq <t10_stack=136(%rsp)
  707. mulq 136(%rsp)
  708. # qhasm: carry? a2 += mulrax
  709. # asm 1: add <mulrax=int64#7,<a2=int64#10
  710. # asm 2: add <mulrax=%rax,<a2=%r12
  711. add %rax,%r12
  712. # qhasm: mulr21 += mulrdx + carry
  713. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
  714. # asm 2: adc <mulrdx=%rdx,<mulr21=%r13
  715. adc %rdx,%r13
  716. # qhasm: mulrax = a2_stack
  717. # asm 1: movq <a2_stack=stack64#10,>mulrax=int64#7
  718. # asm 2: movq <a2_stack=72(%rsp),>mulrax=%rax
  719. movq 72(%rsp),%rax
  720. # qhasm: (uint128) mulrdx mulrax = mulrax * t11_stack
  721. # asm 1: mulq <t11_stack=stack64#19
  722. # asm 2: mulq <t11_stack=144(%rsp)
  723. mulq 144(%rsp)
  724. # qhasm: carry? a3 += mulrax
  725. # asm 1: add <mulrax=int64#7,<a3=int64#12
  726. # asm 2: add <mulrax=%rax,<a3=%r14
  727. add %rax,%r14
  728. # qhasm: mulr31 += mulrdx + carry
  729. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
  730. # asm 2: adc <mulrdx=%rdx,<mulr31=%r15
  731. adc %rdx,%r15
  732. # qhasm: mulrax = a2_stack
  733. # asm 1: movq <a2_stack=stack64#10,>mulrax=int64#7
  734. # asm 2: movq <a2_stack=72(%rsp),>mulrax=%rax
  735. movq 72(%rsp),%rax
  736. # qhasm: (uint128) mulrdx mulrax = mulrax * t12_stack
  737. # asm 1: mulq <t12_stack=stack64#20
  738. # asm 2: mulq <t12_stack=152(%rsp)
  739. mulq 152(%rsp)
  740. # qhasm: carry? a4 += mulrax
  741. # asm 1: add <mulrax=int64#7,<a4=int64#14
  742. # asm 2: add <mulrax=%rax,<a4=%rbx
  743. add %rax,%rbx
  744. # qhasm: mulr41 += mulrdx + carry
  745. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
  746. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
  747. adc %rdx,%rbp
  748. # qhasm: mulrax = a2_stack
  749. # asm 1: movq <a2_stack=stack64#10,>mulrax=int64#3
  750. # asm 2: movq <a2_stack=72(%rsp),>mulrax=%rdx
  751. movq 72(%rsp),%rdx
  752. # qhasm: mulrax *= 19
  753. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  754. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  755. imulq $19,%rdx,%rax
  756. # qhasm: (uint128) mulrdx mulrax = mulrax * t13_stack
  757. # asm 1: mulq <t13_stack=stack64#21
  758. # asm 2: mulq <t13_stack=160(%rsp)
  759. mulq 160(%rsp)
  760. # qhasm: carry? a0 += mulrax
  761. # asm 1: add <mulrax=int64#7,<a0=int64#5
  762. # asm 2: add <mulrax=%rax,<a0=%r8
  763. add %rax,%r8
  764. # qhasm: mulr01 += mulrdx + carry
  765. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
  766. # asm 2: adc <mulrdx=%rdx,<mulr01=%r9
  767. adc %rdx,%r9
  768. # qhasm: mulrax = a2_stack
  769. # asm 1: movq <a2_stack=stack64#10,>mulrax=int64#3
  770. # asm 2: movq <a2_stack=72(%rsp),>mulrax=%rdx
  771. movq 72(%rsp),%rdx
  772. # qhasm: mulrax *= 19
  773. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  774. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  775. imulq $19,%rdx,%rax
  776. # qhasm: (uint128) mulrdx mulrax = mulrax * t14_stack
  777. # asm 1: mulq <t14_stack=stack64#22
  778. # asm 2: mulq <t14_stack=168(%rsp)
  779. mulq 168(%rsp)
  780. # qhasm: carry? a1 += mulrax
  781. # asm 1: add <mulrax=int64#7,<a1=int64#8
  782. # asm 2: add <mulrax=%rax,<a1=%r10
  783. add %rax,%r10
  784. # qhasm: mulr11 += mulrdx + carry
  785. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
  786. # asm 2: adc <mulrdx=%rdx,<mulr11=%r11
  787. adc %rdx,%r11
  788. # qhasm: mulrax = a3_stack
  789. # asm 1: movq <a3_stack=stack64#11,>mulrax=int64#7
  790. # asm 2: movq <a3_stack=80(%rsp),>mulrax=%rax
  791. movq 80(%rsp),%rax
  792. # qhasm: (uint128) mulrdx mulrax = mulrax * t10_stack
  793. # asm 1: mulq <t10_stack=stack64#18
  794. # asm 2: mulq <t10_stack=136(%rsp)
  795. mulq 136(%rsp)
  796. # qhasm: carry? a3 += mulrax
  797. # asm 1: add <mulrax=int64#7,<a3=int64#12
  798. # asm 2: add <mulrax=%rax,<a3=%r14
  799. add %rax,%r14
  800. # qhasm: mulr31 += mulrdx + carry
  801. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
  802. # asm 2: adc <mulrdx=%rdx,<mulr31=%r15
  803. adc %rdx,%r15
  804. # qhasm: mulrax = a3_stack
  805. # asm 1: movq <a3_stack=stack64#11,>mulrax=int64#7
  806. # asm 2: movq <a3_stack=80(%rsp),>mulrax=%rax
  807. movq 80(%rsp),%rax
  808. # qhasm: (uint128) mulrdx mulrax = mulrax * t11_stack
  809. # asm 1: mulq <t11_stack=stack64#19
  810. # asm 2: mulq <t11_stack=144(%rsp)
  811. mulq 144(%rsp)
  812. # qhasm: carry? a4 += mulrax
  813. # asm 1: add <mulrax=int64#7,<a4=int64#14
  814. # asm 2: add <mulrax=%rax,<a4=%rbx
  815. add %rax,%rbx
  816. # qhasm: mulr41 += mulrdx + carry
  817. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
  818. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
  819. adc %rdx,%rbp
  820. # qhasm: mulrax = mulx319_stack
  821. # asm 1: movq <mulx319_stack=stack64#28,>mulrax=int64#7
  822. # asm 2: movq <mulx319_stack=216(%rsp),>mulrax=%rax
  823. movq 216(%rsp),%rax
  824. # qhasm: (uint128) mulrdx mulrax = mulrax * t13_stack
  825. # asm 1: mulq <t13_stack=stack64#21
  826. # asm 2: mulq <t13_stack=160(%rsp)
  827. mulq 160(%rsp)
  828. # qhasm: carry? a1 += mulrax
  829. # asm 1: add <mulrax=int64#7,<a1=int64#8
  830. # asm 2: add <mulrax=%rax,<a1=%r10
  831. add %rax,%r10
  832. # qhasm: mulr11 += mulrdx + carry
  833. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
  834. # asm 2: adc <mulrdx=%rdx,<mulr11=%r11
  835. adc %rdx,%r11
  836. # qhasm: mulrax = mulx319_stack
  837. # asm 1: movq <mulx319_stack=stack64#28,>mulrax=int64#7
  838. # asm 2: movq <mulx319_stack=216(%rsp),>mulrax=%rax
  839. movq 216(%rsp),%rax
  840. # qhasm: (uint128) mulrdx mulrax = mulrax * t14_stack
  841. # asm 1: mulq <t14_stack=stack64#22
  842. # asm 2: mulq <t14_stack=168(%rsp)
  843. mulq 168(%rsp)
  844. # qhasm: carry? a2 += mulrax
  845. # asm 1: add <mulrax=int64#7,<a2=int64#10
  846. # asm 2: add <mulrax=%rax,<a2=%r12
  847. add %rax,%r12
  848. # qhasm: mulr21 += mulrdx + carry
  849. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
  850. # asm 2: adc <mulrdx=%rdx,<mulr21=%r13
  851. adc %rdx,%r13
  852. # qhasm: mulrax = a4_stack
  853. # asm 1: movq <a4_stack=stack64#12,>mulrax=int64#7
  854. # asm 2: movq <a4_stack=88(%rsp),>mulrax=%rax
  855. movq 88(%rsp),%rax
  856. # qhasm: (uint128) mulrdx mulrax = mulrax * t10_stack
  857. # asm 1: mulq <t10_stack=stack64#18
  858. # asm 2: mulq <t10_stack=136(%rsp)
  859. mulq 136(%rsp)
  860. # qhasm: carry? a4 += mulrax
  861. # asm 1: add <mulrax=int64#7,<a4=int64#14
  862. # asm 2: add <mulrax=%rax,<a4=%rbx
  863. add %rax,%rbx
  864. # qhasm: mulr41 += mulrdx + carry
  865. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
  866. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
  867. adc %rdx,%rbp
  868. # qhasm: mulrax = mulx419_stack
  869. # asm 1: movq <mulx419_stack=stack64#29,>mulrax=int64#7
  870. # asm 2: movq <mulx419_stack=224(%rsp),>mulrax=%rax
  871. movq 224(%rsp),%rax
  872. # qhasm: (uint128) mulrdx mulrax = mulrax * t12_stack
  873. # asm 1: mulq <t12_stack=stack64#20
  874. # asm 2: mulq <t12_stack=152(%rsp)
  875. mulq 152(%rsp)
  876. # qhasm: carry? a1 += mulrax
  877. # asm 1: add <mulrax=int64#7,<a1=int64#8
  878. # asm 2: add <mulrax=%rax,<a1=%r10
  879. add %rax,%r10
  880. # qhasm: mulr11 += mulrdx + carry
  881. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
  882. # asm 2: adc <mulrdx=%rdx,<mulr11=%r11
  883. adc %rdx,%r11
  884. # qhasm: mulrax = mulx419_stack
  885. # asm 1: movq <mulx419_stack=stack64#29,>mulrax=int64#7
  886. # asm 2: movq <mulx419_stack=224(%rsp),>mulrax=%rax
  887. movq 224(%rsp),%rax
  888. # qhasm: (uint128) mulrdx mulrax = mulrax * t13_stack
  889. # asm 1: mulq <t13_stack=stack64#21
  890. # asm 2: mulq <t13_stack=160(%rsp)
  891. mulq 160(%rsp)
  892. # qhasm: carry? a2 += mulrax
  893. # asm 1: add <mulrax=int64#7,<a2=int64#10
  894. # asm 2: add <mulrax=%rax,<a2=%r12
  895. add %rax,%r12
  896. # qhasm: mulr21 += mulrdx + carry
  897. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
  898. # asm 2: adc <mulrdx=%rdx,<mulr21=%r13
  899. adc %rdx,%r13
  900. # qhasm: mulrax = mulx419_stack
  901. # asm 1: movq <mulx419_stack=stack64#29,>mulrax=int64#7
  902. # asm 2: movq <mulx419_stack=224(%rsp),>mulrax=%rax
  903. movq 224(%rsp),%rax
  904. # qhasm: (uint128) mulrdx mulrax = mulrax * t14_stack
  905. # asm 1: mulq <t14_stack=stack64#22
  906. # asm 2: mulq <t14_stack=168(%rsp)
  907. mulq 168(%rsp)
  908. # qhasm: carry? a3 += mulrax
  909. # asm 1: add <mulrax=int64#7,<a3=int64#12
  910. # asm 2: add <mulrax=%rax,<a3=%r14
  911. add %rax,%r14
  912. # qhasm: mulr31 += mulrdx + carry
  913. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
  914. # asm 2: adc <mulrdx=%rdx,<mulr31=%r15
  915. adc %rdx,%r15
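# All 25 partial products are accumulated; each column i now sits in a
# 128-bit pair (a_i low, mulr_i1 high).  The reduction below shifts each
# high word left by 13 through its low word (shld $13), masks the low word
# to 51 bits with batch_REDMASK51, adds the spill-over into the next
# column, folds the top carry back into column 0 via the multiply by 19,
# and then runs one more 51-bit carry chain.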
  916. # qhasm: mulredmask = *(uint64 *) &CRYPTO_NAMESPACE(batch_REDMASK51)
  917. # asm 1: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=int64#3
  918. # asm 2: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=%rdx
  919. movq CRYPTO_NAMESPACE(batch_REDMASK51)(%rip),%rdx
  920. # qhasm: mulr01 = (mulr01.a0) << 13
  921. # asm 1: shld $13,<a0=int64#5,<mulr01=int64#6
  922. # asm 2: shld $13,<a0=%r8,<mulr01=%r9
  923. shld $13,%r8,%r9
  924. # qhasm: a0 &= mulredmask
  925. # asm 1: and <mulredmask=int64#3,<a0=int64#5
  926. # asm 2: and <mulredmask=%rdx,<a0=%r8
  927. and %rdx,%r8
  928. # qhasm: mulr11 = (mulr11.a1) << 13
  929. # asm 1: shld $13,<a1=int64#8,<mulr11=int64#9
  930. # asm 2: shld $13,<a1=%r10,<mulr11=%r11
  931. shld $13,%r10,%r11
  932. # qhasm: a1 &= mulredmask
  933. # asm 1: and <mulredmask=int64#3,<a1=int64#8
  934. # asm 2: and <mulredmask=%rdx,<a1=%r10
  935. and %rdx,%r10
  936. # qhasm: a1 += mulr01
  937. # asm 1: add <mulr01=int64#6,<a1=int64#8
  938. # asm 2: add <mulr01=%r9,<a1=%r10
  939. add %r9,%r10
  940. # qhasm: mulr21 = (mulr21.a2) << 13
  941. # asm 1: shld $13,<a2=int64#10,<mulr21=int64#11
  942. # asm 2: shld $13,<a2=%r12,<mulr21=%r13
  943. shld $13,%r12,%r13
  944. # qhasm: a2 &= mulredmask
  945. # asm 1: and <mulredmask=int64#3,<a2=int64#10
  946. # asm 2: and <mulredmask=%rdx,<a2=%r12
  947. and %rdx,%r12
  948. # qhasm: a2 += mulr11
  949. # asm 1: add <mulr11=int64#9,<a2=int64#10
  950. # asm 2: add <mulr11=%r11,<a2=%r12
  951. add %r11,%r12
  952. # qhasm: mulr31 = (mulr31.a3) << 13
  953. # asm 1: shld $13,<a3=int64#12,<mulr31=int64#13
  954. # asm 2: shld $13,<a3=%r14,<mulr31=%r15
  955. shld $13,%r14,%r15
  956. # qhasm: a3 &= mulredmask
  957. # asm 1: and <mulredmask=int64#3,<a3=int64#12
  958. # asm 2: and <mulredmask=%rdx,<a3=%r14
  959. and %rdx,%r14
  960. # qhasm: a3 += mulr21
  961. # asm 1: add <mulr21=int64#11,<a3=int64#12
  962. # asm 2: add <mulr21=%r13,<a3=%r14
  963. add %r13,%r14
  964. # qhasm: mulr41 = (mulr41.a4) << 13
  965. # asm 1: shld $13,<a4=int64#14,<mulr41=int64#15
  966. # asm 2: shld $13,<a4=%rbx,<mulr41=%rbp
  967. shld $13,%rbx,%rbp
  968. # qhasm: a4 &= mulredmask
  969. # asm 1: and <mulredmask=int64#3,<a4=int64#14
  970. # asm 2: and <mulredmask=%rdx,<a4=%rbx
  971. and %rdx,%rbx
  972. # qhasm: a4 += mulr31
  973. # asm 1: add <mulr31=int64#13,<a4=int64#14
  974. # asm 2: add <mulr31=%r15,<a4=%rbx
  975. add %r15,%rbx
  976. # qhasm: mulr41 = mulr41 * 19
  977. # asm 1: imulq $19,<mulr41=int64#15,>mulr41=int64#6
  978. # asm 2: imulq $19,<mulr41=%rbp,>mulr41=%r9
  979. imulq $19,%rbp,%r9
  980. # qhasm: a0 += mulr41
  981. # asm 1: add <mulr41=int64#6,<a0=int64#5
  982. # asm 2: add <mulr41=%r9,<a0=%r8
  983. add %r9,%r8
  984. # qhasm: mult = a0
  985. # asm 1: mov <a0=int64#5,>mult=int64#6
  986. # asm 2: mov <a0=%r8,>mult=%r9
  987. mov %r8,%r9
  988. # qhasm: (uint64) mult >>= 51
  989. # asm 1: shr $51,<mult=int64#6
  990. # asm 2: shr $51,<mult=%r9
  991. shr $51,%r9
  992. # qhasm: mult += a1
  993. # asm 1: add <a1=int64#8,<mult=int64#6
  994. # asm 2: add <a1=%r10,<mult=%r9
  995. add %r10,%r9
  996. # qhasm: a1 = mult
  997. # asm 1: mov <mult=int64#6,>a1=int64#7
  998. # asm 2: mov <mult=%r9,>a1=%rax
  999. mov %r9,%rax
  1000. # qhasm: (uint64) mult >>= 51
  1001. # asm 1: shr $51,<mult=int64#6
  1002. # asm 2: shr $51,<mult=%r9
  1003. shr $51,%r9
  1004. # qhasm: a0 &= mulredmask
  1005. # asm 1: and <mulredmask=int64#3,<a0=int64#5
  1006. # asm 2: and <mulredmask=%rdx,<a0=%r8
  1007. and %rdx,%r8
  1008. # qhasm: mult += a2
  1009. # asm 1: add <a2=int64#10,<mult=int64#6
  1010. # asm 2: add <a2=%r12,<mult=%r9
  1011. add %r12,%r9
  1012. # qhasm: a2 = mult
  1013. # asm 1: mov <mult=int64#6,>a2=int64#8
  1014. # asm 2: mov <mult=%r9,>a2=%r10
  1015. mov %r9,%r10
  1016. # qhasm: (uint64) mult >>= 51
  1017. # asm 1: shr $51,<mult=int64#6
  1018. # asm 2: shr $51,<mult=%r9
  1019. shr $51,%r9
  1020. # qhasm: a1 &= mulredmask
  1021. # asm 1: and <mulredmask=int64#3,<a1=int64#7
  1022. # asm 2: and <mulredmask=%rdx,<a1=%rax
  1023. and %rdx,%rax
  1024. # qhasm: mult += a3
  1025. # asm 1: add <a3=int64#12,<mult=int64#6
  1026. # asm 2: add <a3=%r14,<mult=%r9
  1027. add %r14,%r9
  1028. # qhasm: a3 = mult
  1029. # asm 1: mov <mult=int64#6,>a3=int64#9
  1030. # asm 2: mov <mult=%r9,>a3=%r11
  1031. mov %r9,%r11
  1032. # qhasm: (uint64) mult >>= 51
  1033. # asm 1: shr $51,<mult=int64#6
  1034. # asm 2: shr $51,<mult=%r9
  1035. shr $51,%r9
  1036. # qhasm: a2 &= mulredmask
  1037. # asm 1: and <mulredmask=int64#3,<a2=int64#8
  1038. # asm 2: and <mulredmask=%rdx,<a2=%r10
  1039. and %rdx,%r10
  1040. # qhasm: mult += a4
  1041. # asm 1: add <a4=int64#14,<mult=int64#6
  1042. # asm 2: add <a4=%rbx,<mult=%r9
  1043. add %rbx,%r9
  1044. # qhasm: a4 = mult
  1045. # asm 1: mov <mult=int64#6,>a4=int64#10
  1046. # asm 2: mov <mult=%r9,>a4=%r12
  1047. mov %r9,%r12
  1048. # qhasm: (uint64) mult >>= 51
  1049. # asm 1: shr $51,<mult=int64#6
  1050. # asm 2: shr $51,<mult=%r9
  1051. shr $51,%r9
  1052. # qhasm: a3 &= mulredmask
  1053. # asm 1: and <mulredmask=int64#3,<a3=int64#9
  1054. # asm 2: and <mulredmask=%rdx,<a3=%r11
  1055. and %rdx,%r11
  1056. # qhasm: mult *= 19
  1057. # asm 1: imulq $19,<mult=int64#6,>mult=int64#6
  1058. # asm 2: imulq $19,<mult=%r9,>mult=%r9
  1059. imulq $19,%r9,%r9
  1060. # qhasm: a0 += mult
  1061. # asm 1: add <mult=int64#6,<a0=int64#5
  1062. # asm 2: add <mult=%r9,<a0=%r8
  1063. add %r9,%r8
  1064. # qhasm: a4 &= mulredmask
  1065. # asm 1: and <mulredmask=int64#3,<a4=int64#10
  1066. # asm 2: and <mulredmask=%rdx,<a4=%r12
  1067. and %rdx,%r12
  1068. # qhasm: a0_stack = a0
  1069. # asm 1: movq <a0=int64#5,>a0_stack=stack64#8
  1070. # asm 2: movq <a0=%r8,>a0_stack=56(%rsp)
  1071. movq %r8,56(%rsp)
  1072. # qhasm: a1_stack = a1
  1073. # asm 1: movq <a1=int64#7,>a1_stack=stack64#9
  1074. # asm 2: movq <a1=%rax,>a1_stack=64(%rsp)
  1075. movq %rax,64(%rsp)
  1076. # qhasm: a2_stack = a2
  1077. # asm 1: movq <a2=int64#8,>a2_stack=stack64#10
  1078. # asm 2: movq <a2=%r10,>a2_stack=72(%rsp)
  1079. movq %r10,72(%rsp)
  1080. # qhasm: a3_stack = a3
  1081. # asm 1: movq <a3=int64#9,>a3_stack=stack64#11
  1082. # asm 2: movq <a3=%r11,>a3_stack=80(%rsp)
  1083. movq %r11,80(%rsp)
  1084. # qhasm: a4_stack = a4
  1085. # asm 1: movq <a4=int64#10,>a4_stack=stack64#12
  1086. # asm 2: movq <a4=%r12,>a4_stack=88(%rsp)
  1087. movq %r12,88(%rsp)
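# The reduced limbs a0..a4 are parked at 56(%rsp)..88(%rsp) and the registers
# are reused for the next 5x5-limb schoolbook multiplication, b * t2, taken
# from b0_stack..b4_stack and t20_stack..t24_stack.  b3*19 and b4*19 are
# precomputed (mulx319_stack, mulx419_stack) and b1, b2 are scaled by 19 on
# the fly, so the cross terms that wrap past limb 4 reuse
# 2^255 = 19 (mod 2^255 - 19).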
  1088. # qhasm: mulrax = b3_stack
  1089. # asm 1: movq <b3_stack=stack64#16,>mulrax=int64#3
  1090. # asm 2: movq <b3_stack=120(%rsp),>mulrax=%rdx
  1091. movq 120(%rsp),%rdx
  1092. # qhasm: mulrax *= 19
  1093. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1094. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1095. imulq $19,%rdx,%rax
  1096. # qhasm: mulx319_stack = mulrax
  1097. # asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#18
  1098. # asm 2: movq <mulrax=%rax,>mulx319_stack=136(%rsp)
  1099. movq %rax,136(%rsp)
  1100. # qhasm: (uint128) mulrdx mulrax = mulrax * t22_stack
  1101. # asm 1: mulq <t22_stack=stack64#25
  1102. # asm 2: mulq <t22_stack=192(%rsp)
  1103. mulq 192(%rsp)
  1104. # qhasm: rx0 = mulrax
  1105. # asm 1: mov <mulrax=int64#7,>rx0=int64#5
  1106. # asm 2: mov <mulrax=%rax,>rx0=%r8
  1107. mov %rax,%r8
  1108. # qhasm: mulr01 = mulrdx
  1109. # asm 1: mov <mulrdx=int64#3,>mulr01=int64#6
  1110. # asm 2: mov <mulrdx=%rdx,>mulr01=%r9
  1111. mov %rdx,%r9
  1112. # qhasm: mulrax = b4_stack
  1113. # asm 1: movq <b4_stack=stack64#17,>mulrax=int64#3
  1114. # asm 2: movq <b4_stack=128(%rsp),>mulrax=%rdx
  1115. movq 128(%rsp),%rdx
  1116. # qhasm: mulrax *= 19
  1117. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1118. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1119. imulq $19,%rdx,%rax
  1120. # qhasm: mulx419_stack = mulrax
  1121. # asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#19
  1122. # asm 2: movq <mulrax=%rax,>mulx419_stack=144(%rsp)
  1123. movq %rax,144(%rsp)
  1124. # qhasm: (uint128) mulrdx mulrax = mulrax * t21_stack
  1125. # asm 1: mulq <t21_stack=stack64#24
  1126. # asm 2: mulq <t21_stack=184(%rsp)
  1127. mulq 184(%rsp)
  1128. # qhasm: carry? rx0 += mulrax
  1129. # asm 1: add <mulrax=int64#7,<rx0=int64#5
  1130. # asm 2: add <mulrax=%rax,<rx0=%r8
  1131. add %rax,%r8
  1132. # qhasm: mulr01 += mulrdx + carry
  1133. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
  1134. # asm 2: adc <mulrdx=%rdx,<mulr01=%r9
  1135. adc %rdx,%r9
  1136. # qhasm: mulrax = b0_stack
  1137. # asm 1: movq <b0_stack=stack64#13,>mulrax=int64#7
  1138. # asm 2: movq <b0_stack=96(%rsp),>mulrax=%rax
  1139. movq 96(%rsp),%rax
  1140. # qhasm: (uint128) mulrdx mulrax = mulrax * t20_stack
  1141. # asm 1: mulq <t20_stack=stack64#23
  1142. # asm 2: mulq <t20_stack=176(%rsp)
  1143. mulq 176(%rsp)
  1144. # qhasm: carry? rx0 += mulrax
  1145. # asm 1: add <mulrax=int64#7,<rx0=int64#5
  1146. # asm 2: add <mulrax=%rax,<rx0=%r8
  1147. add %rax,%r8
  1148. # qhasm: mulr01 += mulrdx + carry
  1149. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
  1150. # asm 2: adc <mulrdx=%rdx,<mulr01=%r9
  1151. adc %rdx,%r9
  1152. # qhasm: mulrax = b0_stack
  1153. # asm 1: movq <b0_stack=stack64#13,>mulrax=int64#7
  1154. # asm 2: movq <b0_stack=96(%rsp),>mulrax=%rax
  1155. movq 96(%rsp),%rax
  1156. # qhasm: (uint128) mulrdx mulrax = mulrax * t21_stack
  1157. # asm 1: mulq <t21_stack=stack64#24
  1158. # asm 2: mulq <t21_stack=184(%rsp)
  1159. mulq 184(%rsp)
  1160. # qhasm: rx1 = mulrax
  1161. # asm 1: mov <mulrax=int64#7,>rx1=int64#8
  1162. # asm 2: mov <mulrax=%rax,>rx1=%r10
  1163. mov %rax,%r10
  1164. # qhasm: mulr11 = mulrdx
  1165. # asm 1: mov <mulrdx=int64#3,>mulr11=int64#9
  1166. # asm 2: mov <mulrdx=%rdx,>mulr11=%r11
  1167. mov %rdx,%r11
  1168. # qhasm: mulrax = b0_stack
  1169. # asm 1: movq <b0_stack=stack64#13,>mulrax=int64#7
  1170. # asm 2: movq <b0_stack=96(%rsp),>mulrax=%rax
  1171. movq 96(%rsp),%rax
  1172. # qhasm: (uint128) mulrdx mulrax = mulrax * t22_stack
  1173. # asm 1: mulq <t22_stack=stack64#25
  1174. # asm 2: mulq <t22_stack=192(%rsp)
  1175. mulq 192(%rsp)
  1176. # qhasm: rx2 = mulrax
  1177. # asm 1: mov <mulrax=int64#7,>rx2=int64#10
  1178. # asm 2: mov <mulrax=%rax,>rx2=%r12
  1179. mov %rax,%r12
  1180. # qhasm: mulr21 = mulrdx
  1181. # asm 1: mov <mulrdx=int64#3,>mulr21=int64#11
  1182. # asm 2: mov <mulrdx=%rdx,>mulr21=%r13
  1183. mov %rdx,%r13
  1184. # qhasm: mulrax = b0_stack
  1185. # asm 1: movq <b0_stack=stack64#13,>mulrax=int64#7
  1186. # asm 2: movq <b0_stack=96(%rsp),>mulrax=%rax
  1187. movq 96(%rsp),%rax
  1188. # qhasm: (uint128) mulrdx mulrax = mulrax * t23_stack
  1189. # asm 1: mulq <t23_stack=stack64#26
  1190. # asm 2: mulq <t23_stack=200(%rsp)
  1191. mulq 200(%rsp)
  1192. # qhasm: rx3 = mulrax
  1193. # asm 1: mov <mulrax=int64#7,>rx3=int64#12
  1194. # asm 2: mov <mulrax=%rax,>rx3=%r14
  1195. mov %rax,%r14
  1196. # qhasm: mulr31 = mulrdx
  1197. # asm 1: mov <mulrdx=int64#3,>mulr31=int64#13
  1198. # asm 2: mov <mulrdx=%rdx,>mulr31=%r15
  1199. mov %rdx,%r15
  1200. # qhasm: mulrax = b0_stack
  1201. # asm 1: movq <b0_stack=stack64#13,>mulrax=int64#7
  1202. # asm 2: movq <b0_stack=96(%rsp),>mulrax=%rax
  1203. movq 96(%rsp),%rax
  1204. # qhasm: (uint128) mulrdx mulrax = mulrax * t24_stack
  1205. # asm 1: mulq <t24_stack=stack64#27
  1206. # asm 2: mulq <t24_stack=208(%rsp)
  1207. mulq 208(%rsp)
  1208. # qhasm: rx4 = mulrax
  1209. # asm 1: mov <mulrax=int64#7,>rx4=int64#14
  1210. # asm 2: mov <mulrax=%rax,>rx4=%rbx
  1211. mov %rax,%rbx
  1212. # qhasm: mulr41 = mulrdx
  1213. # asm 1: mov <mulrdx=int64#3,>mulr41=int64#15
  1214. # asm 2: mov <mulrdx=%rdx,>mulr41=%rbp
  1215. mov %rdx,%rbp
  1216. # qhasm: mulrax = b1_stack
  1217. # asm 1: movq <b1_stack=stack64#14,>mulrax=int64#7
  1218. # asm 2: movq <b1_stack=104(%rsp),>mulrax=%rax
  1219. movq 104(%rsp),%rax
  1220. # qhasm: (uint128) mulrdx mulrax = mulrax * t20_stack
  1221. # asm 1: mulq <t20_stack=stack64#23
  1222. # asm 2: mulq <t20_stack=176(%rsp)
  1223. mulq 176(%rsp)
  1224. # qhasm: carry? rx1 += mulrax
  1225. # asm 1: add <mulrax=int64#7,<rx1=int64#8
  1226. # asm 2: add <mulrax=%rax,<rx1=%r10
  1227. add %rax,%r10
  1228. # qhasm: mulr11 += mulrdx + carry
  1229. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
  1230. # asm 2: adc <mulrdx=%rdx,<mulr11=%r11
  1231. adc %rdx,%r11
  1232. # qhasm: mulrax = b1_stack
  1233. # asm 1: movq <b1_stack=stack64#14,>mulrax=int64#7
  1234. # asm 2: movq <b1_stack=104(%rsp),>mulrax=%rax
  1235. movq 104(%rsp),%rax
  1236. # qhasm: (uint128) mulrdx mulrax = mulrax * t21_stack
  1237. # asm 1: mulq <t21_stack=stack64#24
  1238. # asm 2: mulq <t21_stack=184(%rsp)
  1239. mulq 184(%rsp)
  1240. # qhasm: carry? rx2 += mulrax
  1241. # asm 1: add <mulrax=int64#7,<rx2=int64#10
  1242. # asm 2: add <mulrax=%rax,<rx2=%r12
  1243. add %rax,%r12
  1244. # qhasm: mulr21 += mulrdx + carry
  1245. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
  1246. # asm 2: adc <mulrdx=%rdx,<mulr21=%r13
  1247. adc %rdx,%r13
  1248. # qhasm: mulrax = b1_stack
  1249. # asm 1: movq <b1_stack=stack64#14,>mulrax=int64#7
  1250. # asm 2: movq <b1_stack=104(%rsp),>mulrax=%rax
  1251. movq 104(%rsp),%rax
  1252. # qhasm: (uint128) mulrdx mulrax = mulrax * t22_stack
  1253. # asm 1: mulq <t22_stack=stack64#25
  1254. # asm 2: mulq <t22_stack=192(%rsp)
  1255. mulq 192(%rsp)
  1256. # qhasm: carry? rx3 += mulrax
  1257. # asm 1: add <mulrax=int64#7,<rx3=int64#12
  1258. # asm 2: add <mulrax=%rax,<rx3=%r14
  1259. add %rax,%r14
  1260. # qhasm: mulr31 += mulrdx + carry
  1261. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
  1262. # asm 2: adc <mulrdx=%rdx,<mulr31=%r15
  1263. adc %rdx,%r15
  1264. # qhasm: mulrax = b1_stack
  1265. # asm 1: movq <b1_stack=stack64#14,>mulrax=int64#7
  1266. # asm 2: movq <b1_stack=104(%rsp),>mulrax=%rax
  1267. movq 104(%rsp),%rax
  1268. # qhasm: (uint128) mulrdx mulrax = mulrax * t23_stack
  1269. # asm 1: mulq <t23_stack=stack64#26
  1270. # asm 2: mulq <t23_stack=200(%rsp)
  1271. mulq 200(%rsp)
  1272. # qhasm: carry? rx4 += mulrax
  1273. # asm 1: add <mulrax=int64#7,<rx4=int64#14
  1274. # asm 2: add <mulrax=%rax,<rx4=%rbx
  1275. add %rax,%rbx
  1276. # qhasm: mulr41 += mulrdx + carry
  1277. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
  1278. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
  1279. adc %rdx,%rbp
  1280. # qhasm: mulrax = b1_stack
  1281. # asm 1: movq <b1_stack=stack64#14,>mulrax=int64#3
  1282. # asm 2: movq <b1_stack=104(%rsp),>mulrax=%rdx
  1283. movq 104(%rsp),%rdx
  1284. # qhasm: mulrax *= 19
  1285. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1286. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1287. imulq $19,%rdx,%rax
  1288. # qhasm: (uint128) mulrdx mulrax = mulrax * t24_stack
  1289. # asm 1: mulq <t24_stack=stack64#27
  1290. # asm 2: mulq <t24_stack=208(%rsp)
  1291. mulq 208(%rsp)
  1292. # qhasm: carry? rx0 += mulrax
  1293. # asm 1: add <mulrax=int64#7,<rx0=int64#5
  1294. # asm 2: add <mulrax=%rax,<rx0=%r8
  1295. add %rax,%r8
  1296. # qhasm: mulr01 += mulrdx + carry
  1297. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
  1298. # asm 2: adc <mulrdx=%rdx,<mulr01=%r9
  1299. adc %rdx,%r9
  1300. # qhasm: mulrax = b2_stack
  1301. # asm 1: movq <b2_stack=stack64#15,>mulrax=int64#7
  1302. # asm 2: movq <b2_stack=112(%rsp),>mulrax=%rax
  1303. movq 112(%rsp),%rax
  1304. # qhasm: (uint128) mulrdx mulrax = mulrax * t20_stack
  1305. # asm 1: mulq <t20_stack=stack64#23
  1306. # asm 2: mulq <t20_stack=176(%rsp)
  1307. mulq 176(%rsp)
  1308. # qhasm: carry? rx2 += mulrax
  1309. # asm 1: add <mulrax=int64#7,<rx2=int64#10
  1310. # asm 2: add <mulrax=%rax,<rx2=%r12
  1311. add %rax,%r12
  1312. # qhasm: mulr21 += mulrdx + carry
  1313. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
  1314. # asm 2: adc <mulrdx=%rdx,<mulr21=%r13
  1315. adc %rdx,%r13
  1316. # qhasm: mulrax = b2_stack
  1317. # asm 1: movq <b2_stack=stack64#15,>mulrax=int64#7
  1318. # asm 2: movq <b2_stack=112(%rsp),>mulrax=%rax
  1319. movq 112(%rsp),%rax
  1320. # qhasm: (uint128) mulrdx mulrax = mulrax * t21_stack
  1321. # asm 1: mulq <t21_stack=stack64#24
  1322. # asm 2: mulq <t21_stack=184(%rsp)
  1323. mulq 184(%rsp)
  1324. # qhasm: carry? rx3 += mulrax
  1325. # asm 1: add <mulrax=int64#7,<rx3=int64#12
  1326. # asm 2: add <mulrax=%rax,<rx3=%r14
  1327. add %rax,%r14
  1328. # qhasm: mulr31 += mulrdx + carry
  1329. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
  1330. # asm 2: adc <mulrdx=%rdx,<mulr31=%r15
  1331. adc %rdx,%r15
  1332. # qhasm: mulrax = b2_stack
  1333. # asm 1: movq <b2_stack=stack64#15,>mulrax=int64#7
  1334. # asm 2: movq <b2_stack=112(%rsp),>mulrax=%rax
  1335. movq 112(%rsp),%rax
  1336. # qhasm: (uint128) mulrdx mulrax = mulrax * t22_stack
  1337. # asm 1: mulq <t22_stack=stack64#25
  1338. # asm 2: mulq <t22_stack=192(%rsp)
  1339. mulq 192(%rsp)
  1340. # qhasm: carry? rx4 += mulrax
  1341. # asm 1: add <mulrax=int64#7,<rx4=int64#14
  1342. # asm 2: add <mulrax=%rax,<rx4=%rbx
  1343. add %rax,%rbx
  1344. # qhasm: mulr41 += mulrdx + carry
  1345. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
  1346. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
  1347. adc %rdx,%rbp
  1348. # qhasm: mulrax = b2_stack
  1349. # asm 1: movq <b2_stack=stack64#15,>mulrax=int64#3
  1350. # asm 2: movq <b2_stack=112(%rsp),>mulrax=%rdx
  1351. movq 112(%rsp),%rdx
  1352. # qhasm: mulrax *= 19
  1353. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1354. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1355. imulq $19,%rdx,%rax
  1356. # qhasm: (uint128) mulrdx mulrax = mulrax * t23_stack
  1357. # asm 1: mulq <t23_stack=stack64#26
  1358. # asm 2: mulq <t23_stack=200(%rsp)
  1359. mulq 200(%rsp)
  1360. # qhasm: carry? rx0 += mulrax
  1361. # asm 1: add <mulrax=int64#7,<rx0=int64#5
  1362. # asm 2: add <mulrax=%rax,<rx0=%r8
  1363. add %rax,%r8
  1364. # qhasm: mulr01 += mulrdx + carry
  1365. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
  1366. # asm 2: adc <mulrdx=%rdx,<mulr01=%r9
  1367. adc %rdx,%r9
  1368. # qhasm: mulrax = b2_stack
  1369. # asm 1: movq <b2_stack=stack64#15,>mulrax=int64#3
  1370. # asm 2: movq <b2_stack=112(%rsp),>mulrax=%rdx
  1371. movq 112(%rsp),%rdx
  1372. # qhasm: mulrax *= 19
  1373. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1374. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1375. imulq $19,%rdx,%rax
  1376. # qhasm: (uint128) mulrdx mulrax = mulrax * t24_stack
  1377. # asm 1: mulq <t24_stack=stack64#27
  1378. # asm 2: mulq <t24_stack=208(%rsp)
  1379. mulq 208(%rsp)
  1380. # qhasm: carry? rx1 += mulrax
  1381. # asm 1: add <mulrax=int64#7,<rx1=int64#8
  1382. # asm 2: add <mulrax=%rax,<rx1=%r10
  1383. add %rax,%r10
  1384. # qhasm: mulr11 += mulrdx + carry
  1385. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
  1386. # asm 2: adc <mulrdx=%rdx,<mulr11=%r11
  1387. adc %rdx,%r11
  1388. # qhasm: mulrax = b3_stack
  1389. # asm 1: movq <b3_stack=stack64#16,>mulrax=int64#7
  1390. # asm 2: movq <b3_stack=120(%rsp),>mulrax=%rax
  1391. movq 120(%rsp),%rax
  1392. # qhasm: (uint128) mulrdx mulrax = mulrax * t20_stack
  1393. # asm 1: mulq <t20_stack=stack64#23
  1394. # asm 2: mulq <t20_stack=176(%rsp)
  1395. mulq 176(%rsp)
  1396. # qhasm: carry? rx3 += mulrax
  1397. # asm 1: add <mulrax=int64#7,<rx3=int64#12
  1398. # asm 2: add <mulrax=%rax,<rx3=%r14
  1399. add %rax,%r14
  1400. # qhasm: mulr31 += mulrdx + carry
  1401. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
  1402. # asm 2: adc <mulrdx=%rdx,<mulr31=%r15
  1403. adc %rdx,%r15
  1404. # qhasm: mulrax = b3_stack
  1405. # asm 1: movq <b3_stack=stack64#16,>mulrax=int64#7
  1406. # asm 2: movq <b3_stack=120(%rsp),>mulrax=%rax
  1407. movq 120(%rsp),%rax
  1408. # qhasm: (uint128) mulrdx mulrax = mulrax * t21_stack
  1409. # asm 1: mulq <t21_stack=stack64#24
  1410. # asm 2: mulq <t21_stack=184(%rsp)
  1411. mulq 184(%rsp)
  1412. # qhasm: carry? rx4 += mulrax
  1413. # asm 1: add <mulrax=int64#7,<rx4=int64#14
  1414. # asm 2: add <mulrax=%rax,<rx4=%rbx
  1415. add %rax,%rbx
  1416. # qhasm: mulr41 += mulrdx + carry
  1417. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
  1418. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
  1419. adc %rdx,%rbp
  1420. # qhasm: mulrax = mulx319_stack
  1421. # asm 1: movq <mulx319_stack=stack64#18,>mulrax=int64#7
  1422. # asm 2: movq <mulx319_stack=136(%rsp),>mulrax=%rax
  1423. movq 136(%rsp),%rax
  1424. # qhasm: (uint128) mulrdx mulrax = mulrax * t23_stack
  1425. # asm 1: mulq <t23_stack=stack64#26
  1426. # asm 2: mulq <t23_stack=200(%rsp)
  1427. mulq 200(%rsp)
  1428. # qhasm: carry? rx1 += mulrax
  1429. # asm 1: add <mulrax=int64#7,<rx1=int64#8
  1430. # asm 2: add <mulrax=%rax,<rx1=%r10
  1431. add %rax,%r10
  1432. # qhasm: mulr11 += mulrdx + carry
  1433. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
  1434. # asm 2: adc <mulrdx=%rdx,<mulr11=%r11
  1435. adc %rdx,%r11
  1436. # qhasm: mulrax = mulx319_stack
  1437. # asm 1: movq <mulx319_stack=stack64#18,>mulrax=int64#7
  1438. # asm 2: movq <mulx319_stack=136(%rsp),>mulrax=%rax
  1439. movq 136(%rsp),%rax
  1440. # qhasm: (uint128) mulrdx mulrax = mulrax * t24_stack
  1441. # asm 1: mulq <t24_stack=stack64#27
  1442. # asm 2: mulq <t24_stack=208(%rsp)
  1443. mulq 208(%rsp)
  1444. # qhasm: carry? rx2 += mulrax
  1445. # asm 1: add <mulrax=int64#7,<rx2=int64#10
  1446. # asm 2: add <mulrax=%rax,<rx2=%r12
  1447. add %rax,%r12
  1448. # qhasm: mulr21 += mulrdx + carry
  1449. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
  1450. # asm 2: adc <mulrdx=%rdx,<mulr21=%r13
  1451. adc %rdx,%r13
  1452. # qhasm: mulrax = b4_stack
  1453. # asm 1: movq <b4_stack=stack64#17,>mulrax=int64#7
  1454. # asm 2: movq <b4_stack=128(%rsp),>mulrax=%rax
  1455. movq 128(%rsp),%rax
  1456. # qhasm: (uint128) mulrdx mulrax = mulrax * t20_stack
  1457. # asm 1: mulq <t20_stack=stack64#23
  1458. # asm 2: mulq <t20_stack=176(%rsp)
  1459. mulq 176(%rsp)
  1460. # qhasm: carry? rx4 += mulrax
  1461. # asm 1: add <mulrax=int64#7,<rx4=int64#14
  1462. # asm 2: add <mulrax=%rax,<rx4=%rbx
  1463. add %rax,%rbx
  1464. # qhasm: mulr41 += mulrdx + carry
  1465. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
  1466. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
  1467. adc %rdx,%rbp
  1468. # qhasm: mulrax = mulx419_stack
  1469. # asm 1: movq <mulx419_stack=stack64#19,>mulrax=int64#7
  1470. # asm 2: movq <mulx419_stack=144(%rsp),>mulrax=%rax
  1471. movq 144(%rsp),%rax
  1472. # qhasm: (uint128) mulrdx mulrax = mulrax * t22_stack
  1473. # asm 1: mulq <t22_stack=stack64#25
  1474. # asm 2: mulq <t22_stack=192(%rsp)
  1475. mulq 192(%rsp)
  1476. # qhasm: carry? rx1 += mulrax
  1477. # asm 1: add <mulrax=int64#7,<rx1=int64#8
  1478. # asm 2: add <mulrax=%rax,<rx1=%r10
  1479. add %rax,%r10
  1480. # qhasm: mulr11 += mulrdx + carry
  1481. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
  1482. # asm 2: adc <mulrdx=%rdx,<mulr11=%r11
  1483. adc %rdx,%r11
  1484. # qhasm: mulrax = mulx419_stack
  1485. # asm 1: movq <mulx419_stack=stack64#19,>mulrax=int64#7
  1486. # asm 2: movq <mulx419_stack=144(%rsp),>mulrax=%rax
  1487. movq 144(%rsp),%rax
  1488. # qhasm: (uint128) mulrdx mulrax = mulrax * t23_stack
  1489. # asm 1: mulq <t23_stack=stack64#26
  1490. # asm 2: mulq <t23_stack=200(%rsp)
  1491. mulq 200(%rsp)
  1492. # qhasm: carry? rx2 += mulrax
  1493. # asm 1: add <mulrax=int64#7,<rx2=int64#10
  1494. # asm 2: add <mulrax=%rax,<rx2=%r12
  1495. add %rax,%r12
  1496. # qhasm: mulr21 += mulrdx + carry
  1497. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
  1498. # asm 2: adc <mulrdx=%rdx,<mulr21=%r13
  1499. adc %rdx,%r13
  1500. # qhasm: mulrax = mulx419_stack
  1501. # asm 1: movq <mulx419_stack=stack64#19,>mulrax=int64#7
  1502. # asm 2: movq <mulx419_stack=144(%rsp),>mulrax=%rax
  1503. movq 144(%rsp),%rax
  1504. # qhasm: (uint128) mulrdx mulrax = mulrax * t24_stack
  1505. # asm 1: mulq <t24_stack=stack64#27
  1506. # asm 2: mulq <t24_stack=208(%rsp)
  1507. mulq 208(%rsp)
  1508. # qhasm: carry? rx3 += mulrax
  1509. # asm 1: add <mulrax=int64#7,<rx3=int64#12
  1510. # asm 2: add <mulrax=%rax,<rx3=%r14
  1511. add %rax,%r14
  1512. # qhasm: mulr31 += mulrdx + carry
  1513. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
  1514. # asm 2: adc <mulrdx=%rdx,<mulr31=%r15
  1515. adc %rdx,%r15
  1516. # qhasm: mulredmask = *(uint64 *) &CRYPTO_NAMESPACE(batch_REDMASK51)
  1517. # asm 1: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=int64#3
  1518. # asm 2: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=%rdx
  1519. movq CRYPTO_NAMESPACE(batch_REDMASK51)(%rip),%rdx
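# Same radix-2^51 reduction and carry chain as above, this time applied to
# the product limbs rx0..rx4.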
  1520. # qhasm: mulr01 = (mulr01.rx0) << 13
  1521. # asm 1: shld $13,<rx0=int64#5,<mulr01=int64#6
  1522. # asm 2: shld $13,<rx0=%r8,<mulr01=%r9
  1523. shld $13,%r8,%r9
  1524. # qhasm: rx0 &= mulredmask
  1525. # asm 1: and <mulredmask=int64#3,<rx0=int64#5
  1526. # asm 2: and <mulredmask=%rdx,<rx0=%r8
  1527. and %rdx,%r8
  1528. # qhasm: mulr11 = (mulr11.rx1) << 13
  1529. # asm 1: shld $13,<rx1=int64#8,<mulr11=int64#9
  1530. # asm 2: shld $13,<rx1=%r10,<mulr11=%r11
  1531. shld $13,%r10,%r11
  1532. # qhasm: rx1 &= mulredmask
  1533. # asm 1: and <mulredmask=int64#3,<rx1=int64#8
  1534. # asm 2: and <mulredmask=%rdx,<rx1=%r10
  1535. and %rdx,%r10
  1536. # qhasm: rx1 += mulr01
  1537. # asm 1: add <mulr01=int64#6,<rx1=int64#8
  1538. # asm 2: add <mulr01=%r9,<rx1=%r10
  1539. add %r9,%r10
  1540. # qhasm: mulr21 = (mulr21.rx2) << 13
  1541. # asm 1: shld $13,<rx2=int64#10,<mulr21=int64#11
  1542. # asm 2: shld $13,<rx2=%r12,<mulr21=%r13
  1543. shld $13,%r12,%r13
  1544. # qhasm: rx2 &= mulredmask
  1545. # asm 1: and <mulredmask=int64#3,<rx2=int64#10
  1546. # asm 2: and <mulredmask=%rdx,<rx2=%r12
  1547. and %rdx,%r12
  1548. # qhasm: rx2 += mulr11
  1549. # asm 1: add <mulr11=int64#9,<rx2=int64#10
  1550. # asm 2: add <mulr11=%r11,<rx2=%r12
  1551. add %r11,%r12
  1552. # qhasm: mulr31 = (mulr31.rx3) << 13
  1553. # asm 1: shld $13,<rx3=int64#12,<mulr31=int64#13
  1554. # asm 2: shld $13,<rx3=%r14,<mulr31=%r15
  1555. shld $13,%r14,%r15
  1556. # qhasm: rx3 &= mulredmask
  1557. # asm 1: and <mulredmask=int64#3,<rx3=int64#12
  1558. # asm 2: and <mulredmask=%rdx,<rx3=%r14
  1559. and %rdx,%r14
  1560. # qhasm: rx3 += mulr21
  1561. # asm 1: add <mulr21=int64#11,<rx3=int64#12
  1562. # asm 2: add <mulr21=%r13,<rx3=%r14
  1563. add %r13,%r14
  1564. # qhasm: mulr41 = (mulr41.rx4) << 13
  1565. # asm 1: shld $13,<rx4=int64#14,<mulr41=int64#15
  1566. # asm 2: shld $13,<rx4=%rbx,<mulr41=%rbp
  1567. shld $13,%rbx,%rbp
  1568. # qhasm: rx4 &= mulredmask
  1569. # asm 1: and <mulredmask=int64#3,<rx4=int64#14
  1570. # asm 2: and <mulredmask=%rdx,<rx4=%rbx
  1571. and %rdx,%rbx
  1572. # qhasm: rx4 += mulr31
  1573. # asm 1: add <mulr31=int64#13,<rx4=int64#14
  1574. # asm 2: add <mulr31=%r15,<rx4=%rbx
  1575. add %r15,%rbx
  1576. # qhasm: mulr41 = mulr41 * 19
  1577. # asm 1: imulq $19,<mulr41=int64#15,>mulr41=int64#6
  1578. # asm 2: imulq $19,<mulr41=%rbp,>mulr41=%r9
  1579. imulq $19,%rbp,%r9
  1580. # qhasm: rx0 += mulr41
  1581. # asm 1: add <mulr41=int64#6,<rx0=int64#5
  1582. # asm 2: add <mulr41=%r9,<rx0=%r8
  1583. add %r9,%r8
  1584. # qhasm: mult = rx0
  1585. # asm 1: mov <rx0=int64#5,>mult=int64#6
  1586. # asm 2: mov <rx0=%r8,>mult=%r9
  1587. mov %r8,%r9
  1588. # qhasm: (uint64) mult >>= 51
  1589. # asm 1: shr $51,<mult=int64#6
  1590. # asm 2: shr $51,<mult=%r9
  1591. shr $51,%r9
  1592. # qhasm: mult += rx1
  1593. # asm 1: add <rx1=int64#8,<mult=int64#6
  1594. # asm 2: add <rx1=%r10,<mult=%r9
  1595. add %r10,%r9
  1596. # qhasm: rx1 = mult
  1597. # asm 1: mov <mult=int64#6,>rx1=int64#7
  1598. # asm 2: mov <mult=%r9,>rx1=%rax
  1599. mov %r9,%rax
  1600. # qhasm: (uint64) mult >>= 51
  1601. # asm 1: shr $51,<mult=int64#6
  1602. # asm 2: shr $51,<mult=%r9
  1603. shr $51,%r9
  1604. # qhasm: rx0 &= mulredmask
  1605. # asm 1: and <mulredmask=int64#3,<rx0=int64#5
  1606. # asm 2: and <mulredmask=%rdx,<rx0=%r8
  1607. and %rdx,%r8
  1608. # qhasm: mult += rx2
  1609. # asm 1: add <rx2=int64#10,<mult=int64#6
  1610. # asm 2: add <rx2=%r12,<mult=%r9
  1611. add %r12,%r9
  1612. # qhasm: rx2 = mult
  1613. # asm 1: mov <mult=int64#6,>rx2=int64#8
  1614. # asm 2: mov <mult=%r9,>rx2=%r10
  1615. mov %r9,%r10
  1616. # qhasm: (uint64) mult >>= 51
  1617. # asm 1: shr $51,<mult=int64#6
  1618. # asm 2: shr $51,<mult=%r9
  1619. shr $51,%r9
  1620. # qhasm: rx1 &= mulredmask
  1621. # asm 1: and <mulredmask=int64#3,<rx1=int64#7
  1622. # asm 2: and <mulredmask=%rdx,<rx1=%rax
  1623. and %rdx,%rax
  1624. # qhasm: mult += rx3
  1625. # asm 1: add <rx3=int64#12,<mult=int64#6
  1626. # asm 2: add <rx3=%r14,<mult=%r9
  1627. add %r14,%r9
  1628. # qhasm: rx3 = mult
  1629. # asm 1: mov <mult=int64#6,>rx3=int64#9
  1630. # asm 2: mov <mult=%r9,>rx3=%r11
  1631. mov %r9,%r11
  1632. # qhasm: (uint64) mult >>= 51
  1633. # asm 1: shr $51,<mult=int64#6
  1634. # asm 2: shr $51,<mult=%r9
  1635. shr $51,%r9
  1636. # qhasm: rx2 &= mulredmask
  1637. # asm 1: and <mulredmask=int64#3,<rx2=int64#8
  1638. # asm 2: and <mulredmask=%rdx,<rx2=%r10
  1639. and %rdx,%r10
  1640. # qhasm: mult += rx4
  1641. # asm 1: add <rx4=int64#14,<mult=int64#6
  1642. # asm 2: add <rx4=%rbx,<mult=%r9
  1643. add %rbx,%r9
  1644. # qhasm: rx4 = mult
  1645. # asm 1: mov <mult=int64#6,>rx4=int64#10
  1646. # asm 2: mov <mult=%r9,>rx4=%r12
  1647. mov %r9,%r12
  1648. # qhasm: (uint64) mult >>= 51
  1649. # asm 1: shr $51,<mult=int64#6
  1650. # asm 2: shr $51,<mult=%r9
  1651. shr $51,%r9
  1652. # qhasm: rx3 &= mulredmask
  1653. # asm 1: and <mulredmask=int64#3,<rx3=int64#9
  1654. # asm 2: and <mulredmask=%rdx,<rx3=%r11
  1655. and %rdx,%r11
  1656. # qhasm: mult *= 19
  1657. # asm 1: imulq $19,<mult=int64#6,>mult=int64#6
  1658. # asm 2: imulq $19,<mult=%r9,>mult=%r9
  1659. imulq $19,%r9,%r9
  1660. # qhasm: rx0 += mult
  1661. # asm 1: add <mult=int64#6,<rx0=int64#5
  1662. # asm 2: add <mult=%r9,<rx0=%r8
  1663. add %r9,%r8
  1664. # qhasm: rx4 &= mulredmask
  1665. # asm 1: and <mulredmask=int64#3,<rx4=int64#10
  1666. # asm 2: and <mulredmask=%rdx,<rx4=%r12
  1667. and %rdx,%r12
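# rx0..rx4 now hold the reduced product.  A copy is kept as ry0..ry4; ry gets
# a0..a4 added while rx gets a0..a4 subtracted, after first adding the
# per-limb constants 2P0/2P1234 (a multiple of the prime spread over the
# limbs) so the limb-wise subtraction cannot go negative.  rx is then written
# to rp+0..32 and ry to rp+80..112.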
  1668. # qhasm: ry0 = rx0
  1669. # asm 1: mov <rx0=int64#5,>ry0=int64#3
  1670. # asm 2: mov <rx0=%r8,>ry0=%rdx
  1671. mov %r8,%rdx
  1672. # qhasm: ry1 = rx1
  1673. # asm 1: mov <rx1=int64#7,>ry1=int64#6
  1674. # asm 2: mov <rx1=%rax,>ry1=%r9
  1675. mov %rax,%r9
  1676. # qhasm: ry2 = rx2
  1677. # asm 1: mov <rx2=int64#8,>ry2=int64#11
  1678. # asm 2: mov <rx2=%r10,>ry2=%r13
  1679. mov %r10,%r13
  1680. # qhasm: ry3 = rx3
  1681. # asm 1: mov <rx3=int64#9,>ry3=int64#12
  1682. # asm 2: mov <rx3=%r11,>ry3=%r14
  1683. mov %r11,%r14
  1684. # qhasm: ry4 = rx4
  1685. # asm 1: mov <rx4=int64#10,>ry4=int64#13
  1686. # asm 2: mov <rx4=%r12,>ry4=%r15
  1687. mov %r12,%r15
  1688. # qhasm: rx0 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P0)
  1689. # asm 1: add CRYPTO_NAMESPACE(batch_2P0),<rx0=int64#5
  1690. # asm 2: add CRYPTO_NAMESPACE(batch_2P0),<rx0=%r8
  1691. add CRYPTO_NAMESPACE(batch_2P0)(%rip),%r8
  1692. # qhasm: rx1 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P1234)
  1693. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<rx1=int64#7
  1694. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<rx1=%rax
  1695. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%rax
  1696. # qhasm: rx2 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P1234)
  1697. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<rx2=int64#8
  1698. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<rx2=%r10
  1699. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%r10
  1700. # qhasm: rx3 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P1234)
  1701. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<rx3=int64#9
  1702. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<rx3=%r11
  1703. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%r11
  1704. # qhasm: rx4 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P1234)
  1705. # asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<rx4=int64#10
  1706. # asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<rx4=%r12
  1707. add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%r12
  1708. # qhasm: ry0 += a0_stack
  1709. # asm 1: addq <a0_stack=stack64#8,<ry0=int64#3
  1710. # asm 2: addq <a0_stack=56(%rsp),<ry0=%rdx
  1711. addq 56(%rsp),%rdx
  1712. # qhasm: ry1 += a1_stack
  1713. # asm 1: addq <a1_stack=stack64#9,<ry1=int64#6
  1714. # asm 2: addq <a1_stack=64(%rsp),<ry1=%r9
  1715. addq 64(%rsp),%r9
  1716. # qhasm: ry2 += a2_stack
  1717. # asm 1: addq <a2_stack=stack64#10,<ry2=int64#11
  1718. # asm 2: addq <a2_stack=72(%rsp),<ry2=%r13
  1719. addq 72(%rsp),%r13
  1720. # qhasm: ry3 += a3_stack
  1721. # asm 1: addq <a3_stack=stack64#11,<ry3=int64#12
  1722. # asm 2: addq <a3_stack=80(%rsp),<ry3=%r14
  1723. addq 80(%rsp),%r14
  1724. # qhasm: ry4 += a4_stack
  1725. # asm 1: addq <a4_stack=stack64#12,<ry4=int64#13
  1726. # asm 2: addq <a4_stack=88(%rsp),<ry4=%r15
  1727. addq 88(%rsp),%r15
  1728. # qhasm: rx0 -= a0_stack
  1729. # asm 1: subq <a0_stack=stack64#8,<rx0=int64#5
  1730. # asm 2: subq <a0_stack=56(%rsp),<rx0=%r8
  1731. subq 56(%rsp),%r8
  1732. # qhasm: rx1 -= a1_stack
  1733. # asm 1: subq <a1_stack=stack64#9,<rx1=int64#7
  1734. # asm 2: subq <a1_stack=64(%rsp),<rx1=%rax
  1735. subq 64(%rsp),%rax
  1736. # qhasm: rx2 -= a2_stack
  1737. # asm 1: subq <a2_stack=stack64#10,<rx2=int64#8
  1738. # asm 2: subq <a2_stack=72(%rsp),<rx2=%r10
  1739. subq 72(%rsp),%r10
  1740. # qhasm: rx3 -= a3_stack
  1741. # asm 1: subq <a3_stack=stack64#11,<rx3=int64#9
  1742. # asm 2: subq <a3_stack=80(%rsp),<rx3=%r11
  1743. subq 80(%rsp),%r11
  1744. # qhasm: rx4 -= a4_stack
  1745. # asm 1: subq <a4_stack=stack64#12,<rx4=int64#10
  1746. # asm 2: subq <a4_stack=88(%rsp),<rx4=%r12
  1747. subq 88(%rsp),%r12
  1748. # qhasm: *(uint64 *) (rp + 0) = rx0
  1749. # asm 1: movq <rx0=int64#5,0(<rp=int64#1)
  1750. # asm 2: movq <rx0=%r8,0(<rp=%rdi)
  1751. movq %r8,0(%rdi)
  1752. # qhasm: *(uint64 *) (rp + 8) = rx1
  1753. # asm 1: movq <rx1=int64#7,8(<rp=int64#1)
  1754. # asm 2: movq <rx1=%rax,8(<rp=%rdi)
  1755. movq %rax,8(%rdi)
  1756. # qhasm: *(uint64 *) (rp + 16) = rx2
  1757. # asm 1: movq <rx2=int64#8,16(<rp=int64#1)
  1758. # asm 2: movq <rx2=%r10,16(<rp=%rdi)
  1759. movq %r10,16(%rdi)
  1760. # qhasm: *(uint64 *) (rp + 24) = rx3
  1761. # asm 1: movq <rx3=int64#9,24(<rp=int64#1)
  1762. # asm 2: movq <rx3=%r11,24(<rp=%rdi)
  1763. movq %r11,24(%rdi)
  1764. # qhasm: *(uint64 *) (rp + 32) = rx4
  1765. # asm 1: movq <rx4=int64#10,32(<rp=int64#1)
  1766. # asm 2: movq <rx4=%r12,32(<rp=%rdi)
  1767. movq %r12,32(%rdi)
  1768. # qhasm: *(uint64 *) (rp + 80) = ry0
  1769. # asm 1: movq <ry0=int64#3,80(<rp=int64#1)
  1770. # asm 2: movq <ry0=%rdx,80(<rp=%rdi)
  1771. movq %rdx,80(%rdi)
  1772. # qhasm: *(uint64 *) (rp + 88) = ry1
  1773. # asm 1: movq <ry1=int64#6,88(<rp=int64#1)
  1774. # asm 2: movq <ry1=%r9,88(<rp=%rdi)
  1775. movq %r9,88(%rdi)
  1776. # qhasm: *(uint64 *) (rp + 96) = ry2
  1777. # asm 1: movq <ry2=int64#11,96(<rp=int64#1)
  1778. # asm 2: movq <ry2=%r13,96(<rp=%rdi)
  1779. movq %r13,96(%rdi)
  1780. # qhasm: *(uint64 *) (rp + 104) = ry3
  1781. # asm 1: movq <ry3=int64#12,104(<rp=int64#1)
  1782. # asm 2: movq <ry3=%r14,104(<rp=%rdi)
  1783. movq %r14,104(%rdi)
  1784. # qhasm: *(uint64 *) (rp + 112) = ry4
  1785. # asm 1: movq <ry4=int64#13,112(<rp=int64#1)
  1786. # asm 2: movq <ry4=%r15,112(<rp=%rdi)
  1787. movq %r15,112(%rdi)
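# Next product: the fourth 5-limb field element of pp (offsets 120..152)
# times the fourth 5-limb field element of qp (same offsets), read directly
# from the two input structures.  The result is later scaled by the EC2D
# constants, which is consistent with the 2d*T1*T2 term of the
# extended-coordinates addition formulas.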
  1788. # qhasm: mulrax = *(uint64 *)(pp + 144)
  1789. # asm 1: movq 144(<pp=int64#2),>mulrax=int64#3
  1790. # asm 2: movq 144(<pp=%rsi),>mulrax=%rdx
  1791. movq 144(%rsi),%rdx
  1792. # qhasm: mulrax *= 19
  1793. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1794. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1795. imulq $19,%rdx,%rax
  1796. # qhasm: mulx319_stack = mulrax
  1797. # asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#8
  1798. # asm 2: movq <mulrax=%rax,>mulx319_stack=56(%rsp)
  1799. movq %rax,56(%rsp)
  1800. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 136)
  1801. # asm 1: mulq 136(<qp=int64#4)
  1802. # asm 2: mulq 136(<qp=%rcx)
  1803. mulq 136(%rcx)
  1804. # qhasm: c0 = mulrax
  1805. # asm 1: mov <mulrax=int64#7,>c0=int64#5
  1806. # asm 2: mov <mulrax=%rax,>c0=%r8
  1807. mov %rax,%r8
  1808. # qhasm: mulr01 = mulrdx
  1809. # asm 1: mov <mulrdx=int64#3,>mulr01=int64#6
  1810. # asm 2: mov <mulrdx=%rdx,>mulr01=%r9
  1811. mov %rdx,%r9
  1812. # qhasm: mulrax = *(uint64 *)(pp + 152)
  1813. # asm 1: movq 152(<pp=int64#2),>mulrax=int64#3
  1814. # asm 2: movq 152(<pp=%rsi),>mulrax=%rdx
  1815. movq 152(%rsi),%rdx
  1816. # qhasm: mulrax *= 19
  1817. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1818. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1819. imulq $19,%rdx,%rax
  1820. # qhasm: mulx419_stack = mulrax
  1821. # asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#9
  1822. # asm 2: movq <mulrax=%rax,>mulx419_stack=64(%rsp)
  1823. movq %rax,64(%rsp)
  1824. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 128)
  1825. # asm 1: mulq 128(<qp=int64#4)
  1826. # asm 2: mulq 128(<qp=%rcx)
  1827. mulq 128(%rcx)
  1828. # qhasm: carry? c0 += mulrax
  1829. # asm 1: add <mulrax=int64#7,<c0=int64#5
  1830. # asm 2: add <mulrax=%rax,<c0=%r8
  1831. add %rax,%r8
  1832. # qhasm: mulr01 += mulrdx + carry
  1833. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
  1834. # asm 2: adc <mulrdx=%rdx,<mulr01=%r9
  1835. adc %rdx,%r9
  1836. # qhasm: mulrax = *(uint64 *)(pp + 120)
  1837. # asm 1: movq 120(<pp=int64#2),>mulrax=int64#7
  1838. # asm 2: movq 120(<pp=%rsi),>mulrax=%rax
  1839. movq 120(%rsi),%rax
  1840. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 120)
  1841. # asm 1: mulq 120(<qp=int64#4)
  1842. # asm 2: mulq 120(<qp=%rcx)
  1843. mulq 120(%rcx)
  1844. # qhasm: carry? c0 += mulrax
  1845. # asm 1: add <mulrax=int64#7,<c0=int64#5
  1846. # asm 2: add <mulrax=%rax,<c0=%r8
  1847. add %rax,%r8
  1848. # qhasm: mulr01 += mulrdx + carry
  1849. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
  1850. # asm 2: adc <mulrdx=%rdx,<mulr01=%r9
  1851. adc %rdx,%r9
  1852. # qhasm: mulrax = *(uint64 *)(pp + 120)
  1853. # asm 1: movq 120(<pp=int64#2),>mulrax=int64#7
  1854. # asm 2: movq 120(<pp=%rsi),>mulrax=%rax
  1855. movq 120(%rsi),%rax
  1856. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 128)
  1857. # asm 1: mulq 128(<qp=int64#4)
  1858. # asm 2: mulq 128(<qp=%rcx)
  1859. mulq 128(%rcx)
  1860. # qhasm: c1 = mulrax
  1861. # asm 1: mov <mulrax=int64#7,>c1=int64#8
  1862. # asm 2: mov <mulrax=%rax,>c1=%r10
  1863. mov %rax,%r10
  1864. # qhasm: mulr11 = mulrdx
  1865. # asm 1: mov <mulrdx=int64#3,>mulr11=int64#9
  1866. # asm 2: mov <mulrdx=%rdx,>mulr11=%r11
  1867. mov %rdx,%r11
  1868. # qhasm: mulrax = *(uint64 *)(pp + 120)
  1869. # asm 1: movq 120(<pp=int64#2),>mulrax=int64#7
  1870. # asm 2: movq 120(<pp=%rsi),>mulrax=%rax
  1871. movq 120(%rsi),%rax
  1872. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 136)
  1873. # asm 1: mulq 136(<qp=int64#4)
  1874. # asm 2: mulq 136(<qp=%rcx)
  1875. mulq 136(%rcx)
  1876. # qhasm: c2 = mulrax
  1877. # asm 1: mov <mulrax=int64#7,>c2=int64#10
  1878. # asm 2: mov <mulrax=%rax,>c2=%r12
  1879. mov %rax,%r12
  1880. # qhasm: mulr21 = mulrdx
  1881. # asm 1: mov <mulrdx=int64#3,>mulr21=int64#11
  1882. # asm 2: mov <mulrdx=%rdx,>mulr21=%r13
  1883. mov %rdx,%r13
  1884. # qhasm: mulrax = *(uint64 *)(pp + 120)
  1885. # asm 1: movq 120(<pp=int64#2),>mulrax=int64#7
  1886. # asm 2: movq 120(<pp=%rsi),>mulrax=%rax
  1887. movq 120(%rsi),%rax
  1888. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 144)
  1889. # asm 1: mulq 144(<qp=int64#4)
  1890. # asm 2: mulq 144(<qp=%rcx)
  1891. mulq 144(%rcx)
  1892. # qhasm: c3 = mulrax
  1893. # asm 1: mov <mulrax=int64#7,>c3=int64#12
  1894. # asm 2: mov <mulrax=%rax,>c3=%r14
  1895. mov %rax,%r14
  1896. # qhasm: mulr31 = mulrdx
  1897. # asm 1: mov <mulrdx=int64#3,>mulr31=int64#13
  1898. # asm 2: mov <mulrdx=%rdx,>mulr31=%r15
  1899. mov %rdx,%r15
  1900. # qhasm: mulrax = *(uint64 *)(pp + 120)
  1901. # asm 1: movq 120(<pp=int64#2),>mulrax=int64#7
  1902. # asm 2: movq 120(<pp=%rsi),>mulrax=%rax
  1903. movq 120(%rsi),%rax
  1904. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 152)
  1905. # asm 1: mulq 152(<qp=int64#4)
  1906. # asm 2: mulq 152(<qp=%rcx)
  1907. mulq 152(%rcx)
  1908. # qhasm: c4 = mulrax
  1909. # asm 1: mov <mulrax=int64#7,>c4=int64#14
  1910. # asm 2: mov <mulrax=%rax,>c4=%rbx
  1911. mov %rax,%rbx
  1912. # qhasm: mulr41 = mulrdx
  1913. # asm 1: mov <mulrdx=int64#3,>mulr41=int64#15
  1914. # asm 2: mov <mulrdx=%rdx,>mulr41=%rbp
  1915. mov %rdx,%rbp
  1916. # qhasm: mulrax = *(uint64 *)(pp + 128)
  1917. # asm 1: movq 128(<pp=int64#2),>mulrax=int64#7
  1918. # asm 2: movq 128(<pp=%rsi),>mulrax=%rax
  1919. movq 128(%rsi),%rax
  1920. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 120)
  1921. # asm 1: mulq 120(<qp=int64#4)
  1922. # asm 2: mulq 120(<qp=%rcx)
  1923. mulq 120(%rcx)
  1924. # qhasm: carry? c1 += mulrax
  1925. # asm 1: add <mulrax=int64#7,<c1=int64#8
  1926. # asm 2: add <mulrax=%rax,<c1=%r10
  1927. add %rax,%r10
  1928. # qhasm: mulr11 += mulrdx + carry
  1929. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
  1930. # asm 2: adc <mulrdx=%rdx,<mulr11=%r11
  1931. adc %rdx,%r11
  1932. # qhasm: mulrax = *(uint64 *)(pp + 128)
  1933. # asm 1: movq 128(<pp=int64#2),>mulrax=int64#7
  1934. # asm 2: movq 128(<pp=%rsi),>mulrax=%rax
  1935. movq 128(%rsi),%rax
  1936. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 128)
  1937. # asm 1: mulq 128(<qp=int64#4)
  1938. # asm 2: mulq 128(<qp=%rcx)
  1939. mulq 128(%rcx)
  1940. # qhasm: carry? c2 += mulrax
  1941. # asm 1: add <mulrax=int64#7,<c2=int64#10
  1942. # asm 2: add <mulrax=%rax,<c2=%r12
  1943. add %rax,%r12
  1944. # qhasm: mulr21 += mulrdx + carry
  1945. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
  1946. # asm 2: adc <mulrdx=%rdx,<mulr21=%r13
  1947. adc %rdx,%r13
  1948. # qhasm: mulrax = *(uint64 *)(pp + 128)
  1949. # asm 1: movq 128(<pp=int64#2),>mulrax=int64#7
  1950. # asm 2: movq 128(<pp=%rsi),>mulrax=%rax
  1951. movq 128(%rsi),%rax
  1952. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 136)
  1953. # asm 1: mulq 136(<qp=int64#4)
  1954. # asm 2: mulq 136(<qp=%rcx)
  1955. mulq 136(%rcx)
  1956. # qhasm: carry? c3 += mulrax
  1957. # asm 1: add <mulrax=int64#7,<c3=int64#12
  1958. # asm 2: add <mulrax=%rax,<c3=%r14
  1959. add %rax,%r14
  1960. # qhasm: mulr31 += mulrdx + carry
  1961. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
  1962. # asm 2: adc <mulrdx=%rdx,<mulr31=%r15
  1963. adc %rdx,%r15
  1964. # qhasm: mulrax = *(uint64 *)(pp + 128)
  1965. # asm 1: movq 128(<pp=int64#2),>mulrax=int64#7
  1966. # asm 2: movq 128(<pp=%rsi),>mulrax=%rax
  1967. movq 128(%rsi),%rax
  1968. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 144)
  1969. # asm 1: mulq 144(<qp=int64#4)
  1970. # asm 2: mulq 144(<qp=%rcx)
  1971. mulq 144(%rcx)
  1972. # qhasm: carry? c4 += mulrax
  1973. # asm 1: add <mulrax=int64#7,<c4=int64#14
  1974. # asm 2: add <mulrax=%rax,<c4=%rbx
  1975. add %rax,%rbx
  1976. # qhasm: mulr41 += mulrdx + carry
  1977. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
  1978. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
  1979. adc %rdx,%rbp
  1980. # qhasm: mulrax = *(uint64 *)(pp + 128)
  1981. # asm 1: movq 128(<pp=int64#2),>mulrax=int64#3
  1982. # asm 2: movq 128(<pp=%rsi),>mulrax=%rdx
  1983. movq 128(%rsi),%rdx
  1984. # qhasm: mulrax *= 19
  1985. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1986. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1987. imulq $19,%rdx,%rax
  1988. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 152)
  1989. # asm 1: mulq 152(<qp=int64#4)
  1990. # asm 2: mulq 152(<qp=%rcx)
  1991. mulq 152(%rcx)
  1992. # qhasm: carry? c0 += mulrax
  1993. # asm 1: add <mulrax=int64#7,<c0=int64#5
  1994. # asm 2: add <mulrax=%rax,<c0=%r8
  1995. add %rax,%r8
  1996. # qhasm: mulr01 += mulrdx + carry
  1997. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
  1998. # asm 2: adc <mulrdx=%rdx,<mulr01=%r9
  1999. adc %rdx,%r9
  2000. # qhasm: mulrax = *(uint64 *)(pp + 136)
  2001. # asm 1: movq 136(<pp=int64#2),>mulrax=int64#7
  2002. # asm 2: movq 136(<pp=%rsi),>mulrax=%rax
  2003. movq 136(%rsi),%rax
  2004. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 120)
  2005. # asm 1: mulq 120(<qp=int64#4)
  2006. # asm 2: mulq 120(<qp=%rcx)
  2007. mulq 120(%rcx)
  2008. # qhasm: carry? c2 += mulrax
  2009. # asm 1: add <mulrax=int64#7,<c2=int64#10
  2010. # asm 2: add <mulrax=%rax,<c2=%r12
  2011. add %rax,%r12
  2012. # qhasm: mulr21 += mulrdx + carry
  2013. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
  2014. # asm 2: adc <mulrdx=%rdx,<mulr21=%r13
  2015. adc %rdx,%r13
  2016. # qhasm: mulrax = *(uint64 *)(pp + 136)
  2017. # asm 1: movq 136(<pp=int64#2),>mulrax=int64#7
  2018. # asm 2: movq 136(<pp=%rsi),>mulrax=%rax
  2019. movq 136(%rsi),%rax
  2020. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 128)
  2021. # asm 1: mulq 128(<qp=int64#4)
  2022. # asm 2: mulq 128(<qp=%rcx)
  2023. mulq 128(%rcx)
  2024. # qhasm: carry? c3 += mulrax
  2025. # asm 1: add <mulrax=int64#7,<c3=int64#12
  2026. # asm 2: add <mulrax=%rax,<c3=%r14
  2027. add %rax,%r14
  2028. # qhasm: mulr31 += mulrdx + carry
  2029. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
  2030. # asm 2: adc <mulrdx=%rdx,<mulr31=%r15
  2031. adc %rdx,%r15
  2032. # qhasm: mulrax = *(uint64 *)(pp + 136)
  2033. # asm 1: movq 136(<pp=int64#2),>mulrax=int64#7
  2034. # asm 2: movq 136(<pp=%rsi),>mulrax=%rax
  2035. movq 136(%rsi),%rax
  2036. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 136)
  2037. # asm 1: mulq 136(<qp=int64#4)
  2038. # asm 2: mulq 136(<qp=%rcx)
  2039. mulq 136(%rcx)
  2040. # qhasm: carry? c4 += mulrax
  2041. # asm 1: add <mulrax=int64#7,<c4=int64#14
  2042. # asm 2: add <mulrax=%rax,<c4=%rbx
  2043. add %rax,%rbx
  2044. # qhasm: mulr41 += mulrdx + carry
  2045. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
  2046. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
  2047. adc %rdx,%rbp
  2048. # qhasm: mulrax = *(uint64 *)(pp + 136)
  2049. # asm 1: movq 136(<pp=int64#2),>mulrax=int64#3
  2050. # asm 2: movq 136(<pp=%rsi),>mulrax=%rdx
  2051. movq 136(%rsi),%rdx
  2052. # qhasm: mulrax *= 19
  2053. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  2054. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  2055. imulq $19,%rdx,%rax
  2056. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 144)
  2057. # asm 1: mulq 144(<qp=int64#4)
  2058. # asm 2: mulq 144(<qp=%rcx)
  2059. mulq 144(%rcx)
  2060. # qhasm: carry? c0 += mulrax
  2061. # asm 1: add <mulrax=int64#7,<c0=int64#5
  2062. # asm 2: add <mulrax=%rax,<c0=%r8
  2063. add %rax,%r8
  2064. # qhasm: mulr01 += mulrdx + carry
  2065. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
  2066. # asm 2: adc <mulrdx=%rdx,<mulr01=%r9
  2067. adc %rdx,%r9
  2068. # qhasm: mulrax = *(uint64 *)(pp + 136)
  2069. # asm 1: movq 136(<pp=int64#2),>mulrax=int64#3
  2070. # asm 2: movq 136(<pp=%rsi),>mulrax=%rdx
  2071. movq 136(%rsi),%rdx
  2072. # qhasm: mulrax *= 19
  2073. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  2074. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  2075. imulq $19,%rdx,%rax
  2076. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 152)
  2077. # asm 1: mulq 152(<qp=int64#4)
  2078. # asm 2: mulq 152(<qp=%rcx)
  2079. mulq 152(%rcx)
  2080. # qhasm: carry? c1 += mulrax
  2081. # asm 1: add <mulrax=int64#7,<c1=int64#8
  2082. # asm 2: add <mulrax=%rax,<c1=%r10
  2083. add %rax,%r10
  2084. # qhasm: mulr11 += mulrdx + carry
  2085. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
  2086. # asm 2: adc <mulrdx=%rdx,<mulr11=%r11
  2087. adc %rdx,%r11
  2088. # qhasm: mulrax = *(uint64 *)(pp + 144)
  2089. # asm 1: movq 144(<pp=int64#2),>mulrax=int64#7
  2090. # asm 2: movq 144(<pp=%rsi),>mulrax=%rax
  2091. movq 144(%rsi),%rax
  2092. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 120)
  2093. # asm 1: mulq 120(<qp=int64#4)
  2094. # asm 2: mulq 120(<qp=%rcx)
  2095. mulq 120(%rcx)
  2096. # qhasm: carry? c3 += mulrax
  2097. # asm 1: add <mulrax=int64#7,<c3=int64#12
  2098. # asm 2: add <mulrax=%rax,<c3=%r14
  2099. add %rax,%r14
  2100. # qhasm: mulr31 += mulrdx + carry
  2101. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
  2102. # asm 2: adc <mulrdx=%rdx,<mulr31=%r15
  2103. adc %rdx,%r15
  2104. # qhasm: mulrax = *(uint64 *)(pp + 144)
  2105. # asm 1: movq 144(<pp=int64#2),>mulrax=int64#7
  2106. # asm 2: movq 144(<pp=%rsi),>mulrax=%rax
  2107. movq 144(%rsi),%rax
  2108. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 128)
  2109. # asm 1: mulq 128(<qp=int64#4)
  2110. # asm 2: mulq 128(<qp=%rcx)
  2111. mulq 128(%rcx)
  2112. # qhasm: carry? c4 += mulrax
  2113. # asm 1: add <mulrax=int64#7,<c4=int64#14
  2114. # asm 2: add <mulrax=%rax,<c4=%rbx
  2115. add %rax,%rbx
  2116. # qhasm: mulr41 += mulrdx + carry
  2117. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
  2118. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
  2119. adc %rdx,%rbp
  2120. # qhasm: mulrax = mulx319_stack
  2121. # asm 1: movq <mulx319_stack=stack64#8,>mulrax=int64#7
  2122. # asm 2: movq <mulx319_stack=56(%rsp),>mulrax=%rax
  2123. movq 56(%rsp),%rax
  2124. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 144)
  2125. # asm 1: mulq 144(<qp=int64#4)
  2126. # asm 2: mulq 144(<qp=%rcx)
  2127. mulq 144(%rcx)
  2128. # qhasm: carry? c1 += mulrax
  2129. # asm 1: add <mulrax=int64#7,<c1=int64#8
  2130. # asm 2: add <mulrax=%rax,<c1=%r10
  2131. add %rax,%r10
  2132. # qhasm: mulr11 += mulrdx + carry
  2133. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
  2134. # asm 2: adc <mulrdx=%rdx,<mulr11=%r11
  2135. adc %rdx,%r11
  2136. # qhasm: mulrax = mulx319_stack
  2137. # asm 1: movq <mulx319_stack=stack64#8,>mulrax=int64#7
  2138. # asm 2: movq <mulx319_stack=56(%rsp),>mulrax=%rax
  2139. movq 56(%rsp),%rax
  2140. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 152)
  2141. # asm 1: mulq 152(<qp=int64#4)
  2142. # asm 2: mulq 152(<qp=%rcx)
  2143. mulq 152(%rcx)
  2144. # qhasm: carry? c2 += mulrax
  2145. # asm 1: add <mulrax=int64#7,<c2=int64#10
  2146. # asm 2: add <mulrax=%rax,<c2=%r12
  2147. add %rax,%r12
  2148. # qhasm: mulr21 += mulrdx + carry
  2149. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
  2150. # asm 2: adc <mulrdx=%rdx,<mulr21=%r13
  2151. adc %rdx,%r13
  2152. # qhasm: mulrax = *(uint64 *)(pp + 152)
  2153. # asm 1: movq 152(<pp=int64#2),>mulrax=int64#7
  2154. # asm 2: movq 152(<pp=%rsi),>mulrax=%rax
  2155. movq 152(%rsi),%rax
  2156. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 120)
  2157. # asm 1: mulq 120(<qp=int64#4)
  2158. # asm 2: mulq 120(<qp=%rcx)
  2159. mulq 120(%rcx)
  2160. # qhasm: carry? c4 += mulrax
  2161. # asm 1: add <mulrax=int64#7,<c4=int64#14
  2162. # asm 2: add <mulrax=%rax,<c4=%rbx
  2163. add %rax,%rbx
  2164. # qhasm: mulr41 += mulrdx + carry
  2165. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
  2166. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
  2167. adc %rdx,%rbp
  2168. # qhasm: mulrax = mulx419_stack
  2169. # asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
  2170. # asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
  2171. movq 64(%rsp),%rax
  2172. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 136)
  2173. # asm 1: mulq 136(<qp=int64#4)
  2174. # asm 2: mulq 136(<qp=%rcx)
  2175. mulq 136(%rcx)
  2176. # qhasm: carry? c1 += mulrax
  2177. # asm 1: add <mulrax=int64#7,<c1=int64#8
  2178. # asm 2: add <mulrax=%rax,<c1=%r10
  2179. add %rax,%r10
  2180. # qhasm: mulr11 += mulrdx + carry
  2181. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
  2182. # asm 2: adc <mulrdx=%rdx,<mulr11=%r11
  2183. adc %rdx,%r11
  2184. # qhasm: mulrax = mulx419_stack
  2185. # asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
  2186. # asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
  2187. movq 64(%rsp),%rax
  2188. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 144)
  2189. # asm 1: mulq 144(<qp=int64#4)
  2190. # asm 2: mulq 144(<qp=%rcx)
  2191. mulq 144(%rcx)
  2192. # qhasm: carry? c2 += mulrax
  2193. # asm 1: add <mulrax=int64#7,<c2=int64#10
  2194. # asm 2: add <mulrax=%rax,<c2=%r12
  2195. add %rax,%r12
  2196. # qhasm: mulr21 += mulrdx + carry
  2197. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
  2198. # asm 2: adc <mulrdx=%rdx,<mulr21=%r13
  2199. adc %rdx,%r13
  2200. # qhasm: mulrax = mulx419_stack
  2201. # asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
  2202. # asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
  2203. movq 64(%rsp),%rax
  2204. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 152)
  2205. # asm 1: mulq 152(<qp=int64#4)
  2206. # asm 2: mulq 152(<qp=%rcx)
  2207. mulq 152(%rcx)
  2208. # qhasm: carry? c3 += mulrax
  2209. # asm 1: add <mulrax=int64#7,<c3=int64#12
  2210. # asm 2: add <mulrax=%rax,<c3=%r14
  2211. add %rax,%r14
  2212. # qhasm: mulr31 += mulrdx + carry
  2213. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
  2214. # asm 2: adc <mulrdx=%rdx,<mulr31=%r15
  2215. adc %rdx,%r15
  2216. # qhasm: mulredmask = *(uint64 *) &CRYPTO_NAMESPACE(batch_REDMASK51)
  2217. # asm 1: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=int64#3
  2218. # asm 2: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=%rdx
  2219. movq CRYPTO_NAMESPACE(batch_REDMASK51)(%rip),%rdx
  2220. # qhasm: mulr01 = (mulr01.c0) << 13
  2221. # asm 1: shld $13,<c0=int64#5,<mulr01=int64#6
  2222. # asm 2: shld $13,<c0=%r8,<mulr01=%r9
  2223. shld $13,%r8,%r9
  2224. # qhasm: c0 &= mulredmask
  2225. # asm 1: and <mulredmask=int64#3,<c0=int64#5
  2226. # asm 2: and <mulredmask=%rdx,<c0=%r8
  2227. and %rdx,%r8
  2228. # qhasm: mulr11 = (mulr11.c1) << 13
  2229. # asm 1: shld $13,<c1=int64#8,<mulr11=int64#9
  2230. # asm 2: shld $13,<c1=%r10,<mulr11=%r11
  2231. shld $13,%r10,%r11
  2232. # qhasm: c1 &= mulredmask
  2233. # asm 1: and <mulredmask=int64#3,<c1=int64#8
  2234. # asm 2: and <mulredmask=%rdx,<c1=%r10
  2235. and %rdx,%r10
  2236. # qhasm: c1 += mulr01
  2237. # asm 1: add <mulr01=int64#6,<c1=int64#8
  2238. # asm 2: add <mulr01=%r9,<c1=%r10
  2239. add %r9,%r10
  2240. # qhasm: mulr21 = (mulr21.c2) << 13
  2241. # asm 1: shld $13,<c2=int64#10,<mulr21=int64#11
  2242. # asm 2: shld $13,<c2=%r12,<mulr21=%r13
  2243. shld $13,%r12,%r13
  2244. # qhasm: c2 &= mulredmask
  2245. # asm 1: and <mulredmask=int64#3,<c2=int64#10
  2246. # asm 2: and <mulredmask=%rdx,<c2=%r12
  2247. and %rdx,%r12
  2248. # qhasm: c2 += mulr11
  2249. # asm 1: add <mulr11=int64#9,<c2=int64#10
  2250. # asm 2: add <mulr11=%r11,<c2=%r12
  2251. add %r11,%r12
  2252. # qhasm: mulr31 = (mulr31.c3) << 13
  2253. # asm 1: shld $13,<c3=int64#12,<mulr31=int64#13
  2254. # asm 2: shld $13,<c3=%r14,<mulr31=%r15
  2255. shld $13,%r14,%r15
  2256. # qhasm: c3 &= mulredmask
  2257. # asm 1: and <mulredmask=int64#3,<c3=int64#12
  2258. # asm 2: and <mulredmask=%rdx,<c3=%r14
  2259. and %rdx,%r14
  2260. # qhasm: c3 += mulr21
  2261. # asm 1: add <mulr21=int64#11,<c3=int64#12
  2262. # asm 2: add <mulr21=%r13,<c3=%r14
  2263. add %r13,%r14
  2264. # qhasm: mulr41 = (mulr41.c4) << 13
  2265. # asm 1: shld $13,<c4=int64#14,<mulr41=int64#15
  2266. # asm 2: shld $13,<c4=%rbx,<mulr41=%rbp
  2267. shld $13,%rbx,%rbp
  2268. # qhasm: c4 &= mulredmask
  2269. # asm 1: and <mulredmask=int64#3,<c4=int64#14
  2270. # asm 2: and <mulredmask=%rdx,<c4=%rbx
  2271. and %rdx,%rbx
  2272. # qhasm: c4 += mulr31
  2273. # asm 1: add <mulr31=int64#13,<c4=int64#14
  2274. # asm 2: add <mulr31=%r15,<c4=%rbx
  2275. add %r15,%rbx
  2276. # qhasm: mulr41 = mulr41 * 19
  2277. # asm 1: imulq $19,<mulr41=int64#15,>mulr41=int64#6
  2278. # asm 2: imulq $19,<mulr41=%rbp,>mulr41=%r9
  2279. imulq $19,%rbp,%r9
  2280. # qhasm: c0 += mulr41
  2281. # asm 1: add <mulr41=int64#6,<c0=int64#5
  2282. # asm 2: add <mulr41=%r9,<c0=%r8
  2283. add %r9,%r8
  2284. # qhasm: mult = c0
  2285. # asm 1: mov <c0=int64#5,>mult=int64#6
  2286. # asm 2: mov <c0=%r8,>mult=%r9
  2287. mov %r8,%r9
  2288. # qhasm: (uint64) mult >>= 51
  2289. # asm 1: shr $51,<mult=int64#6
  2290. # asm 2: shr $51,<mult=%r9
  2291. shr $51,%r9
  2292. # qhasm: mult += c1
  2293. # asm 1: add <c1=int64#8,<mult=int64#6
  2294. # asm 2: add <c1=%r10,<mult=%r9
  2295. add %r10,%r9
  2296. # qhasm: c1 = mult
  2297. # asm 1: mov <mult=int64#6,>c1=int64#7
  2298. # asm 2: mov <mult=%r9,>c1=%rax
  2299. mov %r9,%rax
  2300. # qhasm: (uint64) mult >>= 51
  2301. # asm 1: shr $51,<mult=int64#6
  2302. # asm 2: shr $51,<mult=%r9
  2303. shr $51,%r9
  2304. # qhasm: c0 &= mulredmask
  2305. # asm 1: and <mulredmask=int64#3,<c0=int64#5
  2306. # asm 2: and <mulredmask=%rdx,<c0=%r8
  2307. and %rdx,%r8
  2308. # qhasm: mult += c2
  2309. # asm 1: add <c2=int64#10,<mult=int64#6
  2310. # asm 2: add <c2=%r12,<mult=%r9
  2311. add %r12,%r9
  2312. # qhasm: c2 = mult
  2313. # asm 1: mov <mult=int64#6,>c2=int64#8
  2314. # asm 2: mov <mult=%r9,>c2=%r10
  2315. mov %r9,%r10
  2316. # qhasm: (uint64) mult >>= 51
  2317. # asm 1: shr $51,<mult=int64#6
  2318. # asm 2: shr $51,<mult=%r9
  2319. shr $51,%r9
  2320. # qhasm: c1 &= mulredmask
  2321. # asm 1: and <mulredmask=int64#3,<c1=int64#7
  2322. # asm 2: and <mulredmask=%rdx,<c1=%rax
  2323. and %rdx,%rax
  2324. # qhasm: mult += c3
  2325. # asm 1: add <c3=int64#12,<mult=int64#6
  2326. # asm 2: add <c3=%r14,<mult=%r9
  2327. add %r14,%r9
  2328. # qhasm: c3 = mult
  2329. # asm 1: mov <mult=int64#6,>c3=int64#9
  2330. # asm 2: mov <mult=%r9,>c3=%r11
  2331. mov %r9,%r11
  2332. # qhasm: (uint64) mult >>= 51
  2333. # asm 1: shr $51,<mult=int64#6
  2334. # asm 2: shr $51,<mult=%r9
  2335. shr $51,%r9
  2336. # qhasm: c2 &= mulredmask
  2337. # asm 1: and <mulredmask=int64#3,<c2=int64#8
  2338. # asm 2: and <mulredmask=%rdx,<c2=%r10
  2339. and %rdx,%r10
  2340. # qhasm: mult += c4
  2341. # asm 1: add <c4=int64#14,<mult=int64#6
  2342. # asm 2: add <c4=%rbx,<mult=%r9
  2343. add %rbx,%r9
  2344. # qhasm: c4 = mult
  2345. # asm 1: mov <mult=int64#6,>c4=int64#10
  2346. # asm 2: mov <mult=%r9,>c4=%r12
  2347. mov %r9,%r12
  2348. # qhasm: (uint64) mult >>= 51
  2349. # asm 1: shr $51,<mult=int64#6
  2350. # asm 2: shr $51,<mult=%r9
  2351. shr $51,%r9
  2352. # qhasm: c3 &= mulredmask
  2353. # asm 1: and <mulredmask=int64#3,<c3=int64#9
  2354. # asm 2: and <mulredmask=%rdx,<c3=%r11
  2355. and %rdx,%r11
  2356. # qhasm: mult *= 19
  2357. # asm 1: imulq $19,<mult=int64#6,>mult=int64#6
  2358. # asm 2: imulq $19,<mult=%r9,>mult=%r9
  2359. imulq $19,%r9,%r9
  2360. # qhasm: c0 += mult
  2361. # asm 1: add <mult=int64#6,<c0=int64#5
  2362. # asm 2: add <mult=%r9,<c0=%r8
  2363. add %r9,%r8
  2364. # qhasm: c4 &= mulredmask
  2365. # asm 1: and <mulredmask=int64#3,<c4=int64#10
  2366. # asm 2: and <mulredmask=%rdx,<c4=%r12
  2367. and %rdx,%r12
  2368. # qhasm: c0_stack = c0
  2369. # asm 1: movq <c0=int64#5,>c0_stack=stack64#8
  2370. # asm 2: movq <c0=%r8,>c0_stack=56(%rsp)
  2371. movq %r8,56(%rsp)
  2372. # qhasm: c1_stack = c1
  2373. # asm 1: movq <c1=int64#7,>c1_stack=stack64#9
  2374. # asm 2: movq <c1=%rax,>c1_stack=64(%rsp)
  2375. movq %rax,64(%rsp)
  2376. # qhasm: c2_stack = c2
  2377. # asm 1: movq <c2=int64#8,>c2_stack=stack64#10
  2378. # asm 2: movq <c2=%r10,>c2_stack=72(%rsp)
  2379. movq %r10,72(%rsp)
  2380. # qhasm: c3_stack = c3
  2381. # asm 1: movq <c3=int64#9,>c3_stack=stack64#11
  2382. # asm 2: movq <c3=%r11,>c3_stack=80(%rsp)
  2383. movq %r11,80(%rsp)
  2384. # qhasm: c4_stack = c4
  2385. # asm 1: movq <c4=int64#10,>c4_stack=stack64#12
  2386. # asm 2: movq <c4=%r12,>c4_stack=88(%rsp)
  2387. movq %r12,88(%rsp)
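# The reduced limbs c0..c4 are spilled to 56(%rsp)..88(%rsp) and multiplied
# by the precomputed constants EC2D0..EC2D4 (what appears to be the curve
# constant 2*d in radix-2^51 form), again precomputing c3*19 and c4*19 for
# the terms that wrap past limb 4.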
# qhasm: mulrax = c3_stack
# asm 1: movq <c3_stack=stack64#11,>mulrax=int64#3
# asm 2: movq <c3_stack=80(%rsp),>mulrax=%rdx
movq 80(%rsp),%rdx
# qhasm: mulrax *= 19
# asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
# asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
imulq $19,%rdx,%rax
# qhasm: mulx319_stack = mulrax
# asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#13
# asm 2: movq <mulrax=%rax,>mulx319_stack=96(%rsp)
movq %rax,96(%rsp)
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D2)
mulq CRYPTO_NAMESPACE(batch_EC2D2)(%rip)
# qhasm: c0 = mulrax
# asm 1: mov <mulrax=int64#7,>c0=int64#5
# asm 2: mov <mulrax=%rax,>c0=%r8
mov %rax,%r8
# qhasm: mulr01 = mulrdx
# asm 1: mov <mulrdx=int64#3,>mulr01=int64#6
# asm 2: mov <mulrdx=%rdx,>mulr01=%r9
mov %rdx,%r9
# qhasm: mulrax = c4_stack
# asm 1: movq <c4_stack=stack64#12,>mulrax=int64#3
# asm 2: movq <c4_stack=88(%rsp),>mulrax=%rdx
movq 88(%rsp),%rdx
# qhasm: mulrax *= 19
# asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
# asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
imulq $19,%rdx,%rax
# qhasm: mulx419_stack = mulrax
# asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#14
# asm 2: movq <mulrax=%rax,>mulx419_stack=104(%rsp)
movq %rax,104(%rsp)
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D1)
mulq CRYPTO_NAMESPACE(batch_EC2D1)(%rip)
# qhasm: carry? c0 += mulrax
# asm 1: add <mulrax=int64#7,<c0=int64#5
# asm 2: add <mulrax=%rax,<c0=%r8
add %rax,%r8
# qhasm: mulr01 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
# asm 2: adc <mulrdx=%rdx,<mulr01=%r9
adc %rdx,%r9
# qhasm: mulrax = c0_stack
# asm 1: movq <c0_stack=stack64#8,>mulrax=int64#7
# asm 2: movq <c0_stack=56(%rsp),>mulrax=%rax
movq 56(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D0)
mulq CRYPTO_NAMESPACE(batch_EC2D0)(%rip)
# qhasm: carry? c0 += mulrax
# asm 1: add <mulrax=int64#7,<c0=int64#5
# asm 2: add <mulrax=%rax,<c0=%r8
add %rax,%r8
# qhasm: mulr01 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
# asm 2: adc <mulrdx=%rdx,<mulr01=%r9
adc %rdx,%r9
# qhasm: mulrax = c0_stack
# asm 1: movq <c0_stack=stack64#8,>mulrax=int64#7
# asm 2: movq <c0_stack=56(%rsp),>mulrax=%rax
movq 56(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D1)
mulq CRYPTO_NAMESPACE(batch_EC2D1)(%rip)
# qhasm: c1 = mulrax
# asm 1: mov <mulrax=int64#7,>c1=int64#8
# asm 2: mov <mulrax=%rax,>c1=%r10
mov %rax,%r10
# qhasm: mulr11 = mulrdx
# asm 1: mov <mulrdx=int64#3,>mulr11=int64#9
# asm 2: mov <mulrdx=%rdx,>mulr11=%r11
mov %rdx,%r11
# qhasm: mulrax = c0_stack
# asm 1: movq <c0_stack=stack64#8,>mulrax=int64#7
# asm 2: movq <c0_stack=56(%rsp),>mulrax=%rax
movq 56(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D2)
mulq CRYPTO_NAMESPACE(batch_EC2D2)(%rip)
# qhasm: c2 = mulrax
# asm 1: mov <mulrax=int64#7,>c2=int64#10
# asm 2: mov <mulrax=%rax,>c2=%r12
mov %rax,%r12
# qhasm: mulr21 = mulrdx
# asm 1: mov <mulrdx=int64#3,>mulr21=int64#11
# asm 2: mov <mulrdx=%rdx,>mulr21=%r13
mov %rdx,%r13
# qhasm: mulrax = c0_stack
# asm 1: movq <c0_stack=stack64#8,>mulrax=int64#7
# asm 2: movq <c0_stack=56(%rsp),>mulrax=%rax
movq 56(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D3)
mulq CRYPTO_NAMESPACE(batch_EC2D3)(%rip)
# qhasm: c3 = mulrax
# asm 1: mov <mulrax=int64#7,>c3=int64#12
# asm 2: mov <mulrax=%rax,>c3=%r14
mov %rax,%r14
# qhasm: mulr31 = mulrdx
# asm 1: mov <mulrdx=int64#3,>mulr31=int64#13
# asm 2: mov <mulrdx=%rdx,>mulr31=%r15
mov %rdx,%r15
# qhasm: mulrax = c0_stack
# asm 1: movq <c0_stack=stack64#8,>mulrax=int64#7
# asm 2: movq <c0_stack=56(%rsp),>mulrax=%rax
movq 56(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D4)
mulq CRYPTO_NAMESPACE(batch_EC2D4)(%rip)
# qhasm: c4 = mulrax
# asm 1: mov <mulrax=int64#7,>c4=int64#14
# asm 2: mov <mulrax=%rax,>c4=%rbx
mov %rax,%rbx
# qhasm: mulr41 = mulrdx
# asm 1: mov <mulrdx=int64#3,>mulr41=int64#15
# asm 2: mov <mulrdx=%rdx,>mulr41=%rbp
mov %rdx,%rbp
# qhasm: mulrax = c1_stack
# asm 1: movq <c1_stack=stack64#9,>mulrax=int64#7
# asm 2: movq <c1_stack=64(%rsp),>mulrax=%rax
movq 64(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D0)
mulq CRYPTO_NAMESPACE(batch_EC2D0)(%rip)
# qhasm: carry? c1 += mulrax
# asm 1: add <mulrax=int64#7,<c1=int64#8
# asm 2: add <mulrax=%rax,<c1=%r10
add %rax,%r10
# qhasm: mulr11 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
# asm 2: adc <mulrdx=%rdx,<mulr11=%r11
adc %rdx,%r11
# qhasm: mulrax = c1_stack
# asm 1: movq <c1_stack=stack64#9,>mulrax=int64#7
# asm 2: movq <c1_stack=64(%rsp),>mulrax=%rax
movq 64(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D1)
mulq CRYPTO_NAMESPACE(batch_EC2D1)(%rip)
# qhasm: carry? c2 += mulrax
# asm 1: add <mulrax=int64#7,<c2=int64#10
# asm 2: add <mulrax=%rax,<c2=%r12
add %rax,%r12
# qhasm: mulr21 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
# asm 2: adc <mulrdx=%rdx,<mulr21=%r13
adc %rdx,%r13
# qhasm: mulrax = c1_stack
# asm 1: movq <c1_stack=stack64#9,>mulrax=int64#7
# asm 2: movq <c1_stack=64(%rsp),>mulrax=%rax
movq 64(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D2)
mulq CRYPTO_NAMESPACE(batch_EC2D2)(%rip)
# qhasm: carry? c3 += mulrax
# asm 1: add <mulrax=int64#7,<c3=int64#12
# asm 2: add <mulrax=%rax,<c3=%r14
add %rax,%r14
# qhasm: mulr31 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
# asm 2: adc <mulrdx=%rdx,<mulr31=%r15
adc %rdx,%r15
# qhasm: mulrax = c1_stack
# asm 1: movq <c1_stack=stack64#9,>mulrax=int64#7
# asm 2: movq <c1_stack=64(%rsp),>mulrax=%rax
movq 64(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D3)
mulq CRYPTO_NAMESPACE(batch_EC2D3)(%rip)
# qhasm: carry? c4 += mulrax
# asm 1: add <mulrax=int64#7,<c4=int64#14
# asm 2: add <mulrax=%rax,<c4=%rbx
add %rax,%rbx
# qhasm: mulr41 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
# asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
adc %rdx,%rbp
# qhasm: mulrax = c1_stack
# asm 1: movq <c1_stack=stack64#9,>mulrax=int64#3
# asm 2: movq <c1_stack=64(%rsp),>mulrax=%rdx
movq 64(%rsp),%rdx
# qhasm: mulrax *= 19
# asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
# asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
imulq $19,%rdx,%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D4)
mulq CRYPTO_NAMESPACE(batch_EC2D4)(%rip)
# qhasm: carry? c0 += mulrax
# asm 1: add <mulrax=int64#7,<c0=int64#5
# asm 2: add <mulrax=%rax,<c0=%r8
add %rax,%r8
# qhasm: mulr01 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
# asm 2: adc <mulrdx=%rdx,<mulr01=%r9
adc %rdx,%r9
# qhasm: mulrax = c2_stack
# asm 1: movq <c2_stack=stack64#10,>mulrax=int64#7
# asm 2: movq <c2_stack=72(%rsp),>mulrax=%rax
movq 72(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D0)
mulq CRYPTO_NAMESPACE(batch_EC2D0)(%rip)
# qhasm: carry? c2 += mulrax
# asm 1: add <mulrax=int64#7,<c2=int64#10
# asm 2: add <mulrax=%rax,<c2=%r12
add %rax,%r12
# qhasm: mulr21 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
# asm 2: adc <mulrdx=%rdx,<mulr21=%r13
adc %rdx,%r13
# qhasm: mulrax = c2_stack
# asm 1: movq <c2_stack=stack64#10,>mulrax=int64#7
# asm 2: movq <c2_stack=72(%rsp),>mulrax=%rax
movq 72(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D1)
mulq CRYPTO_NAMESPACE(batch_EC2D1)(%rip)
# qhasm: carry? c3 += mulrax
# asm 1: add <mulrax=int64#7,<c3=int64#12
# asm 2: add <mulrax=%rax,<c3=%r14
add %rax,%r14
# qhasm: mulr31 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
# asm 2: adc <mulrdx=%rdx,<mulr31=%r15
adc %rdx,%r15
# qhasm: mulrax = c2_stack
# asm 1: movq <c2_stack=stack64#10,>mulrax=int64#7
# asm 2: movq <c2_stack=72(%rsp),>mulrax=%rax
movq 72(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D2)
mulq CRYPTO_NAMESPACE(batch_EC2D2)(%rip)
# qhasm: carry? c4 += mulrax
# asm 1: add <mulrax=int64#7,<c4=int64#14
# asm 2: add <mulrax=%rax,<c4=%rbx
add %rax,%rbx
# qhasm: mulr41 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
# asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
adc %rdx,%rbp
# qhasm: mulrax = c2_stack
# asm 1: movq <c2_stack=stack64#10,>mulrax=int64#3
# asm 2: movq <c2_stack=72(%rsp),>mulrax=%rdx
movq 72(%rsp),%rdx
# qhasm: mulrax *= 19
# asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
# asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
imulq $19,%rdx,%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D3)
mulq CRYPTO_NAMESPACE(batch_EC2D3)(%rip)
# qhasm: carry? c0 += mulrax
# asm 1: add <mulrax=int64#7,<c0=int64#5
# asm 2: add <mulrax=%rax,<c0=%r8
add %rax,%r8
# qhasm: mulr01 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
# asm 2: adc <mulrdx=%rdx,<mulr01=%r9
adc %rdx,%r9
# qhasm: mulrax = c2_stack
# asm 1: movq <c2_stack=stack64#10,>mulrax=int64#3
# asm 2: movq <c2_stack=72(%rsp),>mulrax=%rdx
movq 72(%rsp),%rdx
# qhasm: mulrax *= 19
# asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
# asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
imulq $19,%rdx,%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D4)
mulq CRYPTO_NAMESPACE(batch_EC2D4)(%rip)
# qhasm: carry? c1 += mulrax
# asm 1: add <mulrax=int64#7,<c1=int64#8
# asm 2: add <mulrax=%rax,<c1=%r10
add %rax,%r10
# qhasm: mulr11 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
# asm 2: adc <mulrdx=%rdx,<mulr11=%r11
adc %rdx,%r11
# qhasm: mulrax = c3_stack
# asm 1: movq <c3_stack=stack64#11,>mulrax=int64#7
# asm 2: movq <c3_stack=80(%rsp),>mulrax=%rax
movq 80(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D0)
mulq CRYPTO_NAMESPACE(batch_EC2D0)(%rip)
# qhasm: carry? c3 += mulrax
# asm 1: add <mulrax=int64#7,<c3=int64#12
# asm 2: add <mulrax=%rax,<c3=%r14
add %rax,%r14
# qhasm: mulr31 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
# asm 2: adc <mulrdx=%rdx,<mulr31=%r15
adc %rdx,%r15
# qhasm: mulrax = c3_stack
# asm 1: movq <c3_stack=stack64#11,>mulrax=int64#7
# asm 2: movq <c3_stack=80(%rsp),>mulrax=%rax
movq 80(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D1)
mulq CRYPTO_NAMESPACE(batch_EC2D1)(%rip)
# qhasm: carry? c4 += mulrax
# asm 1: add <mulrax=int64#7,<c4=int64#14
# asm 2: add <mulrax=%rax,<c4=%rbx
add %rax,%rbx
# qhasm: mulr41 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
# asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
adc %rdx,%rbp
# qhasm: mulrax = mulx319_stack
# asm 1: movq <mulx319_stack=stack64#13,>mulrax=int64#7
# asm 2: movq <mulx319_stack=96(%rsp),>mulrax=%rax
movq 96(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D3)
mulq CRYPTO_NAMESPACE(batch_EC2D3)(%rip)
# qhasm: carry? c1 += mulrax
# asm 1: add <mulrax=int64#7,<c1=int64#8
# asm 2: add <mulrax=%rax,<c1=%r10
add %rax,%r10
# qhasm: mulr11 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
# asm 2: adc <mulrdx=%rdx,<mulr11=%r11
adc %rdx,%r11
# qhasm: mulrax = mulx319_stack
# asm 1: movq <mulx319_stack=stack64#13,>mulrax=int64#7
# asm 2: movq <mulx319_stack=96(%rsp),>mulrax=%rax
movq 96(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D4)
mulq CRYPTO_NAMESPACE(batch_EC2D4)(%rip)
# qhasm: carry? c2 += mulrax
# asm 1: add <mulrax=int64#7,<c2=int64#10
# asm 2: add <mulrax=%rax,<c2=%r12
add %rax,%r12
# qhasm: mulr21 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
# asm 2: adc <mulrdx=%rdx,<mulr21=%r13
adc %rdx,%r13
# qhasm: mulrax = c4_stack
# asm 1: movq <c4_stack=stack64#12,>mulrax=int64#7
# asm 2: movq <c4_stack=88(%rsp),>mulrax=%rax
movq 88(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D0)
mulq CRYPTO_NAMESPACE(batch_EC2D0)(%rip)
# qhasm: carry? c4 += mulrax
# asm 1: add <mulrax=int64#7,<c4=int64#14
# asm 2: add <mulrax=%rax,<c4=%rbx
add %rax,%rbx
# qhasm: mulr41 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
# asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
adc %rdx,%rbp
# qhasm: mulrax = mulx419_stack
# asm 1: movq <mulx419_stack=stack64#14,>mulrax=int64#7
# asm 2: movq <mulx419_stack=104(%rsp),>mulrax=%rax
movq 104(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D2)
mulq CRYPTO_NAMESPACE(batch_EC2D2)(%rip)
# qhasm: carry? c1 += mulrax
# asm 1: add <mulrax=int64#7,<c1=int64#8
# asm 2: add <mulrax=%rax,<c1=%r10
add %rax,%r10
# qhasm: mulr11 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
# asm 2: adc <mulrdx=%rdx,<mulr11=%r11
adc %rdx,%r11
# qhasm: mulrax = mulx419_stack
# asm 1: movq <mulx419_stack=stack64#14,>mulrax=int64#7
# asm 2: movq <mulx419_stack=104(%rsp),>mulrax=%rax
movq 104(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D3)
mulq CRYPTO_NAMESPACE(batch_EC2D3)(%rip)
# qhasm: carry? c2 += mulrax
# asm 1: add <mulrax=int64#7,<c2=int64#10
# asm 2: add <mulrax=%rax,<c2=%r12
add %rax,%r12
# qhasm: mulr21 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
# asm 2: adc <mulrdx=%rdx,<mulr21=%r13
adc %rdx,%r13
# qhasm: mulrax = mulx419_stack
# asm 1: movq <mulx419_stack=stack64#14,>mulrax=int64#7
# asm 2: movq <mulx419_stack=104(%rsp),>mulrax=%rax
movq 104(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&CRYPTO_NAMESPACE(batch_EC2D4)
mulq CRYPTO_NAMESPACE(batch_EC2D4)(%rip)
# qhasm: carry? c3 += mulrax
# asm 1: add <mulrax=int64#7,<c3=int64#12
# asm 2: add <mulrax=%rax,<c3=%r14
add %rax,%r14
# qhasm: mulr31 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
# asm 2: adc <mulrdx=%rdx,<mulr31=%r15
adc %rdx,%r15
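# All partial products have now been accumulated into the 128-bit column
# sums (c0,mulr01)..(c4,mulr41). The reduction below shifts each high word
# into position with shld $13, masks the low words to 51 bits with
# batch_REDMASK51 (2^51 - 1), folds the topmost carry back into limb 0 via
# a multiplication by 19, and finishes with one carry chain so every limb
# ends up only slightly above 51 bits at worst.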
# qhasm: mulredmask = *(uint64 *) &CRYPTO_NAMESPACE(batch_REDMASK51)
# asm 1: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=int64#3
# asm 2: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=%rdx
movq CRYPTO_NAMESPACE(batch_REDMASK51)(%rip),%rdx
# qhasm: mulr01 = (mulr01.c0) << 13
# asm 1: shld $13,<c0=int64#5,<mulr01=int64#6
# asm 2: shld $13,<c0=%r8,<mulr01=%r9
shld $13,%r8,%r9
# qhasm: c0 &= mulredmask
# asm 1: and <mulredmask=int64#3,<c0=int64#5
# asm 2: and <mulredmask=%rdx,<c0=%r8
and %rdx,%r8
# qhasm: mulr11 = (mulr11.c1) << 13
# asm 1: shld $13,<c1=int64#8,<mulr11=int64#9
# asm 2: shld $13,<c1=%r10,<mulr11=%r11
shld $13,%r10,%r11
# qhasm: c1 &= mulredmask
# asm 1: and <mulredmask=int64#3,<c1=int64#8
# asm 2: and <mulredmask=%rdx,<c1=%r10
and %rdx,%r10
# qhasm: c1 += mulr01
# asm 1: add <mulr01=int64#6,<c1=int64#8
# asm 2: add <mulr01=%r9,<c1=%r10
add %r9,%r10
# qhasm: mulr21 = (mulr21.c2) << 13
# asm 1: shld $13,<c2=int64#10,<mulr21=int64#11
# asm 2: shld $13,<c2=%r12,<mulr21=%r13
shld $13,%r12,%r13
# qhasm: c2 &= mulredmask
# asm 1: and <mulredmask=int64#3,<c2=int64#10
# asm 2: and <mulredmask=%rdx,<c2=%r12
and %rdx,%r12
# qhasm: c2 += mulr11
# asm 1: add <mulr11=int64#9,<c2=int64#10
# asm 2: add <mulr11=%r11,<c2=%r12
add %r11,%r12
# qhasm: mulr31 = (mulr31.c3) << 13
# asm 1: shld $13,<c3=int64#12,<mulr31=int64#13
# asm 2: shld $13,<c3=%r14,<mulr31=%r15
shld $13,%r14,%r15
# qhasm: c3 &= mulredmask
# asm 1: and <mulredmask=int64#3,<c3=int64#12
# asm 2: and <mulredmask=%rdx,<c3=%r14
and %rdx,%r14
# qhasm: c3 += mulr21
# asm 1: add <mulr21=int64#11,<c3=int64#12
# asm 2: add <mulr21=%r13,<c3=%r14
add %r13,%r14
# qhasm: mulr41 = (mulr41.c4) << 13
# asm 1: shld $13,<c4=int64#14,<mulr41=int64#15
# asm 2: shld $13,<c4=%rbx,<mulr41=%rbp
shld $13,%rbx,%rbp
# qhasm: c4 &= mulredmask
# asm 1: and <mulredmask=int64#3,<c4=int64#14
# asm 2: and <mulredmask=%rdx,<c4=%rbx
and %rdx,%rbx
# qhasm: c4 += mulr31
# asm 1: add <mulr31=int64#13,<c4=int64#14
# asm 2: add <mulr31=%r15,<c4=%rbx
add %r15,%rbx
# qhasm: mulr41 = mulr41 * 19
# asm 1: imulq $19,<mulr41=int64#15,>mulr41=int64#6
# asm 2: imulq $19,<mulr41=%rbp,>mulr41=%r9
imulq $19,%rbp,%r9
# qhasm: c0 += mulr41
# asm 1: add <mulr41=int64#6,<c0=int64#5
# asm 2: add <mulr41=%r9,<c0=%r8
add %r9,%r8
# qhasm: mult = c0
# asm 1: mov <c0=int64#5,>mult=int64#6
# asm 2: mov <c0=%r8,>mult=%r9
mov %r8,%r9
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#6
# asm 2: shr $51,<mult=%r9
shr $51,%r9
# qhasm: mult += c1
# asm 1: add <c1=int64#8,<mult=int64#6
# asm 2: add <c1=%r10,<mult=%r9
add %r10,%r9
# qhasm: c1 = mult
# asm 1: mov <mult=int64#6,>c1=int64#7
# asm 2: mov <mult=%r9,>c1=%rax
mov %r9,%rax
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#6
# asm 2: shr $51,<mult=%r9
shr $51,%r9
# qhasm: c0 &= mulredmask
# asm 1: and <mulredmask=int64#3,<c0=int64#5
# asm 2: and <mulredmask=%rdx,<c0=%r8
and %rdx,%r8
# qhasm: mult += c2
# asm 1: add <c2=int64#10,<mult=int64#6
# asm 2: add <c2=%r12,<mult=%r9
add %r12,%r9
# qhasm: c2 = mult
# asm 1: mov <mult=int64#6,>c2=int64#8
# asm 2: mov <mult=%r9,>c2=%r10
mov %r9,%r10
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#6
# asm 2: shr $51,<mult=%r9
shr $51,%r9
# qhasm: c1 &= mulredmask
# asm 1: and <mulredmask=int64#3,<c1=int64#7
# asm 2: and <mulredmask=%rdx,<c1=%rax
and %rdx,%rax
# qhasm: mult += c3
# asm 1: add <c3=int64#12,<mult=int64#6
# asm 2: add <c3=%r14,<mult=%r9
add %r14,%r9
# qhasm: c3 = mult
# asm 1: mov <mult=int64#6,>c3=int64#9
# asm 2: mov <mult=%r9,>c3=%r11
mov %r9,%r11
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#6
# asm 2: shr $51,<mult=%r9
shr $51,%r9
# qhasm: c2 &= mulredmask
# asm 1: and <mulredmask=int64#3,<c2=int64#8
# asm 2: and <mulredmask=%rdx,<c2=%r10
and %rdx,%r10
# qhasm: mult += c4
# asm 1: add <c4=int64#14,<mult=int64#6
# asm 2: add <c4=%rbx,<mult=%r9
add %rbx,%r9
# qhasm: c4 = mult
# asm 1: mov <mult=int64#6,>c4=int64#10
# asm 2: mov <mult=%r9,>c4=%r12
mov %r9,%r12
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#6
# asm 2: shr $51,<mult=%r9
shr $51,%r9
# qhasm: c3 &= mulredmask
# asm 1: and <mulredmask=int64#3,<c3=int64#9
# asm 2: and <mulredmask=%rdx,<c3=%r11
and %rdx,%r11
# qhasm: mult *= 19
# asm 1: imulq $19,<mult=int64#6,>mult=int64#6
# asm 2: imulq $19,<mult=%r9,>mult=%r9
imulq $19,%r9,%r9
# qhasm: c0 += mult
# asm 1: add <mult=int64#6,<c0=int64#5
# asm 2: add <mult=%r9,<c0=%r8
add %r9,%r8
# qhasm: c4 &= mulredmask
# asm 1: and <mulredmask=int64#3,<c4=int64#10
# asm 2: and <mulredmask=%rdx,<c4=%r12
and %rdx,%r12
# qhasm: c0_stack = c0
# asm 1: movq <c0=int64#5,>c0_stack=stack64#8
# asm 2: movq <c0=%r8,>c0_stack=56(%rsp)
movq %r8,56(%rsp)
# qhasm: c1_stack = c1
# asm 1: movq <c1=int64#7,>c1_stack=stack64#9
# asm 2: movq <c1=%rax,>c1_stack=64(%rsp)
movq %rax,64(%rsp)
# qhasm: c2_stack = c2
# asm 1: movq <c2=int64#8,>c2_stack=stack64#10
# asm 2: movq <c2=%r10,>c2_stack=72(%rsp)
movq %r10,72(%rsp)
# qhasm: c3_stack = c3
# asm 1: movq <c3=int64#9,>c3_stack=stack64#11
# asm 2: movq <c3=%r11,>c3_stack=80(%rsp)
movq %r11,80(%rsp)
# qhasm: c4_stack = c4
# asm 1: movq <c4=int64#10,>c4_stack=stack64#12
# asm 2: movq <c4=%r12,>c4_stack=88(%rsp)
movq %r12,88(%rsp)
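# c (now the product with batch_EC2D*) is written back to the same stack
# slots. Next comes another 5x5 field multiplication, this time of the
# limbs at offsets 80..112 of pp with the limbs at offsets 80..112 of qp
# (the third field element of each input, i.e. the Z coordinates in the
# usual ge25519 layout), accumulated into rt0..rt4 / mulr01..mulr41.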
# qhasm: mulrax = *(uint64 *)(pp + 104)
# asm 1: movq 104(<pp=int64#2),>mulrax=int64#3
# asm 2: movq 104(<pp=%rsi),>mulrax=%rdx
movq 104(%rsi),%rdx
# qhasm: mulrax *= 19
# asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
# asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
imulq $19,%rdx,%rax
# qhasm: mulx319_stack = mulrax
# asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#13
# asm 2: movq <mulrax=%rax,>mulx319_stack=96(%rsp)
movq %rax,96(%rsp)
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 96)
# asm 1: mulq 96(<qp=int64#4)
# asm 2: mulq 96(<qp=%rcx)
mulq 96(%rcx)
# qhasm: rt0 = mulrax
# asm 1: mov <mulrax=int64#7,>rt0=int64#5
# asm 2: mov <mulrax=%rax,>rt0=%r8
mov %rax,%r8
# qhasm: mulr01 = mulrdx
# asm 1: mov <mulrdx=int64#3,>mulr01=int64#6
# asm 2: mov <mulrdx=%rdx,>mulr01=%r9
mov %rdx,%r9
# qhasm: mulrax = *(uint64 *)(pp + 112)
# asm 1: movq 112(<pp=int64#2),>mulrax=int64#3
# asm 2: movq 112(<pp=%rsi),>mulrax=%rdx
movq 112(%rsi),%rdx
# qhasm: mulrax *= 19
# asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
# asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
imulq $19,%rdx,%rax
# qhasm: mulx419_stack = mulrax
# asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#14
# asm 2: movq <mulrax=%rax,>mulx419_stack=104(%rsp)
movq %rax,104(%rsp)
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 88)
# asm 1: mulq 88(<qp=int64#4)
# asm 2: mulq 88(<qp=%rcx)
mulq 88(%rcx)
# qhasm: carry? rt0 += mulrax
# asm 1: add <mulrax=int64#7,<rt0=int64#5
# asm 2: add <mulrax=%rax,<rt0=%r8
add %rax,%r8
# qhasm: mulr01 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
# asm 2: adc <mulrdx=%rdx,<mulr01=%r9
adc %rdx,%r9
# qhasm: mulrax = *(uint64 *)(pp + 80)
# asm 1: movq 80(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 80(<pp=%rsi),>mulrax=%rax
movq 80(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 80)
# asm 1: mulq 80(<qp=int64#4)
# asm 2: mulq 80(<qp=%rcx)
mulq 80(%rcx)
# qhasm: carry? rt0 += mulrax
# asm 1: add <mulrax=int64#7,<rt0=int64#5
# asm 2: add <mulrax=%rax,<rt0=%r8
add %rax,%r8
# qhasm: mulr01 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
# asm 2: adc <mulrdx=%rdx,<mulr01=%r9
adc %rdx,%r9
# qhasm: mulrax = *(uint64 *)(pp + 80)
# asm 1: movq 80(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 80(<pp=%rsi),>mulrax=%rax
movq 80(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 88)
# asm 1: mulq 88(<qp=int64#4)
# asm 2: mulq 88(<qp=%rcx)
mulq 88(%rcx)
# qhasm: rt1 = mulrax
# asm 1: mov <mulrax=int64#7,>rt1=int64#8
# asm 2: mov <mulrax=%rax,>rt1=%r10
mov %rax,%r10
# qhasm: mulr11 = mulrdx
# asm 1: mov <mulrdx=int64#3,>mulr11=int64#9
# asm 2: mov <mulrdx=%rdx,>mulr11=%r11
mov %rdx,%r11
# qhasm: mulrax = *(uint64 *)(pp + 80)
# asm 1: movq 80(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 80(<pp=%rsi),>mulrax=%rax
movq 80(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 96)
# asm 1: mulq 96(<qp=int64#4)
# asm 2: mulq 96(<qp=%rcx)
mulq 96(%rcx)
# qhasm: rt2 = mulrax
# asm 1: mov <mulrax=int64#7,>rt2=int64#10
# asm 2: mov <mulrax=%rax,>rt2=%r12
mov %rax,%r12
# qhasm: mulr21 = mulrdx
# asm 1: mov <mulrdx=int64#3,>mulr21=int64#11
# asm 2: mov <mulrdx=%rdx,>mulr21=%r13
mov %rdx,%r13
# qhasm: mulrax = *(uint64 *)(pp + 80)
# asm 1: movq 80(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 80(<pp=%rsi),>mulrax=%rax
movq 80(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 104)
# asm 1: mulq 104(<qp=int64#4)
# asm 2: mulq 104(<qp=%rcx)
mulq 104(%rcx)
# qhasm: rt3 = mulrax
# asm 1: mov <mulrax=int64#7,>rt3=int64#12
# asm 2: mov <mulrax=%rax,>rt3=%r14
mov %rax,%r14
# qhasm: mulr31 = mulrdx
# asm 1: mov <mulrdx=int64#3,>mulr31=int64#13
# asm 2: mov <mulrdx=%rdx,>mulr31=%r15
mov %rdx,%r15
# qhasm: mulrax = *(uint64 *)(pp + 80)
# asm 1: movq 80(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 80(<pp=%rsi),>mulrax=%rax
movq 80(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 112)
# asm 1: mulq 112(<qp=int64#4)
# asm 2: mulq 112(<qp=%rcx)
mulq 112(%rcx)
# qhasm: rt4 = mulrax
# asm 1: mov <mulrax=int64#7,>rt4=int64#14
# asm 2: mov <mulrax=%rax,>rt4=%rbx
mov %rax,%rbx
# qhasm: mulr41 = mulrdx
# asm 1: mov <mulrdx=int64#3,>mulr41=int64#15
# asm 2: mov <mulrdx=%rdx,>mulr41=%rbp
mov %rdx,%rbp
# qhasm: mulrax = *(uint64 *)(pp + 88)
# asm 1: movq 88(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 88(<pp=%rsi),>mulrax=%rax
movq 88(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 80)
# asm 1: mulq 80(<qp=int64#4)
# asm 2: mulq 80(<qp=%rcx)
mulq 80(%rcx)
# qhasm: carry? rt1 += mulrax
# asm 1: add <mulrax=int64#7,<rt1=int64#8
# asm 2: add <mulrax=%rax,<rt1=%r10
add %rax,%r10
# qhasm: mulr11 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
# asm 2: adc <mulrdx=%rdx,<mulr11=%r11
adc %rdx,%r11
# qhasm: mulrax = *(uint64 *)(pp + 88)
# asm 1: movq 88(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 88(<pp=%rsi),>mulrax=%rax
movq 88(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 88)
# asm 1: mulq 88(<qp=int64#4)
# asm 2: mulq 88(<qp=%rcx)
mulq 88(%rcx)
# qhasm: carry? rt2 += mulrax
# asm 1: add <mulrax=int64#7,<rt2=int64#10
# asm 2: add <mulrax=%rax,<rt2=%r12
add %rax,%r12
# qhasm: mulr21 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
# asm 2: adc <mulrdx=%rdx,<mulr21=%r13
adc %rdx,%r13
# qhasm: mulrax = *(uint64 *)(pp + 88)
# asm 1: movq 88(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 88(<pp=%rsi),>mulrax=%rax
movq 88(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 96)
# asm 1: mulq 96(<qp=int64#4)
# asm 2: mulq 96(<qp=%rcx)
mulq 96(%rcx)
# qhasm: carry? rt3 += mulrax
# asm 1: add <mulrax=int64#7,<rt3=int64#12
# asm 2: add <mulrax=%rax,<rt3=%r14
add %rax,%r14
# qhasm: mulr31 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
# asm 2: adc <mulrdx=%rdx,<mulr31=%r15
adc %rdx,%r15
# qhasm: mulrax = *(uint64 *)(pp + 88)
# asm 1: movq 88(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 88(<pp=%rsi),>mulrax=%rax
movq 88(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 104)
# asm 1: mulq 104(<qp=int64#4)
# asm 2: mulq 104(<qp=%rcx)
mulq 104(%rcx)
# qhasm: carry? rt4 += mulrax
# asm 1: add <mulrax=int64#7,<rt4=int64#14
# asm 2: add <mulrax=%rax,<rt4=%rbx
add %rax,%rbx
# qhasm: mulr41 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
# asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
adc %rdx,%rbp
# qhasm: mulrax = *(uint64 *)(pp + 88)
# asm 1: movq 88(<pp=int64#2),>mulrax=int64#3
# asm 2: movq 88(<pp=%rsi),>mulrax=%rdx
movq 88(%rsi),%rdx
# qhasm: mulrax *= 19
# asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
# asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
imulq $19,%rdx,%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 112)
# asm 1: mulq 112(<qp=int64#4)
# asm 2: mulq 112(<qp=%rcx)
mulq 112(%rcx)
# qhasm: carry? rt0 += mulrax
# asm 1: add <mulrax=int64#7,<rt0=int64#5
# asm 2: add <mulrax=%rax,<rt0=%r8
add %rax,%r8
# qhasm: mulr01 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
# asm 2: adc <mulrdx=%rdx,<mulr01=%r9
adc %rdx,%r9
# qhasm: mulrax = *(uint64 *)(pp + 96)
# asm 1: movq 96(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 96(<pp=%rsi),>mulrax=%rax
movq 96(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 80)
# asm 1: mulq 80(<qp=int64#4)
# asm 2: mulq 80(<qp=%rcx)
mulq 80(%rcx)
# qhasm: carry? rt2 += mulrax
# asm 1: add <mulrax=int64#7,<rt2=int64#10
# asm 2: add <mulrax=%rax,<rt2=%r12
add %rax,%r12
# qhasm: mulr21 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
# asm 2: adc <mulrdx=%rdx,<mulr21=%r13
adc %rdx,%r13
# qhasm: mulrax = *(uint64 *)(pp + 96)
# asm 1: movq 96(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 96(<pp=%rsi),>mulrax=%rax
movq 96(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 88)
# asm 1: mulq 88(<qp=int64#4)
# asm 2: mulq 88(<qp=%rcx)
mulq 88(%rcx)
# qhasm: carry? rt3 += mulrax
# asm 1: add <mulrax=int64#7,<rt3=int64#12
# asm 2: add <mulrax=%rax,<rt3=%r14
add %rax,%r14
# qhasm: mulr31 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
# asm 2: adc <mulrdx=%rdx,<mulr31=%r15
adc %rdx,%r15
# qhasm: mulrax = *(uint64 *)(pp + 96)
# asm 1: movq 96(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 96(<pp=%rsi),>mulrax=%rax
movq 96(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 96)
# asm 1: mulq 96(<qp=int64#4)
# asm 2: mulq 96(<qp=%rcx)
mulq 96(%rcx)
# qhasm: carry? rt4 += mulrax
# asm 1: add <mulrax=int64#7,<rt4=int64#14
# asm 2: add <mulrax=%rax,<rt4=%rbx
add %rax,%rbx
# qhasm: mulr41 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
# asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
adc %rdx,%rbp
# qhasm: mulrax = *(uint64 *)(pp + 96)
# asm 1: movq 96(<pp=int64#2),>mulrax=int64#3
# asm 2: movq 96(<pp=%rsi),>mulrax=%rdx
movq 96(%rsi),%rdx
# qhasm: mulrax *= 19
# asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
# asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
imulq $19,%rdx,%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 104)
# asm 1: mulq 104(<qp=int64#4)
# asm 2: mulq 104(<qp=%rcx)
mulq 104(%rcx)
# qhasm: carry? rt0 += mulrax
# asm 1: add <mulrax=int64#7,<rt0=int64#5
# asm 2: add <mulrax=%rax,<rt0=%r8
add %rax,%r8
# qhasm: mulr01 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr01=int64#6
# asm 2: adc <mulrdx=%rdx,<mulr01=%r9
adc %rdx,%r9
# qhasm: mulrax = *(uint64 *)(pp + 96)
# asm 1: movq 96(<pp=int64#2),>mulrax=int64#3
# asm 2: movq 96(<pp=%rsi),>mulrax=%rdx
movq 96(%rsi),%rdx
# qhasm: mulrax *= 19
# asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
# asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
imulq $19,%rdx,%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 112)
# asm 1: mulq 112(<qp=int64#4)
# asm 2: mulq 112(<qp=%rcx)
mulq 112(%rcx)
# qhasm: carry? rt1 += mulrax
# asm 1: add <mulrax=int64#7,<rt1=int64#8
# asm 2: add <mulrax=%rax,<rt1=%r10
add %rax,%r10
# qhasm: mulr11 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
# asm 2: adc <mulrdx=%rdx,<mulr11=%r11
adc %rdx,%r11
# qhasm: mulrax = *(uint64 *)(pp + 104)
# asm 1: movq 104(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 104(<pp=%rsi),>mulrax=%rax
movq 104(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 80)
# asm 1: mulq 80(<qp=int64#4)
# asm 2: mulq 80(<qp=%rcx)
mulq 80(%rcx)
# qhasm: carry? rt3 += mulrax
# asm 1: add <mulrax=int64#7,<rt3=int64#12
# asm 2: add <mulrax=%rax,<rt3=%r14
add %rax,%r14
# qhasm: mulr31 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
# asm 2: adc <mulrdx=%rdx,<mulr31=%r15
adc %rdx,%r15
# qhasm: mulrax = *(uint64 *)(pp + 104)
# asm 1: movq 104(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 104(<pp=%rsi),>mulrax=%rax
movq 104(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 88)
# asm 1: mulq 88(<qp=int64#4)
# asm 2: mulq 88(<qp=%rcx)
mulq 88(%rcx)
# qhasm: carry? rt4 += mulrax
# asm 1: add <mulrax=int64#7,<rt4=int64#14
# asm 2: add <mulrax=%rax,<rt4=%rbx
add %rax,%rbx
# qhasm: mulr41 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
# asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
adc %rdx,%rbp
# qhasm: mulrax = mulx319_stack
# asm 1: movq <mulx319_stack=stack64#13,>mulrax=int64#7
# asm 2: movq <mulx319_stack=96(%rsp),>mulrax=%rax
movq 96(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 104)
# asm 1: mulq 104(<qp=int64#4)
# asm 2: mulq 104(<qp=%rcx)
mulq 104(%rcx)
# qhasm: carry? rt1 += mulrax
# asm 1: add <mulrax=int64#7,<rt1=int64#8
# asm 2: add <mulrax=%rax,<rt1=%r10
add %rax,%r10
# qhasm: mulr11 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
# asm 2: adc <mulrdx=%rdx,<mulr11=%r11
adc %rdx,%r11
# qhasm: mulrax = mulx319_stack
# asm 1: movq <mulx319_stack=stack64#13,>mulrax=int64#7
# asm 2: movq <mulx319_stack=96(%rsp),>mulrax=%rax
movq 96(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 112)
# asm 1: mulq 112(<qp=int64#4)
# asm 2: mulq 112(<qp=%rcx)
mulq 112(%rcx)
# qhasm: carry? rt2 += mulrax
# asm 1: add <mulrax=int64#7,<rt2=int64#10
# asm 2: add <mulrax=%rax,<rt2=%r12
add %rax,%r12
# qhasm: mulr21 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
# asm 2: adc <mulrdx=%rdx,<mulr21=%r13
adc %rdx,%r13
# qhasm: mulrax = *(uint64 *)(pp + 112)
# asm 1: movq 112(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 112(<pp=%rsi),>mulrax=%rax
movq 112(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 80)
# asm 1: mulq 80(<qp=int64#4)
# asm 2: mulq 80(<qp=%rcx)
mulq 80(%rcx)
# qhasm: carry? rt4 += mulrax
# asm 1: add <mulrax=int64#7,<rt4=int64#14
# asm 2: add <mulrax=%rax,<rt4=%rbx
add %rax,%rbx
# qhasm: mulr41 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr41=int64#15
# asm 2: adc <mulrdx=%rdx,<mulr41=%rbp
adc %rdx,%rbp
# qhasm: mulrax = mulx419_stack
# asm 1: movq <mulx419_stack=stack64#14,>mulrax=int64#7
# asm 2: movq <mulx419_stack=104(%rsp),>mulrax=%rax
movq 104(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 96)
# asm 1: mulq 96(<qp=int64#4)
# asm 2: mulq 96(<qp=%rcx)
mulq 96(%rcx)
# qhasm: carry? rt1 += mulrax
# asm 1: add <mulrax=int64#7,<rt1=int64#8
# asm 2: add <mulrax=%rax,<rt1=%r10
add %rax,%r10
# qhasm: mulr11 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr11=int64#9
# asm 2: adc <mulrdx=%rdx,<mulr11=%r11
adc %rdx,%r11
# qhasm: mulrax = mulx419_stack
# asm 1: movq <mulx419_stack=stack64#14,>mulrax=int64#7
# asm 2: movq <mulx419_stack=104(%rsp),>mulrax=%rax
movq 104(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 104)
# asm 1: mulq 104(<qp=int64#4)
# asm 2: mulq 104(<qp=%rcx)
mulq 104(%rcx)
# qhasm: carry? rt2 += mulrax
# asm 1: add <mulrax=int64#7,<rt2=int64#10
# asm 2: add <mulrax=%rax,<rt2=%r12
add %rax,%r12
# qhasm: mulr21 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr21=int64#11
# asm 2: adc <mulrdx=%rdx,<mulr21=%r13
adc %rdx,%r13
# qhasm: mulrax = mulx419_stack
# asm 1: movq <mulx419_stack=stack64#14,>mulrax=int64#7
# asm 2: movq <mulx419_stack=104(%rsp),>mulrax=%rax
movq 104(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(qp + 112)
# asm 1: mulq 112(<qp=int64#4)
# asm 2: mulq 112(<qp=%rcx)
mulq 112(%rcx)
# qhasm: carry? rt3 += mulrax
# asm 1: add <mulrax=int64#7,<rt3=int64#12
# asm 2: add <mulrax=%rax,<rt3=%r14
add %rax,%r14
# qhasm: mulr31 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr31=int64#13
# asm 2: adc <mulrdx=%rdx,<mulr31=%r15
adc %rdx,%r15
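# Same reduction pattern as before, now applied to the rt limbs: fold the
# 128-bit column sums down to radix 2^51 using shld $13, batch_REDMASK51
# and the *19 wraparound, followed by a single carry chain.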
# qhasm: mulredmask = *(uint64 *) &CRYPTO_NAMESPACE(batch_REDMASK51)
# asm 1: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=int64#2
# asm 2: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=%rsi
movq CRYPTO_NAMESPACE(batch_REDMASK51)(%rip),%rsi
# qhasm: mulr01 = (mulr01.rt0) << 13
# asm 1: shld $13,<rt0=int64#5,<mulr01=int64#6
# asm 2: shld $13,<rt0=%r8,<mulr01=%r9
shld $13,%r8,%r9
# qhasm: rt0 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rt0=int64#5
# asm 2: and <mulredmask=%rsi,<rt0=%r8
and %rsi,%r8
# qhasm: mulr11 = (mulr11.rt1) << 13
# asm 1: shld $13,<rt1=int64#8,<mulr11=int64#9
# asm 2: shld $13,<rt1=%r10,<mulr11=%r11
shld $13,%r10,%r11
# qhasm: rt1 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rt1=int64#8
# asm 2: and <mulredmask=%rsi,<rt1=%r10
and %rsi,%r10
# qhasm: rt1 += mulr01
# asm 1: add <mulr01=int64#6,<rt1=int64#8
# asm 2: add <mulr01=%r9,<rt1=%r10
add %r9,%r10
# qhasm: mulr21 = (mulr21.rt2) << 13
# asm 1: shld $13,<rt2=int64#10,<mulr21=int64#11
# asm 2: shld $13,<rt2=%r12,<mulr21=%r13
shld $13,%r12,%r13
# qhasm: rt2 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rt2=int64#10
# asm 2: and <mulredmask=%rsi,<rt2=%r12
and %rsi,%r12
# qhasm: rt2 += mulr11
# asm 1: add <mulr11=int64#9,<rt2=int64#10
# asm 2: add <mulr11=%r11,<rt2=%r12
add %r11,%r12
# qhasm: mulr31 = (mulr31.rt3) << 13
# asm 1: shld $13,<rt3=int64#12,<mulr31=int64#13
# asm 2: shld $13,<rt3=%r14,<mulr31=%r15
shld $13,%r14,%r15
# qhasm: rt3 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rt3=int64#12
# asm 2: and <mulredmask=%rsi,<rt3=%r14
and %rsi,%r14
# qhasm: rt3 += mulr21
# asm 1: add <mulr21=int64#11,<rt3=int64#12
# asm 2: add <mulr21=%r13,<rt3=%r14
add %r13,%r14
# qhasm: mulr41 = (mulr41.rt4) << 13
# asm 1: shld $13,<rt4=int64#14,<mulr41=int64#15
# asm 2: shld $13,<rt4=%rbx,<mulr41=%rbp
shld $13,%rbx,%rbp
# qhasm: rt4 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rt4=int64#14
# asm 2: and <mulredmask=%rsi,<rt4=%rbx
and %rsi,%rbx
# qhasm: rt4 += mulr31
# asm 1: add <mulr31=int64#13,<rt4=int64#14
# asm 2: add <mulr31=%r15,<rt4=%rbx
add %r15,%rbx
# qhasm: mulr41 = mulr41 * 19
# asm 1: imulq $19,<mulr41=int64#15,>mulr41=int64#3
# asm 2: imulq $19,<mulr41=%rbp,>mulr41=%rdx
imulq $19,%rbp,%rdx
# qhasm: rt0 += mulr41
# asm 1: add <mulr41=int64#3,<rt0=int64#5
# asm 2: add <mulr41=%rdx,<rt0=%r8
add %rdx,%r8
# qhasm: mult = rt0
# asm 1: mov <rt0=int64#5,>mult=int64#3
# asm 2: mov <rt0=%r8,>mult=%rdx
mov %r8,%rdx
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#3
# asm 2: shr $51,<mult=%rdx
shr $51,%rdx
# qhasm: mult += rt1
# asm 1: add <rt1=int64#8,<mult=int64#3
# asm 2: add <rt1=%r10,<mult=%rdx
add %r10,%rdx
# qhasm: rt1 = mult
# asm 1: mov <mult=int64#3,>rt1=int64#4
# asm 2: mov <mult=%rdx,>rt1=%rcx
mov %rdx,%rcx
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#3
# asm 2: shr $51,<mult=%rdx
shr $51,%rdx
# qhasm: rt0 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rt0=int64#5
# asm 2: and <mulredmask=%rsi,<rt0=%r8
and %rsi,%r8
# qhasm: mult += rt2
# asm 1: add <rt2=int64#10,<mult=int64#3
# asm 2: add <rt2=%r12,<mult=%rdx
add %r12,%rdx
# qhasm: rt2 = mult
# asm 1: mov <mult=int64#3,>rt2=int64#6
# asm 2: mov <mult=%rdx,>rt2=%r9
mov %rdx,%r9
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#3
# asm 2: shr $51,<mult=%rdx
shr $51,%rdx
# qhasm: rt1 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rt1=int64#4
# asm 2: and <mulredmask=%rsi,<rt1=%rcx
and %rsi,%rcx
# qhasm: mult += rt3
# asm 1: add <rt3=int64#12,<mult=int64#3
# asm 2: add <rt3=%r14,<mult=%rdx
add %r14,%rdx
# qhasm: rt3 = mult
# asm 1: mov <mult=int64#3,>rt3=int64#7
# asm 2: mov <mult=%rdx,>rt3=%rax
mov %rdx,%rax
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#3
# asm 2: shr $51,<mult=%rdx
shr $51,%rdx
# qhasm: rt2 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rt2=int64#6
# asm 2: and <mulredmask=%rsi,<rt2=%r9
and %rsi,%r9
# qhasm: mult += rt4
# asm 1: add <rt4=int64#14,<mult=int64#3
# asm 2: add <rt4=%rbx,<mult=%rdx
add %rbx,%rdx
# qhasm: rt4 = mult
# asm 1: mov <mult=int64#3,>rt4=int64#8
# asm 2: mov <mult=%rdx,>rt4=%r10
mov %rdx,%r10
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#3
# asm 2: shr $51,<mult=%rdx
shr $51,%rdx
# qhasm: rt3 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rt3=int64#7
# asm 2: and <mulredmask=%rsi,<rt3=%rax
and %rsi,%rax
# qhasm: mult *= 19
# asm 1: imulq $19,<mult=int64#3,>mult=int64#3
# asm 2: imulq $19,<mult=%rdx,>mult=%rdx
imulq $19,%rdx,%rdx
# qhasm: rt0 += mult
# asm 1: add <mult=int64#3,<rt0=int64#5
# asm 2: add <mult=%rdx,<rt0=%r8
add %rdx,%r8
# qhasm: rt4 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rt4=int64#8
# asm 2: and <mulredmask=%rsi,<rt4=%r10
and %rsi,%r10
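# rt now holds the reduced product of the two inputs. The limb-wise
# doubling below multiplies it by 2; since the limbs are only slightly
# above 51 bits, doubling cannot overflow the 64-bit registers and no
# carry propagation is needed here.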
# qhasm: rt0 += rt0
# asm 1: add <rt0=int64#5,<rt0=int64#5
# asm 2: add <rt0=%r8,<rt0=%r8
add %r8,%r8
# qhasm: rt1 += rt1
# asm 1: add <rt1=int64#4,<rt1=int64#4
# asm 2: add <rt1=%rcx,<rt1=%rcx
add %rcx,%rcx
# qhasm: rt2 += rt2
# asm 1: add <rt2=int64#6,<rt2=int64#6
# asm 2: add <rt2=%r9,<rt2=%r9
add %r9,%r9
# qhasm: rt3 += rt3
# asm 1: add <rt3=int64#7,<rt3=int64#7
# asm 2: add <rt3=%rax,<rt3=%rax
add %rax,%rax
# qhasm: rt4 += rt4
# asm 1: add <rt4=int64#8,<rt4=int64#8
# asm 2: add <rt4=%r10,<rt4=%r10
add %r10,%r10
# qhasm: rz0 = rt0
# asm 1: mov <rt0=int64#5,>rz0=int64#2
# asm 2: mov <rt0=%r8,>rz0=%rsi
mov %r8,%rsi
# qhasm: rz1 = rt1
# asm 1: mov <rt1=int64#4,>rz1=int64#3
# asm 2: mov <rt1=%rcx,>rz1=%rdx
mov %rcx,%rdx
# qhasm: rz2 = rt2
# asm 1: mov <rt2=int64#6,>rz2=int64#9
# asm 2: mov <rt2=%r9,>rz2=%r11
mov %r9,%r11
# qhasm: rz3 = rt3
# asm 1: mov <rt3=int64#7,>rz3=int64#10
# asm 2: mov <rt3=%rax,>rz3=%r12
mov %rax,%r12
# qhasm: rz4 = rt4
# asm 1: mov <rt4=int64#8,>rz4=int64#11
# asm 2: mov <rt4=%r10,>rz4=%r13
mov %r10,%r13
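# rz is a copy of the doubled product. Before the per-limb subtraction
# rt -= c, a multiple of the prime is added to rt (batch_2P0 for limb 0,
# batch_2P1234 for limbs 1..4, i.e. the radix-2^51 limbs of 2*p) so the
# subtraction cannot underflow; rz simply gets c added. These correspond
# to the familiar D + C and D - C terms of the extended-coordinates
# addition formulas, with c playing the role of C and the doubled product
# playing D.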
# qhasm: rt0 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P0)
# asm 1: add CRYPTO_NAMESPACE(batch_2P0),<rt0=int64#5
# asm 2: add CRYPTO_NAMESPACE(batch_2P0),<rt0=%r8
add CRYPTO_NAMESPACE(batch_2P0)(%rip),%r8
# qhasm: rt1 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P1234)
# asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<rt1=int64#4
# asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<rt1=%rcx
add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%rcx
# qhasm: rt2 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P1234)
# asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<rt2=int64#6
# asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<rt2=%r9
add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%r9
# qhasm: rt3 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P1234)
# asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<rt3=int64#7
# asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<rt3=%rax
add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%rax
# qhasm: rt4 += *(uint64 *)&CRYPTO_NAMESPACE(batch_2P1234)
# asm 1: add CRYPTO_NAMESPACE(batch_2P1234),<rt4=int64#8
# asm 2: add CRYPTO_NAMESPACE(batch_2P1234),<rt4=%r10
add CRYPTO_NAMESPACE(batch_2P1234)(%rip),%r10
# qhasm: rz0 += c0_stack
# asm 1: addq <c0_stack=stack64#8,<rz0=int64#2
# asm 2: addq <c0_stack=56(%rsp),<rz0=%rsi
addq 56(%rsp),%rsi
# qhasm: rz1 += c1_stack
# asm 1: addq <c1_stack=stack64#9,<rz1=int64#3
# asm 2: addq <c1_stack=64(%rsp),<rz1=%rdx
addq 64(%rsp),%rdx
# qhasm: rz2 += c2_stack
# asm 1: addq <c2_stack=stack64#10,<rz2=int64#9
# asm 2: addq <c2_stack=72(%rsp),<rz2=%r11
addq 72(%rsp),%r11
# qhasm: rz3 += c3_stack
# asm 1: addq <c3_stack=stack64#11,<rz3=int64#10
# asm 2: addq <c3_stack=80(%rsp),<rz3=%r12
addq 80(%rsp),%r12
# qhasm: rz4 += c4_stack
# asm 1: addq <c4_stack=stack64#12,<rz4=int64#11
# asm 2: addq <c4_stack=88(%rsp),<rz4=%r13
addq 88(%rsp),%r13
# qhasm: rt0 -= c0_stack
# asm 1: subq <c0_stack=stack64#8,<rt0=int64#5
# asm 2: subq <c0_stack=56(%rsp),<rt0=%r8
subq 56(%rsp),%r8
# qhasm: rt1 -= c1_stack
# asm 1: subq <c1_stack=stack64#9,<rt1=int64#4
# asm 2: subq <c1_stack=64(%rsp),<rt1=%rcx
subq 64(%rsp),%rcx
# qhasm: rt2 -= c2_stack
# asm 1: subq <c2_stack=stack64#10,<rt2=int64#6
# asm 2: subq <c2_stack=72(%rsp),<rt2=%r9
subq 72(%rsp),%r9
# qhasm: rt3 -= c3_stack
# asm 1: subq <c3_stack=stack64#11,<rt3=int64#7
# asm 2: subq <c3_stack=80(%rsp),<rt3=%rax
subq 80(%rsp),%rax
# qhasm: rt4 -= c4_stack
# asm 1: subq <c4_stack=stack64#12,<rt4=int64#8
# asm 2: subq <c4_stack=88(%rsp),<rt4=%r10
subq 88(%rsp),%r10
# qhasm: *(uint64 *)(rp + 40) = rz0
# asm 1: movq <rz0=int64#2,40(<rp=int64#1)
# asm 2: movq <rz0=%rsi,40(<rp=%rdi)
movq %rsi,40(%rdi)
# qhasm: *(uint64 *)(rp + 48) = rz1
# asm 1: movq <rz1=int64#3,48(<rp=int64#1)
# asm 2: movq <rz1=%rdx,48(<rp=%rdi)
movq %rdx,48(%rdi)
# qhasm: *(uint64 *)(rp + 56) = rz2
# asm 1: movq <rz2=int64#9,56(<rp=int64#1)
# asm 2: movq <rz2=%r11,56(<rp=%rdi)
movq %r11,56(%rdi)
# qhasm: *(uint64 *)(rp + 64) = rz3
# asm 1: movq <rz3=int64#10,64(<rp=int64#1)
# asm 2: movq <rz3=%r12,64(<rp=%rdi)
movq %r12,64(%rdi)
# qhasm: *(uint64 *)(rp + 72) = rz4
# asm 1: movq <rz4=int64#11,72(<rp=int64#1)
# asm 2: movq <rz4=%r13,72(<rp=%rdi)
movq %r13,72(%rdi)
# qhasm: *(uint64 *)(rp + 120) = rt0
# asm 1: movq <rt0=int64#5,120(<rp=int64#1)
# asm 2: movq <rt0=%r8,120(<rp=%rdi)
movq %r8,120(%rdi)
# qhasm: *(uint64 *)(rp + 128) = rt1
# asm 1: movq <rt1=int64#4,128(<rp=int64#1)
# asm 2: movq <rt1=%rcx,128(<rp=%rdi)
movq %rcx,128(%rdi)
# qhasm: *(uint64 *)(rp + 136) = rt2
# asm 1: movq <rt2=int64#6,136(<rp=int64#1)
# asm 2: movq <rt2=%r9,136(<rp=%rdi)
movq %r9,136(%rdi)
# qhasm: *(uint64 *)(rp + 144) = rt3
# asm 1: movq <rt3=int64#7,144(<rp=int64#1)
# asm 2: movq <rt3=%rax,144(<rp=%rdi)
movq %rax,144(%rdi)
# qhasm: *(uint64 *)(rp + 152) = rt4
# asm 1: movq <rt4=int64#8,152(<rp=int64#1)
# asm 2: movq <rt4=%r10,152(<rp=%rdi)
movq %r10,152(%rdi)
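# With the sum stored at rp+40..72 and the difference at rp+120..152, the
# epilogue below reloads the registers the prologue spilled at the bottom
# of the frame: r11 (which, from the closing add %r11,%rsp, evidently holds
# the stack-frame adjustment) and the callee-saved r12-r15, rbx and rbp.
# The trailing moves of rdi/rsi into rax/rdx appear to be part of qhasm's
# generic leave sequence rather than meaningful return values.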
# qhasm: caller1 = caller1_stack
# asm 1: movq <caller1_stack=stack64#1,>caller1=int64#9
# asm 2: movq <caller1_stack=0(%rsp),>caller1=%r11
movq 0(%rsp),%r11
# qhasm: caller2 = caller2_stack
# asm 1: movq <caller2_stack=stack64#2,>caller2=int64#10
# asm 2: movq <caller2_stack=8(%rsp),>caller2=%r12
movq 8(%rsp),%r12
# qhasm: caller3 = caller3_stack
# asm 1: movq <caller3_stack=stack64#3,>caller3=int64#11
# asm 2: movq <caller3_stack=16(%rsp),>caller3=%r13
movq 16(%rsp),%r13
# qhasm: caller4 = caller4_stack
# asm 1: movq <caller4_stack=stack64#4,>caller4=int64#12
# asm 2: movq <caller4_stack=24(%rsp),>caller4=%r14
movq 24(%rsp),%r14
# qhasm: caller5 = caller5_stack
# asm 1: movq <caller5_stack=stack64#5,>caller5=int64#13
# asm 2: movq <caller5_stack=32(%rsp),>caller5=%r15
movq 32(%rsp),%r15
# qhasm: caller6 = caller6_stack
# asm 1: movq <caller6_stack=stack64#6,>caller6=int64#14
# asm 2: movq <caller6_stack=40(%rsp),>caller6=%rbx
movq 40(%rsp),%rbx
# qhasm: caller7 = caller7_stack
# asm 1: movq <caller7_stack=stack64#7,>caller7=int64#15
# asm 2: movq <caller7_stack=48(%rsp),>caller7=%rbp
movq 48(%rsp),%rbp
# qhasm: leave
add %r11,%rsp
mov %rdi,%rax
mov %rsi,%rdx
ret