ge25519_p1p1_to_p2.S

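# Added commentary (not part of the qhasm-generated output): this routine appears to
# implement the p1p1 -> p2 ("completed" -> projective) conversion for ge25519, i.e.
# three multiplications in GF(2^255-19): X3 = X*T, Y3 = Y*Z, Z3 = Z*T. Field elements
# are stored as five 51-bit limbs (radix 2^51); REDMASK51 is 2^51-1, and the repeated
# factor 19 comes from the reduction 2^255 = 19 (mod p). Under the SysV AMD64 ABI the
# output pointer rp arrives in %rdi and the input pointer pp in %rsi. Each product is
# accumulated column-wise in the usual schoolbook pattern,
#   r_k = sum_{i+j=k} a_i*b_j  +  19 * sum_{i+j=k+5} a_i*b_j,
# with the low 64 bits of each column kept in rx_k/ry_k and the high bits in mulr_k1.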
  1. # qhasm: int64 rp
  2. # qhasm: int64 pp
  3. # qhasm: input rp
  4. # qhasm: input pp
  5. # qhasm: int64 caller1
  6. # qhasm: int64 caller2
  7. # qhasm: int64 caller3
  8. # qhasm: int64 caller4
  9. # qhasm: int64 caller5
  10. # qhasm: int64 caller6
  11. # qhasm: int64 caller7
  12. # qhasm: caller caller1
  13. # qhasm: caller caller2
  14. # qhasm: caller caller3
  15. # qhasm: caller caller4
  16. # qhasm: caller caller5
  17. # qhasm: caller caller6
  18. # qhasm: caller caller7
  19. # qhasm: stack64 caller1_stack
  20. # qhasm: stack64 caller2_stack
  21. # qhasm: stack64 caller3_stack
  22. # qhasm: stack64 caller4_stack
  23. # qhasm: stack64 caller5_stack
  24. # qhasm: stack64 caller6_stack
  25. # qhasm: stack64 caller7_stack
  26. # qhasm: int64 rx0
  27. # qhasm: int64 rx1
  28. # qhasm: int64 rx2
  29. # qhasm: int64 rx3
  30. # qhasm: int64 rx4
  31. # qhasm: int64 ry0
  32. # qhasm: int64 ry1
  33. # qhasm: int64 ry2
  34. # qhasm: int64 ry3
  35. # qhasm: int64 ry4
  36. # qhasm: int64 rz0
  37. # qhasm: int64 rz1
  38. # qhasm: int64 rz2
  39. # qhasm: int64 rz3
  40. # qhasm: int64 rz4
  41. # qhasm: int64 mulr01
  42. # qhasm: int64 mulr11
  43. # qhasm: int64 mulr21
  44. # qhasm: int64 mulr31
  45. # qhasm: int64 mulr41
  46. # qhasm: int64 mulrax
  47. # qhasm: int64 mulrdx
  48. # qhasm: int64 mult
  49. # qhasm: int64 mulredmask
  50. # qhasm: stack64 mulx219_stack
  51. # qhasm: stack64 mulx319_stack
  52. # qhasm: stack64 mulx419_stack
  53. # qhasm: enter CRYPTO_NAMESPACE(batch_ge25519_p1p1_to_p2)
  54. .text
  55. .p2align 5
  56. .globl _CRYPTO_NAMESPACE(batch_ge25519_p1p1_to_p2)
  57. .globl CRYPTO_NAMESPACE(batch_ge25519_p1p1_to_p2)
  58. _CRYPTO_NAMESPACE(batch_ge25519_p1p1_to_p2):
  59. CRYPTO_NAMESPACE(batch_ge25519_p1p1_to_p2):
  60. mov %rsp,%r11
  61. and $31,%r11
  62. add $96,%r11
  63. sub %r11,%rsp
  64. # qhasm: caller1_stack = caller1
  65. # asm 1: movq <caller1=int64#9,>caller1_stack=stack64#1
  66. # asm 2: movq <caller1=%r11,>caller1_stack=0(%rsp)
  67. movq %r11,0(%rsp)
  68. # qhasm: caller2_stack = caller2
  69. # asm 1: movq <caller2=int64#10,>caller2_stack=stack64#2
  70. # asm 2: movq <caller2=%r12,>caller2_stack=8(%rsp)
  71. movq %r12,8(%rsp)
  72. # qhasm: caller3_stack = caller3
  73. # asm 1: movq <caller3=int64#11,>caller3_stack=stack64#3
  74. # asm 2: movq <caller3=%r13,>caller3_stack=16(%rsp)
  75. movq %r13,16(%rsp)
  76. # qhasm: caller4_stack = caller4
  77. # asm 1: movq <caller4=int64#12,>caller4_stack=stack64#4
  78. # asm 2: movq <caller4=%r14,>caller4_stack=24(%rsp)
  79. movq %r14,24(%rsp)
  80. # qhasm: caller5_stack = caller5
  81. # asm 1: movq <caller5=int64#13,>caller5_stack=stack64#5
  82. # asm 2: movq <caller5=%r15,>caller5_stack=32(%rsp)
  83. movq %r15,32(%rsp)
  84. # qhasm: caller6_stack = caller6
  85. # asm 1: movq <caller6=int64#14,>caller6_stack=stack64#6
  86. # asm 2: movq <caller6=%rbx,>caller6_stack=40(%rsp)
  87. movq %rbx,40(%rsp)
  88. # qhasm: caller7_stack = caller7
  89. # asm 1: movq <caller7=int64#15,>caller7_stack=stack64#7
  90. # asm 2: movq <caller7=%rbp,>caller7_stack=48(%rsp)
  91. movq %rbp,48(%rsp)
  92. # qhasm: mulrax = *(uint64 *)(pp + 24)
  93. # asm 1: movq 24(<pp=int64#2),>mulrax=int64#3
  94. # asm 2: movq 24(<pp=%rsi),>mulrax=%rdx
  95. movq 24(%rsi),%rdx
  96. # qhasm: mulrax *= 19
  97. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  98. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  99. imulq $19,%rdx,%rax
  100. # qhasm: mulx319_stack = mulrax
  101. # asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#8
  102. # asm 2: movq <mulrax=%rax,>mulx319_stack=56(%rsp)
  103. movq %rax,56(%rsp)
  104. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 136)
  105. # asm 1: mulq 136(<pp=int64#2)
  106. # asm 2: mulq 136(<pp=%rsi)
  107. mulq 136(%rsi)
  108. # qhasm: rx0 = mulrax
  109. # asm 1: mov <mulrax=int64#7,>rx0=int64#4
  110. # asm 2: mov <mulrax=%rax,>rx0=%rcx
  111. mov %rax,%rcx
  112. # qhasm: mulr01 = mulrdx
  113. # asm 1: mov <mulrdx=int64#3,>mulr01=int64#5
  114. # asm 2: mov <mulrdx=%rdx,>mulr01=%r8
  115. mov %rdx,%r8
  116. # qhasm: mulrax = *(uint64 *)(pp + 32)
  117. # asm 1: movq 32(<pp=int64#2),>mulrax=int64#3
  118. # asm 2: movq 32(<pp=%rsi),>mulrax=%rdx
  119. movq 32(%rsi),%rdx
  120. # qhasm: mulrax *= 19
  121. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  122. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  123. imulq $19,%rdx,%rax
  124. # qhasm: mulx419_stack = mulrax
  125. # asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#9
  126. # asm 2: movq <mulrax=%rax,>mulx419_stack=64(%rsp)
  127. movq %rax,64(%rsp)
  128. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 128)
  129. # asm 1: mulq 128(<pp=int64#2)
  130. # asm 2: mulq 128(<pp=%rsi)
  131. mulq 128(%rsi)
  132. # qhasm: carry? rx0 += mulrax
  133. # asm 1: add <mulrax=int64#7,<rx0=int64#4
  134. # asm 2: add <mulrax=%rax,<rx0=%rcx
  135. add %rax,%rcx
  136. # qhasm: mulr01 += mulrdx + carry
  137. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  138. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  139. adc %rdx,%r8
  140. # qhasm: mulrax = *(uint64 *)(pp + 0)
  141. # asm 1: movq 0(<pp=int64#2),>mulrax=int64#7
  142. # asm 2: movq 0(<pp=%rsi),>mulrax=%rax
  143. movq 0(%rsi),%rax
  144. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 120)
  145. # asm 1: mulq 120(<pp=int64#2)
  146. # asm 2: mulq 120(<pp=%rsi)
  147. mulq 120(%rsi)
  148. # qhasm: carry? rx0 += mulrax
  149. # asm 1: add <mulrax=int64#7,<rx0=int64#4
  150. # asm 2: add <mulrax=%rax,<rx0=%rcx
  151. add %rax,%rcx
  152. # qhasm: mulr01 += mulrdx + carry
  153. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  154. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  155. adc %rdx,%r8
  156. # qhasm: mulrax = *(uint64 *)(pp + 0)
  157. # asm 1: movq 0(<pp=int64#2),>mulrax=int64#7
  158. # asm 2: movq 0(<pp=%rsi),>mulrax=%rax
  159. movq 0(%rsi),%rax
  160. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 128)
  161. # asm 1: mulq 128(<pp=int64#2)
  162. # asm 2: mulq 128(<pp=%rsi)
  163. mulq 128(%rsi)
  164. # qhasm: rx1 = mulrax
  165. # asm 1: mov <mulrax=int64#7,>rx1=int64#6
  166. # asm 2: mov <mulrax=%rax,>rx1=%r9
  167. mov %rax,%r9
  168. # qhasm: mulr11 = mulrdx
  169. # asm 1: mov <mulrdx=int64#3,>mulr11=int64#8
  170. # asm 2: mov <mulrdx=%rdx,>mulr11=%r10
  171. mov %rdx,%r10
  172. # qhasm: mulrax = *(uint64 *)(pp + 0)
  173. # asm 1: movq 0(<pp=int64#2),>mulrax=int64#7
  174. # asm 2: movq 0(<pp=%rsi),>mulrax=%rax
  175. movq 0(%rsi),%rax
  176. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 136)
  177. # asm 1: mulq 136(<pp=int64#2)
  178. # asm 2: mulq 136(<pp=%rsi)
  179. mulq 136(%rsi)
  180. # qhasm: rx2 = mulrax
  181. # asm 1: mov <mulrax=int64#7,>rx2=int64#9
  182. # asm 2: mov <mulrax=%rax,>rx2=%r11
  183. mov %rax,%r11
  184. # qhasm: mulr21 = mulrdx
  185. # asm 1: mov <mulrdx=int64#3,>mulr21=int64#10
  186. # asm 2: mov <mulrdx=%rdx,>mulr21=%r12
  187. mov %rdx,%r12
  188. # qhasm: mulrax = *(uint64 *)(pp + 0)
  189. # asm 1: movq 0(<pp=int64#2),>mulrax=int64#7
  190. # asm 2: movq 0(<pp=%rsi),>mulrax=%rax
  191. movq 0(%rsi),%rax
  192. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 144)
  193. # asm 1: mulq 144(<pp=int64#2)
  194. # asm 2: mulq 144(<pp=%rsi)
  195. mulq 144(%rsi)
  196. # qhasm: rx3 = mulrax
  197. # asm 1: mov <mulrax=int64#7,>rx3=int64#11
  198. # asm 2: mov <mulrax=%rax,>rx3=%r13
  199. mov %rax,%r13
  200. # qhasm: mulr31 = mulrdx
  201. # asm 1: mov <mulrdx=int64#3,>mulr31=int64#12
  202. # asm 2: mov <mulrdx=%rdx,>mulr31=%r14
  203. mov %rdx,%r14
  204. # qhasm: mulrax = *(uint64 *)(pp + 0)
  205. # asm 1: movq 0(<pp=int64#2),>mulrax=int64#7
  206. # asm 2: movq 0(<pp=%rsi),>mulrax=%rax
  207. movq 0(%rsi),%rax
  208. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 152)
  209. # asm 1: mulq 152(<pp=int64#2)
  210. # asm 2: mulq 152(<pp=%rsi)
  211. mulq 152(%rsi)
  212. # qhasm: rx4 = mulrax
  213. # asm 1: mov <mulrax=int64#7,>rx4=int64#13
  214. # asm 2: mov <mulrax=%rax,>rx4=%r15
  215. mov %rax,%r15
  216. # qhasm: mulr41 = mulrdx
  217. # asm 1: mov <mulrdx=int64#3,>mulr41=int64#14
  218. # asm 2: mov <mulrdx=%rdx,>mulr41=%rbx
  219. mov %rdx,%rbx
  220. # qhasm: mulrax = *(uint64 *)(pp + 8)
  221. # asm 1: movq 8(<pp=int64#2),>mulrax=int64#7
  222. # asm 2: movq 8(<pp=%rsi),>mulrax=%rax
  223. movq 8(%rsi),%rax
  224. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 120)
  225. # asm 1: mulq 120(<pp=int64#2)
  226. # asm 2: mulq 120(<pp=%rsi)
  227. mulq 120(%rsi)
  228. # qhasm: carry? rx1 += mulrax
  229. # asm 1: add <mulrax=int64#7,<rx1=int64#6
  230. # asm 2: add <mulrax=%rax,<rx1=%r9
  231. add %rax,%r9
  232. # qhasm: mulr11 += mulrdx + carry
  233. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  234. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  235. adc %rdx,%r10
  236. # qhasm: mulrax = *(uint64 *)(pp + 8)
  237. # asm 1: movq 8(<pp=int64#2),>mulrax=int64#7
  238. # asm 2: movq 8(<pp=%rsi),>mulrax=%rax
  239. movq 8(%rsi),%rax
  240. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 128)
  241. # asm 1: mulq 128(<pp=int64#2)
  242. # asm 2: mulq 128(<pp=%rsi)
  243. mulq 128(%rsi)
  244. # qhasm: carry? rx2 += mulrax
  245. # asm 1: add <mulrax=int64#7,<rx2=int64#9
  246. # asm 2: add <mulrax=%rax,<rx2=%r11
  247. add %rax,%r11
  248. # qhasm: mulr21 += mulrdx + carry
  249. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  250. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  251. adc %rdx,%r12
  252. # qhasm: mulrax = *(uint64 *)(pp + 8)
  253. # asm 1: movq 8(<pp=int64#2),>mulrax=int64#7
  254. # asm 2: movq 8(<pp=%rsi),>mulrax=%rax
  255. movq 8(%rsi),%rax
  256. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 136)
  257. # asm 1: mulq 136(<pp=int64#2)
  258. # asm 2: mulq 136(<pp=%rsi)
  259. mulq 136(%rsi)
  260. # qhasm: carry? rx3 += mulrax
  261. # asm 1: add <mulrax=int64#7,<rx3=int64#11
  262. # asm 2: add <mulrax=%rax,<rx3=%r13
  263. add %rax,%r13
  264. # qhasm: mulr31 += mulrdx + carry
  265. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  266. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  267. adc %rdx,%r14
  268. # qhasm: mulrax = *(uint64 *)(pp + 8)
  269. # asm 1: movq 8(<pp=int64#2),>mulrax=int64#7
  270. # asm 2: movq 8(<pp=%rsi),>mulrax=%rax
  271. movq 8(%rsi),%rax
  272. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 144)
  273. # asm 1: mulq 144(<pp=int64#2)
  274. # asm 2: mulq 144(<pp=%rsi)
  275. mulq 144(%rsi)
  276. # qhasm: carry? rx4 += mulrax
  277. # asm 1: add <mulrax=int64#7,<rx4=int64#13
  278. # asm 2: add <mulrax=%rax,<rx4=%r15
  279. add %rax,%r15
  280. # qhasm: mulr41 += mulrdx + carry
  281. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  282. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  283. adc %rdx,%rbx
  284. # qhasm: mulrax = *(uint64 *)(pp + 8)
  285. # asm 1: movq 8(<pp=int64#2),>mulrax=int64#3
  286. # asm 2: movq 8(<pp=%rsi),>mulrax=%rdx
  287. movq 8(%rsi),%rdx
  288. # qhasm: mulrax *= 19
  289. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  290. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  291. imulq $19,%rdx,%rax
  292. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 152)
  293. # asm 1: mulq 152(<pp=int64#2)
  294. # asm 2: mulq 152(<pp=%rsi)
  295. mulq 152(%rsi)
  296. # qhasm: carry? rx0 += mulrax
  297. # asm 1: add <mulrax=int64#7,<rx0=int64#4
  298. # asm 2: add <mulrax=%rax,<rx0=%rcx
  299. add %rax,%rcx
  300. # qhasm: mulr01 += mulrdx + carry
  301. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  302. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  303. adc %rdx,%r8
  304. # qhasm: mulrax = *(uint64 *)(pp + 16)
  305. # asm 1: movq 16(<pp=int64#2),>mulrax=int64#7
  306. # asm 2: movq 16(<pp=%rsi),>mulrax=%rax
  307. movq 16(%rsi),%rax
  308. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 120)
  309. # asm 1: mulq 120(<pp=int64#2)
  310. # asm 2: mulq 120(<pp=%rsi)
  311. mulq 120(%rsi)
  312. # qhasm: carry? rx2 += mulrax
  313. # asm 1: add <mulrax=int64#7,<rx2=int64#9
  314. # asm 2: add <mulrax=%rax,<rx2=%r11
  315. add %rax,%r11
  316. # qhasm: mulr21 += mulrdx + carry
  317. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  318. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  319. adc %rdx,%r12
  320. # qhasm: mulrax = *(uint64 *)(pp + 16)
  321. # asm 1: movq 16(<pp=int64#2),>mulrax=int64#7
  322. # asm 2: movq 16(<pp=%rsi),>mulrax=%rax
  323. movq 16(%rsi),%rax
  324. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 128)
  325. # asm 1: mulq 128(<pp=int64#2)
  326. # asm 2: mulq 128(<pp=%rsi)
  327. mulq 128(%rsi)
  328. # qhasm: carry? rx3 += mulrax
  329. # asm 1: add <mulrax=int64#7,<rx3=int64#11
  330. # asm 2: add <mulrax=%rax,<rx3=%r13
  331. add %rax,%r13
  332. # qhasm: mulr31 += mulrdx + carry
  333. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  334. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  335. adc %rdx,%r14
  336. # qhasm: mulrax = *(uint64 *)(pp + 16)
  337. # asm 1: movq 16(<pp=int64#2),>mulrax=int64#7
  338. # asm 2: movq 16(<pp=%rsi),>mulrax=%rax
  339. movq 16(%rsi),%rax
  340. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 136)
  341. # asm 1: mulq 136(<pp=int64#2)
  342. # asm 2: mulq 136(<pp=%rsi)
  343. mulq 136(%rsi)
  344. # qhasm: carry? rx4 += mulrax
  345. # asm 1: add <mulrax=int64#7,<rx4=int64#13
  346. # asm 2: add <mulrax=%rax,<rx4=%r15
  347. add %rax,%r15
  348. # qhasm: mulr41 += mulrdx + carry
  349. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  350. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  351. adc %rdx,%rbx
  352. # qhasm: mulrax = *(uint64 *)(pp + 16)
  353. # asm 1: movq 16(<pp=int64#2),>mulrax=int64#3
  354. # asm 2: movq 16(<pp=%rsi),>mulrax=%rdx
  355. movq 16(%rsi),%rdx
  356. # qhasm: mulrax *= 19
  357. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  358. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  359. imulq $19,%rdx,%rax
  360. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 144)
  361. # asm 1: mulq 144(<pp=int64#2)
  362. # asm 2: mulq 144(<pp=%rsi)
  363. mulq 144(%rsi)
  364. # qhasm: carry? rx0 += mulrax
  365. # asm 1: add <mulrax=int64#7,<rx0=int64#4
  366. # asm 2: add <mulrax=%rax,<rx0=%rcx
  367. add %rax,%rcx
  368. # qhasm: mulr01 += mulrdx + carry
  369. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  370. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  371. adc %rdx,%r8
  372. # qhasm: mulrax = *(uint64 *)(pp + 16)
  373. # asm 1: movq 16(<pp=int64#2),>mulrax=int64#3
  374. # asm 2: movq 16(<pp=%rsi),>mulrax=%rdx
  375. movq 16(%rsi),%rdx
  376. # qhasm: mulrax *= 19
  377. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  378. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  379. imulq $19,%rdx,%rax
  380. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 152)
  381. # asm 1: mulq 152(<pp=int64#2)
  382. # asm 2: mulq 152(<pp=%rsi)
  383. mulq 152(%rsi)
  384. # qhasm: carry? rx1 += mulrax
  385. # asm 1: add <mulrax=int64#7,<rx1=int64#6
  386. # asm 2: add <mulrax=%rax,<rx1=%r9
  387. add %rax,%r9
  388. # qhasm: mulr11 += mulrdx + carry
  389. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  390. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  391. adc %rdx,%r10
  392. # qhasm: mulrax = *(uint64 *)(pp + 24)
  393. # asm 1: movq 24(<pp=int64#2),>mulrax=int64#7
  394. # asm 2: movq 24(<pp=%rsi),>mulrax=%rax
  395. movq 24(%rsi),%rax
  396. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 120)
  397. # asm 1: mulq 120(<pp=int64#2)
  398. # asm 2: mulq 120(<pp=%rsi)
  399. mulq 120(%rsi)
  400. # qhasm: carry? rx3 += mulrax
  401. # asm 1: add <mulrax=int64#7,<rx3=int64#11
  402. # asm 2: add <mulrax=%rax,<rx3=%r13
  403. add %rax,%r13
  404. # qhasm: mulr31 += mulrdx + carry
  405. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  406. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  407. adc %rdx,%r14
  408. # qhasm: mulrax = *(uint64 *)(pp + 24)
  409. # asm 1: movq 24(<pp=int64#2),>mulrax=int64#7
  410. # asm 2: movq 24(<pp=%rsi),>mulrax=%rax
  411. movq 24(%rsi),%rax
  412. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 128)
  413. # asm 1: mulq 128(<pp=int64#2)
  414. # asm 2: mulq 128(<pp=%rsi)
  415. mulq 128(%rsi)
  416. # qhasm: carry? rx4 += mulrax
  417. # asm 1: add <mulrax=int64#7,<rx4=int64#13
  418. # asm 2: add <mulrax=%rax,<rx4=%r15
  419. add %rax,%r15
  420. # qhasm: mulr41 += mulrdx + carry
  421. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  422. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  423. adc %rdx,%rbx
  424. # qhasm: mulrax = mulx319_stack
  425. # asm 1: movq <mulx319_stack=stack64#8,>mulrax=int64#7
  426. # asm 2: movq <mulx319_stack=56(%rsp),>mulrax=%rax
  427. movq 56(%rsp),%rax
  428. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 144)
  429. # asm 1: mulq 144(<pp=int64#2)
  430. # asm 2: mulq 144(<pp=%rsi)
  431. mulq 144(%rsi)
  432. # qhasm: carry? rx1 += mulrax
  433. # asm 1: add <mulrax=int64#7,<rx1=int64#6
  434. # asm 2: add <mulrax=%rax,<rx1=%r9
  435. add %rax,%r9
  436. # qhasm: mulr11 += mulrdx + carry
  437. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  438. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  439. adc %rdx,%r10
  440. # qhasm: mulrax = mulx319_stack
  441. # asm 1: movq <mulx319_stack=stack64#8,>mulrax=int64#7
  442. # asm 2: movq <mulx319_stack=56(%rsp),>mulrax=%rax
  443. movq 56(%rsp),%rax
  444. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 152)
  445. # asm 1: mulq 152(<pp=int64#2)
  446. # asm 2: mulq 152(<pp=%rsi)
  447. mulq 152(%rsi)
  448. # qhasm: carry? rx2 += mulrax
  449. # asm 1: add <mulrax=int64#7,<rx2=int64#9
  450. # asm 2: add <mulrax=%rax,<rx2=%r11
  451. add %rax,%r11
  452. # qhasm: mulr21 += mulrdx + carry
  453. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  454. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  455. adc %rdx,%r12
  456. # qhasm: mulrax = *(uint64 *)(pp + 32)
  457. # asm 1: movq 32(<pp=int64#2),>mulrax=int64#7
  458. # asm 2: movq 32(<pp=%rsi),>mulrax=%rax
  459. movq 32(%rsi),%rax
  460. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 120)
  461. # asm 1: mulq 120(<pp=int64#2)
  462. # asm 2: mulq 120(<pp=%rsi)
  463. mulq 120(%rsi)
  464. # qhasm: carry? rx4 += mulrax
  465. # asm 1: add <mulrax=int64#7,<rx4=int64#13
  466. # asm 2: add <mulrax=%rax,<rx4=%r15
  467. add %rax,%r15
  468. # qhasm: mulr41 += mulrdx + carry
  469. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  470. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  471. adc %rdx,%rbx
  472. # qhasm: mulrax = mulx419_stack
  473. # asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
  474. # asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
  475. movq 64(%rsp),%rax
  476. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 136)
  477. # asm 1: mulq 136(<pp=int64#2)
  478. # asm 2: mulq 136(<pp=%rsi)
  479. mulq 136(%rsi)
  480. # qhasm: carry? rx1 += mulrax
  481. # asm 1: add <mulrax=int64#7,<rx1=int64#6
  482. # asm 2: add <mulrax=%rax,<rx1=%r9
  483. add %rax,%r9
  484. # qhasm: mulr11 += mulrdx + carry
  485. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  486. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  487. adc %rdx,%r10
  488. # qhasm: mulrax = mulx419_stack
  489. # asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
  490. # asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
  491. movq 64(%rsp),%rax
  492. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 144)
  493. # asm 1: mulq 144(<pp=int64#2)
  494. # asm 2: mulq 144(<pp=%rsi)
  495. mulq 144(%rsi)
  496. # qhasm: carry? rx2 += mulrax
  497. # asm 1: add <mulrax=int64#7,<rx2=int64#9
  498. # asm 2: add <mulrax=%rax,<rx2=%r11
  499. add %rax,%r11
  500. # qhasm: mulr21 += mulrdx + carry
  501. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  502. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  503. adc %rdx,%r12
  504. # qhasm: mulrax = mulx419_stack
  505. # asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
  506. # asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
  507. movq 64(%rsp),%rax
  508. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 152)
  509. # asm 1: mulq 152(<pp=int64#2)
  510. # asm 2: mulq 152(<pp=%rsi)
  511. mulq 152(%rsi)
  512. # qhasm: carry? rx3 += mulrax
  513. # asm 1: add <mulrax=int64#7,<rx3=int64#11
  514. # asm 2: add <mulrax=%rax,<rx3=%r13
  515. add %rax,%r13
  516. # qhasm: mulr31 += mulrdx + carry
  517. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  518. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  519. adc %rdx,%r14
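# Added commentary: at this point each pair (mulr_k1:rx_k) holds one 128-bit column sum
# of the first product. The reduction below uses shld $13 so that each mulr_k1 becomes
# its column's bits from position 51 upward, masks rx_k to 51 bits with REDMASK51, adds
# each high part into the next-higher limb, folds the top overflow back into rx0 via
# *19 (since 2^255 = 19 mod p), and finishes with a sequential carry pass that leaves
# every limb at roughly 51 bits.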
  520. # qhasm: mulredmask = *(uint64 *) &CRYPTO_NAMESPACE(batch_REDMASK51)
  521. # asm 1: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=int64#3
  522. # asm 2: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=%rdx
  523. movq CRYPTO_NAMESPACE(batch_REDMASK51)(%rip),%rdx
  524. # qhasm: mulr01 = (mulr01.rx0) << 13
  525. # asm 1: shld $13,<rx0=int64#4,<mulr01=int64#5
  526. # asm 2: shld $13,<rx0=%rcx,<mulr01=%r8
  527. shld $13,%rcx,%r8
  528. # qhasm: rx0 &= mulredmask
  529. # asm 1: and <mulredmask=int64#3,<rx0=int64#4
  530. # asm 2: and <mulredmask=%rdx,<rx0=%rcx
  531. and %rdx,%rcx
  532. # qhasm: mulr11 = (mulr11.rx1) << 13
  533. # asm 1: shld $13,<rx1=int64#6,<mulr11=int64#8
  534. # asm 2: shld $13,<rx1=%r9,<mulr11=%r10
  535. shld $13,%r9,%r10
  536. # qhasm: rx1 &= mulredmask
  537. # asm 1: and <mulredmask=int64#3,<rx1=int64#6
  538. # asm 2: and <mulredmask=%rdx,<rx1=%r9
  539. and %rdx,%r9
  540. # qhasm: rx1 += mulr01
  541. # asm 1: add <mulr01=int64#5,<rx1=int64#6
  542. # asm 2: add <mulr01=%r8,<rx1=%r9
  543. add %r8,%r9
  544. # qhasm: mulr21 = (mulr21.rx2) << 13
  545. # asm 1: shld $13,<rx2=int64#9,<mulr21=int64#10
  546. # asm 2: shld $13,<rx2=%r11,<mulr21=%r12
  547. shld $13,%r11,%r12
  548. # qhasm: rx2 &= mulredmask
  549. # asm 1: and <mulredmask=int64#3,<rx2=int64#9
  550. # asm 2: and <mulredmask=%rdx,<rx2=%r11
  551. and %rdx,%r11
  552. # qhasm: rx2 += mulr11
  553. # asm 1: add <mulr11=int64#8,<rx2=int64#9
  554. # asm 2: add <mulr11=%r10,<rx2=%r11
  555. add %r10,%r11
  556. # qhasm: mulr31 = (mulr31.rx3) << 13
  557. # asm 1: shld $13,<rx3=int64#11,<mulr31=int64#12
  558. # asm 2: shld $13,<rx3=%r13,<mulr31=%r14
  559. shld $13,%r13,%r14
  560. # qhasm: rx3 &= mulredmask
  561. # asm 1: and <mulredmask=int64#3,<rx3=int64#11
  562. # asm 2: and <mulredmask=%rdx,<rx3=%r13
  563. and %rdx,%r13
  564. # qhasm: rx3 += mulr21
  565. # asm 1: add <mulr21=int64#10,<rx3=int64#11
  566. # asm 2: add <mulr21=%r12,<rx3=%r13
  567. add %r12,%r13
  568. # qhasm: mulr41 = (mulr41.rx4) << 13
  569. # asm 1: shld $13,<rx4=int64#13,<mulr41=int64#14
  570. # asm 2: shld $13,<rx4=%r15,<mulr41=%rbx
  571. shld $13,%r15,%rbx
  572. # qhasm: rx4 &= mulredmask
  573. # asm 1: and <mulredmask=int64#3,<rx4=int64#13
  574. # asm 2: and <mulredmask=%rdx,<rx4=%r15
  575. and %rdx,%r15
  576. # qhasm: rx4 += mulr31
  577. # asm 1: add <mulr31=int64#12,<rx4=int64#13
  578. # asm 2: add <mulr31=%r14,<rx4=%r15
  579. add %r14,%r15
  580. # qhasm: mulr41 = mulr41 * 19
  581. # asm 1: imulq $19,<mulr41=int64#14,>mulr41=int64#5
  582. # asm 2: imulq $19,<mulr41=%rbx,>mulr41=%r8
  583. imulq $19,%rbx,%r8
  584. # qhasm: rx0 += mulr41
  585. # asm 1: add <mulr41=int64#5,<rx0=int64#4
  586. # asm 2: add <mulr41=%r8,<rx0=%rcx
  587. add %r8,%rcx
  588. # qhasm: mult = rx0
  589. # asm 1: mov <rx0=int64#4,>mult=int64#5
  590. # asm 2: mov <rx0=%rcx,>mult=%r8
  591. mov %rcx,%r8
  592. # qhasm: (uint64) mult >>= 51
  593. # asm 1: shr $51,<mult=int64#5
  594. # asm 2: shr $51,<mult=%r8
  595. shr $51,%r8
  596. # qhasm: mult += rx1
  597. # asm 1: add <rx1=int64#6,<mult=int64#5
  598. # asm 2: add <rx1=%r9,<mult=%r8
  599. add %r9,%r8
  600. # qhasm: rx1 = mult
  601. # asm 1: mov <mult=int64#5,>rx1=int64#6
  602. # asm 2: mov <mult=%r8,>rx1=%r9
  603. mov %r8,%r9
  604. # qhasm: (uint64) mult >>= 51
  605. # asm 1: shr $51,<mult=int64#5
  606. # asm 2: shr $51,<mult=%r8
  607. shr $51,%r8
  608. # qhasm: rx0 &= mulredmask
  609. # asm 1: and <mulredmask=int64#3,<rx0=int64#4
  610. # asm 2: and <mulredmask=%rdx,<rx0=%rcx
  611. and %rdx,%rcx
  612. # qhasm: mult += rx2
  613. # asm 1: add <rx2=int64#9,<mult=int64#5
  614. # asm 2: add <rx2=%r11,<mult=%r8
  615. add %r11,%r8
  616. # qhasm: rx2 = mult
  617. # asm 1: mov <mult=int64#5,>rx2=int64#7
  618. # asm 2: mov <mult=%r8,>rx2=%rax
  619. mov %r8,%rax
  620. # qhasm: (uint64) mult >>= 51
  621. # asm 1: shr $51,<mult=int64#5
  622. # asm 2: shr $51,<mult=%r8
  623. shr $51,%r8
  624. # qhasm: rx1 &= mulredmask
  625. # asm 1: and <mulredmask=int64#3,<rx1=int64#6
  626. # asm 2: and <mulredmask=%rdx,<rx1=%r9
  627. and %rdx,%r9
  628. # qhasm: mult += rx3
  629. # asm 1: add <rx3=int64#11,<mult=int64#5
  630. # asm 2: add <rx3=%r13,<mult=%r8
  631. add %r13,%r8
  632. # qhasm: rx3 = mult
  633. # asm 1: mov <mult=int64#5,>rx3=int64#8
  634. # asm 2: mov <mult=%r8,>rx3=%r10
  635. mov %r8,%r10
  636. # qhasm: (uint64) mult >>= 51
  637. # asm 1: shr $51,<mult=int64#5
  638. # asm 2: shr $51,<mult=%r8
  639. shr $51,%r8
  640. # qhasm: rx2 &= mulredmask
  641. # asm 1: and <mulredmask=int64#3,<rx2=int64#7
  642. # asm 2: and <mulredmask=%rdx,<rx2=%rax
  643. and %rdx,%rax
  644. # qhasm: mult += rx4
  645. # asm 1: add <rx4=int64#13,<mult=int64#5
  646. # asm 2: add <rx4=%r15,<mult=%r8
  647. add %r15,%r8
  648. # qhasm: rx4 = mult
  649. # asm 1: mov <mult=int64#5,>rx4=int64#9
  650. # asm 2: mov <mult=%r8,>rx4=%r11
  651. mov %r8,%r11
  652. # qhasm: (uint64) mult >>= 51
  653. # asm 1: shr $51,<mult=int64#5
  654. # asm 2: shr $51,<mult=%r8
  655. shr $51,%r8
  656. # qhasm: rx3 &= mulredmask
  657. # asm 1: and <mulredmask=int64#3,<rx3=int64#8
  658. # asm 2: and <mulredmask=%rdx,<rx3=%r10
  659. and %rdx,%r10
  660. # qhasm: mult *= 19
  661. # asm 1: imulq $19,<mult=int64#5,>mult=int64#5
  662. # asm 2: imulq $19,<mult=%r8,>mult=%r8
  663. imulq $19,%r8,%r8
  664. # qhasm: rx0 += mult
  665. # asm 1: add <mult=int64#5,<rx0=int64#4
  666. # asm 2: add <mult=%r8,<rx0=%rcx
  667. add %r8,%rcx
  668. # qhasm: rx4 &= mulredmask
  669. # asm 1: and <mulredmask=int64#3,<rx4=int64#9
  670. # asm 2: and <mulredmask=%rdx,<rx4=%r11
  671. and %rdx,%r11
  672. # qhasm: *(uint64 *)(rp + 0) = rx0
  673. # asm 1: movq <rx0=int64#4,0(<rp=int64#1)
  674. # asm 2: movq <rx0=%rcx,0(<rp=%rdi)
  675. movq %rcx,0(%rdi)
  676. # qhasm: *(uint64 *)(rp + 8) = rx1
  677. # asm 1: movq <rx1=int64#6,8(<rp=int64#1)
  678. # asm 2: movq <rx1=%r9,8(<rp=%rdi)
  679. movq %r9,8(%rdi)
  680. # qhasm: *(uint64 *)(rp + 16) = rx2
  681. # asm 1: movq <rx2=int64#7,16(<rp=int64#1)
  682. # asm 2: movq <rx2=%rax,16(<rp=%rdi)
  683. movq %rax,16(%rdi)
  684. # qhasm: *(uint64 *)(rp + 24) = rx3
  685. # asm 1: movq <rx3=int64#8,24(<rp=int64#1)
  686. # asm 2: movq <rx3=%r10,24(<rp=%rdi)
  687. movq %r10,24(%rdi)
  688. # qhasm: *(uint64 *)(rp + 32) = rx4
  689. # asm 1: movq <rx4=int64#9,32(<rp=int64#1)
  690. # asm 2: movq <rx4=%r11,32(<rp=%rdi)
  691. movq %r11,32(%rdi)
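# Added commentary: the five limbs stored at rp+0..rp+32 above are the first reduced
# product (presumably the new X coordinate). The same multiply-and-reduce sequence is
# now repeated for the operands at pp+40..pp+72 and pp+80..pp+112, with the result
# written to rp+40..rp+72.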
  692. # qhasm: mulrax = *(uint64 *)(pp + 104)
  693. # asm 1: movq 104(<pp=int64#2),>mulrax=int64#3
  694. # asm 2: movq 104(<pp=%rsi),>mulrax=%rdx
  695. movq 104(%rsi),%rdx
  696. # qhasm: mulrax *= 19
  697. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  698. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  699. imulq $19,%rdx,%rax
  700. # qhasm: mulx319_stack = mulrax
  701. # asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#8
  702. # asm 2: movq <mulrax=%rax,>mulx319_stack=56(%rsp)
  703. movq %rax,56(%rsp)
  704. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 56)
  705. # asm 1: mulq 56(<pp=int64#2)
  706. # asm 2: mulq 56(<pp=%rsi)
  707. mulq 56(%rsi)
  708. # qhasm: ry0 = mulrax
  709. # asm 1: mov <mulrax=int64#7,>ry0=int64#4
  710. # asm 2: mov <mulrax=%rax,>ry0=%rcx
  711. mov %rax,%rcx
  712. # qhasm: mulr01 = mulrdx
  713. # asm 1: mov <mulrdx=int64#3,>mulr01=int64#5
  714. # asm 2: mov <mulrdx=%rdx,>mulr01=%r8
  715. mov %rdx,%r8
  716. # qhasm: mulrax = *(uint64 *)(pp + 112)
  717. # asm 1: movq 112(<pp=int64#2),>mulrax=int64#3
  718. # asm 2: movq 112(<pp=%rsi),>mulrax=%rdx
  719. movq 112(%rsi),%rdx
  720. # qhasm: mulrax *= 19
  721. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  722. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  723. imulq $19,%rdx,%rax
  724. # qhasm: mulx419_stack = mulrax
  725. # asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#9
  726. # asm 2: movq <mulrax=%rax,>mulx419_stack=64(%rsp)
  727. movq %rax,64(%rsp)
  728. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 48)
  729. # asm 1: mulq 48(<pp=int64#2)
  730. # asm 2: mulq 48(<pp=%rsi)
  731. mulq 48(%rsi)
  732. # qhasm: carry? ry0 += mulrax
  733. # asm 1: add <mulrax=int64#7,<ry0=int64#4
  734. # asm 2: add <mulrax=%rax,<ry0=%rcx
  735. add %rax,%rcx
  736. # qhasm: mulr01 += mulrdx + carry
  737. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  738. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  739. adc %rdx,%r8
  740. # qhasm: mulrax = *(uint64 *)(pp + 80)
  741. # asm 1: movq 80(<pp=int64#2),>mulrax=int64#7
  742. # asm 2: movq 80(<pp=%rsi),>mulrax=%rax
  743. movq 80(%rsi),%rax
  744. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 40)
  745. # asm 1: mulq 40(<pp=int64#2)
  746. # asm 2: mulq 40(<pp=%rsi)
  747. mulq 40(%rsi)
  748. # qhasm: carry? ry0 += mulrax
  749. # asm 1: add <mulrax=int64#7,<ry0=int64#4
  750. # asm 2: add <mulrax=%rax,<ry0=%rcx
  751. add %rax,%rcx
  752. # qhasm: mulr01 += mulrdx + carry
  753. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  754. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  755. adc %rdx,%r8
  756. # qhasm: mulrax = *(uint64 *)(pp + 80)
  757. # asm 1: movq 80(<pp=int64#2),>mulrax=int64#7
  758. # asm 2: movq 80(<pp=%rsi),>mulrax=%rax
  759. movq 80(%rsi),%rax
  760. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 48)
  761. # asm 1: mulq 48(<pp=int64#2)
  762. # asm 2: mulq 48(<pp=%rsi)
  763. mulq 48(%rsi)
  764. # qhasm: ry1 = mulrax
  765. # asm 1: mov <mulrax=int64#7,>ry1=int64#6
  766. # asm 2: mov <mulrax=%rax,>ry1=%r9
  767. mov %rax,%r9
  768. # qhasm: mulr11 = mulrdx
  769. # asm 1: mov <mulrdx=int64#3,>mulr11=int64#8
  770. # asm 2: mov <mulrdx=%rdx,>mulr11=%r10
  771. mov %rdx,%r10
  772. # qhasm: mulrax = *(uint64 *)(pp + 80)
  773. # asm 1: movq 80(<pp=int64#2),>mulrax=int64#7
  774. # asm 2: movq 80(<pp=%rsi),>mulrax=%rax
  775. movq 80(%rsi),%rax
  776. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 56)
  777. # asm 1: mulq 56(<pp=int64#2)
  778. # asm 2: mulq 56(<pp=%rsi)
  779. mulq 56(%rsi)
  780. # qhasm: ry2 = mulrax
  781. # asm 1: mov <mulrax=int64#7,>ry2=int64#9
  782. # asm 2: mov <mulrax=%rax,>ry2=%r11
  783. mov %rax,%r11
  784. # qhasm: mulr21 = mulrdx
  785. # asm 1: mov <mulrdx=int64#3,>mulr21=int64#10
  786. # asm 2: mov <mulrdx=%rdx,>mulr21=%r12
  787. mov %rdx,%r12
  788. # qhasm: mulrax = *(uint64 *)(pp + 80)
  789. # asm 1: movq 80(<pp=int64#2),>mulrax=int64#7
  790. # asm 2: movq 80(<pp=%rsi),>mulrax=%rax
  791. movq 80(%rsi),%rax
  792. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 64)
  793. # asm 1: mulq 64(<pp=int64#2)
  794. # asm 2: mulq 64(<pp=%rsi)
  795. mulq 64(%rsi)
  796. # qhasm: ry3 = mulrax
  797. # asm 1: mov <mulrax=int64#7,>ry3=int64#11
  798. # asm 2: mov <mulrax=%rax,>ry3=%r13
  799. mov %rax,%r13
  800. # qhasm: mulr31 = mulrdx
  801. # asm 1: mov <mulrdx=int64#3,>mulr31=int64#12
  802. # asm 2: mov <mulrdx=%rdx,>mulr31=%r14
  803. mov %rdx,%r14
  804. # qhasm: mulrax = *(uint64 *)(pp + 80)
  805. # asm 1: movq 80(<pp=int64#2),>mulrax=int64#7
  806. # asm 2: movq 80(<pp=%rsi),>mulrax=%rax
  807. movq 80(%rsi),%rax
  808. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 72)
  809. # asm 1: mulq 72(<pp=int64#2)
  810. # asm 2: mulq 72(<pp=%rsi)
  811. mulq 72(%rsi)
  812. # qhasm: ry4 = mulrax
  813. # asm 1: mov <mulrax=int64#7,>ry4=int64#13
  814. # asm 2: mov <mulrax=%rax,>ry4=%r15
  815. mov %rax,%r15
  816. # qhasm: mulr41 = mulrdx
  817. # asm 1: mov <mulrdx=int64#3,>mulr41=int64#14
  818. # asm 2: mov <mulrdx=%rdx,>mulr41=%rbx
  819. mov %rdx,%rbx
  820. # qhasm: mulrax = *(uint64 *)(pp + 88)
  821. # asm 1: movq 88(<pp=int64#2),>mulrax=int64#7
  822. # asm 2: movq 88(<pp=%rsi),>mulrax=%rax
  823. movq 88(%rsi),%rax
  824. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 40)
  825. # asm 1: mulq 40(<pp=int64#2)
  826. # asm 2: mulq 40(<pp=%rsi)
  827. mulq 40(%rsi)
  828. # qhasm: carry? ry1 += mulrax
  829. # asm 1: add <mulrax=int64#7,<ry1=int64#6
  830. # asm 2: add <mulrax=%rax,<ry1=%r9
  831. add %rax,%r9
  832. # qhasm: mulr11 += mulrdx + carry
  833. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  834. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  835. adc %rdx,%r10
  836. # qhasm: mulrax = *(uint64 *)(pp + 88)
  837. # asm 1: movq 88(<pp=int64#2),>mulrax=int64#7
  838. # asm 2: movq 88(<pp=%rsi),>mulrax=%rax
  839. movq 88(%rsi),%rax
  840. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 48)
  841. # asm 1: mulq 48(<pp=int64#2)
  842. # asm 2: mulq 48(<pp=%rsi)
  843. mulq 48(%rsi)
  844. # qhasm: carry? ry2 += mulrax
  845. # asm 1: add <mulrax=int64#7,<ry2=int64#9
  846. # asm 2: add <mulrax=%rax,<ry2=%r11
  847. add %rax,%r11
  848. # qhasm: mulr21 += mulrdx + carry
  849. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  850. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  851. adc %rdx,%r12
  852. # qhasm: mulrax = *(uint64 *)(pp + 88)
  853. # asm 1: movq 88(<pp=int64#2),>mulrax=int64#7
  854. # asm 2: movq 88(<pp=%rsi),>mulrax=%rax
  855. movq 88(%rsi),%rax
  856. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 56)
  857. # asm 1: mulq 56(<pp=int64#2)
  858. # asm 2: mulq 56(<pp=%rsi)
  859. mulq 56(%rsi)
  860. # qhasm: carry? ry3 += mulrax
  861. # asm 1: add <mulrax=int64#7,<ry3=int64#11
  862. # asm 2: add <mulrax=%rax,<ry3=%r13
  863. add %rax,%r13
  864. # qhasm: mulr31 += mulrdx + carry
  865. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  866. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  867. adc %rdx,%r14
  868. # qhasm: mulrax = *(uint64 *)(pp + 88)
  869. # asm 1: movq 88(<pp=int64#2),>mulrax=int64#7
  870. # asm 2: movq 88(<pp=%rsi),>mulrax=%rax
  871. movq 88(%rsi),%rax
  872. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 64)
  873. # asm 1: mulq 64(<pp=int64#2)
  874. # asm 2: mulq 64(<pp=%rsi)
  875. mulq 64(%rsi)
  876. # qhasm: carry? ry4 += mulrax
  877. # asm 1: add <mulrax=int64#7,<ry4=int64#13
  878. # asm 2: add <mulrax=%rax,<ry4=%r15
  879. add %rax,%r15
  880. # qhasm: mulr41 += mulrdx + carry
  881. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  882. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  883. adc %rdx,%rbx
  884. # qhasm: mulrax = *(uint64 *)(pp + 88)
  885. # asm 1: movq 88(<pp=int64#2),>mulrax=int64#3
  886. # asm 2: movq 88(<pp=%rsi),>mulrax=%rdx
  887. movq 88(%rsi),%rdx
  888. # qhasm: mulrax *= 19
  889. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  890. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  891. imulq $19,%rdx,%rax
  892. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 72)
  893. # asm 1: mulq 72(<pp=int64#2)
  894. # asm 2: mulq 72(<pp=%rsi)
  895. mulq 72(%rsi)
  896. # qhasm: carry? ry0 += mulrax
  897. # asm 1: add <mulrax=int64#7,<ry0=int64#4
  898. # asm 2: add <mulrax=%rax,<ry0=%rcx
  899. add %rax,%rcx
  900. # qhasm: mulr01 += mulrdx + carry
  901. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  902. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  903. adc %rdx,%r8
  904. # qhasm: mulrax = *(uint64 *)(pp + 96)
  905. # asm 1: movq 96(<pp=int64#2),>mulrax=int64#7
  906. # asm 2: movq 96(<pp=%rsi),>mulrax=%rax
  907. movq 96(%rsi),%rax
  908. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 40)
  909. # asm 1: mulq 40(<pp=int64#2)
  910. # asm 2: mulq 40(<pp=%rsi)
  911. mulq 40(%rsi)
  912. # qhasm: carry? ry2 += mulrax
  913. # asm 1: add <mulrax=int64#7,<ry2=int64#9
  914. # asm 2: add <mulrax=%rax,<ry2=%r11
  915. add %rax,%r11
  916. # qhasm: mulr21 += mulrdx + carry
  917. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  918. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  919. adc %rdx,%r12
  920. # qhasm: mulrax = *(uint64 *)(pp + 96)
  921. # asm 1: movq 96(<pp=int64#2),>mulrax=int64#7
  922. # asm 2: movq 96(<pp=%rsi),>mulrax=%rax
  923. movq 96(%rsi),%rax
  924. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 48)
  925. # asm 1: mulq 48(<pp=int64#2)
  926. # asm 2: mulq 48(<pp=%rsi)
  927. mulq 48(%rsi)
  928. # qhasm: carry? ry3 += mulrax
  929. # asm 1: add <mulrax=int64#7,<ry3=int64#11
  930. # asm 2: add <mulrax=%rax,<ry3=%r13
  931. add %rax,%r13
  932. # qhasm: mulr31 += mulrdx + carry
  933. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  934. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  935. adc %rdx,%r14
  936. # qhasm: mulrax = *(uint64 *)(pp + 96)
  937. # asm 1: movq 96(<pp=int64#2),>mulrax=int64#7
  938. # asm 2: movq 96(<pp=%rsi),>mulrax=%rax
  939. movq 96(%rsi),%rax
  940. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 56)
  941. # asm 1: mulq 56(<pp=int64#2)
  942. # asm 2: mulq 56(<pp=%rsi)
  943. mulq 56(%rsi)
  944. # qhasm: carry? ry4 += mulrax
  945. # asm 1: add <mulrax=int64#7,<ry4=int64#13
  946. # asm 2: add <mulrax=%rax,<ry4=%r15
  947. add %rax,%r15
  948. # qhasm: mulr41 += mulrdx + carry
  949. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  950. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  951. adc %rdx,%rbx
  952. # qhasm: mulrax = *(uint64 *)(pp + 96)
  953. # asm 1: movq 96(<pp=int64#2),>mulrax=int64#3
  954. # asm 2: movq 96(<pp=%rsi),>mulrax=%rdx
  955. movq 96(%rsi),%rdx
  956. # qhasm: mulrax *= 19
  957. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  958. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  959. imulq $19,%rdx,%rax
  960. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 64)
  961. # asm 1: mulq 64(<pp=int64#2)
  962. # asm 2: mulq 64(<pp=%rsi)
  963. mulq 64(%rsi)
  964. # qhasm: carry? ry0 += mulrax
  965. # asm 1: add <mulrax=int64#7,<ry0=int64#4
  966. # asm 2: add <mulrax=%rax,<ry0=%rcx
  967. add %rax,%rcx
  968. # qhasm: mulr01 += mulrdx + carry
  969. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
  970. # asm 2: adc <mulrdx=%rdx,<mulr01=%r8
  971. adc %rdx,%r8
  972. # qhasm: mulrax = *(uint64 *)(pp + 96)
  973. # asm 1: movq 96(<pp=int64#2),>mulrax=int64#3
  974. # asm 2: movq 96(<pp=%rsi),>mulrax=%rdx
  975. movq 96(%rsi),%rdx
  976. # qhasm: mulrax *= 19
  977. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  978. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  979. imulq $19,%rdx,%rax
  980. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 72)
  981. # asm 1: mulq 72(<pp=int64#2)
  982. # asm 2: mulq 72(<pp=%rsi)
  983. mulq 72(%rsi)
  984. # qhasm: carry? ry1 += mulrax
  985. # asm 1: add <mulrax=int64#7,<ry1=int64#6
  986. # asm 2: add <mulrax=%rax,<ry1=%r9
  987. add %rax,%r9
  988. # qhasm: mulr11 += mulrdx + carry
  989. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  990. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  991. adc %rdx,%r10
  992. # qhasm: mulrax = *(uint64 *)(pp + 104)
  993. # asm 1: movq 104(<pp=int64#2),>mulrax=int64#7
  994. # asm 2: movq 104(<pp=%rsi),>mulrax=%rax
  995. movq 104(%rsi),%rax
  996. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 40)
  997. # asm 1: mulq 40(<pp=int64#2)
  998. # asm 2: mulq 40(<pp=%rsi)
  999. mulq 40(%rsi)
  1000. # qhasm: carry? ry3 += mulrax
  1001. # asm 1: add <mulrax=int64#7,<ry3=int64#11
  1002. # asm 2: add <mulrax=%rax,<ry3=%r13
  1003. add %rax,%r13
  1004. # qhasm: mulr31 += mulrdx + carry
  1005. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  1006. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  1007. adc %rdx,%r14
  1008. # qhasm: mulrax = *(uint64 *)(pp + 104)
  1009. # asm 1: movq 104(<pp=int64#2),>mulrax=int64#7
  1010. # asm 2: movq 104(<pp=%rsi),>mulrax=%rax
  1011. movq 104(%rsi),%rax
  1012. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 48)
  1013. # asm 1: mulq 48(<pp=int64#2)
  1014. # asm 2: mulq 48(<pp=%rsi)
  1015. mulq 48(%rsi)
  1016. # qhasm: carry? ry4 += mulrax
  1017. # asm 1: add <mulrax=int64#7,<ry4=int64#13
  1018. # asm 2: add <mulrax=%rax,<ry4=%r15
  1019. add %rax,%r15
  1020. # qhasm: mulr41 += mulrdx + carry
  1021. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  1022. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  1023. adc %rdx,%rbx
  1024. # qhasm: mulrax = mulx319_stack
  1025. # asm 1: movq <mulx319_stack=stack64#8,>mulrax=int64#7
  1026. # asm 2: movq <mulx319_stack=56(%rsp),>mulrax=%rax
  1027. movq 56(%rsp),%rax
  1028. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 64)
  1029. # asm 1: mulq 64(<pp=int64#2)
  1030. # asm 2: mulq 64(<pp=%rsi)
  1031. mulq 64(%rsi)
  1032. # qhasm: carry? ry1 += mulrax
  1033. # asm 1: add <mulrax=int64#7,<ry1=int64#6
  1034. # asm 2: add <mulrax=%rax,<ry1=%r9
  1035. add %rax,%r9
  1036. # qhasm: mulr11 += mulrdx + carry
  1037. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  1038. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  1039. adc %rdx,%r10
  1040. # qhasm: mulrax = mulx319_stack
  1041. # asm 1: movq <mulx319_stack=stack64#8,>mulrax=int64#7
  1042. # asm 2: movq <mulx319_stack=56(%rsp),>mulrax=%rax
  1043. movq 56(%rsp),%rax
  1044. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 72)
  1045. # asm 1: mulq 72(<pp=int64#2)
  1046. # asm 2: mulq 72(<pp=%rsi)
  1047. mulq 72(%rsi)
  1048. # qhasm: carry? ry2 += mulrax
  1049. # asm 1: add <mulrax=int64#7,<ry2=int64#9
  1050. # asm 2: add <mulrax=%rax,<ry2=%r11
  1051. add %rax,%r11
  1052. # qhasm: mulr21 += mulrdx + carry
  1053. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  1054. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  1055. adc %rdx,%r12
  1056. # qhasm: mulrax = *(uint64 *)(pp + 112)
  1057. # asm 1: movq 112(<pp=int64#2),>mulrax=int64#7
  1058. # asm 2: movq 112(<pp=%rsi),>mulrax=%rax
  1059. movq 112(%rsi),%rax
  1060. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 40)
  1061. # asm 1: mulq 40(<pp=int64#2)
  1062. # asm 2: mulq 40(<pp=%rsi)
  1063. mulq 40(%rsi)
  1064. # qhasm: carry? ry4 += mulrax
  1065. # asm 1: add <mulrax=int64#7,<ry4=int64#13
  1066. # asm 2: add <mulrax=%rax,<ry4=%r15
  1067. add %rax,%r15
  1068. # qhasm: mulr41 += mulrdx + carry
  1069. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
  1070. # asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
  1071. adc %rdx,%rbx
  1072. # qhasm: mulrax = mulx419_stack
  1073. # asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
  1074. # asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
  1075. movq 64(%rsp),%rax
  1076. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 56)
  1077. # asm 1: mulq 56(<pp=int64#2)
  1078. # asm 2: mulq 56(<pp=%rsi)
  1079. mulq 56(%rsi)
  1080. # qhasm: carry? ry1 += mulrax
  1081. # asm 1: add <mulrax=int64#7,<ry1=int64#6
  1082. # asm 2: add <mulrax=%rax,<ry1=%r9
  1083. add %rax,%r9
  1084. # qhasm: mulr11 += mulrdx + carry
  1085. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
  1086. # asm 2: adc <mulrdx=%rdx,<mulr11=%r10
  1087. adc %rdx,%r10
  1088. # qhasm: mulrax = mulx419_stack
  1089. # asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
  1090. # asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
  1091. movq 64(%rsp),%rax
  1092. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 64)
  1093. # asm 1: mulq 64(<pp=int64#2)
  1094. # asm 2: mulq 64(<pp=%rsi)
  1095. mulq 64(%rsi)
  1096. # qhasm: carry? ry2 += mulrax
  1097. # asm 1: add <mulrax=int64#7,<ry2=int64#9
  1098. # asm 2: add <mulrax=%rax,<ry2=%r11
  1099. add %rax,%r11
  1100. # qhasm: mulr21 += mulrdx + carry
  1101. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
  1102. # asm 2: adc <mulrdx=%rdx,<mulr21=%r12
  1103. adc %rdx,%r12
  1104. # qhasm: mulrax = mulx419_stack
  1105. # asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
  1106. # asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
  1107. movq 64(%rsp),%rax
  1108. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 72)
  1109. # asm 1: mulq 72(<pp=int64#2)
  1110. # asm 2: mulq 72(<pp=%rsi)
  1111. mulq 72(%rsi)
  1112. # qhasm: carry? ry3 += mulrax
  1113. # asm 1: add <mulrax=int64#7,<ry3=int64#11
  1114. # asm 2: add <mulrax=%rax,<ry3=%r13
  1115. add %rax,%r13
  1116. # qhasm: mulr31 += mulrdx + carry
  1117. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
  1118. # asm 2: adc <mulrdx=%rdx,<mulr31=%r14
  1119. adc %rdx,%r14
  1120. # qhasm: mulredmask = *(uint64 *) &CRYPTO_NAMESPACE(batch_REDMASK51)
  1121. # asm 1: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=int64#3
  1122. # asm 2: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=%rdx
  1123. movq CRYPTO_NAMESPACE(batch_REDMASK51)(%rip),%rdx
# qhasm: mulr01 = (mulr01.ry0) << 13
# asm 1: shld $13,<ry0=int64#4,<mulr01=int64#5
# asm 2: shld $13,<ry0=%rcx,<mulr01=%r8
shld $13,%rcx,%r8
# qhasm: ry0 &= mulredmask
# asm 1: and <mulredmask=int64#3,<ry0=int64#4
# asm 2: and <mulredmask=%rdx,<ry0=%rcx
and %rdx,%rcx
# qhasm: mulr11 = (mulr11.ry1) << 13
# asm 1: shld $13,<ry1=int64#6,<mulr11=int64#8
# asm 2: shld $13,<ry1=%r9,<mulr11=%r10
shld $13,%r9,%r10
# qhasm: ry1 &= mulredmask
# asm 1: and <mulredmask=int64#3,<ry1=int64#6
# asm 2: and <mulredmask=%rdx,<ry1=%r9
and %rdx,%r9
# qhasm: ry1 += mulr01
# asm 1: add <mulr01=int64#5,<ry1=int64#6
# asm 2: add <mulr01=%r8,<ry1=%r9
add %r8,%r9
# qhasm: mulr21 = (mulr21.ry2) << 13
# asm 1: shld $13,<ry2=int64#9,<mulr21=int64#10
# asm 2: shld $13,<ry2=%r11,<mulr21=%r12
shld $13,%r11,%r12
# qhasm: ry2 &= mulredmask
# asm 1: and <mulredmask=int64#3,<ry2=int64#9
# asm 2: and <mulredmask=%rdx,<ry2=%r11
and %rdx,%r11
# qhasm: ry2 += mulr11
# asm 1: add <mulr11=int64#8,<ry2=int64#9
# asm 2: add <mulr11=%r10,<ry2=%r11
add %r10,%r11
# qhasm: mulr31 = (mulr31.ry3) << 13
# asm 1: shld $13,<ry3=int64#11,<mulr31=int64#12
# asm 2: shld $13,<ry3=%r13,<mulr31=%r14
shld $13,%r13,%r14
# qhasm: ry3 &= mulredmask
# asm 1: and <mulredmask=int64#3,<ry3=int64#11
# asm 2: and <mulredmask=%rdx,<ry3=%r13
and %rdx,%r13
# qhasm: ry3 += mulr21
# asm 1: add <mulr21=int64#10,<ry3=int64#11
# asm 2: add <mulr21=%r12,<ry3=%r13
add %r12,%r13
# qhasm: mulr41 = (mulr41.ry4) << 13
# asm 1: shld $13,<ry4=int64#13,<mulr41=int64#14
# asm 2: shld $13,<ry4=%r15,<mulr41=%rbx
shld $13,%r15,%rbx
# qhasm: ry4 &= mulredmask
# asm 1: and <mulredmask=int64#3,<ry4=int64#13
# asm 2: and <mulredmask=%rdx,<ry4=%r15
and %rdx,%r15
# qhasm: ry4 += mulr31
# asm 1: add <mulr31=int64#12,<ry4=int64#13
# asm 2: add <mulr31=%r14,<ry4=%r15
add %r14,%r15
# qhasm: mulr41 = mulr41 * 19
# asm 1: imulq $19,<mulr41=int64#14,>mulr41=int64#5
# asm 2: imulq $19,<mulr41=%rbx,>mulr41=%r8
imulq $19,%rbx,%r8
# qhasm: ry0 += mulr41
# asm 1: add <mulr41=int64#5,<ry0=int64#4
# asm 2: add <mulr41=%r8,<ry0=%rcx
add %r8,%rcx
# qhasm: mult = ry0
# asm 1: mov <ry0=int64#4,>mult=int64#5
# asm 2: mov <ry0=%rcx,>mult=%r8
mov %rcx,%r8
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#5
# asm 2: shr $51,<mult=%r8
shr $51,%r8
# qhasm: mult += ry1
# asm 1: add <ry1=int64#6,<mult=int64#5
# asm 2: add <ry1=%r9,<mult=%r8
add %r9,%r8
# qhasm: ry1 = mult
# asm 1: mov <mult=int64#5,>ry1=int64#6
# asm 2: mov <mult=%r8,>ry1=%r9
mov %r8,%r9
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#5
# asm 2: shr $51,<mult=%r8
shr $51,%r8
# qhasm: ry0 &= mulredmask
# asm 1: and <mulredmask=int64#3,<ry0=int64#4
# asm 2: and <mulredmask=%rdx,<ry0=%rcx
and %rdx,%rcx
# qhasm: mult += ry2
# asm 1: add <ry2=int64#9,<mult=int64#5
# asm 2: add <ry2=%r11,<mult=%r8
add %r11,%r8
# qhasm: ry2 = mult
# asm 1: mov <mult=int64#5,>ry2=int64#7
# asm 2: mov <mult=%r8,>ry2=%rax
mov %r8,%rax
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#5
# asm 2: shr $51,<mult=%r8
shr $51,%r8
# qhasm: ry1 &= mulredmask
# asm 1: and <mulredmask=int64#3,<ry1=int64#6
# asm 2: and <mulredmask=%rdx,<ry1=%r9
and %rdx,%r9
# qhasm: mult += ry3
# asm 1: add <ry3=int64#11,<mult=int64#5
# asm 2: add <ry3=%r13,<mult=%r8
add %r13,%r8
# qhasm: ry3 = mult
# asm 1: mov <mult=int64#5,>ry3=int64#8
# asm 2: mov <mult=%r8,>ry3=%r10
mov %r8,%r10
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#5
# asm 2: shr $51,<mult=%r8
shr $51,%r8
# qhasm: ry2 &= mulredmask
# asm 1: and <mulredmask=int64#3,<ry2=int64#7
# asm 2: and <mulredmask=%rdx,<ry2=%rax
and %rdx,%rax
# qhasm: mult += ry4
# asm 1: add <ry4=int64#13,<mult=int64#5
# asm 2: add <ry4=%r15,<mult=%r8
add %r15,%r8
# qhasm: ry4 = mult
# asm 1: mov <mult=int64#5,>ry4=int64#9
# asm 2: mov <mult=%r8,>ry4=%r11
mov %r8,%r11
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#5
# asm 2: shr $51,<mult=%r8
shr $51,%r8
# qhasm: ry3 &= mulredmask
# asm 1: and <mulredmask=int64#3,<ry3=int64#8
# asm 2: and <mulredmask=%rdx,<ry3=%r10
and %rdx,%r10
# qhasm: mult *= 19
# asm 1: imulq $19,<mult=int64#5,>mult=int64#5
# asm 2: imulq $19,<mult=%r8,>mult=%r8
imulq $19,%r8,%r8
# qhasm: ry0 += mult
# asm 1: add <mult=int64#5,<ry0=int64#4
# asm 2: add <mult=%r8,<ry0=%rcx
add %r8,%rcx
# qhasm: ry4 &= mulredmask
# asm 1: and <mulredmask=int64#3,<ry4=int64#9
# asm 2: and <mulredmask=%rdx,<ry4=%r11
and %rdx,%r11
# qhasm: *(uint64 *)(rp + 40) = ry0
# asm 1: movq <ry0=int64#4,40(<rp=int64#1)
# asm 2: movq <ry0=%rcx,40(<rp=%rdi)
movq %rcx,40(%rdi)
# qhasm: *(uint64 *)(rp + 48) = ry1
# asm 1: movq <ry1=int64#6,48(<rp=int64#1)
# asm 2: movq <ry1=%r9,48(<rp=%rdi)
movq %r9,48(%rdi)
# qhasm: *(uint64 *)(rp + 56) = ry2
# asm 1: movq <ry2=int64#7,56(<rp=int64#1)
# asm 2: movq <ry2=%rax,56(<rp=%rdi)
movq %rax,56(%rdi)
# qhasm: *(uint64 *)(rp + 64) = ry3
# asm 1: movq <ry3=int64#8,64(<rp=int64#1)
# asm 2: movq <ry3=%r10,64(<rp=%rdi)
movq %r10,64(%rdi)
# qhasm: *(uint64 *)(rp + 72) = ry4
# asm 1: movq <ry4=int64#9,72(<rp=int64#1)
# asm 2: movq <ry4=%r11,72(<rp=%rdi)
movq %r11,72(%rdi)
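# The reduced y limbs are stored; next compute the z output limbs as the
# product of the field elements at pp+40..pp+72 and pp+120..pp+152
# (schoolbook 5x5 limb multiplication, with cross terms that wrap past
# limb 4 pre-scaled by 19, as usual for arithmetic mod 2^255-19).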
# qhasm: mulrax = *(uint64 *)(pp + 64)
# asm 1: movq 64(<pp=int64#2),>mulrax=int64#3
# asm 2: movq 64(<pp=%rsi),>mulrax=%rdx
movq 64(%rsi),%rdx
# qhasm: mulrax *= 19
# asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
# asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
imulq $19,%rdx,%rax
# qhasm: mulx319_stack = mulrax
# asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#8
# asm 2: movq <mulrax=%rax,>mulx319_stack=56(%rsp)
movq %rax,56(%rsp)
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 136)
# asm 1: mulq 136(<pp=int64#2)
# asm 2: mulq 136(<pp=%rsi)
mulq 136(%rsi)
# qhasm: rz0 = mulrax
# asm 1: mov <mulrax=int64#7,>rz0=int64#4
# asm 2: mov <mulrax=%rax,>rz0=%rcx
mov %rax,%rcx
# qhasm: mulr01 = mulrdx
# asm 1: mov <mulrdx=int64#3,>mulr01=int64#5
# asm 2: mov <mulrdx=%rdx,>mulr01=%r8
mov %rdx,%r8
# qhasm: mulrax = *(uint64 *)(pp + 72)
# asm 1: movq 72(<pp=int64#2),>mulrax=int64#3
# asm 2: movq 72(<pp=%rsi),>mulrax=%rdx
movq 72(%rsi),%rdx
# qhasm: mulrax *= 19
# asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
# asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
imulq $19,%rdx,%rax
# qhasm: mulx419_stack = mulrax
# asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#9
# asm 2: movq <mulrax=%rax,>mulx419_stack=64(%rsp)
movq %rax,64(%rsp)
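# mulx319_stack and mulx419_stack cache 19 times limbs 3 and 4 of the
# first operand (pp+64, pp+72); they are reloaded below for the wrapped
# cross products instead of redoing the *19 scaling each time.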
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 128)
# asm 1: mulq 128(<pp=int64#2)
# asm 2: mulq 128(<pp=%rsi)
mulq 128(%rsi)
# qhasm: carry? rz0 += mulrax
# asm 1: add <mulrax=int64#7,<rz0=int64#4
# asm 2: add <mulrax=%rax,<rz0=%rcx
add %rax,%rcx
# qhasm: mulr01 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
# asm 2: adc <mulrdx=%rdx,<mulr01=%r8
adc %rdx,%r8
# qhasm: mulrax = *(uint64 *)(pp + 40)
# asm 1: movq 40(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 40(<pp=%rsi),>mulrax=%rax
movq 40(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 120)
# asm 1: mulq 120(<pp=int64#2)
# asm 2: mulq 120(<pp=%rsi)
mulq 120(%rsi)
# qhasm: carry? rz0 += mulrax
# asm 1: add <mulrax=int64#7,<rz0=int64#4
# asm 2: add <mulrax=%rax,<rz0=%rcx
add %rax,%rcx
# qhasm: mulr01 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
# asm 2: adc <mulrdx=%rdx,<mulr01=%r8
adc %rdx,%r8
# qhasm: mulrax = *(uint64 *)(pp + 40)
# asm 1: movq 40(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 40(<pp=%rsi),>mulrax=%rax
movq 40(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 128)
# asm 1: mulq 128(<pp=int64#2)
# asm 2: mulq 128(<pp=%rsi)
mulq 128(%rsi)
# qhasm: rz1 = mulrax
# asm 1: mov <mulrax=int64#7,>rz1=int64#6
# asm 2: mov <mulrax=%rax,>rz1=%r9
mov %rax,%r9
# qhasm: mulr11 = mulrdx
# asm 1: mov <mulrdx=int64#3,>mulr11=int64#8
# asm 2: mov <mulrdx=%rdx,>mulr11=%r10
mov %rdx,%r10
# qhasm: mulrax = *(uint64 *)(pp + 40)
# asm 1: movq 40(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 40(<pp=%rsi),>mulrax=%rax
movq 40(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 136)
# asm 1: mulq 136(<pp=int64#2)
# asm 2: mulq 136(<pp=%rsi)
mulq 136(%rsi)
# qhasm: rz2 = mulrax
# asm 1: mov <mulrax=int64#7,>rz2=int64#9
# asm 2: mov <mulrax=%rax,>rz2=%r11
mov %rax,%r11
# qhasm: mulr21 = mulrdx
# asm 1: mov <mulrdx=int64#3,>mulr21=int64#10
# asm 2: mov <mulrdx=%rdx,>mulr21=%r12
mov %rdx,%r12
# qhasm: mulrax = *(uint64 *)(pp + 40)
# asm 1: movq 40(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 40(<pp=%rsi),>mulrax=%rax
movq 40(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 144)
# asm 1: mulq 144(<pp=int64#2)
# asm 2: mulq 144(<pp=%rsi)
mulq 144(%rsi)
# qhasm: rz3 = mulrax
# asm 1: mov <mulrax=int64#7,>rz3=int64#11
# asm 2: mov <mulrax=%rax,>rz3=%r13
mov %rax,%r13
# qhasm: mulr31 = mulrdx
# asm 1: mov <mulrdx=int64#3,>mulr31=int64#12
# asm 2: mov <mulrdx=%rdx,>mulr31=%r14
mov %rdx,%r14
# qhasm: mulrax = *(uint64 *)(pp + 40)
# asm 1: movq 40(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 40(<pp=%rsi),>mulrax=%rax
movq 40(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 152)
# asm 1: mulq 152(<pp=int64#2)
# asm 2: mulq 152(<pp=%rsi)
mulq 152(%rsi)
# qhasm: rz4 = mulrax
# asm 1: mov <mulrax=int64#7,>rz4=int64#13
# asm 2: mov <mulrax=%rax,>rz4=%r15
mov %rax,%r15
# qhasm: mulr41 = mulrdx
# asm 1: mov <mulrdx=int64#3,>mulr41=int64#14
# asm 2: mov <mulrdx=%rdx,>mulr41=%rbx
mov %rdx,%rbx
# qhasm: mulrax = *(uint64 *)(pp + 48)
# asm 1: movq 48(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 48(<pp=%rsi),>mulrax=%rax
movq 48(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 120)
# asm 1: mulq 120(<pp=int64#2)
# asm 2: mulq 120(<pp=%rsi)
mulq 120(%rsi)
# qhasm: carry? rz1 += mulrax
# asm 1: add <mulrax=int64#7,<rz1=int64#6
# asm 2: add <mulrax=%rax,<rz1=%r9
add %rax,%r9
# qhasm: mulr11 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
# asm 2: adc <mulrdx=%rdx,<mulr11=%r10
adc %rdx,%r10
# qhasm: mulrax = *(uint64 *)(pp + 48)
# asm 1: movq 48(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 48(<pp=%rsi),>mulrax=%rax
movq 48(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 128)
# asm 1: mulq 128(<pp=int64#2)
# asm 2: mulq 128(<pp=%rsi)
mulq 128(%rsi)
# qhasm: carry? rz2 += mulrax
# asm 1: add <mulrax=int64#7,<rz2=int64#9
# asm 2: add <mulrax=%rax,<rz2=%r11
add %rax,%r11
# qhasm: mulr21 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
# asm 2: adc <mulrdx=%rdx,<mulr21=%r12
adc %rdx,%r12
# qhasm: mulrax = *(uint64 *)(pp + 48)
# asm 1: movq 48(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 48(<pp=%rsi),>mulrax=%rax
movq 48(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 136)
# asm 1: mulq 136(<pp=int64#2)
# asm 2: mulq 136(<pp=%rsi)
mulq 136(%rsi)
# qhasm: carry? rz3 += mulrax
# asm 1: add <mulrax=int64#7,<rz3=int64#11
# asm 2: add <mulrax=%rax,<rz3=%r13
add %rax,%r13
# qhasm: mulr31 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
# asm 2: adc <mulrdx=%rdx,<mulr31=%r14
adc %rdx,%r14
# qhasm: mulrax = *(uint64 *)(pp + 48)
# asm 1: movq 48(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 48(<pp=%rsi),>mulrax=%rax
movq 48(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 144)
# asm 1: mulq 144(<pp=int64#2)
# asm 2: mulq 144(<pp=%rsi)
mulq 144(%rsi)
# qhasm: carry? rz4 += mulrax
# asm 1: add <mulrax=int64#7,<rz4=int64#13
# asm 2: add <mulrax=%rax,<rz4=%r15
add %rax,%r15
# qhasm: mulr41 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
# asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
adc %rdx,%rbx
# qhasm: mulrax = *(uint64 *)(pp + 48)
# asm 1: movq 48(<pp=int64#2),>mulrax=int64#3
# asm 2: movq 48(<pp=%rsi),>mulrax=%rdx
movq 48(%rsi),%rdx
# qhasm: mulrax *= 19
# asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
# asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
imulq $19,%rdx,%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 152)
# asm 1: mulq 152(<pp=int64#2)
# asm 2: mulq 152(<pp=%rsi)
mulq 152(%rsi)
# qhasm: carry? rz0 += mulrax
# asm 1: add <mulrax=int64#7,<rz0=int64#4
# asm 2: add <mulrax=%rax,<rz0=%rcx
add %rax,%rcx
# qhasm: mulr01 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
# asm 2: adc <mulrdx=%rdx,<mulr01=%r8
adc %rdx,%r8
# qhasm: mulrax = *(uint64 *)(pp + 56)
# asm 1: movq 56(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 56(<pp=%rsi),>mulrax=%rax
movq 56(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 120)
# asm 1: mulq 120(<pp=int64#2)
# asm 2: mulq 120(<pp=%rsi)
mulq 120(%rsi)
# qhasm: carry? rz2 += mulrax
# asm 1: add <mulrax=int64#7,<rz2=int64#9
# asm 2: add <mulrax=%rax,<rz2=%r11
add %rax,%r11
# qhasm: mulr21 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
# asm 2: adc <mulrdx=%rdx,<mulr21=%r12
adc %rdx,%r12
# qhasm: mulrax = *(uint64 *)(pp + 56)
# asm 1: movq 56(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 56(<pp=%rsi),>mulrax=%rax
movq 56(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 128)
# asm 1: mulq 128(<pp=int64#2)
# asm 2: mulq 128(<pp=%rsi)
mulq 128(%rsi)
# qhasm: carry? rz3 += mulrax
# asm 1: add <mulrax=int64#7,<rz3=int64#11
# asm 2: add <mulrax=%rax,<rz3=%r13
add %rax,%r13
# qhasm: mulr31 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
# asm 2: adc <mulrdx=%rdx,<mulr31=%r14
adc %rdx,%r14
# qhasm: mulrax = *(uint64 *)(pp + 56)
# asm 1: movq 56(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 56(<pp=%rsi),>mulrax=%rax
movq 56(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 136)
# asm 1: mulq 136(<pp=int64#2)
# asm 2: mulq 136(<pp=%rsi)
mulq 136(%rsi)
# qhasm: carry? rz4 += mulrax
# asm 1: add <mulrax=int64#7,<rz4=int64#13
# asm 2: add <mulrax=%rax,<rz4=%r15
add %rax,%r15
# qhasm: mulr41 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
# asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
adc %rdx,%rbx
# qhasm: mulrax = *(uint64 *)(pp + 56)
# asm 1: movq 56(<pp=int64#2),>mulrax=int64#3
# asm 2: movq 56(<pp=%rsi),>mulrax=%rdx
movq 56(%rsi),%rdx
# qhasm: mulrax *= 19
# asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
# asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
imulq $19,%rdx,%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 144)
# asm 1: mulq 144(<pp=int64#2)
# asm 2: mulq 144(<pp=%rsi)
mulq 144(%rsi)
# qhasm: carry? rz0 += mulrax
# asm 1: add <mulrax=int64#7,<rz0=int64#4
# asm 2: add <mulrax=%rax,<rz0=%rcx
add %rax,%rcx
# qhasm: mulr01 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr01=int64#5
# asm 2: adc <mulrdx=%rdx,<mulr01=%r8
adc %rdx,%r8
# qhasm: mulrax = *(uint64 *)(pp + 56)
# asm 1: movq 56(<pp=int64#2),>mulrax=int64#3
# asm 2: movq 56(<pp=%rsi),>mulrax=%rdx
movq 56(%rsi),%rdx
# qhasm: mulrax *= 19
# asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
# asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
imulq $19,%rdx,%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 152)
# asm 1: mulq 152(<pp=int64#2)
# asm 2: mulq 152(<pp=%rsi)
mulq 152(%rsi)
# qhasm: carry? rz1 += mulrax
# asm 1: add <mulrax=int64#7,<rz1=int64#6
# asm 2: add <mulrax=%rax,<rz1=%r9
add %rax,%r9
# qhasm: mulr11 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
# asm 2: adc <mulrdx=%rdx,<mulr11=%r10
adc %rdx,%r10
# qhasm: mulrax = *(uint64 *)(pp + 64)
# asm 1: movq 64(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 64(<pp=%rsi),>mulrax=%rax
movq 64(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 120)
# asm 1: mulq 120(<pp=int64#2)
# asm 2: mulq 120(<pp=%rsi)
mulq 120(%rsi)
# qhasm: carry? rz3 += mulrax
# asm 1: add <mulrax=int64#7,<rz3=int64#11
# asm 2: add <mulrax=%rax,<rz3=%r13
add %rax,%r13
# qhasm: mulr31 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
# asm 2: adc <mulrdx=%rdx,<mulr31=%r14
adc %rdx,%r14
# qhasm: mulrax = *(uint64 *)(pp + 64)
# asm 1: movq 64(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 64(<pp=%rsi),>mulrax=%rax
movq 64(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 128)
# asm 1: mulq 128(<pp=int64#2)
# asm 2: mulq 128(<pp=%rsi)
mulq 128(%rsi)
# qhasm: carry? rz4 += mulrax
# asm 1: add <mulrax=int64#7,<rz4=int64#13
# asm 2: add <mulrax=%rax,<rz4=%r15
add %rax,%r15
# qhasm: mulr41 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
# asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
adc %rdx,%rbx
# qhasm: mulrax = mulx319_stack
# asm 1: movq <mulx319_stack=stack64#8,>mulrax=int64#7
# asm 2: movq <mulx319_stack=56(%rsp),>mulrax=%rax
movq 56(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 144)
# asm 1: mulq 144(<pp=int64#2)
# asm 2: mulq 144(<pp=%rsi)
mulq 144(%rsi)
# qhasm: carry? rz1 += mulrax
# asm 1: add <mulrax=int64#7,<rz1=int64#6
# asm 2: add <mulrax=%rax,<rz1=%r9
add %rax,%r9
# qhasm: mulr11 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
# asm 2: adc <mulrdx=%rdx,<mulr11=%r10
adc %rdx,%r10
# qhasm: mulrax = mulx319_stack
# asm 1: movq <mulx319_stack=stack64#8,>mulrax=int64#7
# asm 2: movq <mulx319_stack=56(%rsp),>mulrax=%rax
movq 56(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 152)
# asm 1: mulq 152(<pp=int64#2)
# asm 2: mulq 152(<pp=%rsi)
mulq 152(%rsi)
# qhasm: carry? rz2 += mulrax
# asm 1: add <mulrax=int64#7,<rz2=int64#9
# asm 2: add <mulrax=%rax,<rz2=%r11
add %rax,%r11
# qhasm: mulr21 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
# asm 2: adc <mulrdx=%rdx,<mulr21=%r12
adc %rdx,%r12
# qhasm: mulrax = *(uint64 *)(pp + 72)
# asm 1: movq 72(<pp=int64#2),>mulrax=int64#7
# asm 2: movq 72(<pp=%rsi),>mulrax=%rax
movq 72(%rsi),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 120)
# asm 1: mulq 120(<pp=int64#2)
# asm 2: mulq 120(<pp=%rsi)
mulq 120(%rsi)
# qhasm: carry? rz4 += mulrax
# asm 1: add <mulrax=int64#7,<rz4=int64#13
# asm 2: add <mulrax=%rax,<rz4=%r15
add %rax,%r15
# qhasm: mulr41 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr41=int64#14
# asm 2: adc <mulrdx=%rdx,<mulr41=%rbx
adc %rdx,%rbx
# qhasm: mulrax = mulx419_stack
# asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
# asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
movq 64(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 136)
# asm 1: mulq 136(<pp=int64#2)
# asm 2: mulq 136(<pp=%rsi)
mulq 136(%rsi)
# qhasm: carry? rz1 += mulrax
# asm 1: add <mulrax=int64#7,<rz1=int64#6
# asm 2: add <mulrax=%rax,<rz1=%r9
add %rax,%r9
# qhasm: mulr11 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr11=int64#8
# asm 2: adc <mulrdx=%rdx,<mulr11=%r10
adc %rdx,%r10
# qhasm: mulrax = mulx419_stack
# asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
# asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
movq 64(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 144)
# asm 1: mulq 144(<pp=int64#2)
# asm 2: mulq 144(<pp=%rsi)
mulq 144(%rsi)
# qhasm: carry? rz2 += mulrax
# asm 1: add <mulrax=int64#7,<rz2=int64#9
# asm 2: add <mulrax=%rax,<rz2=%r11
add %rax,%r11
# qhasm: mulr21 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr21=int64#10
# asm 2: adc <mulrdx=%rdx,<mulr21=%r12
adc %rdx,%r12
# qhasm: mulrax = mulx419_stack
# asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
# asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
movq 64(%rsp),%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(pp + 152)
# asm 1: mulq 152(<pp=int64#2)
# asm 2: mulq 152(<pp=%rsi)
mulq 152(%rsi)
# qhasm: carry? rz3 += mulrax
# asm 1: add <mulrax=int64#7,<rz3=int64#11
# asm 2: add <mulrax=%rax,<rz3=%r13
add %rax,%r13
# qhasm: mulr31 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr31=int64#12
# asm 2: adc <mulrdx=%rdx,<mulr31=%r14
adc %rdx,%r14
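# All partial products of the 5x5 limb multiplication are now
# accumulated in rz0..rz4 / mulr01..mulr41 as 128-bit (lo,hi) pairs.
# The same radix-2^51 carry/reduce sequence as above follows, and the
# result is written to rp+80..rp+112.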
# qhasm: mulredmask = *(uint64 *) &CRYPTO_NAMESPACE(batch_REDMASK51)
# asm 1: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=int64#2
# asm 2: movq CRYPTO_NAMESPACE(batch_REDMASK51),>mulredmask=%rsi
movq CRYPTO_NAMESPACE(batch_REDMASK51)(%rip),%rsi
# qhasm: mulr01 = (mulr01.rz0) << 13
# asm 1: shld $13,<rz0=int64#4,<mulr01=int64#5
# asm 2: shld $13,<rz0=%rcx,<mulr01=%r8
shld $13,%rcx,%r8
# qhasm: rz0 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rz0=int64#4
# asm 2: and <mulredmask=%rsi,<rz0=%rcx
and %rsi,%rcx
# qhasm: mulr11 = (mulr11.rz1) << 13
# asm 1: shld $13,<rz1=int64#6,<mulr11=int64#8
# asm 2: shld $13,<rz1=%r9,<mulr11=%r10
shld $13,%r9,%r10
# qhasm: rz1 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rz1=int64#6
# asm 2: and <mulredmask=%rsi,<rz1=%r9
and %rsi,%r9
# qhasm: rz1 += mulr01
# asm 1: add <mulr01=int64#5,<rz1=int64#6
# asm 2: add <mulr01=%r8,<rz1=%r9
add %r8,%r9
# qhasm: mulr21 = (mulr21.rz2) << 13
# asm 1: shld $13,<rz2=int64#9,<mulr21=int64#10
# asm 2: shld $13,<rz2=%r11,<mulr21=%r12
shld $13,%r11,%r12
# qhasm: rz2 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rz2=int64#9
# asm 2: and <mulredmask=%rsi,<rz2=%r11
and %rsi,%r11
# qhasm: rz2 += mulr11
# asm 1: add <mulr11=int64#8,<rz2=int64#9
# asm 2: add <mulr11=%r10,<rz2=%r11
add %r10,%r11
# qhasm: mulr31 = (mulr31.rz3) << 13
# asm 1: shld $13,<rz3=int64#11,<mulr31=int64#12
# asm 2: shld $13,<rz3=%r13,<mulr31=%r14
shld $13,%r13,%r14
# qhasm: rz3 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rz3=int64#11
# asm 2: and <mulredmask=%rsi,<rz3=%r13
and %rsi,%r13
# qhasm: rz3 += mulr21
# asm 1: add <mulr21=int64#10,<rz3=int64#11
# asm 2: add <mulr21=%r12,<rz3=%r13
add %r12,%r13
# qhasm: mulr41 = (mulr41.rz4) << 13
# asm 1: shld $13,<rz4=int64#13,<mulr41=int64#14
# asm 2: shld $13,<rz4=%r15,<mulr41=%rbx
shld $13,%r15,%rbx
# qhasm: rz4 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rz4=int64#13
# asm 2: and <mulredmask=%rsi,<rz4=%r15
and %rsi,%r15
# qhasm: rz4 += mulr31
# asm 1: add <mulr31=int64#12,<rz4=int64#13
# asm 2: add <mulr31=%r14,<rz4=%r15
add %r14,%r15
# qhasm: mulr41 = mulr41 * 19
# asm 1: imulq $19,<mulr41=int64#14,>mulr41=int64#3
# asm 2: imulq $19,<mulr41=%rbx,>mulr41=%rdx
imulq $19,%rbx,%rdx
# qhasm: rz0 += mulr41
# asm 1: add <mulr41=int64#3,<rz0=int64#4
# asm 2: add <mulr41=%rdx,<rz0=%rcx
add %rdx,%rcx
# qhasm: mult = rz0
# asm 1: mov <rz0=int64#4,>mult=int64#3
# asm 2: mov <rz0=%rcx,>mult=%rdx
mov %rcx,%rdx
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#3
# asm 2: shr $51,<mult=%rdx
shr $51,%rdx
# qhasm: mult += rz1
# asm 1: add <rz1=int64#6,<mult=int64#3
# asm 2: add <rz1=%r9,<mult=%rdx
add %r9,%rdx
# qhasm: rz1 = mult
# asm 1: mov <mult=int64#3,>rz1=int64#5
# asm 2: mov <mult=%rdx,>rz1=%r8
mov %rdx,%r8
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#3
# asm 2: shr $51,<mult=%rdx
shr $51,%rdx
# qhasm: rz0 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rz0=int64#4
# asm 2: and <mulredmask=%rsi,<rz0=%rcx
and %rsi,%rcx
# qhasm: mult += rz2
# asm 1: add <rz2=int64#9,<mult=int64#3
# asm 2: add <rz2=%r11,<mult=%rdx
add %r11,%rdx
# qhasm: rz2 = mult
# asm 1: mov <mult=int64#3,>rz2=int64#6
# asm 2: mov <mult=%rdx,>rz2=%r9
mov %rdx,%r9
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#3
# asm 2: shr $51,<mult=%rdx
shr $51,%rdx
# qhasm: rz1 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rz1=int64#5
# asm 2: and <mulredmask=%rsi,<rz1=%r8
and %rsi,%r8
# qhasm: mult += rz3
# asm 1: add <rz3=int64#11,<mult=int64#3
# asm 2: add <rz3=%r13,<mult=%rdx
add %r13,%rdx
# qhasm: rz3 = mult
# asm 1: mov <mult=int64#3,>rz3=int64#7
# asm 2: mov <mult=%rdx,>rz3=%rax
mov %rdx,%rax
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#3
# asm 2: shr $51,<mult=%rdx
shr $51,%rdx
# qhasm: rz2 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rz2=int64#6
# asm 2: and <mulredmask=%rsi,<rz2=%r9
and %rsi,%r9
# qhasm: mult += rz4
# asm 1: add <rz4=int64#13,<mult=int64#3
# asm 2: add <rz4=%r15,<mult=%rdx
add %r15,%rdx
# qhasm: rz4 = mult
# asm 1: mov <mult=int64#3,>rz4=int64#8
# asm 2: mov <mult=%rdx,>rz4=%r10
mov %rdx,%r10
# qhasm: (uint64) mult >>= 51
# asm 1: shr $51,<mult=int64#3
# asm 2: shr $51,<mult=%rdx
shr $51,%rdx
# qhasm: rz3 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rz3=int64#7
# asm 2: and <mulredmask=%rsi,<rz3=%rax
and %rsi,%rax
# qhasm: mult *= 19
# asm 1: imulq $19,<mult=int64#3,>mult=int64#3
# asm 2: imulq $19,<mult=%rdx,>mult=%rdx
imulq $19,%rdx,%rdx
# qhasm: rz0 += mult
# asm 1: add <mult=int64#3,<rz0=int64#4
# asm 2: add <mult=%rdx,<rz0=%rcx
add %rdx,%rcx
# qhasm: rz4 &= mulredmask
# asm 1: and <mulredmask=int64#2,<rz4=int64#8
# asm 2: and <mulredmask=%rsi,<rz4=%r10
and %rsi,%r10
# qhasm: *(uint64 *)(rp + 80) = rz0
# asm 1: movq <rz0=int64#4,80(<rp=int64#1)
# asm 2: movq <rz0=%rcx,80(<rp=%rdi)
movq %rcx,80(%rdi)
# qhasm: *(uint64 *)(rp + 88) = rz1
# asm 1: movq <rz1=int64#5,88(<rp=int64#1)
# asm 2: movq <rz1=%r8,88(<rp=%rdi)
movq %r8,88(%rdi)
# qhasm: *(uint64 *)(rp + 96) = rz2
# asm 1: movq <rz2=int64#6,96(<rp=int64#1)
# asm 2: movq <rz2=%r9,96(<rp=%rdi)
movq %r9,96(%rdi)
# qhasm: *(uint64 *)(rp + 104) = rz3
# asm 1: movq <rz3=int64#7,104(<rp=int64#1)
# asm 2: movq <rz3=%rax,104(<rp=%rdi)
movq %rax,104(%rdi)
# qhasm: *(uint64 *)(rp + 112) = rz4
# asm 1: movq <rz4=int64#8,112(<rp=int64#1)
# asm 2: movq <rz4=%r10,112(<rp=%rdi)
movq %r10,112(%rdi)
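# Epilogue: reload the registers spilled at the start of the function,
# undo the stack adjustment (the amount reloaded into %r11 from 0(%rsp),
# presumably saved there by the prologue, is added back to %rsp), and
# return; the final moves of rdi/rsi into rax/rdx are part of qhasm's
# standard leave sequence.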
# qhasm: caller1 = caller1_stack
# asm 1: movq <caller1_stack=stack64#1,>caller1=int64#9
# asm 2: movq <caller1_stack=0(%rsp),>caller1=%r11
movq 0(%rsp),%r11
# qhasm: caller2 = caller2_stack
# asm 1: movq <caller2_stack=stack64#2,>caller2=int64#10
# asm 2: movq <caller2_stack=8(%rsp),>caller2=%r12
movq 8(%rsp),%r12
# qhasm: caller3 = caller3_stack
# asm 1: movq <caller3_stack=stack64#3,>caller3=int64#11
# asm 2: movq <caller3_stack=16(%rsp),>caller3=%r13
movq 16(%rsp),%r13
# qhasm: caller4 = caller4_stack
# asm 1: movq <caller4_stack=stack64#4,>caller4=int64#12
# asm 2: movq <caller4_stack=24(%rsp),>caller4=%r14
movq 24(%rsp),%r14
# qhasm: caller5 = caller5_stack
# asm 1: movq <caller5_stack=stack64#5,>caller5=int64#13
# asm 2: movq <caller5_stack=32(%rsp),>caller5=%r15
movq 32(%rsp),%r15
# qhasm: caller6 = caller6_stack
# asm 1: movq <caller6_stack=stack64#6,>caller6=int64#14
# asm 2: movq <caller6_stack=40(%rsp),>caller6=%rbx
movq 40(%rsp),%rbx
# qhasm: caller7 = caller7_stack
# asm 1: movq <caller7_stack=stack64#7,>caller7=int64#15
# asm 2: movq <caller7_stack=48(%rsp),>caller7=%rbp
movq 48(%rsp),%rbp
# qhasm: leave
add %r11,%rsp
mov %rdi,%rax
mov %rsi,%rdx
ret