# sc25519_lt.S
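#
# batch_sc25519_lt(xp, yp): compare two 256-bit scalars stored as four
# 64-bit little-endian limbs. Returns 1 if *xp < *yp as unsigned 256-bit
# integers, else 0. qhasm-generated; the original qhasm source appears in
# the "# qhasm:" comments below.
#
# For reference, a minimal C sketch of the same computation (illustrative
# only, not part of the generated output; the name is ours, and the branchy
# style is for clarity, whereas the assembly below is branch-free):
#
#   #include <stdint.h>
#
#   int sc25519_lt_ref(const uint64_t x[4], const uint64_t y[4]) {
#     int b = 0;                                   /* running borrow */
#     for (int i = 0; i < 4; i++)
#       b = (x[i] < y[i]) || (x[i] == y[i] && b);  /* borrow out of x[i]-y[i]-b */
#     return b;                                    /* 1 iff x < y */
#   }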

# qhasm: int64 xp
# qhasm: int64 yp
# qhasm: int64 ret
# qhasm: input xp
# qhasm: input yp
# qhasm: output ret
# qhasm: int64 t0
# qhasm: int64 t1
# qhasm: int64 t2
# qhasm: int64 t3
# qhasm: int64 doof
# qhasm: int64 caller1
# qhasm: int64 caller2
# qhasm: int64 caller3
# qhasm: int64 caller4
# qhasm: int64 caller5
# qhasm: int64 caller6
# qhasm: int64 caller7
# qhasm: caller caller1
# qhasm: caller caller2
# qhasm: caller caller3
# qhasm: caller caller4
# qhasm: caller caller5
# qhasm: caller caller6
# qhasm: caller caller7
# qhasm: stack64 caller4_stack
# qhasm: stack64 caller5_stack
# qhasm: stack64 caller6_stack
# qhasm: stack64 caller7_stack
# qhasm: enter CRYPTO_NAMESPACE(batch_sc25519_lt)
.text
.p2align 5
.globl _CRYPTO_NAMESPACE(batch_sc25519_lt)
.globl CRYPTO_NAMESPACE(batch_sc25519_lt)
_CRYPTO_NAMESPACE(batch_sc25519_lt):
CRYPTO_NAMESPACE(batch_sc25519_lt):
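# Prologue: align the stack pointer down to a 32-byte boundary. %r11 records
# the adjustment ("add $0" reserves no extra space, since this routine uses
# no stack variables) so that %rsp can be restored at leave.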
mov %rsp,%r11
and $31,%r11
add $0,%r11
sub %r11,%rsp
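# Load x into registers, least-significant limb first:
# t0..t3 = x[0..3] in %rdx, %rcx, %r8, %rdi. Note that t3 reuses %rdi,
# clobbering xp, which is no longer needed after the last load.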
# qhasm: t0 = *(uint64 *)(xp + 0)
# asm 1: movq 0(<xp=int64#1),>t0=int64#3
# asm 2: movq 0(<xp=%rdi),>t0=%rdx
movq 0(%rdi),%rdx
# qhasm: t1 = *(uint64 *)(xp + 8)
# asm 1: movq 8(<xp=int64#1),>t1=int64#4
# asm 2: movq 8(<xp=%rdi),>t1=%rcx
movq 8(%rdi),%rcx
# qhasm: t2 = *(uint64 *)(xp + 16)
# asm 1: movq 16(<xp=int64#1),>t2=int64#5
# asm 2: movq 16(<xp=%rdi),>t2=%r8
movq 16(%rdi),%r8
# qhasm: t3 = *(uint64 *)(xp + 24)
# asm 1: movq 24(<xp=int64#1),>t3=int64#1
# asm 2: movq 24(<xp=%rdi),>t3=%rdi
movq 24(%rdi),%rdi
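# Compute the 256-bit difference x - y with a subq/sbbq borrow chain.
# The difference itself is discarded; only the final borrow matters:
# the carry flag is set after the last sbbq iff x < y.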
# qhasm: carry? t0 -= *(uint64 *)(yp + 0)
# asm 1: subq 0(<yp=int64#2),<t0=int64#3
# asm 2: subq 0(<yp=%rsi),<t0=%rdx
subq 0(%rsi),%rdx
# qhasm: carry? t1 -= *(uint64 *)(yp + 8) - carry
# asm 1: sbbq 8(<yp=int64#2),<t1=int64#4
# asm 2: sbbq 8(<yp=%rsi),<t1=%rcx
sbbq 8(%rsi),%rcx
# qhasm: carry? t2 -= *(uint64 *)(yp + 16) - carry
# asm 1: sbbq 16(<yp=int64#2),<t2=int64#5
# asm 2: sbbq 16(<yp=%rsi),<t2=%r8
sbbq 16(%rsi),%r8
# qhasm: carry? t3 -= *(uint64 *)(yp + 24) - carry
# asm 1: sbbq 24(<yp=int64#2),<t3=int64#1
# asm 2: sbbq 24(<yp=%rsi),<t3=%rdi
sbbq 24(%rsi),%rdi
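# Branch-free result selection: set ret = 0 and doof = 1, then cmovc
# copies doof into ret exactly when the borrow is set, so ret = (x < y)
# without any data-dependent branch.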
# qhasm: ret = 0
# asm 1: mov $0,>ret=int64#1
# asm 2: mov $0,>ret=%rdi
mov $0,%rdi
# qhasm: doof = 1
# asm 1: mov $1,>doof=int64#2
# asm 2: mov $1,>doof=%rsi
mov $1,%rsi
# qhasm: ret = doof if carry
# asm 1: cmovc <doof=int64#2,<ret=int64#1
# asm 2: cmovc <doof=%rsi,<ret=%rdi
cmovc %rsi,%rdi
# qhasm: leave
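# Epilogue: undo the stack alignment and return ret in %rax. The move into
# %rdx comes from qhasm's standard leave sequence; callers of this routine
# use only %rax.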
add %r11,%rsp
mov %rdi,%rax
mov %rsi,%rdx
ret