scrypt-x86_64.S

// ECOin - Copyright (c) - 2014/2022 - GPLv3 - epsylon@riseup.net (https://03c8.net)
# Copyright 2011-2012 pooler@litecoinpool.org
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.

#if defined(OPTIMIZED_SALSA) && defined(__x86_64__)

#if defined(__linux__) && defined(__ELF__)
	.section .note.GNU-stack,"",%progbits
#endif

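# scrypt_core operates on a 128-byte working state (reached through %rdi) and
# a 128 KiB scratchpad (reached through %rsi); the constants 131072, 1023 and
# the shift by 7 below correspond to the scrypt parameters N = 1024, r = 1.
#
# scrypt_shuffle copies one 64-byte Salsa20 block from so(src) to do(dest)
# while applying a self-inverse permutation of its sixteen 32-bit words (the
# layout used by the SIMD code path); it is defined here but not referenced
# elsewhere in this file.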
#define scrypt_shuffle(src, so, dest, do) \
	movl so+60(src), %r8d; \
	movl so+44(src), %r9d; \
	movl so+28(src), %r10d; \
	movl so+12(src), %r11d; \
	movl %r8d, do+12(dest); \
	movl %r9d, do+28(dest); \
	movl %r10d, do+44(dest); \
	movl %r11d, do+60(dest); \
	movl so+40(src), %r8d; \
	movl so+8(src), %r9d; \
	movl so+48(src), %r10d; \
	movl so+16(src), %r11d; \
	movl %r8d, do+8(dest); \
	movl %r9d, do+40(dest); \
	movl %r10d, do+16(dest); \
	movl %r11d, do+48(dest); \
	movl so+20(src), %r8d; \
	movl so+4(src), %r9d; \
	movl so+52(src), %r10d; \
	movl so+36(src), %r11d; \
	movl %r8d, do+4(dest); \
	movl %r9d, do+20(dest); \
	movl %r10d, do+36(dest); \
	movl %r11d, do+52(dest); \
	movl so+0(src), %r8d; \
	movl so+24(src), %r9d; \
	movl so+32(src), %r10d; \
	movl so+56(src), %r11d; \
	movl %r8d, do+0(dest); \
	movl %r9d, do+24(dest); \
	movl %r10d, do+32(dest); \
	movl %r11d, do+56(dest); \

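# One Salsa20 double round (a column round followed by a row round) over the
# sixteen 32-bit state words, which live in general-purpose registers plus the
# stack slots 48(%rsp), 72(%rsp) and 88(%rsp).  Each quarter-round step is
# sum -> rotate (by 7, 9, 13 or 18) -> xor; leaq performs the addition without
# touching the flags, and the rotate acts on the 32-bit subregister.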
#define gen_salsa8_core_doubleround() \
	movq 72(%rsp), %r15; \
	leaq (%r14, %rdx), %rbp; \
	roll $7, %ebp; \
	xorq %rbp, %r9; \
	leaq (%rdi, %r15), %rbp; \
	roll $7, %ebp; \
	xorq %rbp, %r10; \
	leaq (%rdx, %r9), %rbp; \
	roll $9, %ebp; \
	xorq %rbp, %r11; \
	leaq (%r15, %r10), %rbp; \
	roll $9, %ebp; \
	xorq %rbp, %r13; \
	leaq (%r9, %r11), %rbp; \
	roll $13, %ebp; \
	xorq %rbp, %r14; \
	leaq (%r10, %r13), %rbp; \
	roll $13, %ebp; \
	xorq %rbp, %rdi; \
	leaq (%r11, %r14), %rbp; \
	roll $18, %ebp; \
	xorq %rbp, %rdx; \
	leaq (%r13, %rdi), %rbp; \
	roll $18, %ebp; \
	xorq %rbp, %r15; \
	movq 48(%rsp), %rbp; \
	movq %r15, 72(%rsp); \
	leaq (%rax, %rbp), %r15; \
	roll $7, %r15d; \
	xorq %r15, %rbx; \
	leaq (%rbp, %rbx), %r15; \
	roll $9, %r15d; \
	xorq %r15, %rcx; \
	leaq (%rbx, %rcx), %r15; \
	roll $13, %r15d; \
	xorq %r15, %rax; \
	leaq (%rcx, %rax), %r15; \
	roll $18, %r15d; \
	xorq %r15, %rbp; \
	movq 88(%rsp), %r15; \
	movq %rbp, 48(%rsp); \
	leaq (%r12, %r15), %rbp; \
	roll $7, %ebp; \
	xorq %rbp, %rsi; \
	leaq (%r15, %rsi), %rbp; \
	roll $9, %ebp; \
	xorq %rbp, %r8; \
	leaq (%rsi, %r8), %rbp; \
	roll $13, %ebp; \
	xorq %rbp, %r12; \
	leaq (%r8, %r12), %rbp; \
	roll $18, %ebp; \
	xorq %rbp, %r15; \
	movq %r15, 88(%rsp); \
	movq 72(%rsp), %r15; \
	leaq (%rsi, %rdx), %rbp; \
	roll $7, %ebp; \
	xorq %rbp, %rdi; \
	leaq (%r9, %r15), %rbp; \
	roll $7, %ebp; \
	xorq %rbp, %rax; \
	leaq (%rdx, %rdi), %rbp; \
	roll $9, %ebp; \
	xorq %rbp, %rcx; \
	leaq (%r15, %rax), %rbp; \
	roll $9, %ebp; \
	xorq %rbp, %r8; \
	leaq (%rdi, %rcx), %rbp; \
	roll $13, %ebp; \
	xorq %rbp, %rsi; \
	leaq (%rax, %r8), %rbp; \
	roll $13, %ebp; \
	xorq %rbp, %r9; \
	leaq (%rcx, %rsi), %rbp; \
	roll $18, %ebp; \
	xorq %rbp, %rdx; \
	leaq (%r8, %r9), %rbp; \
	roll $18, %ebp; \
	xorq %rbp, %r15; \
	movq 48(%rsp), %rbp; \
	movq %r15, 72(%rsp); \
	leaq (%r10, %rbp), %r15; \
	roll $7, %r15d; \
	xorq %r15, %r12; \
	leaq (%rbp, %r12), %r15; \
	roll $9, %r15d; \
	xorq %r15, %r11; \
	leaq (%r12, %r11), %r15; \
	roll $13, %r15d; \
	xorq %r15, %r10; \
	leaq (%r11, %r10), %r15; \
	roll $18, %r15d; \
	xorq %r15, %rbp; \
	movq 88(%rsp), %r15; \
	movq %rbp, 48(%rsp); \
	leaq (%rbx, %r15), %rbp; \
	roll $7, %ebp; \
	xorq %rbp, %r14; \
	leaq (%r15, %r14), %rbp; \
	roll $9, %ebp; \
	xorq %rbp, %r13; \
	leaq (%r14, %r13), %rbp; \
	roll $13, %ebp; \
	xorq %rbp, %rbx; \
	leaq (%r13, %rbx), %rbp; \
	roll $18, %ebp; \
	xorq %rbp, %r15; \
	movq %r15, 88(%rsp); \

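# Generic (non-SSE2) Salsa20/8 core.  The caller stores the 64-byte input at
# 0(%rsp) before the call, so after the return address is pushed it is read
# here from 8(%rsp) upwards; 72(%rsp) and 88(%rsp) serve as scratch slots.
# Four double rounds are applied and the resulting sixteen words are packed
# into %xmm0-%xmm3 for the caller to add back into its state.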
	.text
	.align 32
gen_salsa8_core:
	# 0: %rdx, %rdi, %rcx, %rsi
	movq 8(%rsp), %rdi
	movq %rdi, %rdx
	shrq $32, %rdi
	movq 16(%rsp), %rsi
	movq %rsi, %rcx
	shrq $32, %rsi
	# 1: %r9, 72(%rsp), %rax, %r8
	movq 24(%rsp), %r8
	movq %r8, %r9
	shrq $32, %r8
	movq %r8, 72(%rsp)
	movq 32(%rsp), %r8
	movq %r8, %rax
	shrq $32, %r8
	# 2: %r11, %r10, 48(%rsp), %r12
	movq 40(%rsp), %r10
	movq %r10, %r11
	shrq $32, %r10
	movq 48(%rsp), %r12
	#movq %r12, %r13
	#movq %r13, 48(%rsp)
	shrq $32, %r12
	# 3: %r14, %r13, %rbx, 88(%rsp)
	movq 56(%rsp), %r13
	movq %r13, %r14
	shrq $32, %r13
	movq 64(%rsp), %r15
	movq %r15, %rbx
	shrq $32, %r15
	movq %r15, 88(%rsp)
	gen_salsa8_core_doubleround()
	gen_salsa8_core_doubleround()
	gen_salsa8_core_doubleround()
	gen_salsa8_core_doubleround()
	movl %edx, %edx
	shlq $32, %rdi
	addq %rdi, %rdx
	movd %rdx, %xmm0
	movl %ecx, %ecx
	shlq $32, %rsi
	addq %rsi, %rcx
	movd %rcx, %xmm4
	movq 72(%rsp), %rdi
	movl %r9d, %r9d
	shlq $32, %rdi
	addq %rdi, %r9
	movd %r9, %xmm1
	movl %eax, %eax
	shlq $32, %r8
	addq %r8, %rax
	movd %rax, %xmm5
	movl %r11d, %r11d
	shlq $32, %r10
	addq %r10, %r11
	movd %r11, %xmm2
	movl 48(%rsp), %r8d
	shlq $32, %r12
	addq %r12, %r8
	movd %r8, %xmm6
	movl %r14d, %r14d
	shlq $32, %r13
	addq %r13, %r14
	movd %r14, %xmm3
	movq 88(%rsp), %rdi
	movl %ebx, %ebx
	shlq $32, %rdi
	addq %rdi, %rbx
	movd %rbx, %xmm7
	punpcklqdq %xmm4, %xmm0
	punpcklqdq %xmm5, %xmm1
	punpcklqdq %xmm6, %xmm2
	punpcklqdq %xmm7, %xmm3
	#movq %rdx, 8(%rsp)
	#movq %rcx, 16(%rsp)
	#movq %r9, 24(%rsp)
	#movq %rax, 32(%rsp)
	#movq %r11, 40(%rsp)
	#movq %r8, 48(%rsp)
	#movq %r14, 56(%rsp)
	#movq %rbx, 64(%rsp)
	ret

	.text
	.align 32
	.globl scrypt_core
	.globl _scrypt_core
scrypt_core:
_scrypt_core:
	pushq %rbx
	pushq %rbp
	pushq %r12
	pushq %r13
	pushq %r14
	pushq %r15
#if defined(WIN64)
	subq $176, %rsp
	movdqa %xmm6, 8(%rsp)
	movdqa %xmm7, 24(%rsp)
	movdqa %xmm8, 40(%rsp)
	movdqa %xmm9, 56(%rsp)
	movdqa %xmm10, 72(%rsp)
	movdqa %xmm11, 88(%rsp)
	movdqa %xmm12, 104(%rsp)
	movdqa %xmm13, 120(%rsp)
	movdqa %xmm14, 136(%rsp)
	movdqa %xmm15, 152(%rsp)
	pushq %rdi
	pushq %rsi
	movq %rcx, %rdi
	movq %rdx, %rsi
#endif
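# On Win64 the first two arguments arrive in %rcx/%rdx and %xmm6-%xmm15 are
# callee-saved, hence the extra spill above and the move into the System V
# argument registers used by the rest of the code.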
#define scrypt_core_cleanup() \
	popq %r15; \
	popq %r14; \
	popq %r13; \
	popq %r12; \
	popq %rbp; \
	popq %rbx; \

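	# The constants below are "Genu" (0x756e6547), "ineI" (0x49656e69) and
	# "ntel" (0x6c65746e): the vendor string returned by CPUID leaf 0 in
	# %ebx:%edx:%ecx.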
	# GenuineIntel processors have fast SIMD
	xorl %eax, %eax
	cpuid
	cmpl $0x6c65746e, %ecx
	jne gen_scrypt_core
	cmpl $0x49656e69, %edx
	jne gen_scrypt_core
	cmpl $0x756e6547, %ebx
	je xmm_scrypt_core
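# Generic code path: the two 64-byte halves of the state are kept in
# %xmm8-%xmm15 and spilled to the stack around each call to gen_salsa8_core.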
gen_scrypt_core:
	subq $136, %rsp
	movdqa 0(%rdi), %xmm8
	movdqa 16(%rdi), %xmm9
	movdqa 32(%rdi), %xmm10
	movdqa 48(%rdi), %xmm11
	movdqa 64(%rdi), %xmm12
	movdqa 80(%rdi), %xmm13
	movdqa 96(%rdi), %xmm14
	movdqa 112(%rdi), %xmm15
	leaq 131072(%rsi), %rcx
	movq %rdi, 104(%rsp)
	movq %rsi, 112(%rsp)
	movq %rcx, 120(%rsp)
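	# First pass: write the current state into the scratchpad entry at %rsi,
	# then mix it with two chained Salsa20/8 applications (the scrypt
	# BlockMix for r = 1), until all 1024 entries of 128 bytes are filled.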
gen_scrypt_core_loop1:
	movdqa %xmm8, 0(%rsi)
	movdqa %xmm9, 16(%rsi)
	movdqa %xmm10, 32(%rsi)
	movdqa %xmm11, 48(%rsi)
	movdqa %xmm12, 64(%rsi)
	movdqa %xmm13, 80(%rsi)
	movdqa %xmm14, 96(%rsi)
	movdqa %xmm15, 112(%rsi)
	pxor %xmm12, %xmm8
	pxor %xmm13, %xmm9
	pxor %xmm14, %xmm10
	pxor %xmm15, %xmm11
	movdqa %xmm8, 0(%rsp)
	movdqa %xmm9, 16(%rsp)
	movdqa %xmm10, 32(%rsp)
	movdqa %xmm11, 48(%rsp)
	movq %rsi, 128(%rsp)
	call gen_salsa8_core
	paddd %xmm0, %xmm8
	paddd %xmm1, %xmm9
	paddd %xmm2, %xmm10
	paddd %xmm3, %xmm11
	pxor %xmm8, %xmm12
	pxor %xmm9, %xmm13
	pxor %xmm10, %xmm14
	pxor %xmm11, %xmm15
	movdqa %xmm12, 0(%rsp)
	movdqa %xmm13, 16(%rsp)
	movdqa %xmm14, 32(%rsp)
	movdqa %xmm15, 48(%rsp)
	call gen_salsa8_core
	movq 128(%rsp), %rsi
	paddd %xmm0, %xmm12
	paddd %xmm1, %xmm13
	paddd %xmm2, %xmm14
	paddd %xmm3, %xmm15
	addq $128, %rsi
	movq 120(%rsp), %rcx
	cmpq %rcx, %rsi
	jne gen_scrypt_core_loop1
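	# Second pass: 1024 iterations; the low word of the second half of the
	# state (mod 1024, scaled by 128) selects a scratchpad entry, which is
	# XORed into the state before it is mixed again.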
	movq $1024, %rcx
gen_scrypt_core_loop2:
	movq 112(%rsp), %rsi
	movd %xmm12, %edx
	andl $1023, %edx
	shll $7, %edx
	movdqa 0(%rsi, %rdx), %xmm0
	movdqa 16(%rsi, %rdx), %xmm1
	movdqa 32(%rsi, %rdx), %xmm2
	movdqa 48(%rsi, %rdx), %xmm3
	movdqa 64(%rsi, %rdx), %xmm4
	movdqa 80(%rsi, %rdx), %xmm5
	movdqa 96(%rsi, %rdx), %xmm6
	movdqa 112(%rsi, %rdx), %xmm7
	pxor %xmm0, %xmm8
	pxor %xmm1, %xmm9
	pxor %xmm2, %xmm10
	pxor %xmm3, %xmm11
	pxor %xmm4, %xmm12
	pxor %xmm5, %xmm13
	pxor %xmm6, %xmm14
	pxor %xmm7, %xmm15
	pxor %xmm12, %xmm8
	pxor %xmm13, %xmm9
	pxor %xmm14, %xmm10
	pxor %xmm15, %xmm11
	movdqa %xmm8, 0(%rsp)
	movdqa %xmm9, 16(%rsp)
	movdqa %xmm10, 32(%rsp)
	movdqa %xmm11, 48(%rsp)
	movq %rcx, 128(%rsp)
	call gen_salsa8_core
	paddd %xmm0, %xmm8
	paddd %xmm1, %xmm9
	paddd %xmm2, %xmm10
	paddd %xmm3, %xmm11
	pxor %xmm8, %xmm12
	pxor %xmm9, %xmm13
	pxor %xmm10, %xmm14
	pxor %xmm11, %xmm15
	movdqa %xmm12, 0(%rsp)
	movdqa %xmm13, 16(%rsp)
	movdqa %xmm14, 32(%rsp)
	movdqa %xmm15, 48(%rsp)
	call gen_salsa8_core
	movq 128(%rsp), %rcx
	paddd %xmm0, %xmm12
	paddd %xmm1, %xmm13
	paddd %xmm2, %xmm14
	paddd %xmm3, %xmm15
	subq $1, %rcx
	ja gen_scrypt_core_loop2
	movq 104(%rsp), %rdi
	movdqa %xmm8, 0(%rdi)
	movdqa %xmm9, 16(%rdi)
	movdqa %xmm10, 32(%rdi)
	movdqa %xmm11, 48(%rdi)
	movdqa %xmm12, 64(%rdi)
	movdqa %xmm13, 80(%rdi)
	movdqa %xmm14, 96(%rdi)
	movdqa %xmm15, 112(%rdi)
	addq $136, %rsp
	scrypt_core_cleanup()
	ret
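# SSE2 Salsa20 double round on the four-row state held in %xmm0-%xmm3.  Each
# 32-bit rotation is emulated with a pslld/psrld pair combined by pxor, and
# pshufd ($0x93, $0x4e, $0x39) realigns the diagonals between quarter-rounds.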
#define xmm_salsa8_core_doubleround() \
	movdqa %xmm1, %xmm4; \
	paddd %xmm0, %xmm4; \
	movdqa %xmm4, %xmm5; \
	pslld $7, %xmm4; \
	psrld $25, %xmm5; \
	pxor %xmm4, %xmm3; \
	pxor %xmm5, %xmm3; \
	movdqa %xmm0, %xmm4; \
	paddd %xmm3, %xmm4; \
	movdqa %xmm4, %xmm5; \
	pslld $9, %xmm4; \
	psrld $23, %xmm5; \
	pxor %xmm4, %xmm2; \
	movdqa %xmm3, %xmm4; \
	pshufd $0x93, %xmm3, %xmm3; \
	pxor %xmm5, %xmm2; \
	paddd %xmm2, %xmm4; \
	movdqa %xmm4, %xmm5; \
	pslld $13, %xmm4; \
	psrld $19, %xmm5; \
	pxor %xmm4, %xmm1; \
	movdqa %xmm2, %xmm4; \
	pshufd $0x4e, %xmm2, %xmm2; \
	pxor %xmm5, %xmm1; \
	paddd %xmm1, %xmm4; \
	movdqa %xmm4, %xmm5; \
	pslld $18, %xmm4; \
	psrld $14, %xmm5; \
	pxor %xmm4, %xmm0; \
	pshufd $0x39, %xmm1, %xmm1; \
	pxor %xmm5, %xmm0; \
	movdqa %xmm3, %xmm4; \
	paddd %xmm0, %xmm4; \
	movdqa %xmm4, %xmm5; \
	pslld $7, %xmm4; \
	psrld $25, %xmm5; \
	pxor %xmm4, %xmm1; \
	pxor %xmm5, %xmm1; \
	movdqa %xmm0, %xmm4; \
	paddd %xmm1, %xmm4; \
	movdqa %xmm4, %xmm5; \
	pslld $9, %xmm4; \
	psrld $23, %xmm5; \
	pxor %xmm4, %xmm2; \
	movdqa %xmm1, %xmm4; \
	pshufd $0x93, %xmm1, %xmm1; \
	pxor %xmm5, %xmm2; \
	paddd %xmm2, %xmm4; \
	movdqa %xmm4, %xmm5; \
	pslld $13, %xmm4; \
	psrld $19, %xmm5; \
	pxor %xmm4, %xmm3; \
	movdqa %xmm2, %xmm4; \
	pshufd $0x4e, %xmm2, %xmm2; \
	pxor %xmm5, %xmm3; \
	paddd %xmm3, %xmm4; \
	movdqa %xmm4, %xmm5; \
	pslld $18, %xmm4; \
	psrld $14, %xmm5; \
	pxor %xmm4, %xmm0; \
	pshufd $0x39, %xmm3, %xmm3; \
	pxor %xmm5, %xmm0; \

#define xmm_salsa8_core() \
	xmm_salsa8_core_doubleround(); \
	xmm_salsa8_core_doubleround(); \
	xmm_salsa8_core_doubleround(); \
	xmm_salsa8_core_doubleround(); \

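# SSE2 code path.  The 16 words of each 64-byte half are first gathered into
# %xmm8-%xmm11 and %xmm12-%xmm15 in the diagonal order expected by
# xmm_salsa8_core, so the rounds can run on whole registers without further
# shuffling; the blocks are permuted back to linear order before returning.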
	.align 32
xmm_scrypt_core:
	# shuffle 1st block into %xmm8-%xmm11
	movl 60(%rdi), %edx
	movl 44(%rdi), %ecx
	movl 28(%rdi), %ebx
	movl 12(%rdi), %eax
	movd %edx, %xmm0
	movd %ecx, %xmm1
	movd %ebx, %xmm2
	movd %eax, %xmm3
	movl 40(%rdi), %ecx
	movl 24(%rdi), %ebx
	movl 8(%rdi), %eax
	movl 56(%rdi), %edx
	pshufd $0x93, %xmm0, %xmm0
	pshufd $0x93, %xmm1, %xmm1
	pshufd $0x93, %xmm2, %xmm2
	pshufd $0x93, %xmm3, %xmm3
	movd %ecx, %xmm4
	movd %ebx, %xmm5
	movd %eax, %xmm6
	movd %edx, %xmm7
	paddd %xmm4, %xmm0
	paddd %xmm5, %xmm1
	paddd %xmm6, %xmm2
	paddd %xmm7, %xmm3
	movl 20(%rdi), %ebx
	movl 4(%rdi), %eax
	movl 52(%rdi), %edx
	movl 36(%rdi), %ecx
	pshufd $0x93, %xmm0, %xmm0
	pshufd $0x93, %xmm1, %xmm1
	pshufd $0x93, %xmm2, %xmm2
	pshufd $0x93, %xmm3, %xmm3
	movd %ebx, %xmm4
	movd %eax, %xmm5
	movd %edx, %xmm6
	movd %ecx, %xmm7
	paddd %xmm4, %xmm0
	paddd %xmm5, %xmm1
	paddd %xmm6, %xmm2
	paddd %xmm7, %xmm3
	movl 0(%rdi), %eax
	movl 48(%rdi), %edx
	movl 32(%rdi), %ecx
	movl 16(%rdi), %ebx
	pshufd $0x93, %xmm0, %xmm0
	pshufd $0x93, %xmm1, %xmm1
	pshufd $0x93, %xmm2, %xmm2
	pshufd $0x93, %xmm3, %xmm3
	movd %eax, %xmm8
	movd %edx, %xmm9
	movd %ecx, %xmm10
	movd %ebx, %xmm11
	paddd %xmm0, %xmm8
	paddd %xmm1, %xmm9
	paddd %xmm2, %xmm10
	paddd %xmm3, %xmm11
	# shuffle 2nd block into %xmm12-%xmm15
	movl 124(%rdi), %edx
	movl 108(%rdi), %ecx
	movl 92(%rdi), %ebx
	movl 76(%rdi), %eax
	movd %edx, %xmm0
	movd %ecx, %xmm1
	movd %ebx, %xmm2
	movd %eax, %xmm3
	movl 104(%rdi), %ecx
	movl 88(%rdi), %ebx
	movl 72(%rdi), %eax
	movl 120(%rdi), %edx
	pshufd $0x93, %xmm0, %xmm0
	pshufd $0x93, %xmm1, %xmm1
	pshufd $0x93, %xmm2, %xmm2
	pshufd $0x93, %xmm3, %xmm3
	movd %ecx, %xmm4
	movd %ebx, %xmm5
	movd %eax, %xmm6
	movd %edx, %xmm7
	paddd %xmm4, %xmm0
	paddd %xmm5, %xmm1
	paddd %xmm6, %xmm2
	paddd %xmm7, %xmm3
	movl 84(%rdi), %ebx
	movl 68(%rdi), %eax
	movl 116(%rdi), %edx
	movl 100(%rdi), %ecx
	pshufd $0x93, %xmm0, %xmm0
	pshufd $0x93, %xmm1, %xmm1
	pshufd $0x93, %xmm2, %xmm2
	pshufd $0x93, %xmm3, %xmm3
	movd %ebx, %xmm4
	movd %eax, %xmm5
	movd %edx, %xmm6
	movd %ecx, %xmm7
	paddd %xmm4, %xmm0
	paddd %xmm5, %xmm1
	paddd %xmm6, %xmm2
	paddd %xmm7, %xmm3
	movl 64(%rdi), %eax
	movl 112(%rdi), %edx
	movl 96(%rdi), %ecx
	movl 80(%rdi), %ebx
	pshufd $0x93, %xmm0, %xmm0
	pshufd $0x93, %xmm1, %xmm1
	pshufd $0x93, %xmm2, %xmm2
	pshufd $0x93, %xmm3, %xmm3
	movd %eax, %xmm12
	movd %edx, %xmm13
	movd %ecx, %xmm14
	movd %ebx, %xmm15
	paddd %xmm0, %xmm12
	paddd %xmm1, %xmm13
	paddd %xmm2, %xmm14
	paddd %xmm3, %xmm15
	movq %rsi, %rdx
	leaq 131072(%rsi), %rcx
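	# First pass: store the (shuffled) state into each of the 1024
	# scratchpad entries in turn, mixing it with two Salsa20/8 applications
	# after every store.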
xmm_scrypt_core_loop1:
	movdqa %xmm8, 0(%rdx)
	movdqa %xmm9, 16(%rdx)
	movdqa %xmm10, 32(%rdx)
	movdqa %xmm11, 48(%rdx)
	movdqa %xmm12, 64(%rdx)
	movdqa %xmm13, 80(%rdx)
	movdqa %xmm14, 96(%rdx)
	movdqa %xmm15, 112(%rdx)
	pxor %xmm12, %xmm8
	pxor %xmm13, %xmm9
	pxor %xmm14, %xmm10
	pxor %xmm15, %xmm11
	movdqa %xmm8, %xmm0
	movdqa %xmm9, %xmm1
	movdqa %xmm10, %xmm2
	movdqa %xmm11, %xmm3
	xmm_salsa8_core()
	paddd %xmm0, %xmm8
	paddd %xmm1, %xmm9
	paddd %xmm2, %xmm10
	paddd %xmm3, %xmm11
	pxor %xmm8, %xmm12
	pxor %xmm9, %xmm13
	pxor %xmm10, %xmm14
	pxor %xmm11, %xmm15
	movdqa %xmm12, %xmm0
	movdqa %xmm13, %xmm1
	movdqa %xmm14, %xmm2
	movdqa %xmm15, %xmm3
	xmm_salsa8_core()
	paddd %xmm0, %xmm12
	paddd %xmm1, %xmm13
	paddd %xmm2, %xmm14
	paddd %xmm3, %xmm15
	addq $128, %rdx
	cmpq %rcx, %rdx
	jne xmm_scrypt_core_loop1
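	# Second pass: 1024 iterations; the low word of %xmm12 (mod 1024, scaled
	# by 128) indexes the scratchpad entry that is XORed into the state
	# before it is mixed again.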
	movq $1024, %rcx
xmm_scrypt_core_loop2:
	movd %xmm12, %edx
	andl $1023, %edx
	shll $7, %edx
	movdqa 0(%rsi, %rdx), %xmm0
	movdqa 16(%rsi, %rdx), %xmm1
	movdqa 32(%rsi, %rdx), %xmm2
	movdqa 48(%rsi, %rdx), %xmm3
	movdqa 64(%rsi, %rdx), %xmm4
	movdqa 80(%rsi, %rdx), %xmm5
	movdqa 96(%rsi, %rdx), %xmm6
	movdqa 112(%rsi, %rdx), %xmm7
	pxor %xmm0, %xmm8
	pxor %xmm1, %xmm9
	pxor %xmm2, %xmm10
	pxor %xmm3, %xmm11
	pxor %xmm4, %xmm12
	pxor %xmm5, %xmm13
	pxor %xmm6, %xmm14
	pxor %xmm7, %xmm15
	pxor %xmm12, %xmm8
	pxor %xmm13, %xmm9
	pxor %xmm14, %xmm10
	pxor %xmm15, %xmm11
	movdqa %xmm8, %xmm0
	movdqa %xmm9, %xmm1
	movdqa %xmm10, %xmm2
	movdqa %xmm11, %xmm3
	xmm_salsa8_core()
	paddd %xmm0, %xmm8
	paddd %xmm1, %xmm9
	paddd %xmm2, %xmm10
	paddd %xmm3, %xmm11
	pxor %xmm8, %xmm12
	pxor %xmm9, %xmm13
	pxor %xmm10, %xmm14
	pxor %xmm11, %xmm15
	movdqa %xmm12, %xmm0
	movdqa %xmm13, %xmm1
	movdqa %xmm14, %xmm2
	movdqa %xmm15, %xmm3
	xmm_salsa8_core()
	paddd %xmm0, %xmm12
	paddd %xmm1, %xmm13
	paddd %xmm2, %xmm14
	paddd %xmm3, %xmm15
	subq $1, %rcx
	ja xmm_scrypt_core_loop2
	# re-shuffle 1st block back
	movd %xmm8, %eax
	movd %xmm9, %edx
	movd %xmm10, %ecx
	movd %xmm11, %ebx
	pshufd $0x39, %xmm8, %xmm8
	pshufd $0x39, %xmm9, %xmm9
	pshufd $0x39, %xmm10, %xmm10
	pshufd $0x39, %xmm11, %xmm11
	movl %eax, 0(%rdi)
	movl %edx, 48(%rdi)
	movl %ecx, 32(%rdi)
	movl %ebx, 16(%rdi)
	movd %xmm8, %ebx
	movd %xmm9, %eax
	movd %xmm10, %edx
	movd %xmm11, %ecx
	pshufd $0x39, %xmm8, %xmm8
	pshufd $0x39, %xmm9, %xmm9
	pshufd $0x39, %xmm10, %xmm10
	pshufd $0x39, %xmm11, %xmm11
	movl %ebx, 20(%rdi)
	movl %eax, 4(%rdi)
	movl %edx, 52(%rdi)
	movl %ecx, 36(%rdi)
	movd %xmm8, %ecx
	movd %xmm9, %ebx
	movd %xmm10, %eax
	movd %xmm11, %edx
	pshufd $0x39, %xmm8, %xmm8
	pshufd $0x39, %xmm9, %xmm9
	pshufd $0x39, %xmm10, %xmm10
	pshufd $0x39, %xmm11, %xmm11
	movl %ecx, 40(%rdi)
	movl %ebx, 24(%rdi)
	movl %eax, 8(%rdi)
	movl %edx, 56(%rdi)
	movd %xmm8, %edx
	movd %xmm9, %ecx
	movd %xmm10, %ebx
	movd %xmm11, %eax
	movl %edx, 60(%rdi)
	movl %ecx, 44(%rdi)
	movl %ebx, 28(%rdi)
	movl %eax, 12(%rdi)
	# re-shuffle 2nd block back
	movd %xmm12, %eax
	movd %xmm13, %edx
	movd %xmm14, %ecx
	movd %xmm15, %ebx
	pshufd $0x39, %xmm12, %xmm12
	pshufd $0x39, %xmm13, %xmm13
	pshufd $0x39, %xmm14, %xmm14
	pshufd $0x39, %xmm15, %xmm15
	movl %eax, 64(%rdi)
	movl %edx, 112(%rdi)
	movl %ecx, 96(%rdi)
	movl %ebx, 80(%rdi)
	movd %xmm12, %ebx
	movd %xmm13, %eax
	movd %xmm14, %edx
	movd %xmm15, %ecx
	pshufd $0x39, %xmm12, %xmm12
	pshufd $0x39, %xmm13, %xmm13
	pshufd $0x39, %xmm14, %xmm14
	pshufd $0x39, %xmm15, %xmm15
	movl %ebx, 84(%rdi)
	movl %eax, 68(%rdi)
	movl %edx, 116(%rdi)
	movl %ecx, 100(%rdi)
	movd %xmm12, %ecx
	movd %xmm13, %ebx
	movd %xmm14, %eax
	movd %xmm15, %edx
	pshufd $0x39, %xmm12, %xmm12
	pshufd $0x39, %xmm13, %xmm13
	pshufd $0x39, %xmm14, %xmm14
	pshufd $0x39, %xmm15, %xmm15
	movl %ecx, 104(%rdi)
	movl %ebx, 88(%rdi)
	movl %eax, 72(%rdi)
	movl %edx, 120(%rdi)
	movd %xmm12, %edx
	movd %xmm13, %ecx
	movd %xmm14, %ebx
	movd %xmm15, %eax
	movl %edx, 124(%rdi)
	movl %ecx, 108(%rdi)
	movl %ebx, 92(%rdi)
	movl %eax, 76(%rdi)
	scrypt_core_cleanup()
	ret
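# Note: the block below is unreachable as written (the function returns above
# and nothing branches here); it appears to be a leftover epilogue, including
# the Win64 restore of %xmm6-%xmm15.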
	addq $392, %rsp
#if defined(WIN64)
	popq %rsi
	popq %rdi
	movdqa 8(%rsp), %xmm6
	movdqa 24(%rsp), %xmm7
	movdqa 40(%rsp), %xmm8
	movdqa 56(%rsp), %xmm9
	movdqa 72(%rsp), %xmm10
	movdqa 88(%rsp), %xmm11
	movdqa 104(%rsp), %xmm12
	movdqa 120(%rsp), %xmm13
	movdqa 136(%rsp), %xmm14
	movdqa 152(%rsp), %xmm15
	addq $176, %rsp
#endif
	popq %rbp
	popq %rbx
	ret
#endif