// x86_64-mont.S (19 KB)
// NOTE(review): the two lines that followed this banner in the capture were a
// run-together line-number gutter (1..1256) from the page this listing was
// scraped from, not part of the assembly source; replaced with this note.
// Each remaining line still carries its original "N." listing number.
  1. // This file is generated from a similarly-named Perl script in the BoringSSL
  2. // source tree. Do not edit by hand.
  3. #if defined(__has_feature)
  4. #if __has_feature(memory_sanitizer) && !defined(OPENSSL_NO_ASM)
  5. #define OPENSSL_NO_ASM
  6. #endif
  7. #endif
  8. #if defined(__x86_64__) && !defined(OPENSSL_NO_ASM)
  9. #if defined(BORINGSSL_PREFIX)
  10. #include <boringssl_prefix_symbols_asm.h>
  11. #endif
  12. .text
// ---------------------------------------------------------------------------
// bn_mul_mont: Montgomery multiplication entry point.
// SysV AMD64 arguments, as inferred from register usage below -- TODO confirm
// against the generating Perl script:
//   rdi = rp (result limbs), rsi = ap, rdx = bp, rcx = np (modulus),
//   r8  = pointer to n0 (= -1/np[0] mod 2^64), r9d = num (limb count).
// Returns 1 in rax.  Dispatches to specialised paths for larger/aligned
// sizes; otherwise runs the generic word-by-word loop at L$mul_enter.
// ---------------------------------------------------------------------------
  13. .globl _bn_mul_mont
  14. .private_extern _bn_mul_mont
  15. .p2align 4
  16. _bn_mul_mont:
// Zero-extend num (writing the 32-bit register clears the upper 32 bits).
  17. movl %r9d,%r9d
// rax = caller's rsp, preserved across the variable-size frame for the
// epilogue (stored into the frame at L$mul_page_walk_done).
  18. movq %rsp,%rax
// num not a multiple of 4, or num < 8 -> generic path.
  19. testl $3,%r9d
  20. jnz L$mul_enter
  21. cmpl $8,%r9d
  22. jb L$mul_enter
// r11d = OPENSSL_ia32cap_P[2] (CPU feature word; consumed at L$mul4x_enter
// to select the MULX/ADX path).
  23. leaq _OPENSSL_ia32cap_P(%rip),%r11
  24. movl 8(%r11),%r11d
// ap == bp and num % 8 == 0 -> dedicated squaring path; else 4x-unrolled.
  25. cmpq %rsi,%rdx
  26. jne L$mul4x_enter
  27. testl $7,%r9d
  28. jz L$sqr8x_enter
  29. jmp L$mul4x_enter
  30. .p2align 4
// --- Generic one-limb-at-a-time path ---------------------------------------
  31. L$mul_enter:
// Save all callee-saved GPRs (restored via the saved-rsp pointer in the
// epilogue, not by matching pops).
  32. pushq %rbx
  33. pushq %rbp
  34. pushq %r12
  35. pushq %r13
  36. pushq %r14
  37. pushq %r15
// Carve out a temporary vector tp[] of (num+2) quadwords below rsp,
// 1024-byte aligned; r11 = page-rounded distance for the guard-page walk.
  38. negq %r9
  39. movq %rsp,%r11
  40. leaq -16(%rsp,%r9,8),%r10
  41. negq %r9
  42. andq $-1024,%r10
  43. subq %r10,%r11
  44. andq $-4096,%r11
  45. leaq (%r10,%r11,1),%rsp
  46. movq (%rsp),%r11
  47. cmpq %r10,%rsp
  48. ja L$mul_page_walk
  49. jmp L$mul_page_walk_done
  50. .p2align 4
// Touch each 4 KiB page on the way down so the OS stack guard page is hit
// in order (stack probing).
  51. L$mul_page_walk:
  52. leaq -4096(%rsp),%rsp
  53. movq (%rsp),%r11
  54. cmpq %r10,%rsp
  55. ja L$mul_page_walk
  56. L$mul_page_walk_done:
// Save the caller's rsp in the tp[num+1] slot.
  57. movq %rax,8(%rsp,%r9,8)
  58. L$mul_body:
// r12 = bp, r8 = n0 value, rbx = bp[0]; r14 = outer index i, r15 = inner j.
  59. movq %rdx,%r12
  60. movq (%r8),%r8
  61. movq (%r12),%rbx
  62. movq (%rsi),%rax
  63. xorq %r14,%r14
  64. xorq %r15,%r15
// First pass (i = 0): tp[] = ap[]*bp[0] + m*np[], m = tp[0]*n0.
  65. movq %r8,%rbp
  66. mulq %rbx
  67. movq %rax,%r10
  68. movq (%rcx),%rax
  69. imulq %r10,%rbp
  70. movq %rdx,%r11
  71. mulq %rbp
  72. addq %rax,%r10
  73. movq 8(%rsi),%rax
  74. adcq $0,%rdx
  75. movq %rdx,%r13
  76. leaq 1(%r15),%r15
  77. jmp L$1st_enter
  78. .p2align 4
// Inner j-loop of the first pass; r13 carries the running sum, r11/r10
// stagger the ap[j]*bp[0] high/low halves across iterations.
  79. L$1st:
  80. addq %rax,%r13
  81. movq (%rsi,%r15,8),%rax
  82. adcq $0,%rdx
  83. addq %r11,%r13
  84. movq %r10,%r11
  85. adcq $0,%rdx
  86. movq %r13,-16(%rsp,%r15,8)
  87. movq %rdx,%r13
  88. L$1st_enter:
  89. mulq %rbx
  90. addq %rax,%r11
  91. movq (%rcx,%r15,8),%rax
  92. adcq $0,%rdx
  93. leaq 1(%r15),%r15
  94. movq %rdx,%r10
  95. mulq %rbp
  96. cmpq %r9,%r15
  97. jne L$1st
// Finish the first pass: fold the last partial products and store the
// top two words tp[num-1], tp[num].
  98. addq %rax,%r13
  99. movq (%rsi),%rax
  100. adcq $0,%rdx
  101. addq %r11,%r13
  102. adcq $0,%rdx
  103. movq %r13,-16(%rsp,%r15,8)
  104. movq %rdx,%r13
  105. movq %r10,%r11
  106. xorq %rdx,%rdx
  107. addq %r11,%r13
  108. adcq $0,%rdx
  109. movq %r13,-8(%rsp,%r9,8)
  110. movq %rdx,(%rsp,%r9,8)
  111. leaq 1(%r14),%r14
  112. jmp L$outer
  113. .p2align 4
// Outer loop over bp[i], i = 1 .. num-1: tp = (tp + ap[]*bp[i] + m*np[]) / 2^64.
  114. L$outer:
  115. movq (%r12,%r14,8),%rbx
  116. xorq %r15,%r15
  117. movq %r8,%rbp
  118. movq (%rsp),%r10
  119. mulq %rbx
  120. addq %rax,%r10
  121. movq (%rcx),%rax
  122. adcq $0,%rdx
// m = (tp[0] + ap[0]*bp[i]) * n0  (makes the low word vanish mod 2^64).
  123. imulq %r10,%rbp
  124. movq %rdx,%r11
  125. mulq %rbp
  126. addq %rax,%r10
  127. movq 8(%rsi),%rax
  128. adcq $0,%rdx
  129. movq 8(%rsp),%r10
  130. movq %rdx,%r13
  131. leaq 1(%r15),%r15
  132. jmp L$inner_enter
  133. .p2align 4
// Inner j-loop: accumulate ap[j]*bp[i] + m*np[j] + tp[j], shifting the
// result down one limb as it is written back.
  134. L$inner:
  135. addq %rax,%r13
  136. movq (%rsi,%r15,8),%rax
  137. adcq $0,%rdx
  138. addq %r10,%r13
  139. movq (%rsp,%r15,8),%r10
  140. adcq $0,%rdx
  141. movq %r13,-16(%rsp,%r15,8)
  142. movq %rdx,%r13
  143. L$inner_enter:
  144. mulq %rbx
  145. addq %rax,%r11
  146. movq (%rcx,%r15,8),%rax
  147. adcq $0,%rdx
  148. addq %r11,%r10
  149. movq %rdx,%r11
  150. adcq $0,%r11
  151. leaq 1(%r15),%r15
  152. mulq %rbp
  153. cmpq %r9,%r15
  154. jne L$inner
// Tail of an outer iteration: top two words including carry-in tp[num].
  155. addq %rax,%r13
  156. movq (%rsi),%rax
  157. adcq $0,%rdx
  158. addq %r10,%r13
  159. movq (%rsp,%r15,8),%r10
  160. adcq $0,%rdx
  161. movq %r13,-16(%rsp,%r15,8)
  162. movq %rdx,%r13
  163. xorq %rdx,%rdx
  164. addq %r11,%r13
  165. adcq $0,%rdx
  166. addq %r10,%r13
  167. adcq $0,%rdx
  168. movq %r13,-8(%rsp,%r9,8)
  169. movq %rdx,(%rsp,%r9,8)
  170. leaq 1(%r14),%r14
  171. cmpq %r9,%r14
  172. jb L$outer
// Conditional final subtraction: rp[] = tp[] - np[] (borrow tracked in CF;
// the xor above cleared CF for the first sbb).
  173. xorq %r14,%r14
  174. movq (%rsp),%rax
  175. movq %r9,%r15
  176. .p2align 4
  177. L$sub: sbbq (%rcx,%r14,8),%rax
  178. movq %rax,(%rdi,%r14,8)
  179. movq 8(%rsp,%r14,8),%rax
  180. leaq 1(%r14),%r14
  181. decq %r15
  182. jnz L$sub
// rax = all-ones if the subtraction borrowed overall (tp < np case must
// keep tp); rbx = ~rax selects the subtracted value.
  183. sbbq $0,%rax
  184. movq $-1,%rbx
  185. xorq %rax,%rbx
  186. xorq %r14,%r14
  187. movq %r9,%r15
// Constant-time select (no data-dependent branch): rp[i] =
// (rp[i] & ~mask) | (tp[i] & mask), and scrub tp[i] with the non-secret num.
  188. L$copy:
  189. movq (%rdi,%r14,8),%rcx
  190. movq (%rsp,%r14,8),%rdx
  191. andq %rbx,%rcx
  192. andq %rax,%rdx
  193. movq %r9,(%rsp,%r14,8)
  194. orq %rcx,%rdx
  195. movq %rdx,(%rdi,%r14,8)
  196. leaq 1(%r14),%r14
  197. subq $1,%r15
  198. jnz L$copy
// Epilogue: reload the saved rsp from tp[num+1], restore callee-saved
// registers relative to it, return 1.
  199. movq 8(%rsp,%r9,8),%rsi
  200. movq $1,%rax
  201. movq -48(%rsi),%r15
  202. movq -40(%rsi),%r14
  203. movq -32(%rsi),%r13
  204. movq -24(%rsi),%r12
  205. movq -16(%rsi),%rbp
  206. movq -8(%rsi),%rbx
  207. leaq (%rsi),%rsp
  208. L$mul_epilogue:
// ret (hand-encoded as rep-ret; 0xf3 0xc3).
  209. .byte 0xf3,0xc3
  210. .p2align 4
// ---------------------------------------------------------------------------
// bn_mul4x_mont: Montgomery multiplication, 4-limbs-per-iteration unrolled.
// Same arguments as bn_mul_mont.  Normally entered at L$mul4x_enter from
// _bn_mul_mont with r11d = OPENSSL_ia32cap_P[2]; if both BMI2 and ADX are
// advertised (mask 0x80100) control transfers to the MULX path instead.
// rdi is reused as a scratch accumulator inside the loops, so the result
// pointer is parked in the frame at 16(%rsp,%r9,8).
// ---------------------------------------------------------------------------
  211. bn_mul4x_mont:
  212. movl %r9d,%r9d
  213. movq %rsp,%rax
  214. L$mul4x_enter:
// Feature gate: 0x80100 = BMI2 | ADX bits of ia32cap word 2 -- TODO confirm
// exact bit assignment against OPENSSL_ia32cap documentation.
  215. andl $0x80100,%r11d
  216. cmpl $0x80100,%r11d
  217. je L$mulx4x_enter
  218. pushq %rbx
  219. pushq %rbp
  220. pushq %r12
  221. pushq %r13
  222. pushq %r14
  223. pushq %r15
// Allocate tp[] of (num+3) quadwords, 1024-aligned, with guard-page walk
// (same scheme as the generic path).
  224. negq %r9
  225. movq %rsp,%r11
  226. leaq -32(%rsp,%r9,8),%r10
  227. negq %r9
  228. andq $-1024,%r10
  229. subq %r10,%r11
  230. andq $-4096,%r11
  231. leaq (%r10,%r11,1),%rsp
  232. movq (%rsp),%r11
  233. cmpq %r10,%rsp
  234. ja L$mul4x_page_walk
  235. jmp L$mul4x_page_walk_done
  236. L$mul4x_page_walk:
  237. leaq -4096(%rsp),%rsp
  238. movq (%rsp),%r11
  239. cmpq %r10,%rsp
  240. ja L$mul4x_page_walk
  241. L$mul4x_page_walk_done:
// Frame slots past tp[num]: +8 = caller rsp, +16 = rp (rdi is clobbered).
  242. movq %rax,8(%rsp,%r9,8)
  243. L$mul4x_body:
  244. movq %rdi,16(%rsp,%r9,8)
  245. movq %rdx,%r12
  246. movq (%r8),%r8
  247. movq (%r12),%rbx
  248. movq (%rsi),%rax
  249. xorq %r14,%r14
  250. xorq %r15,%r15
// First pass head (i = 0, j = 0..1): m = tp[0]*n0 via rbp.
  251. movq %r8,%rbp
  252. mulq %rbx
  253. movq %rax,%r10
  254. movq (%rcx),%rax
  255. imulq %r10,%rbp
  256. movq %rdx,%r11
  257. mulq %rbp
  258. addq %rax,%r10
  259. movq 8(%rsi),%rax
  260. adcq $0,%rdx
  261. movq %rdx,%rdi
  262. mulq %rbx
  263. addq %rax,%r11
  264. movq 8(%rcx),%rax
  265. adcq $0,%rdx
  266. movq %rdx,%r10
  267. mulq %rbp
  268. addq %rax,%rdi
  269. movq 16(%rsi),%rax
  270. adcq $0,%rdx
  271. addq %r11,%rdi
  272. leaq 4(%r15),%r15
  273. adcq $0,%rdx
  274. movq %rdi,(%rsp)
  275. movq %rdx,%r13
  276. jmp L$1st4x
  277. .p2align 4
// First-pass body: four limbs of ap[]*bp[0] + m*np[] per iteration,
// carries staggered through r10/r11/r13/rdi.
  278. L$1st4x:
  279. mulq %rbx
  280. addq %rax,%r10
  281. movq -16(%rcx,%r15,8),%rax
  282. adcq $0,%rdx
  283. movq %rdx,%r11
  284. mulq %rbp
  285. addq %rax,%r13
  286. movq -8(%rsi,%r15,8),%rax
  287. adcq $0,%rdx
  288. addq %r10,%r13
  289. adcq $0,%rdx
  290. movq %r13,-24(%rsp,%r15,8)
  291. movq %rdx,%rdi
  292. mulq %rbx
  293. addq %rax,%r11
  294. movq -8(%rcx,%r15,8),%rax
  295. adcq $0,%rdx
  296. movq %rdx,%r10
  297. mulq %rbp
  298. addq %rax,%rdi
  299. movq (%rsi,%r15,8),%rax
  300. adcq $0,%rdx
  301. addq %r11,%rdi
  302. adcq $0,%rdx
  303. movq %rdi,-16(%rsp,%r15,8)
  304. movq %rdx,%r13
  305. mulq %rbx
  306. addq %rax,%r10
  307. movq (%rcx,%r15,8),%rax
  308. adcq $0,%rdx
  309. movq %rdx,%r11
  310. mulq %rbp
  311. addq %rax,%r13
  312. movq 8(%rsi,%r15,8),%rax
  313. adcq $0,%rdx
  314. addq %r10,%r13
  315. adcq $0,%rdx
  316. movq %r13,-8(%rsp,%r15,8)
  317. movq %rdx,%rdi
  318. mulq %rbx
  319. addq %rax,%r11
  320. movq 8(%rcx,%r15,8),%rax
  321. adcq $0,%rdx
  322. leaq 4(%r15),%r15
  323. movq %rdx,%r10
  324. mulq %rbp
  325. addq %rax,%rdi
  326. movq -16(%rsi,%r15,8),%rax
  327. adcq $0,%rdx
  328. addq %r11,%rdi
  329. adcq $0,%rdx
  330. movq %rdi,-32(%rsp,%r15,8)
  331. movq %rdx,%r13
  332. cmpq %r9,%r15
  333. jb L$1st4x
// First-pass tail: last two limbs plus top word tp[num].
  334. mulq %rbx
  335. addq %rax,%r10
  336. movq -16(%rcx,%r15,8),%rax
  337. adcq $0,%rdx
  338. movq %rdx,%r11
  339. mulq %rbp
  340. addq %rax,%r13
  341. movq -8(%rsi,%r15,8),%rax
  342. adcq $0,%rdx
  343. addq %r10,%r13
  344. adcq $0,%rdx
  345. movq %r13,-24(%rsp,%r15,8)
  346. movq %rdx,%rdi
  347. mulq %rbx
  348. addq %rax,%r11
  349. movq -8(%rcx,%r15,8),%rax
  350. adcq $0,%rdx
  351. movq %rdx,%r10
  352. mulq %rbp
  353. addq %rax,%rdi
  354. movq (%rsi),%rax
  355. adcq $0,%rdx
  356. addq %r11,%rdi
  357. adcq $0,%rdx
  358. movq %rdi,-16(%rsp,%r15,8)
  359. movq %rdx,%r13
  360. xorq %rdi,%rdi
  361. addq %r10,%r13
  362. adcq $0,%rdi
  363. movq %r13,-8(%rsp,%r15,8)
  364. movq %rdi,(%rsp,%r15,8)
  365. leaq 1(%r14),%r14
  366. .p2align 2
// Outer loop over bp[i], i = 1 .. num-1, same structure with tp[] folded in.
  367. L$outer4x:
  368. movq (%r12,%r14,8),%rbx
  369. xorq %r15,%r15
  370. movq (%rsp),%r10
  371. movq %r8,%rbp
  372. mulq %rbx
  373. addq %rax,%r10
  374. movq (%rcx),%rax
  375. adcq $0,%rdx
  376. imulq %r10,%rbp
  377. movq %rdx,%r11
  378. mulq %rbp
  379. addq %rax,%r10
  380. movq 8(%rsi),%rax
  381. adcq $0,%rdx
  382. movq %rdx,%rdi
  383. mulq %rbx
  384. addq %rax,%r11
  385. movq 8(%rcx),%rax
  386. adcq $0,%rdx
  387. addq 8(%rsp),%r11
  388. adcq $0,%rdx
  389. movq %rdx,%r10
  390. mulq %rbp
  391. addq %rax,%rdi
  392. movq 16(%rsi),%rax
  393. adcq $0,%rdx
  394. addq %r11,%rdi
  395. leaq 4(%r15),%r15
  396. adcq $0,%rdx
  397. movq %rdi,(%rsp)
  398. movq %rdx,%r13
  399. jmp L$inner4x
  400. .p2align 4
// Inner loop: four limbs of ap[j]*bp[i] + m*np[j] + tp[j] per iteration.
  401. L$inner4x:
  402. mulq %rbx
  403. addq %rax,%r10
  404. movq -16(%rcx,%r15,8),%rax
  405. adcq $0,%rdx
  406. addq -16(%rsp,%r15,8),%r10
  407. adcq $0,%rdx
  408. movq %rdx,%r11
  409. mulq %rbp
  410. addq %rax,%r13
  411. movq -8(%rsi,%r15,8),%rax
  412. adcq $0,%rdx
  413. addq %r10,%r13
  414. adcq $0,%rdx
  415. movq %r13,-24(%rsp,%r15,8)
  416. movq %rdx,%rdi
  417. mulq %rbx
  418. addq %rax,%r11
  419. movq -8(%rcx,%r15,8),%rax
  420. adcq $0,%rdx
  421. addq -8(%rsp,%r15,8),%r11
  422. adcq $0,%rdx
  423. movq %rdx,%r10
  424. mulq %rbp
  425. addq %rax,%rdi
  426. movq (%rsi,%r15,8),%rax
  427. adcq $0,%rdx
  428. addq %r11,%rdi
  429. adcq $0,%rdx
  430. movq %rdi,-16(%rsp,%r15,8)
  431. movq %rdx,%r13
  432. mulq %rbx
  433. addq %rax,%r10
  434. movq (%rcx,%r15,8),%rax
  435. adcq $0,%rdx
  436. addq (%rsp,%r15,8),%r10
  437. adcq $0,%rdx
  438. movq %rdx,%r11
  439. mulq %rbp
  440. addq %rax,%r13
  441. movq 8(%rsi,%r15,8),%rax
  442. adcq $0,%rdx
  443. addq %r10,%r13
  444. adcq $0,%rdx
  445. movq %r13,-8(%rsp,%r15,8)
  446. movq %rdx,%rdi
  447. mulq %rbx
  448. addq %rax,%r11
  449. movq 8(%rcx,%r15,8),%rax
  450. adcq $0,%rdx
  451. addq 8(%rsp,%r15,8),%r11
  452. adcq $0,%rdx
  453. leaq 4(%r15),%r15
  454. movq %rdx,%r10
  455. mulq %rbp
  456. addq %rax,%rdi
  457. movq -16(%rsi,%r15,8),%rax
  458. adcq $0,%rdx
  459. addq %r11,%rdi
  460. adcq $0,%rdx
  461. movq %rdi,-32(%rsp,%r15,8)
  462. movq %rdx,%r13
  463. cmpq %r9,%r15
  464. jb L$inner4x
// Outer-iteration tail: fold last limbs and propagate tp[num] carry word.
  465. mulq %rbx
  466. addq %rax,%r10
  467. movq -16(%rcx,%r15,8),%rax
  468. adcq $0,%rdx
  469. addq -16(%rsp,%r15,8),%r10
  470. adcq $0,%rdx
  471. movq %rdx,%r11
  472. mulq %rbp
  473. addq %rax,%r13
  474. movq -8(%rsi,%r15,8),%rax
  475. adcq $0,%rdx
  476. addq %r10,%r13
  477. adcq $0,%rdx
  478. movq %r13,-24(%rsp,%r15,8)
  479. movq %rdx,%rdi
  480. mulq %rbx
  481. addq %rax,%r11
  482. movq -8(%rcx,%r15,8),%rax
  483. adcq $0,%rdx
  484. addq -8(%rsp,%r15,8),%r11
  485. adcq $0,%rdx
  486. leaq 1(%r14),%r14
  487. movq %rdx,%r10
  488. mulq %rbp
  489. addq %rax,%rdi
  490. movq (%rsi),%rax
  491. adcq $0,%rdx
  492. addq %r11,%rdi
  493. adcq $0,%rdx
  494. movq %rdi,-16(%rsp,%r15,8)
  495. movq %rdx,%r13
  496. xorq %rdi,%rdi
  497. addq %r10,%r13
  498. adcq $0,%rdi
  499. addq (%rsp,%r9,8),%r13
  500. adcq $0,%rdi
  501. movq %r13,-8(%rsp,%r15,8)
  502. movq %rdi,(%rsp,%r15,8)
  503. cmpq %r9,%r14
  504. jb L$outer4x
// Final subtraction rp[] = tp[] - np[], 4 limbs per iteration; rdi is
// reloaded with rp from the frame, rsi repointed at tp.
  505. movq 16(%rsp,%r9,8),%rdi
  506. leaq -4(%r9),%r15
  507. movq 0(%rsp),%rax
  508. movq 8(%rsp),%rdx
  509. shrq $2,%r15
  510. leaq (%rsp),%rsi
  511. xorq %r14,%r14
  512. subq 0(%rcx),%rax
  513. movq 16(%rsi),%rbx
  514. movq 24(%rsi),%rbp
  515. sbbq 8(%rcx),%rdx
  516. L$sub4x:
  517. movq %rax,0(%rdi,%r14,8)
  518. movq %rdx,8(%rdi,%r14,8)
  519. sbbq 16(%rcx,%r14,8),%rbx
  520. movq 32(%rsi,%r14,8),%rax
  521. movq 40(%rsi,%r14,8),%rdx
  522. sbbq 24(%rcx,%r14,8),%rbp
  523. movq %rbx,16(%rdi,%r14,8)
  524. movq %rbp,24(%rdi,%r14,8)
  525. sbbq 32(%rcx,%r14,8),%rax
  526. movq 48(%rsi,%r14,8),%rbx
  527. movq 56(%rsi,%r14,8),%rbp
  528. sbbq 40(%rcx,%r14,8),%rdx
  529. leaq 4(%r14),%r14
  530. decq %r15
  531. jnz L$sub4x
  532. movq %rax,0(%rdi,%r14,8)
  533. movq 32(%rsi,%r14,8),%rax
  534. sbbq 16(%rcx,%r14,8),%rbx
  535. movq %rdx,8(%rdi,%r14,8)
  536. sbbq 24(%rcx,%r14,8),%rbp
  537. movq %rbx,16(%rdi,%r14,8)
// rax = 0 or all-ones borrow mask after absorbing the final borrow.
  538. sbbq $0,%rax
  539. movq %rbp,24(%rdi,%r14,8)
// Build SIMD select masks: xmm4 = broadcast(rax), xmm5 = ~xmm4.
// The .byte sequence is a hand-encoded movq %rax,%xmm4 (66 REX.W 0F 6E) --
// presumably emitted raw for old-assembler compatibility; verify decoding.
  540. pxor %xmm0,%xmm0
  541. .byte 102,72,15,110,224
  542. pcmpeqd %xmm5,%xmm5
  543. pshufd $0,%xmm4,%xmm4
  544. movq %r9,%r15
  545. pxor %xmm4,%xmm5
  546. shrq $2,%r15
  547. xorl %eax,%eax
  548. jmp L$copy4x
  549. .p2align 4
// Constant-time select: rp = (tp & mask) | (rp & ~mask), 32 bytes per
// iteration, while scrubbing tp with zeros (xmm0).
  550. L$copy4x:
  551. movdqa (%rsp,%rax,1),%xmm1
  552. movdqu (%rdi,%rax,1),%xmm2
  553. pand %xmm4,%xmm1
  554. pand %xmm5,%xmm2
  555. movdqa 16(%rsp,%rax,1),%xmm3
  556. movdqa %xmm0,(%rsp,%rax,1)
  557. por %xmm2,%xmm1
  558. movdqu 16(%rdi,%rax,1),%xmm2
  559. movdqu %xmm1,(%rdi,%rax,1)
  560. pand %xmm4,%xmm3
  561. pand %xmm5,%xmm2
  562. movdqa %xmm0,16(%rsp,%rax,1)
  563. por %xmm2,%xmm3
  564. movdqu %xmm3,16(%rdi,%rax,1)
  565. leaq 32(%rax),%rax
  566. decq %r15
  567. jnz L$copy4x
// Epilogue: restore via saved rsp, return 1.
  568. movq 8(%rsp,%r9,8),%rsi
  569. movq $1,%rax
  570. movq -48(%rsi),%r15
  571. movq -40(%rsi),%r14
  572. movq -32(%rsi),%r13
  573. movq -24(%rsi),%r12
  574. movq -16(%rsi),%rbp
  575. movq -8(%rsi),%rbx
  576. leaq (%rsi),%rsp
  577. L$mul4x_epilogue:
// ret (hand-encoded rep-ret).
  578. .byte 0xf3,0xc3
  579. .p2align 5
// ---------------------------------------------------------------------------
// bn_sqr8x_mont: squaring path (ap == bp, num % 8 == 0), entered at
// L$sqr8x_enter from _bn_mul_mont.  Allocates a 2*num-limb frame, calls
// _bn_sqr8x_internal (or the MULX variant _bn_sqrx8x_internal, neither
// visible in this file), then does the final subtraction and a
// constant-time conditional copy into rp.
// ---------------------------------------------------------------------------
  580. bn_sqr8x_mont:
  581. movq %rsp,%rax
  582. L$sqr8x_enter:
  583. pushq %rbx
  584. pushq %rbp
  585. pushq %r12
  586. pushq %r13
  587. pushq %r14
  588. pushq %r15
  589. L$sqr8x_prologue:
// r9 = num*8 (bytes); r10 = num*32 (frame-placement threshold).
  590. movl %r9d,%r10d
  591. shll $3,%r9d
  592. shlq $3+2,%r10
  593. negq %r9
// Choose the frame base so that (rsp - frame) and ap differ modulo 4096 --
// apparently to avoid page/cache aliasing with the input; the alternate
// branch handles the case where the adjustment would not fit.
  594. leaq -64(%rsp,%r9,2),%r11
  595. movq %rsp,%rbp
  596. movq (%r8),%r8
  597. subq %rsi,%r11
  598. andq $4095,%r11
  599. cmpq %r11,%r10
  600. jb L$sqr8x_sp_alt
  601. subq %r11,%rbp
  602. leaq -64(%rbp,%r9,2),%rbp
  603. jmp L$sqr8x_sp_done
  604. .p2align 5
  605. L$sqr8x_sp_alt:
  606. leaq 4096-64(,%r9,2),%r10
  607. leaq -64(%rbp,%r9,2),%rbp
  608. subq %r10,%r11
  609. movq $0,%r10
  610. cmovcq %r10,%r11
  611. subq %r11,%rbp
  612. L$sqr8x_sp_done:
// 64-byte-align the frame, then guard-page walk down to it.
  613. andq $-64,%rbp
  614. movq %rsp,%r11
  615. subq %rbp,%r11
  616. andq $-4096,%r11
  617. leaq (%r11,%rbp,1),%rsp
  618. movq (%rsp),%r10
  619. cmpq %rbp,%rsp
  620. ja L$sqr8x_page_walk
  621. jmp L$sqr8x_page_walk_done
  622. .p2align 4
  623. L$sqr8x_page_walk:
  624. leaq -4096(%rsp),%rsp
  625. movq (%rsp),%r10
  626. cmpq %rbp,%rsp
  627. ja L$sqr8x_page_walk
  628. L$sqr8x_page_walk_done:
// Frame: 32(%rsp) = n0 value, 40(%rsp) = caller rsp.
  629. movq %r9,%r10
  630. negq %r9
  631. movq %r8,32(%rsp)
  632. movq %rax,40(%rsp)
  633. L$sqr8x_body:
// Hand-encoded SSE2 moves parking pointers in XMM registers across the
// internal call: movq %rcx,%xmm2; movq %rdi,%xmm1; movq %r10,%xmm3
// (decodings inferred from the 66/REX 0F 6E pattern -- verify).
  634. .byte 102,72,15,110,209
  635. pxor %xmm0,%xmm0
  636. .byte 102,72,15,110,207
  637. .byte 102,73,15,110,218
// Re-test CPU features locally: BMI2+ADX (0x80100) selects the MULX
// internal squaring routine.
  638. leaq _OPENSSL_ia32cap_P(%rip),%rax
  639. movl 8(%rax),%eax
  640. andl $0x80100,%eax
  641. cmpl $0x80100,%eax
  642. jne L$sqr8x_nox
  643. call _bn_sqrx8x_internal
// Post-call register contract of the internal routine (result base, counts)
// is not visible in this file -- values in r8/rcx/r9 are assumed per its
// ABI; rdi is restored from xmm1 by the 0F 7E movq below.
  644. leaq (%r8,%rcx,1),%rbx
  645. movq %rcx,%r9
  646. movq %rcx,%rdx
  647. .byte 102,72,15,126,207
  648. sarq $3+2,%rcx
  649. jmp L$sqr8x_sub
  650. .p2align 5
  651. L$sqr8x_nox:
  652. call _bn_sqr8x_internal
  653. leaq (%rdi,%r9,1),%rbx
  654. movq %r9,%rcx
  655. movq %r9,%rdx
  656. .byte 102,72,15,126,207
  657. sarq $3+2,%rcx
  658. jmp L$sqr8x_sub
  659. .p2align 5
// Subtract the modulus, 4 limbs per iteration (rcx counts up from a
// negative 32-byte-chunk count to zero).
  660. L$sqr8x_sub:
  661. movq 0(%rbx),%r12
  662. movq 8(%rbx),%r13
  663. movq 16(%rbx),%r14
  664. movq 24(%rbx),%r15
  665. leaq 32(%rbx),%rbx
  666. sbbq 0(%rbp),%r12
  667. sbbq 8(%rbp),%r13
  668. sbbq 16(%rbp),%r14
  669. sbbq 24(%rbp),%r15
  670. leaq 32(%rbp),%rbp
  671. movq %r12,0(%rdi)
  672. movq %r13,8(%rdi)
  673. movq %r14,16(%rdi)
  674. movq %r15,24(%rdi)
  675. leaq 32(%rdi),%rdi
  676. incq %rcx
  677. jnz L$sqr8x_sub
// rax becomes the borrow mask; rewind rbx/rdi to the start of the data.
  678. sbbq $0,%rax
  679. leaq (%rbx,%r9,1),%rbx
  680. leaq (%rdi,%r9,1),%rdi
// movq %rax,%xmm1 (hand-encoded), broadcast to a full-width select mask.
  681. .byte 102,72,15,110,200
  682. pxor %xmm0,%xmm0
  683. pshufd $0,%xmm1,%xmm1
  684. movq 40(%rsp),%rsi
  685. jmp L$sqr8x_cond_copy
  686. .p2align 5
// Constant-time select between the pre- and post-subtraction values,
// 32 bytes per iteration, scrubbing the temporary with zeros; r9 counts
// up from -num*8 to zero.
  687. L$sqr8x_cond_copy:
  688. movdqa 0(%rbx),%xmm2
  689. movdqa 16(%rbx),%xmm3
  690. leaq 32(%rbx),%rbx
  691. movdqu 0(%rdi),%xmm4
  692. movdqu 16(%rdi),%xmm5
  693. leaq 32(%rdi),%rdi
  694. movdqa %xmm0,-32(%rbx)
  695. movdqa %xmm0,-16(%rbx)
  696. movdqa %xmm0,-32(%rbx,%rdx,1)
  697. movdqa %xmm0,-16(%rbx,%rdx,1)
  698. pcmpeqd %xmm1,%xmm0
  699. pand %xmm1,%xmm2
  700. pand %xmm1,%xmm3
  701. pand %xmm0,%xmm4
  702. pand %xmm0,%xmm5
  703. pxor %xmm0,%xmm0
  704. por %xmm2,%xmm4
  705. por %xmm3,%xmm5
  706. movdqu %xmm4,-32(%rdi)
  707. movdqu %xmm5,-16(%rdi)
  708. addq $32,%r9
  709. jnz L$sqr8x_cond_copy
// Epilogue: restore callee-saved registers via the saved rsp, return 1.
  710. movq $1,%rax
  711. movq -48(%rsi),%r15
  712. movq -40(%rsi),%r14
  713. movq -32(%rsi),%r13
  714. movq -24(%rsi),%r12
  715. movq -16(%rsi),%rbp
  716. movq -8(%rsi),%rbx
  717. leaq (%rsi),%rsp
  718. L$sqr8x_epilogue:
// ret (hand-encoded rep-ret).
  719. .byte 0xf3,0xc3
  720. .p2align 5
// ---------------------------------------------------------------------------
// bn_mulx4x_mont: BMI2/ADX path (MULX + dual ADCX/ADOX carry chains),
// entered at L$mulx4x_enter when ia32cap advertises 0x80100.
// Frame layout (established below):
//   0(%rsp)  = num*8            8(%rsp)  = current bp pointer
//   16(%rsp) = &bp[num] (end)   24(%rsp) = n0 value
//   32(%rsp) = rp               40(%rsp) = caller rsp
//   48(%rsp) = inner-loop count (num/4 - 1, in 4-limb chunks)
// ---------------------------------------------------------------------------
  721. bn_mulx4x_mont:
  722. movq %rsp,%rax
  723. L$mulx4x_enter:
  724. pushq %rbx
  725. pushq %rbp
  726. pushq %r12
  727. pushq %r13
  728. pushq %r14
  729. pushq %r15
  730. L$mulx4x_prologue:
// r9 = num*8 bytes; allocate a 128-byte-aligned frame of that size + 72,
// with the usual guard-page walk.
  731. shll $3,%r9d
  732. xorq %r10,%r10
  733. subq %r9,%r10
  734. movq (%r8),%r8
  735. leaq -72(%rsp,%r10,1),%rbp
  736. andq $-128,%rbp
  737. movq %rsp,%r11
  738. subq %rbp,%r11
  739. andq $-4096,%r11
  740. leaq (%r11,%rbp,1),%rsp
  741. movq (%rsp),%r10
  742. cmpq %rbp,%rsp
  743. ja L$mulx4x_page_walk
  744. jmp L$mulx4x_page_walk_done
  745. .p2align 4
  746. L$mulx4x_page_walk:
  747. leaq -4096(%rsp),%rsp
  748. movq (%rsp),%r10
  749. cmpq %rbp,%rsp
  750. ja L$mulx4x_page_walk
  751. L$mulx4x_page_walk_done:
// Populate the frame slots documented in the header.
  752. leaq (%rdx,%r9,1),%r10
  753. movq %r9,0(%rsp)
  754. shrq $5,%r9
  755. movq %r10,16(%rsp)
  756. subq $1,%r9
  757. movq %r8,24(%rsp)
  758. movq %rdi,32(%rsp)
  759. movq %rax,40(%rsp)
  760. movq %r9,48(%rsp)
  761. jmp L$mulx4x_body
  762. .p2align 5
// First pass head: rdx is MULX's implicit multiplicand (= bp[0], saved in
// r9 across the reduction step); rbp is the constant 0 used by adcx/adox.
  763. L$mulx4x_body:
  764. leaq 8(%rdx),%rdi
  765. movq (%rdx),%rdx
  766. leaq 64+32(%rsp),%rbx
  767. movq %rdx,%r9
  768. mulxq 0(%rsi),%r8,%rax
  769. mulxq 8(%rsi),%r11,%r14
  770. addq %rax,%r11
  771. movq %rdi,8(%rsp)
  772. mulxq 16(%rsi),%r12,%r13
  773. adcq %r14,%r12
  774. adcq $0,%r13
// m = tp[0]*n0; switch rdx to m for the np[] products.
  775. movq %r8,%rdi
  776. imulq 24(%rsp),%r8
  777. xorq %rbp,%rbp
  778. mulxq 24(%rsi),%rax,%r14
  779. movq %r8,%rdx
  780. leaq 32(%rsi),%rsi
  781. adcxq %rax,%r13
  782. adcxq %rbp,%r14
  783. mulxq 0(%rcx),%rax,%r10
  784. adcxq %rax,%rdi
  785. adoxq %r11,%r10
  786. mulxq 8(%rcx),%rax,%r11
  787. adcxq %rax,%r10
  788. adoxq %r12,%r11
// Hand-encoded mulxq 16(%rcx),%rax,%r12 (VEX c4 62 fb f6) -- presumably raw
// bytes for assemblers without MULX support; verify decoding.
  789. .byte 0xc4,0x62,0xfb,0xf6,0xa1,0x10,0x00,0x00,0x00
  790. movq 48(%rsp),%rdi
  791. movq %r10,-32(%rbx)
  792. adcxq %rax,%r11
  793. adoxq %r13,%r12
  794. mulxq 24(%rcx),%rax,%r15
  795. movq %r9,%rdx
  796. movq %r11,-24(%rbx)
  797. adcxq %rax,%r12
  798. adoxq %rbp,%r15
  799. leaq 32(%rcx),%rcx
  800. movq %r12,-16(%rbx)
  801. jmp L$mulx4x_1st
  802. .p2align 5
// First-pass body: 4 limbs of ap[]*bp[0] (adcx chain) interleaved with
// m*np[] (adox chain) per iteration; rdi counts chunks down to zero.
  803. L$mulx4x_1st:
  804. adcxq %rbp,%r15
  805. mulxq 0(%rsi),%r10,%rax
  806. adcxq %r14,%r10
  807. mulxq 8(%rsi),%r11,%r14
  808. adcxq %rax,%r11
  809. mulxq 16(%rsi),%r12,%rax
  810. adcxq %r14,%r12
  811. mulxq 24(%rsi),%r13,%r14
// 0x67 0x67: two address-size prefixes -- apparently instruction padding
// for decoder alignment (NOTE(review): no architectural effect here).
  812. .byte 0x67,0x67
  813. movq %r8,%rdx
  814. adcxq %rax,%r13
  815. adcxq %rbp,%r14
  816. leaq 32(%rsi),%rsi
  817. leaq 32(%rbx),%rbx
  818. adoxq %r15,%r10
  819. mulxq 0(%rcx),%rax,%r15
  820. adcxq %rax,%r10
  821. adoxq %r15,%r11
  822. mulxq 8(%rcx),%rax,%r15
  823. adcxq %rax,%r11
  824. adoxq %r15,%r12
  825. mulxq 16(%rcx),%rax,%r15
  826. movq %r10,-40(%rbx)
  827. adcxq %rax,%r12
  828. movq %r11,-32(%rbx)
  829. adoxq %r15,%r13
  830. mulxq 24(%rcx),%rax,%r15
  831. movq %r9,%rdx
  832. movq %r12,-24(%rbx)
  833. adcxq %rax,%r13
  834. adoxq %rbp,%r15
  835. leaq 32(%rcx),%rcx
  836. movq %r13,-16(%rbx)
  837. decq %rdi
  838. jnz L$mulx4x_1st
// First-pass tail: merge the two carry chains into the top word.
  839. movq 0(%rsp),%rax
  840. movq 8(%rsp),%rdi
  841. adcq %rbp,%r15
  842. addq %r15,%r14
  843. sbbq %r15,%r15
  844. movq %r14,-8(%rbx)
  845. jmp L$mulx4x_outer
  846. .p2align 5
// Outer loop over the remaining bp[i]: rewind ap/np by num*8 (rax) and
// fold the previous tp[] contents in via the adox chain.
  847. L$mulx4x_outer:
  848. movq (%rdi),%rdx
  849. leaq 8(%rdi),%rdi
  850. subq %rax,%rsi
  851. movq %r15,(%rbx)
  852. leaq 64+32(%rsp),%rbx
  853. subq %rax,%rcx
  854. mulxq 0(%rsi),%r8,%r11
  855. xorl %ebp,%ebp
  856. movq %rdx,%r9
  857. mulxq 8(%rsi),%r14,%r12
  858. adoxq -32(%rbx),%r8
  859. adcxq %r14,%r11
  860. mulxq 16(%rsi),%r15,%r13
  861. adoxq -24(%rbx),%r11
  862. adcxq %r15,%r12
  863. adoxq -16(%rbx),%r12
  864. adcxq %rbp,%r13
  865. adoxq %rbp,%r13
  866. movq %rdi,8(%rsp)
  867. movq %r8,%r15
  868. imulq 24(%rsp),%r8
  869. xorl %ebp,%ebp
  870. mulxq 24(%rsi),%rax,%r14
  871. movq %r8,%rdx
  872. adcxq %rax,%r13
  873. adoxq -8(%rbx),%r13
  874. adcxq %rbp,%r14
  875. leaq 32(%rsi),%rsi
  876. adoxq %rbp,%r14
  877. mulxq 0(%rcx),%rax,%r10
  878. adcxq %rax,%r15
  879. adoxq %r11,%r10
  880. mulxq 8(%rcx),%rax,%r11
  881. adcxq %rax,%r10
  882. adoxq %r12,%r11
  883. mulxq 16(%rcx),%rax,%r12
  884. movq %r10,-32(%rbx)
  885. adcxq %rax,%r11
  886. adoxq %r13,%r12
  887. mulxq 24(%rcx),%rax,%r15
  888. movq %r9,%rdx
  889. movq %r11,-24(%rbx)
  890. leaq 32(%rcx),%rcx
  891. adcxq %rax,%r12
  892. adoxq %rbp,%r15
  893. movq 48(%rsp),%rdi
  894. movq %r12,-16(%rbx)
  895. jmp L$mulx4x_inner
  896. .p2align 5
// Inner loop: like L$mulx4x_1st but additionally accumulates the previous
// tp[] values read back from (%rbx).
  897. L$mulx4x_inner:
  898. mulxq 0(%rsi),%r10,%rax
  899. adcxq %rbp,%r15
  900. adoxq %r14,%r10
  901. mulxq 8(%rsi),%r11,%r14
  902. adcxq 0(%rbx),%r10
  903. adoxq %rax,%r11
  904. mulxq 16(%rsi),%r12,%rax
  905. adcxq 8(%rbx),%r11
  906. adoxq %r14,%r12
  907. mulxq 24(%rsi),%r13,%r14
  908. movq %r8,%rdx
  909. adcxq 16(%rbx),%r12
  910. adoxq %rax,%r13
  911. adcxq 24(%rbx),%r13
  912. adoxq %rbp,%r14
  913. leaq 32(%rsi),%rsi
  914. leaq 32(%rbx),%rbx
  915. adcxq %rbp,%r14
  916. adoxq %r15,%r10
  917. mulxq 0(%rcx),%rax,%r15
  918. adcxq %rax,%r10
  919. adoxq %r15,%r11
  920. mulxq 8(%rcx),%rax,%r15
  921. adcxq %rax,%r11
  922. adoxq %r15,%r12
  923. mulxq 16(%rcx),%rax,%r15
  924. movq %r10,-40(%rbx)
  925. adcxq %rax,%r12
  926. adoxq %r15,%r13
  927. mulxq 24(%rcx),%rax,%r15
  928. movq %r9,%rdx
  929. movq %r11,-32(%rbx)
  930. movq %r12,-24(%rbx)
  931. adcxq %rax,%r13
  932. adoxq %rbp,%r15
  933. leaq 32(%rcx),%rcx
  934. movq %r13,-16(%rbx)
  935. decq %rdi
  936. jnz L$mulx4x_inner
// Outer-iteration tail: propagate carries into the top word; loop until
// the bp pointer reaches &bp[num] saved at 16(%rsp).
  937. movq 0(%rsp),%rax
  938. movq 8(%rsp),%rdi
  939. adcq %rbp,%r15
  940. subq 0(%rbx),%rbp
  941. adcq %r15,%r14
  942. sbbq %r15,%r15
  943. movq %r14,-8(%rbx)
  944. cmpq 16(%rsp),%rdi
  945. jne L$mulx4x_outer
// Final subtraction setup: rbx = tp, rax = num*8 -> chunk count; r15 holds
// the (negated) top-word borrow mask.
  946. leaq 64(%rsp),%rbx
  947. subq %rax,%rcx
  948. negq %r15
  949. movq %rax,%rdx
  950. shrq $3+2,%rax
  951. movq 32(%rsp),%rdi
  952. jmp L$mulx4x_sub
  953. .p2align 5
// rp[] = tp[] - np[], 4 limbs per iteration.
  954. L$mulx4x_sub:
  955. movq 0(%rbx),%r11
  956. movq 8(%rbx),%r12
  957. movq 16(%rbx),%r13
  958. movq 24(%rbx),%r14
  959. leaq 32(%rbx),%rbx
  960. sbbq 0(%rcx),%r11
  961. sbbq 8(%rcx),%r12
  962. sbbq 16(%rcx),%r13
  963. sbbq 24(%rcx),%r14
  964. leaq 32(%rcx),%rcx
  965. movq %r11,0(%rdi)
  966. movq %r12,8(%rdi)
  967. movq %r13,16(%rdi)
  968. movq %r14,24(%rdi)
  969. leaq 32(%rdi),%rdi
  970. decq %rax
  971. jnz L$mulx4x_sub
// r15 -> full borrow mask; movq %r15,%xmm1 is hand-encoded below.
  972. sbbq $0,%r15
  973. leaq 64(%rsp),%rbx
  974. subq %rdx,%rdi
  975. .byte 102,73,15,110,207
  976. pxor %xmm0,%xmm0
  977. pshufd $0,%xmm1,%xmm1
  978. movq 40(%rsp),%rsi
  979. jmp L$mulx4x_cond_copy
  980. .p2align 5
// Constant-time select between tp and rp-minus-np, scrubbing tp with
// zeros; rdx counts down from num*8 to zero in 32-byte steps.
  981. L$mulx4x_cond_copy:
  982. movdqa 0(%rbx),%xmm2
  983. movdqa 16(%rbx),%xmm3
  984. leaq 32(%rbx),%rbx
  985. movdqu 0(%rdi),%xmm4
  986. movdqu 16(%rdi),%xmm5
  987. leaq 32(%rdi),%rdi
  988. movdqa %xmm0,-32(%rbx)
  989. movdqa %xmm0,-16(%rbx)
  990. pcmpeqd %xmm1,%xmm0
  991. pand %xmm1,%xmm2
  992. pand %xmm1,%xmm3
  993. pand %xmm0,%xmm4
  994. pand %xmm0,%xmm5
  995. pxor %xmm0,%xmm0
  996. por %xmm2,%xmm4
  997. por %xmm3,%xmm5
  998. movdqu %xmm4,-32(%rdi)
  999. movdqu %xmm5,-16(%rdi)
  1000. subq $32,%rdx
  1001. jnz L$mulx4x_cond_copy
// rdx is zero here, so this scrubs the final tp word.
  1002. movq %rdx,(%rbx)
// Epilogue: restore via the saved rsp, return 1.
  1003. movq $1,%rax
  1004. movq -48(%rsi),%r15
  1005. movq -40(%rsi),%r14
  1006. movq -32(%rsi),%r13
  1007. movq -24(%rsi),%r12
  1008. movq -16(%rsi),%rbp
  1009. movq -8(%rsi),%rbx
  1010. leaq (%rsi),%rsp
  1011. L$mulx4x_epilogue:
// ret (hand-encoded rep-ret).
  1012. .byte 0xf3,0xc3
  1013. .byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
  1014. .p2align 4
  1015. #endif