; aesni-gcm-x86_64.asm
; (a rendered file-listing header and its collapsed line-number gutter
;  appeared here — listing artifacts, not part of the assembly source)
  1. ; This file is generated from a similarly-named Perl script in the BoringSSL
  2. ; source tree. Do not edit by hand.
  3. default rel
  4. %define XMMWORD
  5. %define YMMWORD
  6. %define ZMMWORD
  7. %ifdef BORINGSSL_PREFIX
  8. %include "boringssl_prefix_symbols_nasm.inc"
  9. %endif
  10. section .text code align=64
  11. ALIGN 32
  12. _aesni_ctr32_ghash_6x:
  13. vmovdqu xmm2,XMMWORD[32+r11]
  14. sub rdx,6
  15. vpxor xmm4,xmm4,xmm4
  16. vmovdqu xmm15,XMMWORD[((0-128))+rcx]
  17. vpaddb xmm10,xmm1,xmm2
  18. vpaddb xmm11,xmm10,xmm2
  19. vpaddb xmm12,xmm11,xmm2
  20. vpaddb xmm13,xmm12,xmm2
  21. vpaddb xmm14,xmm13,xmm2
  22. vpxor xmm9,xmm1,xmm15
  23. vmovdqu XMMWORD[(16+8)+rsp],xmm4
  24. jmp NEAR $L$oop6x
  25. ALIGN 32
  26. $L$oop6x:
  27. add ebx,100663296
  28. jc NEAR $L$handle_ctr32
  29. vmovdqu xmm3,XMMWORD[((0-32))+r9]
  30. vpaddb xmm1,xmm14,xmm2
  31. vpxor xmm10,xmm10,xmm15
  32. vpxor xmm11,xmm11,xmm15
  33. $L$resume_ctr32:
  34. vmovdqu XMMWORD[r8],xmm1
  35. vpclmulqdq xmm5,xmm7,xmm3,0x10
  36. vpxor xmm12,xmm12,xmm15
  37. vmovups xmm2,XMMWORD[((16-128))+rcx]
  38. vpclmulqdq xmm6,xmm7,xmm3,0x01
  39. xor r12,r12
  40. cmp r15,r14
  41. vaesenc xmm9,xmm9,xmm2
  42. vmovdqu xmm0,XMMWORD[((48+8))+rsp]
  43. vpxor xmm13,xmm13,xmm15
  44. vpclmulqdq xmm1,xmm7,xmm3,0x00
  45. vaesenc xmm10,xmm10,xmm2
  46. vpxor xmm14,xmm14,xmm15
  47. setnc r12b
  48. vpclmulqdq xmm7,xmm7,xmm3,0x11
  49. vaesenc xmm11,xmm11,xmm2
  50. vmovdqu xmm3,XMMWORD[((16-32))+r9]
  51. neg r12
  52. vaesenc xmm12,xmm12,xmm2
  53. vpxor xmm6,xmm6,xmm5
  54. vpclmulqdq xmm5,xmm0,xmm3,0x00
  55. vpxor xmm8,xmm8,xmm4
  56. vaesenc xmm13,xmm13,xmm2
  57. vpxor xmm4,xmm1,xmm5
  58. and r12,0x60
  59. vmovups xmm15,XMMWORD[((32-128))+rcx]
  60. vpclmulqdq xmm1,xmm0,xmm3,0x10
  61. vaesenc xmm14,xmm14,xmm2
  62. vpclmulqdq xmm2,xmm0,xmm3,0x01
  63. lea r14,[r12*1+r14]
  64. vaesenc xmm9,xmm9,xmm15
  65. vpxor xmm8,xmm8,XMMWORD[((16+8))+rsp]
  66. vpclmulqdq xmm3,xmm0,xmm3,0x11
  67. vmovdqu xmm0,XMMWORD[((64+8))+rsp]
  68. vaesenc xmm10,xmm10,xmm15
  69. movbe r13,QWORD[88+r14]
  70. vaesenc xmm11,xmm11,xmm15
  71. movbe r12,QWORD[80+r14]
  72. vaesenc xmm12,xmm12,xmm15
  73. mov QWORD[((32+8))+rsp],r13
  74. vaesenc xmm13,xmm13,xmm15
  75. mov QWORD[((40+8))+rsp],r12
  76. vmovdqu xmm5,XMMWORD[((48-32))+r9]
  77. vaesenc xmm14,xmm14,xmm15
  78. vmovups xmm15,XMMWORD[((48-128))+rcx]
  79. vpxor xmm6,xmm6,xmm1
  80. vpclmulqdq xmm1,xmm0,xmm5,0x00
  81. vaesenc xmm9,xmm9,xmm15
  82. vpxor xmm6,xmm6,xmm2
  83. vpclmulqdq xmm2,xmm0,xmm5,0x10
  84. vaesenc xmm10,xmm10,xmm15
  85. vpxor xmm7,xmm7,xmm3
  86. vpclmulqdq xmm3,xmm0,xmm5,0x01
  87. vaesenc xmm11,xmm11,xmm15
  88. vpclmulqdq xmm5,xmm0,xmm5,0x11
  89. vmovdqu xmm0,XMMWORD[((80+8))+rsp]
  90. vaesenc xmm12,xmm12,xmm15
  91. vaesenc xmm13,xmm13,xmm15
  92. vpxor xmm4,xmm4,xmm1
  93. vmovdqu xmm1,XMMWORD[((64-32))+r9]
  94. vaesenc xmm14,xmm14,xmm15
  95. vmovups xmm15,XMMWORD[((64-128))+rcx]
  96. vpxor xmm6,xmm6,xmm2
  97. vpclmulqdq xmm2,xmm0,xmm1,0x00
  98. vaesenc xmm9,xmm9,xmm15
  99. vpxor xmm6,xmm6,xmm3
  100. vpclmulqdq xmm3,xmm0,xmm1,0x10
  101. vaesenc xmm10,xmm10,xmm15
  102. movbe r13,QWORD[72+r14]
  103. vpxor xmm7,xmm7,xmm5
  104. vpclmulqdq xmm5,xmm0,xmm1,0x01
  105. vaesenc xmm11,xmm11,xmm15
  106. movbe r12,QWORD[64+r14]
  107. vpclmulqdq xmm1,xmm0,xmm1,0x11
  108. vmovdqu xmm0,XMMWORD[((96+8))+rsp]
  109. vaesenc xmm12,xmm12,xmm15
  110. mov QWORD[((48+8))+rsp],r13
  111. vaesenc xmm13,xmm13,xmm15
  112. mov QWORD[((56+8))+rsp],r12
  113. vpxor xmm4,xmm4,xmm2
  114. vmovdqu xmm2,XMMWORD[((96-32))+r9]
  115. vaesenc xmm14,xmm14,xmm15
  116. vmovups xmm15,XMMWORD[((80-128))+rcx]
  117. vpxor xmm6,xmm6,xmm3
  118. vpclmulqdq xmm3,xmm0,xmm2,0x00
  119. vaesenc xmm9,xmm9,xmm15
  120. vpxor xmm6,xmm6,xmm5
  121. vpclmulqdq xmm5,xmm0,xmm2,0x10
  122. vaesenc xmm10,xmm10,xmm15
  123. movbe r13,QWORD[56+r14]
  124. vpxor xmm7,xmm7,xmm1
  125. vpclmulqdq xmm1,xmm0,xmm2,0x01
  126. vpxor xmm8,xmm8,XMMWORD[((112+8))+rsp]
  127. vaesenc xmm11,xmm11,xmm15
  128. movbe r12,QWORD[48+r14]
  129. vpclmulqdq xmm2,xmm0,xmm2,0x11
  130. vaesenc xmm12,xmm12,xmm15
  131. mov QWORD[((64+8))+rsp],r13
  132. vaesenc xmm13,xmm13,xmm15
  133. mov QWORD[((72+8))+rsp],r12
  134. vpxor xmm4,xmm4,xmm3
  135. vmovdqu xmm3,XMMWORD[((112-32))+r9]
  136. vaesenc xmm14,xmm14,xmm15
  137. vmovups xmm15,XMMWORD[((96-128))+rcx]
  138. vpxor xmm6,xmm6,xmm5
  139. vpclmulqdq xmm5,xmm8,xmm3,0x10
  140. vaesenc xmm9,xmm9,xmm15
  141. vpxor xmm6,xmm6,xmm1
  142. vpclmulqdq xmm1,xmm8,xmm3,0x01
  143. vaesenc xmm10,xmm10,xmm15
  144. movbe r13,QWORD[40+r14]
  145. vpxor xmm7,xmm7,xmm2
  146. vpclmulqdq xmm2,xmm8,xmm3,0x00
  147. vaesenc xmm11,xmm11,xmm15
  148. movbe r12,QWORD[32+r14]
  149. vpclmulqdq xmm8,xmm8,xmm3,0x11
  150. vaesenc xmm12,xmm12,xmm15
  151. mov QWORD[((80+8))+rsp],r13
  152. vaesenc xmm13,xmm13,xmm15
  153. mov QWORD[((88+8))+rsp],r12
  154. vpxor xmm6,xmm6,xmm5
  155. vaesenc xmm14,xmm14,xmm15
  156. vpxor xmm6,xmm6,xmm1
  157. vmovups xmm15,XMMWORD[((112-128))+rcx]
  158. vpslldq xmm5,xmm6,8
  159. vpxor xmm4,xmm4,xmm2
  160. vmovdqu xmm3,XMMWORD[16+r11]
  161. vaesenc xmm9,xmm9,xmm15
  162. vpxor xmm7,xmm7,xmm8
  163. vaesenc xmm10,xmm10,xmm15
  164. vpxor xmm4,xmm4,xmm5
  165. movbe r13,QWORD[24+r14]
  166. vaesenc xmm11,xmm11,xmm15
  167. movbe r12,QWORD[16+r14]
  168. vpalignr xmm0,xmm4,xmm4,8
  169. vpclmulqdq xmm4,xmm4,xmm3,0x10
  170. mov QWORD[((96+8))+rsp],r13
  171. vaesenc xmm12,xmm12,xmm15
  172. mov QWORD[((104+8))+rsp],r12
  173. vaesenc xmm13,xmm13,xmm15
  174. vmovups xmm1,XMMWORD[((128-128))+rcx]
  175. vaesenc xmm14,xmm14,xmm15
  176. vaesenc xmm9,xmm9,xmm1
  177. vmovups xmm15,XMMWORD[((144-128))+rcx]
  178. vaesenc xmm10,xmm10,xmm1
  179. vpsrldq xmm6,xmm6,8
  180. vaesenc xmm11,xmm11,xmm1
  181. vpxor xmm7,xmm7,xmm6
  182. vaesenc xmm12,xmm12,xmm1
  183. vpxor xmm4,xmm4,xmm0
  184. movbe r13,QWORD[8+r14]
  185. vaesenc xmm13,xmm13,xmm1
  186. movbe r12,QWORD[r14]
  187. vaesenc xmm14,xmm14,xmm1
  188. vmovups xmm1,XMMWORD[((160-128))+rcx]
  189. cmp ebp,11
  190. jb NEAR $L$enc_tail
  191. vaesenc xmm9,xmm9,xmm15
  192. vaesenc xmm10,xmm10,xmm15
  193. vaesenc xmm11,xmm11,xmm15
  194. vaesenc xmm12,xmm12,xmm15
  195. vaesenc xmm13,xmm13,xmm15
  196. vaesenc xmm14,xmm14,xmm15
  197. vaesenc xmm9,xmm9,xmm1
  198. vaesenc xmm10,xmm10,xmm1
  199. vaesenc xmm11,xmm11,xmm1
  200. vaesenc xmm12,xmm12,xmm1
  201. vaesenc xmm13,xmm13,xmm1
  202. vmovups xmm15,XMMWORD[((176-128))+rcx]
  203. vaesenc xmm14,xmm14,xmm1
  204. vmovups xmm1,XMMWORD[((192-128))+rcx]
  205. je NEAR $L$enc_tail
  206. vaesenc xmm9,xmm9,xmm15
  207. vaesenc xmm10,xmm10,xmm15
  208. vaesenc xmm11,xmm11,xmm15
  209. vaesenc xmm12,xmm12,xmm15
  210. vaesenc xmm13,xmm13,xmm15
  211. vaesenc xmm14,xmm14,xmm15
  212. vaesenc xmm9,xmm9,xmm1
  213. vaesenc xmm10,xmm10,xmm1
  214. vaesenc xmm11,xmm11,xmm1
  215. vaesenc xmm12,xmm12,xmm1
  216. vaesenc xmm13,xmm13,xmm1
  217. vmovups xmm15,XMMWORD[((208-128))+rcx]
  218. vaesenc xmm14,xmm14,xmm1
  219. vmovups xmm1,XMMWORD[((224-128))+rcx]
  220. jmp NEAR $L$enc_tail
  221. ALIGN 32
  222. $L$handle_ctr32:
  223. vmovdqu xmm0,XMMWORD[r11]
  224. vpshufb xmm6,xmm1,xmm0
  225. vmovdqu xmm5,XMMWORD[48+r11]
  226. vpaddd xmm10,xmm6,XMMWORD[64+r11]
  227. vpaddd xmm11,xmm6,xmm5
  228. vmovdqu xmm3,XMMWORD[((0-32))+r9]
  229. vpaddd xmm12,xmm10,xmm5
  230. vpshufb xmm10,xmm10,xmm0
  231. vpaddd xmm13,xmm11,xmm5
  232. vpshufb xmm11,xmm11,xmm0
  233. vpxor xmm10,xmm10,xmm15
  234. vpaddd xmm14,xmm12,xmm5
  235. vpshufb xmm12,xmm12,xmm0
  236. vpxor xmm11,xmm11,xmm15
  237. vpaddd xmm1,xmm13,xmm5
  238. vpshufb xmm13,xmm13,xmm0
  239. vpshufb xmm14,xmm14,xmm0
  240. vpshufb xmm1,xmm1,xmm0
  241. jmp NEAR $L$resume_ctr32
  242. ALIGN 32
  243. $L$enc_tail:
  244. vaesenc xmm9,xmm9,xmm15
  245. vmovdqu XMMWORD[(16+8)+rsp],xmm7
  246. vpalignr xmm8,xmm4,xmm4,8
  247. vaesenc xmm10,xmm10,xmm15
  248. vpclmulqdq xmm4,xmm4,xmm3,0x10
  249. vpxor xmm2,xmm1,XMMWORD[rdi]
  250. vaesenc xmm11,xmm11,xmm15
  251. vpxor xmm0,xmm1,XMMWORD[16+rdi]
  252. vaesenc xmm12,xmm12,xmm15
  253. vpxor xmm5,xmm1,XMMWORD[32+rdi]
  254. vaesenc xmm13,xmm13,xmm15
  255. vpxor xmm6,xmm1,XMMWORD[48+rdi]
  256. vaesenc xmm14,xmm14,xmm15
  257. vpxor xmm7,xmm1,XMMWORD[64+rdi]
  258. vpxor xmm3,xmm1,XMMWORD[80+rdi]
  259. vmovdqu xmm1,XMMWORD[r8]
  260. vaesenclast xmm9,xmm9,xmm2
  261. vmovdqu xmm2,XMMWORD[32+r11]
  262. vaesenclast xmm10,xmm10,xmm0
  263. vpaddb xmm0,xmm1,xmm2
  264. mov QWORD[((112+8))+rsp],r13
  265. lea rdi,[96+rdi]
  266. vaesenclast xmm11,xmm11,xmm5
  267. vpaddb xmm5,xmm0,xmm2
  268. mov QWORD[((120+8))+rsp],r12
  269. lea rsi,[96+rsi]
  270. vmovdqu xmm15,XMMWORD[((0-128))+rcx]
  271. vaesenclast xmm12,xmm12,xmm6
  272. vpaddb xmm6,xmm5,xmm2
  273. vaesenclast xmm13,xmm13,xmm7
  274. vpaddb xmm7,xmm6,xmm2
  275. vaesenclast xmm14,xmm14,xmm3
  276. vpaddb xmm3,xmm7,xmm2
  277. add r10,0x60
  278. sub rdx,0x6
  279. jc NEAR $L$6x_done
  280. vmovups XMMWORD[(-96)+rsi],xmm9
  281. vpxor xmm9,xmm1,xmm15
  282. vmovups XMMWORD[(-80)+rsi],xmm10
  283. vmovdqa xmm10,xmm0
  284. vmovups XMMWORD[(-64)+rsi],xmm11
  285. vmovdqa xmm11,xmm5
  286. vmovups XMMWORD[(-48)+rsi],xmm12
  287. vmovdqa xmm12,xmm6
  288. vmovups XMMWORD[(-32)+rsi],xmm13
  289. vmovdqa xmm13,xmm7
  290. vmovups XMMWORD[(-16)+rsi],xmm14
  291. vmovdqa xmm14,xmm3
  292. vmovdqu xmm7,XMMWORD[((32+8))+rsp]
  293. jmp NEAR $L$oop6x
  294. $L$6x_done:
  295. vpxor xmm8,xmm8,XMMWORD[((16+8))+rsp]
  296. vpxor xmm8,xmm8,xmm4
  297. DB 0F3h,0C3h ;repret
  298. global aesni_gcm_decrypt
  299. ALIGN 32
  300. aesni_gcm_decrypt:
  301. mov QWORD[8+rsp],rdi ;WIN64 prologue
  302. mov QWORD[16+rsp],rsi
  303. mov rax,rsp
  304. $L$SEH_begin_aesni_gcm_decrypt:
  305. mov rdi,rcx
  306. mov rsi,rdx
  307. mov rdx,r8
  308. mov rcx,r9
  309. mov r8,QWORD[40+rsp]
  310. mov r9,QWORD[48+rsp]
  311. xor r10,r10
  312. cmp rdx,0x60
  313. jb NEAR $L$gcm_dec_abort
  314. lea rax,[rsp]
  315. push rbx
  316. push rbp
  317. push r12
  318. push r13
  319. push r14
  320. push r15
  321. lea rsp,[((-168))+rsp]
  322. movaps XMMWORD[(-216)+rax],xmm6
  323. movaps XMMWORD[(-200)+rax],xmm7
  324. movaps XMMWORD[(-184)+rax],xmm8
  325. movaps XMMWORD[(-168)+rax],xmm9
  326. movaps XMMWORD[(-152)+rax],xmm10
  327. movaps XMMWORD[(-136)+rax],xmm11
  328. movaps XMMWORD[(-120)+rax],xmm12
  329. movaps XMMWORD[(-104)+rax],xmm13
  330. movaps XMMWORD[(-88)+rax],xmm14
  331. movaps XMMWORD[(-72)+rax],xmm15
  332. $L$gcm_dec_body:
  333. vzeroupper
  334. vmovdqu xmm1,XMMWORD[r8]
  335. add rsp,-128
  336. mov ebx,DWORD[12+r8]
  337. lea r11,[$L$bswap_mask]
  338. lea r14,[((-128))+rcx]
  339. mov r15,0xf80
  340. vmovdqu xmm8,XMMWORD[r9]
  341. and rsp,-128
  342. vmovdqu xmm0,XMMWORD[r11]
  343. lea rcx,[128+rcx]
  344. lea r9,[((32+32))+r9]
  345. mov ebp,DWORD[((240-128))+rcx]
  346. vpshufb xmm8,xmm8,xmm0
  347. and r14,r15
  348. and r15,rsp
  349. sub r15,r14
  350. jc NEAR $L$dec_no_key_aliasing
  351. cmp r15,768
  352. jnc NEAR $L$dec_no_key_aliasing
  353. sub rsp,r15
  354. $L$dec_no_key_aliasing:
  355. vmovdqu xmm7,XMMWORD[80+rdi]
  356. lea r14,[rdi]
  357. vmovdqu xmm4,XMMWORD[64+rdi]
  358. lea r15,[((-192))+rdx*1+rdi]
  359. vmovdqu xmm5,XMMWORD[48+rdi]
  360. shr rdx,4
  361. xor r10,r10
  362. vmovdqu xmm6,XMMWORD[32+rdi]
  363. vpshufb xmm7,xmm7,xmm0
  364. vmovdqu xmm2,XMMWORD[16+rdi]
  365. vpshufb xmm4,xmm4,xmm0
  366. vmovdqu xmm3,XMMWORD[rdi]
  367. vpshufb xmm5,xmm5,xmm0
  368. vmovdqu XMMWORD[48+rsp],xmm4
  369. vpshufb xmm6,xmm6,xmm0
  370. vmovdqu XMMWORD[64+rsp],xmm5
  371. vpshufb xmm2,xmm2,xmm0
  372. vmovdqu XMMWORD[80+rsp],xmm6
  373. vpshufb xmm3,xmm3,xmm0
  374. vmovdqu XMMWORD[96+rsp],xmm2
  375. vmovdqu XMMWORD[112+rsp],xmm3
  376. call _aesni_ctr32_ghash_6x
  377. vmovups XMMWORD[(-96)+rsi],xmm9
  378. vmovups XMMWORD[(-80)+rsi],xmm10
  379. vmovups XMMWORD[(-64)+rsi],xmm11
  380. vmovups XMMWORD[(-48)+rsi],xmm12
  381. vmovups XMMWORD[(-32)+rsi],xmm13
  382. vmovups XMMWORD[(-16)+rsi],xmm14
  383. vpshufb xmm8,xmm8,XMMWORD[r11]
  384. vmovdqu XMMWORD[(-64)+r9],xmm8
  385. vzeroupper
  386. movaps xmm6,XMMWORD[((-216))+rax]
  387. movaps xmm7,XMMWORD[((-200))+rax]
  388. movaps xmm8,XMMWORD[((-184))+rax]
  389. movaps xmm9,XMMWORD[((-168))+rax]
  390. movaps xmm10,XMMWORD[((-152))+rax]
  391. movaps xmm11,XMMWORD[((-136))+rax]
  392. movaps xmm12,XMMWORD[((-120))+rax]
  393. movaps xmm13,XMMWORD[((-104))+rax]
  394. movaps xmm14,XMMWORD[((-88))+rax]
  395. movaps xmm15,XMMWORD[((-72))+rax]
  396. mov r15,QWORD[((-48))+rax]
  397. mov r14,QWORD[((-40))+rax]
  398. mov r13,QWORD[((-32))+rax]
  399. mov r12,QWORD[((-24))+rax]
  400. mov rbp,QWORD[((-16))+rax]
  401. mov rbx,QWORD[((-8))+rax]
  402. lea rsp,[rax]
  403. $L$gcm_dec_abort:
  404. mov rax,r10
  405. mov rdi,QWORD[8+rsp] ;WIN64 epilogue
  406. mov rsi,QWORD[16+rsp]
  407. DB 0F3h,0C3h ;repret
  408. $L$SEH_end_aesni_gcm_decrypt:
  409. ALIGN 32
  410. _aesni_ctr32_6x:
  411. vmovdqu xmm4,XMMWORD[((0-128))+rcx]
  412. vmovdqu xmm2,XMMWORD[32+r11]
  413. lea r13,[((-1))+rbp]
  414. vmovups xmm15,XMMWORD[((16-128))+rcx]
  415. lea r12,[((32-128))+rcx]
  416. vpxor xmm9,xmm1,xmm4
  417. add ebx,100663296
  418. jc NEAR $L$handle_ctr32_2
  419. vpaddb xmm10,xmm1,xmm2
  420. vpaddb xmm11,xmm10,xmm2
  421. vpxor xmm10,xmm10,xmm4
  422. vpaddb xmm12,xmm11,xmm2
  423. vpxor xmm11,xmm11,xmm4
  424. vpaddb xmm13,xmm12,xmm2
  425. vpxor xmm12,xmm12,xmm4
  426. vpaddb xmm14,xmm13,xmm2
  427. vpxor xmm13,xmm13,xmm4
  428. vpaddb xmm1,xmm14,xmm2
  429. vpxor xmm14,xmm14,xmm4
  430. jmp NEAR $L$oop_ctr32
  431. ALIGN 16
  432. $L$oop_ctr32:
  433. vaesenc xmm9,xmm9,xmm15
  434. vaesenc xmm10,xmm10,xmm15
  435. vaesenc xmm11,xmm11,xmm15
  436. vaesenc xmm12,xmm12,xmm15
  437. vaesenc xmm13,xmm13,xmm15
  438. vaesenc xmm14,xmm14,xmm15
  439. vmovups xmm15,XMMWORD[r12]
  440. lea r12,[16+r12]
  441. dec r13d
  442. jnz NEAR $L$oop_ctr32
  443. vmovdqu xmm3,XMMWORD[r12]
  444. vaesenc xmm9,xmm9,xmm15
  445. vpxor xmm4,xmm3,XMMWORD[rdi]
  446. vaesenc xmm10,xmm10,xmm15
  447. vpxor xmm5,xmm3,XMMWORD[16+rdi]
  448. vaesenc xmm11,xmm11,xmm15
  449. vpxor xmm6,xmm3,XMMWORD[32+rdi]
  450. vaesenc xmm12,xmm12,xmm15
  451. vpxor xmm8,xmm3,XMMWORD[48+rdi]
  452. vaesenc xmm13,xmm13,xmm15
  453. vpxor xmm2,xmm3,XMMWORD[64+rdi]
  454. vaesenc xmm14,xmm14,xmm15
  455. vpxor xmm3,xmm3,XMMWORD[80+rdi]
  456. lea rdi,[96+rdi]
  457. vaesenclast xmm9,xmm9,xmm4
  458. vaesenclast xmm10,xmm10,xmm5
  459. vaesenclast xmm11,xmm11,xmm6
  460. vaesenclast xmm12,xmm12,xmm8
  461. vaesenclast xmm13,xmm13,xmm2
  462. vaesenclast xmm14,xmm14,xmm3
  463. vmovups XMMWORD[rsi],xmm9
  464. vmovups XMMWORD[16+rsi],xmm10
  465. vmovups XMMWORD[32+rsi],xmm11
  466. vmovups XMMWORD[48+rsi],xmm12
  467. vmovups XMMWORD[64+rsi],xmm13
  468. vmovups XMMWORD[80+rsi],xmm14
  469. lea rsi,[96+rsi]
  470. DB 0F3h,0C3h ;repret
  471. ALIGN 32
  472. $L$handle_ctr32_2:
  473. vpshufb xmm6,xmm1,xmm0
  474. vmovdqu xmm5,XMMWORD[48+r11]
  475. vpaddd xmm10,xmm6,XMMWORD[64+r11]
  476. vpaddd xmm11,xmm6,xmm5
  477. vpaddd xmm12,xmm10,xmm5
  478. vpshufb xmm10,xmm10,xmm0
  479. vpaddd xmm13,xmm11,xmm5
  480. vpshufb xmm11,xmm11,xmm0
  481. vpxor xmm10,xmm10,xmm4
  482. vpaddd xmm14,xmm12,xmm5
  483. vpshufb xmm12,xmm12,xmm0
  484. vpxor xmm11,xmm11,xmm4
  485. vpaddd xmm1,xmm13,xmm5
  486. vpshufb xmm13,xmm13,xmm0
  487. vpxor xmm12,xmm12,xmm4
  488. vpshufb xmm14,xmm14,xmm0
  489. vpxor xmm13,xmm13,xmm4
  490. vpshufb xmm1,xmm1,xmm0
  491. vpxor xmm14,xmm14,xmm4
  492. jmp NEAR $L$oop_ctr32
  493. global aesni_gcm_encrypt
  494. ALIGN 32
  495. aesni_gcm_encrypt:
  496. mov QWORD[8+rsp],rdi ;WIN64 prologue
  497. mov QWORD[16+rsp],rsi
  498. mov rax,rsp
  499. $L$SEH_begin_aesni_gcm_encrypt:
  500. mov rdi,rcx
  501. mov rsi,rdx
  502. mov rdx,r8
  503. mov rcx,r9
  504. mov r8,QWORD[40+rsp]
  505. mov r9,QWORD[48+rsp]
  506. %ifdef BORINGSSL_DISPATCH_TEST
  507. EXTERN BORINGSSL_function_hit
  508. mov BYTE[((BORINGSSL_function_hit+2))],1
  509. %endif
  510. xor r10,r10
  511. cmp rdx,0x60*3
  512. jb NEAR $L$gcm_enc_abort
  513. lea rax,[rsp]
  514. push rbx
  515. push rbp
  516. push r12
  517. push r13
  518. push r14
  519. push r15
  520. lea rsp,[((-168))+rsp]
  521. movaps XMMWORD[(-216)+rax],xmm6
  522. movaps XMMWORD[(-200)+rax],xmm7
  523. movaps XMMWORD[(-184)+rax],xmm8
  524. movaps XMMWORD[(-168)+rax],xmm9
  525. movaps XMMWORD[(-152)+rax],xmm10
  526. movaps XMMWORD[(-136)+rax],xmm11
  527. movaps XMMWORD[(-120)+rax],xmm12
  528. movaps XMMWORD[(-104)+rax],xmm13
  529. movaps XMMWORD[(-88)+rax],xmm14
  530. movaps XMMWORD[(-72)+rax],xmm15
  531. $L$gcm_enc_body:
  532. vzeroupper
  533. vmovdqu xmm1,XMMWORD[r8]
  534. add rsp,-128
  535. mov ebx,DWORD[12+r8]
  536. lea r11,[$L$bswap_mask]
  537. lea r14,[((-128))+rcx]
  538. mov r15,0xf80
  539. lea rcx,[128+rcx]
  540. vmovdqu xmm0,XMMWORD[r11]
  541. and rsp,-128
  542. mov ebp,DWORD[((240-128))+rcx]
  543. and r14,r15
  544. and r15,rsp
  545. sub r15,r14
  546. jc NEAR $L$enc_no_key_aliasing
  547. cmp r15,768
  548. jnc NEAR $L$enc_no_key_aliasing
  549. sub rsp,r15
  550. $L$enc_no_key_aliasing:
  551. lea r14,[rsi]
  552. lea r15,[((-192))+rdx*1+rsi]
  553. shr rdx,4
  554. call _aesni_ctr32_6x
  555. vpshufb xmm8,xmm9,xmm0
  556. vpshufb xmm2,xmm10,xmm0
  557. vmovdqu XMMWORD[112+rsp],xmm8
  558. vpshufb xmm4,xmm11,xmm0
  559. vmovdqu XMMWORD[96+rsp],xmm2
  560. vpshufb xmm5,xmm12,xmm0
  561. vmovdqu XMMWORD[80+rsp],xmm4
  562. vpshufb xmm6,xmm13,xmm0
  563. vmovdqu XMMWORD[64+rsp],xmm5
  564. vpshufb xmm7,xmm14,xmm0
  565. vmovdqu XMMWORD[48+rsp],xmm6
  566. call _aesni_ctr32_6x
  567. vmovdqu xmm8,XMMWORD[r9]
  568. lea r9,[((32+32))+r9]
  569. sub rdx,12
  570. mov r10,0x60*2
  571. vpshufb xmm8,xmm8,xmm0
  572. call _aesni_ctr32_ghash_6x
  573. vmovdqu xmm7,XMMWORD[32+rsp]
  574. vmovdqu xmm0,XMMWORD[r11]
  575. vmovdqu xmm3,XMMWORD[((0-32))+r9]
  576. vpunpckhqdq xmm1,xmm7,xmm7
  577. vmovdqu xmm15,XMMWORD[((32-32))+r9]
  578. vmovups XMMWORD[(-96)+rsi],xmm9
  579. vpshufb xmm9,xmm9,xmm0
  580. vpxor xmm1,xmm1,xmm7
  581. vmovups XMMWORD[(-80)+rsi],xmm10
  582. vpshufb xmm10,xmm10,xmm0
  583. vmovups XMMWORD[(-64)+rsi],xmm11
  584. vpshufb xmm11,xmm11,xmm0
  585. vmovups XMMWORD[(-48)+rsi],xmm12
  586. vpshufb xmm12,xmm12,xmm0
  587. vmovups XMMWORD[(-32)+rsi],xmm13
  588. vpshufb xmm13,xmm13,xmm0
  589. vmovups XMMWORD[(-16)+rsi],xmm14
  590. vpshufb xmm14,xmm14,xmm0
  591. vmovdqu XMMWORD[16+rsp],xmm9
  592. vmovdqu xmm6,XMMWORD[48+rsp]
  593. vmovdqu xmm0,XMMWORD[((16-32))+r9]
  594. vpunpckhqdq xmm2,xmm6,xmm6
  595. vpclmulqdq xmm5,xmm7,xmm3,0x00
  596. vpxor xmm2,xmm2,xmm6
  597. vpclmulqdq xmm7,xmm7,xmm3,0x11
  598. vpclmulqdq xmm1,xmm1,xmm15,0x00
  599. vmovdqu xmm9,XMMWORD[64+rsp]
  600. vpclmulqdq xmm4,xmm6,xmm0,0x00
  601. vmovdqu xmm3,XMMWORD[((48-32))+r9]
  602. vpxor xmm4,xmm4,xmm5
  603. vpunpckhqdq xmm5,xmm9,xmm9
  604. vpclmulqdq xmm6,xmm6,xmm0,0x11
  605. vpxor xmm5,xmm5,xmm9
  606. vpxor xmm6,xmm6,xmm7
  607. vpclmulqdq xmm2,xmm2,xmm15,0x10
  608. vmovdqu xmm15,XMMWORD[((80-32))+r9]
  609. vpxor xmm2,xmm2,xmm1
  610. vmovdqu xmm1,XMMWORD[80+rsp]
  611. vpclmulqdq xmm7,xmm9,xmm3,0x00
  612. vmovdqu xmm0,XMMWORD[((64-32))+r9]
  613. vpxor xmm7,xmm7,xmm4
  614. vpunpckhqdq xmm4,xmm1,xmm1
  615. vpclmulqdq xmm9,xmm9,xmm3,0x11
  616. vpxor xmm4,xmm4,xmm1
  617. vpxor xmm9,xmm9,xmm6
  618. vpclmulqdq xmm5,xmm5,xmm15,0x00
  619. vpxor xmm5,xmm5,xmm2
  620. vmovdqu xmm2,XMMWORD[96+rsp]
  621. vpclmulqdq xmm6,xmm1,xmm0,0x00
  622. vmovdqu xmm3,XMMWORD[((96-32))+r9]
  623. vpxor xmm6,xmm6,xmm7
  624. vpunpckhqdq xmm7,xmm2,xmm2
  625. vpclmulqdq xmm1,xmm1,xmm0,0x11
  626. vpxor xmm7,xmm7,xmm2
  627. vpxor xmm1,xmm1,xmm9
  628. vpclmulqdq xmm4,xmm4,xmm15,0x10
  629. vmovdqu xmm15,XMMWORD[((128-32))+r9]
  630. vpxor xmm4,xmm4,xmm5
  631. vpxor xmm8,xmm8,XMMWORD[112+rsp]
  632. vpclmulqdq xmm5,xmm2,xmm3,0x00
  633. vmovdqu xmm0,XMMWORD[((112-32))+r9]
  634. vpunpckhqdq xmm9,xmm8,xmm8
  635. vpxor xmm5,xmm5,xmm6
  636. vpclmulqdq xmm2,xmm2,xmm3,0x11
  637. vpxor xmm9,xmm9,xmm8
  638. vpxor xmm2,xmm2,xmm1
  639. vpclmulqdq xmm7,xmm7,xmm15,0x00
  640. vpxor xmm4,xmm7,xmm4
  641. vpclmulqdq xmm6,xmm8,xmm0,0x00
  642. vmovdqu xmm3,XMMWORD[((0-32))+r9]
  643. vpunpckhqdq xmm1,xmm14,xmm14
  644. vpclmulqdq xmm8,xmm8,xmm0,0x11
  645. vpxor xmm1,xmm1,xmm14
  646. vpxor xmm5,xmm6,xmm5
  647. vpclmulqdq xmm9,xmm9,xmm15,0x10
  648. vmovdqu xmm15,XMMWORD[((32-32))+r9]
  649. vpxor xmm7,xmm8,xmm2
  650. vpxor xmm6,xmm9,xmm4
  651. vmovdqu xmm0,XMMWORD[((16-32))+r9]
  652. vpxor xmm9,xmm7,xmm5
  653. vpclmulqdq xmm4,xmm14,xmm3,0x00
  654. vpxor xmm6,xmm6,xmm9
  655. vpunpckhqdq xmm2,xmm13,xmm13
  656. vpclmulqdq xmm14,xmm14,xmm3,0x11
  657. vpxor xmm2,xmm2,xmm13
  658. vpslldq xmm9,xmm6,8
  659. vpclmulqdq xmm1,xmm1,xmm15,0x00
  660. vpxor xmm8,xmm5,xmm9
  661. vpsrldq xmm6,xmm6,8
  662. vpxor xmm7,xmm7,xmm6
  663. vpclmulqdq xmm5,xmm13,xmm0,0x00
  664. vmovdqu xmm3,XMMWORD[((48-32))+r9]
  665. vpxor xmm5,xmm5,xmm4
  666. vpunpckhqdq xmm9,xmm12,xmm12
  667. vpclmulqdq xmm13,xmm13,xmm0,0x11
  668. vpxor xmm9,xmm9,xmm12
  669. vpxor xmm13,xmm13,xmm14
  670. vpalignr xmm14,xmm8,xmm8,8
  671. vpclmulqdq xmm2,xmm2,xmm15,0x10
  672. vmovdqu xmm15,XMMWORD[((80-32))+r9]
  673. vpxor xmm2,xmm2,xmm1
  674. vpclmulqdq xmm4,xmm12,xmm3,0x00
  675. vmovdqu xmm0,XMMWORD[((64-32))+r9]
  676. vpxor xmm4,xmm4,xmm5
  677. vpunpckhqdq xmm1,xmm11,xmm11
  678. vpclmulqdq xmm12,xmm12,xmm3,0x11
  679. vpxor xmm1,xmm1,xmm11
  680. vpxor xmm12,xmm12,xmm13
  681. vxorps xmm7,xmm7,XMMWORD[16+rsp]
  682. vpclmulqdq xmm9,xmm9,xmm15,0x00
  683. vpxor xmm9,xmm9,xmm2
  684. vpclmulqdq xmm8,xmm8,XMMWORD[16+r11],0x10
  685. vxorps xmm8,xmm8,xmm14
  686. vpclmulqdq xmm5,xmm11,xmm0,0x00
  687. vmovdqu xmm3,XMMWORD[((96-32))+r9]
  688. vpxor xmm5,xmm5,xmm4
  689. vpunpckhqdq xmm2,xmm10,xmm10
  690. vpclmulqdq xmm11,xmm11,xmm0,0x11
  691. vpxor xmm2,xmm2,xmm10
  692. vpalignr xmm14,xmm8,xmm8,8
  693. vpxor xmm11,xmm11,xmm12
  694. vpclmulqdq xmm1,xmm1,xmm15,0x10
  695. vmovdqu xmm15,XMMWORD[((128-32))+r9]
  696. vpxor xmm1,xmm1,xmm9
  697. vxorps xmm14,xmm14,xmm7
  698. vpclmulqdq xmm8,xmm8,XMMWORD[16+r11],0x10
  699. vxorps xmm8,xmm8,xmm14
  700. vpclmulqdq xmm4,xmm10,xmm3,0x00
  701. vmovdqu xmm0,XMMWORD[((112-32))+r9]
  702. vpxor xmm4,xmm4,xmm5
  703. vpunpckhqdq xmm9,xmm8,xmm8
  704. vpclmulqdq xmm10,xmm10,xmm3,0x11
  705. vpxor xmm9,xmm9,xmm8
  706. vpxor xmm10,xmm10,xmm11
  707. vpclmulqdq xmm2,xmm2,xmm15,0x00
  708. vpxor xmm2,xmm2,xmm1
  709. vpclmulqdq xmm5,xmm8,xmm0,0x00
  710. vpclmulqdq xmm7,xmm8,xmm0,0x11
  711. vpxor xmm5,xmm5,xmm4
  712. vpclmulqdq xmm6,xmm9,xmm15,0x10
  713. vpxor xmm7,xmm7,xmm10
  714. vpxor xmm6,xmm6,xmm2
  715. vpxor xmm4,xmm7,xmm5
  716. vpxor xmm6,xmm6,xmm4
  717. vpslldq xmm1,xmm6,8
  718. vmovdqu xmm3,XMMWORD[16+r11]
  719. vpsrldq xmm6,xmm6,8
  720. vpxor xmm8,xmm5,xmm1
  721. vpxor xmm7,xmm7,xmm6
  722. vpalignr xmm2,xmm8,xmm8,8
  723. vpclmulqdq xmm8,xmm8,xmm3,0x10
  724. vpxor xmm8,xmm8,xmm2
  725. vpalignr xmm2,xmm8,xmm8,8
  726. vpclmulqdq xmm8,xmm8,xmm3,0x10
  727. vpxor xmm2,xmm2,xmm7
  728. vpxor xmm8,xmm8,xmm2
  729. vpshufb xmm8,xmm8,XMMWORD[r11]
  730. vmovdqu XMMWORD[(-64)+r9],xmm8
  731. vzeroupper
  732. movaps xmm6,XMMWORD[((-216))+rax]
  733. movaps xmm7,XMMWORD[((-200))+rax]
  734. movaps xmm8,XMMWORD[((-184))+rax]
  735. movaps xmm9,XMMWORD[((-168))+rax]
  736. movaps xmm10,XMMWORD[((-152))+rax]
  737. movaps xmm11,XMMWORD[((-136))+rax]
  738. movaps xmm12,XMMWORD[((-120))+rax]
  739. movaps xmm13,XMMWORD[((-104))+rax]
  740. movaps xmm14,XMMWORD[((-88))+rax]
  741. movaps xmm15,XMMWORD[((-72))+rax]
  742. mov r15,QWORD[((-48))+rax]
  743. mov r14,QWORD[((-40))+rax]
  744. mov r13,QWORD[((-32))+rax]
  745. mov r12,QWORD[((-24))+rax]
  746. mov rbp,QWORD[((-16))+rax]
  747. mov rbx,QWORD[((-8))+rax]
  748. lea rsp,[rax]
  749. $L$gcm_enc_abort:
  750. mov rax,r10
  751. mov rdi,QWORD[8+rsp] ;WIN64 epilogue
  752. mov rsi,QWORD[16+rsp]
  753. DB 0F3h,0C3h ;repret
  754. $L$SEH_end_aesni_gcm_encrypt:
  755. ALIGN 64
  756. $L$bswap_mask:
  757. DB 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
  758. $L$poly:
  759. DB 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0xc2
  760. $L$one_msb:
  761. DB 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
  762. $L$two_lsb:
  763. DB 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
  764. $L$one_lsb:
  765. DB 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
  766. DB 65,69,83,45,78,73,32,71,67,77,32,109,111,100,117,108
  767. DB 101,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82
  768. DB 89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112
  769. DB 114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
  770. ALIGN 64
  771. EXTERN __imp_RtlVirtualUnwind
  772. ALIGN 16
  773. gcm_se_handler:
  774. push rsi
  775. push rdi
  776. push rbx
  777. push rbp
  778. push r12
  779. push r13
  780. push r14
  781. push r15
  782. pushfq
  783. sub rsp,64
  784. mov rax,QWORD[120+r8]
  785. mov rbx,QWORD[248+r8]
  786. mov rsi,QWORD[8+r9]
  787. mov r11,QWORD[56+r9]
  788. mov r10d,DWORD[r11]
  789. lea r10,[r10*1+rsi]
  790. cmp rbx,r10
  791. jb NEAR $L$common_seh_tail
  792. mov rax,QWORD[152+r8]
  793. mov r10d,DWORD[4+r11]
  794. lea r10,[r10*1+rsi]
  795. cmp rbx,r10
  796. jae NEAR $L$common_seh_tail
  797. mov rax,QWORD[120+r8]
  798. mov r15,QWORD[((-48))+rax]
  799. mov r14,QWORD[((-40))+rax]
  800. mov r13,QWORD[((-32))+rax]
  801. mov r12,QWORD[((-24))+rax]
  802. mov rbp,QWORD[((-16))+rax]
  803. mov rbx,QWORD[((-8))+rax]
  804. mov QWORD[240+r8],r15
  805. mov QWORD[232+r8],r14
  806. mov QWORD[224+r8],r13
  807. mov QWORD[216+r8],r12
  808. mov QWORD[160+r8],rbp
  809. mov QWORD[144+r8],rbx
  810. lea rsi,[((-216))+rax]
  811. lea rdi,[512+r8]
  812. mov ecx,20
  813. DD 0xa548f3fc
  814. $L$common_seh_tail:
  815. mov rdi,QWORD[8+rax]
  816. mov rsi,QWORD[16+rax]
  817. mov QWORD[152+r8],rax
  818. mov QWORD[168+r8],rsi
  819. mov QWORD[176+r8],rdi
  820. mov rdi,QWORD[40+r9]
  821. mov rsi,r8
  822. mov ecx,154
  823. DD 0xa548f3fc
  824. mov rsi,r9
  825. xor rcx,rcx
  826. mov rdx,QWORD[8+rsi]
  827. mov r8,QWORD[rsi]
  828. mov r9,QWORD[16+rsi]
  829. mov r10,QWORD[40+rsi]
  830. lea r11,[56+rsi]
  831. lea r12,[24+rsi]
  832. mov QWORD[32+rsp],r10
  833. mov QWORD[40+rsp],r11
  834. mov QWORD[48+rsp],r12
  835. mov QWORD[56+rsp],rcx
  836. call QWORD[__imp_RtlVirtualUnwind]
  837. mov eax,1
  838. add rsp,64
  839. popfq
  840. pop r15
  841. pop r14
  842. pop r13
  843. pop r12
  844. pop rbp
  845. pop rbx
  846. pop rdi
  847. pop rsi
  848. DB 0F3h,0C3h ;repret
  849. section .pdata rdata align=4
  850. ALIGN 4
  851. DD $L$SEH_begin_aesni_gcm_decrypt wrt ..imagebase
  852. DD $L$SEH_end_aesni_gcm_decrypt wrt ..imagebase
  853. DD $L$SEH_gcm_dec_info wrt ..imagebase
  854. DD $L$SEH_begin_aesni_gcm_encrypt wrt ..imagebase
  855. DD $L$SEH_end_aesni_gcm_encrypt wrt ..imagebase
  856. DD $L$SEH_gcm_enc_info wrt ..imagebase
  857. section .xdata rdata align=8
  858. ALIGN 8
  859. $L$SEH_gcm_dec_info:
  860. DB 9,0,0,0
  861. DD gcm_se_handler wrt ..imagebase
  862. DD $L$gcm_dec_body wrt ..imagebase,$L$gcm_dec_abort wrt ..imagebase
  863. $L$SEH_gcm_enc_info:
  864. DB 9,0,0,0
  865. DD gcm_se_handler wrt ..imagebase
  866. DD $L$gcm_enc_body wrt ..imagebase,$L$gcm_enc_abort wrt ..imagebase