// This file is generated from a similarly-named Perl script in the BoringSSL
// source tree. Do not edit by hand.
#if defined(__i386__)
#if defined(BORINGSSL_PREFIX)
#include <boringssl_prefix_symbols_asm.h>
#endif
.text
#ifdef BORINGSSL_DISPATCH_TEST
#endif
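// One-block encrypt entry point. From the cdecl stack loads below, the
// arguments are: 4(%esp) input block, 8(%esp) output block, 12(%esp)
// expanded key schedule (whose round count lives at offset 240).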
.globl _aes_hw_encrypt
.private_extern _aes_hw_encrypt
.align 4
_aes_hw_encrypt:
L_aes_hw_encrypt_begin:
#ifdef BORINGSSL_DISPATCH_TEST
pushl %ebx
pushl %edx
call L000pic
L000pic:
popl %ebx
leal _BORINGSSL_function_hit+1-L000pic(%ebx),%ebx
movl $1,%edx
movb %dl,(%ebx)
popl %edx
popl %ebx
#endif
movl 4(%esp),%eax
movl 12(%esp),%edx
movups (%eax),%xmm2
movl 240(%edx),%ecx
movl 8(%esp),%eax
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
L001enc1_loop_1:
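// The .byte sequences throughout this file are hand-assembled AES-NI
// opcodes, emitted raw so pre-AES-NI assemblers still accept the file:
// 102,15,56,220,209 is aesenc %xmm1,%xmm2 and 102,15,56,221,209 is
// aesenclast %xmm1,%xmm2 (opcode bytes 222/223 are aesdec/aesdeclast).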
.byte 102,15,56,220,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz L001enc1_loop_1
.byte 102,15,56,221,209
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
movups %xmm2,(%eax)
pxor %xmm2,%xmm2
ret
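// One-block decrypt twin of _aes_hw_encrypt: same argument layout, with
// aesdec/aesdeclast in the round loop.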
.globl _aes_hw_decrypt
.private_extern _aes_hw_decrypt
.align 4
_aes_hw_decrypt:
L_aes_hw_decrypt_begin:
movl 4(%esp),%eax
movl 12(%esp),%edx
movups (%eax),%xmm2
movl 240(%edx),%ecx
movl 8(%esp),%eax
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
L002dec1_loop_2:
.byte 102,15,56,222,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz L002dec1_loop_2
.byte 102,15,56,223,209
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
movups %xmm2,(%eax)
pxor %xmm2,%xmm2
ret
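// __aesni_encrypt2/3/4/6 and __aesni_decrypt2/3/4/6 are internal helpers
// that pipeline 2-6 independent blocks (held in %xmm2..%xmm7) through the
// cipher, with the key schedule in %edx and the round count in %ecx.
// Interleaving the aesenc chains hides the instruction's latency, which is
// why the bulk modes below favor the six-block path.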
.private_extern __aesni_encrypt2
.align 4
__aesni_encrypt2:
movups (%edx),%xmm0
shll $4,%ecx
movups 16(%edx),%xmm1
xorps %xmm0,%xmm2
pxor %xmm0,%xmm3
movups 32(%edx),%xmm0
leal 32(%edx,%ecx,1),%edx
negl %ecx
addl $16,%ecx
L003enc2_loop:
.byte 102,15,56,220,209
.byte 102,15,56,220,217
movups (%edx,%ecx,1),%xmm1
addl $32,%ecx
.byte 102,15,56,220,208
.byte 102,15,56,220,216
movups -16(%edx,%ecx,1),%xmm0
jnz L003enc2_loop
.byte 102,15,56,220,209
.byte 102,15,56,220,217
.byte 102,15,56,221,208
.byte 102,15,56,221,216
ret
.private_extern __aesni_decrypt2
.align 4
__aesni_decrypt2:
movups (%edx),%xmm0
shll $4,%ecx
movups 16(%edx),%xmm1
xorps %xmm0,%xmm2
pxor %xmm0,%xmm3
movups 32(%edx),%xmm0
leal 32(%edx,%ecx,1),%edx
negl %ecx
addl $16,%ecx
L004dec2_loop:
.byte 102,15,56,222,209
.byte 102,15,56,222,217
movups (%edx,%ecx,1),%xmm1
addl $32,%ecx
.byte 102,15,56,222,208
.byte 102,15,56,222,216
movups -16(%edx,%ecx,1),%xmm0
jnz L004dec2_loop
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,223,208
.byte 102,15,56,223,216
ret
.private_extern __aesni_encrypt3
.align 4
__aesni_encrypt3:
movups (%edx),%xmm0
shll $4,%ecx
movups 16(%edx),%xmm1
xorps %xmm0,%xmm2
pxor %xmm0,%xmm3
pxor %xmm0,%xmm4
movups 32(%edx),%xmm0
leal 32(%edx,%ecx,1),%edx
negl %ecx
addl $16,%ecx
L005enc3_loop:
.byte 102,15,56,220,209
.byte 102,15,56,220,217
.byte 102,15,56,220,225
movups (%edx,%ecx,1),%xmm1
addl $32,%ecx
.byte 102,15,56,220,208
.byte 102,15,56,220,216
.byte 102,15,56,220,224
movups -16(%edx,%ecx,1),%xmm0
jnz L005enc3_loop
.byte 102,15,56,220,209
.byte 102,15,56,220,217
.byte 102,15,56,220,225
.byte 102,15,56,221,208
.byte 102,15,56,221,216
.byte 102,15,56,221,224
ret
.private_extern __aesni_decrypt3
.align 4
__aesni_decrypt3:
movups (%edx),%xmm0
shll $4,%ecx
movups 16(%edx),%xmm1
xorps %xmm0,%xmm2
pxor %xmm0,%xmm3
pxor %xmm0,%xmm4
movups 32(%edx),%xmm0
leal 32(%edx,%ecx,1),%edx
negl %ecx
addl $16,%ecx
L006dec3_loop:
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
movups (%edx,%ecx,1),%xmm1
addl $32,%ecx
.byte 102,15,56,222,208
.byte 102,15,56,222,216
.byte 102,15,56,222,224
movups -16(%edx,%ecx,1),%xmm0
jnz L006dec3_loop
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
.byte 102,15,56,223,208
.byte 102,15,56,223,216
.byte 102,15,56,223,224
ret
.private_extern __aesni_encrypt4
.align 4
__aesni_encrypt4:
movups (%edx),%xmm0
movups 16(%edx),%xmm1
shll $4,%ecx
xorps %xmm0,%xmm2
pxor %xmm0,%xmm3
pxor %xmm0,%xmm4
pxor %xmm0,%xmm5
movups 32(%edx),%xmm0
leal 32(%edx,%ecx,1),%edx
negl %ecx
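// 15,31,64,0 is nopl 0(%eax), a 4-byte padding nop.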
.byte 15,31,64,0
addl $16,%ecx
L007enc4_loop:
.byte 102,15,56,220,209
.byte 102,15,56,220,217
.byte 102,15,56,220,225
.byte 102,15,56,220,233
movups (%edx,%ecx,1),%xmm1
addl $32,%ecx
.byte 102,15,56,220,208
.byte 102,15,56,220,216
.byte 102,15,56,220,224
.byte 102,15,56,220,232
movups -16(%edx,%ecx,1),%xmm0
jnz L007enc4_loop
.byte 102,15,56,220,209
.byte 102,15,56,220,217
.byte 102,15,56,220,225
.byte 102,15,56,220,233
.byte 102,15,56,221,208
.byte 102,15,56,221,216
.byte 102,15,56,221,224
.byte 102,15,56,221,232
ret
.private_extern __aesni_decrypt4
.align 4
__aesni_decrypt4:
movups (%edx),%xmm0
movups 16(%edx),%xmm1
shll $4,%ecx
xorps %xmm0,%xmm2
pxor %xmm0,%xmm3
pxor %xmm0,%xmm4
pxor %xmm0,%xmm5
movups 32(%edx),%xmm0
leal 32(%edx,%ecx,1),%edx
negl %ecx
.byte 15,31,64,0
addl $16,%ecx
L008dec4_loop:
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
.byte 102,15,56,222,233
movups (%edx,%ecx,1),%xmm1
addl $32,%ecx
.byte 102,15,56,222,208
.byte 102,15,56,222,216
.byte 102,15,56,222,224
.byte 102,15,56,222,232
movups -16(%edx,%ecx,1),%xmm0
jnz L008dec4_loop
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
.byte 102,15,56,222,233
.byte 102,15,56,223,208
.byte 102,15,56,223,216
.byte 102,15,56,223,224
.byte 102,15,56,223,232
ret
.private_extern __aesni_encrypt6
.align 4
__aesni_encrypt6:
movups (%edx),%xmm0
shll $4,%ecx
movups 16(%edx),%xmm1
xorps %xmm0,%xmm2
pxor %xmm0,%xmm3
pxor %xmm0,%xmm4
.byte 102,15,56,220,209
pxor %xmm0,%xmm5
pxor %xmm0,%xmm6
.byte 102,15,56,220,217
leal 32(%edx,%ecx,1),%edx
negl %ecx
.byte 102,15,56,220,225
pxor %xmm0,%xmm7
movups (%edx,%ecx,1),%xmm0
addl $16,%ecx
jmp L009_aesni_encrypt6_inner
.align 4,0x90
L010enc6_loop:
.byte 102,15,56,220,209
.byte 102,15,56,220,217
.byte 102,15,56,220,225
L009_aesni_encrypt6_inner:
.byte 102,15,56,220,233
.byte 102,15,56,220,241
.byte 102,15,56,220,249
L_aesni_encrypt6_enter:
movups (%edx,%ecx,1),%xmm1
addl $32,%ecx
.byte 102,15,56,220,208
.byte 102,15,56,220,216
.byte 102,15,56,220,224
.byte 102,15,56,220,232
.byte 102,15,56,220,240
.byte 102,15,56,220,248
movups -16(%edx,%ecx,1),%xmm0
jnz L010enc6_loop
.byte 102,15,56,220,209
.byte 102,15,56,220,217
.byte 102,15,56,220,225
.byte 102,15,56,220,233
.byte 102,15,56,220,241
.byte 102,15,56,220,249
.byte 102,15,56,221,208
.byte 102,15,56,221,216
.byte 102,15,56,221,224
.byte 102,15,56,221,232
.byte 102,15,56,221,240
.byte 102,15,56,221,248
ret
.private_extern __aesni_decrypt6
.align 4
__aesni_decrypt6:
movups (%edx),%xmm0
shll $4,%ecx
movups 16(%edx),%xmm1
xorps %xmm0,%xmm2
pxor %xmm0,%xmm3
pxor %xmm0,%xmm4
.byte 102,15,56,222,209
pxor %xmm0,%xmm5
pxor %xmm0,%xmm6
.byte 102,15,56,222,217
leal 32(%edx,%ecx,1),%edx
negl %ecx
.byte 102,15,56,222,225
pxor %xmm0,%xmm7
movups (%edx,%ecx,1),%xmm0
addl $16,%ecx
jmp L011_aesni_decrypt6_inner
.align 4,0x90
L012dec6_loop:
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
L011_aesni_decrypt6_inner:
.byte 102,15,56,222,233
.byte 102,15,56,222,241
.byte 102,15,56,222,249
L_aesni_decrypt6_enter:
movups (%edx,%ecx,1),%xmm1
addl $32,%ecx
.byte 102,15,56,222,208
.byte 102,15,56,222,216
.byte 102,15,56,222,224
.byte 102,15,56,222,232
.byte 102,15,56,222,240
.byte 102,15,56,222,248
movups -16(%edx,%ecx,1),%xmm0
jnz L012dec6_loop
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
.byte 102,15,56,222,233
.byte 102,15,56,222,241
.byte 102,15,56,222,249
.byte 102,15,56,223,208
.byte 102,15,56,223,216
.byte 102,15,56,223,224
.byte 102,15,56,223,232
.byte 102,15,56,223,240
.byte 102,15,56,223,248
ret
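// ECB entry point; per the stack loads at entry the arguments are
// (in, out, length, key, enc). Length is truncated to whole 16-byte
// blocks (andl $-16), the main loop handles six blocks per iteration,
// and the tail paths mop up 1-5 remaining blocks.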
.globl _aes_hw_ecb_encrypt
.private_extern _aes_hw_ecb_encrypt
.align 4
_aes_hw_ecb_encrypt:
L_aes_hw_ecb_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%esi
movl 24(%esp),%edi
movl 28(%esp),%eax
movl 32(%esp),%edx
movl 36(%esp),%ebx
andl $-16,%eax
jz L013ecb_ret
movl 240(%edx),%ecx
testl %ebx,%ebx
jz L014ecb_decrypt
movl %edx,%ebp
movl %ecx,%ebx
cmpl $96,%eax
jb L015ecb_enc_tail
movdqu (%esi),%xmm2
movdqu 16(%esi),%xmm3
movdqu 32(%esi),%xmm4
movdqu 48(%esi),%xmm5
movdqu 64(%esi),%xmm6
movdqu 80(%esi),%xmm7
leal 96(%esi),%esi
subl $96,%eax
jmp L016ecb_enc_loop6_enter
.align 4,0x90
L017ecb_enc_loop6:
movups %xmm2,(%edi)
movdqu (%esi),%xmm2
movups %xmm3,16(%edi)
movdqu 16(%esi),%xmm3
movups %xmm4,32(%edi)
movdqu 32(%esi),%xmm4
movups %xmm5,48(%edi)
movdqu 48(%esi),%xmm5
movups %xmm6,64(%edi)
movdqu 64(%esi),%xmm6
movups %xmm7,80(%edi)
leal 96(%edi),%edi
movdqu 80(%esi),%xmm7
leal 96(%esi),%esi
L016ecb_enc_loop6_enter:
call __aesni_encrypt6
movl %ebp,%edx
movl %ebx,%ecx
subl $96,%eax
jnc L017ecb_enc_loop6
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
movups %xmm5,48(%edi)
movups %xmm6,64(%edi)
movups %xmm7,80(%edi)
leal 96(%edi),%edi
addl $96,%eax
jz L013ecb_ret
L015ecb_enc_tail:
movups (%esi),%xmm2
cmpl $32,%eax
jb L018ecb_enc_one
movups 16(%esi),%xmm3
je L019ecb_enc_two
movups 32(%esi),%xmm4
cmpl $64,%eax
jb L020ecb_enc_three
movups 48(%esi),%xmm5
je L021ecb_enc_four
movups 64(%esi),%xmm6
xorps %xmm7,%xmm7
call __aesni_encrypt6
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
movups %xmm5,48(%edi)
movups %xmm6,64(%edi)
jmp L013ecb_ret
.align 4,0x90
L018ecb_enc_one:
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
L022enc1_loop_3:
.byte 102,15,56,220,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz L022enc1_loop_3
.byte 102,15,56,221,209
movups %xmm2,(%edi)
jmp L013ecb_ret
.align 4,0x90
L019ecb_enc_two:
call __aesni_encrypt2
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
jmp L013ecb_ret
.align 4,0x90
L020ecb_enc_three:
call __aesni_encrypt3
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
jmp L013ecb_ret
.align 4,0x90
L021ecb_enc_four:
call __aesni_encrypt4
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
movups %xmm5,48(%edi)
jmp L013ecb_ret
.align 4,0x90
L014ecb_decrypt:
movl %edx,%ebp
movl %ecx,%ebx
cmpl $96,%eax
jb L023ecb_dec_tail
movdqu (%esi),%xmm2
movdqu 16(%esi),%xmm3
movdqu 32(%esi),%xmm4
movdqu 48(%esi),%xmm5
movdqu 64(%esi),%xmm6
movdqu 80(%esi),%xmm7
leal 96(%esi),%esi
subl $96,%eax
jmp L024ecb_dec_loop6_enter
.align 4,0x90
L025ecb_dec_loop6:
movups %xmm2,(%edi)
movdqu (%esi),%xmm2
movups %xmm3,16(%edi)
movdqu 16(%esi),%xmm3
movups %xmm4,32(%edi)
movdqu 32(%esi),%xmm4
movups %xmm5,48(%edi)
movdqu 48(%esi),%xmm5
movups %xmm6,64(%edi)
movdqu 64(%esi),%xmm6
movups %xmm7,80(%edi)
leal 96(%edi),%edi
movdqu 80(%esi),%xmm7
leal 96(%esi),%esi
L024ecb_dec_loop6_enter:
call __aesni_decrypt6
movl %ebp,%edx
movl %ebx,%ecx
subl $96,%eax
jnc L025ecb_dec_loop6
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
movups %xmm5,48(%edi)
movups %xmm6,64(%edi)
movups %xmm7,80(%edi)
leal 96(%edi),%edi
addl $96,%eax
jz L013ecb_ret
L023ecb_dec_tail:
movups (%esi),%xmm2
cmpl $32,%eax
jb L026ecb_dec_one
movups 16(%esi),%xmm3
je L027ecb_dec_two
movups 32(%esi),%xmm4
cmpl $64,%eax
jb L028ecb_dec_three
movups 48(%esi),%xmm5
je L029ecb_dec_four
movups 64(%esi),%xmm6
xorps %xmm7,%xmm7
call __aesni_decrypt6
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
movups %xmm5,48(%edi)
movups %xmm6,64(%edi)
jmp L013ecb_ret
.align 4,0x90
L026ecb_dec_one:
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
L030dec1_loop_4:
.byte 102,15,56,222,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz L030dec1_loop_4
.byte 102,15,56,223,209
movups %xmm2,(%edi)
jmp L013ecb_ret
.align 4,0x90
L027ecb_dec_two:
call __aesni_decrypt2
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
jmp L013ecb_ret
.align 4,0x90
L028ecb_dec_three:
call __aesni_decrypt3
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
jmp L013ecb_ret
.align 4,0x90
L029ecb_dec_four:
call __aesni_decrypt4
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
movups %xmm5,48(%edi)
L013ecb_ret:
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
pxor %xmm4,%xmm4
pxor %xmm5,%xmm5
pxor %xmm6,%xmm6
pxor %xmm7,%xmm7
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
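// CCM64 encrypt: per the stack loads, (in, out, blocks, key, ivec, cmac).
// Each iteration encrypts the counter block (%xmm2) and absorbs the
// plaintext into the CMAC state (%xmm3) through the same round loop. The
// four dwords parked at (%esp) form a pshufb byte-reversal mask
// (0x0c0d0e0f..0x00010203), since the CCM counter is big-endian.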
.globl _aes_hw_ccm64_encrypt_blocks
.private_extern _aes_hw_ccm64_encrypt_blocks
.align 4
_aes_hw_ccm64_encrypt_blocks:
L_aes_hw_ccm64_encrypt_blocks_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%esi
movl 24(%esp),%edi
movl 28(%esp),%eax
movl 32(%esp),%edx
movl 36(%esp),%ebx
movl 40(%esp),%ecx
movl %esp,%ebp
subl $60,%esp
andl $-16,%esp
movl %ebp,48(%esp)
movdqu (%ebx),%xmm7
movdqu (%ecx),%xmm3
movl 240(%edx),%ecx
movl $202182159,(%esp)
movl $134810123,4(%esp)
movl $67438087,8(%esp)
movl $66051,12(%esp)
movl $1,%ebx
xorl %ebp,%ebp
movl %ebx,16(%esp)
movl %ebp,20(%esp)
movl %ebp,24(%esp)
movl %ebp,28(%esp)
shll $4,%ecx
movl $16,%ebx
leal (%edx),%ebp
movdqa (%esp),%xmm5
movdqa %xmm7,%xmm2
leal 32(%edx,%ecx,1),%edx
subl %ecx,%ebx
.byte 102,15,56,0,253
L031ccm64_enc_outer:
movups (%ebp),%xmm0
movl %ebx,%ecx
movups (%esi),%xmm6
xorps %xmm0,%xmm2
movups 16(%ebp),%xmm1
xorps %xmm6,%xmm0
xorps %xmm0,%xmm3
movups 32(%ebp),%xmm0
L032ccm64_enc2_loop:
.byte 102,15,56,220,209
.byte 102,15,56,220,217
movups (%edx,%ecx,1),%xmm1
addl $32,%ecx
.byte 102,15,56,220,208
.byte 102,15,56,220,216
movups -16(%edx,%ecx,1),%xmm0
jnz L032ccm64_enc2_loop
.byte 102,15,56,220,209
.byte 102,15,56,220,217
paddq 16(%esp),%xmm7
decl %eax
.byte 102,15,56,221,208
.byte 102,15,56,221,216
leal 16(%esi),%esi
xorps %xmm2,%xmm6
movdqa %xmm7,%xmm2
movups %xmm6,(%edi)
.byte 102,15,56,0,213
leal 16(%edi),%edi
jnz L031ccm64_enc_outer
movl 48(%esp),%esp
movl 40(%esp),%edi
movups %xmm3,(%edi)
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
pxor %xmm4,%xmm4
pxor %xmm5,%xmm5
pxor %xmm6,%xmm6
pxor %xmm7,%xmm7
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
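// CCM64 decrypt: same contract as the encrypt variant. Here the counter
// block must be encrypted before the ciphertext can be xored, so CMAC
// absorption of each recovered plaintext block trails the counter
// encryption by one loop iteration, with a final cleanup pass at
// L035ccm64_dec_break.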
.globl _aes_hw_ccm64_decrypt_blocks
.private_extern _aes_hw_ccm64_decrypt_blocks
.align 4
_aes_hw_ccm64_decrypt_blocks:
L_aes_hw_ccm64_decrypt_blocks_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%esi
movl 24(%esp),%edi
movl 28(%esp),%eax
movl 32(%esp),%edx
movl 36(%esp),%ebx
movl 40(%esp),%ecx
movl %esp,%ebp
subl $60,%esp
andl $-16,%esp
movl %ebp,48(%esp)
movdqu (%ebx),%xmm7
movdqu (%ecx),%xmm3
movl 240(%edx),%ecx
movl $202182159,(%esp)
movl $134810123,4(%esp)
movl $67438087,8(%esp)
movl $66051,12(%esp)
movl $1,%ebx
xorl %ebp,%ebp
movl %ebx,16(%esp)
movl %ebp,20(%esp)
movl %ebp,24(%esp)
movl %ebp,28(%esp)
movdqa (%esp),%xmm5
movdqa %xmm7,%xmm2
movl %edx,%ebp
movl %ecx,%ebx
.byte 102,15,56,0,253
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
L033enc1_loop_5:
.byte 102,15,56,220,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz L033enc1_loop_5
.byte 102,15,56,221,209
shll $4,%ebx
movl $16,%ecx
movups (%esi),%xmm6
paddq 16(%esp),%xmm7
leal 16(%esi),%esi
subl %ebx,%ecx
leal 32(%ebp,%ebx,1),%edx
movl %ecx,%ebx
jmp L034ccm64_dec_outer
.align 4,0x90
L034ccm64_dec_outer:
xorps %xmm2,%xmm6
movdqa %xmm7,%xmm2
movups %xmm6,(%edi)
leal 16(%edi),%edi
.byte 102,15,56,0,213
subl $1,%eax
jz L035ccm64_dec_break
movups (%ebp),%xmm0
movl %ebx,%ecx
movups 16(%ebp),%xmm1
xorps %xmm0,%xmm6
xorps %xmm0,%xmm2
xorps %xmm6,%xmm3
movups 32(%ebp),%xmm0
L036ccm64_dec2_loop:
.byte 102,15,56,220,209
.byte 102,15,56,220,217
movups (%edx,%ecx,1),%xmm1
addl $32,%ecx
.byte 102,15,56,220,208
.byte 102,15,56,220,216
movups -16(%edx,%ecx,1),%xmm0
jnz L036ccm64_dec2_loop
movups (%esi),%xmm6
paddq 16(%esp),%xmm7
.byte 102,15,56,220,209
.byte 102,15,56,220,217
.byte 102,15,56,221,208
.byte 102,15,56,221,216
leal 16(%esi),%esi
jmp L034ccm64_dec_outer
.align 4,0x90
L035ccm64_dec_break:
movl 240(%ebp),%ecx
movl %ebp,%edx
movups (%edx),%xmm0
movups 16(%edx),%xmm1
xorps %xmm0,%xmm6
leal 32(%edx),%edx
xorps %xmm6,%xmm3
L037enc1_loop_6:
.byte 102,15,56,220,217
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz L037enc1_loop_6
.byte 102,15,56,221,217
movl 48(%esp),%esp
movl 40(%esp),%edi
movups %xmm3,(%edi)
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
pxor %xmm4,%xmm4
pxor %xmm5,%xmm5
pxor %xmm6,%xmm6
pxor %xmm7,%xmm7
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
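// CTR entry point: (in, out, blocks, key, ivec) with a 32-bit big-endian
// counter in the last word of ivec. The pextrd/pinsrd/pshufb .byte
// sequences split six counter values across %xmm0/%xmm1 so each main-loop
// pass can feed the six-block encrypt helper; 16(%esp) holds the (6,6,6,0)
// increment applied with paddd between iterations.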
.globl _aes_hw_ctr32_encrypt_blocks
.private_extern _aes_hw_ctr32_encrypt_blocks
.align 4
_aes_hw_ctr32_encrypt_blocks:
L_aes_hw_ctr32_encrypt_blocks_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
#ifdef BORINGSSL_DISPATCH_TEST
pushl %ebx
pushl %edx
call L038pic
L038pic:
popl %ebx
leal _BORINGSSL_function_hit+0-L038pic(%ebx),%ebx
movl $1,%edx
movb %dl,(%ebx)
popl %edx
popl %ebx
#endif
movl 20(%esp),%esi
movl 24(%esp),%edi
movl 28(%esp),%eax
movl 32(%esp),%edx
movl 36(%esp),%ebx
movl %esp,%ebp
subl $88,%esp
andl $-16,%esp
movl %ebp,80(%esp)
cmpl $1,%eax
je L039ctr32_one_shortcut
movdqu (%ebx),%xmm7
movl $202182159,(%esp)
movl $134810123,4(%esp)
movl $67438087,8(%esp)
movl $66051,12(%esp)
movl $6,%ecx
xorl %ebp,%ebp
movl %ecx,16(%esp)
movl %ecx,20(%esp)
movl %ecx,24(%esp)
movl %ebp,28(%esp)
.byte 102,15,58,22,251,3
.byte 102,15,58,34,253,3
movl 240(%edx),%ecx
bswap %ebx
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
movdqa (%esp),%xmm2
.byte 102,15,58,34,195,0
leal 3(%ebx),%ebp
.byte 102,15,58,34,205,0
incl %ebx
.byte 102,15,58,34,195,1
incl %ebp
.byte 102,15,58,34,205,1
incl %ebx
.byte 102,15,58,34,195,2
incl %ebp
.byte 102,15,58,34,205,2
movdqa %xmm0,48(%esp)
.byte 102,15,56,0,194
movdqu (%edx),%xmm6
movdqa %xmm1,64(%esp)
.byte 102,15,56,0,202
pshufd $192,%xmm0,%xmm2
pshufd $128,%xmm0,%xmm3
cmpl $6,%eax
jb L040ctr32_tail
pxor %xmm6,%xmm7
shll $4,%ecx
movl $16,%ebx
movdqa %xmm7,32(%esp)
movl %edx,%ebp
subl %ecx,%ebx
leal 32(%edx,%ecx,1),%edx
subl $6,%eax
jmp L041ctr32_loop6
.align 4,0x90
L041ctr32_loop6:
pshufd $64,%xmm0,%xmm4
movdqa 32(%esp),%xmm0
pshufd $192,%xmm1,%xmm5
pxor %xmm0,%xmm2
pshufd $128,%xmm1,%xmm6
pxor %xmm0,%xmm3
pshufd $64,%xmm1,%xmm7
movups 16(%ebp),%xmm1
pxor %xmm0,%xmm4
pxor %xmm0,%xmm5
.byte 102,15,56,220,209
pxor %xmm0,%xmm6
pxor %xmm0,%xmm7
.byte 102,15,56,220,217
movups 32(%ebp),%xmm0
movl %ebx,%ecx
.byte 102,15,56,220,225
.byte 102,15,56,220,233
.byte 102,15,56,220,241
.byte 102,15,56,220,249
call L_aesni_encrypt6_enter
movups (%esi),%xmm1
movups 16(%esi),%xmm0
xorps %xmm1,%xmm2
movups 32(%esi),%xmm1
xorps %xmm0,%xmm3
movups %xmm2,(%edi)
movdqa 16(%esp),%xmm0
xorps %xmm1,%xmm4
movdqa 64(%esp),%xmm1
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
paddd %xmm0,%xmm1
paddd 48(%esp),%xmm0
movdqa (%esp),%xmm2
movups 48(%esi),%xmm3
movups 64(%esi),%xmm4
xorps %xmm3,%xmm5
movups 80(%esi),%xmm3
leal 96(%esi),%esi
movdqa %xmm0,48(%esp)
.byte 102,15,56,0,194
xorps %xmm4,%xmm6
movups %xmm5,48(%edi)
xorps %xmm3,%xmm7
movdqa %xmm1,64(%esp)
.byte 102,15,56,0,202
movups %xmm6,64(%edi)
pshufd $192,%xmm0,%xmm2
movups %xmm7,80(%edi)
leal 96(%edi),%edi
pshufd $128,%xmm0,%xmm3
subl $6,%eax
jnc L041ctr32_loop6
addl $6,%eax
jz L042ctr32_ret
movdqu (%ebp),%xmm7
movl %ebp,%edx
pxor 32(%esp),%xmm7
movl 240(%ebp),%ecx
L040ctr32_tail:
por %xmm7,%xmm2
cmpl $2,%eax
jb L043ctr32_one
pshufd $64,%xmm0,%xmm4
por %xmm7,%xmm3
je L044ctr32_two
pshufd $192,%xmm1,%xmm5
por %xmm7,%xmm4
cmpl $4,%eax
jb L045ctr32_three
pshufd $128,%xmm1,%xmm6
por %xmm7,%xmm5
je L046ctr32_four
por %xmm7,%xmm6
call __aesni_encrypt6
movups (%esi),%xmm1
movups 16(%esi),%xmm0
xorps %xmm1,%xmm2
movups 32(%esi),%xmm1
xorps %xmm0,%xmm3
movups 48(%esi),%xmm0
xorps %xmm1,%xmm4
movups 64(%esi),%xmm1
xorps %xmm0,%xmm5
movups %xmm2,(%edi)
xorps %xmm1,%xmm6
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
movups %xmm5,48(%edi)
movups %xmm6,64(%edi)
jmp L042ctr32_ret
.align 4,0x90
L039ctr32_one_shortcut:
movups (%ebx),%xmm2
movl 240(%edx),%ecx
L043ctr32_one:
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
L047enc1_loop_7:
.byte 102,15,56,220,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz L047enc1_loop_7
.byte 102,15,56,221,209
movups (%esi),%xmm6
xorps %xmm2,%xmm6
movups %xmm6,(%edi)
jmp L042ctr32_ret
.align 4,0x90
L044ctr32_two:
call __aesni_encrypt2
movups (%esi),%xmm5
movups 16(%esi),%xmm6
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
jmp L042ctr32_ret
.align 4,0x90
L045ctr32_three:
call __aesni_encrypt3
movups (%esi),%xmm5
movups 16(%esi),%xmm6
xorps %xmm5,%xmm2
movups 32(%esi),%xmm7
xorps %xmm6,%xmm3
movups %xmm2,(%edi)
xorps %xmm7,%xmm4
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
jmp L042ctr32_ret
.align 4,0x90
L046ctr32_four:
call __aesni_encrypt4
movups (%esi),%xmm6
movups 16(%esi),%xmm7
movups 32(%esi),%xmm1
xorps %xmm6,%xmm2
movups 48(%esi),%xmm0
xorps %xmm7,%xmm3
movups %xmm2,(%edi)
xorps %xmm1,%xmm4
movups %xmm3,16(%edi)
xorps %xmm0,%xmm5
movups %xmm4,32(%edi)
movups %xmm5,48(%edi)
L042ctr32_ret:
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
pxor %xmm4,%xmm4
movdqa %xmm0,32(%esp)
pxor %xmm5,%xmm5
movdqa %xmm0,48(%esp)
pxor %xmm6,%xmm6
movdqa %xmm0,64(%esp)
pxor %xmm7,%xmm7
movl 80(%esp),%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
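// XTS encrypt: (in, out, length, key1, key2, iv). The IV is first run
// through key2 (the enc1 loop just below) to form the initial tweak. The
// recurring pshufd $19/pcmpgtd/pand/paddq pattern is a branchless doubling
// of the tweak in GF(2^128), reduced by the polynomial 0x87 stored at
// 96(%esp); a ragged tail is finished with ciphertext stealing
// (L059xts_enc_steal).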
.globl _aes_hw_xts_encrypt
.private_extern _aes_hw_xts_encrypt
.align 4
_aes_hw_xts_encrypt:
L_aes_hw_xts_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 36(%esp),%edx
movl 40(%esp),%esi
movl 240(%edx),%ecx
movups (%esi),%xmm2
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
L048enc1_loop_8:
.byte 102,15,56,220,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz L048enc1_loop_8
.byte 102,15,56,221,209
movl 20(%esp),%esi
movl 24(%esp),%edi
movl 28(%esp),%eax
movl 32(%esp),%edx
movl %esp,%ebp
subl $120,%esp
movl 240(%edx),%ecx
andl $-16,%esp
movl $135,96(%esp)
movl $0,100(%esp)
movl $1,104(%esp)
movl $0,108(%esp)
movl %eax,112(%esp)
movl %ebp,116(%esp)
movdqa %xmm2,%xmm1
pxor %xmm0,%xmm0
movdqa 96(%esp),%xmm3
pcmpgtd %xmm1,%xmm0
andl $-16,%eax
movl %edx,%ebp
movl %ecx,%ebx
subl $96,%eax
jc L049xts_enc_short
shll $4,%ecx
movl $16,%ebx
subl %ecx,%ebx
leal 32(%edx,%ecx,1),%edx
jmp L050xts_enc_loop6
.align 4,0x90
L050xts_enc_loop6:
pshufd $19,%xmm0,%xmm2
pxor %xmm0,%xmm0
movdqa %xmm1,(%esp)
paddq %xmm1,%xmm1
pand %xmm3,%xmm2
pcmpgtd %xmm1,%xmm0
pxor %xmm2,%xmm1
pshufd $19,%xmm0,%xmm2
pxor %xmm0,%xmm0
movdqa %xmm1,16(%esp)
paddq %xmm1,%xmm1
pand %xmm3,%xmm2
pcmpgtd %xmm1,%xmm0
pxor %xmm2,%xmm1
pshufd $19,%xmm0,%xmm2
pxor %xmm0,%xmm0
movdqa %xmm1,32(%esp)
paddq %xmm1,%xmm1
pand %xmm3,%xmm2
pcmpgtd %xmm1,%xmm0
pxor %xmm2,%xmm1
pshufd $19,%xmm0,%xmm2
pxor %xmm0,%xmm0
movdqa %xmm1,48(%esp)
paddq %xmm1,%xmm1
pand %xmm3,%xmm2
pcmpgtd %xmm1,%xmm0
pxor %xmm2,%xmm1
pshufd $19,%xmm0,%xmm7
movdqa %xmm1,64(%esp)
paddq %xmm1,%xmm1
movups (%ebp),%xmm0
pand %xmm3,%xmm7
movups (%esi),%xmm2
pxor %xmm1,%xmm7
movl %ebx,%ecx
movdqu 16(%esi),%xmm3
xorps %xmm0,%xmm2
movdqu 32(%esi),%xmm4
pxor %xmm0,%xmm3
movdqu 48(%esi),%xmm5
pxor %xmm0,%xmm4
movdqu 64(%esi),%xmm6
pxor %xmm0,%xmm5
movdqu 80(%esi),%xmm1
pxor %xmm0,%xmm6
leal 96(%esi),%esi
pxor (%esp),%xmm2
movdqa %xmm7,80(%esp)
pxor %xmm1,%xmm7
movups 16(%ebp),%xmm1
pxor 16(%esp),%xmm3
pxor 32(%esp),%xmm4
.byte 102,15,56,220,209
pxor 48(%esp),%xmm5
pxor 64(%esp),%xmm6
.byte 102,15,56,220,217
pxor %xmm0,%xmm7
movups 32(%ebp),%xmm0
.byte 102,15,56,220,225
.byte 102,15,56,220,233
.byte 102,15,56,220,241
.byte 102,15,56,220,249
call L_aesni_encrypt6_enter
movdqa 80(%esp),%xmm1
pxor %xmm0,%xmm0
xorps (%esp),%xmm2
pcmpgtd %xmm1,%xmm0
xorps 16(%esp),%xmm3
movups %xmm2,(%edi)
xorps 32(%esp),%xmm4
movups %xmm3,16(%edi)
xorps 48(%esp),%xmm5
movups %xmm4,32(%edi)
xorps 64(%esp),%xmm6
movups %xmm5,48(%edi)
xorps %xmm1,%xmm7
movups %xmm6,64(%edi)
pshufd $19,%xmm0,%xmm2
movups %xmm7,80(%edi)
leal 96(%edi),%edi
movdqa 96(%esp),%xmm3
pxor %xmm0,%xmm0
paddq %xmm1,%xmm1
pand %xmm3,%xmm2
pcmpgtd %xmm1,%xmm0
pxor %xmm2,%xmm1
subl $96,%eax
jnc L050xts_enc_loop6
movl 240(%ebp),%ecx
movl %ebp,%edx
movl %ecx,%ebx
L049xts_enc_short:
addl $96,%eax
jz L051xts_enc_done6x
movdqa %xmm1,%xmm5
cmpl $32,%eax
jb L052xts_enc_one
pshufd $19,%xmm0,%xmm2
pxor %xmm0,%xmm0
paddq %xmm1,%xmm1
pand %xmm3,%xmm2
pcmpgtd %xmm1,%xmm0
pxor %xmm2,%xmm1
je L053xts_enc_two
pshufd $19,%xmm0,%xmm2
pxor %xmm0,%xmm0
movdqa %xmm1,%xmm6
paddq %xmm1,%xmm1
pand %xmm3,%xmm2
pcmpgtd %xmm1,%xmm0
pxor %xmm2,%xmm1
cmpl $64,%eax
jb L054xts_enc_three
pshufd $19,%xmm0,%xmm2
pxor %xmm0,%xmm0
movdqa %xmm1,%xmm7
paddq %xmm1,%xmm1
pand %xmm3,%xmm2
pcmpgtd %xmm1,%xmm0
pxor %xmm2,%xmm1
movdqa %xmm5,(%esp)
movdqa %xmm6,16(%esp)
je L055xts_enc_four
movdqa %xmm7,32(%esp)
pshufd $19,%xmm0,%xmm7
movdqa %xmm1,48(%esp)
paddq %xmm1,%xmm1
pand %xmm3,%xmm7
pxor %xmm1,%xmm7
movdqu (%esi),%xmm2
movdqu 16(%esi),%xmm3
movdqu 32(%esi),%xmm4
pxor (%esp),%xmm2
movdqu 48(%esi),%xmm5
pxor 16(%esp),%xmm3
movdqu 64(%esi),%xmm6
pxor 32(%esp),%xmm4
leal 80(%esi),%esi
pxor 48(%esp),%xmm5
movdqa %xmm7,64(%esp)
pxor %xmm7,%xmm6
call __aesni_encrypt6
movaps 64(%esp),%xmm1
xorps (%esp),%xmm2
xorps 16(%esp),%xmm3
xorps 32(%esp),%xmm4
movups %xmm2,(%edi)
xorps 48(%esp),%xmm5
movups %xmm3,16(%edi)
xorps %xmm1,%xmm6
movups %xmm4,32(%edi)
movups %xmm5,48(%edi)
movups %xmm6,64(%edi)
leal 80(%edi),%edi
jmp L056xts_enc_done
.align 4,0x90
L052xts_enc_one:
movups (%esi),%xmm2
leal 16(%esi),%esi
xorps %xmm5,%xmm2
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
L057enc1_loop_9:
.byte 102,15,56,220,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz L057enc1_loop_9
.byte 102,15,56,221,209
xorps %xmm5,%xmm2
movups %xmm2,(%edi)
leal 16(%edi),%edi
movdqa %xmm5,%xmm1
jmp L056xts_enc_done
.align 4,0x90
L053xts_enc_two:
movaps %xmm1,%xmm6
movups (%esi),%xmm2
movups 16(%esi),%xmm3
leal 32(%esi),%esi
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
call __aesni_encrypt2
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
leal 32(%edi),%edi
movdqa %xmm6,%xmm1
jmp L056xts_enc_done
.align 4,0x90
L054xts_enc_three:
movaps %xmm1,%xmm7
movups (%esi),%xmm2
movups 16(%esi),%xmm3
movups 32(%esi),%xmm4
leal 48(%esi),%esi
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
xorps %xmm7,%xmm4
call __aesni_encrypt3
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
xorps %xmm7,%xmm4
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
leal 48(%edi),%edi
movdqa %xmm7,%xmm1
jmp L056xts_enc_done
.align 4,0x90
L055xts_enc_four:
movaps %xmm1,%xmm6
movups (%esi),%xmm2
movups 16(%esi),%xmm3
movups 32(%esi),%xmm4
xorps (%esp),%xmm2
movups 48(%esi),%xmm5
leal 64(%esi),%esi
xorps 16(%esp),%xmm3
xorps %xmm7,%xmm4
xorps %xmm6,%xmm5
call __aesni_encrypt4
xorps (%esp),%xmm2
xorps 16(%esp),%xmm3
xorps %xmm7,%xmm4
movups %xmm2,(%edi)
xorps %xmm6,%xmm5
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
movups %xmm5,48(%edi)
leal 64(%edi),%edi
movdqa %xmm6,%xmm1
jmp L056xts_enc_done
.align 4,0x90
L051xts_enc_done6x:
movl 112(%esp),%eax
andl $15,%eax
jz L058xts_enc_ret
movdqa %xmm1,%xmm5
movl %eax,112(%esp)
jmp L059xts_enc_steal
.align 4,0x90
L056xts_enc_done:
movl 112(%esp),%eax
pxor %xmm0,%xmm0
andl $15,%eax
jz L058xts_enc_ret
pcmpgtd %xmm1,%xmm0
movl %eax,112(%esp)
pshufd $19,%xmm0,%xmm5
paddq %xmm1,%xmm1
pand 96(%esp),%xmm5
pxor %xmm1,%xmm5
L059xts_enc_steal:
movzbl (%esi),%ecx
movzbl -16(%edi),%edx
leal 1(%esi),%esi
movb %cl,-16(%edi)
movb %dl,(%edi)
leal 1(%edi),%edi
subl $1,%eax
jnz L059xts_enc_steal
subl 112(%esp),%edi
movl %ebp,%edx
movl %ebx,%ecx
movups -16(%edi),%xmm2
xorps %xmm5,%xmm2
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
L060enc1_loop_10:
.byte 102,15,56,220,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz L060enc1_loop_10
.byte 102,15,56,221,209
xorps %xmm5,%xmm2
movups %xmm2,-16(%edi)
L058xts_enc_ret:
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
movdqa %xmm0,(%esp)
pxor %xmm3,%xmm3
movdqa %xmm0,16(%esp)
pxor %xmm4,%xmm4
movdqa %xmm0,32(%esp)
pxor %xmm5,%xmm5
movdqa %xmm0,48(%esp)
pxor %xmm6,%xmm6
movdqa %xmm0,64(%esp)
pxor %xmm7,%xmm7
movdqa %xmm0,80(%esp)
movl 116(%esp),%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
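// XTS decrypt: mirror of the above. When length is not a multiple of 16,
// the setnz/shll/subl sequence at entry holds one block back so the
// ciphertext-stealing fixup (L074xts_dec_steal) can decrypt the final two
// pieces in the right tweak order.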
.globl _aes_hw_xts_decrypt
.private_extern _aes_hw_xts_decrypt
.align 4
_aes_hw_xts_decrypt:
L_aes_hw_xts_decrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 36(%esp),%edx
movl 40(%esp),%esi
movl 240(%edx),%ecx
movups (%esi),%xmm2
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
L061enc1_loop_11:
.byte 102,15,56,220,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz L061enc1_loop_11
.byte 102,15,56,221,209
movl 20(%esp),%esi
movl 24(%esp),%edi
movl 28(%esp),%eax
movl 32(%esp),%edx
movl %esp,%ebp
subl $120,%esp
andl $-16,%esp
xorl %ebx,%ebx
testl $15,%eax
setnz %bl
shll $4,%ebx
subl %ebx,%eax
movl $135,96(%esp)
movl $0,100(%esp)
movl $1,104(%esp)
movl $0,108(%esp)
movl %eax,112(%esp)
movl %ebp,116(%esp)
movl 240(%edx),%ecx
movl %edx,%ebp
movl %ecx,%ebx
movdqa %xmm2,%xmm1
pxor %xmm0,%xmm0
movdqa 96(%esp),%xmm3
pcmpgtd %xmm1,%xmm0
andl $-16,%eax
subl $96,%eax
jc L062xts_dec_short
shll $4,%ecx
movl $16,%ebx
subl %ecx,%ebx
leal 32(%edx,%ecx,1),%edx
jmp L063xts_dec_loop6
.align 4,0x90
L063xts_dec_loop6:
pshufd $19,%xmm0,%xmm2
pxor %xmm0,%xmm0
movdqa %xmm1,(%esp)
paddq %xmm1,%xmm1
pand %xmm3,%xmm2
pcmpgtd %xmm1,%xmm0
pxor %xmm2,%xmm1
pshufd $19,%xmm0,%xmm2
pxor %xmm0,%xmm0
movdqa %xmm1,16(%esp)
paddq %xmm1,%xmm1
pand %xmm3,%xmm2
pcmpgtd %xmm1,%xmm0
pxor %xmm2,%xmm1
pshufd $19,%xmm0,%xmm2
pxor %xmm0,%xmm0
movdqa %xmm1,32(%esp)
paddq %xmm1,%xmm1
pand %xmm3,%xmm2
pcmpgtd %xmm1,%xmm0
pxor %xmm2,%xmm1
pshufd $19,%xmm0,%xmm2
pxor %xmm0,%xmm0
movdqa %xmm1,48(%esp)
paddq %xmm1,%xmm1
pand %xmm3,%xmm2
pcmpgtd %xmm1,%xmm0
pxor %xmm2,%xmm1
pshufd $19,%xmm0,%xmm7
movdqa %xmm1,64(%esp)
paddq %xmm1,%xmm1
movups (%ebp),%xmm0
pand %xmm3,%xmm7
movups (%esi),%xmm2
pxor %xmm1,%xmm7
movl %ebx,%ecx
movdqu 16(%esi),%xmm3
xorps %xmm0,%xmm2
movdqu 32(%esi),%xmm4
pxor %xmm0,%xmm3
movdqu 48(%esi),%xmm5
pxor %xmm0,%xmm4
movdqu 64(%esi),%xmm6
pxor %xmm0,%xmm5
movdqu 80(%esi),%xmm1
pxor %xmm0,%xmm6
leal 96(%esi),%esi
pxor (%esp),%xmm2
movdqa %xmm7,80(%esp)
pxor %xmm1,%xmm7
movups 16(%ebp),%xmm1
pxor 16(%esp),%xmm3
pxor 32(%esp),%xmm4
.byte 102,15,56,222,209
pxor 48(%esp),%xmm5
pxor 64(%esp),%xmm6
.byte 102,15,56,222,217
pxor %xmm0,%xmm7
movups 32(%ebp),%xmm0
.byte 102,15,56,222,225
.byte 102,15,56,222,233
.byte 102,15,56,222,241
.byte 102,15,56,222,249
call L_aesni_decrypt6_enter
movdqa 80(%esp),%xmm1
pxor %xmm0,%xmm0
xorps (%esp),%xmm2
pcmpgtd %xmm1,%xmm0
xorps 16(%esp),%xmm3
movups %xmm2,(%edi)
xorps 32(%esp),%xmm4
movups %xmm3,16(%edi)
xorps 48(%esp),%xmm5
movups %xmm4,32(%edi)
xorps 64(%esp),%xmm6
movups %xmm5,48(%edi)
xorps %xmm1,%xmm7
movups %xmm6,64(%edi)
pshufd $19,%xmm0,%xmm2
movups %xmm7,80(%edi)
leal 96(%edi),%edi
movdqa 96(%esp),%xmm3
pxor %xmm0,%xmm0
paddq %xmm1,%xmm1
pand %xmm3,%xmm2
pcmpgtd %xmm1,%xmm0
pxor %xmm2,%xmm1
subl $96,%eax
jnc L063xts_dec_loop6
movl 240(%ebp),%ecx
movl %ebp,%edx
movl %ecx,%ebx
L062xts_dec_short:
addl $96,%eax
jz L064xts_dec_done6x
movdqa %xmm1,%xmm5
cmpl $32,%eax
jb L065xts_dec_one
pshufd $19,%xmm0,%xmm2
pxor %xmm0,%xmm0
paddq %xmm1,%xmm1
pand %xmm3,%xmm2
pcmpgtd %xmm1,%xmm0
pxor %xmm2,%xmm1
je L066xts_dec_two
pshufd $19,%xmm0,%xmm2
pxor %xmm0,%xmm0
movdqa %xmm1,%xmm6
paddq %xmm1,%xmm1
pand %xmm3,%xmm2
pcmpgtd %xmm1,%xmm0
pxor %xmm2,%xmm1
cmpl $64,%eax
jb L067xts_dec_three
pshufd $19,%xmm0,%xmm2
pxor %xmm0,%xmm0
movdqa %xmm1,%xmm7
paddq %xmm1,%xmm1
pand %xmm3,%xmm2
pcmpgtd %xmm1,%xmm0
pxor %xmm2,%xmm1
movdqa %xmm5,(%esp)
movdqa %xmm6,16(%esp)
je L068xts_dec_four
movdqa %xmm7,32(%esp)
pshufd $19,%xmm0,%xmm7
movdqa %xmm1,48(%esp)
paddq %xmm1,%xmm1
pand %xmm3,%xmm7
pxor %xmm1,%xmm7
movdqu (%esi),%xmm2
movdqu 16(%esi),%xmm3
movdqu 32(%esi),%xmm4
pxor (%esp),%xmm2
movdqu 48(%esi),%xmm5
pxor 16(%esp),%xmm3
movdqu 64(%esi),%xmm6
pxor 32(%esp),%xmm4
leal 80(%esi),%esi
pxor 48(%esp),%xmm5
movdqa %xmm7,64(%esp)
pxor %xmm7,%xmm6
call __aesni_decrypt6
movaps 64(%esp),%xmm1
xorps (%esp),%xmm2
xorps 16(%esp),%xmm3
xorps 32(%esp),%xmm4
movups %xmm2,(%edi)
xorps 48(%esp),%xmm5
movups %xmm3,16(%edi)
xorps %xmm1,%xmm6
movups %xmm4,32(%edi)
movups %xmm5,48(%edi)
movups %xmm6,64(%edi)
leal 80(%edi),%edi
jmp L069xts_dec_done
.align 4,0x90
L065xts_dec_one:
movups (%esi),%xmm2
leal 16(%esi),%esi
xorps %xmm5,%xmm2
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
L070dec1_loop_12:
.byte 102,15,56,222,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz L070dec1_loop_12
.byte 102,15,56,223,209
xorps %xmm5,%xmm2
movups %xmm2,(%edi)
leal 16(%edi),%edi
movdqa %xmm5,%xmm1
jmp L069xts_dec_done
.align 4,0x90
L066xts_dec_two:
movaps %xmm1,%xmm6
movups (%esi),%xmm2
movups 16(%esi),%xmm3
leal 32(%esi),%esi
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
call __aesni_decrypt2
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
leal 32(%edi),%edi
movdqa %xmm6,%xmm1
jmp L069xts_dec_done
.align 4,0x90
L067xts_dec_three:
movaps %xmm1,%xmm7
movups (%esi),%xmm2
movups 16(%esi),%xmm3
movups 32(%esi),%xmm4
leal 48(%esi),%esi
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
xorps %xmm7,%xmm4
call __aesni_decrypt3
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
xorps %xmm7,%xmm4
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
leal 48(%edi),%edi
movdqa %xmm7,%xmm1
jmp L069xts_dec_done
.align 4,0x90
L068xts_dec_four:
movaps %xmm1,%xmm6
movups (%esi),%xmm2
movups 16(%esi),%xmm3
movups 32(%esi),%xmm4
xorps (%esp),%xmm2
movups 48(%esi),%xmm5
leal 64(%esi),%esi
xorps 16(%esp),%xmm3
xorps %xmm7,%xmm4
xorps %xmm6,%xmm5
call __aesni_decrypt4
xorps (%esp),%xmm2
xorps 16(%esp),%xmm3
xorps %xmm7,%xmm4
movups %xmm2,(%edi)
xorps %xmm6,%xmm5
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
movups %xmm5,48(%edi)
leal 64(%edi),%edi
movdqa %xmm6,%xmm1
jmp L069xts_dec_done
.align 4,0x90
L064xts_dec_done6x:
movl 112(%esp),%eax
andl $15,%eax
jz L071xts_dec_ret
movl %eax,112(%esp)
jmp L072xts_dec_only_one_more
.align 4,0x90
L069xts_dec_done:
movl 112(%esp),%eax
pxor %xmm0,%xmm0
andl $15,%eax
jz L071xts_dec_ret
pcmpgtd %xmm1,%xmm0
movl %eax,112(%esp)
pshufd $19,%xmm0,%xmm2
pxor %xmm0,%xmm0
movdqa 96(%esp),%xmm3
paddq %xmm1,%xmm1
pand %xmm3,%xmm2
pcmpgtd %xmm1,%xmm0
pxor %xmm2,%xmm1
L072xts_dec_only_one_more:
pshufd $19,%xmm0,%xmm5
movdqa %xmm1,%xmm6
paddq %xmm1,%xmm1
pand %xmm3,%xmm5
pxor %xmm1,%xmm5
movl %ebp,%edx
movl %ebx,%ecx
movups (%esi),%xmm2
xorps %xmm5,%xmm2
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
L073dec1_loop_13:
.byte 102,15,56,222,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz L073dec1_loop_13
.byte 102,15,56,223,209
xorps %xmm5,%xmm2
movups %xmm2,(%edi)
L074xts_dec_steal:
movzbl 16(%esi),%ecx
movzbl (%edi),%edx
leal 1(%esi),%esi
movb %cl,(%edi)
movb %dl,16(%edi)
leal 1(%edi),%edi
subl $1,%eax
jnz L074xts_dec_steal
subl 112(%esp),%edi
movl %ebp,%edx
movl %ebx,%ecx
movups (%edi),%xmm2
xorps %xmm6,%xmm2
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
L075dec1_loop_14:
.byte 102,15,56,222,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz L075dec1_loop_14
.byte 102,15,56,223,209
xorps %xmm6,%xmm2
movups %xmm2,(%edi)
L071xts_dec_ret:
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
movdqa %xmm0,(%esp)
pxor %xmm3,%xmm3
movdqa %xmm0,16(%esp)
pxor %xmm4,%xmm4
movdqa %xmm0,32(%esp)
pxor %xmm5,%xmm5
movdqa %xmm0,48(%esp)
pxor %xmm6,%xmm6
movdqa %xmm0,64(%esp)
pxor %xmm7,%xmm7
movdqa %xmm0,80(%esp)
movl 116(%esp),%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
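// CBC entry point: (in, out, length, key, ivec, enc). Encryption is
// inherently serial, one block per pass of the enc1 loop, since each block
// chains on the previous ciphertext; decryption has no such dependency and
// runs six blocks at a time.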
.globl _aes_hw_cbc_encrypt
.private_extern _aes_hw_cbc_encrypt
.align 4
_aes_hw_cbc_encrypt:
L_aes_hw_cbc_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%esi
movl %esp,%ebx
movl 24(%esp),%edi
subl $24,%ebx
movl 28(%esp),%eax
andl $-16,%ebx
movl 32(%esp),%edx
movl 36(%esp),%ebp
testl %eax,%eax
jz L076cbc_abort
cmpl $0,40(%esp)
xchgl %esp,%ebx
movups (%ebp),%xmm7
movl 240(%edx),%ecx
movl %edx,%ebp
movl %ebx,16(%esp)
movl %ecx,%ebx
je L077cbc_decrypt
movaps %xmm7,%xmm2
cmpl $16,%eax
jb L078cbc_enc_tail
subl $16,%eax
jmp L079cbc_enc_loop
.align 4,0x90
L079cbc_enc_loop:
movups (%esi),%xmm7
leal 16(%esi),%esi
movups (%edx),%xmm0
movups 16(%edx),%xmm1
xorps %xmm0,%xmm7
leal 32(%edx),%edx
xorps %xmm7,%xmm2
L080enc1_loop_15:
.byte 102,15,56,220,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz L080enc1_loop_15
.byte 102,15,56,221,209
movl %ebx,%ecx
movl %ebp,%edx
movups %xmm2,(%edi)
leal 16(%edi),%edi
subl $16,%eax
jnc L079cbc_enc_loop
addl $16,%eax
jnz L078cbc_enc_tail
movaps %xmm2,%xmm7
pxor %xmm2,%xmm2
jmp L081cbc_ret
L078cbc_enc_tail:
movl %eax,%ecx
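// 2767451785 is 0x89,0xf6,0xf3,0xa4: mov %esi,%esi; rep movsb - copy the
// %ecx remaining input bytes of the short final block.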
.long 2767451785
movl $16,%ecx
subl %eax,%ecx
xorl %eax,%eax
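// 2868115081 is 0x89,0xf6,0xf3,0xaa: mov %esi,%esi; rep stosb - zero-pad
// the block up to 16 bytes (%eax was just cleared).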
.long 2868115081
leal -16(%edi),%edi
movl %ebx,%ecx
movl %edi,%esi
movl %ebp,%edx
jmp L079cbc_enc_loop
.align 4,0x90
L077cbc_decrypt:
cmpl $80,%eax
jbe L082cbc_dec_tail
movaps %xmm7,(%esp)
subl $80,%eax
jmp L083cbc_dec_loop6_enter
.align 4,0x90
L084cbc_dec_loop6:
movaps %xmm0,(%esp)
movups %xmm7,(%edi)
leal 16(%edi),%edi
L083cbc_dec_loop6_enter:
movdqu (%esi),%xmm2
movdqu 16(%esi),%xmm3
movdqu 32(%esi),%xmm4
movdqu 48(%esi),%xmm5
movdqu 64(%esi),%xmm6
movdqu 80(%esi),%xmm7
call __aesni_decrypt6
movups (%esi),%xmm1
movups 16(%esi),%xmm0
xorps (%esp),%xmm2
xorps %xmm1,%xmm3
movups 32(%esi),%xmm1
xorps %xmm0,%xmm4
movups 48(%esi),%xmm0
xorps %xmm1,%xmm5
movups 64(%esi),%xmm1
xorps %xmm0,%xmm6
movups 80(%esi),%xmm0
xorps %xmm1,%xmm7
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
leal 96(%esi),%esi
movups %xmm4,32(%edi)
movl %ebx,%ecx
movups %xmm5,48(%edi)
movl %ebp,%edx
movups %xmm6,64(%edi)
leal 80(%edi),%edi
subl $96,%eax
ja L084cbc_dec_loop6
movaps %xmm7,%xmm2
movaps %xmm0,%xmm7
addl $80,%eax
jle L085cbc_dec_clear_tail_collected
movups %xmm2,(%edi)
leal 16(%edi),%edi
L082cbc_dec_tail:
movups (%esi),%xmm2
movaps %xmm2,%xmm6
cmpl $16,%eax
jbe L086cbc_dec_one
movups 16(%esi),%xmm3
movaps %xmm3,%xmm5
cmpl $32,%eax
jbe L087cbc_dec_two
movups 32(%esi),%xmm4
cmpl $48,%eax
jbe L088cbc_dec_three
movups 48(%esi),%xmm5
cmpl $64,%eax
jbe L089cbc_dec_four
movups 64(%esi),%xmm6
movaps %xmm7,(%esp)
movups (%esi),%xmm2
xorps %xmm7,%xmm7
call __aesni_decrypt6
movups (%esi),%xmm1
movups 16(%esi),%xmm0
xorps (%esp),%xmm2
xorps %xmm1,%xmm3
movups 32(%esi),%xmm1
xorps %xmm0,%xmm4
movups 48(%esi),%xmm0
xorps %xmm1,%xmm5
movups 64(%esi),%xmm7
xorps %xmm0,%xmm6
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
pxor %xmm3,%xmm3
movups %xmm4,32(%edi)
pxor %xmm4,%xmm4
movups %xmm5,48(%edi)
pxor %xmm5,%xmm5
leal 64(%edi),%edi
movaps %xmm6,%xmm2
pxor %xmm6,%xmm6
subl $80,%eax
jmp L090cbc_dec_tail_collected
.align 4,0x90
L086cbc_dec_one:
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
L091dec1_loop_16:
.byte 102,15,56,222,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz L091dec1_loop_16
.byte 102,15,56,223,209
xorps %xmm7,%xmm2
movaps %xmm6,%xmm7
subl $16,%eax
jmp L090cbc_dec_tail_collected
.align 4,0x90
L087cbc_dec_two:
call __aesni_decrypt2
xorps %xmm7,%xmm2
xorps %xmm6,%xmm3
movups %xmm2,(%edi)
movaps %xmm3,%xmm2
pxor %xmm3,%xmm3
leal 16(%edi),%edi
movaps %xmm5,%xmm7
subl $32,%eax
jmp L090cbc_dec_tail_collected
.align 4,0x90
L088cbc_dec_three:
call __aesni_decrypt3
xorps %xmm7,%xmm2
xorps %xmm6,%xmm3
xorps %xmm5,%xmm4
movups %xmm2,(%edi)
movaps %xmm4,%xmm2
pxor %xmm4,%xmm4
movups %xmm3,16(%edi)
pxor %xmm3,%xmm3
leal 32(%edi),%edi
movups 32(%esi),%xmm7
subl $48,%eax
jmp L090cbc_dec_tail_collected
.align 4,0x90
L089cbc_dec_four:
call __aesni_decrypt4
movups 16(%esi),%xmm1
movups 32(%esi),%xmm0
xorps %xmm7,%xmm2
movups 48(%esi),%xmm7
xorps %xmm6,%xmm3
movups %xmm2,(%edi)
xorps %xmm1,%xmm4
movups %xmm3,16(%edi)
pxor %xmm3,%xmm3
xorps %xmm0,%xmm5
movups %xmm4,32(%edi)
pxor %xmm4,%xmm4
leal 48(%edi),%edi
movaps %xmm5,%xmm2
pxor %xmm5,%xmm5
subl $64,%eax
jmp L090cbc_dec_tail_collected
.align 4,0x90
L085cbc_dec_clear_tail_collected:
pxor %xmm3,%xmm3
pxor %xmm4,%xmm4
pxor %xmm5,%xmm5
pxor %xmm6,%xmm6
L090cbc_dec_tail_collected:
andl $15,%eax
jnz L092cbc_dec_tail_partial
movups %xmm2,(%edi)
pxor %xmm0,%xmm0
jmp L081cbc_ret
.align 4,0x90
L092cbc_dec_tail_partial:
movaps %xmm2,(%esp)
pxor %xmm0,%xmm0
movl $16,%ecx
movl %esp,%esi
subl %eax,%ecx
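// rep movsb again (same encoding as in the encrypt tail): move the
// decrypted tail from the stack scratch buffer to the output.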
.long 2767451785
movdqa %xmm2,(%esp)
L081cbc_ret:
movl 16(%esp),%esp
movl 36(%esp),%ebp
pxor %xmm2,%xmm2
pxor %xmm1,%xmm1
movups %xmm7,(%ebp)
pxor %xmm7,%xmm7
L076cbc_abort:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
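// Key-schedule core shared by both public entry points below: user key in
// %eax, bit count in %ecx, output key schedule in %edx; returns 0 in %eax
// on success, -1 for a NULL pointer, -2 for unsupported key bits. The
// "_alt" expansions, selected through the OPENSSL_ia32cap_P feature bits
// masked at entry, rebuild each round key with pshufb/aesenclast rather
// than leaning on aeskeygenassist alone.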
.private_extern __aesni_set_encrypt_key
.align 4
__aesni_set_encrypt_key:
pushl %ebp
pushl %ebx
testl %eax,%eax
jz L093bad_pointer
testl %edx,%edx
jz L093bad_pointer
call L094pic
L094pic:
popl %ebx
leal Lkey_const-L094pic(%ebx),%ebx
movl L_OPENSSL_ia32cap_P$non_lazy_ptr-Lkey_const(%ebx),%ebp
movups (%eax),%xmm0
xorps %xmm4,%xmm4
movl 4(%ebp),%ebp
leal 16(%edx),%edx
andl $268437504,%ebp
cmpl $256,%ecx
je L09514rounds
cmpl $192,%ecx
je L09612rounds
cmpl $128,%ecx
jne L097bad_keybits
.align 4,0x90
L09810rounds:
cmpl $268435456,%ebp
je L09910rounds_alt
movl $9,%ecx
movups %xmm0,-16(%edx)
.byte 102,15,58,223,200,1
call L100key_128_cold
.byte 102,15,58,223,200,2
call L101key_128
.byte 102,15,58,223,200,4
call L101key_128
.byte 102,15,58,223,200,8
call L101key_128
.byte 102,15,58,223,200,16
call L101key_128
.byte 102,15,58,223,200,32
call L101key_128
.byte 102,15,58,223,200,64
call L101key_128
.byte 102,15,58,223,200,128
call L101key_128
.byte 102,15,58,223,200,27
call L101key_128
.byte 102,15,58,223,200,54
call L101key_128
movups %xmm0,(%edx)
movl %ecx,80(%edx)
jmp L102good_key
.align 4,0x90
L101key_128:
movups %xmm0,(%edx)
leal 16(%edx),%edx
L100key_128_cold:
shufps $16,%xmm0,%xmm4
xorps %xmm4,%xmm0
shufps $140,%xmm0,%xmm4
xorps %xmm4,%xmm0
shufps $255,%xmm1,%xmm1
xorps %xmm1,%xmm0
ret
.align 4,0x90
L09910rounds_alt:
movdqa (%ebx),%xmm5
movl $8,%ecx
movdqa 32(%ebx),%xmm4
movdqa %xmm0,%xmm2
movdqu %xmm0,-16(%edx)
L103loop_key128:
.byte 102,15,56,0,197
.byte 102,15,56,221,196
pslld $1,%xmm4
leal 16(%edx),%edx
movdqa %xmm2,%xmm3
pslldq $4,%xmm2
pxor %xmm2,%xmm3
pslldq $4,%xmm2
pxor %xmm2,%xmm3
pslldq $4,%xmm2
pxor %xmm3,%xmm2
pxor %xmm2,%xmm0
movdqu %xmm0,-16(%edx)
movdqa %xmm0,%xmm2
decl %ecx
jnz L103loop_key128
movdqa 48(%ebx),%xmm4
.byte 102,15,56,0,197
.byte 102,15,56,221,196
pslld $1,%xmm4
movdqa %xmm2,%xmm3
pslldq $4,%xmm2
pxor %xmm2,%xmm3
pslldq $4,%xmm2
pxor %xmm2,%xmm3
pslldq $4,%xmm2
pxor %xmm3,%xmm2
pxor %xmm2,%xmm0
movdqu %xmm0,(%edx)
movdqa %xmm0,%xmm2
.byte 102,15,56,0,197
.byte 102,15,56,221,196
movdqa %xmm2,%xmm3
pslldq $4,%xmm2
pxor %xmm2,%xmm3
pslldq $4,%xmm2
pxor %xmm2,%xmm3
pslldq $4,%xmm2
pxor %xmm3,%xmm2
pxor %xmm2,%xmm0
movdqu %xmm0,16(%edx)
movl $9,%ecx
movl %ecx,96(%edx)
jmp L102good_key
.align 4,0x90
L09612rounds:
movq 16(%eax),%xmm2
cmpl $268435456,%ebp
je L10412rounds_alt
movl $11,%ecx
movups %xmm0,-16(%edx)
.byte 102,15,58,223,202,1
call L105key_192a_cold
.byte 102,15,58,223,202,2
call L106key_192b
.byte 102,15,58,223,202,4
call L107key_192a
.byte 102,15,58,223,202,8
call L106key_192b
.byte 102,15,58,223,202,16
call L107key_192a
.byte 102,15,58,223,202,32
call L106key_192b
.byte 102,15,58,223,202,64
call L107key_192a
.byte 102,15,58,223,202,128
call L106key_192b
movups %xmm0,(%edx)
movl %ecx,48(%edx)
jmp L102good_key
.align 4,0x90
L107key_192a:
movups %xmm0,(%edx)
leal 16(%edx),%edx
.align 4,0x90
L105key_192a_cold:
movaps %xmm2,%xmm5
L108key_192b_warm:
shufps $16,%xmm0,%xmm4
movdqa %xmm2,%xmm3
xorps %xmm4,%xmm0
shufps $140,%xmm0,%xmm4
pslldq $4,%xmm3
xorps %xmm4,%xmm0
pshufd $85,%xmm1,%xmm1
pxor %xmm3,%xmm2
pxor %xmm1,%xmm0
pshufd $255,%xmm0,%xmm3
pxor %xmm3,%xmm2
ret
.align 4,0x90
L106key_192b:
movaps %xmm0,%xmm3
shufps $68,%xmm0,%xmm5
movups %xmm5,(%edx)
shufps $78,%xmm2,%xmm3
movups %xmm3,16(%edx)
leal 32(%edx),%edx
jmp L108key_192b_warm
.align 4,0x90
L10412rounds_alt:
movdqa 16(%ebx),%xmm5
movdqa 32(%ebx),%xmm4
movl $8,%ecx
movdqu %xmm0,-16(%edx)
L109loop_key192:
movq %xmm2,(%edx)
movdqa %xmm2,%xmm1
.byte 102,15,56,0,213
.byte 102,15,56,221,212
pslld $1,%xmm4
leal 24(%edx),%edx
movdqa %xmm0,%xmm3
pslldq $4,%xmm0
pxor %xmm0,%xmm3
pslldq $4,%xmm0
pxor %xmm0,%xmm3
pslldq $4,%xmm0
pxor %xmm3,%xmm0
pshufd $255,%xmm0,%xmm3
pxor %xmm1,%xmm3
pslldq $4,%xmm1
pxor %xmm1,%xmm3
pxor %xmm2,%xmm0
pxor %xmm3,%xmm2
movdqu %xmm0,-16(%edx)
decl %ecx
jnz L109loop_key192
movl $11,%ecx
movl %ecx,32(%edx)
jmp L102good_key
.align 4,0x90
L09514rounds:
movups 16(%eax),%xmm2
leal 16(%edx),%edx
cmpl $268435456,%ebp
je L11014rounds_alt
movl $13,%ecx
movups %xmm0,-32(%edx)
movups %xmm2,-16(%edx)
.byte 102,15,58,223,202,1
call L111key_256a_cold
.byte 102,15,58,223,200,1
call L112key_256b
.byte 102,15,58,223,202,2
call L113key_256a
.byte 102,15,58,223,200,2
call L112key_256b
.byte 102,15,58,223,202,4
call L113key_256a
.byte 102,15,58,223,200,4
call L112key_256b
.byte 102,15,58,223,202,8
call L113key_256a
.byte 102,15,58,223,200,8
call L112key_256b
.byte 102,15,58,223,202,16
call L113key_256a
.byte 102,15,58,223,200,16
call L112key_256b
.byte 102,15,58,223,202,32
call L113key_256a
.byte 102,15,58,223,200,32
call L112key_256b
.byte 102,15,58,223,202,64
call L113key_256a
movups %xmm0,(%edx)
movl %ecx,16(%edx)
xorl %eax,%eax
jmp L102good_key
.align 4,0x90
L113key_256a:
movups %xmm2,(%edx)
leal 16(%edx),%edx
L111key_256a_cold:
shufps $16,%xmm0,%xmm4
xorps %xmm4,%xmm0
shufps $140,%xmm0,%xmm4
xorps %xmm4,%xmm0
shufps $255,%xmm1,%xmm1
xorps %xmm1,%xmm0
ret
.align 4,0x90
L112key_256b:
movups %xmm0,(%edx)
leal 16(%edx),%edx
shufps $16,%xmm2,%xmm4
xorps %xmm4,%xmm2
shufps $140,%xmm2,%xmm4
xorps %xmm4,%xmm2
shufps $170,%xmm1,%xmm1
xorps %xmm1,%xmm2
ret
.align 4,0x90
L11014rounds_alt:
movdqa (%ebx),%xmm5
movdqa 32(%ebx),%xmm4
movl $7,%ecx
movdqu %xmm0,-32(%edx)
movdqa %xmm2,%xmm1
movdqu %xmm2,-16(%edx)
L114loop_key256:
.byte 102,15,56,0,213
.byte 102,15,56,221,212
movdqa %xmm0,%xmm3
pslldq $4,%xmm0
pxor %xmm0,%xmm3
pslldq $4,%xmm0
pxor %xmm0,%xmm3
pslldq $4,%xmm0
pxor %xmm3,%xmm0
pslld $1,%xmm4
pxor %xmm2,%xmm0
movdqu %xmm0,(%edx)
decl %ecx
jz L115done_key256
pshufd $255,%xmm0,%xmm2
pxor %xmm3,%xmm3
.byte 102,15,56,221,211
movdqa %xmm1,%xmm3
pslldq $4,%xmm1
pxor %xmm1,%xmm3
pslldq $4,%xmm1
pxor %xmm1,%xmm3
pslldq $4,%xmm1
pxor %xmm3,%xmm1
pxor %xmm1,%xmm2
movdqu %xmm2,16(%edx)
leal 32(%edx),%edx
movdqa %xmm2,%xmm1
jmp L114loop_key256
L115done_key256:
movl $13,%ecx
movl %ecx,16(%edx)
L102good_key:
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
pxor %xmm4,%xmm4
pxor %xmm5,%xmm5
xorl %eax,%eax
popl %ebx
popl %ebp
ret
.align 2,0x90
L093bad_pointer:
movl $-1,%eax
popl %ebx
popl %ebp
ret
.align 2,0x90
L097bad_keybits:
pxor %xmm0,%xmm0
movl $-2,%eax
popl %ebx
popl %ebp
ret
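// Public wrapper around the key-schedule core; per the three stack loads
// below the arguments are (user key, bit count, output key schedule) -
// the names are descriptive, not from this file.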
.globl _aes_hw_set_encrypt_key
.private_extern _aes_hw_set_encrypt_key
.align 4
_aes_hw_set_encrypt_key:
L_aes_hw_set_encrypt_key_begin:
#ifdef BORINGSSL_DISPATCH_TEST
pushl %ebx
pushl %edx
call L116pic
L116pic:
popl %ebx
leal _BORINGSSL_function_hit+3-L116pic(%ebx),%ebx
movl $1,%edx
movb %dl,(%ebx)
popl %edx
popl %ebx
#endif
movl 4(%esp),%eax
movl 8(%esp),%ecx
movl 12(%esp),%edx
call __aesni_set_encrypt_key
ret
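// Decryption schedule: expand the encryption schedule, then reverse the
// round-key order and run the inner keys through aesimc
// (.byte 102,15,56,219,...), as AES's equivalent inverse cipher requires.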
.globl _aes_hw_set_decrypt_key
.private_extern _aes_hw_set_decrypt_key
.align 4
_aes_hw_set_decrypt_key:
L_aes_hw_set_decrypt_key_begin:
movl 4(%esp),%eax
movl 8(%esp),%ecx
movl 12(%esp),%edx
call __aesni_set_encrypt_key
movl 12(%esp),%edx
shll $4,%ecx
testl %eax,%eax
jnz L117dec_key_ret
leal 16(%edx,%ecx,1),%eax
movups (%edx),%xmm0
movups (%eax),%xmm1
movups %xmm0,(%eax)
movups %xmm1,(%edx)
leal 16(%edx),%edx
leal -16(%eax),%eax
L118dec_key_inverse:
movups (%edx),%xmm0
movups (%eax),%xmm1
.byte 102,15,56,219,192
.byte 102,15,56,219,201
leal 16(%edx),%edx
leal -16(%eax),%eax
movups %xmm0,16(%eax)
movups %xmm1,-16(%edx)
cmpl %edx,%eax
ja L118dec_key_inverse
movups (%edx),%xmm0
.byte 102,15,56,219,192
movups %xmm0,(%edx)
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
xorl %eax,%eax
L117dec_key_ret:
ret
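// Constant pool for the _alt key schedules: two pshufb masks followed by
// the round-constant words 1 and 27 (0x1b).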
.align 6,0x90
Lkey_const:
.long 202313229,202313229,202313229,202313229
.long 67569157,67569157,67569157,67569157
.long 1,1,1,1
.long 27,27,27,27
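// The .byte strings below spell the CRYPTOGAMS credit:
// "AES for Intel AES-NI, CRYPTOGAMS by <appro@openssl.org>"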
.byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69
.byte 83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83
.byte 32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
.byte 115,108,46,111,114,103,62,0
.section __IMPORT,__pointers,non_lazy_symbol_pointers
L_OPENSSL_ia32cap_P$non_lazy_ptr:
.indirect_symbol _OPENSSL_ia32cap_P
.long 0
#endif