- // This file is generated from a similarly-named Perl script in the BoringSSL
- // source tree. Do not edit by hand.
- #if defined(__has_feature)
- #if __has_feature(memory_sanitizer) && !defined(OPENSSL_NO_ASM)
- #define OPENSSL_NO_ASM
- #endif
- #endif
- #if defined(__x86_64__) && !defined(OPENSSL_NO_ASM)
- #if defined(BORINGSSL_PREFIX)
- #include <boringssl_prefix_symbols_asm.h>
- #endif
- .text
- .globl _aes_hw_encrypt
- .private_extern _aes_hw_encrypt
- .p2align 4
- _aes_hw_encrypt:
- #ifdef BORINGSSL_DISPATCH_TEST
- movb $1,_BORINGSSL_function_hit+1(%rip)
- #endif
- movups (%rdi),%xmm2
- movl 240(%rdx),%eax
- movups (%rdx),%xmm0
- movups 16(%rdx),%xmm1
- leaq 32(%rdx),%rdx
- xorps %xmm0,%xmm2
- L$oop_enc1_1:
- .byte 102,15,56,220,209
- decl %eax
- movups (%rdx),%xmm1
- leaq 16(%rdx),%rdx
- jnz L$oop_enc1_1
- .byte 102,15,56,221,209
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- .byte 0xf3,0xc3
- .globl _aes_hw_decrypt
- .private_extern _aes_hw_decrypt
- .p2align 4
- _aes_hw_decrypt:
- movups (%rdi),%xmm2
- movl 240(%rdx),%eax
- movups (%rdx),%xmm0
- movups 16(%rdx),%xmm1
- leaq 32(%rdx),%rdx
- xorps %xmm0,%xmm2
- L$oop_dec1_2:
- .byte 102,15,56,222,209
- decl %eax
- movups (%rdx),%xmm1
- leaq 16(%rdx),%rdx
- jnz L$oop_dec1_2
- .byte 102,15,56,223,209
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- .byte 0xf3,0xc3
- .p2align 4
- _aesni_encrypt2:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- movups 32(%rcx),%xmm0
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- addq $16,%rax
- L$enc_loop2:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$enc_loop2
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- .byte 0xf3,0xc3
- .p2align 4
- _aesni_decrypt2:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- movups 32(%rcx),%xmm0
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- addq $16,%rax
- L$dec_loop2:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$dec_loop2
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,223,208
- .byte 102,15,56,223,216
- .byte 0xf3,0xc3
- .p2align 4
- _aesni_encrypt3:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- xorps %xmm0,%xmm4
- movups 32(%rcx),%xmm0
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- addq $16,%rax
- L$enc_loop3:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$enc_loop3
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- .byte 102,15,56,221,224
- .byte 0xf3,0xc3
- .p2align 4
- _aesni_decrypt3:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- xorps %xmm0,%xmm4
- movups 32(%rcx),%xmm0
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- addq $16,%rax
- L$dec_loop3:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$dec_loop3
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,223,208
- .byte 102,15,56,223,216
- .byte 102,15,56,223,224
- .byte 0xf3,0xc3
- .p2align 4
- _aesni_encrypt4:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- xorps %xmm0,%xmm4
- xorps %xmm0,%xmm5
- movups 32(%rcx),%xmm0
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- .byte 0x0f,0x1f,0x00
- addq $16,%rax
- L$enc_loop4:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$enc_loop4
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- .byte 102,15,56,221,224
- .byte 102,15,56,221,232
- .byte 0xf3,0xc3
- .p2align 4
- _aesni_decrypt4:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- xorps %xmm0,%xmm4
- xorps %xmm0,%xmm5
- movups 32(%rcx),%xmm0
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- .byte 0x0f,0x1f,0x00
- addq $16,%rax
- L$dec_loop4:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$dec_loop4
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,223,208
- .byte 102,15,56,223,216
- .byte 102,15,56,223,224
- .byte 102,15,56,223,232
- .byte 0xf3,0xc3
- .p2align 4
- _aesni_encrypt6:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- pxor %xmm0,%xmm3
- pxor %xmm0,%xmm4
- .byte 102,15,56,220,209
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- .byte 102,15,56,220,217
- pxor %xmm0,%xmm5
- pxor %xmm0,%xmm6
- .byte 102,15,56,220,225
- pxor %xmm0,%xmm7
- movups (%rcx,%rax,1),%xmm0
- addq $16,%rax
- jmp L$enc_loop6_enter
- .p2align 4
- L$enc_loop6:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- L$enc_loop6_enter:
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$enc_loop6
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- .byte 102,15,56,221,224
- .byte 102,15,56,221,232
- .byte 102,15,56,221,240
- .byte 102,15,56,221,248
- .byte 0xf3,0xc3
- .p2align 4
- _aesni_decrypt6:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- pxor %xmm0,%xmm3
- pxor %xmm0,%xmm4
- .byte 102,15,56,222,209
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- .byte 102,15,56,222,217
- pxor %xmm0,%xmm5
- pxor %xmm0,%xmm6
- .byte 102,15,56,222,225
- pxor %xmm0,%xmm7
- movups (%rcx,%rax,1),%xmm0
- addq $16,%rax
- jmp L$dec_loop6_enter
- .p2align 4
- L$dec_loop6:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- L$dec_loop6_enter:
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$dec_loop6
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,15,56,223,208
- .byte 102,15,56,223,216
- .byte 102,15,56,223,224
- .byte 102,15,56,223,232
- .byte 102,15,56,223,240
- .byte 102,15,56,223,248
- .byte 0xf3,0xc3
- .p2align 4
- _aesni_encrypt8:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- pxor %xmm0,%xmm4
- pxor %xmm0,%xmm5
- pxor %xmm0,%xmm6
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- .byte 102,15,56,220,209
- pxor %xmm0,%xmm7
- pxor %xmm0,%xmm8
- .byte 102,15,56,220,217
- pxor %xmm0,%xmm9
- movups (%rcx,%rax,1),%xmm0
- addq $16,%rax
- jmp L$enc_loop8_inner
- .p2align 4
- L$enc_loop8:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- L$enc_loop8_inner:
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- L$enc_loop8_enter:
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$enc_loop8
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- .byte 102,15,56,221,224
- .byte 102,15,56,221,232
- .byte 102,15,56,221,240
- .byte 102,15,56,221,248
- .byte 102,68,15,56,221,192
- .byte 102,68,15,56,221,200
- .byte 0xf3,0xc3
- .p2align 4
- _aesni_decrypt8:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- pxor %xmm0,%xmm4
- pxor %xmm0,%xmm5
- pxor %xmm0,%xmm6
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- .byte 102,15,56,222,209
- pxor %xmm0,%xmm7
- pxor %xmm0,%xmm8
- .byte 102,15,56,222,217
- pxor %xmm0,%xmm9
- movups (%rcx,%rax,1),%xmm0
- addq $16,%rax
- jmp L$dec_loop8_inner
- .p2align 4
- L$dec_loop8:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- L$dec_loop8_inner:
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- L$dec_loop8_enter:
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$dec_loop8
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- .byte 102,15,56,223,208
- .byte 102,15,56,223,216
- .byte 102,15,56,223,224
- .byte 102,15,56,223,232
- .byte 102,15,56,223,240
- .byte 102,15,56,223,248
- .byte 102,68,15,56,223,192
- .byte 102,68,15,56,223,200
- .byte 0xf3,0xc3
- .globl _aes_hw_ecb_encrypt
- .private_extern _aes_hw_ecb_encrypt
- .p2align 4
- _aes_hw_ecb_encrypt:
- andq $-16,%rdx
- jz L$ecb_ret
- movl 240(%rcx),%eax
- movups (%rcx),%xmm0
- movq %rcx,%r11
- movl %eax,%r10d
- testl %r8d,%r8d
- jz L$ecb_decrypt
- cmpq $0x80,%rdx
- jb L$ecb_enc_tail
- movdqu (%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqu 32(%rdi),%xmm4
- movdqu 48(%rdi),%xmm5
- movdqu 64(%rdi),%xmm6
- movdqu 80(%rdi),%xmm7
- movdqu 96(%rdi),%xmm8
- movdqu 112(%rdi),%xmm9
- leaq 128(%rdi),%rdi
- subq $0x80,%rdx
- jmp L$ecb_enc_loop8_enter
- .p2align 4
- L$ecb_enc_loop8:
- movups %xmm2,(%rsi)
- movq %r11,%rcx
- movdqu (%rdi),%xmm2
- movl %r10d,%eax
- movups %xmm3,16(%rsi)
- movdqu 16(%rdi),%xmm3
- movups %xmm4,32(%rsi)
- movdqu 32(%rdi),%xmm4
- movups %xmm5,48(%rsi)
- movdqu 48(%rdi),%xmm5
- movups %xmm6,64(%rsi)
- movdqu 64(%rdi),%xmm6
- movups %xmm7,80(%rsi)
- movdqu 80(%rdi),%xmm7
- movups %xmm8,96(%rsi)
- movdqu 96(%rdi),%xmm8
- movups %xmm9,112(%rsi)
- leaq 128(%rsi),%rsi
- movdqu 112(%rdi),%xmm9
- leaq 128(%rdi),%rdi
- L$ecb_enc_loop8_enter:
- call _aesni_encrypt8
- subq $0x80,%rdx
- jnc L$ecb_enc_loop8
- movups %xmm2,(%rsi)
- movq %r11,%rcx
- movups %xmm3,16(%rsi)
- movl %r10d,%eax
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- movups %xmm6,64(%rsi)
- movups %xmm7,80(%rsi)
- movups %xmm8,96(%rsi)
- movups %xmm9,112(%rsi)
- leaq 128(%rsi),%rsi
- addq $0x80,%rdx
- jz L$ecb_ret
- L$ecb_enc_tail:
- movups (%rdi),%xmm2
- cmpq $0x20,%rdx
- jb L$ecb_enc_one
- movups 16(%rdi),%xmm3
- je L$ecb_enc_two
- movups 32(%rdi),%xmm4
- cmpq $0x40,%rdx
- jb L$ecb_enc_three
- movups 48(%rdi),%xmm5
- je L$ecb_enc_four
- movups 64(%rdi),%xmm6
- cmpq $0x60,%rdx
- jb L$ecb_enc_five
- movups 80(%rdi),%xmm7
- je L$ecb_enc_six
- movdqu 96(%rdi),%xmm8
- xorps %xmm9,%xmm9
- call _aesni_encrypt8
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- movups %xmm6,64(%rsi)
- movups %xmm7,80(%rsi)
- movups %xmm8,96(%rsi)
- jmp L$ecb_ret
- .p2align 4
- L$ecb_enc_one:
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- L$oop_enc1_3:
- .byte 102,15,56,220,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz L$oop_enc1_3
- .byte 102,15,56,221,209
- movups %xmm2,(%rsi)
- jmp L$ecb_ret
- .p2align 4
- L$ecb_enc_two:
- call _aesni_encrypt2
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- jmp L$ecb_ret
- .p2align 4
- L$ecb_enc_three:
- call _aesni_encrypt3
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- jmp L$ecb_ret
- .p2align 4
- L$ecb_enc_four:
- call _aesni_encrypt4
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- jmp L$ecb_ret
- .p2align 4
- L$ecb_enc_five:
- xorps %xmm7,%xmm7
- call _aesni_encrypt6
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- movups %xmm6,64(%rsi)
- jmp L$ecb_ret
- .p2align 4
- L$ecb_enc_six:
- call _aesni_encrypt6
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- movups %xmm6,64(%rsi)
- movups %xmm7,80(%rsi)
- jmp L$ecb_ret
- .p2align 4
- L$ecb_decrypt:
- cmpq $0x80,%rdx
- jb L$ecb_dec_tail
- movdqu (%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqu 32(%rdi),%xmm4
- movdqu 48(%rdi),%xmm5
- movdqu 64(%rdi),%xmm6
- movdqu 80(%rdi),%xmm7
- movdqu 96(%rdi),%xmm8
- movdqu 112(%rdi),%xmm9
- leaq 128(%rdi),%rdi
- subq $0x80,%rdx
- jmp L$ecb_dec_loop8_enter
- .p2align 4
- L$ecb_dec_loop8:
- movups %xmm2,(%rsi)
- movq %r11,%rcx
- movdqu (%rdi),%xmm2
- movl %r10d,%eax
- movups %xmm3,16(%rsi)
- movdqu 16(%rdi),%xmm3
- movups %xmm4,32(%rsi)
- movdqu 32(%rdi),%xmm4
- movups %xmm5,48(%rsi)
- movdqu 48(%rdi),%xmm5
- movups %xmm6,64(%rsi)
- movdqu 64(%rdi),%xmm6
- movups %xmm7,80(%rsi)
- movdqu 80(%rdi),%xmm7
- movups %xmm8,96(%rsi)
- movdqu 96(%rdi),%xmm8
- movups %xmm9,112(%rsi)
- leaq 128(%rsi),%rsi
- movdqu 112(%rdi),%xmm9
- leaq 128(%rdi),%rdi
- L$ecb_dec_loop8_enter:
- call _aesni_decrypt8
- movups (%r11),%xmm0
- subq $0x80,%rdx
- jnc L$ecb_dec_loop8
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movq %r11,%rcx
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movl %r10d,%eax
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- movups %xmm6,64(%rsi)
- pxor %xmm6,%xmm6
- movups %xmm7,80(%rsi)
- pxor %xmm7,%xmm7
- movups %xmm8,96(%rsi)
- pxor %xmm8,%xmm8
- movups %xmm9,112(%rsi)
- pxor %xmm9,%xmm9
- leaq 128(%rsi),%rsi
- addq $0x80,%rdx
- jz L$ecb_ret
- L$ecb_dec_tail:
- movups (%rdi),%xmm2
- cmpq $0x20,%rdx
- jb L$ecb_dec_one
- movups 16(%rdi),%xmm3
- je L$ecb_dec_two
- movups 32(%rdi),%xmm4
- cmpq $0x40,%rdx
- jb L$ecb_dec_three
- movups 48(%rdi),%xmm5
- je L$ecb_dec_four
- movups 64(%rdi),%xmm6
- cmpq $0x60,%rdx
- jb L$ecb_dec_five
- movups 80(%rdi),%xmm7
- je L$ecb_dec_six
- movups 96(%rdi),%xmm8
- movups (%rcx),%xmm0
- xorps %xmm9,%xmm9
- call _aesni_decrypt8
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- movups %xmm6,64(%rsi)
- pxor %xmm6,%xmm6
- movups %xmm7,80(%rsi)
- pxor %xmm7,%xmm7
- movups %xmm8,96(%rsi)
- pxor %xmm8,%xmm8
- pxor %xmm9,%xmm9
- jmp L$ecb_ret
- .p2align 4
- L$ecb_dec_one:
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- L$oop_dec1_4:
- .byte 102,15,56,222,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz L$oop_dec1_4
- .byte 102,15,56,223,209
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- jmp L$ecb_ret
- .p2align 4
- L$ecb_dec_two:
- call _aesni_decrypt2
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- jmp L$ecb_ret
- .p2align 4
- L$ecb_dec_three:
- call _aesni_decrypt3
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- jmp L$ecb_ret
- .p2align 4
- L$ecb_dec_four:
- call _aesni_decrypt4
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- jmp L$ecb_ret
- .p2align 4
- L$ecb_dec_five:
- xorps %xmm7,%xmm7
- call _aesni_decrypt6
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- movups %xmm6,64(%rsi)
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- jmp L$ecb_ret
- .p2align 4
- L$ecb_dec_six:
- call _aesni_decrypt6
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- movups %xmm6,64(%rsi)
- pxor %xmm6,%xmm6
- movups %xmm7,80(%rsi)
- pxor %xmm7,%xmm7
- L$ecb_ret:
- xorps %xmm0,%xmm0
- pxor %xmm1,%xmm1
- .byte 0xf3,0xc3
- .globl _aes_hw_ctr32_encrypt_blocks
- .private_extern _aes_hw_ctr32_encrypt_blocks
- .p2align 4
- _aes_hw_ctr32_encrypt_blocks:
- #ifdef BORINGSSL_DISPATCH_TEST
- movb $1,_BORINGSSL_function_hit(%rip)
- #endif
- cmpq $1,%rdx
- jne L$ctr32_bulk
- movups (%r8),%xmm2
- movups (%rdi),%xmm3
- movl 240(%rcx),%edx
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- L$oop_enc1_5:
- .byte 102,15,56,220,209
- decl %edx
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz L$oop_enc1_5
- .byte 102,15,56,221,209
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- xorps %xmm3,%xmm2
- pxor %xmm3,%xmm3
- movups %xmm2,(%rsi)
- xorps %xmm2,%xmm2
- jmp L$ctr32_epilogue
- .p2align 4
- L$ctr32_bulk:
- leaq (%rsp),%r11
- pushq %rbp
- subq $128,%rsp
- andq $-16,%rsp
- movdqu (%r8),%xmm2
- movdqu (%rcx),%xmm0
- movl 12(%r8),%r8d
- pxor %xmm0,%xmm2
- movl 12(%rcx),%ebp
- movdqa %xmm2,0(%rsp)
- bswapl %r8d
- movdqa %xmm2,%xmm3
- movdqa %xmm2,%xmm4
- movdqa %xmm2,%xmm5
- movdqa %xmm2,64(%rsp)
- movdqa %xmm2,80(%rsp)
- movdqa %xmm2,96(%rsp)
- movq %rdx,%r10
- movdqa %xmm2,112(%rsp)
- leaq 1(%r8),%rax
- leaq 2(%r8),%rdx
- bswapl %eax
- bswapl %edx
- xorl %ebp,%eax
- xorl %ebp,%edx
- .byte 102,15,58,34,216,3
- leaq 3(%r8),%rax
- movdqa %xmm3,16(%rsp)
- .byte 102,15,58,34,226,3
- bswapl %eax
- movq %r10,%rdx
- leaq 4(%r8),%r10
- movdqa %xmm4,32(%rsp)
- xorl %ebp,%eax
- bswapl %r10d
- .byte 102,15,58,34,232,3
- xorl %ebp,%r10d
- movdqa %xmm5,48(%rsp)
- leaq 5(%r8),%r9
- movl %r10d,64+12(%rsp)
- bswapl %r9d
- leaq 6(%r8),%r10
- movl 240(%rcx),%eax
- xorl %ebp,%r9d
- bswapl %r10d
- movl %r9d,80+12(%rsp)
- xorl %ebp,%r10d
- leaq 7(%r8),%r9
- movl %r10d,96+12(%rsp)
- bswapl %r9d
- leaq _OPENSSL_ia32cap_P(%rip),%r10
- movl 4(%r10),%r10d
- xorl %ebp,%r9d
- andl $71303168,%r10d
- movl %r9d,112+12(%rsp)
- movups 16(%rcx),%xmm1
- movdqa 64(%rsp),%xmm6
- movdqa 80(%rsp),%xmm7
- cmpq $8,%rdx
- jb L$ctr32_tail
- subq $6,%rdx
- cmpl $4194304,%r10d
- je L$ctr32_6x
- leaq 128(%rcx),%rcx
- subq $2,%rdx
- jmp L$ctr32_loop8
- .p2align 4
- L$ctr32_6x:
- shll $4,%eax
- movl $48,%r10d
- bswapl %ebp
- leaq 32(%rcx,%rax,1),%rcx
- subq %rax,%r10
- jmp L$ctr32_loop6
- .p2align 4
- L$ctr32_loop6:
- addl $6,%r8d
- movups -48(%rcx,%r10,1),%xmm0
- .byte 102,15,56,220,209
- movl %r8d,%eax
- xorl %ebp,%eax
- .byte 102,15,56,220,217
- .byte 0x0f,0x38,0xf1,0x44,0x24,12
- leal 1(%r8),%eax
- .byte 102,15,56,220,225
- xorl %ebp,%eax
- .byte 0x0f,0x38,0xf1,0x44,0x24,28
- .byte 102,15,56,220,233
- leal 2(%r8),%eax
- xorl %ebp,%eax
- .byte 102,15,56,220,241
- .byte 0x0f,0x38,0xf1,0x44,0x24,44
- leal 3(%r8),%eax
- .byte 102,15,56,220,249
- movups -32(%rcx,%r10,1),%xmm1
- xorl %ebp,%eax
- .byte 102,15,56,220,208
- .byte 0x0f,0x38,0xf1,0x44,0x24,60
- leal 4(%r8),%eax
- .byte 102,15,56,220,216
- xorl %ebp,%eax
- .byte 0x0f,0x38,0xf1,0x44,0x24,76
- .byte 102,15,56,220,224
- leal 5(%r8),%eax
- xorl %ebp,%eax
- .byte 102,15,56,220,232
- .byte 0x0f,0x38,0xf1,0x44,0x24,92
- movq %r10,%rax
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- movups -16(%rcx,%r10,1),%xmm0
- call L$enc_loop6
- movdqu (%rdi),%xmm8
- movdqu 16(%rdi),%xmm9
- movdqu 32(%rdi),%xmm10
- movdqu 48(%rdi),%xmm11
- movdqu 64(%rdi),%xmm12
- movdqu 80(%rdi),%xmm13
- leaq 96(%rdi),%rdi
- movups -64(%rcx,%r10,1),%xmm1
- pxor %xmm2,%xmm8
- movaps 0(%rsp),%xmm2
- pxor %xmm3,%xmm9
- movaps 16(%rsp),%xmm3
- pxor %xmm4,%xmm10
- movaps 32(%rsp),%xmm4
- pxor %xmm5,%xmm11
- movaps 48(%rsp),%xmm5
- pxor %xmm6,%xmm12
- movaps 64(%rsp),%xmm6
- pxor %xmm7,%xmm13
- movaps 80(%rsp),%xmm7
- movdqu %xmm8,(%rsi)
- movdqu %xmm9,16(%rsi)
- movdqu %xmm10,32(%rsi)
- movdqu %xmm11,48(%rsi)
- movdqu %xmm12,64(%rsi)
- movdqu %xmm13,80(%rsi)
- leaq 96(%rsi),%rsi
- subq $6,%rdx
- jnc L$ctr32_loop6
- addq $6,%rdx
- jz L$ctr32_done
- leal -48(%r10),%eax
- leaq -80(%rcx,%r10,1),%rcx
- negl %eax
- shrl $4,%eax
- jmp L$ctr32_tail
- .p2align 5
- L$ctr32_loop8:
- addl $8,%r8d
- movdqa 96(%rsp),%xmm8
- .byte 102,15,56,220,209
- movl %r8d,%r9d
- movdqa 112(%rsp),%xmm9
- .byte 102,15,56,220,217
- bswapl %r9d
- movups 32-128(%rcx),%xmm0
- .byte 102,15,56,220,225
- xorl %ebp,%r9d
- nop
- .byte 102,15,56,220,233
- movl %r9d,0+12(%rsp)
- leaq 1(%r8),%r9
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movups 48-128(%rcx),%xmm1
- bswapl %r9d
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- xorl %ebp,%r9d
- .byte 0x66,0x90
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movl %r9d,16+12(%rsp)
- leaq 2(%r8),%r9
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups 64-128(%rcx),%xmm0
- bswapl %r9d
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- xorl %ebp,%r9d
- .byte 0x66,0x90
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movl %r9d,32+12(%rsp)
- leaq 3(%r8),%r9
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movups 80-128(%rcx),%xmm1
- bswapl %r9d
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- xorl %ebp,%r9d
- .byte 0x66,0x90
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movl %r9d,48+12(%rsp)
- leaq 4(%r8),%r9
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups 96-128(%rcx),%xmm0
- bswapl %r9d
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- xorl %ebp,%r9d
- .byte 0x66,0x90
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movl %r9d,64+12(%rsp)
- leaq 5(%r8),%r9
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movups 112-128(%rcx),%xmm1
- bswapl %r9d
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- xorl %ebp,%r9d
- .byte 0x66,0x90
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movl %r9d,80+12(%rsp)
- leaq 6(%r8),%r9
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups 128-128(%rcx),%xmm0
- bswapl %r9d
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- xorl %ebp,%r9d
- .byte 0x66,0x90
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movl %r9d,96+12(%rsp)
- leaq 7(%r8),%r9
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movups 144-128(%rcx),%xmm1
- bswapl %r9d
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- xorl %ebp,%r9d
- movdqu 0(%rdi),%xmm10
- .byte 102,15,56,220,232
- movl %r9d,112+12(%rsp)
- cmpl $11,%eax
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups 160-128(%rcx),%xmm0
- jb L$ctr32_enc_done
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movups 176-128(%rcx),%xmm1
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups 192-128(%rcx),%xmm0
- je L$ctr32_enc_done
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movups 208-128(%rcx),%xmm1
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups 224-128(%rcx),%xmm0
- jmp L$ctr32_enc_done
- .p2align 4
- L$ctr32_enc_done:
- movdqu 16(%rdi),%xmm11
- pxor %xmm0,%xmm10
- movdqu 32(%rdi),%xmm12
- pxor %xmm0,%xmm11
- movdqu 48(%rdi),%xmm13
- pxor %xmm0,%xmm12
- movdqu 64(%rdi),%xmm14
- pxor %xmm0,%xmm13
- movdqu 80(%rdi),%xmm15
- pxor %xmm0,%xmm14
- pxor %xmm0,%xmm15
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movdqu 96(%rdi),%xmm1
- leaq 128(%rdi),%rdi
- .byte 102,65,15,56,221,210
- pxor %xmm0,%xmm1
- movdqu 112-128(%rdi),%xmm10
- .byte 102,65,15,56,221,219
- pxor %xmm0,%xmm10
- movdqa 0(%rsp),%xmm11
- .byte 102,65,15,56,221,228
- .byte 102,65,15,56,221,237
- movdqa 16(%rsp),%xmm12
- movdqa 32(%rsp),%xmm13
- .byte 102,65,15,56,221,246
- .byte 102,65,15,56,221,255
- movdqa 48(%rsp),%xmm14
- movdqa 64(%rsp),%xmm15
- .byte 102,68,15,56,221,193
- movdqa 80(%rsp),%xmm0
- movups 16-128(%rcx),%xmm1
- .byte 102,69,15,56,221,202
- movups %xmm2,(%rsi)
- movdqa %xmm11,%xmm2
- movups %xmm3,16(%rsi)
- movdqa %xmm12,%xmm3
- movups %xmm4,32(%rsi)
- movdqa %xmm13,%xmm4
- movups %xmm5,48(%rsi)
- movdqa %xmm14,%xmm5
- movups %xmm6,64(%rsi)
- movdqa %xmm15,%xmm6
- movups %xmm7,80(%rsi)
- movdqa %xmm0,%xmm7
- movups %xmm8,96(%rsi)
- movups %xmm9,112(%rsi)
- leaq 128(%rsi),%rsi
- subq $8,%rdx
- jnc L$ctr32_loop8
- addq $8,%rdx
- jz L$ctr32_done
- leaq -128(%rcx),%rcx
- L$ctr32_tail:
- leaq 16(%rcx),%rcx
- cmpq $4,%rdx
- jb L$ctr32_loop3
- je L$ctr32_loop4
- shll $4,%eax
- movdqa 96(%rsp),%xmm8
- pxor %xmm9,%xmm9
- movups 16(%rcx),%xmm0
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- leaq 32-16(%rcx,%rax,1),%rcx
- negq %rax
- .byte 102,15,56,220,225
- addq $16,%rax
- movups (%rdi),%xmm10
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- movups 16(%rdi),%xmm11
- movups 32(%rdi),%xmm12
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- call L$enc_loop8_enter
- movdqu 48(%rdi),%xmm13
- pxor %xmm10,%xmm2
- movdqu 64(%rdi),%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm13,%xmm5
- movdqu %xmm4,32(%rsi)
- pxor %xmm10,%xmm6
- movdqu %xmm5,48(%rsi)
- movdqu %xmm6,64(%rsi)
- cmpq $6,%rdx
- jb L$ctr32_done
- movups 80(%rdi),%xmm11
- xorps %xmm11,%xmm7
- movups %xmm7,80(%rsi)
- je L$ctr32_done
- movups 96(%rdi),%xmm12
- xorps %xmm12,%xmm8
- movups %xmm8,96(%rsi)
- jmp L$ctr32_done
- .p2align 5
- L$ctr32_loop4:
- .byte 102,15,56,220,209
- leaq 16(%rcx),%rcx
- decl %eax
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movups (%rcx),%xmm1
- jnz L$ctr32_loop4
- .byte 102,15,56,221,209
- .byte 102,15,56,221,217
- movups (%rdi),%xmm10
- movups 16(%rdi),%xmm11
- .byte 102,15,56,221,225
- .byte 102,15,56,221,233
- movups 32(%rdi),%xmm12
- movups 48(%rdi),%xmm13
- xorps %xmm10,%xmm2
- movups %xmm2,(%rsi)
- xorps %xmm11,%xmm3
- movups %xmm3,16(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm4,32(%rsi)
- pxor %xmm13,%xmm5
- movdqu %xmm5,48(%rsi)
- jmp L$ctr32_done
- .p2align 5
- L$ctr32_loop3:
- .byte 102,15,56,220,209
- leaq 16(%rcx),%rcx
- decl %eax
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- movups (%rcx),%xmm1
- jnz L$ctr32_loop3
- .byte 102,15,56,221,209
- .byte 102,15,56,221,217
- .byte 102,15,56,221,225
- movups (%rdi),%xmm10
- xorps %xmm10,%xmm2
- movups %xmm2,(%rsi)
- cmpq $2,%rdx
- jb L$ctr32_done
- movups 16(%rdi),%xmm11
- xorps %xmm11,%xmm3
- movups %xmm3,16(%rsi)
- je L$ctr32_done
- movups 32(%rdi),%xmm12
- xorps %xmm12,%xmm4
- movups %xmm4,32(%rsi)
- L$ctr32_done:
- xorps %xmm0,%xmm0
- xorl %ebp,%ebp
- pxor %xmm1,%xmm1
- pxor %xmm2,%xmm2
- pxor %xmm3,%xmm3
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- movaps %xmm0,0(%rsp)
- pxor %xmm8,%xmm8
- movaps %xmm0,16(%rsp)
- pxor %xmm9,%xmm9
- movaps %xmm0,32(%rsp)
- pxor %xmm10,%xmm10
- movaps %xmm0,48(%rsp)
- pxor %xmm11,%xmm11
- movaps %xmm0,64(%rsp)
- pxor %xmm12,%xmm12
- movaps %xmm0,80(%rsp)
- pxor %xmm13,%xmm13
- movaps %xmm0,96(%rsp)
- pxor %xmm14,%xmm14
- movaps %xmm0,112(%rsp)
- pxor %xmm15,%xmm15
- movq -8(%r11),%rbp
- leaq (%r11),%rsp
- L$ctr32_epilogue:
- .byte 0xf3,0xc3
- .globl _aes_hw_cbc_encrypt
- .private_extern _aes_hw_cbc_encrypt
- .p2align 4
- _aes_hw_cbc_encrypt:
- testq %rdx,%rdx
- jz L$cbc_ret
- movl 240(%rcx),%r10d
- movq %rcx,%r11
- testl %r9d,%r9d
- jz L$cbc_decrypt
- movups (%r8),%xmm2
- movl %r10d,%eax
- cmpq $16,%rdx
- jb L$cbc_enc_tail
- subq $16,%rdx
- jmp L$cbc_enc_loop
- .p2align 4
- L$cbc_enc_loop:
- movups (%rdi),%xmm3
- leaq 16(%rdi),%rdi
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm3
- leaq 32(%rcx),%rcx
- xorps %xmm3,%xmm2
- L$oop_enc1_6:
- .byte 102,15,56,220,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz L$oop_enc1_6
- .byte 102,15,56,221,209
- movl %r10d,%eax
- movq %r11,%rcx
- movups %xmm2,0(%rsi)
- leaq 16(%rsi),%rsi
- subq $16,%rdx
- jnc L$cbc_enc_loop
- addq $16,%rdx
- jnz L$cbc_enc_tail
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- movups %xmm2,(%r8)
- pxor %xmm2,%xmm2
- pxor %xmm3,%xmm3
- jmp L$cbc_ret
- L$cbc_enc_tail:
- movq %rdx,%rcx
- xchgq %rdi,%rsi
- .long 0x9066A4F3
- movl $16,%ecx
- subq %rdx,%rcx
- xorl %eax,%eax
- .long 0x9066AAF3
- leaq -16(%rdi),%rdi
- movl %r10d,%eax
- movq %rdi,%rsi
- movq %r11,%rcx
- xorq %rdx,%rdx
- jmp L$cbc_enc_loop
- .p2align 4
- L$cbc_decrypt:
- cmpq $16,%rdx
- jne L$cbc_decrypt_bulk
- movdqu (%rdi),%xmm2
- movdqu (%r8),%xmm3
- movdqa %xmm2,%xmm4
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- L$oop_dec1_7:
- .byte 102,15,56,222,209
- decl %r10d
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz L$oop_dec1_7
- .byte 102,15,56,223,209
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- movdqu %xmm4,(%r8)
- xorps %xmm3,%xmm2
- pxor %xmm3,%xmm3
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- jmp L$cbc_ret
- .p2align 4
- L$cbc_decrypt_bulk:
- leaq (%rsp),%r11
- pushq %rbp
- subq $16,%rsp
- andq $-16,%rsp
- movq %rcx,%rbp
- movups (%r8),%xmm10
- movl %r10d,%eax
- cmpq $0x50,%rdx
- jbe L$cbc_dec_tail
- movups (%rcx),%xmm0
- movdqu 0(%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqa %xmm2,%xmm11
- movdqu 32(%rdi),%xmm4
- movdqa %xmm3,%xmm12
- movdqu 48(%rdi),%xmm5
- movdqa %xmm4,%xmm13
- movdqu 64(%rdi),%xmm6
- movdqa %xmm5,%xmm14
- movdqu 80(%rdi),%xmm7
- movdqa %xmm6,%xmm15
- leaq _OPENSSL_ia32cap_P(%rip),%r9
- movl 4(%r9),%r9d
- cmpq $0x70,%rdx
- jbe L$cbc_dec_six_or_seven
- andl $71303168,%r9d
- subq $0x50,%rdx
- cmpl $4194304,%r9d
- je L$cbc_dec_loop6_enter
- subq $0x20,%rdx
- leaq 112(%rcx),%rcx
- jmp L$cbc_dec_loop8_enter
- .p2align 4
- L$cbc_dec_loop8:
- movups %xmm9,(%rsi)
- leaq 16(%rsi),%rsi
- L$cbc_dec_loop8_enter:
- movdqu 96(%rdi),%xmm8
- pxor %xmm0,%xmm2
- movdqu 112(%rdi),%xmm9
- pxor %xmm0,%xmm3
- movups 16-112(%rcx),%xmm1
- pxor %xmm0,%xmm4
- movq $-1,%rbp
- cmpq $0x70,%rdx
- pxor %xmm0,%xmm5
- pxor %xmm0,%xmm6
- pxor %xmm0,%xmm7
- pxor %xmm0,%xmm8
- .byte 102,15,56,222,209
- pxor %xmm0,%xmm9
- movups 32-112(%rcx),%xmm0
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- adcq $0,%rbp
- andq $128,%rbp
- .byte 102,68,15,56,222,201
- addq %rdi,%rbp
- movups 48-112(%rcx),%xmm1
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups 64-112(%rcx),%xmm0
- nop
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- movups 80-112(%rcx),%xmm1
- nop
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups 96-112(%rcx),%xmm0
- nop
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- movups 112-112(%rcx),%xmm1
- nop
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups 128-112(%rcx),%xmm0
- nop
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- movups 144-112(%rcx),%xmm1
- cmpl $11,%eax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups 160-112(%rcx),%xmm0
- jb L$cbc_dec_done
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- movups 176-112(%rcx),%xmm1
- nop
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups 192-112(%rcx),%xmm0
- je L$cbc_dec_done
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- movups 208-112(%rcx),%xmm1
- nop
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups 224-112(%rcx),%xmm0
- jmp L$cbc_dec_done
- .p2align 4
- L$cbc_dec_done:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- pxor %xmm0,%xmm10
- pxor %xmm0,%xmm11
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- pxor %xmm0,%xmm12
- pxor %xmm0,%xmm13
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- pxor %xmm0,%xmm14
- pxor %xmm0,%xmm15
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- movdqu 80(%rdi),%xmm1
- .byte 102,65,15,56,223,210
- movdqu 96(%rdi),%xmm10
- pxor %xmm0,%xmm1
- .byte 102,65,15,56,223,219
- pxor %xmm0,%xmm10
- movdqu 112(%rdi),%xmm0
- .byte 102,65,15,56,223,228
- leaq 128(%rdi),%rdi
- movdqu 0(%rbp),%xmm11
- .byte 102,65,15,56,223,237
- .byte 102,65,15,56,223,246
- movdqu 16(%rbp),%xmm12
- movdqu 32(%rbp),%xmm13
- .byte 102,65,15,56,223,255
- .byte 102,68,15,56,223,193
- movdqu 48(%rbp),%xmm14
- movdqu 64(%rbp),%xmm15
- .byte 102,69,15,56,223,202
- movdqa %xmm0,%xmm10
- movdqu 80(%rbp),%xmm1
- movups -112(%rcx),%xmm0
- movups %xmm2,(%rsi)
- movdqa %xmm11,%xmm2
- movups %xmm3,16(%rsi)
- movdqa %xmm12,%xmm3
- movups %xmm4,32(%rsi)
- movdqa %xmm13,%xmm4
- movups %xmm5,48(%rsi)
- movdqa %xmm14,%xmm5
- movups %xmm6,64(%rsi)
- movdqa %xmm15,%xmm6
- movups %xmm7,80(%rsi)
- movdqa %xmm1,%xmm7
- movups %xmm8,96(%rsi)
- leaq 112(%rsi),%rsi
- subq $0x80,%rdx
- ja L$cbc_dec_loop8
- movaps %xmm9,%xmm2
- leaq -112(%rcx),%rcx
- addq $0x70,%rdx
- jle L$cbc_dec_clear_tail_collected
- movups %xmm9,(%rsi)
- leaq 16(%rsi),%rsi
- cmpq $0x50,%rdx
- jbe L$cbc_dec_tail
- movaps %xmm11,%xmm2
- L$cbc_dec_six_or_seven:
- cmpq $0x60,%rdx
- ja L$cbc_dec_seven
- movaps %xmm7,%xmm8
- call _aesni_decrypt6
- pxor %xmm10,%xmm2
- movaps %xmm8,%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- pxor %xmm13,%xmm5
- movdqu %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- pxor %xmm14,%xmm6
- movdqu %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- pxor %xmm15,%xmm7
- movdqu %xmm6,64(%rsi)
- pxor %xmm6,%xmm6
- leaq 80(%rsi),%rsi
- movdqa %xmm7,%xmm2
- pxor %xmm7,%xmm7
- jmp L$cbc_dec_tail_collected
- .p2align 4
- L$cbc_dec_seven:
- movups 96(%rdi),%xmm8
- xorps %xmm9,%xmm9
- call _aesni_decrypt8
- movups 80(%rdi),%xmm9
- pxor %xmm10,%xmm2
- movups 96(%rdi),%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- pxor %xmm13,%xmm5
- movdqu %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- pxor %xmm14,%xmm6
- movdqu %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- pxor %xmm15,%xmm7
- movdqu %xmm6,64(%rsi)
- pxor %xmm6,%xmm6
- pxor %xmm9,%xmm8
- movdqu %xmm7,80(%rsi)
- pxor %xmm7,%xmm7
- leaq 96(%rsi),%rsi
- movdqa %xmm8,%xmm2
- pxor %xmm8,%xmm8
- pxor %xmm9,%xmm9
- jmp L$cbc_dec_tail_collected
- .p2align 4
- L$cbc_dec_loop6:
- movups %xmm7,(%rsi)
- leaq 16(%rsi),%rsi
- movdqu 0(%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqa %xmm2,%xmm11
- movdqu 32(%rdi),%xmm4
- movdqa %xmm3,%xmm12
- movdqu 48(%rdi),%xmm5
- movdqa %xmm4,%xmm13
- movdqu 64(%rdi),%xmm6
- movdqa %xmm5,%xmm14
- movdqu 80(%rdi),%xmm7
- movdqa %xmm6,%xmm15
- L$cbc_dec_loop6_enter:
- leaq 96(%rdi),%rdi
- movdqa %xmm7,%xmm8
- call _aesni_decrypt6
- pxor %xmm10,%xmm2
- movdqa %xmm8,%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm13,%xmm5
- movdqu %xmm4,32(%rsi)
- pxor %xmm14,%xmm6
- movq %rbp,%rcx
- movdqu %xmm5,48(%rsi)
- pxor %xmm15,%xmm7
- movl %r10d,%eax
- movdqu %xmm6,64(%rsi)
- leaq 80(%rsi),%rsi
- subq $0x60,%rdx
- ja L$cbc_dec_loop6
- movdqa %xmm7,%xmm2
- addq $0x50,%rdx
- jle L$cbc_dec_clear_tail_collected
- movups %xmm7,(%rsi)
- leaq 16(%rsi),%rsi
- L$cbc_dec_tail:
- movups (%rdi),%xmm2
- subq $0x10,%rdx
- jbe L$cbc_dec_one
- movups 16(%rdi),%xmm3
- movaps %xmm2,%xmm11
- subq $0x10,%rdx
- jbe L$cbc_dec_two
- movups 32(%rdi),%xmm4
- movaps %xmm3,%xmm12
- subq $0x10,%rdx
- jbe L$cbc_dec_three
- movups 48(%rdi),%xmm5
- movaps %xmm4,%xmm13
- subq $0x10,%rdx
- jbe L$cbc_dec_four
- movups 64(%rdi),%xmm6
- movaps %xmm5,%xmm14
- movaps %xmm6,%xmm15
- xorps %xmm7,%xmm7
- call _aesni_decrypt6
- pxor %xmm10,%xmm2
- movaps %xmm15,%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- pxor %xmm13,%xmm5
- movdqu %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- pxor %xmm14,%xmm6
- movdqu %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- leaq 64(%rsi),%rsi
- movdqa %xmm6,%xmm2
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- subq $0x10,%rdx
- jmp L$cbc_dec_tail_collected
- .p2align 4
- L$cbc_dec_one:
- movaps %xmm2,%xmm11
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- L$oop_dec1_8:
- .byte 102,15,56,222,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz L$oop_dec1_8
- .byte 102,15,56,223,209
- xorps %xmm10,%xmm2
- movaps %xmm11,%xmm10
- jmp L$cbc_dec_tail_collected
- .p2align 4
- L$cbc_dec_two:
- movaps %xmm3,%xmm12
- call _aesni_decrypt2
- pxor %xmm10,%xmm2
- movaps %xmm12,%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- movdqa %xmm3,%xmm2
- pxor %xmm3,%xmm3
- leaq 16(%rsi),%rsi
- jmp L$cbc_dec_tail_collected
- .p2align 4
- L$cbc_dec_three:
- movaps %xmm4,%xmm13
- call _aesni_decrypt3
- pxor %xmm10,%xmm2
- movaps %xmm13,%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movdqa %xmm4,%xmm2
- pxor %xmm4,%xmm4
- leaq 32(%rsi),%rsi
- jmp L$cbc_dec_tail_collected
- .p2align 4
- L$cbc_dec_four:
- movaps %xmm5,%xmm14
- call _aesni_decrypt4
- pxor %xmm10,%xmm2
- movaps %xmm14,%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- pxor %xmm13,%xmm5
- movdqu %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- movdqa %xmm5,%xmm2
- pxor %xmm5,%xmm5
- leaq 48(%rsi),%rsi
- jmp L$cbc_dec_tail_collected
- .p2align 4
- L$cbc_dec_clear_tail_collected:
- pxor %xmm3,%xmm3
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- pxor %xmm8,%xmm8
- pxor %xmm9,%xmm9
- L$cbc_dec_tail_collected:
- movups %xmm10,(%r8)
- andq $15,%rdx
- jnz L$cbc_dec_tail_partial
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- jmp L$cbc_dec_ret
- .p2align 4
- L$cbc_dec_tail_partial:
- movaps %xmm2,(%rsp)
- pxor %xmm2,%xmm2
- movq $16,%rcx
- movq %rsi,%rdi
- subq %rdx,%rcx
- leaq (%rsp),%rsi
- .long 0x9066A4F3
- movdqa %xmm2,(%rsp)
- L$cbc_dec_ret:
- xorps %xmm0,%xmm0
- pxor %xmm1,%xmm1
- movq -8(%r11),%rbp
- leaq (%r11),%rsp
- L$cbc_ret:
- .byte 0xf3,0xc3
- .globl _aes_hw_set_decrypt_key
- .private_extern _aes_hw_set_decrypt_key
- .p2align 4
- _aes_hw_set_decrypt_key:
- .byte 0x48,0x83,0xEC,0x08
- call __aesni_set_encrypt_key
- shll $4,%esi
- testl %eax,%eax
- jnz L$dec_key_ret
- leaq 16(%rdx,%rsi,1),%rdi
- movups (%rdx),%xmm0
- movups (%rdi),%xmm1
- movups %xmm0,(%rdi)
- movups %xmm1,(%rdx)
- leaq 16(%rdx),%rdx
- leaq -16(%rdi),%rdi
- L$dec_key_inverse:
- movups (%rdx),%xmm0
- movups (%rdi),%xmm1
- .byte 102,15,56,219,192
- .byte 102,15,56,219,201
- leaq 16(%rdx),%rdx
- leaq -16(%rdi),%rdi
- movups %xmm0,16(%rdi)
- movups %xmm1,-16(%rdx)
- cmpq %rdx,%rdi
- ja L$dec_key_inverse
- movups (%rdx),%xmm0
- .byte 102,15,56,219,192
- pxor %xmm1,%xmm1
- movups %xmm0,(%rdi)
- pxor %xmm0,%xmm0
- L$dec_key_ret:
- addq $8,%rsp
- .byte 0xf3,0xc3
- L$SEH_end_set_decrypt_key:
- .globl _aes_hw_set_encrypt_key
- .private_extern _aes_hw_set_encrypt_key
- .p2align 4
- _aes_hw_set_encrypt_key:
- __aesni_set_encrypt_key:
- #ifdef BORINGSSL_DISPATCH_TEST
- movb $1,_BORINGSSL_function_hit+3(%rip)
- #endif
- .byte 0x48,0x83,0xEC,0x08
- movq $-1,%rax
- testq %rdi,%rdi
- jz L$enc_key_ret
- testq %rdx,%rdx
- jz L$enc_key_ret
- movups (%rdi),%xmm0
- xorps %xmm4,%xmm4
- leaq _OPENSSL_ia32cap_P(%rip),%r10
- movl 4(%r10),%r10d
- andl $268437504,%r10d
- leaq 16(%rdx),%rax
- cmpl $256,%esi
- je L$14rounds
- cmpl $192,%esi
- je L$12rounds
- cmpl $128,%esi
- jne L$bad_keybits
- L$10rounds:
- movl $9,%esi
- cmpl $268435456,%r10d
- je L$10rounds_alt
- movups %xmm0,(%rdx)
- .byte 102,15,58,223,200,1
- call L$key_expansion_128_cold
- .byte 102,15,58,223,200,2
- call L$key_expansion_128
- .byte 102,15,58,223,200,4
- call L$key_expansion_128
- .byte 102,15,58,223,200,8
- call L$key_expansion_128
- .byte 102,15,58,223,200,16
- call L$key_expansion_128
- .byte 102,15,58,223,200,32
- call L$key_expansion_128
- .byte 102,15,58,223,200,64
- call L$key_expansion_128
- .byte 102,15,58,223,200,128
- call L$key_expansion_128
- .byte 102,15,58,223,200,27
- call L$key_expansion_128
- .byte 102,15,58,223,200,54
- call L$key_expansion_128
- movups %xmm0,(%rax)
- movl %esi,80(%rax)
- xorl %eax,%eax
- jmp L$enc_key_ret
- .p2align 4
- L$10rounds_alt:
- movdqa L$key_rotate(%rip),%xmm5
- movl $8,%r10d
- movdqa L$key_rcon1(%rip),%xmm4
- movdqa %xmm0,%xmm2
- movdqu %xmm0,(%rdx)
- jmp L$oop_key128
- .p2align 4
- L$oop_key128:
- .byte 102,15,56,0,197
- .byte 102,15,56,221,196
- pslld $1,%xmm4
- leaq 16(%rax),%rax
- movdqa %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm3,%xmm2
- pxor %xmm2,%xmm0
- movdqu %xmm0,-16(%rax)
- movdqa %xmm0,%xmm2
- decl %r10d
- jnz L$oop_key128
- movdqa L$key_rcon1b(%rip),%xmm4
- .byte 102,15,56,0,197
- .byte 102,15,56,221,196
- pslld $1,%xmm4
- movdqa %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm3,%xmm2
- pxor %xmm2,%xmm0
- movdqu %xmm0,(%rax)
- movdqa %xmm0,%xmm2
- .byte 102,15,56,0,197
- .byte 102,15,56,221,196
- movdqa %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm3,%xmm2
- pxor %xmm2,%xmm0
- movdqu %xmm0,16(%rax)
- movl %esi,96(%rax)
- xorl %eax,%eax
- jmp L$enc_key_ret
- .p2align 4
- L$12rounds:
- movq 16(%rdi),%xmm2
- movl $11,%esi
- cmpl $268435456,%r10d
- je L$12rounds_alt
- movups %xmm0,(%rdx)
- .byte 102,15,58,223,202,1
- call L$key_expansion_192a_cold
- .byte 102,15,58,223,202,2
- call L$key_expansion_192b
- .byte 102,15,58,223,202,4
- call L$key_expansion_192a
- .byte 102,15,58,223,202,8
- call L$key_expansion_192b
- .byte 102,15,58,223,202,16
- call L$key_expansion_192a
- .byte 102,15,58,223,202,32
- call L$key_expansion_192b
- .byte 102,15,58,223,202,64
- call L$key_expansion_192a
- .byte 102,15,58,223,202,128
- call L$key_expansion_192b
- movups %xmm0,(%rax)
- movl %esi,48(%rax)
- xorq %rax,%rax
- jmp L$enc_key_ret
- .p2align 4
- L$12rounds_alt:
- movdqa L$key_rotate192(%rip),%xmm5
- movdqa L$key_rcon1(%rip),%xmm4
- movl $8,%r10d
- movdqu %xmm0,(%rdx)
- jmp L$oop_key192
- .p2align 4
- L$oop_key192:
- movq %xmm2,0(%rax)
- movdqa %xmm2,%xmm1
- .byte 102,15,56,0,213
- .byte 102,15,56,221,212
- pslld $1,%xmm4
- leaq 24(%rax),%rax
- movdqa %xmm0,%xmm3
- pslldq $4,%xmm0
- pxor %xmm0,%xmm3
- pslldq $4,%xmm0
- pxor %xmm0,%xmm3
- pslldq $4,%xmm0
- pxor %xmm3,%xmm0
- pshufd $0xff,%xmm0,%xmm3
- pxor %xmm1,%xmm3
- pslldq $4,%xmm1
- pxor %xmm1,%xmm3
- pxor %xmm2,%xmm0
- pxor %xmm3,%xmm2
- movdqu %xmm0,-16(%rax)
- decl %r10d
- jnz L$oop_key192
- movl %esi,32(%rax)
- xorl %eax,%eax
- jmp L$enc_key_ret
- .p2align 4
- L$14rounds:
- movups 16(%rdi),%xmm2
- movl $13,%esi
- leaq 16(%rax),%rax
- cmpl $268435456,%r10d
- je L$14rounds_alt
- movups %xmm0,(%rdx)
- movups %xmm2,16(%rdx)
- .byte 102,15,58,223,202,1
- call L$key_expansion_256a_cold
- .byte 102,15,58,223,200,1
- call L$key_expansion_256b
- .byte 102,15,58,223,202,2
- call L$key_expansion_256a
- .byte 102,15,58,223,200,2
- call L$key_expansion_256b
- .byte 102,15,58,223,202,4
- call L$key_expansion_256a
- .byte 102,15,58,223,200,4
- call L$key_expansion_256b
- .byte 102,15,58,223,202,8
- call L$key_expansion_256a
- .byte 102,15,58,223,200,8
- call L$key_expansion_256b
- .byte 102,15,58,223,202,16
- call L$key_expansion_256a
- .byte 102,15,58,223,200,16
- call L$key_expansion_256b
- .byte 102,15,58,223,202,32
- call L$key_expansion_256a
- .byte 102,15,58,223,200,32
- call L$key_expansion_256b
- .byte 102,15,58,223,202,64
- call L$key_expansion_256a
- movups %xmm0,(%rax)
- movl %esi,16(%rax)
- xorq %rax,%rax
- jmp L$enc_key_ret
- .p2align 4
- L$14rounds_alt:
- movdqa L$key_rotate(%rip),%xmm5
- movdqa L$key_rcon1(%rip),%xmm4
- movl $7,%r10d
- movdqu %xmm0,0(%rdx)
- movdqa %xmm2,%xmm1
- movdqu %xmm2,16(%rdx)
- jmp L$oop_key256
- .p2align 4
- L$oop_key256:
- .byte 102,15,56,0,213
- .byte 102,15,56,221,212
- movdqa %xmm0,%xmm3
- pslldq $4,%xmm0
- pxor %xmm0,%xmm3
- pslldq $4,%xmm0
- pxor %xmm0,%xmm3
- pslldq $4,%xmm0
- pxor %xmm3,%xmm0
- pslld $1,%xmm4
- pxor %xmm2,%xmm0
- movdqu %xmm0,(%rax)
- decl %r10d
- jz L$done_key256
- pshufd $0xff,%xmm0,%xmm2
- pxor %xmm3,%xmm3
- .byte 102,15,56,221,211
- movdqa %xmm1,%xmm3
- pslldq $4,%xmm1
- pxor %xmm1,%xmm3
- pslldq $4,%xmm1
- pxor %xmm1,%xmm3
- pslldq $4,%xmm1
- pxor %xmm3,%xmm1
- pxor %xmm1,%xmm2
- movdqu %xmm2,16(%rax)
- leaq 32(%rax),%rax
- movdqa %xmm2,%xmm1
- jmp L$oop_key256
- L$done_key256:
- movl %esi,16(%rax)
- xorl %eax,%eax
- jmp L$enc_key_ret
- .p2align 4
- L$bad_keybits:
- movq $-2,%rax
- L$enc_key_ret:
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- pxor %xmm2,%xmm2
- pxor %xmm3,%xmm3
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- addq $8,%rsp
- .byte 0xf3,0xc3
- L$SEH_end_set_encrypt_key:
- .p2align 4
- L$key_expansion_128:
- movups %xmm0,(%rax)
- leaq 16(%rax),%rax
- L$key_expansion_128_cold:
- shufps $16,%xmm0,%xmm4
- xorps %xmm4,%xmm0
- shufps $140,%xmm0,%xmm4
- xorps %xmm4,%xmm0
- shufps $255,%xmm1,%xmm1
- xorps %xmm1,%xmm0
- .byte 0xf3,0xc3
- .p2align 4
- L$key_expansion_192a:
- movups %xmm0,(%rax)
- leaq 16(%rax),%rax
- L$key_expansion_192a_cold:
- movaps %xmm2,%xmm5
- L$key_expansion_192b_warm:
- shufps $16,%xmm0,%xmm4
- movdqa %xmm2,%xmm3
- xorps %xmm4,%xmm0
- shufps $140,%xmm0,%xmm4
- pslldq $4,%xmm3
- xorps %xmm4,%xmm0
- pshufd $85,%xmm1,%xmm1
- pxor %xmm3,%xmm2
- pxor %xmm1,%xmm0
- pshufd $255,%xmm0,%xmm3
- pxor %xmm3,%xmm2
- .byte 0xf3,0xc3
- .p2align 4
- L$key_expansion_192b:
- movaps %xmm0,%xmm3
- shufps $68,%xmm0,%xmm5
- movups %xmm5,(%rax)
- shufps $78,%xmm2,%xmm3
- movups %xmm3,16(%rax)
- leaq 32(%rax),%rax
- jmp L$key_expansion_192b_warm
- .p2align 4
- L$key_expansion_256a:
- movups %xmm2,(%rax)
- leaq 16(%rax),%rax
- L$key_expansion_256a_cold:
- shufps $16,%xmm0,%xmm4
- xorps %xmm4,%xmm0
- shufps $140,%xmm0,%xmm4
- xorps %xmm4,%xmm0
- shufps $255,%xmm1,%xmm1
- xorps %xmm1,%xmm0
- .byte 0xf3,0xc3
- .p2align 4
- L$key_expansion_256b:
- movups %xmm0,(%rax)
- leaq 16(%rax),%rax
- shufps $16,%xmm2,%xmm4
- xorps %xmm4,%xmm2
- shufps $140,%xmm2,%xmm4
- xorps %xmm4,%xmm2
- shufps $170,%xmm1,%xmm1
- xorps %xmm1,%xmm2
- .byte 0xf3,0xc3
- .p2align 6
- L$bswap_mask:
- .byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
- L$increment32:
- .long 6,6,6,0
- L$increment64:
- .long 1,0,0,0
- L$xts_magic:
- .long 0x87,0,1,0
- L$increment1:
- .byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
- L$key_rotate:
- .long 0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d
- L$key_rotate192:
- .long 0x04070605,0x04070605,0x04070605,0x04070605
- L$key_rcon1:
- .long 1,1,1,1
- L$key_rcon1b:
- .long 0x1b,0x1b,0x1b,0x1b
- .byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69,83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
- .p2align 6
- #endif
|