- // This file is generated from a similarly-named Perl script in the BoringSSL
- // source tree. Do not edit by hand.
- #if defined(__has_feature)
- #if __has_feature(memory_sanitizer) && !defined(OPENSSL_NO_ASM)
- #define OPENSSL_NO_ASM
- #endif
- #endif
- #if defined(__x86_64__) && !defined(OPENSSL_NO_ASM)
- #if defined(BORINGSSL_PREFIX)
- #include <boringssl_prefix_symbols_asm.h>
- #endif
- .text
- .globl _sha1_block_data_order
- .private_extern _sha1_block_data_order
- .p2align 4
- _sha1_block_data_order:
- leaq _OPENSSL_ia32cap_P(%rip),%r10
- movl 0(%r10),%r9d
- movl 4(%r10),%r8d
- movl 8(%r10),%r10d
- testl $512,%r8d
- jz L$ialu
- testl $536870912,%r10d
- jnz _shaext_shortcut
- andl $296,%r10d
- cmpl $296,%r10d
- je _avx2_shortcut
- andl $268435456,%r8d
- andl $1073741824,%r9d
- orl %r9d,%r8d
- cmpl $1342177280,%r8d
- je _avx_shortcut
- jmp _ssse3_shortcut
- .p2align 4
- L$ialu:
- movq %rsp,%rax
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- movq %rdi,%r8
- subq $72,%rsp
- movq %rsi,%r9
- andq $-64,%rsp
- movq %rdx,%r10
- movq %rax,64(%rsp)
- L$prologue:
- movl 0(%r8),%esi
- movl 4(%r8),%edi
- movl 8(%r8),%r11d
- movl 12(%r8),%r12d
- movl 16(%r8),%r13d
- jmp L$loop
- .p2align 4
- L$loop:
- movl 0(%r9),%edx
- bswapl %edx
- movl 4(%r9),%ebp
- movl %r12d,%eax
- movl %edx,0(%rsp)
- movl %esi,%ecx
- bswapl %ebp
- xorl %r11d,%eax
- roll $5,%ecx
- andl %edi,%eax
- leal 1518500249(%rdx,%r13,1),%r13d
- addl %ecx,%r13d
- xorl %r12d,%eax
- roll $30,%edi
- addl %eax,%r13d
- movl 8(%r9),%r14d
- movl %r11d,%eax
- movl %ebp,4(%rsp)
- movl %r13d,%ecx
- bswapl %r14d
- xorl %edi,%eax
- roll $5,%ecx
- andl %esi,%eax
- leal 1518500249(%rbp,%r12,1),%r12d
- addl %ecx,%r12d
- xorl %r11d,%eax
- roll $30,%esi
- addl %eax,%r12d
- movl 12(%r9),%edx
- movl %edi,%eax
- movl %r14d,8(%rsp)
- movl %r12d,%ecx
- bswapl %edx
- xorl %esi,%eax
- roll $5,%ecx
- andl %r13d,%eax
- leal 1518500249(%r14,%r11,1),%r11d
- addl %ecx,%r11d
- xorl %edi,%eax
- roll $30,%r13d
- addl %eax,%r11d
- movl 16(%r9),%ebp
- movl %esi,%eax
- movl %edx,12(%rsp)
- movl %r11d,%ecx
- bswapl %ebp
- xorl %r13d,%eax
- roll $5,%ecx
- andl %r12d,%eax
- leal 1518500249(%rdx,%rdi,1),%edi
- addl %ecx,%edi
- xorl %esi,%eax
- roll $30,%r12d
- addl %eax,%edi
- movl 20(%r9),%r14d
- movl %r13d,%eax
- movl %ebp,16(%rsp)
- movl %edi,%ecx
- bswapl %r14d
- xorl %r12d,%eax
- roll $5,%ecx
- andl %r11d,%eax
- leal 1518500249(%rbp,%rsi,1),%esi
- addl %ecx,%esi
- xorl %r13d,%eax
- roll $30,%r11d
- addl %eax,%esi
- movl 24(%r9),%edx
- movl %r12d,%eax
- movl %r14d,20(%rsp)
- movl %esi,%ecx
- bswapl %edx
- xorl %r11d,%eax
- roll $5,%ecx
- andl %edi,%eax
- leal 1518500249(%r14,%r13,1),%r13d
- addl %ecx,%r13d
- xorl %r12d,%eax
- roll $30,%edi
- addl %eax,%r13d
- movl 28(%r9),%ebp
- movl %r11d,%eax
- movl %edx,24(%rsp)
- movl %r13d,%ecx
- bswapl %ebp
- xorl %edi,%eax
- roll $5,%ecx
- andl %esi,%eax
- leal 1518500249(%rdx,%r12,1),%r12d
- addl %ecx,%r12d
- xorl %r11d,%eax
- roll $30,%esi
- addl %eax,%r12d
- movl 32(%r9),%r14d
- movl %edi,%eax
- movl %ebp,28(%rsp)
- movl %r12d,%ecx
- bswapl %r14d
- xorl %esi,%eax
- roll $5,%ecx
- andl %r13d,%eax
- leal 1518500249(%rbp,%r11,1),%r11d
- addl %ecx,%r11d
- xorl %edi,%eax
- roll $30,%r13d
- addl %eax,%r11d
- movl 36(%r9),%edx
- movl %esi,%eax
- movl %r14d,32(%rsp)
- movl %r11d,%ecx
- bswapl %edx
- xorl %r13d,%eax
- roll $5,%ecx
- andl %r12d,%eax
- leal 1518500249(%r14,%rdi,1),%edi
- addl %ecx,%edi
- xorl %esi,%eax
- roll $30,%r12d
- addl %eax,%edi
- movl 40(%r9),%ebp
- movl %r13d,%eax
- movl %edx,36(%rsp)
- movl %edi,%ecx
- bswapl %ebp
- xorl %r12d,%eax
- roll $5,%ecx
- andl %r11d,%eax
- leal 1518500249(%rdx,%rsi,1),%esi
- addl %ecx,%esi
- xorl %r13d,%eax
- roll $30,%r11d
- addl %eax,%esi
- movl 44(%r9),%r14d
- movl %r12d,%eax
- movl %ebp,40(%rsp)
- movl %esi,%ecx
- bswapl %r14d
- xorl %r11d,%eax
- roll $5,%ecx
- andl %edi,%eax
- leal 1518500249(%rbp,%r13,1),%r13d
- addl %ecx,%r13d
- xorl %r12d,%eax
- roll $30,%edi
- addl %eax,%r13d
- movl 48(%r9),%edx
- movl %r11d,%eax
- movl %r14d,44(%rsp)
- movl %r13d,%ecx
- bswapl %edx
- xorl %edi,%eax
- roll $5,%ecx
- andl %esi,%eax
- leal 1518500249(%r14,%r12,1),%r12d
- addl %ecx,%r12d
- xorl %r11d,%eax
- roll $30,%esi
- addl %eax,%r12d
- movl 52(%r9),%ebp
- movl %edi,%eax
- movl %edx,48(%rsp)
- movl %r12d,%ecx
- bswapl %ebp
- xorl %esi,%eax
- roll $5,%ecx
- andl %r13d,%eax
- leal 1518500249(%rdx,%r11,1),%r11d
- addl %ecx,%r11d
- xorl %edi,%eax
- roll $30,%r13d
- addl %eax,%r11d
- movl 56(%r9),%r14d
- movl %esi,%eax
- movl %ebp,52(%rsp)
- movl %r11d,%ecx
- bswapl %r14d
- xorl %r13d,%eax
- roll $5,%ecx
- andl %r12d,%eax
- leal 1518500249(%rbp,%rdi,1),%edi
- addl %ecx,%edi
- xorl %esi,%eax
- roll $30,%r12d
- addl %eax,%edi
- movl 60(%r9),%edx
- movl %r13d,%eax
- movl %r14d,56(%rsp)
- movl %edi,%ecx
- bswapl %edx
- xorl %r12d,%eax
- roll $5,%ecx
- andl %r11d,%eax
- leal 1518500249(%r14,%rsi,1),%esi
- addl %ecx,%esi
- xorl %r13d,%eax
- roll $30,%r11d
- addl %eax,%esi
- xorl 0(%rsp),%ebp
- movl %r12d,%eax
- movl %edx,60(%rsp)
- movl %esi,%ecx
- xorl 8(%rsp),%ebp
- xorl %r11d,%eax
- roll $5,%ecx
- xorl 32(%rsp),%ebp
- andl %edi,%eax
- leal 1518500249(%rdx,%r13,1),%r13d
- roll $30,%edi
- xorl %r12d,%eax
- addl %ecx,%r13d
- roll $1,%ebp
- addl %eax,%r13d
- xorl 4(%rsp),%r14d
- movl %r11d,%eax
- movl %ebp,0(%rsp)
- movl %r13d,%ecx
- xorl 12(%rsp),%r14d
- xorl %edi,%eax
- roll $5,%ecx
- xorl 36(%rsp),%r14d
- andl %esi,%eax
- leal 1518500249(%rbp,%r12,1),%r12d
- roll $30,%esi
- xorl %r11d,%eax
- addl %ecx,%r12d
- roll $1,%r14d
- addl %eax,%r12d
- xorl 8(%rsp),%edx
- movl %edi,%eax
- movl %r14d,4(%rsp)
- movl %r12d,%ecx
- xorl 16(%rsp),%edx
- xorl %esi,%eax
- roll $5,%ecx
- xorl 40(%rsp),%edx
- andl %r13d,%eax
- leal 1518500249(%r14,%r11,1),%r11d
- roll $30,%r13d
- xorl %edi,%eax
- addl %ecx,%r11d
- roll $1,%edx
- addl %eax,%r11d
- xorl 12(%rsp),%ebp
- movl %esi,%eax
- movl %edx,8(%rsp)
- movl %r11d,%ecx
- xorl 20(%rsp),%ebp
- xorl %r13d,%eax
- roll $5,%ecx
- xorl 44(%rsp),%ebp
- andl %r12d,%eax
- leal 1518500249(%rdx,%rdi,1),%edi
- roll $30,%r12d
- xorl %esi,%eax
- addl %ecx,%edi
- roll $1,%ebp
- addl %eax,%edi
- xorl 16(%rsp),%r14d
- movl %r13d,%eax
- movl %ebp,12(%rsp)
- movl %edi,%ecx
- xorl 24(%rsp),%r14d
- xorl %r12d,%eax
- roll $5,%ecx
- xorl 48(%rsp),%r14d
- andl %r11d,%eax
- leal 1518500249(%rbp,%rsi,1),%esi
- roll $30,%r11d
- xorl %r13d,%eax
- addl %ecx,%esi
- roll $1,%r14d
- addl %eax,%esi
- xorl 20(%rsp),%edx
- movl %edi,%eax
- movl %r14d,16(%rsp)
- movl %esi,%ecx
- xorl 28(%rsp),%edx
- xorl %r12d,%eax
- roll $5,%ecx
- xorl 52(%rsp),%edx
- leal 1859775393(%r14,%r13,1),%r13d
- xorl %r11d,%eax
- addl %ecx,%r13d
- roll $30,%edi
- addl %eax,%r13d
- roll $1,%edx
- xorl 24(%rsp),%ebp
- movl %esi,%eax
- movl %edx,20(%rsp)
- movl %r13d,%ecx
- xorl 32(%rsp),%ebp
- xorl %r11d,%eax
- roll $5,%ecx
- xorl 56(%rsp),%ebp
- leal 1859775393(%rdx,%r12,1),%r12d
- xorl %edi,%eax
- addl %ecx,%r12d
- roll $30,%esi
- addl %eax,%r12d
- roll $1,%ebp
- xorl 28(%rsp),%r14d
- movl %r13d,%eax
- movl %ebp,24(%rsp)
- movl %r12d,%ecx
- xorl 36(%rsp),%r14d
- xorl %edi,%eax
- roll $5,%ecx
- xorl 60(%rsp),%r14d
- leal 1859775393(%rbp,%r11,1),%r11d
- xorl %esi,%eax
- addl %ecx,%r11d
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%r14d
- xorl 32(%rsp),%edx
- movl %r12d,%eax
- movl %r14d,28(%rsp)
- movl %r11d,%ecx
- xorl 40(%rsp),%edx
- xorl %esi,%eax
- roll $5,%ecx
- xorl 0(%rsp),%edx
- leal 1859775393(%r14,%rdi,1),%edi
- xorl %r13d,%eax
- addl %ecx,%edi
- roll $30,%r12d
- addl %eax,%edi
- roll $1,%edx
- xorl 36(%rsp),%ebp
- movl %r11d,%eax
- movl %edx,32(%rsp)
- movl %edi,%ecx
- xorl 44(%rsp),%ebp
- xorl %r13d,%eax
- roll $5,%ecx
- xorl 4(%rsp),%ebp
- leal 1859775393(%rdx,%rsi,1),%esi
- xorl %r12d,%eax
- addl %ecx,%esi
- roll $30,%r11d
- addl %eax,%esi
- roll $1,%ebp
- xorl 40(%rsp),%r14d
- movl %edi,%eax
- movl %ebp,36(%rsp)
- movl %esi,%ecx
- xorl 48(%rsp),%r14d
- xorl %r12d,%eax
- roll $5,%ecx
- xorl 8(%rsp),%r14d
- leal 1859775393(%rbp,%r13,1),%r13d
- xorl %r11d,%eax
- addl %ecx,%r13d
- roll $30,%edi
- addl %eax,%r13d
- roll $1,%r14d
- xorl 44(%rsp),%edx
- movl %esi,%eax
- movl %r14d,40(%rsp)
- movl %r13d,%ecx
- xorl 52(%rsp),%edx
- xorl %r11d,%eax
- roll $5,%ecx
- xorl 12(%rsp),%edx
- leal 1859775393(%r14,%r12,1),%r12d
- xorl %edi,%eax
- addl %ecx,%r12d
- roll $30,%esi
- addl %eax,%r12d
- roll $1,%edx
- xorl 48(%rsp),%ebp
- movl %r13d,%eax
- movl %edx,44(%rsp)
- movl %r12d,%ecx
- xorl 56(%rsp),%ebp
- xorl %edi,%eax
- roll $5,%ecx
- xorl 16(%rsp),%ebp
- leal 1859775393(%rdx,%r11,1),%r11d
- xorl %esi,%eax
- addl %ecx,%r11d
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%ebp
- xorl 52(%rsp),%r14d
- movl %r12d,%eax
- movl %ebp,48(%rsp)
- movl %r11d,%ecx
- xorl 60(%rsp),%r14d
- xorl %esi,%eax
- roll $5,%ecx
- xorl 20(%rsp),%r14d
- leal 1859775393(%rbp,%rdi,1),%edi
- xorl %r13d,%eax
- addl %ecx,%edi
- roll $30,%r12d
- addl %eax,%edi
- roll $1,%r14d
- xorl 56(%rsp),%edx
- movl %r11d,%eax
- movl %r14d,52(%rsp)
- movl %edi,%ecx
- xorl 0(%rsp),%edx
- xorl %r13d,%eax
- roll $5,%ecx
- xorl 24(%rsp),%edx
- leal 1859775393(%r14,%rsi,1),%esi
- xorl %r12d,%eax
- addl %ecx,%esi
- roll $30,%r11d
- addl %eax,%esi
- roll $1,%edx
- xorl 60(%rsp),%ebp
- movl %edi,%eax
- movl %edx,56(%rsp)
- movl %esi,%ecx
- xorl 4(%rsp),%ebp
- xorl %r12d,%eax
- roll $5,%ecx
- xorl 28(%rsp),%ebp
- leal 1859775393(%rdx,%r13,1),%r13d
- xorl %r11d,%eax
- addl %ecx,%r13d
- roll $30,%edi
- addl %eax,%r13d
- roll $1,%ebp
- xorl 0(%rsp),%r14d
- movl %esi,%eax
- movl %ebp,60(%rsp)
- movl %r13d,%ecx
- xorl 8(%rsp),%r14d
- xorl %r11d,%eax
- roll $5,%ecx
- xorl 32(%rsp),%r14d
- leal 1859775393(%rbp,%r12,1),%r12d
- xorl %edi,%eax
- addl %ecx,%r12d
- roll $30,%esi
- addl %eax,%r12d
- roll $1,%r14d
- xorl 4(%rsp),%edx
- movl %r13d,%eax
- movl %r14d,0(%rsp)
- movl %r12d,%ecx
- xorl 12(%rsp),%edx
- xorl %edi,%eax
- roll $5,%ecx
- xorl 36(%rsp),%edx
- leal 1859775393(%r14,%r11,1),%r11d
- xorl %esi,%eax
- addl %ecx,%r11d
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%edx
- xorl 8(%rsp),%ebp
- movl %r12d,%eax
- movl %edx,4(%rsp)
- movl %r11d,%ecx
- xorl 16(%rsp),%ebp
- xorl %esi,%eax
- roll $5,%ecx
- xorl 40(%rsp),%ebp
- leal 1859775393(%rdx,%rdi,1),%edi
- xorl %r13d,%eax
- addl %ecx,%edi
- roll $30,%r12d
- addl %eax,%edi
- roll $1,%ebp
- xorl 12(%rsp),%r14d
- movl %r11d,%eax
- movl %ebp,8(%rsp)
- movl %edi,%ecx
- xorl 20(%rsp),%r14d
- xorl %r13d,%eax
- roll $5,%ecx
- xorl 44(%rsp),%r14d
- leal 1859775393(%rbp,%rsi,1),%esi
- xorl %r12d,%eax
- addl %ecx,%esi
- roll $30,%r11d
- addl %eax,%esi
- roll $1,%r14d
- xorl 16(%rsp),%edx
- movl %edi,%eax
- movl %r14d,12(%rsp)
- movl %esi,%ecx
- xorl 24(%rsp),%edx
- xorl %r12d,%eax
- roll $5,%ecx
- xorl 48(%rsp),%edx
- leal 1859775393(%r14,%r13,1),%r13d
- xorl %r11d,%eax
- addl %ecx,%r13d
- roll $30,%edi
- addl %eax,%r13d
- roll $1,%edx
- xorl 20(%rsp),%ebp
- movl %esi,%eax
- movl %edx,16(%rsp)
- movl %r13d,%ecx
- xorl 28(%rsp),%ebp
- xorl %r11d,%eax
- roll $5,%ecx
- xorl 52(%rsp),%ebp
- leal 1859775393(%rdx,%r12,1),%r12d
- xorl %edi,%eax
- addl %ecx,%r12d
- roll $30,%esi
- addl %eax,%r12d
- roll $1,%ebp
- xorl 24(%rsp),%r14d
- movl %r13d,%eax
- movl %ebp,20(%rsp)
- movl %r12d,%ecx
- xorl 32(%rsp),%r14d
- xorl %edi,%eax
- roll $5,%ecx
- xorl 56(%rsp),%r14d
- leal 1859775393(%rbp,%r11,1),%r11d
- xorl %esi,%eax
- addl %ecx,%r11d
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%r14d
- xorl 28(%rsp),%edx
- movl %r12d,%eax
- movl %r14d,24(%rsp)
- movl %r11d,%ecx
- xorl 36(%rsp),%edx
- xorl %esi,%eax
- roll $5,%ecx
- xorl 60(%rsp),%edx
- leal 1859775393(%r14,%rdi,1),%edi
- xorl %r13d,%eax
- addl %ecx,%edi
- roll $30,%r12d
- addl %eax,%edi
- roll $1,%edx
- xorl 32(%rsp),%ebp
- movl %r11d,%eax
- movl %edx,28(%rsp)
- movl %edi,%ecx
- xorl 40(%rsp),%ebp
- xorl %r13d,%eax
- roll $5,%ecx
- xorl 0(%rsp),%ebp
- leal 1859775393(%rdx,%rsi,1),%esi
- xorl %r12d,%eax
- addl %ecx,%esi
- roll $30,%r11d
- addl %eax,%esi
- roll $1,%ebp
- xorl 36(%rsp),%r14d
- movl %r12d,%eax
- movl %ebp,32(%rsp)
- movl %r12d,%ebx
- xorl 44(%rsp),%r14d
- andl %r11d,%eax
- movl %esi,%ecx
- xorl 4(%rsp),%r14d
- leal -1894007588(%rbp,%r13,1),%r13d
- xorl %r11d,%ebx
- roll $5,%ecx
- addl %eax,%r13d
- roll $1,%r14d
- andl %edi,%ebx
- addl %ecx,%r13d
- roll $30,%edi
- addl %ebx,%r13d
- xorl 40(%rsp),%edx
- movl %r11d,%eax
- movl %r14d,36(%rsp)
- movl %r11d,%ebx
- xorl 48(%rsp),%edx
- andl %edi,%eax
- movl %r13d,%ecx
- xorl 8(%rsp),%edx
- leal -1894007588(%r14,%r12,1),%r12d
- xorl %edi,%ebx
- roll $5,%ecx
- addl %eax,%r12d
- roll $1,%edx
- andl %esi,%ebx
- addl %ecx,%r12d
- roll $30,%esi
- addl %ebx,%r12d
- xorl 44(%rsp),%ebp
- movl %edi,%eax
- movl %edx,40(%rsp)
- movl %edi,%ebx
- xorl 52(%rsp),%ebp
- andl %esi,%eax
- movl %r12d,%ecx
- xorl 12(%rsp),%ebp
- leal -1894007588(%rdx,%r11,1),%r11d
- xorl %esi,%ebx
- roll $5,%ecx
- addl %eax,%r11d
- roll $1,%ebp
- andl %r13d,%ebx
- addl %ecx,%r11d
- roll $30,%r13d
- addl %ebx,%r11d
- xorl 48(%rsp),%r14d
- movl %esi,%eax
- movl %ebp,44(%rsp)
- movl %esi,%ebx
- xorl 56(%rsp),%r14d
- andl %r13d,%eax
- movl %r11d,%ecx
- xorl 16(%rsp),%r14d
- leal -1894007588(%rbp,%rdi,1),%edi
- xorl %r13d,%ebx
- roll $5,%ecx
- addl %eax,%edi
- roll $1,%r14d
- andl %r12d,%ebx
- addl %ecx,%edi
- roll $30,%r12d
- addl %ebx,%edi
- xorl 52(%rsp),%edx
- movl %r13d,%eax
- movl %r14d,48(%rsp)
- movl %r13d,%ebx
- xorl 60(%rsp),%edx
- andl %r12d,%eax
- movl %edi,%ecx
- xorl 20(%rsp),%edx
- leal -1894007588(%r14,%rsi,1),%esi
- xorl %r12d,%ebx
- roll $5,%ecx
- addl %eax,%esi
- roll $1,%edx
- andl %r11d,%ebx
- addl %ecx,%esi
- roll $30,%r11d
- addl %ebx,%esi
- xorl 56(%rsp),%ebp
- movl %r12d,%eax
- movl %edx,52(%rsp)
- movl %r12d,%ebx
- xorl 0(%rsp),%ebp
- andl %r11d,%eax
- movl %esi,%ecx
- xorl 24(%rsp),%ebp
- leal -1894007588(%rdx,%r13,1),%r13d
- xorl %r11d,%ebx
- roll $5,%ecx
- addl %eax,%r13d
- roll $1,%ebp
- andl %edi,%ebx
- addl %ecx,%r13d
- roll $30,%edi
- addl %ebx,%r13d
- xorl 60(%rsp),%r14d
- movl %r11d,%eax
- movl %ebp,56(%rsp)
- movl %r11d,%ebx
- xorl 4(%rsp),%r14d
- andl %edi,%eax
- movl %r13d,%ecx
- xorl 28(%rsp),%r14d
- leal -1894007588(%rbp,%r12,1),%r12d
- xorl %edi,%ebx
- roll $5,%ecx
- addl %eax,%r12d
- roll $1,%r14d
- andl %esi,%ebx
- addl %ecx,%r12d
- roll $30,%esi
- addl %ebx,%r12d
- xorl 0(%rsp),%edx
- movl %edi,%eax
- movl %r14d,60(%rsp)
- movl %edi,%ebx
- xorl 8(%rsp),%edx
- andl %esi,%eax
- movl %r12d,%ecx
- xorl 32(%rsp),%edx
- leal -1894007588(%r14,%r11,1),%r11d
- xorl %esi,%ebx
- roll $5,%ecx
- addl %eax,%r11d
- roll $1,%edx
- andl %r13d,%ebx
- addl %ecx,%r11d
- roll $30,%r13d
- addl %ebx,%r11d
- xorl 4(%rsp),%ebp
- movl %esi,%eax
- movl %edx,0(%rsp)
- movl %esi,%ebx
- xorl 12(%rsp),%ebp
- andl %r13d,%eax
- movl %r11d,%ecx
- xorl 36(%rsp),%ebp
- leal -1894007588(%rdx,%rdi,1),%edi
- xorl %r13d,%ebx
- roll $5,%ecx
- addl %eax,%edi
- roll $1,%ebp
- andl %r12d,%ebx
- addl %ecx,%edi
- roll $30,%r12d
- addl %ebx,%edi
- xorl 8(%rsp),%r14d
- movl %r13d,%eax
- movl %ebp,4(%rsp)
- movl %r13d,%ebx
- xorl 16(%rsp),%r14d
- andl %r12d,%eax
- movl %edi,%ecx
- xorl 40(%rsp),%r14d
- leal -1894007588(%rbp,%rsi,1),%esi
- xorl %r12d,%ebx
- roll $5,%ecx
- addl %eax,%esi
- roll $1,%r14d
- andl %r11d,%ebx
- addl %ecx,%esi
- roll $30,%r11d
- addl %ebx,%esi
- xorl 12(%rsp),%edx
- movl %r12d,%eax
- movl %r14d,8(%rsp)
- movl %r12d,%ebx
- xorl 20(%rsp),%edx
- andl %r11d,%eax
- movl %esi,%ecx
- xorl 44(%rsp),%edx
- leal -1894007588(%r14,%r13,1),%r13d
- xorl %r11d,%ebx
- roll $5,%ecx
- addl %eax,%r13d
- roll $1,%edx
- andl %edi,%ebx
- addl %ecx,%r13d
- roll $30,%edi
- addl %ebx,%r13d
- xorl 16(%rsp),%ebp
- movl %r11d,%eax
- movl %edx,12(%rsp)
- movl %r11d,%ebx
- xorl 24(%rsp),%ebp
- andl %edi,%eax
- movl %r13d,%ecx
- xorl 48(%rsp),%ebp
- leal -1894007588(%rdx,%r12,1),%r12d
- xorl %edi,%ebx
- roll $5,%ecx
- addl %eax,%r12d
- roll $1,%ebp
- andl %esi,%ebx
- addl %ecx,%r12d
- roll $30,%esi
- addl %ebx,%r12d
- xorl 20(%rsp),%r14d
- movl %edi,%eax
- movl %ebp,16(%rsp)
- movl %edi,%ebx
- xorl 28(%rsp),%r14d
- andl %esi,%eax
- movl %r12d,%ecx
- xorl 52(%rsp),%r14d
- leal -1894007588(%rbp,%r11,1),%r11d
- xorl %esi,%ebx
- roll $5,%ecx
- addl %eax,%r11d
- roll $1,%r14d
- andl %r13d,%ebx
- addl %ecx,%r11d
- roll $30,%r13d
- addl %ebx,%r11d
- xorl 24(%rsp),%edx
- movl %esi,%eax
- movl %r14d,20(%rsp)
- movl %esi,%ebx
- xorl 32(%rsp),%edx
- andl %r13d,%eax
- movl %r11d,%ecx
- xorl 56(%rsp),%edx
- leal -1894007588(%r14,%rdi,1),%edi
- xorl %r13d,%ebx
- roll $5,%ecx
- addl %eax,%edi
- roll $1,%edx
- andl %r12d,%ebx
- addl %ecx,%edi
- roll $30,%r12d
- addl %ebx,%edi
- xorl 28(%rsp),%ebp
- movl %r13d,%eax
- movl %edx,24(%rsp)
- movl %r13d,%ebx
- xorl 36(%rsp),%ebp
- andl %r12d,%eax
- movl %edi,%ecx
- xorl 60(%rsp),%ebp
- leal -1894007588(%rdx,%rsi,1),%esi
- xorl %r12d,%ebx
- roll $5,%ecx
- addl %eax,%esi
- roll $1,%ebp
- andl %r11d,%ebx
- addl %ecx,%esi
- roll $30,%r11d
- addl %ebx,%esi
- xorl 32(%rsp),%r14d
- movl %r12d,%eax
- movl %ebp,28(%rsp)
- movl %r12d,%ebx
- xorl 40(%rsp),%r14d
- andl %r11d,%eax
- movl %esi,%ecx
- xorl 0(%rsp),%r14d
- leal -1894007588(%rbp,%r13,1),%r13d
- xorl %r11d,%ebx
- roll $5,%ecx
- addl %eax,%r13d
- roll $1,%r14d
- andl %edi,%ebx
- addl %ecx,%r13d
- roll $30,%edi
- addl %ebx,%r13d
- xorl 36(%rsp),%edx
- movl %r11d,%eax
- movl %r14d,32(%rsp)
- movl %r11d,%ebx
- xorl 44(%rsp),%edx
- andl %edi,%eax
- movl %r13d,%ecx
- xorl 4(%rsp),%edx
- leal -1894007588(%r14,%r12,1),%r12d
- xorl %edi,%ebx
- roll $5,%ecx
- addl %eax,%r12d
- roll $1,%edx
- andl %esi,%ebx
- addl %ecx,%r12d
- roll $30,%esi
- addl %ebx,%r12d
- xorl 40(%rsp),%ebp
- movl %edi,%eax
- movl %edx,36(%rsp)
- movl %edi,%ebx
- xorl 48(%rsp),%ebp
- andl %esi,%eax
- movl %r12d,%ecx
- xorl 8(%rsp),%ebp
- leal -1894007588(%rdx,%r11,1),%r11d
- xorl %esi,%ebx
- roll $5,%ecx
- addl %eax,%r11d
- roll $1,%ebp
- andl %r13d,%ebx
- addl %ecx,%r11d
- roll $30,%r13d
- addl %ebx,%r11d
- xorl 44(%rsp),%r14d
- movl %esi,%eax
- movl %ebp,40(%rsp)
- movl %esi,%ebx
- xorl 52(%rsp),%r14d
- andl %r13d,%eax
- movl %r11d,%ecx
- xorl 12(%rsp),%r14d
- leal -1894007588(%rbp,%rdi,1),%edi
- xorl %r13d,%ebx
- roll $5,%ecx
- addl %eax,%edi
- roll $1,%r14d
- andl %r12d,%ebx
- addl %ecx,%edi
- roll $30,%r12d
- addl %ebx,%edi
- xorl 48(%rsp),%edx
- movl %r13d,%eax
- movl %r14d,44(%rsp)
- movl %r13d,%ebx
- xorl 56(%rsp),%edx
- andl %r12d,%eax
- movl %edi,%ecx
- xorl 16(%rsp),%edx
- leal -1894007588(%r14,%rsi,1),%esi
- xorl %r12d,%ebx
- roll $5,%ecx
- addl %eax,%esi
- roll $1,%edx
- andl %r11d,%ebx
- addl %ecx,%esi
- roll $30,%r11d
- addl %ebx,%esi
- xorl 52(%rsp),%ebp
- movl %edi,%eax
- movl %edx,48(%rsp)
- movl %esi,%ecx
- xorl 60(%rsp),%ebp
- xorl %r12d,%eax
- roll $5,%ecx
- xorl 20(%rsp),%ebp
- leal -899497514(%rdx,%r13,1),%r13d
- xorl %r11d,%eax
- addl %ecx,%r13d
- roll $30,%edi
- addl %eax,%r13d
- roll $1,%ebp
- xorl 56(%rsp),%r14d
- movl %esi,%eax
- movl %ebp,52(%rsp)
- movl %r13d,%ecx
- xorl 0(%rsp),%r14d
- xorl %r11d,%eax
- roll $5,%ecx
- xorl 24(%rsp),%r14d
- leal -899497514(%rbp,%r12,1),%r12d
- xorl %edi,%eax
- addl %ecx,%r12d
- roll $30,%esi
- addl %eax,%r12d
- roll $1,%r14d
- xorl 60(%rsp),%edx
- movl %r13d,%eax
- movl %r14d,56(%rsp)
- movl %r12d,%ecx
- xorl 4(%rsp),%edx
- xorl %edi,%eax
- roll $5,%ecx
- xorl 28(%rsp),%edx
- leal -899497514(%r14,%r11,1),%r11d
- xorl %esi,%eax
- addl %ecx,%r11d
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%edx
- xorl 0(%rsp),%ebp
- movl %r12d,%eax
- movl %edx,60(%rsp)
- movl %r11d,%ecx
- xorl 8(%rsp),%ebp
- xorl %esi,%eax
- roll $5,%ecx
- xorl 32(%rsp),%ebp
- leal -899497514(%rdx,%rdi,1),%edi
- xorl %r13d,%eax
- addl %ecx,%edi
- roll $30,%r12d
- addl %eax,%edi
- roll $1,%ebp
- xorl 4(%rsp),%r14d
- movl %r11d,%eax
- movl %ebp,0(%rsp)
- movl %edi,%ecx
- xorl 12(%rsp),%r14d
- xorl %r13d,%eax
- roll $5,%ecx
- xorl 36(%rsp),%r14d
- leal -899497514(%rbp,%rsi,1),%esi
- xorl %r12d,%eax
- addl %ecx,%esi
- roll $30,%r11d
- addl %eax,%esi
- roll $1,%r14d
- xorl 8(%rsp),%edx
- movl %edi,%eax
- movl %r14d,4(%rsp)
- movl %esi,%ecx
- xorl 16(%rsp),%edx
- xorl %r12d,%eax
- roll $5,%ecx
- xorl 40(%rsp),%edx
- leal -899497514(%r14,%r13,1),%r13d
- xorl %r11d,%eax
- addl %ecx,%r13d
- roll $30,%edi
- addl %eax,%r13d
- roll $1,%edx
- xorl 12(%rsp),%ebp
- movl %esi,%eax
- movl %edx,8(%rsp)
- movl %r13d,%ecx
- xorl 20(%rsp),%ebp
- xorl %r11d,%eax
- roll $5,%ecx
- xorl 44(%rsp),%ebp
- leal -899497514(%rdx,%r12,1),%r12d
- xorl %edi,%eax
- addl %ecx,%r12d
- roll $30,%esi
- addl %eax,%r12d
- roll $1,%ebp
- xorl 16(%rsp),%r14d
- movl %r13d,%eax
- movl %ebp,12(%rsp)
- movl %r12d,%ecx
- xorl 24(%rsp),%r14d
- xorl %edi,%eax
- roll $5,%ecx
- xorl 48(%rsp),%r14d
- leal -899497514(%rbp,%r11,1),%r11d
- xorl %esi,%eax
- addl %ecx,%r11d
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%r14d
- xorl 20(%rsp),%edx
- movl %r12d,%eax
- movl %r14d,16(%rsp)
- movl %r11d,%ecx
- xorl 28(%rsp),%edx
- xorl %esi,%eax
- roll $5,%ecx
- xorl 52(%rsp),%edx
- leal -899497514(%r14,%rdi,1),%edi
- xorl %r13d,%eax
- addl %ecx,%edi
- roll $30,%r12d
- addl %eax,%edi
- roll $1,%edx
- xorl 24(%rsp),%ebp
- movl %r11d,%eax
- movl %edx,20(%rsp)
- movl %edi,%ecx
- xorl 32(%rsp),%ebp
- xorl %r13d,%eax
- roll $5,%ecx
- xorl 56(%rsp),%ebp
- leal -899497514(%rdx,%rsi,1),%esi
- xorl %r12d,%eax
- addl %ecx,%esi
- roll $30,%r11d
- addl %eax,%esi
- roll $1,%ebp
- xorl 28(%rsp),%r14d
- movl %edi,%eax
- movl %ebp,24(%rsp)
- movl %esi,%ecx
- xorl 36(%rsp),%r14d
- xorl %r12d,%eax
- roll $5,%ecx
- xorl 60(%rsp),%r14d
- leal -899497514(%rbp,%r13,1),%r13d
- xorl %r11d,%eax
- addl %ecx,%r13d
- roll $30,%edi
- addl %eax,%r13d
- roll $1,%r14d
- xorl 32(%rsp),%edx
- movl %esi,%eax
- movl %r14d,28(%rsp)
- movl %r13d,%ecx
- xorl 40(%rsp),%edx
- xorl %r11d,%eax
- roll $5,%ecx
- xorl 0(%rsp),%edx
- leal -899497514(%r14,%r12,1),%r12d
- xorl %edi,%eax
- addl %ecx,%r12d
- roll $30,%esi
- addl %eax,%r12d
- roll $1,%edx
- xorl 36(%rsp),%ebp
- movl %r13d,%eax
- movl %r12d,%ecx
- xorl 44(%rsp),%ebp
- xorl %edi,%eax
- roll $5,%ecx
- xorl 4(%rsp),%ebp
- leal -899497514(%rdx,%r11,1),%r11d
- xorl %esi,%eax
- addl %ecx,%r11d
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%ebp
- xorl 40(%rsp),%r14d
- movl %r12d,%eax
- movl %r11d,%ecx
- xorl 48(%rsp),%r14d
- xorl %esi,%eax
- roll $5,%ecx
- xorl 8(%rsp),%r14d
- leal -899497514(%rbp,%rdi,1),%edi
- xorl %r13d,%eax
- addl %ecx,%edi
- roll $30,%r12d
- addl %eax,%edi
- roll $1,%r14d
- xorl 44(%rsp),%edx
- movl %r11d,%eax
- movl %edi,%ecx
- xorl 52(%rsp),%edx
- xorl %r13d,%eax
- roll $5,%ecx
- xorl 12(%rsp),%edx
- leal -899497514(%r14,%rsi,1),%esi
- xorl %r12d,%eax
- addl %ecx,%esi
- roll $30,%r11d
- addl %eax,%esi
- roll $1,%edx
- xorl 48(%rsp),%ebp
- movl %edi,%eax
- movl %esi,%ecx
- xorl 56(%rsp),%ebp
- xorl %r12d,%eax
- roll $5,%ecx
- xorl 16(%rsp),%ebp
- leal -899497514(%rdx,%r13,1),%r13d
- xorl %r11d,%eax
- addl %ecx,%r13d
- roll $30,%edi
- addl %eax,%r13d
- roll $1,%ebp
- xorl 52(%rsp),%r14d
- movl %esi,%eax
- movl %r13d,%ecx
- xorl 60(%rsp),%r14d
- xorl %r11d,%eax
- roll $5,%ecx
- xorl 20(%rsp),%r14d
- leal -899497514(%rbp,%r12,1),%r12d
- xorl %edi,%eax
- addl %ecx,%r12d
- roll $30,%esi
- addl %eax,%r12d
- roll $1,%r14d
- xorl 56(%rsp),%edx
- movl %r13d,%eax
- movl %r12d,%ecx
- xorl 0(%rsp),%edx
- xorl %edi,%eax
- roll $5,%ecx
- xorl 24(%rsp),%edx
- leal -899497514(%r14,%r11,1),%r11d
- xorl %esi,%eax
- addl %ecx,%r11d
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%edx
- xorl 60(%rsp),%ebp
- movl %r12d,%eax
- movl %r11d,%ecx
- xorl 4(%rsp),%ebp
- xorl %esi,%eax
- roll $5,%ecx
- xorl 28(%rsp),%ebp
- leal -899497514(%rdx,%rdi,1),%edi
- xorl %r13d,%eax
- addl %ecx,%edi
- roll $30,%r12d
- addl %eax,%edi
- roll $1,%ebp
- movl %r11d,%eax
- movl %edi,%ecx
- xorl %r13d,%eax
- leal -899497514(%rbp,%rsi,1),%esi
- roll $5,%ecx
- xorl %r12d,%eax
- addl %ecx,%esi
- roll $30,%r11d
- addl %eax,%esi
- addl 0(%r8),%esi
- addl 4(%r8),%edi
- addl 8(%r8),%r11d
- addl 12(%r8),%r12d
- addl 16(%r8),%r13d
- movl %esi,0(%r8)
- movl %edi,4(%r8)
- movl %r11d,8(%r8)
- movl %r12d,12(%r8)
- movl %r13d,16(%r8)
- subq $1,%r10
- leaq 64(%r9),%r9
- jnz L$loop
- movq 64(%rsp),%rsi
- movq -40(%rsi),%r14
- movq -32(%rsi),%r13
- movq -24(%rsi),%r12
- movq -16(%rsi),%rbp
- movq -8(%rsi),%rbx
- leaq (%rsi),%rsp
- L$epilogue:
- .byte 0xf3,0xc3
- .p2align 5
- sha1_block_data_order_shaext:
- _shaext_shortcut:
- movdqu (%rdi),%xmm0
- movd 16(%rdi),%xmm1
- movdqa K_XX_XX+160(%rip),%xmm3
- movdqu (%rsi),%xmm4
- pshufd $27,%xmm0,%xmm0
- movdqu 16(%rsi),%xmm5
- pshufd $27,%xmm1,%xmm1
- movdqu 32(%rsi),%xmm6
- .byte 102,15,56,0,227
- movdqu 48(%rsi),%xmm7
- .byte 102,15,56,0,235
- .byte 102,15,56,0,243
- movdqa %xmm1,%xmm9
- .byte 102,15,56,0,251
- jmp L$oop_shaext
- .p2align 4
- L$oop_shaext:
- decq %rdx
- leaq 64(%rsi),%r8
- paddd %xmm4,%xmm1
- cmovneq %r8,%rsi
- movdqa %xmm0,%xmm8
- .byte 15,56,201,229
- movdqa %xmm0,%xmm2
- .byte 15,58,204,193,0
- .byte 15,56,200,213
- pxor %xmm6,%xmm4
- .byte 15,56,201,238
- .byte 15,56,202,231
- movdqa %xmm0,%xmm1
- .byte 15,58,204,194,0
- .byte 15,56,200,206
- pxor %xmm7,%xmm5
- .byte 15,56,202,236
- .byte 15,56,201,247
- movdqa %xmm0,%xmm2
- .byte 15,58,204,193,0
- .byte 15,56,200,215
- pxor %xmm4,%xmm6
- .byte 15,56,201,252
- .byte 15,56,202,245
- movdqa %xmm0,%xmm1
- .byte 15,58,204,194,0
- .byte 15,56,200,204
- pxor %xmm5,%xmm7
- .byte 15,56,202,254
- .byte 15,56,201,229
- movdqa %xmm0,%xmm2
- .byte 15,58,204,193,0
- .byte 15,56,200,213
- pxor %xmm6,%xmm4
- .byte 15,56,201,238
- .byte 15,56,202,231
- movdqa %xmm0,%xmm1
- .byte 15,58,204,194,1
- .byte 15,56,200,206
- pxor %xmm7,%xmm5
- .byte 15,56,202,236
- .byte 15,56,201,247
- movdqa %xmm0,%xmm2
- .byte 15,58,204,193,1
- .byte 15,56,200,215
- pxor %xmm4,%xmm6
- .byte 15,56,201,252
- .byte 15,56,202,245
- movdqa %xmm0,%xmm1
- .byte 15,58,204,194,1
- .byte 15,56,200,204
- pxor %xmm5,%xmm7
- .byte 15,56,202,254
- .byte 15,56,201,229
- movdqa %xmm0,%xmm2
- .byte 15,58,204,193,1
- .byte 15,56,200,213
- pxor %xmm6,%xmm4
- .byte 15,56,201,238
- .byte 15,56,202,231
- movdqa %xmm0,%xmm1
- .byte 15,58,204,194,1
- .byte 15,56,200,206
- pxor %xmm7,%xmm5
- .byte 15,56,202,236
- .byte 15,56,201,247
- movdqa %xmm0,%xmm2
- .byte 15,58,204,193,2
- .byte 15,56,200,215
- pxor %xmm4,%xmm6
- .byte 15,56,201,252
- .byte 15,56,202,245
- movdqa %xmm0,%xmm1
- .byte 15,58,204,194,2
- .byte 15,56,200,204
- pxor %xmm5,%xmm7
- .byte 15,56,202,254
- .byte 15,56,201,229
- movdqa %xmm0,%xmm2
- .byte 15,58,204,193,2
- .byte 15,56,200,213
- pxor %xmm6,%xmm4
- .byte 15,56,201,238
- .byte 15,56,202,231
- movdqa %xmm0,%xmm1
- .byte 15,58,204,194,2
- .byte 15,56,200,206
- pxor %xmm7,%xmm5
- .byte 15,56,202,236
- .byte 15,56,201,247
- movdqa %xmm0,%xmm2
- .byte 15,58,204,193,2
- .byte 15,56,200,215
- pxor %xmm4,%xmm6
- .byte 15,56,201,252
- .byte 15,56,202,245
- movdqa %xmm0,%xmm1
- .byte 15,58,204,194,3
- .byte 15,56,200,204
- pxor %xmm5,%xmm7
- .byte 15,56,202,254
- movdqu (%rsi),%xmm4
- movdqa %xmm0,%xmm2
- .byte 15,58,204,193,3
- .byte 15,56,200,213
- movdqu 16(%rsi),%xmm5
- .byte 102,15,56,0,227
- movdqa %xmm0,%xmm1
- .byte 15,58,204,194,3
- .byte 15,56,200,206
- movdqu 32(%rsi),%xmm6
- .byte 102,15,56,0,235
- movdqa %xmm0,%xmm2
- .byte 15,58,204,193,3
- .byte 15,56,200,215
- movdqu 48(%rsi),%xmm7
- .byte 102,15,56,0,243
- movdqa %xmm0,%xmm1
- .byte 15,58,204,194,3
- .byte 65,15,56,200,201
- .byte 102,15,56,0,251
- paddd %xmm8,%xmm0
- movdqa %xmm1,%xmm9
- jnz L$oop_shaext
- pshufd $27,%xmm0,%xmm0
- pshufd $27,%xmm1,%xmm1
- movdqu %xmm0,(%rdi)
- movd %xmm1,16(%rdi)
- .byte 0xf3,0xc3
- .p2align 4
- sha1_block_data_order_ssse3:
- _ssse3_shortcut:
- movq %rsp,%r11
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- leaq -64(%rsp),%rsp
- andq $-64,%rsp
- movq %rdi,%r8
- movq %rsi,%r9
- movq %rdx,%r10
- shlq $6,%r10
- addq %r9,%r10
- leaq K_XX_XX+64(%rip),%r14
- movl 0(%r8),%eax
- movl 4(%r8),%ebx
- movl 8(%r8),%ecx
- movl 12(%r8),%edx
- movl %ebx,%esi
- movl 16(%r8),%ebp
- movl %ecx,%edi
- xorl %edx,%edi
- andl %edi,%esi
- movdqa 64(%r14),%xmm6
- movdqa -64(%r14),%xmm9
- movdqu 0(%r9),%xmm0
- movdqu 16(%r9),%xmm1
- movdqu 32(%r9),%xmm2
- movdqu 48(%r9),%xmm3
- .byte 102,15,56,0,198
- .byte 102,15,56,0,206
- .byte 102,15,56,0,214
- addq $64,%r9
- paddd %xmm9,%xmm0
- .byte 102,15,56,0,222
- paddd %xmm9,%xmm1
- paddd %xmm9,%xmm2
- movdqa %xmm0,0(%rsp)
- psubd %xmm9,%xmm0
- movdqa %xmm1,16(%rsp)
- psubd %xmm9,%xmm1
- movdqa %xmm2,32(%rsp)
- psubd %xmm9,%xmm2
- jmp L$oop_ssse3
- .p2align 4
- L$oop_ssse3:
- rorl $2,%ebx
- pshufd $238,%xmm0,%xmm4
- xorl %edx,%esi
- movdqa %xmm3,%xmm8
- paddd %xmm3,%xmm9
- movl %eax,%edi
- addl 0(%rsp),%ebp
- punpcklqdq %xmm1,%xmm4
- xorl %ecx,%ebx
- roll $5,%eax
- addl %esi,%ebp
- psrldq $4,%xmm8
- andl %ebx,%edi
- xorl %ecx,%ebx
- pxor %xmm0,%xmm4
- addl %eax,%ebp
- rorl $7,%eax
- pxor %xmm2,%xmm8
- xorl %ecx,%edi
- movl %ebp,%esi
- addl 4(%rsp),%edx
- pxor %xmm8,%xmm4
- xorl %ebx,%eax
- roll $5,%ebp
- movdqa %xmm9,48(%rsp)
- addl %edi,%edx
- andl %eax,%esi
- movdqa %xmm4,%xmm10
- xorl %ebx,%eax
- addl %ebp,%edx
- rorl $7,%ebp
- movdqa %xmm4,%xmm8
- xorl %ebx,%esi
- pslldq $12,%xmm10
- paddd %xmm4,%xmm4
- movl %edx,%edi
- addl 8(%rsp),%ecx
- psrld $31,%xmm8
- xorl %eax,%ebp
- roll $5,%edx
- addl %esi,%ecx
- movdqa %xmm10,%xmm9
- andl %ebp,%edi
- xorl %eax,%ebp
- psrld $30,%xmm10
- addl %edx,%ecx
- rorl $7,%edx
- por %xmm8,%xmm4
- xorl %eax,%edi
- movl %ecx,%esi
- addl 12(%rsp),%ebx
- pslld $2,%xmm9
- pxor %xmm10,%xmm4
- xorl %ebp,%edx
- movdqa -64(%r14),%xmm10
- roll $5,%ecx
- addl %edi,%ebx
- andl %edx,%esi
- pxor %xmm9,%xmm4
- xorl %ebp,%edx
- addl %ecx,%ebx
- rorl $7,%ecx
- pshufd $238,%xmm1,%xmm5
- xorl %ebp,%esi
- movdqa %xmm4,%xmm9
- paddd %xmm4,%xmm10
- movl %ebx,%edi
- addl 16(%rsp),%eax
- punpcklqdq %xmm2,%xmm5
- xorl %edx,%ecx
- roll $5,%ebx
- addl %esi,%eax
- psrldq $4,%xmm9
- andl %ecx,%edi
- xorl %edx,%ecx
- pxor %xmm1,%xmm5
- addl %ebx,%eax
- rorl $7,%ebx
- pxor %xmm3,%xmm9
- xorl %edx,%edi
- movl %eax,%esi
- addl 20(%rsp),%ebp
- pxor %xmm9,%xmm5
- xorl %ecx,%ebx
- roll $5,%eax
- movdqa %xmm10,0(%rsp)
- addl %edi,%ebp
- andl %ebx,%esi
- movdqa %xmm5,%xmm8
- xorl %ecx,%ebx
- addl %eax,%ebp
- rorl $7,%eax
- movdqa %xmm5,%xmm9
- xorl %ecx,%esi
- pslldq $12,%xmm8
- paddd %xmm5,%xmm5
- movl %ebp,%edi
- addl 24(%rsp),%edx
- psrld $31,%xmm9
- xorl %ebx,%eax
- roll $5,%ebp
- addl %esi,%edx
- movdqa %xmm8,%xmm10
- andl %eax,%edi
- xorl %ebx,%eax
- psrld $30,%xmm8
- addl %ebp,%edx
- rorl $7,%ebp
- por %xmm9,%xmm5
- xorl %ebx,%edi
- movl %edx,%esi
- addl 28(%rsp),%ecx
- pslld $2,%xmm10
- pxor %xmm8,%xmm5
- xorl %eax,%ebp
- movdqa -32(%r14),%xmm8
- roll $5,%edx
- addl %edi,%ecx
- andl %ebp,%esi
- pxor %xmm10,%xmm5
- xorl %eax,%ebp
- addl %edx,%ecx
- rorl $7,%edx
- pshufd $238,%xmm2,%xmm6
- xorl %eax,%esi
- movdqa %xmm5,%xmm10
- paddd %xmm5,%xmm8
- movl %ecx,%edi
- addl 32(%rsp),%ebx
- punpcklqdq %xmm3,%xmm6
- xorl %ebp,%edx
- roll $5,%ecx
- addl %esi,%ebx
- psrldq $4,%xmm10
- andl %edx,%edi
- xorl %ebp,%edx
- pxor %xmm2,%xmm6
- addl %ecx,%ebx
- rorl $7,%ecx
- pxor %xmm4,%xmm10
- xorl %ebp,%edi
- movl %ebx,%esi
- addl 36(%rsp),%eax
- pxor %xmm10,%xmm6
- xorl %edx,%ecx
- roll $5,%ebx
- movdqa %xmm8,16(%rsp)
- addl %edi,%eax
- andl %ecx,%esi
- movdqa %xmm6,%xmm9
- xorl %edx,%ecx
- addl %ebx,%eax
- rorl $7,%ebx
- movdqa %xmm6,%xmm10
- xorl %edx,%esi
- pslldq $12,%xmm9
- paddd %xmm6,%xmm6
- movl %eax,%edi
- addl 40(%rsp),%ebp
- psrld $31,%xmm10
- xorl %ecx,%ebx
- roll $5,%eax
- addl %esi,%ebp
- movdqa %xmm9,%xmm8
- andl %ebx,%edi
- xorl %ecx,%ebx
- psrld $30,%xmm9
- addl %eax,%ebp
- rorl $7,%eax
- por %xmm10,%xmm6
- xorl %ecx,%edi
- movl %ebp,%esi
- addl 44(%rsp),%edx
- pslld $2,%xmm8
- pxor %xmm9,%xmm6
- xorl %ebx,%eax
- movdqa -32(%r14),%xmm9
- roll $5,%ebp
- addl %edi,%edx
- andl %eax,%esi
- pxor %xmm8,%xmm6
- xorl %ebx,%eax
- addl %ebp,%edx
- rorl $7,%ebp
- pshufd $238,%xmm3,%xmm7
- xorl %ebx,%esi
- movdqa %xmm6,%xmm8
- paddd %xmm6,%xmm9
- movl %edx,%edi
- addl 48(%rsp),%ecx
- punpcklqdq %xmm4,%xmm7
- xorl %eax,%ebp
- roll $5,%edx
- addl %esi,%ecx
- psrldq $4,%xmm8
- andl %ebp,%edi
- xorl %eax,%ebp
- pxor %xmm3,%xmm7
- addl %edx,%ecx
- rorl $7,%edx
- pxor %xmm5,%xmm8
- xorl %eax,%edi
- movl %ecx,%esi
- addl 52(%rsp),%ebx
- pxor %xmm8,%xmm7
- xorl %ebp,%edx
- roll $5,%ecx
- movdqa %xmm9,32(%rsp)
- addl %edi,%ebx
- andl %edx,%esi
- movdqa %xmm7,%xmm10
- xorl %ebp,%edx
- addl %ecx,%ebx
- rorl $7,%ecx
- movdqa %xmm7,%xmm8
- xorl %ebp,%esi
- pslldq $12,%xmm10
- paddd %xmm7,%xmm7
- movl %ebx,%edi
- addl 56(%rsp),%eax
- psrld $31,%xmm8
- xorl %edx,%ecx
- roll $5,%ebx
- addl %esi,%eax
- movdqa %xmm10,%xmm9
- andl %ecx,%edi
- xorl %edx,%ecx
- psrld $30,%xmm10
- addl %ebx,%eax
- rorl $7,%ebx
- por %xmm8,%xmm7
- xorl %edx,%edi
- movl %eax,%esi
- addl 60(%rsp),%ebp
- pslld $2,%xmm9
- pxor %xmm10,%xmm7
- xorl %ecx,%ebx
- movdqa -32(%r14),%xmm10
- roll $5,%eax
- addl %edi,%ebp
- andl %ebx,%esi
- pxor %xmm9,%xmm7
- pshufd $238,%xmm6,%xmm9
- xorl %ecx,%ebx
- addl %eax,%ebp
- rorl $7,%eax
- pxor %xmm4,%xmm0
- xorl %ecx,%esi
- movl %ebp,%edi
- addl 0(%rsp),%edx
- punpcklqdq %xmm7,%xmm9
- xorl %ebx,%eax
- roll $5,%ebp
- pxor %xmm1,%xmm0
- addl %esi,%edx
- andl %eax,%edi
- movdqa %xmm10,%xmm8
- xorl %ebx,%eax
- paddd %xmm7,%xmm10
- addl %ebp,%edx
- pxor %xmm9,%xmm0
- rorl $7,%ebp
- xorl %ebx,%edi
- movl %edx,%esi
- addl 4(%rsp),%ecx
- movdqa %xmm0,%xmm9
- xorl %eax,%ebp
- roll $5,%edx
- movdqa %xmm10,48(%rsp)
- addl %edi,%ecx
- andl %ebp,%esi
- xorl %eax,%ebp
- pslld $2,%xmm0
- addl %edx,%ecx
- rorl $7,%edx
- psrld $30,%xmm9
- xorl %eax,%esi
- movl %ecx,%edi
- addl 8(%rsp),%ebx
- por %xmm9,%xmm0
- xorl %ebp,%edx
- roll $5,%ecx
- pshufd $238,%xmm7,%xmm10
- addl %esi,%ebx
- andl %edx,%edi
- xorl %ebp,%edx
- addl %ecx,%ebx
- addl 12(%rsp),%eax
- xorl %ebp,%edi
- movl %ebx,%esi
- roll $5,%ebx
- addl %edi,%eax
- xorl %edx,%esi
- rorl $7,%ecx
- addl %ebx,%eax
- pxor %xmm5,%xmm1
- addl 16(%rsp),%ebp
- xorl %ecx,%esi
- punpcklqdq %xmm0,%xmm10
- movl %eax,%edi
- roll $5,%eax
- pxor %xmm2,%xmm1
- addl %esi,%ebp
- xorl %ecx,%edi
- movdqa %xmm8,%xmm9
- rorl $7,%ebx
- paddd %xmm0,%xmm8
- addl %eax,%ebp
- pxor %xmm10,%xmm1
- addl 20(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- roll $5,%ebp
- movdqa %xmm1,%xmm10
- addl %edi,%edx
- xorl %ebx,%esi
- movdqa %xmm8,0(%rsp)
- rorl $7,%eax
- addl %ebp,%edx
- addl 24(%rsp),%ecx
- pslld $2,%xmm1
- xorl %eax,%esi
- movl %edx,%edi
- psrld $30,%xmm10
- roll $5,%edx
- addl %esi,%ecx
- xorl %eax,%edi
- rorl $7,%ebp
- por %xmm10,%xmm1
- addl %edx,%ecx
- addl 28(%rsp),%ebx
- pshufd $238,%xmm0,%xmm8
- xorl %ebp,%edi
- movl %ecx,%esi
- roll $5,%ecx
- addl %edi,%ebx
- xorl %ebp,%esi
- rorl $7,%edx
- addl %ecx,%ebx
- pxor %xmm6,%xmm2
- addl 32(%rsp),%eax
- xorl %edx,%esi
- punpcklqdq %xmm1,%xmm8
- movl %ebx,%edi
- roll $5,%ebx
- pxor %xmm3,%xmm2
- addl %esi,%eax
- xorl %edx,%edi
- movdqa 0(%r14),%xmm10
- rorl $7,%ecx
- paddd %xmm1,%xmm9
- addl %ebx,%eax
- pxor %xmm8,%xmm2
- addl 36(%rsp),%ebp
- xorl %ecx,%edi
- movl %eax,%esi
- roll $5,%eax
- movdqa %xmm2,%xmm8
- addl %edi,%ebp
- xorl %ecx,%esi
- movdqa %xmm9,16(%rsp)
- rorl $7,%ebx
- addl %eax,%ebp
- addl 40(%rsp),%edx
- pslld $2,%xmm2
- xorl %ebx,%esi
- movl %ebp,%edi
- psrld $30,%xmm8
- roll $5,%ebp
- addl %esi,%edx
- xorl %ebx,%edi
- rorl $7,%eax
- por %xmm8,%xmm2
- addl %ebp,%edx
- addl 44(%rsp),%ecx
- pshufd $238,%xmm1,%xmm9
- xorl %eax,%edi
- movl %edx,%esi
- roll $5,%edx
- addl %edi,%ecx
- xorl %eax,%esi
- rorl $7,%ebp
- addl %edx,%ecx
- pxor %xmm7,%xmm3
- addl 48(%rsp),%ebx
- xorl %ebp,%esi
- punpcklqdq %xmm2,%xmm9
- movl %ecx,%edi
- roll $5,%ecx
- pxor %xmm4,%xmm3
- addl %esi,%ebx
- xorl %ebp,%edi
- movdqa %xmm10,%xmm8
- rorl $7,%edx
- paddd %xmm2,%xmm10
- addl %ecx,%ebx
- pxor %xmm9,%xmm3
- addl 52(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- roll $5,%ebx
- movdqa %xmm3,%xmm9
- addl %edi,%eax
- xorl %edx,%esi
- movdqa %xmm10,32(%rsp)
- rorl $7,%ecx
- addl %ebx,%eax
- addl 56(%rsp),%ebp
- pslld $2,%xmm3
- xorl %ecx,%esi
- movl %eax,%edi
- psrld $30,%xmm9
- roll $5,%eax
- addl %esi,%ebp
- xorl %ecx,%edi
- rorl $7,%ebx
- por %xmm9,%xmm3
- addl %eax,%ebp
- addl 60(%rsp),%edx
- pshufd $238,%xmm2,%xmm10
- xorl %ebx,%edi
- movl %ebp,%esi
- roll $5,%ebp
- addl %edi,%edx
- xorl %ebx,%esi
- rorl $7,%eax
- addl %ebp,%edx
- pxor %xmm0,%xmm4
- addl 0(%rsp),%ecx
- xorl %eax,%esi
- punpcklqdq %xmm3,%xmm10
- movl %edx,%edi
- roll $5,%edx
- pxor %xmm5,%xmm4
- addl %esi,%ecx
- xorl %eax,%edi
- movdqa %xmm8,%xmm9
- rorl $7,%ebp
- paddd %xmm3,%xmm8
- addl %edx,%ecx
- pxor %xmm10,%xmm4
- addl 4(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- roll $5,%ecx
- movdqa %xmm4,%xmm10
- addl %edi,%ebx
- xorl %ebp,%esi
- movdqa %xmm8,48(%rsp)
- rorl $7,%edx
- addl %ecx,%ebx
- addl 8(%rsp),%eax
- pslld $2,%xmm4
- xorl %edx,%esi
- movl %ebx,%edi
- psrld $30,%xmm10
- roll $5,%ebx
- addl %esi,%eax
- xorl %edx,%edi
- rorl $7,%ecx
- por %xmm10,%xmm4
- addl %ebx,%eax
- addl 12(%rsp),%ebp
- pshufd $238,%xmm3,%xmm8
- xorl %ecx,%edi
- movl %eax,%esi
- roll $5,%eax
- addl %edi,%ebp
- xorl %ecx,%esi
- rorl $7,%ebx
- addl %eax,%ebp
- pxor %xmm1,%xmm5
- addl 16(%rsp),%edx
- xorl %ebx,%esi
- punpcklqdq %xmm4,%xmm8
- movl %ebp,%edi
- roll $5,%ebp
- pxor %xmm6,%xmm5
- addl %esi,%edx
- xorl %ebx,%edi
- movdqa %xmm9,%xmm10
- rorl $7,%eax
- paddd %xmm4,%xmm9
- addl %ebp,%edx
- pxor %xmm8,%xmm5
- addl 20(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- roll $5,%edx
- movdqa %xmm5,%xmm8
- addl %edi,%ecx
- xorl %eax,%esi
- movdqa %xmm9,0(%rsp)
- rorl $7,%ebp
- addl %edx,%ecx
- addl 24(%rsp),%ebx
- pslld $2,%xmm5
- xorl %ebp,%esi
- movl %ecx,%edi
- psrld $30,%xmm8
- roll $5,%ecx
- addl %esi,%ebx
- xorl %ebp,%edi
- rorl $7,%edx
- por %xmm8,%xmm5
- addl %ecx,%ebx
- addl 28(%rsp),%eax
- pshufd $238,%xmm4,%xmm9
- rorl $7,%ecx
- movl %ebx,%esi
- xorl %edx,%edi
- roll $5,%ebx
- addl %edi,%eax
- xorl %ecx,%esi
- xorl %edx,%ecx
- addl %ebx,%eax
- pxor %xmm2,%xmm6
- addl 32(%rsp),%ebp
- andl %ecx,%esi
- xorl %edx,%ecx
- rorl $7,%ebx
- punpcklqdq %xmm5,%xmm9
- movl %eax,%edi
- xorl %ecx,%esi
- pxor %xmm7,%xmm6
- roll $5,%eax
- addl %esi,%ebp
- movdqa %xmm10,%xmm8
- xorl %ebx,%edi
- paddd %xmm5,%xmm10
- xorl %ecx,%ebx
- pxor %xmm9,%xmm6
- addl %eax,%ebp
- addl 36(%rsp),%edx
- andl %ebx,%edi
- xorl %ecx,%ebx
- rorl $7,%eax
- movdqa %xmm6,%xmm9
- movl %ebp,%esi
- xorl %ebx,%edi
- movdqa %xmm10,16(%rsp)
- roll $5,%ebp
- addl %edi,%edx
- xorl %eax,%esi
- pslld $2,%xmm6
- xorl %ebx,%eax
- addl %ebp,%edx
- psrld $30,%xmm9
- addl 40(%rsp),%ecx
- andl %eax,%esi
- xorl %ebx,%eax
- por %xmm9,%xmm6
- rorl $7,%ebp
- movl %edx,%edi
- xorl %eax,%esi
- roll $5,%edx
- pshufd $238,%xmm5,%xmm10
- addl %esi,%ecx
- xorl %ebp,%edi
- xorl %eax,%ebp
- addl %edx,%ecx
- addl 44(%rsp),%ebx
- andl %ebp,%edi
- xorl %eax,%ebp
- rorl $7,%edx
- movl %ecx,%esi
- xorl %ebp,%edi
- roll $5,%ecx
- addl %edi,%ebx
- xorl %edx,%esi
- xorl %ebp,%edx
- addl %ecx,%ebx
- pxor %xmm3,%xmm7
- addl 48(%rsp),%eax
- andl %edx,%esi
- xorl %ebp,%edx
- rorl $7,%ecx
- punpcklqdq %xmm6,%xmm10
- movl %ebx,%edi
- xorl %edx,%esi
- pxor %xmm0,%xmm7
- roll $5,%ebx
- addl %esi,%eax
- movdqa 32(%r14),%xmm9
- xorl %ecx,%edi
- paddd %xmm6,%xmm8
- xorl %edx,%ecx
- pxor %xmm10,%xmm7
- addl %ebx,%eax
- addl 52(%rsp),%ebp
- andl %ecx,%edi
- xorl %edx,%ecx
- rorl $7,%ebx
- movdqa %xmm7,%xmm10
- movl %eax,%esi
- xorl %ecx,%edi
- movdqa %xmm8,32(%rsp)
- roll $5,%eax
- addl %edi,%ebp
- xorl %ebx,%esi
- pslld $2,%xmm7
- xorl %ecx,%ebx
- addl %eax,%ebp
- psrld $30,%xmm10
- addl 56(%rsp),%edx
- andl %ebx,%esi
- xorl %ecx,%ebx
- por %xmm10,%xmm7
- rorl $7,%eax
- movl %ebp,%edi
- xorl %ebx,%esi
- roll $5,%ebp
- pshufd $238,%xmm6,%xmm8
- addl %esi,%edx
- xorl %eax,%edi
- xorl %ebx,%eax
- addl %ebp,%edx
- addl 60(%rsp),%ecx
- andl %eax,%edi
- xorl %ebx,%eax
- rorl $7,%ebp
- movl %edx,%esi
- xorl %eax,%edi
- roll $5,%edx
- addl %edi,%ecx
- xorl %ebp,%esi
- xorl %eax,%ebp
- addl %edx,%ecx
- pxor %xmm4,%xmm0
- addl 0(%rsp),%ebx
- andl %ebp,%esi
- xorl %eax,%ebp
- rorl $7,%edx
- punpcklqdq %xmm7,%xmm8
- movl %ecx,%edi
- xorl %ebp,%esi
- pxor %xmm1,%xmm0
- roll $5,%ecx
- addl %esi,%ebx
- movdqa %xmm9,%xmm10
- xorl %edx,%edi
- paddd %xmm7,%xmm9
- xorl %ebp,%edx
- pxor %xmm8,%xmm0
- addl %ecx,%ebx
- addl 4(%rsp),%eax
- andl %edx,%edi
- xorl %ebp,%edx
- rorl $7,%ecx
- movdqa %xmm0,%xmm8
- movl %ebx,%esi
- xorl %edx,%edi
- movdqa %xmm9,48(%rsp)
- roll $5,%ebx
- addl %edi,%eax
- xorl %ecx,%esi
- pslld $2,%xmm0
- xorl %edx,%ecx
- addl %ebx,%eax
- psrld $30,%xmm8
- addl 8(%rsp),%ebp
- andl %ecx,%esi
- xorl %edx,%ecx
- por %xmm8,%xmm0
- rorl $7,%ebx
- movl %eax,%edi
- xorl %ecx,%esi
- roll $5,%eax
- pshufd $238,%xmm7,%xmm9
- addl %esi,%ebp
- xorl %ebx,%edi
- xorl %ecx,%ebx
- addl %eax,%ebp
- addl 12(%rsp),%edx
- andl %ebx,%edi
- xorl %ecx,%ebx
- rorl $7,%eax
- movl %ebp,%esi
- xorl %ebx,%edi
- roll $5,%ebp
- addl %edi,%edx
- xorl %eax,%esi
- xorl %ebx,%eax
- addl %ebp,%edx
- pxor %xmm5,%xmm1
- addl 16(%rsp),%ecx
- andl %eax,%esi
- xorl %ebx,%eax
- rorl $7,%ebp
- punpcklqdq %xmm0,%xmm9
- movl %edx,%edi
- xorl %eax,%esi
- pxor %xmm2,%xmm1
- roll $5,%edx
- addl %esi,%ecx
- movdqa %xmm10,%xmm8
- xorl %ebp,%edi
- paddd %xmm0,%xmm10
- xorl %eax,%ebp
- pxor %xmm9,%xmm1
- addl %edx,%ecx
- addl 20(%rsp),%ebx
- andl %ebp,%edi
- xorl %eax,%ebp
- rorl $7,%edx
- movdqa %xmm1,%xmm9
- movl %ecx,%esi
- xorl %ebp,%edi
- movdqa %xmm10,0(%rsp)
- roll $5,%ecx
- addl %edi,%ebx
- xorl %edx,%esi
- pslld $2,%xmm1
- xorl %ebp,%edx
- addl %ecx,%ebx
- psrld $30,%xmm9
- addl 24(%rsp),%eax
- andl %edx,%esi
- xorl %ebp,%edx
- por %xmm9,%xmm1
- rorl $7,%ecx
- movl %ebx,%edi
- xorl %edx,%esi
- roll $5,%ebx
- pshufd $238,%xmm0,%xmm10
- addl %esi,%eax
- xorl %ecx,%edi
- xorl %edx,%ecx
- addl %ebx,%eax
- addl 28(%rsp),%ebp
- andl %ecx,%edi
- xorl %edx,%ecx
- rorl $7,%ebx
- movl %eax,%esi
- xorl %ecx,%edi
- roll $5,%eax
- addl %edi,%ebp
- xorl %ebx,%esi
- xorl %ecx,%ebx
- addl %eax,%ebp
- pxor %xmm6,%xmm2
- addl 32(%rsp),%edx
- andl %ebx,%esi
- xorl %ecx,%ebx
- rorl $7,%eax
- punpcklqdq %xmm1,%xmm10
- movl %ebp,%edi
- xorl %ebx,%esi
- pxor %xmm3,%xmm2
- roll $5,%ebp
- addl %esi,%edx
- movdqa %xmm8,%xmm9
- xorl %eax,%edi
- paddd %xmm1,%xmm8
- xorl %ebx,%eax
- pxor %xmm10,%xmm2
- addl %ebp,%edx
- addl 36(%rsp),%ecx
- andl %eax,%edi
- xorl %ebx,%eax
- rorl $7,%ebp
- movdqa %xmm2,%xmm10
- movl %edx,%esi
- xorl %eax,%edi
- movdqa %xmm8,16(%rsp)
- roll $5,%edx
- addl %edi,%ecx
- xorl %ebp,%esi
- pslld $2,%xmm2
- xorl %eax,%ebp
- addl %edx,%ecx
- psrld $30,%xmm10
- addl 40(%rsp),%ebx
- andl %ebp,%esi
- xorl %eax,%ebp
- por %xmm10,%xmm2
- rorl $7,%edx
- movl %ecx,%edi
- xorl %ebp,%esi
- roll $5,%ecx
- pshufd $238,%xmm1,%xmm8
- addl %esi,%ebx
- xorl %edx,%edi
- xorl %ebp,%edx
- addl %ecx,%ebx
- addl 44(%rsp),%eax
- andl %edx,%edi
- xorl %ebp,%edx
- rorl $7,%ecx
- movl %ebx,%esi
- xorl %edx,%edi
- roll $5,%ebx
- addl %edi,%eax
- xorl %edx,%esi
- addl %ebx,%eax
- pxor %xmm7,%xmm3
- addl 48(%rsp),%ebp
- xorl %ecx,%esi
- punpcklqdq %xmm2,%xmm8
- movl %eax,%edi
- roll $5,%eax
- pxor %xmm4,%xmm3
- addl %esi,%ebp
- xorl %ecx,%edi
- movdqa %xmm9,%xmm10
- rorl $7,%ebx
- paddd %xmm2,%xmm9
- addl %eax,%ebp
- pxor %xmm8,%xmm3
- addl 52(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- roll $5,%ebp
- movdqa %xmm3,%xmm8
- addl %edi,%edx
- xorl %ebx,%esi
- movdqa %xmm9,32(%rsp)
- rorl $7,%eax
- addl %ebp,%edx
- addl 56(%rsp),%ecx
- pslld $2,%xmm3
- xorl %eax,%esi
- movl %edx,%edi
- psrld $30,%xmm8
- roll $5,%edx
- addl %esi,%ecx
- xorl %eax,%edi
- rorl $7,%ebp
- por %xmm8,%xmm3
- addl %edx,%ecx
- addl 60(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- roll $5,%ecx
- addl %edi,%ebx
- xorl %ebp,%esi
- rorl $7,%edx
- addl %ecx,%ebx
- addl 0(%rsp),%eax
- xorl %edx,%esi
- movl %ebx,%edi
- roll $5,%ebx
- paddd %xmm3,%xmm10
- addl %esi,%eax
- xorl %edx,%edi
- movdqa %xmm10,48(%rsp)
- rorl $7,%ecx
- addl %ebx,%eax
- addl 4(%rsp),%ebp
- xorl %ecx,%edi
- movl %eax,%esi
- roll $5,%eax
- addl %edi,%ebp
- xorl %ecx,%esi
- rorl $7,%ebx
- addl %eax,%ebp
- addl 8(%rsp),%edx
- xorl %ebx,%esi
- movl %ebp,%edi
- roll $5,%ebp
- addl %esi,%edx
- xorl %ebx,%edi
- rorl $7,%eax
- addl %ebp,%edx
- addl 12(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- roll $5,%edx
- addl %edi,%ecx
- xorl %eax,%esi
- rorl $7,%ebp
- addl %edx,%ecx
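- # Input exhausted? If so, take the tail; otherwise the next 64-byte block is
- # loaded and byte-swapped below while the final rounds of this block run.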
- cmpq %r10,%r9
- je L$done_ssse3
- movdqa 64(%r14),%xmm6
- movdqa -64(%r14),%xmm9
- movdqu 0(%r9),%xmm0
- movdqu 16(%r9),%xmm1
- movdqu 32(%r9),%xmm2
- movdqu 48(%r9),%xmm3
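- # .byte 102,15,56,0,198 encodes pshufb %xmm6,%xmm0 (emitted as raw bytes for
- # pre-SSSE3 assemblers); 206/214/222 below shuffle %xmm1/%xmm2/%xmm3 the same way.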
- .byte 102,15,56,0,198
- addq $64,%r9
- addl 16(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- .byte 102,15,56,0,206
- roll $5,%ecx
- addl %esi,%ebx
- xorl %ebp,%edi
- rorl $7,%edx
- paddd %xmm9,%xmm0
- addl %ecx,%ebx
- addl 20(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- movdqa %xmm0,0(%rsp)
- roll $5,%ebx
- addl %edi,%eax
- xorl %edx,%esi
- rorl $7,%ecx
- psubd %xmm9,%xmm0
- addl %ebx,%eax
- addl 24(%rsp),%ebp
- xorl %ecx,%esi
- movl %eax,%edi
- roll $5,%eax
- addl %esi,%ebp
- xorl %ecx,%edi
- rorl $7,%ebx
- addl %eax,%ebp
- addl 28(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- roll $5,%ebp
- addl %edi,%edx
- xorl %ebx,%esi
- rorl $7,%eax
- addl %ebp,%edx
- addl 32(%rsp),%ecx
- xorl %eax,%esi
- movl %edx,%edi
- .byte 102,15,56,0,214
- roll $5,%edx
- addl %esi,%ecx
- xorl %eax,%edi
- rorl $7,%ebp
- paddd %xmm9,%xmm1
- addl %edx,%ecx
- addl 36(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- movdqa %xmm1,16(%rsp)
- roll $5,%ecx
- addl %edi,%ebx
- xorl %ebp,%esi
- rorl $7,%edx
- psubd %xmm9,%xmm1
- addl %ecx,%ebx
- addl 40(%rsp),%eax
- xorl %edx,%esi
- movl %ebx,%edi
- roll $5,%ebx
- addl %esi,%eax
- xorl %edx,%edi
- rorl $7,%ecx
- addl %ebx,%eax
- addl 44(%rsp),%ebp
- xorl %ecx,%edi
- movl %eax,%esi
- roll $5,%eax
- addl %edi,%ebp
- xorl %ecx,%esi
- rorl $7,%ebx
- addl %eax,%ebp
- addl 48(%rsp),%edx
- xorl %ebx,%esi
- movl %ebp,%edi
- .byte 102,15,56,0,222
- roll $5,%ebp
- addl %esi,%edx
- xorl %ebx,%edi
- rorl $7,%eax
- paddd %xmm9,%xmm2
- addl %ebp,%edx
- addl 52(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- movdqa %xmm2,32(%rsp)
- roll $5,%edx
- addl %edi,%ecx
- xorl %eax,%esi
- rorl $7,%ebp
- psubd %xmm9,%xmm2
- addl %edx,%ecx
- addl 56(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- roll $5,%ecx
- addl %esi,%ebx
- xorl %ebp,%edi
- rorl $7,%edx
- addl %ecx,%ebx
- addl 60(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- roll $5,%ebx
- addl %edi,%eax
- rorl $7,%ecx
- addl %ebx,%eax
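- # Fold the working variables back into the state at %r8 and re-seed
- # %esi/%edi before looping for the next block.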
- addl 0(%r8),%eax
- addl 4(%r8),%esi
- addl 8(%r8),%ecx
- addl 12(%r8),%edx
- movl %eax,0(%r8)
- addl 16(%r8),%ebp
- movl %esi,4(%r8)
- movl %esi,%ebx
- movl %ecx,8(%r8)
- movl %ecx,%edi
- movl %edx,12(%r8)
- xorl %edx,%edi
- movl %ebp,16(%r8)
- andl %edi,%esi
- jmp L$oop_ssse3
- .p2align 4
- L$done_ssse3:
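- # Tail of the last block: the remaining rounds with no next-block schedule.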
- addl 16(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- roll $5,%ecx
- addl %esi,%ebx
- xorl %ebp,%edi
- rorl $7,%edx
- addl %ecx,%ebx
- addl 20(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- roll $5,%ebx
- addl %edi,%eax
- xorl %edx,%esi
- rorl $7,%ecx
- addl %ebx,%eax
- addl 24(%rsp),%ebp
- xorl %ecx,%esi
- movl %eax,%edi
- roll $5,%eax
- addl %esi,%ebp
- xorl %ecx,%edi
- rorl $7,%ebx
- addl %eax,%ebp
- addl 28(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- roll $5,%ebp
- addl %edi,%edx
- xorl %ebx,%esi
- rorl $7,%eax
- addl %ebp,%edx
- addl 32(%rsp),%ecx
- xorl %eax,%esi
- movl %edx,%edi
- roll $5,%edx
- addl %esi,%ecx
- xorl %eax,%edi
- rorl $7,%ebp
- addl %edx,%ecx
- addl 36(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- roll $5,%ecx
- addl %edi,%ebx
- xorl %ebp,%esi
- rorl $7,%edx
- addl %ecx,%ebx
- addl 40(%rsp),%eax
- xorl %edx,%esi
- movl %ebx,%edi
- roll $5,%ebx
- addl %esi,%eax
- xorl %edx,%edi
- rorl $7,%ecx
- addl %ebx,%eax
- addl 44(%rsp),%ebp
- xorl %ecx,%edi
- movl %eax,%esi
- roll $5,%eax
- addl %edi,%ebp
- xorl %ecx,%esi
- rorl $7,%ebx
- addl %eax,%ebp
- addl 48(%rsp),%edx
- xorl %ebx,%esi
- movl %ebp,%edi
- roll $5,%ebp
- addl %esi,%edx
- xorl %ebx,%edi
- rorl $7,%eax
- addl %ebp,%edx
- addl 52(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- roll $5,%edx
- addl %edi,%ecx
- xorl %eax,%esi
- rorl $7,%ebp
- addl %edx,%ecx
- addl 56(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- roll $5,%ecx
- addl %esi,%ebx
- xorl %ebp,%edi
- rorl $7,%edx
- addl %ecx,%ebx
- addl 60(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- roll $5,%ebx
- addl %edi,%eax
- rorl $7,%ecx
- addl %ebx,%eax
- addl 0(%r8),%eax
- addl 4(%r8),%esi
- addl 8(%r8),%ecx
- movl %eax,0(%r8)
- addl 12(%r8),%edx
- movl %esi,4(%r8)
- addl 16(%r8),%ebp
- movl %ecx,8(%r8)
- movl %edx,12(%r8)
- movl %ebp,16(%r8)
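- # Unwind: callee-saved registers were pushed below the caller's %rsp, which
- # was stashed in %r11 at entry.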
- movq -40(%r11),%r14
- movq -32(%r11),%r13
- movq -24(%r11),%r12
- movq -16(%r11),%rbp
- movq -8(%r11),%rbx
- leaq (%r11),%rsp
- L$epilogue_ssse3:
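- # 0xf3,0xc3 is "rep ret", historically friendlier to AMD branch predictors;
- # emitted as raw bytes so the assembler keeps the redundant prefix.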
- .byte 0xf3,0xc3
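- # AVX build of the same routine: identical round and message-schedule
- # structure as the SSSE3 path, using three-operand VEX instructions.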
- .p2align 4
- sha1_block_data_order_avx:
- _avx_shortcut:
- movq %rsp,%r11
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- leaq -64(%rsp),%rsp
- vzeroupper
- andq $-64,%rsp
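- # Arguments land as %r8 = state, %r9 = input, %r10 = input end (blocks*64);
- # %r14 points into the K_XX_XX constants (byte-swap mask at +64).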
- movq %rdi,%r8
- movq %rsi,%r9
- movq %rdx,%r10
- shlq $6,%r10
- addq %r9,%r10
- leaq K_XX_XX+64(%rip),%r14
- movl 0(%r8),%eax
- movl 4(%r8),%ebx
- movl 8(%r8),%ecx
- movl 12(%r8),%edx
- movl %ebx,%esi
- movl 16(%r8),%ebp
- movl %ecx,%edi
- xorl %edx,%edi
- andl %edi,%esi
- vmovdqa 64(%r14),%xmm6
- vmovdqa -64(%r14),%xmm11
- vmovdqu 0(%r9),%xmm0
- vmovdqu 16(%r9),%xmm1
- vmovdqu 32(%r9),%xmm2
- vmovdqu 48(%r9),%xmm3
- vpshufb %xmm6,%xmm0,%xmm0
- addq $64,%r9
- vpshufb %xmm6,%xmm1,%xmm1
- vpshufb %xmm6,%xmm2,%xmm2
- vpshufb %xmm6,%xmm3,%xmm3
- vpaddd %xmm11,%xmm0,%xmm4
- vpaddd %xmm11,%xmm1,%xmm5
- vpaddd %xmm11,%xmm2,%xmm6
- vmovdqa %xmm4,0(%rsp)
- vmovdqa %xmm5,16(%rsp)
- vmovdqa %xmm6,32(%rsp)
- jmp L$oop_avx
- .p2align 4
- L$oop_avx:
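- # One 64-byte block per iteration; vpalignr/vpxor/vpslld message expansion
- # is interleaved with the scalar rounds.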
- shrdl $2,%ebx,%ebx
- xorl %edx,%esi
- vpalignr $8,%xmm0,%xmm1,%xmm4
- movl %eax,%edi
- addl 0(%rsp),%ebp
- vpaddd %xmm3,%xmm11,%xmm9
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- vpsrldq $4,%xmm3,%xmm8
- addl %esi,%ebp
- andl %ebx,%edi
- vpxor %xmm0,%xmm4,%xmm4
- xorl %ecx,%ebx
- addl %eax,%ebp
- vpxor %xmm2,%xmm8,%xmm8
- shrdl $7,%eax,%eax
- xorl %ecx,%edi
- movl %ebp,%esi
- addl 4(%rsp),%edx
- vpxor %xmm8,%xmm4,%xmm4
- xorl %ebx,%eax
- shldl $5,%ebp,%ebp
- vmovdqa %xmm9,48(%rsp)
- addl %edi,%edx
- andl %eax,%esi
- vpsrld $31,%xmm4,%xmm8
- xorl %ebx,%eax
- addl %ebp,%edx
- shrdl $7,%ebp,%ebp
- xorl %ebx,%esi
- vpslldq $12,%xmm4,%xmm10
- vpaddd %xmm4,%xmm4,%xmm4
- movl %edx,%edi
- addl 8(%rsp),%ecx
- xorl %eax,%ebp
- shldl $5,%edx,%edx
- vpsrld $30,%xmm10,%xmm9
- vpor %xmm8,%xmm4,%xmm4
- addl %esi,%ecx
- andl %ebp,%edi
- xorl %eax,%ebp
- addl %edx,%ecx
- vpslld $2,%xmm10,%xmm10
- vpxor %xmm9,%xmm4,%xmm4
- shrdl $7,%edx,%edx
- xorl %eax,%edi
- movl %ecx,%esi
- addl 12(%rsp),%ebx
- vpxor %xmm10,%xmm4,%xmm4
- xorl %ebp,%edx
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- andl %edx,%esi
- xorl %ebp,%edx
- addl %ecx,%ebx
- shrdl $7,%ecx,%ecx
- xorl %ebp,%esi
- vpalignr $8,%xmm1,%xmm2,%xmm5
- movl %ebx,%edi
- addl 16(%rsp),%eax
- vpaddd %xmm4,%xmm11,%xmm9
- xorl %edx,%ecx
- shldl $5,%ebx,%ebx
- vpsrldq $4,%xmm4,%xmm8
- addl %esi,%eax
- andl %ecx,%edi
- vpxor %xmm1,%xmm5,%xmm5
- xorl %edx,%ecx
- addl %ebx,%eax
- vpxor %xmm3,%xmm8,%xmm8
- shrdl $7,%ebx,%ebx
- xorl %edx,%edi
- movl %eax,%esi
- addl 20(%rsp),%ebp
- vpxor %xmm8,%xmm5,%xmm5
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- vmovdqa %xmm9,0(%rsp)
- addl %edi,%ebp
- andl %ebx,%esi
- vpsrld $31,%xmm5,%xmm8
- xorl %ecx,%ebx
- addl %eax,%ebp
- shrdl $7,%eax,%eax
- xorl %ecx,%esi
- vpslldq $12,%xmm5,%xmm10
- vpaddd %xmm5,%xmm5,%xmm5
- movl %ebp,%edi
- addl 24(%rsp),%edx
- xorl %ebx,%eax
- shldl $5,%ebp,%ebp
- vpsrld $30,%xmm10,%xmm9
- vpor %xmm8,%xmm5,%xmm5
- addl %esi,%edx
- andl %eax,%edi
- xorl %ebx,%eax
- addl %ebp,%edx
- vpslld $2,%xmm10,%xmm10
- vpxor %xmm9,%xmm5,%xmm5
- shrdl $7,%ebp,%ebp
- xorl %ebx,%edi
- movl %edx,%esi
- addl 28(%rsp),%ecx
- vpxor %xmm10,%xmm5,%xmm5
- xorl %eax,%ebp
- shldl $5,%edx,%edx
- vmovdqa -32(%r14),%xmm11
- addl %edi,%ecx
- andl %ebp,%esi
- xorl %eax,%ebp
- addl %edx,%ecx
- shrdl $7,%edx,%edx
- xorl %eax,%esi
- vpalignr $8,%xmm2,%xmm3,%xmm6
- movl %ecx,%edi
- addl 32(%rsp),%ebx
- vpaddd %xmm5,%xmm11,%xmm9
- xorl %ebp,%edx
- shldl $5,%ecx,%ecx
- vpsrldq $4,%xmm5,%xmm8
- addl %esi,%ebx
- andl %edx,%edi
- vpxor %xmm2,%xmm6,%xmm6
- xorl %ebp,%edx
- addl %ecx,%ebx
- vpxor %xmm4,%xmm8,%xmm8
- shrdl $7,%ecx,%ecx
- xorl %ebp,%edi
- movl %ebx,%esi
- addl 36(%rsp),%eax
- vpxor %xmm8,%xmm6,%xmm6
- xorl %edx,%ecx
- shldl $5,%ebx,%ebx
- vmovdqa %xmm9,16(%rsp)
- addl %edi,%eax
- andl %ecx,%esi
- vpsrld $31,%xmm6,%xmm8
- xorl %edx,%ecx
- addl %ebx,%eax
- shrdl $7,%ebx,%ebx
- xorl %edx,%esi
- vpslldq $12,%xmm6,%xmm10
- vpaddd %xmm6,%xmm6,%xmm6
- movl %eax,%edi
- addl 40(%rsp),%ebp
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- vpsrld $30,%xmm10,%xmm9
- vpor %xmm8,%xmm6,%xmm6
- addl %esi,%ebp
- andl %ebx,%edi
- xorl %ecx,%ebx
- addl %eax,%ebp
- vpslld $2,%xmm10,%xmm10
- vpxor %xmm9,%xmm6,%xmm6
- shrdl $7,%eax,%eax
- xorl %ecx,%edi
- movl %ebp,%esi
- addl 44(%rsp),%edx
- vpxor %xmm10,%xmm6,%xmm6
- xorl %ebx,%eax
- shldl $5,%ebp,%ebp
- addl %edi,%edx
- andl %eax,%esi
- xorl %ebx,%eax
- addl %ebp,%edx
- shrdl $7,%ebp,%ebp
- xorl %ebx,%esi
- vpalignr $8,%xmm3,%xmm4,%xmm7
- movl %edx,%edi
- addl 48(%rsp),%ecx
- vpaddd %xmm6,%xmm11,%xmm9
- xorl %eax,%ebp
- shldl $5,%edx,%edx
- vpsrldq $4,%xmm6,%xmm8
- addl %esi,%ecx
- andl %ebp,%edi
- vpxor %xmm3,%xmm7,%xmm7
- xorl %eax,%ebp
- addl %edx,%ecx
- vpxor %xmm5,%xmm8,%xmm8
- shrdl $7,%edx,%edx
- xorl %eax,%edi
- movl %ecx,%esi
- addl 52(%rsp),%ebx
- vpxor %xmm8,%xmm7,%xmm7
- xorl %ebp,%edx
- shldl $5,%ecx,%ecx
- vmovdqa %xmm9,32(%rsp)
- addl %edi,%ebx
- andl %edx,%esi
- vpsrld $31,%xmm7,%xmm8
- xorl %ebp,%edx
- addl %ecx,%ebx
- shrdl $7,%ecx,%ecx
- xorl %ebp,%esi
- vpslldq $12,%xmm7,%xmm10
- vpaddd %xmm7,%xmm7,%xmm7
- movl %ebx,%edi
- addl 56(%rsp),%eax
- xorl %edx,%ecx
- shldl $5,%ebx,%ebx
- vpsrld $30,%xmm10,%xmm9
- vpor %xmm8,%xmm7,%xmm7
- addl %esi,%eax
- andl %ecx,%edi
- xorl %edx,%ecx
- addl %ebx,%eax
- vpslld $2,%xmm10,%xmm10
- vpxor %xmm9,%xmm7,%xmm7
- shrdl $7,%ebx,%ebx
- xorl %edx,%edi
- movl %eax,%esi
- addl 60(%rsp),%ebp
- vpxor %xmm10,%xmm7,%xmm7
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- addl %edi,%ebp
- andl %ebx,%esi
- xorl %ecx,%ebx
- addl %eax,%ebp
- vpalignr $8,%xmm6,%xmm7,%xmm8
- vpxor %xmm4,%xmm0,%xmm0
- shrdl $7,%eax,%eax
- xorl %ecx,%esi
- movl %ebp,%edi
- addl 0(%rsp),%edx
- vpxor %xmm1,%xmm0,%xmm0
- xorl %ebx,%eax
- shldl $5,%ebp,%ebp
- vpaddd %xmm7,%xmm11,%xmm9
- addl %esi,%edx
- andl %eax,%edi
- vpxor %xmm8,%xmm0,%xmm0
- xorl %ebx,%eax
- addl %ebp,%edx
- shrdl $7,%ebp,%ebp
- xorl %ebx,%edi
- vpsrld $30,%xmm0,%xmm8
- vmovdqa %xmm9,48(%rsp)
- movl %edx,%esi
- addl 4(%rsp),%ecx
- xorl %eax,%ebp
- shldl $5,%edx,%edx
- vpslld $2,%xmm0,%xmm0
- addl %edi,%ecx
- andl %ebp,%esi
- xorl %eax,%ebp
- addl %edx,%ecx
- shrdl $7,%edx,%edx
- xorl %eax,%esi
- movl %ecx,%edi
- addl 8(%rsp),%ebx
- vpor %xmm8,%xmm0,%xmm0
- xorl %ebp,%edx
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- andl %edx,%edi
- xorl %ebp,%edx
- addl %ecx,%ebx
- addl 12(%rsp),%eax
- xorl %ebp,%edi
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpalignr $8,%xmm7,%xmm0,%xmm8
- vpxor %xmm5,%xmm1,%xmm1
- addl 16(%rsp),%ebp
- xorl %ecx,%esi
- movl %eax,%edi
- shldl $5,%eax,%eax
- vpxor %xmm2,%xmm1,%xmm1
- addl %esi,%ebp
- xorl %ecx,%edi
- vpaddd %xmm0,%xmm11,%xmm9
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- vpxor %xmm8,%xmm1,%xmm1
- addl 20(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- shldl $5,%ebp,%ebp
- vpsrld $30,%xmm1,%xmm8
- vmovdqa %xmm9,0(%rsp)
- addl %edi,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- vpslld $2,%xmm1,%xmm1
- addl 24(%rsp),%ecx
- xorl %eax,%esi
- movl %edx,%edi
- shldl $5,%edx,%edx
- addl %esi,%ecx
- xorl %eax,%edi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- vpor %xmm8,%xmm1,%xmm1
- addl 28(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- xorl %ebp,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpalignr $8,%xmm0,%xmm1,%xmm8
- vpxor %xmm6,%xmm2,%xmm2
- addl 32(%rsp),%eax
- xorl %edx,%esi
- movl %ebx,%edi
- shldl $5,%ebx,%ebx
- vpxor %xmm3,%xmm2,%xmm2
- addl %esi,%eax
- xorl %edx,%edi
- vpaddd %xmm1,%xmm11,%xmm9
- vmovdqa 0(%r14),%xmm11
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpxor %xmm8,%xmm2,%xmm2
- addl 36(%rsp),%ebp
- xorl %ecx,%edi
- movl %eax,%esi
- shldl $5,%eax,%eax
- vpsrld $30,%xmm2,%xmm8
- vmovdqa %xmm9,16(%rsp)
- addl %edi,%ebp
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- vpslld $2,%xmm2,%xmm2
- addl 40(%rsp),%edx
- xorl %ebx,%esi
- movl %ebp,%edi
- shldl $5,%ebp,%ebp
- addl %esi,%edx
- xorl %ebx,%edi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- vpor %xmm8,%xmm2,%xmm2
- addl 44(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %edi,%ecx
- xorl %eax,%esi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- vpalignr $8,%xmm1,%xmm2,%xmm8
- vpxor %xmm7,%xmm3,%xmm3
- addl 48(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- shldl $5,%ecx,%ecx
- vpxor %xmm4,%xmm3,%xmm3
- addl %esi,%ebx
- xorl %ebp,%edi
- vpaddd %xmm2,%xmm11,%xmm9
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpxor %xmm8,%xmm3,%xmm3
- addl 52(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- vpsrld $30,%xmm3,%xmm8
- vmovdqa %xmm9,32(%rsp)
- addl %edi,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpslld $2,%xmm3,%xmm3
- addl 56(%rsp),%ebp
- xorl %ecx,%esi
- movl %eax,%edi
- shldl $5,%eax,%eax
- addl %esi,%ebp
- xorl %ecx,%edi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- vpor %xmm8,%xmm3,%xmm3
- addl 60(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- shldl $5,%ebp,%ebp
- addl %edi,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- vpalignr $8,%xmm2,%xmm3,%xmm8
- vpxor %xmm0,%xmm4,%xmm4
- addl 0(%rsp),%ecx
- xorl %eax,%esi
- movl %edx,%edi
- shldl $5,%edx,%edx
- vpxor %xmm5,%xmm4,%xmm4
- addl %esi,%ecx
- xorl %eax,%edi
- vpaddd %xmm3,%xmm11,%xmm9
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- vpxor %xmm8,%xmm4,%xmm4
- addl 4(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- vpsrld $30,%xmm4,%xmm8
- vmovdqa %xmm9,48(%rsp)
- addl %edi,%ebx
- xorl %ebp,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpslld $2,%xmm4,%xmm4
- addl 8(%rsp),%eax
- xorl %edx,%esi
- movl %ebx,%edi
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %edx,%edi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpor %xmm8,%xmm4,%xmm4
- addl 12(%rsp),%ebp
- xorl %ecx,%edi
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %edi,%ebp
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- vpalignr $8,%xmm3,%xmm4,%xmm8
- vpxor %xmm1,%xmm5,%xmm5
- addl 16(%rsp),%edx
- xorl %ebx,%esi
- movl %ebp,%edi
- shldl $5,%ebp,%ebp
- vpxor %xmm6,%xmm5,%xmm5
- addl %esi,%edx
- xorl %ebx,%edi
- vpaddd %xmm4,%xmm11,%xmm9
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- vpxor %xmm8,%xmm5,%xmm5
- addl 20(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- shldl $5,%edx,%edx
- vpsrld $30,%xmm5,%xmm8
- vmovdqa %xmm9,0(%rsp)
- addl %edi,%ecx
- xorl %eax,%esi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- vpslld $2,%xmm5,%xmm5
- addl 24(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %ebp,%edi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpor %xmm8,%xmm5,%xmm5
- addl 28(%rsp),%eax
- shrdl $7,%ecx,%ecx
- movl %ebx,%esi
- xorl %edx,%edi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- xorl %ecx,%esi
- xorl %edx,%ecx
- addl %ebx,%eax
- vpalignr $8,%xmm4,%xmm5,%xmm8
- vpxor %xmm2,%xmm6,%xmm6
- addl 32(%rsp),%ebp
- andl %ecx,%esi
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- vpxor %xmm7,%xmm6,%xmm6
- movl %eax,%edi
- xorl %ecx,%esi
- vpaddd %xmm5,%xmm11,%xmm9
- shldl $5,%eax,%eax
- addl %esi,%ebp
- vpxor %xmm8,%xmm6,%xmm6
- xorl %ebx,%edi
- xorl %ecx,%ebx
- addl %eax,%ebp
- addl 36(%rsp),%edx
- vpsrld $30,%xmm6,%xmm8
- vmovdqa %xmm9,16(%rsp)
- andl %ebx,%edi
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- movl %ebp,%esi
- vpslld $2,%xmm6,%xmm6
- xorl %ebx,%edi
- shldl $5,%ebp,%ebp
- addl %edi,%edx
- xorl %eax,%esi
- xorl %ebx,%eax
- addl %ebp,%edx
- addl 40(%rsp),%ecx
- andl %eax,%esi
- vpor %xmm8,%xmm6,%xmm6
- xorl %ebx,%eax
- shrdl $7,%ebp,%ebp
- movl %edx,%edi
- xorl %eax,%esi
- shldl $5,%edx,%edx
- addl %esi,%ecx
- xorl %ebp,%edi
- xorl %eax,%ebp
- addl %edx,%ecx
- addl 44(%rsp),%ebx
- andl %ebp,%edi
- xorl %eax,%ebp
- shrdl $7,%edx,%edx
- movl %ecx,%esi
- xorl %ebp,%edi
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- xorl %edx,%esi
- xorl %ebp,%edx
- addl %ecx,%ebx
- vpalignr $8,%xmm5,%xmm6,%xmm8
- vpxor %xmm3,%xmm7,%xmm7
- addl 48(%rsp),%eax
- andl %edx,%esi
- xorl %ebp,%edx
- shrdl $7,%ecx,%ecx
- vpxor %xmm0,%xmm7,%xmm7
- movl %ebx,%edi
- xorl %edx,%esi
- vpaddd %xmm6,%xmm11,%xmm9
- vmovdqa 32(%r14),%xmm11
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- vpxor %xmm8,%xmm7,%xmm7
- xorl %ecx,%edi
- xorl %edx,%ecx
- addl %ebx,%eax
- addl 52(%rsp),%ebp
- vpsrld $30,%xmm7,%xmm8
- vmovdqa %xmm9,32(%rsp)
- andl %ecx,%edi
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- movl %eax,%esi
- vpslld $2,%xmm7,%xmm7
- xorl %ecx,%edi
- shldl $5,%eax,%eax
- addl %edi,%ebp
- xorl %ebx,%esi
- xorl %ecx,%ebx
- addl %eax,%ebp
- addl 56(%rsp),%edx
- andl %ebx,%esi
- vpor %xmm8,%xmm7,%xmm7
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- movl %ebp,%edi
- xorl %ebx,%esi
- shldl $5,%ebp,%ebp
- addl %esi,%edx
- xorl %eax,%edi
- xorl %ebx,%eax
- addl %ebp,%edx
- addl 60(%rsp),%ecx
- andl %eax,%edi
- xorl %ebx,%eax
- shrdl $7,%ebp,%ebp
- movl %edx,%esi
- xorl %eax,%edi
- shldl $5,%edx,%edx
- addl %edi,%ecx
- xorl %ebp,%esi
- xorl %eax,%ebp
- addl %edx,%ecx
- vpalignr $8,%xmm6,%xmm7,%xmm8
- vpxor %xmm4,%xmm0,%xmm0
- addl 0(%rsp),%ebx
- andl %ebp,%esi
- xorl %eax,%ebp
- shrdl $7,%edx,%edx
- vpxor %xmm1,%xmm0,%xmm0
- movl %ecx,%edi
- xorl %ebp,%esi
- vpaddd %xmm7,%xmm11,%xmm9
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- vpxor %xmm8,%xmm0,%xmm0
- xorl %edx,%edi
- xorl %ebp,%edx
- addl %ecx,%ebx
- addl 4(%rsp),%eax
- vpsrld $30,%xmm0,%xmm8
- vmovdqa %xmm9,48(%rsp)
- andl %edx,%edi
- xorl %ebp,%edx
- shrdl $7,%ecx,%ecx
- movl %ebx,%esi
- vpslld $2,%xmm0,%xmm0
- xorl %edx,%edi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- xorl %ecx,%esi
- xorl %edx,%ecx
- addl %ebx,%eax
- addl 8(%rsp),%ebp
- andl %ecx,%esi
- vpor %xmm8,%xmm0,%xmm0
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- movl %eax,%edi
- xorl %ecx,%esi
- shldl $5,%eax,%eax
- addl %esi,%ebp
- xorl %ebx,%edi
- xorl %ecx,%ebx
- addl %eax,%ebp
- addl 12(%rsp),%edx
- andl %ebx,%edi
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- movl %ebp,%esi
- xorl %ebx,%edi
- shldl $5,%ebp,%ebp
- addl %edi,%edx
- xorl %eax,%esi
- xorl %ebx,%eax
- addl %ebp,%edx
- vpalignr $8,%xmm7,%xmm0,%xmm8
- vpxor %xmm5,%xmm1,%xmm1
- addl 16(%rsp),%ecx
- andl %eax,%esi
- xorl %ebx,%eax
- shrdl $7,%ebp,%ebp
- vpxor %xmm2,%xmm1,%xmm1
- movl %edx,%edi
- xorl %eax,%esi
- vpaddd %xmm0,%xmm11,%xmm9
- shldl $5,%edx,%edx
- addl %esi,%ecx
- vpxor %xmm8,%xmm1,%xmm1
- xorl %ebp,%edi
- xorl %eax,%ebp
- addl %edx,%ecx
- addl 20(%rsp),%ebx
- vpsrld $30,%xmm1,%xmm8
- vmovdqa %xmm9,0(%rsp)
- andl %ebp,%edi
- xorl %eax,%ebp
- shrdl $7,%edx,%edx
- movl %ecx,%esi
- vpslld $2,%xmm1,%xmm1
- xorl %ebp,%edi
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- xorl %edx,%esi
- xorl %ebp,%edx
- addl %ecx,%ebx
- addl 24(%rsp),%eax
- andl %edx,%esi
- vpor %xmm8,%xmm1,%xmm1
- xorl %ebp,%edx
- shrdl $7,%ecx,%ecx
- movl %ebx,%edi
- xorl %edx,%esi
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %ecx,%edi
- xorl %edx,%ecx
- addl %ebx,%eax
- addl 28(%rsp),%ebp
- andl %ecx,%edi
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- movl %eax,%esi
- xorl %ecx,%edi
- shldl $5,%eax,%eax
- addl %edi,%ebp
- xorl %ebx,%esi
- xorl %ecx,%ebx
- addl %eax,%ebp
- vpalignr $8,%xmm0,%xmm1,%xmm8
- vpxor %xmm6,%xmm2,%xmm2
- addl 32(%rsp),%edx
- andl %ebx,%esi
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- vpxor %xmm3,%xmm2,%xmm2
- movl %ebp,%edi
- xorl %ebx,%esi
- vpaddd %xmm1,%xmm11,%xmm9
- shldl $5,%ebp,%ebp
- addl %esi,%edx
- vpxor %xmm8,%xmm2,%xmm2
- xorl %eax,%edi
- xorl %ebx,%eax
- addl %ebp,%edx
- addl 36(%rsp),%ecx
- vpsrld $30,%xmm2,%xmm8
- vmovdqa %xmm9,16(%rsp)
- andl %eax,%edi
- xorl %ebx,%eax
- shrdl $7,%ebp,%ebp
- movl %edx,%esi
- vpslld $2,%xmm2,%xmm2
- xorl %eax,%edi
- shldl $5,%edx,%edx
- addl %edi,%ecx
- xorl %ebp,%esi
- xorl %eax,%ebp
- addl %edx,%ecx
- addl 40(%rsp),%ebx
- andl %ebp,%esi
- vpor %xmm8,%xmm2,%xmm2
- xorl %eax,%ebp
- shrdl $7,%edx,%edx
- movl %ecx,%edi
- xorl %ebp,%esi
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %edx,%edi
- xorl %ebp,%edx
- addl %ecx,%ebx
- addl 44(%rsp),%eax
- andl %edx,%edi
- xorl %ebp,%edx
- shrdl $7,%ecx,%ecx
- movl %ebx,%esi
- xorl %edx,%edi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- xorl %edx,%esi
- addl %ebx,%eax
- vpalignr $8,%xmm1,%xmm2,%xmm8
- vpxor %xmm7,%xmm3,%xmm3
- addl 48(%rsp),%ebp
- xorl %ecx,%esi
- movl %eax,%edi
- shldl $5,%eax,%eax
- vpxor %xmm4,%xmm3,%xmm3
- addl %esi,%ebp
- xorl %ecx,%edi
- vpaddd %xmm2,%xmm11,%xmm9
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- vpxor %xmm8,%xmm3,%xmm3
- addl 52(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- shldl $5,%ebp,%ebp
- vpsrld $30,%xmm3,%xmm8
- vmovdqa %xmm9,32(%rsp)
- addl %edi,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- vpslld $2,%xmm3,%xmm3
- addl 56(%rsp),%ecx
- xorl %eax,%esi
- movl %edx,%edi
- shldl $5,%edx,%edx
- addl %esi,%ecx
- xorl %eax,%edi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- vpor %xmm8,%xmm3,%xmm3
- addl 60(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- xorl %ebp,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 0(%rsp),%eax
- vpaddd %xmm3,%xmm11,%xmm9
- xorl %edx,%esi
- movl %ebx,%edi
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- vmovdqa %xmm9,48(%rsp)
- xorl %edx,%edi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 4(%rsp),%ebp
- xorl %ecx,%edi
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %edi,%ebp
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- addl 8(%rsp),%edx
- xorl %ebx,%esi
- movl %ebp,%edi
- shldl $5,%ebp,%ebp
- addl %esi,%edx
- xorl %ebx,%edi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- addl 12(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %edi,%ecx
- xorl %eax,%esi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- cmpq %r10,%r9
- je L$done_avx
- vmovdqa 64(%r14),%xmm6
- vmovdqa -64(%r14),%xmm11
- vmovdqu 0(%r9),%xmm0
- vmovdqu 16(%r9),%xmm1
- vmovdqu 32(%r9),%xmm2
- vmovdqu 48(%r9),%xmm3
- vpshufb %xmm6,%xmm0,%xmm0
- addq $64,%r9
- addl 16(%rsp),%ebx
- xorl %ebp,%esi
- vpshufb %xmm6,%xmm1,%xmm1
- movl %ecx,%edi
- shldl $5,%ecx,%ecx
- vpaddd %xmm11,%xmm0,%xmm4
- addl %esi,%ebx
- xorl %ebp,%edi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vmovdqa %xmm4,0(%rsp)
- addl 20(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 24(%rsp),%ebp
- xorl %ecx,%esi
- movl %eax,%edi
- shldl $5,%eax,%eax
- addl %esi,%ebp
- xorl %ecx,%edi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- addl 28(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- shldl $5,%ebp,%ebp
- addl %edi,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- addl 32(%rsp),%ecx
- xorl %eax,%esi
- vpshufb %xmm6,%xmm2,%xmm2
- movl %edx,%edi
- shldl $5,%edx,%edx
- vpaddd %xmm11,%xmm1,%xmm5
- addl %esi,%ecx
- xorl %eax,%edi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- vmovdqa %xmm5,16(%rsp)
- addl 36(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- xorl %ebp,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 40(%rsp),%eax
- xorl %edx,%esi
- movl %ebx,%edi
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %edx,%edi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 44(%rsp),%ebp
- xorl %ecx,%edi
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %edi,%ebp
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- addl 48(%rsp),%edx
- xorl %ebx,%esi
- vpshufb %xmm6,%xmm3,%xmm3
- movl %ebp,%edi
- shldl $5,%ebp,%ebp
- vpaddd %xmm11,%xmm2,%xmm6
- addl %esi,%edx
- xorl %ebx,%edi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- vmovdqa %xmm6,32(%rsp)
- addl 52(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %edi,%ecx
- xorl %eax,%esi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- addl 56(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %ebp,%edi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 60(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 0(%r8),%eax
- addl 4(%r8),%esi
- addl 8(%r8),%ecx
- addl 12(%r8),%edx
- movl %eax,0(%r8)
- addl 16(%r8),%ebp
- movl %esi,4(%r8)
- movl %esi,%ebx
- movl %ecx,8(%r8)
- movl %ecx,%edi
- movl %edx,12(%r8)
- xorl %edx,%edi
- movl %ebp,16(%r8)
- andl %edi,%esi
- jmp L$oop_avx
- .p2align 4
- L$done_avx:
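- # Final-block tail for the AVX path, mirroring L$done_ssse3.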
- addl 16(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %ebp,%edi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 20(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 24(%rsp),%ebp
- xorl %ecx,%esi
- movl %eax,%edi
- shldl $5,%eax,%eax
- addl %esi,%ebp
- xorl %ecx,%edi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- addl 28(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- shldl $5,%ebp,%ebp
- addl %edi,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- addl 32(%rsp),%ecx
- xorl %eax,%esi
- movl %edx,%edi
- shldl $5,%edx,%edx
- addl %esi,%ecx
- xorl %eax,%edi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- addl 36(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- xorl %ebp,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 40(%rsp),%eax
- xorl %edx,%esi
- movl %ebx,%edi
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %edx,%edi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 44(%rsp),%ebp
- xorl %ecx,%edi
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %edi,%ebp
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- addl 48(%rsp),%edx
- xorl %ebx,%esi
- movl %ebp,%edi
- shldl $5,%ebp,%ebp
- addl %esi,%edx
- xorl %ebx,%edi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- addl 52(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %edi,%ecx
- xorl %eax,%esi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- addl 56(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %ebp,%edi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 60(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
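- # Reset the upper ymm halves to avoid AVX/SSE transition penalties once
- # back in the caller.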
- vzeroupper
- addl 0(%r8),%eax
- addl 4(%r8),%esi
- addl 8(%r8),%ecx
- movl %eax,0(%r8)
- addl 12(%r8),%edx
- movl %esi,4(%r8)
- addl 16(%r8),%ebp
- movl %ecx,8(%r8)
- movl %edx,12(%r8)
- movl %ebp,16(%r8)
- movq -40(%r11),%r14
- movq -32(%r11),%r13
- movq -24(%r11),%r12
- movq -16(%r11),%rbp
- movq -8(%r11),%rbx
- leaq (%r11),%rsp
- L$epilogue_avx:
- .byte 0xf3,0xc3
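- # AVX2/BMI2 build: vinserti128 pairs two 64-byte blocks per ymm register so
- # each pass hashes two blocks, and rorx/andn replace the rotate and select
- # sequences in the scalar rounds.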
- .p2align 4
- sha1_block_data_order_avx2:
- _avx2_shortcut:
- movq %rsp,%r11
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- vzeroupper
- movq %rdi,%r8
- movq %rsi,%r9
- movq %rdx,%r10
- leaq -640(%rsp),%rsp
- shlq $6,%r10
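- # %r13 tracks the second block of the pair; cmovae clamps it back to %r9
- # when fewer than two blocks remain, so the high lane reads valid data.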
- leaq 64(%r9),%r13
- andq $-128,%rsp
- addq %r9,%r10
- leaq K_XX_XX+64(%rip),%r14
- movl 0(%r8),%eax
- cmpq %r10,%r13
- cmovaeq %r9,%r13
- movl 4(%r8),%ebp
- movl 8(%r8),%ecx
- movl 12(%r8),%edx
- movl 16(%r8),%esi
- vmovdqu 64(%r14),%ymm6
- vmovdqu (%r9),%xmm0
- vmovdqu 16(%r9),%xmm1
- vmovdqu 32(%r9),%xmm2
- vmovdqu 48(%r9),%xmm3
- leaq 64(%r9),%r9
- vinserti128 $1,(%r13),%ymm0,%ymm0
- vinserti128 $1,16(%r13),%ymm1,%ymm1
- vpshufb %ymm6,%ymm0,%ymm0
- vinserti128 $1,32(%r13),%ymm2,%ymm2
- vpshufb %ymm6,%ymm1,%ymm1
- vinserti128 $1,48(%r13),%ymm3,%ymm3
- vpshufb %ymm6,%ymm2,%ymm2
- vmovdqu -64(%r14),%ymm11
- vpshufb %ymm6,%ymm3,%ymm3
- vpaddd %ymm11,%ymm0,%ymm4
- vpaddd %ymm11,%ymm1,%ymm5
- vmovdqu %ymm4,0(%rsp)
- vpaddd %ymm11,%ymm2,%ymm6
- vmovdqu %ymm5,32(%rsp)
- vpaddd %ymm11,%ymm3,%ymm7
- vmovdqu %ymm6,64(%rsp)
- vmovdqu %ymm7,96(%rsp)
- vpalignr $8,%ymm0,%ymm1,%ymm4
- vpsrldq $4,%ymm3,%ymm8
- vpxor %ymm0,%ymm4,%ymm4
- vpxor %ymm2,%ymm8,%ymm8
- vpxor %ymm8,%ymm4,%ymm4
- vpsrld $31,%ymm4,%ymm8
- vpslldq $12,%ymm4,%ymm10
- vpaddd %ymm4,%ymm4,%ymm4
- vpsrld $30,%ymm10,%ymm9
- vpor %ymm8,%ymm4,%ymm4
- vpslld $2,%ymm10,%ymm10
- vpxor %ymm9,%ymm4,%ymm4
- vpxor %ymm10,%ymm4,%ymm4
- vpaddd %ymm11,%ymm4,%ymm9
- vmovdqu %ymm9,128(%rsp)
- vpalignr $8,%ymm1,%ymm2,%ymm5
- vpsrldq $4,%ymm4,%ymm8
- vpxor %ymm1,%ymm5,%ymm5
- vpxor %ymm3,%ymm8,%ymm8
- vpxor %ymm8,%ymm5,%ymm5
- vpsrld $31,%ymm5,%ymm8
- vmovdqu -32(%r14),%ymm11
- vpslldq $12,%ymm5,%ymm10
- vpaddd %ymm5,%ymm5,%ymm5
- vpsrld $30,%ymm10,%ymm9
- vpor %ymm8,%ymm5,%ymm5
- vpslld $2,%ymm10,%ymm10
- vpxor %ymm9,%ymm5,%ymm5
- vpxor %ymm10,%ymm5,%ymm5
- vpaddd %ymm11,%ymm5,%ymm9
- vmovdqu %ymm9,160(%rsp)
- vpalignr $8,%ymm2,%ymm3,%ymm6
- vpsrldq $4,%ymm5,%ymm8
- vpxor %ymm2,%ymm6,%ymm6
- vpxor %ymm4,%ymm8,%ymm8
- vpxor %ymm8,%ymm6,%ymm6
- vpsrld $31,%ymm6,%ymm8
- vpslldq $12,%ymm6,%ymm10
- vpaddd %ymm6,%ymm6,%ymm6
- vpsrld $30,%ymm10,%ymm9
- vpor %ymm8,%ymm6,%ymm6
- vpslld $2,%ymm10,%ymm10
- vpxor %ymm9,%ymm6,%ymm6
- vpxor %ymm10,%ymm6,%ymm6
- vpaddd %ymm11,%ymm6,%ymm9
- vmovdqu %ymm9,192(%rsp)
- vpalignr $8,%ymm3,%ymm4,%ymm7
- vpsrldq $4,%ymm6,%ymm8
- vpxor %ymm3,%ymm7,%ymm7
- vpxor %ymm5,%ymm8,%ymm8
- vpxor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm7,%ymm8
- vpslldq $12,%ymm7,%ymm10
- vpaddd %ymm7,%ymm7,%ymm7
- vpsrld $30,%ymm10,%ymm9
- vpor %ymm8,%ymm7,%ymm7
- vpslld $2,%ymm10,%ymm10
- vpxor %ymm9,%ymm7,%ymm7
- vpxor %ymm10,%ymm7,%ymm7
- vpaddd %ymm11,%ymm7,%ymm9
- vmovdqu %ymm9,224(%rsp)
- leaq 128(%rsp),%r13
- jmp L$oop_avx2
- .p2align 5
- L$oop_avx2:
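- # rorxl $27 is a flag-free rotate-left-by-5, rorxl $2 a rotate-right-by-2;
- # andn/and/xor compute the Ch(b,c,d) select without branches.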
- rorxl $2,%ebp,%ebx
- andnl %edx,%ebp,%edi
- andl %ecx,%ebp
- xorl %edi,%ebp
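- # Jump over the padding so L$align32_1 starts on a 32-byte boundary.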
- jmp L$align32_1
- .p2align 5
- L$align32_1:
- vpalignr $8,%ymm6,%ymm7,%ymm8
- vpxor %ymm4,%ymm0,%ymm0
- addl -128(%r13),%esi
- andnl %ecx,%eax,%edi
- vpxor %ymm1,%ymm0,%ymm0
- addl %ebp,%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- vpxor %ymm8,%ymm0,%ymm0
- andl %ebx,%eax
- addl %r12d,%esi
- xorl %edi,%eax
- vpsrld $30,%ymm0,%ymm8
- vpslld $2,%ymm0,%ymm0
- addl -124(%r13),%edx
- andnl %ebx,%esi,%edi
- addl %eax,%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- andl %ebp,%esi
- vpor %ymm8,%ymm0,%ymm0
- addl %r12d,%edx
- xorl %edi,%esi
- addl -120(%r13),%ecx
- andnl %ebp,%edx,%edi
- vpaddd %ymm11,%ymm0,%ymm9
- addl %esi,%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- andl %eax,%edx
- vmovdqu %ymm9,256(%rsp)
- addl %r12d,%ecx
- xorl %edi,%edx
- addl -116(%r13),%ebx
- andnl %eax,%ecx,%edi
- addl %edx,%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- andl %esi,%ecx
- addl %r12d,%ebx
- xorl %edi,%ecx
- addl -96(%r13),%ebp
- andnl %esi,%ebx,%edi
- addl %ecx,%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- andl %edx,%ebx
- addl %r12d,%ebp
- xorl %edi,%ebx
- vpalignr $8,%ymm7,%ymm0,%ymm8
- vpxor %ymm5,%ymm1,%ymm1
- addl -92(%r13),%eax
- andnl %edx,%ebp,%edi
- vpxor %ymm2,%ymm1,%ymm1
- addl %ebx,%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- vpxor %ymm8,%ymm1,%ymm1
- andl %ecx,%ebp
- addl %r12d,%eax
- xorl %edi,%ebp
- vpsrld $30,%ymm1,%ymm8
- vpslld $2,%ymm1,%ymm1
- addl -88(%r13),%esi
- andnl %ecx,%eax,%edi
- addl %ebp,%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- andl %ebx,%eax
- vpor %ymm8,%ymm1,%ymm1
- addl %r12d,%esi
- xorl %edi,%eax
- addl -84(%r13),%edx
- andnl %ebx,%esi,%edi
- vpaddd %ymm11,%ymm1,%ymm9
- addl %eax,%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- andl %ebp,%esi
- vmovdqu %ymm9,288(%rsp)
- addl %r12d,%edx
- xorl %edi,%esi
- addl -64(%r13),%ecx
- andnl %ebp,%edx,%edi
- addl %esi,%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- andl %eax,%edx
- addl %r12d,%ecx
- xorl %edi,%edx
- addl -60(%r13),%ebx
- andnl %eax,%ecx,%edi
- addl %edx,%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- andl %esi,%ecx
- addl %r12d,%ebx
- xorl %edi,%ecx
- vpalignr $8,%ymm0,%ymm1,%ymm8
- vpxor %ymm6,%ymm2,%ymm2
- addl -56(%r13),%ebp
- andnl %esi,%ebx,%edi
- vpxor %ymm3,%ymm2,%ymm2
- vmovdqu 0(%r14),%ymm11
- addl %ecx,%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- vpxor %ymm8,%ymm2,%ymm2
- andl %edx,%ebx
- addl %r12d,%ebp
- xorl %edi,%ebx
- vpsrld $30,%ymm2,%ymm8
- vpslld $2,%ymm2,%ymm2
- addl -52(%r13),%eax
- andnl %edx,%ebp,%edi
- addl %ebx,%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- andl %ecx,%ebp
- vpor %ymm8,%ymm2,%ymm2
- addl %r12d,%eax
- xorl %edi,%ebp
- addl -32(%r13),%esi
- andnl %ecx,%eax,%edi
- vpaddd %ymm11,%ymm2,%ymm9
- addl %ebp,%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- andl %ebx,%eax
- vmovdqu %ymm9,320(%rsp)
- addl %r12d,%esi
- xorl %edi,%eax
- addl -28(%r13),%edx
- andnl %ebx,%esi,%edi
- addl %eax,%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- andl %ebp,%esi
- addl %r12d,%edx
- xorl %edi,%esi
- addl -24(%r13),%ecx
- andnl %ebp,%edx,%edi
- addl %esi,%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- andl %eax,%edx
- addl %r12d,%ecx
- xorl %edi,%edx
- vpalignr $8,%ymm1,%ymm2,%ymm8
- vpxor %ymm7,%ymm3,%ymm3
- addl -20(%r13),%ebx
- andnl %eax,%ecx,%edi
- vpxor %ymm4,%ymm3,%ymm3
- addl %edx,%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- vpxor %ymm8,%ymm3,%ymm3
- andl %esi,%ecx
- addl %r12d,%ebx
- xorl %edi,%ecx
- vpsrld $30,%ymm3,%ymm8
- vpslld $2,%ymm3,%ymm3
- addl 0(%r13),%ebp
- andnl %esi,%ebx,%edi
- addl %ecx,%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- andl %edx,%ebx
- vpor %ymm8,%ymm3,%ymm3
- addl %r12d,%ebp
- xorl %edi,%ebx
- addl 4(%r13),%eax
- andnl %edx,%ebp,%edi
- vpaddd %ymm11,%ymm3,%ymm9
- addl %ebx,%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- andl %ecx,%ebp
- vmovdqu %ymm9,352(%rsp)
- addl %r12d,%eax
- xorl %edi,%ebp
- addl 8(%r13),%esi
- andnl %ecx,%eax,%edi
- addl %ebp,%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- andl %ebx,%eax
- addl %r12d,%esi
- xorl %edi,%eax
- addl 12(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- vpalignr $8,%ymm2,%ymm3,%ymm8
- vpxor %ymm0,%ymm4,%ymm4
- addl 32(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- vpxor %ymm5,%ymm4,%ymm4
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- vpxor %ymm8,%ymm4,%ymm4
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl 36(%r13),%ebx
- vpsrld $30,%ymm4,%ymm8
- vpslld $2,%ymm4,%ymm4
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- vpor %ymm8,%ymm4,%ymm4
- addl 40(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- vpaddd %ymm11,%ymm4,%ymm9
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- addl 44(%r13),%eax
- vmovdqu %ymm9,384(%rsp)
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- addl 64(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- vpalignr $8,%ymm3,%ymm4,%ymm8
- vpxor %ymm1,%ymm5,%ymm5
- addl 68(%r13),%edx
- leal (%rdx,%rax,1),%edx
- vpxor %ymm6,%ymm5,%ymm5
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- vpxor %ymm8,%ymm5,%ymm5
- addl %r12d,%edx
- xorl %ebx,%esi
- addl 72(%r13),%ecx
- vpsrld $30,%ymm5,%ymm8
- vpslld $2,%ymm5,%ymm5
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- vpor %ymm8,%ymm5,%ymm5
- addl 76(%r13),%ebx
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- vpaddd %ymm11,%ymm5,%ymm9
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- addl 96(%r13),%ebp
- vmovdqu %ymm9,416(%rsp)
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- addl 100(%r13),%eax
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- vpalignr $8,%ymm4,%ymm5,%ymm8
- vpxor %ymm2,%ymm6,%ymm6
- addl 104(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- vpxor %ymm7,%ymm6,%ymm6
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- vpxor %ymm8,%ymm6,%ymm6
- addl %r12d,%esi
- xorl %ecx,%eax
- addl 108(%r13),%edx
- leaq 256(%r13),%r13
- vpsrld $30,%ymm6,%ymm8
- vpslld $2,%ymm6,%ymm6
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- vpor %ymm8,%ymm6,%ymm6
- addl -128(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- vpaddd %ymm11,%ymm6,%ymm9
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl -124(%r13),%ebx
- vmovdqu %ymm9,448(%rsp)
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- addl -120(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- vpalignr $8,%ymm5,%ymm6,%ymm8
- vpxor %ymm3,%ymm7,%ymm7
- addl -116(%r13),%eax
- leal (%rax,%rbx,1),%eax
- vpxor %ymm0,%ymm7,%ymm7
- vmovdqu 32(%r14),%ymm11
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- vpxor %ymm8,%ymm7,%ymm7
- addl %r12d,%eax
- xorl %edx,%ebp
- addl -96(%r13),%esi
- vpsrld $30,%ymm7,%ymm8
- vpslld $2,%ymm7,%ymm7
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- vpor %ymm8,%ymm7,%ymm7
- addl -92(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- vpaddd %ymm11,%ymm7,%ymm9
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- addl -88(%r13),%ecx
- vmovdqu %ymm9,480(%rsp)
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl -84(%r13),%ebx
- movl %esi,%edi
- xorl %eax,%edi
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- andl %edi,%ecx
- jmp L$align32_2
- .p2align 5
- L$align32_2:
- vpalignr $8,%ymm6,%ymm7,%ymm8
- vpxor %ymm4,%ymm0,%ymm0
- addl -64(%r13),%ebp
- xorl %esi,%ecx
- vpxor %ymm1,%ymm0,%ymm0
- movl %edx,%edi
- xorl %esi,%edi
- leal (%rcx,%rbp,1),%ebp
- vpxor %ymm8,%ymm0,%ymm0
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- vpsrld $30,%ymm0,%ymm8
- vpslld $2,%ymm0,%ymm0
- addl %r12d,%ebp
- andl %edi,%ebx
- addl -60(%r13),%eax
- xorl %edx,%ebx
- movl %ecx,%edi
- xorl %edx,%edi
- vpor %ymm8,%ymm0,%ymm0
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- vpaddd %ymm11,%ymm0,%ymm9
- addl %r12d,%eax
- andl %edi,%ebp
- addl -56(%r13),%esi
- xorl %ecx,%ebp
- vmovdqu %ymm9,512(%rsp)
- movl %ebx,%edi
- xorl %ecx,%edi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- andl %edi,%eax
- addl -52(%r13),%edx
- xorl %ebx,%eax
- movl %ebp,%edi
- xorl %ebx,%edi
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- andl %edi,%esi
- addl -32(%r13),%ecx
- xorl %ebp,%esi
- movl %eax,%edi
- xorl %ebp,%edi
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- andl %edi,%edx
- vpalignr $8,%ymm7,%ymm0,%ymm8
- vpxor %ymm5,%ymm1,%ymm1
- addl -28(%r13),%ebx
- xorl %eax,%edx
- vpxor %ymm2,%ymm1,%ymm1
- movl %esi,%edi
- xorl %eax,%edi
- leal (%rbx,%rdx,1),%ebx
- vpxor %ymm8,%ymm1,%ymm1
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- vpsrld $30,%ymm1,%ymm8
- vpslld $2,%ymm1,%ymm1
- addl %r12d,%ebx
- andl %edi,%ecx
- addl -24(%r13),%ebp
- xorl %esi,%ecx
- movl %edx,%edi
- xorl %esi,%edi
- vpor %ymm8,%ymm1,%ymm1
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- vpaddd %ymm11,%ymm1,%ymm9
- addl %r12d,%ebp
- andl %edi,%ebx
- addl -20(%r13),%eax
- xorl %edx,%ebx
- vmovdqu %ymm9,544(%rsp)
- movl %ecx,%edi
- xorl %edx,%edi
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- andl %edi,%ebp
- addl 0(%r13),%esi
- xorl %ecx,%ebp
- movl %ebx,%edi
- xorl %ecx,%edi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- andl %edi,%eax
- addl 4(%r13),%edx
- xorl %ebx,%eax
- movl %ebp,%edi
- xorl %ebx,%edi
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- andl %edi,%esi
- vpalignr $8,%ymm0,%ymm1,%ymm8
- vpxor %ymm6,%ymm2,%ymm2
- addl 8(%r13),%ecx
- xorl %ebp,%esi
- vpxor %ymm3,%ymm2,%ymm2
- movl %eax,%edi
- xorl %ebp,%edi
- leal (%rcx,%rsi,1),%ecx
- vpxor %ymm8,%ymm2,%ymm2
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- vpsrld $30,%ymm2,%ymm8
- vpslld $2,%ymm2,%ymm2
- addl %r12d,%ecx
- andl %edi,%edx
- addl 12(%r13),%ebx
- xorl %eax,%edx
- movl %esi,%edi
- xorl %eax,%edi
- vpor %ymm8,%ymm2,%ymm2
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- vpaddd %ymm11,%ymm2,%ymm9
- addl %r12d,%ebx
- andl %edi,%ecx
- addl 32(%r13),%ebp
- xorl %esi,%ecx
- vmovdqu %ymm9,576(%rsp)
- movl %edx,%edi
- xorl %esi,%edi
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- andl %edi,%ebx
- addl 36(%r13),%eax
- xorl %edx,%ebx
- movl %ecx,%edi
- xorl %edx,%edi
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- andl %edi,%ebp
- addl 40(%r13),%esi
- xorl %ecx,%ebp
- movl %ebx,%edi
- xorl %ecx,%edi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- andl %edi,%eax
- vpalignr $8,%ymm1,%ymm2,%ymm8
- vpxor %ymm7,%ymm3,%ymm3
- addl 44(%r13),%edx
- xorl %ebx,%eax
- vpxor %ymm4,%ymm3,%ymm3
- movl %ebp,%edi
- xorl %ebx,%edi
- leal (%rdx,%rax,1),%edx
- vpxor %ymm8,%ymm3,%ymm3
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- vpsrld $30,%ymm3,%ymm8
- vpslld $2,%ymm3,%ymm3
- addl %r12d,%edx
- andl %edi,%esi
- addl 64(%r13),%ecx
- xorl %ebp,%esi
- movl %eax,%edi
- xorl %ebp,%edi
- vpor %ymm8,%ymm3,%ymm3
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- vpaddd %ymm11,%ymm3,%ymm9
- addl %r12d,%ecx
- andl %edi,%edx
- addl 68(%r13),%ebx
- xorl %eax,%edx
- vmovdqu %ymm9,608(%rsp)
- movl %esi,%edi
- xorl %eax,%edi
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- andl %edi,%ecx
- addl 72(%r13),%ebp
- xorl %esi,%ecx
- movl %edx,%edi
- xorl %esi,%edi
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- andl %edi,%ebx
- addl 76(%r13),%eax
- xorl %edx,%ebx
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- addl 96(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- addl 100(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- addl 104(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl 108(%r13),%ebx
- leaq 256(%r13),%r13
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- addl -128(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- addl -124(%r13),%eax
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- addl -120(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- addl -116(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- addl -96(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl -92(%r13),%ebx
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- addl -88(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- addl -84(%r13),%eax
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- addl -64(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- addl -60(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- addl -56(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl -52(%r13),%ebx
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- addl -32(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- addl -28(%r13),%eax
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- addl -24(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- addl -20(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- addl %r12d,%edx
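- # Two blocks done: pick pointers for the next pair (clamped at the input's
- # end) and add the working variables back into the state at %r8.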
- leaq 128(%r9),%r13
- leaq 128(%r9),%rdi
- cmpq %r10,%r13
- cmovaeq %r9,%r13
- addl 0(%r8),%edx
- addl 4(%r8),%esi
- addl 8(%r8),%ebp
- movl %edx,0(%r8)
- addl 12(%r8),%ebx
- movl %esi,4(%r8)
- movl %edx,%eax
- addl 16(%r8),%ecx
- movl %ebp,%r12d
- movl %ebp,8(%r8)
- movl %ebx,%edx
- movl %ebx,12(%r8)
- movl %esi,%ebp
- movl %ecx,16(%r8)
- movl %ecx,%esi
- movl %r12d,%ecx
- cmpq %r10,%r9
- je L$done_avx2
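- # More input: reload the byte-swap mask, and preload the next pair unless
- # %rdi already points past the end.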
- vmovdqu 64(%r14),%ymm6
- cmpq %r10,%rdi
- ja L$ast_avx2
- vmovdqu -64(%rdi),%xmm0
- vmovdqu -48(%rdi),%xmm1
- vmovdqu -32(%rdi),%xmm2
- vmovdqu -16(%rdi),%xmm3
- vinserti128 $1,0(%r13),%ymm0,%ymm0
- vinserti128 $1,16(%r13),%ymm1,%ymm1
- vinserti128 $1,32(%r13),%ymm2,%ymm2
- vinserti128 $1,48(%r13),%ymm3,%ymm3
- jmp L$ast_avx2
- .p2align 5
- L$ast_avx2:
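- # Tail rounds for the current pair, read from the schedule saved on the
- # stack; any just-loaded pair is byte-swapped along the way.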
- leaq 128+16(%rsp),%r13
- rorxl $2,%ebp,%ebx
- andnl %edx,%ebp,%edi
- andl %ecx,%ebp
- xorl %edi,%ebp
- subq $-128,%r9
- addl -128(%r13),%esi
- andnl %ecx,%eax,%edi
- addl %ebp,%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- andl %ebx,%eax
- addl %r12d,%esi
- xorl %edi,%eax
- addl -124(%r13),%edx
- andnl %ebx,%esi,%edi
- addl %eax,%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- andl %ebp,%esi
- addl %r12d,%edx
- xorl %edi,%esi
- addl -120(%r13),%ecx
- andnl %ebp,%edx,%edi
- addl %esi,%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- andl %eax,%edx
- addl %r12d,%ecx
- xorl %edi,%edx
- addl -116(%r13),%ebx
- andnl %eax,%ecx,%edi
- addl %edx,%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- andl %esi,%ecx
- addl %r12d,%ebx
- xorl %edi,%ecx
- addl -96(%r13),%ebp
- andnl %esi,%ebx,%edi
- addl %ecx,%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- andl %edx,%ebx
- addl %r12d,%ebp
- xorl %edi,%ebx
- addl -92(%r13),%eax
- andnl %edx,%ebp,%edi
- addl %ebx,%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- andl %ecx,%ebp
- addl %r12d,%eax
- xorl %edi,%ebp
- addl -88(%r13),%esi
- andnl %ecx,%eax,%edi
- addl %ebp,%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- andl %ebx,%eax
- addl %r12d,%esi
- xorl %edi,%eax
- addl -84(%r13),%edx
- andnl %ebx,%esi,%edi
- addl %eax,%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- andl %ebp,%esi
- addl %r12d,%edx
- xorl %edi,%esi
- addl -64(%r13),%ecx
- andnl %ebp,%edx,%edi
- addl %esi,%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- andl %eax,%edx
- addl %r12d,%ecx
- xorl %edi,%edx
- addl -60(%r13),%ebx
- andnl %eax,%ecx,%edi
- addl %edx,%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- andl %esi,%ecx
- addl %r12d,%ebx
- xorl %edi,%ecx
- addl -56(%r13),%ebp
- andnl %esi,%ebx,%edi
- addl %ecx,%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- andl %edx,%ebx
- addl %r12d,%ebp
- xorl %edi,%ebx
- addl -52(%r13),%eax
- andnl %edx,%ebp,%edi
- addl %ebx,%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- andl %ecx,%ebp
- addl %r12d,%eax
- xorl %edi,%ebp
- addl -32(%r13),%esi
- andnl %ecx,%eax,%edi
- addl %ebp,%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- andl %ebx,%eax
- addl %r12d,%esi
- xorl %edi,%eax
- addl -28(%r13),%edx
- andnl %ebx,%esi,%edi
- addl %eax,%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- andl %ebp,%esi
- addl %r12d,%edx
- xorl %edi,%esi
- addl -24(%r13),%ecx
- andnl %ebp,%edx,%edi
- addl %esi,%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- andl %eax,%edx
- addl %r12d,%ecx
- xorl %edi,%edx
- addl -20(%r13),%ebx
- andnl %eax,%ecx,%edi
- addl %edx,%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- andl %esi,%ecx
- addl %r12d,%ebx
- xorl %edi,%ecx
- addl 0(%r13),%ebp
- andnl %esi,%ebx,%edi
- addl %ecx,%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- andl %edx,%ebx
- addl %r12d,%ebp
- xorl %edi,%ebx
- addl 4(%r13),%eax
- andnl %edx,%ebp,%edi
- addl %ebx,%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- andl %ecx,%ebp
- addl %r12d,%eax
- xorl %edi,%ebp
- addl 8(%r13),%esi
- andnl %ecx,%eax,%edi
- addl %ebp,%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- andl %ebx,%eax
- addl %r12d,%esi
- xorl %edi,%eax
- addl 12(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- addl 32(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl 36(%r13),%ebx
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- addl 40(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- addl 44(%r13),%eax
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- addl 64(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- vmovdqu -64(%r14),%ymm11
- vpshufb %ymm6,%ymm0,%ymm0
- addl 68(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- addl 72(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl 76(%r13),%ebx
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- addl 96(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- addl 100(%r13),%eax
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- vpshufb %ymm6,%ymm1,%ymm1
- vpaddd %ymm11,%ymm0,%ymm8
- addl 104(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- addl 108(%r13),%edx
- leaq 256(%r13),%r13
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- addl -128(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl -124(%r13),%ebx
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- addl -120(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- vmovdqu %ymm8,0(%rsp)
- vpshufb %ymm6,%ymm2,%ymm2
- vpaddd %ymm11,%ymm1,%ymm9
- addl -116(%r13),%eax
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- addl -96(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- addl -92(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- addl -88(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl -84(%r13),%ebx
- movl %esi,%edi
- xorl %eax,%edi
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- andl %edi,%ecx
- vmovdqu %ymm9,32(%rsp)
- vpshufb %ymm6,%ymm3,%ymm3
- vpaddd %ymm11,%ymm2,%ymm6
- addl -64(%r13),%ebp
- xorl %esi,%ecx
- movl %edx,%edi
- xorl %esi,%edi
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- andl %edi,%ebx
- addl -60(%r13),%eax
- xorl %edx,%ebx
- movl %ecx,%edi
- xorl %edx,%edi
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- andl %edi,%ebp
- addl -56(%r13),%esi
- xorl %ecx,%ebp
- movl %ebx,%edi
- xorl %ecx,%edi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- andl %edi,%eax
- addl -52(%r13),%edx
- xorl %ebx,%eax
- movl %ebp,%edi
- xorl %ebx,%edi
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- andl %edi,%esi
- addl -32(%r13),%ecx
- xorl %ebp,%esi
- movl %eax,%edi
- xorl %ebp,%edi
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- andl %edi,%edx
- jmp L$align32_3
- .p2align 5
- L$align32_3:
- vmovdqu %ymm6,64(%rsp)
- vpaddd %ymm11,%ymm3,%ymm7
- addl -28(%r13),%ebx
- xorl %eax,%edx
- movl %esi,%edi
- xorl %eax,%edi
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- andl %edi,%ecx
- addl -24(%r13),%ebp
- xorl %esi,%ecx
- movl %edx,%edi
- xorl %esi,%edi
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- andl %edi,%ebx
- addl -20(%r13),%eax
- xorl %edx,%ebx
- movl %ecx,%edi
- xorl %edx,%edi
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- andl %edi,%ebp
- addl 0(%r13),%esi
- xorl %ecx,%ebp
- movl %ebx,%edi
- xorl %ecx,%edi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- andl %edi,%eax
- addl 4(%r13),%edx
- xorl %ebx,%eax
- movl %ebp,%edi
- xorl %ebx,%edi
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- andl %edi,%esi
- vmovdqu %ymm7,96(%rsp)
- addl 8(%r13),%ecx
- xorl %ebp,%esi
- movl %eax,%edi
- xorl %ebp,%edi
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- andl %edi,%edx
- addl 12(%r13),%ebx
- xorl %eax,%edx
- movl %esi,%edi
- xorl %eax,%edi
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- andl %edi,%ecx
- addl 32(%r13),%ebp
- xorl %esi,%ecx
- movl %edx,%edi
- xorl %esi,%edi
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- andl %edi,%ebx
- addl 36(%r13),%eax
- xorl %edx,%ebx
- movl %ecx,%edi
- xorl %edx,%edi
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- andl %edi,%ebp
- addl 40(%r13),%esi
- xorl %ecx,%ebp
- movl %ebx,%edi
- xorl %ecx,%edi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- andl %edi,%eax
- vpalignr $8,%ymm0,%ymm1,%ymm4
- addl 44(%r13),%edx
- xorl %ebx,%eax
- movl %ebp,%edi
- xorl %ebx,%edi
- vpsrldq $4,%ymm3,%ymm8
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- vpxor %ymm0,%ymm4,%ymm4
- vpxor %ymm2,%ymm8,%ymm8
- xorl %ebp,%esi
- addl %r12d,%edx
- vpxor %ymm8,%ymm4,%ymm4
- andl %edi,%esi
- addl 64(%r13),%ecx
- xorl %ebp,%esi
- movl %eax,%edi
- vpsrld $31,%ymm4,%ymm8
- xorl %ebp,%edi
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- vpslldq $12,%ymm4,%ymm10
- vpaddd %ymm4,%ymm4,%ymm4
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- vpsrld $30,%ymm10,%ymm9
- vpor %ymm8,%ymm4,%ymm4
- addl %r12d,%ecx
- andl %edi,%edx
- vpslld $2,%ymm10,%ymm10
- vpxor %ymm9,%ymm4,%ymm4
- addl 68(%r13),%ebx
- xorl %eax,%edx
- vpxor %ymm10,%ymm4,%ymm4
- movl %esi,%edi
- xorl %eax,%edi
- leal (%rbx,%rdx,1),%ebx
- vpaddd %ymm11,%ymm4,%ymm9
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- vmovdqu %ymm9,128(%rsp)
- addl %r12d,%ebx
- andl %edi,%ecx
- addl 72(%r13),%ebp
- xorl %esi,%ecx
- movl %edx,%edi
- xorl %esi,%edi
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- andl %edi,%ebx
- addl 76(%r13),%eax
- xorl %edx,%ebx
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- vpalignr $8,%ymm1,%ymm2,%ymm5
- addl 96(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- vpsrldq $4,%ymm4,%ymm8
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- vpxor %ymm1,%ymm5,%ymm5
- vpxor %ymm3,%ymm8,%ymm8
- addl 100(%r13),%edx
- leal (%rdx,%rax,1),%edx
- vpxor %ymm8,%ymm5,%ymm5
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- vpsrld $31,%ymm5,%ymm8
- vmovdqu -32(%r14),%ymm11
- xorl %ebx,%esi
- addl 104(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- vpslldq $12,%ymm5,%ymm10
- vpaddd %ymm5,%ymm5,%ymm5
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- vpsrld $30,%ymm10,%ymm9
- vpor %ymm8,%ymm5,%ymm5
- xorl %eax,%edx
- addl %r12d,%ecx
- vpslld $2,%ymm10,%ymm10
- vpxor %ymm9,%ymm5,%ymm5
- xorl %ebp,%edx
- addl 108(%r13),%ebx
- leaq 256(%r13),%r13
- vpxor %ymm10,%ymm5,%ymm5
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- vpaddd %ymm11,%ymm5,%ymm9
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- vmovdqu %ymm9,160(%rsp)
- addl -128(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- vpalignr $8,%ymm2,%ymm3,%ymm6
- addl -124(%r13),%eax
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- vpsrldq $4,%ymm5,%ymm8
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- vpxor %ymm2,%ymm6,%ymm6
- vpxor %ymm4,%ymm8,%ymm8
- addl -120(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- vpxor %ymm8,%ymm6,%ymm6
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- vpsrld $31,%ymm6,%ymm8
- xorl %ecx,%eax
- addl -116(%r13),%edx
- leal (%rdx,%rax,1),%edx
- vpslldq $12,%ymm6,%ymm10
- vpaddd %ymm6,%ymm6,%ymm6
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- vpsrld $30,%ymm10,%ymm9
- vpor %ymm8,%ymm6,%ymm6
- xorl %ebp,%esi
- addl %r12d,%edx
- vpslld $2,%ymm10,%ymm10
- vpxor %ymm9,%ymm6,%ymm6
- xorl %ebx,%esi
- addl -96(%r13),%ecx
- vpxor %ymm10,%ymm6,%ymm6
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- vpaddd %ymm11,%ymm6,%ymm9
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- vmovdqu %ymm9,192(%rsp)
- addl -92(%r13),%ebx
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- vpalignr $8,%ymm3,%ymm4,%ymm7
- addl -88(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- vpsrldq $4,%ymm6,%ymm8
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- vpxor %ymm3,%ymm7,%ymm7
- vpxor %ymm5,%ymm8,%ymm8
- addl -84(%r13),%eax
- leal (%rax,%rbx,1),%eax
- vpxor %ymm8,%ymm7,%ymm7
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- vpsrld $31,%ymm7,%ymm8
- xorl %edx,%ebp
- addl -64(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- vpslldq $12,%ymm7,%ymm10
- vpaddd %ymm7,%ymm7,%ymm7
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- vpsrld $30,%ymm10,%ymm9
- vpor %ymm8,%ymm7,%ymm7
- xorl %ebx,%eax
- addl %r12d,%esi
- vpslld $2,%ymm10,%ymm10
- vpxor %ymm9,%ymm7,%ymm7
- xorl %ecx,%eax
- addl -60(%r13),%edx
- vpxor %ymm10,%ymm7,%ymm7
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- vpaddd %ymm11,%ymm7,%ymm9
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- vmovdqu %ymm9,224(%rsp)
- addl -56(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl -52(%r13),%ebx
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- addl -32(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- addl -28(%r13),%eax
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- addl -24(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- addl -20(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- addl %r12d,%edx
- leaq 128(%rsp),%r13
- addl 0(%r8),%edx
- addl 4(%r8),%esi
- addl 8(%r8),%ebp
- movl %edx,0(%r8)
- addl 12(%r8),%ebx
- movl %esi,4(%r8)
- movl %edx,%eax
- addl 16(%r8),%ecx
- movl %ebp,%r12d
- movl %ebp,8(%r8)
- movl %ebx,%edx
- movl %ebx,12(%r8)
- movl %esi,%ebp
- movl %ecx,16(%r8)
- movl %ecx,%esi
- movl %r12d,%ecx
- cmpq %r10,%r9
- jbe L$oop_avx2
- L$done_avx2:
- vzeroupper
- movq -40(%r11),%r14
- movq -32(%r11),%r13
- movq -24(%r11),%r12
- movq -16(%r11),%rbp
- movq -8(%r11),%rbx
- leaq (%r11),%rsp
- L$epilogue_avx2:
- .byte 0xf3,0xc3
- .p2align 6
- K_XX_XX:
- .long 0x5a827999,0x5a827999,0x5a827999,0x5a827999
- .long 0x5a827999,0x5a827999,0x5a827999,0x5a827999
- .long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1
- .long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1
- .long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc
- .long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc
- .long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6
- .long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6
- .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
- .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
- .byte 0xf,0xe,0xd,0xc,0xb,0xa,0x9,0x8,0x7,0x6,0x5,0x4,0x3,0x2,0x1,0x0
- .byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
- .p2align 6
- #endif
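For reference, the rounds deleted above implement the standard SHA-1 compression in the BMI2 idiom: rorxl $27 and rorxl $2 stand in for the 5- and 30-bit left rotates, the andnl/andl/xorl triple builds the Ch() selector, vpshufb with the 0x00010203... mask byte-swaps the big-endian message words, and vpaddd pre-adds the K_XX_XX round constants to the message schedule before the words are parked on the stack. Below is a minimal C sketch of one such round; the function and variable names (rol32, sha1_round_ch, wk) are illustrative, not taken from the source.

#include <stdint.h>

/* Rotate-left by n bits; the asm gets the same effect from rorx with a
 * count of (32 - n): rorxl $27 == rol 5, rorxl $2 == rol 30. */
static inline uint32_t rol32(uint32_t x, int n)
{
    return (x << n) | (x >> (32 - n));
}

/* One Ch-type SHA-1 round (rounds 0-19).  wk is W[i] + 0x5a827999: the
 * vector code pre-adds the K_XX_XX constant with vpaddd and stores W+K
 * on the stack, so each scalar round only adds one stack word. */
static uint32_t sha1_round_ch(uint32_t a, uint32_t *b, uint32_t c,
                              uint32_t d, uint32_t e, uint32_t wk)
{
    uint32_t f = (*b & c) ^ (~*b & d);   /* andnl + andl + xorl above    */
    e += rol32(a, 5) + f + wk;           /* rorxl $27 feeds this add     */
    *b = rol32(*b, 30);                  /* rorxl $2 rewrites b in place */
    return e;                            /* caller then shifts the roles
                                            (a,b,c,d,e) -> (e,a,b,c,d)   */
}

Rather than moving data to rotate the five state words each round, the scalar code above permutes the register roles, which is why the same add/andn/rorx pattern cycles through six general-purpose registers with %edi as the scratch.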