aes.c 432 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278122791228012281122821228312284122851228612287122881228912290122911229212293122941229512296122971229812299123001230112302123031230412305123061230712308123091231012311123121231312314123151231612317123181231912320123211232212323123241232512326123271232812329123301233112332123331233412335123361233712338123391234012341123421234312344123451234612347123481234912350123511235212353123541235512356123571235812359123601236112362123631236412365123661236712368123691237012371123721237312374123751237612377123781237912380123811238212383123841238512386123871238812389123901239112392123931239412395123961239712398123991240012401124021240312404124051240612407124081240912410124111241212413124141241512416124171241812419124201242112422124231242412425124261242712428124291243012431124321243312434124351243612437124381243912440124411244212443124441244512446124471244812449124501245112452124531245412455124561245712458124591246012461124621246312464124651246612467124681246912470124711247212473124741247512476124771247812479124801248112482124831248412485124861248712488124891249012491124921249312494124951249612497124981249912500125011250212503125041250512506125071250812509125101251112512125131251412515125161251712518125191252012521125221252312524125251252612527125281252912530125311253212533125341253512536125371253812539125401254112542125431254412545125461254712548125491255012551125521255312554125551255612557125581255912560125611256212563125641256512566125671256812569125701257112572125731257412575125761257712578125791258012581125821258312584125851258612587125881258912590125911259212593125941259512596125971259812599126001260112602126031260412605126061260712608126091261012611126121261312614126151261612617126181261912620126211
26221262312624126251262612627126281262912630126311263212633126341263512636126371263812639126401264112642126431264412645126461264712648126491265012651126521265312654126551265612657126581265912660126611266212663126641266512666126671266812669126701267112672126731267412675126761267712678126791268012681126821268312684126851268612687126881268912690126911269212693126941269512696126971269812699127001270112702127031270412705127061270712708127091271012711127121271312714127151271612717127181271912720127211272212723127241272512726127271272812729127301273112732127331273412735127361273712738127391274012741127421274312744127451274612747127481274912750127511275212753127541275512756127571275812759127601276112762127631276412765127661276712768127691277012771127721277312774127751277612777127781277912780127811278212783127841278512786127871278812789127901279112792127931279412795127961279712798127991280012801128021280312804128051280612807128081280912810128111281212813128141281512816128171281812819128201282112822128231282412825128261282712828128291283012831128321283312834128351283612837128381283912840128411284212843128441284512846128471284812849128501285112852128531285412855128561285712858128591286012861128621286312864128651286612867128681286912870128711287212873128741287512876128771287812879128801288112882128831288412885128861288712888128891289012891128921289312894128951289612897128981289912900129011290212903129041290512906129071290812909129101291112912129131291412915129161291712918129191292012921129221292312924129251292612927129281292912930129311293212933129341293512936129371293812939129401294112942129431294412945129461294712948129491295012951129521295312954129551295612957129581295912960129611296212963129641296512966129671296812969129701297112972129731297412975129761297712978129791298012981129821298312984129851298612987129881298912990129911299212993129941299512996129971299812999130001300113002130031300413005130061300713008130091301013011130121301313014130151301613017130181301913020130211
30221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211
3422134231342413425134261342713428134291343013431134321343313434134351343613437134381343913440134411344213443134441344513446134471344813449134501345113452134531345413455134561345713458134591346013461134621346313464134651346613467134681346913470134711347213473134741347513476134771347813479134801348113482134831348413485134861348713488134891349013491134921349313494134951349613497134981349913500135011350213503135041350513506135071350813509135101351113512135131351413515135161351713518135191352013521135221352313524135251352613527135281352913530135311353213533135341353513536135371353813539135401354113542135431354413545135461354713548135491355013551135521355313554135551355613557135581355913560135611356213563135641356513566135671356813569135701357113572135731357413575135761357713578135791358013581135821358313584135851358613587135881358913590135911359213593135941359513596135971359813599136001360113602136031360413605136061360713608136091361013611136121361313614136151361613617136181361913620136211362213623136241362513626136271362813629136301363113632136331363413635136361363713638136391364013641136421364313644136451364613647136481364913650136511365213653136541365513656136571365813659136601366113662136631366413665136661366713668136691367013671136721367313674136751367613677136781367913680136811368213683136841368513686136871368813689136901369113692136931369413695136961369713698136991370013701137021370313704137051370613707137081370913710137111371213713137141371513716137171371813719137201372113722137231372413725137261372713728137291373013731137321373313734137351373613737137381373913740
  1. /* aes.c
  2. *
  3. * Copyright (C) 2006-2023 wolfSSL Inc.
  4. *
  5. * This file is part of wolfSSL.
  6. *
  7. * wolfSSL is free software; you can redistribute it and/or modify
  8. * it under the terms of the GNU General Public License as published by
  9. * the Free Software Foundation; either version 2 of the License, or
  10. * (at your option) any later version.
  11. *
  12. * wolfSSL is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  15. * GNU General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU General Public License
  18. * along with this program; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA
  20. */
  21. /*
  22. DESCRIPTION
This library provides the interfaces to the Advanced Encryption Standard (AES)
for encrypting and decrypting data. AES is a symmetric block cipher that
operates on 128-bit blocks and supports key sizes of 128, 192, and
256 bits.
  27. */
  28. #ifdef HAVE_CONFIG_H
  29. #include <config.h>
  30. #endif
  31. #include <wolfssl/wolfcrypt/settings.h>
  32. #include <wolfssl/wolfcrypt/error-crypt.h>
  33. #if !defined(NO_AES)
  34. /* Tip: Locate the software cipher modes by searching for "Software AES" */
  35. #if defined(HAVE_FIPS) && \
  36. defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2)
  37. /* set NO_WRAPPERS before headers, use direct internal f()s not wrappers */
  38. #define FIPS_NO_WRAPPERS
  39. #ifdef USE_WINDOWS_API
  40. #pragma code_seg(".fipsA$g")
  41. #pragma const_seg(".fipsB$g")
  42. #endif
  43. #endif
  44. #include <wolfssl/wolfcrypt/aes.h>
  45. #ifdef WOLFSSL_AESNI
  46. #include <wmmintrin.h>
  47. #include <emmintrin.h>
  48. #include <smmintrin.h>
  49. #endif /* WOLFSSL_AESNI */
  50. #include <wolfssl/wolfcrypt/cpuid.h>
  51. #ifdef WOLF_CRYPTO_CB
  52. #include <wolfssl/wolfcrypt/cryptocb.h>
  53. #endif
  54. #ifdef WOLFSSL_SECO_CAAM
  55. #include <wolfssl/wolfcrypt/port/caam/wolfcaam.h>
  56. #endif
  57. #ifdef WOLFSSL_IMXRT_DCP
  58. #include <wolfssl/wolfcrypt/port/nxp/dcp_port.h>
  59. #endif
  60. #if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
  61. #include <wolfssl/wolfcrypt/port/nxp/se050_port.h>
  62. #endif
  63. #if defined(WOLFSSL_AES_SIV)
  64. #include <wolfssl/wolfcrypt/cmac.h>
  65. #endif /* WOLFSSL_AES_SIV */
  66. #if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
  67. #include <wolfssl/wolfcrypt/port/psa/psa.h>
  68. #endif
  69. #if defined(WOLFSSL_TI_CRYPT)
  70. #include <wolfcrypt/src/port/ti/ti-aes.c>
  71. #else
  72. #include <wolfssl/wolfcrypt/logging.h>
  73. #ifdef NO_INLINE
  74. #include <wolfssl/wolfcrypt/misc.h>
  75. #else
  76. #define WOLFSSL_MISC_INCLUDED
  77. #include <wolfcrypt/src/misc.c>
  78. #endif
  79. #ifndef WOLFSSL_ARMASM
  80. #ifdef WOLFSSL_IMX6_CAAM_BLOB
  81. /* case of possibly not using hardware acceleration for AES but using key
  82. blobs */
  83. #include <wolfssl/wolfcrypt/port/caam/wolfcaam.h>
  84. #endif
  85. #ifdef DEBUG_AESNI
  86. #include <stdio.h>
  87. #endif
  88. #ifdef _MSC_VER
  89. /* 4127 warning constant while(1) */
  90. #pragma warning(disable: 4127)
  91. #endif
  92. /* Define AES implementation includes and functions */
  93. #if defined(STM32_CRYPTO)
  94. /* STM32F2/F4/F7/L4/L5/H7/WB55 hardware AES support for ECB, CBC, CTR and GCM modes */
  95. #if defined(WOLFSSL_AES_DIRECT) || defined(HAVE_AESGCM) || defined(HAVE_AESCCM)
/* Encrypt one AES block (AES_BLOCK_SIZE bytes) in ECB mode using the STM32
 * hardware crypto peripheral (CRYP).
 *
 * aes      - initialized Aes context; key material is loaded into the
 *            hardware by wc_Stm32_Aes_Init()
 * inBlock  - input block of AES_BLOCK_SIZE bytes
 * outBlock - output block of AES_BLOCK_SIZE bytes
 *
 * Returns 0 on success or a negative wolfCrypt error code on failure.
 *
 * NOTE(review): the early returns after a failed wolfSSL_CryptHwMutexLock()
 * occur before the mutex is held, so skipping the unlock there is correct.
 * NOTE(review): the 32-bit loads/stores on inBlock/outBlock assume the
 * caller's buffers are 4-byte aligned -- TODO confirm for all callers.
 */
static WARN_UNUSED_RESULT int wc_AesEncrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    int ret = 0;
#ifdef WOLFSSL_STM32_CUBEMX
    CRYP_HandleTypeDef hcryp;
#else
    /* Standard Peripheral Library configuration structures */
    CRYP_InitTypeDef cryptInit;
    CRYP_KeyInitTypeDef keyInit;
#endif

#ifdef WC_DEBUG_CIPHER_LIFECYCLE
    /* debug build: verify the cipher object is in a usable lifecycle state */
    ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
    if (ret < 0)
        return ret;
#endif

#ifdef WOLFSSL_STM32_CUBEMX
    /* populate the HAL handle (key, key size) from the Aes context */
    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0)
        return ret;

    /* serialize access to the shared hardware crypto engine */
    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0)
        return ret;

    /* select AES-ECB encrypt; field names differ across HAL generations */
#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_ECB;
#elif defined(STM32_CRYPTO_AES_ONLY)
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
    hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_ECB;
    hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
#endif
    HAL_CRYP_Init(&hcryp);

    /* run the single-block encrypt with the API matching this HAL version */
#if defined(STM32_HAL_V2)
    ret = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)inBlock, AES_BLOCK_SIZE,
        (uint32_t*)outBlock, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
    ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
        outBlock, STM32_HAL_TIMEOUT);
#else
    ret = HAL_CRYP_AESECB_Encrypt(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
        outBlock, STM32_HAL_TIMEOUT);
#endif
    if (ret != HAL_OK) {
        /* any HAL failure (status != HAL_OK) is mapped to a timeout error */
        ret = WC_TIMEOUT_E;
    }
    HAL_CRYP_DeInit(&hcryp);
#else /* Standard Peripheral Library */
    /* populate SPL init structures (key, key size) from the Aes context */
    ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
    if (ret != 0)
        return ret;

    /* serialize access to the shared hardware crypto engine */
    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0)
        return ret;

    /* reset registers to their default values */
    CRYP_DeInit();
    /* setup key */
    CRYP_KeyInit(&keyInit);
    /* set direction and mode */
    cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Encrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_ECB;
    CRYP_Init(&cryptInit);
    /* enable crypto processor */
    CRYP_Cmd(ENABLE);
    /* flush IN/OUT FIFOs */
    CRYP_FIFOFlush();
    /* feed the block into the input FIFO, one 32-bit word at a time */
    CRYP_DataIn(*(uint32_t*)&inBlock[0]);
    CRYP_DataIn(*(uint32_t*)&inBlock[4]);
    CRYP_DataIn(*(uint32_t*)&inBlock[8]);
    CRYP_DataIn(*(uint32_t*)&inBlock[12]);
    /* wait until the complete message has been processed */
    while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}
    /* drain the ciphertext from the output FIFO */
    *(uint32_t*)&outBlock[0] = CRYP_DataOut();
    *(uint32_t*)&outBlock[4] = CRYP_DataOut();
    *(uint32_t*)&outBlock[8] = CRYP_DataOut();
    *(uint32_t*)&outBlock[12] = CRYP_DataOut();
    /* disable crypto processor */
    CRYP_Cmd(DISABLE);
#endif /* WOLFSSL_STM32_CUBEMX */

    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();
    return ret;
}
  176. #endif /* WOLFSSL_AES_DIRECT || HAVE_AESGCM || HAVE_AESCCM */
  177. #ifdef HAVE_AES_DECRYPT
  178. #if defined(WOLFSSL_AES_DIRECT) || defined(HAVE_AESCCM)
/* Decrypt a single 16-byte block with the STM32 CRYP/AES peripheral in
 * ECB mode.
 *
 * aes      - initialized Aes context holding the key (loaded into the
 *            peripheral by wc_Stm32_Aes_Init)
 * inBlock  - 16 bytes of ciphertext
 * outBlock - receives 16 bytes of plaintext
 *
 * Returns 0 on success, WC_TIMEOUT_E if the HAL call fails, or an error
 * from init/mutex-lock. Serializes hardware access via the crypt HW mutex.
 */
static WARN_UNUSED_RESULT int wc_AesDecrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    int ret = 0;
#ifdef WOLFSSL_STM32_CUBEMX
    CRYP_HandleTypeDef hcryp;
#else
    /* Standard Peripheral Library uses separate crypt/key init structs */
    CRYP_InitTypeDef cryptInit;
    CRYP_KeyInitTypeDef keyInit;
#endif

#ifdef WC_DEBUG_CIPHER_LIFECYCLE
    ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
    if (ret < 0)
        return ret;
#endif

#ifdef WOLFSSL_STM32_CUBEMX
    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0)
        return ret;
    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0)
        return ret;

#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_ECB;
#elif defined(STM32_CRYPTO_AES_ONLY)
    /* AES-only peripheral: decrypt round keys are derived from the
     * encrypt key in hardware (KEYDERIVATION_DECRYPT). */
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_KEYDERIVATION_DECRYPT;
    hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_ECB;
    hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
#endif
    HAL_CRYP_Init(&hcryp);

#if defined(STM32_HAL_V2)
    ret = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)inBlock, AES_BLOCK_SIZE,
        (uint32_t*)outBlock, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
    ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
        outBlock, STM32_HAL_TIMEOUT);
#else
    ret = HAL_CRYP_AESECB_Decrypt(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
        outBlock, STM32_HAL_TIMEOUT);
#endif
    /* map any HAL failure (busy/timeout/error) to a wolfCrypt code */
    if (ret != HAL_OK) {
        ret = WC_TIMEOUT_E;
    }
    HAL_CRYP_DeInit(&hcryp);

#else /* Standard Peripheral Library */
    ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
    if (ret != 0)
        return ret;
    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0)
        return ret;

    /* reset registers to their default values */
    CRYP_DeInit();

    /* set direction and key */
    CRYP_KeyInit(&keyInit);
    cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt;
    /* first pass: AES key-schedule mode to derive the decrypt key */
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_Key;
    CRYP_Init(&cryptInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    /* wait until decrypt key has been initialized */
    while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

    /* set direction and mode */
    cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_ECB;
    CRYP_Init(&cryptInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    /* flush IN/OUT FIFOs */
    CRYP_FIFOFlush();

    /* feed the 16-byte block as four 32-bit words */
    CRYP_DataIn(*(uint32_t*)&inBlock[0]);
    CRYP_DataIn(*(uint32_t*)&inBlock[4]);
    CRYP_DataIn(*(uint32_t*)&inBlock[8]);
    CRYP_DataIn(*(uint32_t*)&inBlock[12]);

    /* wait until the complete message has been processed */
    while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

    *(uint32_t*)&outBlock[0] = CRYP_DataOut();
    *(uint32_t*)&outBlock[4] = CRYP_DataOut();
    *(uint32_t*)&outBlock[8] = CRYP_DataOut();
    *(uint32_t*)&outBlock[12] = CRYP_DataOut();

    /* disable crypto processor */
    CRYP_Cmd(DISABLE);
#endif /* WOLFSSL_STM32_CUBEMX */

    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();
    return ret;
}
  266. #endif /* WOLFSSL_AES_DIRECT || HAVE_AESCCM */
  267. #endif /* HAVE_AES_DECRYPT */
  268. #elif defined(HAVE_COLDFIRE_SEC)
  269. /* Freescale Coldfire SEC support for CBC mode.
  270. * NOTE: no support for AES-CTR/GCM/CCM/Direct */
  271. #include <wolfssl/wolfcrypt/types.h>
  272. #include "sec.h"
  273. #include "mcf5475_sec.h"
  274. #include "mcf5475_siu.h"
  275. #elif defined(FREESCALE_LTC)
  276. #include "fsl_ltc.h"
  277. #if defined(FREESCALE_LTC_AES_GCM)
  278. #undef NEED_AES_TABLES
  279. #undef GCM_TABLE
  280. #endif
  281. /* if LTC doesn't have GCM, use software with LTC AES ECB mode */
  282. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  283. Aes* aes, const byte* inBlock, byte* outBlock)
  284. {
  285. word32 keySize = 0;
  286. byte* key = (byte*)aes->key;
  287. int ret = wc_AesGetKeySize(aes, &keySize);
  288. if (ret != 0)
  289. return ret;
  290. #ifdef WC_DEBUG_CIPHER_LIFECYCLE
  291. ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
  292. if (ret < 0)
  293. return ret;
  294. #endif
  295. if (wolfSSL_CryptHwMutexLock() == 0) {
  296. LTC_AES_EncryptEcb(LTC_BASE, inBlock, outBlock, AES_BLOCK_SIZE,
  297. key, keySize);
  298. wolfSSL_CryptHwMutexUnLock();
  299. }
  300. return 0;
  301. }
  302. #ifdef HAVE_AES_DECRYPT
  303. static WARN_UNUSED_RESULT int wc_AesDecrypt(
  304. Aes* aes, const byte* inBlock, byte* outBlock)
  305. {
  306. word32 keySize = 0;
  307. byte* key = (byte*)aes->key;
  308. int ret = wc_AesGetKeySize(aes, &keySize);
  309. if (ret != 0)
  310. return ret;
  311. #ifdef WC_DEBUG_CIPHER_LIFECYCLE
  312. ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
  313. if (ret < 0)
  314. return ret;
  315. #endif
  316. if (wolfSSL_CryptHwMutexLock() == 0) {
  317. LTC_AES_DecryptEcb(LTC_BASE, inBlock, outBlock, AES_BLOCK_SIZE,
  318. key, keySize, kLTC_EncryptKey);
  319. wolfSSL_CryptHwMutexUnLock();
  320. }
  321. return 0;
  322. }
  323. #endif
  324. #elif defined(FREESCALE_MMCAU)
  325. /* Freescale mmCAU hardware AES support for Direct, CBC, CCM, GCM modes
  326. * through the CAU/mmCAU library. Documentation located in
  327. * ColdFire/ColdFire+ CAU and Kinetis mmCAU Software Library User
  328. * Guide (See note in README). */
  329. #ifdef FREESCALE_MMCAU_CLASSIC
  330. /* MMCAU 1.4 library used with non-KSDK / classic MQX builds */
  331. #include "cau_api.h"
  332. #else
  333. #include "fsl_mmcau.h"
  334. #endif
  335. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  336. Aes* aes, const byte* inBlock, byte* outBlock)
  337. {
  338. #ifdef WC_DEBUG_CIPHER_LIFECYCLE
  339. {
  340. int ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
  341. if (ret < 0)
  342. return ret;
  343. }
  344. #endif
  345. if (wolfSSL_CryptHwMutexLock() == 0) {
  346. #ifdef FREESCALE_MMCAU_CLASSIC
  347. if ((wc_ptr_t)outBlock % WOLFSSL_MMCAU_ALIGNMENT) {
  348. WOLFSSL_MSG("Bad cau_aes_encrypt alignment");
  349. return BAD_ALIGN_E;
  350. }
  351. cau_aes_encrypt(inBlock, (byte*)aes->key, aes->rounds, outBlock);
  352. #else
  353. MMCAU_AES_EncryptEcb(inBlock, (byte*)aes->key, aes->rounds,
  354. outBlock);
  355. #endif
  356. wolfSSL_CryptHwMutexUnLock();
  357. }
  358. return 0;
  359. }
  360. #ifdef HAVE_AES_DECRYPT
  361. static WARN_UNUSED_RESULT int wc_AesDecrypt(
  362. Aes* aes, const byte* inBlock, byte* outBlock)
  363. {
  364. #ifdef WC_DEBUG_CIPHER_LIFECYCLE
  365. {
  366. int ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
  367. if (ret < 0)
  368. return ret;
  369. }
  370. #endif
  371. if (wolfSSL_CryptHwMutexLock() == 0) {
  372. #ifdef FREESCALE_MMCAU_CLASSIC
  373. if ((wc_ptr_t)outBlock % WOLFSSL_MMCAU_ALIGNMENT) {
  374. WOLFSSL_MSG("Bad cau_aes_decrypt alignment");
  375. return BAD_ALIGN_E;
  376. }
  377. cau_aes_decrypt(inBlock, (byte*)aes->key, aes->rounds, outBlock);
  378. #else
  379. MMCAU_AES_DecryptEcb(inBlock, (byte*)aes->key, aes->rounds,
  380. outBlock);
  381. #endif
  382. wolfSSL_CryptHwMutexUnLock();
  383. }
  384. return 0;
  385. }
  386. #endif /* HAVE_AES_DECRYPT */
  387. #elif defined(WOLFSSL_PIC32MZ_CRYPT)
  388. #include <wolfssl/wolfcrypt/port/pic32/pic32mz-crypt.h>
  389. #if defined(HAVE_AESGCM) || defined(WOLFSSL_AES_DIRECT)
  390. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  391. Aes* aes, const byte* inBlock, byte* outBlock)
  392. {
  393. #ifdef WC_DEBUG_CIPHER_LIFECYCLE
  394. {
  395. int ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
  396. if (ret < 0)
  397. return ret;
  398. }
  399. #endif
  400. /* Thread mutex protection handled in Pic32Crypto */
  401. return wc_Pic32AesCrypt(aes->key, aes->keylen, NULL, 0,
  402. outBlock, inBlock, AES_BLOCK_SIZE,
  403. PIC32_ENCRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_RECB);
  404. }
  405. #endif
  406. #if defined(HAVE_AES_DECRYPT) && defined(WOLFSSL_AES_DIRECT)
  407. static WARN_UNUSED_RESULT int wc_AesDecrypt(
  408. Aes* aes, const byte* inBlock, byte* outBlock)
  409. {
  410. #ifdef WC_DEBUG_CIPHER_LIFECYCLE
  411. {
  412. int ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
  413. if (ret < 0)
  414. return ret;
  415. }
  416. #endif
  417. /* Thread mutex protection handled in Pic32Crypto */
  418. return wc_Pic32AesCrypt(aes->key, aes->keylen, NULL, 0,
  419. outBlock, inBlock, AES_BLOCK_SIZE,
  420. PIC32_DECRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_RECB);
  421. }
  422. #endif
  423. #elif defined(WOLFSSL_NRF51_AES)
  424. /* Use built-in AES hardware - AES 128 ECB Encrypt Only */
  425. #include "wolfssl/wolfcrypt/port/nrf51.h"
  426. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  427. Aes* aes, const byte* inBlock, byte* outBlock)
  428. {
  429. int ret;
  430. #ifdef WC_DEBUG_CIPHER_LIFECYCLE
  431. ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
  432. if (ret < 0)
  433. return ret;
  434. #endif
  435. ret = wolfSSL_CryptHwMutexLock();
  436. if (ret == 0) {
  437. ret = nrf51_aes_encrypt(inBlock, (byte*)aes->key, aes->rounds,
  438. outBlock);
  439. wolfSSL_CryptHwMutexUnLock();
  440. }
  441. return ret;
  442. }
  443. #ifdef HAVE_AES_DECRYPT
  444. #error nRF51 AES Hardware does not support decrypt
  445. #endif /* HAVE_AES_DECRYPT */
  446. #elif defined(WOLFSSL_ESP32_CRYPT) && \
  447. !defined(NO_WOLFSSL_ESP32_CRYPT_AES)
  448. #include <esp_log.h>
  449. #include <wolfssl/wolfcrypt/port/Espressif/esp32-crypt.h>
  450. const char* TAG = "aes";
  451. /* We'll use SW for fallback:
  452. * unsupported key lengths. (e.g. ESP32-S3)
  453. * chipsets not implemented.
  454. * hardware busy. */
  455. #define NEED_AES_TABLES
  456. #define NEED_AES_HW_FALLBACK
  457. #define NEED_SOFTWARE_AES_SETKEY
  458. #undef WOLFSSL_AES_DIRECT
  459. #define WOLFSSL_AES_DIRECT
  460. /* Encrypt: If we choose to never have a fallback to SW: */
  461. #if !defined(NEED_AES_HW_FALLBACK) && (defined(HAVE_AESGCM) || defined(WOLFSSL_AES_DIRECT))
  462. static WARN_UNUSED_RESULT int wc_AesEncrypt( /* calling this one when NO_AES_192 is defined */
  463. Aes* aes, const byte* inBlock, byte* outBlock)
  464. {
  465. int ret;
  466. #ifdef WC_DEBUG_CIPHER_LIFECYCLE
  467. ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
  468. if (ret < 0)
  469. return ret;
  470. #endif
  471. /* Thread mutex protection handled in esp_aes_hw_InUse */
  472. #ifdef NEED_AES_HW_FALLBACK
  473. if (wc_esp32AesSupportedKeyLen(aes)) {
  474. ret = wc_esp32AesEncrypt(aes, inBlock, outBlock);
  475. }
  476. #else
  477. ret = wc_esp32AesEncrypt(aes, inBlock, outBlock);
  478. #endif
  479. return ret;
  480. }
  481. #endif
  482. /* Decrypt: If we choose to never have a fallback to SW: */
  483. #if !defined(NEED_AES_HW_FALLBACK) && (defined(HAVE_AES_DECRYPT) && defined(WOLFSSL_AES_DIRECT))
  484. static WARN_UNUSED_RESULT int wc_AesDecrypt(
  485. Aes* aes, const byte* inBlock, byte* outBlock)
  486. {
  487. int ret = 0;
  488. #ifdef WC_DEBUG_CIPHER_LIFECYCLE
  489. ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
  490. if (ret < 0)
  491. return ret;
  492. #endif
  493. /* Thread mutex protection handled in esp_aes_hw_InUse */
  494. #ifdef NEED_AES_HW_FALLBACK
  495. if (wc_esp32AesSupportedKeyLen(aes)) {
  496. ret = wc_esp32AesDecrypt(aes, inBlock, outBlock);
  497. }
  498. else {
  499. ret = wc_AesDecrypt_SW(aes, inBlock, outBlock);
  500. }
  501. #else
  502. /* if we don't need fallback, always use HW */
  503. ret = wc_esp32AesDecrypt(aes, inBlock, outBlock);
  504. #endif
  505. return ret;
  506. }
  507. #endif
  508. #elif defined(WOLFSSL_AESNI)
  509. #define NEED_AES_TABLES
  510. /* Each platform needs to query info type 1 from cpuid to see if aesni is
  511. * supported. Also, let's setup a macro for proper linkage w/o ABI conflicts
  512. */
  513. #ifndef AESNI_ALIGN
  514. #define AESNI_ALIGN 16
  515. #endif
  516. static int checkedAESNI = 0;
  517. static int haveAESNI = 0;
  518. static word32 intel_flags = 0;
  519. static WARN_UNUSED_RESULT int Check_CPU_support_AES(void)
  520. {
  521. intel_flags = cpuid_get_flags();
  522. return IS_INTEL_AESNI(intel_flags) != 0;
  523. }
  524. /* tell C compiler these are asm functions in case any mix up of ABI underscore
  525. prefix between clang/gcc/llvm etc */
  526. #ifdef HAVE_AES_CBC
  527. void AES_CBC_encrypt_AESNI(const unsigned char* in, unsigned char* out,
  528. unsigned char* ivec, unsigned long length,
  529. const unsigned char* KS, int nr)
  530. XASM_LINK("AES_CBC_encrypt_AESNI");
  531. #ifdef HAVE_AES_DECRYPT
  532. #if defined(WOLFSSL_AESNI_BY4) || defined(WOLFSSL_X86_BUILD)
  533. void AES_CBC_decrypt_AESNI_by4(const unsigned char* in, unsigned char* out,
  534. unsigned char* ivec, unsigned long length,
  535. const unsigned char* KS, int nr)
  536. XASM_LINK("AES_CBC_decrypt_AESNI_by4");
  537. #elif defined(WOLFSSL_AESNI_BY6)
  538. void AES_CBC_decrypt_AESNI_by6(const unsigned char* in, unsigned char* out,
  539. unsigned char* ivec, unsigned long length,
  540. const unsigned char* KS, int nr)
  541. XASM_LINK("AES_CBC_decrypt_AESNI_by6");
  542. #else /* WOLFSSL_AESNI_BYx */
  543. void AES_CBC_decrypt_AESNI_by8(const unsigned char* in, unsigned char* out,
  544. unsigned char* ivec, unsigned long length,
  545. const unsigned char* KS, int nr)
  546. XASM_LINK("AES_CBC_decrypt_AESNI_by8");
  547. #endif /* WOLFSSL_AESNI_BYx */
  548. #endif /* HAVE_AES_DECRYPT */
  549. #endif /* HAVE_AES_CBC */
  550. void AES_ECB_encrypt_AESNI(const unsigned char* in, unsigned char* out,
  551. unsigned long length, const unsigned char* KS, int nr)
  552. XASM_LINK("AES_ECB_encrypt_AESNI");
  553. #ifdef HAVE_AES_DECRYPT
  554. void AES_ECB_decrypt_AESNI(const unsigned char* in, unsigned char* out,
  555. unsigned long length, const unsigned char* KS, int nr)
  556. XASM_LINK("AES_ECB_decrypt_AESNI");
  557. #endif
  558. void AES_128_Key_Expansion_AESNI(const unsigned char* userkey,
  559. unsigned char* key_schedule)
  560. XASM_LINK("AES_128_Key_Expansion_AESNI");
  561. void AES_192_Key_Expansion_AESNI(const unsigned char* userkey,
  562. unsigned char* key_schedule)
  563. XASM_LINK("AES_192_Key_Expansion_AESNI");
  564. void AES_256_Key_Expansion_AESNI(const unsigned char* userkey,
  565. unsigned char* key_schedule)
  566. XASM_LINK("AES_256_Key_Expansion_AESNI");
  567. static WARN_UNUSED_RESULT int AES_set_encrypt_key_AESNI(
  568. const unsigned char *userKey, const int bits, Aes* aes)
  569. {
  570. int ret;
  571. ASSERT_SAVED_VECTOR_REGISTERS();
  572. if (!userKey || !aes)
  573. return BAD_FUNC_ARG;
  574. switch (bits) {
  575. case 128:
  576. AES_128_Key_Expansion_AESNI (userKey,(byte*)aes->key); aes->rounds = 10;
  577. return 0;
  578. case 192:
  579. AES_192_Key_Expansion_AESNI (userKey,(byte*)aes->key); aes->rounds = 12;
  580. return 0;
  581. case 256:
  582. AES_256_Key_Expansion_AESNI (userKey,(byte*)aes->key); aes->rounds = 14;
  583. return 0;
  584. default:
  585. ret = BAD_FUNC_ARG;
  586. }
  587. return ret;
  588. }
  589. #ifdef HAVE_AES_DECRYPT
  590. static WARN_UNUSED_RESULT int AES_set_decrypt_key_AESNI(
  591. const unsigned char* userKey, const int bits, Aes* aes)
  592. {
  593. word32 nr;
  594. #ifdef WOLFSSL_SMALL_STACK
  595. Aes *temp_key;
  596. #else
  597. Aes temp_key[1];
  598. #endif
  599. __m128i *Key_Schedule;
  600. __m128i *Temp_Key_Schedule;
  601. ASSERT_SAVED_VECTOR_REGISTERS();
  602. if (!userKey || !aes)
  603. return BAD_FUNC_ARG;
  604. #ifdef WOLFSSL_SMALL_STACK
  605. if ((temp_key = (Aes *)XMALLOC(sizeof *aes, aes->heap,
  606. DYNAMIC_TYPE_AES)) == NULL)
  607. return MEMORY_E;
  608. #endif
  609. if (AES_set_encrypt_key_AESNI(userKey,bits,temp_key) == BAD_FUNC_ARG) {
  610. #ifdef WOLFSSL_SMALL_STACK
  611. XFREE(temp_key, aes->heap, DYNAMIC_TYPE_AES);
  612. #endif
  613. return BAD_FUNC_ARG;
  614. }
  615. Key_Schedule = (__m128i*)aes->key;
  616. Temp_Key_Schedule = (__m128i*)temp_key->key;
  617. nr = temp_key->rounds;
  618. aes->rounds = nr;
  619. Key_Schedule[nr] = Temp_Key_Schedule[0];
  620. Key_Schedule[nr-1] = _mm_aesimc_si128(Temp_Key_Schedule[1]);
  621. Key_Schedule[nr-2] = _mm_aesimc_si128(Temp_Key_Schedule[2]);
  622. Key_Schedule[nr-3] = _mm_aesimc_si128(Temp_Key_Schedule[3]);
  623. Key_Schedule[nr-4] = _mm_aesimc_si128(Temp_Key_Schedule[4]);
  624. Key_Schedule[nr-5] = _mm_aesimc_si128(Temp_Key_Schedule[5]);
  625. Key_Schedule[nr-6] = _mm_aesimc_si128(Temp_Key_Schedule[6]);
  626. Key_Schedule[nr-7] = _mm_aesimc_si128(Temp_Key_Schedule[7]);
  627. Key_Schedule[nr-8] = _mm_aesimc_si128(Temp_Key_Schedule[8]);
  628. Key_Schedule[nr-9] = _mm_aesimc_si128(Temp_Key_Schedule[9]);
  629. if (nr>10) {
  630. Key_Schedule[nr-10] = _mm_aesimc_si128(Temp_Key_Schedule[10]);
  631. Key_Schedule[nr-11] = _mm_aesimc_si128(Temp_Key_Schedule[11]);
  632. }
  633. if (nr>12) {
  634. Key_Schedule[nr-12] = _mm_aesimc_si128(Temp_Key_Schedule[12]);
  635. Key_Schedule[nr-13] = _mm_aesimc_si128(Temp_Key_Schedule[13]);
  636. }
  637. Key_Schedule[0] = Temp_Key_Schedule[nr];
  638. #ifdef WOLFSSL_SMALL_STACK
  639. XFREE(temp_key, aes->heap, DYNAMIC_TYPE_AES);
  640. #endif
  641. return 0;
  642. }
  643. #endif /* HAVE_AES_DECRYPT */
  644. #elif (defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) \
  645. && !defined(WOLFSSL_QNX_CAAM)) || \
  646. ((defined(WOLFSSL_AFALG) || defined(WOLFSSL_DEVCRYPTO_AES)) && \
  647. defined(HAVE_AESCCM))
  648. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  649. Aes* aes, const byte* inBlock, byte* outBlock)
  650. {
  651. #ifdef WC_DEBUG_CIPHER_LIFECYCLE
  652. {
  653. int ret =
  654. wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
  655. if (ret < 0)
  656. return ret;
  657. }
  658. #endif
  659. return wc_AesEncryptDirect(aes, outBlock, inBlock);
  660. }
  661. #elif defined(WOLFSSL_AFALG)
  662. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  663. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  664. /* implemented in wolfcrypt/src/port/devcrypto/devcrypto_aes.c */
  665. #elif defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
  666. #include "hal_data.h"
  667. #ifndef WOLFSSL_SCE_AES256_HANDLE
  668. #define WOLFSSL_SCE_AES256_HANDLE g_sce_aes_256
  669. #endif
  670. #ifndef WOLFSSL_SCE_AES192_HANDLE
  671. #define WOLFSSL_SCE_AES192_HANDLE g_sce_aes_192
  672. #endif
  673. #ifndef WOLFSSL_SCE_AES128_HANDLE
  674. #define WOLFSSL_SCE_AES128_HANDLE g_sce_aes_128
  675. #endif
  676. static WARN_UNUSED_RESULT int AES_ECB_encrypt(
  677. Aes* aes, const byte* inBlock, byte* outBlock, int sz)
  678. {
  679. word32 ret;
  680. if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
  681. CRYPTO_WORD_ENDIAN_BIG) {
  682. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  683. }
  684. switch (aes->keylen) {
  685. #ifdef WOLFSSL_AES_128
  686. case AES_128_KEY_SIZE:
  687. ret = WOLFSSL_SCE_AES128_HANDLE.p_api->encrypt(
  688. WOLFSSL_SCE_AES128_HANDLE.p_ctrl, aes->key,
  689. NULL, (sz / sizeof(word32)), (word32*)inBlock,
  690. (word32*)outBlock);
  691. break;
  692. #endif
  693. #ifdef WOLFSSL_AES_192
  694. case AES_192_KEY_SIZE:
  695. ret = WOLFSSL_SCE_AES192_HANDLE.p_api->encrypt(
  696. WOLFSSL_SCE_AES192_HANDLE.p_ctrl, aes->key,
  697. NULL, (sz / sizeof(word32)), (word32*)inBlock,
  698. (word32*)outBlock);
  699. break;
  700. #endif
  701. #ifdef WOLFSSL_AES_256
  702. case AES_256_KEY_SIZE:
  703. ret = WOLFSSL_SCE_AES256_HANDLE.p_api->encrypt(
  704. WOLFSSL_SCE_AES256_HANDLE.p_ctrl, aes->key,
  705. NULL, (sz / sizeof(word32)), (word32*)inBlock,
  706. (word32*)outBlock);
  707. break;
  708. #endif
  709. default:
  710. WOLFSSL_MSG("Unknown key size");
  711. return BAD_FUNC_ARG;
  712. }
  713. if (ret != SSP_SUCCESS) {
  714. /* revert input */
  715. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  716. return WC_HW_E;
  717. }
  718. if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
  719. CRYPTO_WORD_ENDIAN_BIG) {
  720. ByteReverseWords((word32*)outBlock, (word32*)outBlock, sz);
  721. if (inBlock != outBlock) {
  722. /* revert input */
  723. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  724. }
  725. }
  726. return 0;
  727. }
  728. #if defined(HAVE_AES_DECRYPT)
  729. static WARN_UNUSED_RESULT int AES_ECB_decrypt(
  730. Aes* aes, const byte* inBlock, byte* outBlock, int sz)
  731. {
  732. word32 ret;
  733. if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
  734. CRYPTO_WORD_ENDIAN_BIG) {
  735. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  736. }
  737. switch (aes->keylen) {
  738. #ifdef WOLFSSL_AES_128
  739. case AES_128_KEY_SIZE:
  740. ret = WOLFSSL_SCE_AES128_HANDLE.p_api->decrypt(
  741. WOLFSSL_SCE_AES128_HANDLE.p_ctrl, aes->key, aes->reg,
  742. (sz / sizeof(word32)), (word32*)inBlock,
  743. (word32*)outBlock);
  744. break;
  745. #endif
  746. #ifdef WOLFSSL_AES_192
  747. case AES_192_KEY_SIZE:
  748. ret = WOLFSSL_SCE_AES192_HANDLE.p_api->decrypt(
  749. WOLFSSL_SCE_AES192_HANDLE.p_ctrl, aes->key, aes->reg,
  750. (sz / sizeof(word32)), (word32*)inBlock,
  751. (word32*)outBlock);
  752. break;
  753. #endif
  754. #ifdef WOLFSSL_AES_256
  755. case AES_256_KEY_SIZE:
  756. ret = WOLFSSL_SCE_AES256_HANDLE.p_api->decrypt(
  757. WOLFSSL_SCE_AES256_HANDLE.p_ctrl, aes->key, aes->reg,
  758. (sz / sizeof(word32)), (word32*)inBlock,
  759. (word32*)outBlock);
  760. break;
  761. #endif
  762. default:
  763. WOLFSSL_MSG("Unknown key size");
  764. return BAD_FUNC_ARG;
  765. }
  766. if (ret != SSP_SUCCESS) {
  767. return WC_HW_E;
  768. }
  769. if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
  770. CRYPTO_WORD_ENDIAN_BIG) {
  771. ByteReverseWords((word32*)outBlock, (word32*)outBlock, sz);
  772. if (inBlock != outBlock) {
  773. /* revert input */
  774. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  775. }
  776. }
  777. return 0;
  778. }
  779. #endif /* HAVE_AES_DECRYPT */
  780. #if defined(HAVE_AESGCM) || defined(WOLFSSL_AES_DIRECT)
  781. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  782. Aes* aes, const byte* inBlock, byte* outBlock)
  783. {
  784. #ifdef WC_DEBUG_CIPHER_LIFECYCLE
  785. {
  786. int ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
  787. if (ret < 0)
  788. return ret;
  789. }
  790. #endif
  791. return AES_ECB_encrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
  792. }
  793. #endif
  794. #if defined(HAVE_AES_DECRYPT) && defined(WOLFSSL_AES_DIRECT)
  795. static WARN_UNUSED_RESULT int wc_AesDecrypt(
  796. Aes* aes, const byte* inBlock, byte* outBlock)
  797. {
  798. #ifdef WC_DEBUG_CIPHER_LIFECYCLE
  799. {
  800. int ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
  801. if (ret < 0)
  802. return ret;
  803. }
  804. #endif
  805. return AES_ECB_decrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
  806. }
  807. #endif
  808. #elif defined(WOLFSSL_KCAPI_AES)
  809. /* Only CBC and GCM are in wolfcrypt/src/port/kcapi/kcapi_aes.c */
  810. #if defined(WOLFSSL_AES_COUNTER) || defined(HAVE_AESCCM) || \
  811. defined(WOLFSSL_CMAC) || defined(WOLFSSL_AES_OFB) || \
  812. defined(WOLFSSL_AES_CFB) || defined(HAVE_AES_ECB) || \
  813. defined(WOLFSSL_AES_DIRECT) || defined(WOLFSSL_AES_XTS) || \
  814. (defined(HAVE_AES_CBC) && defined(WOLFSSL_NO_KCAPI_AES_CBC))
  815. #define NEED_AES_TABLES
  816. #endif
  817. #elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
  818. /* implemented in wolfcrypt/src/port/psa/psa_aes.c */
  819. #else
  820. /* using wolfCrypt software implementation */
  821. #define NEED_AES_TABLES
  822. #endif
  823. #ifdef NEED_AES_TABLES
  824. #ifndef WC_AES_BITSLICED
  825. #if !defined(WOLFSSL_SILABS_SE_ACCEL) || \
  826. defined(NO_ESP32_CRYPT) || defined(NO_WOLFSSL_ESP32_CRYPT_AES) || \
  827. defined(NEED_AES_HW_FALLBACK)
/* AES key-schedule round constants (Rcon) stored in the most
 * significant byte of each word. */
static const FLASH_QUALIFIER word32 rcon[] = {
    0x01000000, 0x02000000, 0x04000000, 0x08000000,
    0x10000000, 0x20000000, 0x40000000, 0x80000000,
    0x1B000000, 0x36000000,
    /* for 128-bit blocks, Rijndael never uses more than 10 rcon values */
};
  834. #endif
  835. #ifndef WOLFSSL_AES_SMALL_TABLES
  836. static const FLASH_QUALIFIER word32 Te[4][256] = {
  837. {
  838. 0xc66363a5U, 0xf87c7c84U, 0xee777799U, 0xf67b7b8dU,
  839. 0xfff2f20dU, 0xd66b6bbdU, 0xde6f6fb1U, 0x91c5c554U,
  840. 0x60303050U, 0x02010103U, 0xce6767a9U, 0x562b2b7dU,
  841. 0xe7fefe19U, 0xb5d7d762U, 0x4dababe6U, 0xec76769aU,
  842. 0x8fcaca45U, 0x1f82829dU, 0x89c9c940U, 0xfa7d7d87U,
  843. 0xeffafa15U, 0xb25959ebU, 0x8e4747c9U, 0xfbf0f00bU,
  844. 0x41adadecU, 0xb3d4d467U, 0x5fa2a2fdU, 0x45afafeaU,
  845. 0x239c9cbfU, 0x53a4a4f7U, 0xe4727296U, 0x9bc0c05bU,
  846. 0x75b7b7c2U, 0xe1fdfd1cU, 0x3d9393aeU, 0x4c26266aU,
  847. 0x6c36365aU, 0x7e3f3f41U, 0xf5f7f702U, 0x83cccc4fU,
  848. 0x6834345cU, 0x51a5a5f4U, 0xd1e5e534U, 0xf9f1f108U,
  849. 0xe2717193U, 0xabd8d873U, 0x62313153U, 0x2a15153fU,
  850. 0x0804040cU, 0x95c7c752U, 0x46232365U, 0x9dc3c35eU,
  851. 0x30181828U, 0x379696a1U, 0x0a05050fU, 0x2f9a9ab5U,
  852. 0x0e070709U, 0x24121236U, 0x1b80809bU, 0xdfe2e23dU,
  853. 0xcdebeb26U, 0x4e272769U, 0x7fb2b2cdU, 0xea75759fU,
  854. 0x1209091bU, 0x1d83839eU, 0x582c2c74U, 0x341a1a2eU,
  855. 0x361b1b2dU, 0xdc6e6eb2U, 0xb45a5aeeU, 0x5ba0a0fbU,
  856. 0xa45252f6U, 0x763b3b4dU, 0xb7d6d661U, 0x7db3b3ceU,
  857. 0x5229297bU, 0xdde3e33eU, 0x5e2f2f71U, 0x13848497U,
  858. 0xa65353f5U, 0xb9d1d168U, 0x00000000U, 0xc1eded2cU,
  859. 0x40202060U, 0xe3fcfc1fU, 0x79b1b1c8U, 0xb65b5bedU,
  860. 0xd46a6abeU, 0x8dcbcb46U, 0x67bebed9U, 0x7239394bU,
  861. 0x944a4adeU, 0x984c4cd4U, 0xb05858e8U, 0x85cfcf4aU,
  862. 0xbbd0d06bU, 0xc5efef2aU, 0x4faaaae5U, 0xedfbfb16U,
  863. 0x864343c5U, 0x9a4d4dd7U, 0x66333355U, 0x11858594U,
  864. 0x8a4545cfU, 0xe9f9f910U, 0x04020206U, 0xfe7f7f81U,
  865. 0xa05050f0U, 0x783c3c44U, 0x259f9fbaU, 0x4ba8a8e3U,
  866. 0xa25151f3U, 0x5da3a3feU, 0x804040c0U, 0x058f8f8aU,
  867. 0x3f9292adU, 0x219d9dbcU, 0x70383848U, 0xf1f5f504U,
  868. 0x63bcbcdfU, 0x77b6b6c1U, 0xafdada75U, 0x42212163U,
  869. 0x20101030U, 0xe5ffff1aU, 0xfdf3f30eU, 0xbfd2d26dU,
  870. 0x81cdcd4cU, 0x180c0c14U, 0x26131335U, 0xc3ecec2fU,
  871. 0xbe5f5fe1U, 0x359797a2U, 0x884444ccU, 0x2e171739U,
  872. 0x93c4c457U, 0x55a7a7f2U, 0xfc7e7e82U, 0x7a3d3d47U,
  873. 0xc86464acU, 0xba5d5de7U, 0x3219192bU, 0xe6737395U,
  874. 0xc06060a0U, 0x19818198U, 0x9e4f4fd1U, 0xa3dcdc7fU,
  875. 0x44222266U, 0x542a2a7eU, 0x3b9090abU, 0x0b888883U,
  876. 0x8c4646caU, 0xc7eeee29U, 0x6bb8b8d3U, 0x2814143cU,
  877. 0xa7dede79U, 0xbc5e5ee2U, 0x160b0b1dU, 0xaddbdb76U,
  878. 0xdbe0e03bU, 0x64323256U, 0x743a3a4eU, 0x140a0a1eU,
  879. 0x924949dbU, 0x0c06060aU, 0x4824246cU, 0xb85c5ce4U,
  880. 0x9fc2c25dU, 0xbdd3d36eU, 0x43acacefU, 0xc46262a6U,
  881. 0x399191a8U, 0x319595a4U, 0xd3e4e437U, 0xf279798bU,
  882. 0xd5e7e732U, 0x8bc8c843U, 0x6e373759U, 0xda6d6db7U,
  883. 0x018d8d8cU, 0xb1d5d564U, 0x9c4e4ed2U, 0x49a9a9e0U,
  884. 0xd86c6cb4U, 0xac5656faU, 0xf3f4f407U, 0xcfeaea25U,
  885. 0xca6565afU, 0xf47a7a8eU, 0x47aeaee9U, 0x10080818U,
  886. 0x6fbabad5U, 0xf0787888U, 0x4a25256fU, 0x5c2e2e72U,
  887. 0x381c1c24U, 0x57a6a6f1U, 0x73b4b4c7U, 0x97c6c651U,
  888. 0xcbe8e823U, 0xa1dddd7cU, 0xe874749cU, 0x3e1f1f21U,
  889. 0x964b4bddU, 0x61bdbddcU, 0x0d8b8b86U, 0x0f8a8a85U,
  890. 0xe0707090U, 0x7c3e3e42U, 0x71b5b5c4U, 0xcc6666aaU,
  891. 0x904848d8U, 0x06030305U, 0xf7f6f601U, 0x1c0e0e12U,
  892. 0xc26161a3U, 0x6a35355fU, 0xae5757f9U, 0x69b9b9d0U,
  893. 0x17868691U, 0x99c1c158U, 0x3a1d1d27U, 0x279e9eb9U,
  894. 0xd9e1e138U, 0xebf8f813U, 0x2b9898b3U, 0x22111133U,
  895. 0xd26969bbU, 0xa9d9d970U, 0x078e8e89U, 0x339494a7U,
  896. 0x2d9b9bb6U, 0x3c1e1e22U, 0x15878792U, 0xc9e9e920U,
  897. 0x87cece49U, 0xaa5555ffU, 0x50282878U, 0xa5dfdf7aU,
  898. 0x038c8c8fU, 0x59a1a1f8U, 0x09898980U, 0x1a0d0d17U,
  899. 0x65bfbfdaU, 0xd7e6e631U, 0x844242c6U, 0xd06868b8U,
  900. 0x824141c3U, 0x299999b0U, 0x5a2d2d77U, 0x1e0f0f11U,
  901. 0x7bb0b0cbU, 0xa85454fcU, 0x6dbbbbd6U, 0x2c16163aU,
  902. },
  903. {
  904. 0xa5c66363U, 0x84f87c7cU, 0x99ee7777U, 0x8df67b7bU,
  905. 0x0dfff2f2U, 0xbdd66b6bU, 0xb1de6f6fU, 0x5491c5c5U,
  906. 0x50603030U, 0x03020101U, 0xa9ce6767U, 0x7d562b2bU,
  907. 0x19e7fefeU, 0x62b5d7d7U, 0xe64dababU, 0x9aec7676U,
  908. 0x458fcacaU, 0x9d1f8282U, 0x4089c9c9U, 0x87fa7d7dU,
  909. 0x15effafaU, 0xebb25959U, 0xc98e4747U, 0x0bfbf0f0U,
  910. 0xec41adadU, 0x67b3d4d4U, 0xfd5fa2a2U, 0xea45afafU,
  911. 0xbf239c9cU, 0xf753a4a4U, 0x96e47272U, 0x5b9bc0c0U,
  912. 0xc275b7b7U, 0x1ce1fdfdU, 0xae3d9393U, 0x6a4c2626U,
  913. 0x5a6c3636U, 0x417e3f3fU, 0x02f5f7f7U, 0x4f83ccccU,
  914. 0x5c683434U, 0xf451a5a5U, 0x34d1e5e5U, 0x08f9f1f1U,
  915. 0x93e27171U, 0x73abd8d8U, 0x53623131U, 0x3f2a1515U,
  916. 0x0c080404U, 0x5295c7c7U, 0x65462323U, 0x5e9dc3c3U,
  917. 0x28301818U, 0xa1379696U, 0x0f0a0505U, 0xb52f9a9aU,
  918. 0x090e0707U, 0x36241212U, 0x9b1b8080U, 0x3ddfe2e2U,
  919. 0x26cdebebU, 0x694e2727U, 0xcd7fb2b2U, 0x9fea7575U,
  920. 0x1b120909U, 0x9e1d8383U, 0x74582c2cU, 0x2e341a1aU,
  921. 0x2d361b1bU, 0xb2dc6e6eU, 0xeeb45a5aU, 0xfb5ba0a0U,
  922. 0xf6a45252U, 0x4d763b3bU, 0x61b7d6d6U, 0xce7db3b3U,
  923. 0x7b522929U, 0x3edde3e3U, 0x715e2f2fU, 0x97138484U,
  924. 0xf5a65353U, 0x68b9d1d1U, 0x00000000U, 0x2cc1ededU,
  925. 0x60402020U, 0x1fe3fcfcU, 0xc879b1b1U, 0xedb65b5bU,
  926. 0xbed46a6aU, 0x468dcbcbU, 0xd967bebeU, 0x4b723939U,
  927. 0xde944a4aU, 0xd4984c4cU, 0xe8b05858U, 0x4a85cfcfU,
  928. 0x6bbbd0d0U, 0x2ac5efefU, 0xe54faaaaU, 0x16edfbfbU,
  929. 0xc5864343U, 0xd79a4d4dU, 0x55663333U, 0x94118585U,
  930. 0xcf8a4545U, 0x10e9f9f9U, 0x06040202U, 0x81fe7f7fU,
  931. 0xf0a05050U, 0x44783c3cU, 0xba259f9fU, 0xe34ba8a8U,
  932. 0xf3a25151U, 0xfe5da3a3U, 0xc0804040U, 0x8a058f8fU,
  933. 0xad3f9292U, 0xbc219d9dU, 0x48703838U, 0x04f1f5f5U,
  934. 0xdf63bcbcU, 0xc177b6b6U, 0x75afdadaU, 0x63422121U,
  935. 0x30201010U, 0x1ae5ffffU, 0x0efdf3f3U, 0x6dbfd2d2U,
  936. 0x4c81cdcdU, 0x14180c0cU, 0x35261313U, 0x2fc3ececU,
  937. 0xe1be5f5fU, 0xa2359797U, 0xcc884444U, 0x392e1717U,
  938. 0x5793c4c4U, 0xf255a7a7U, 0x82fc7e7eU, 0x477a3d3dU,
  939. 0xacc86464U, 0xe7ba5d5dU, 0x2b321919U, 0x95e67373U,
  940. 0xa0c06060U, 0x98198181U, 0xd19e4f4fU, 0x7fa3dcdcU,
  941. 0x66442222U, 0x7e542a2aU, 0xab3b9090U, 0x830b8888U,
  942. 0xca8c4646U, 0x29c7eeeeU, 0xd36bb8b8U, 0x3c281414U,
  943. 0x79a7dedeU, 0xe2bc5e5eU, 0x1d160b0bU, 0x76addbdbU,
  944. 0x3bdbe0e0U, 0x56643232U, 0x4e743a3aU, 0x1e140a0aU,
  945. 0xdb924949U, 0x0a0c0606U, 0x6c482424U, 0xe4b85c5cU,
  946. 0x5d9fc2c2U, 0x6ebdd3d3U, 0xef43acacU, 0xa6c46262U,
  947. 0xa8399191U, 0xa4319595U, 0x37d3e4e4U, 0x8bf27979U,
  948. 0x32d5e7e7U, 0x438bc8c8U, 0x596e3737U, 0xb7da6d6dU,
  949. 0x8c018d8dU, 0x64b1d5d5U, 0xd29c4e4eU, 0xe049a9a9U,
  950. 0xb4d86c6cU, 0xfaac5656U, 0x07f3f4f4U, 0x25cfeaeaU,
  951. 0xafca6565U, 0x8ef47a7aU, 0xe947aeaeU, 0x18100808U,
  952. 0xd56fbabaU, 0x88f07878U, 0x6f4a2525U, 0x725c2e2eU,
  953. 0x24381c1cU, 0xf157a6a6U, 0xc773b4b4U, 0x5197c6c6U,
  954. 0x23cbe8e8U, 0x7ca1ddddU, 0x9ce87474U, 0x213e1f1fU,
  955. 0xdd964b4bU, 0xdc61bdbdU, 0x860d8b8bU, 0x850f8a8aU,
  956. 0x90e07070U, 0x427c3e3eU, 0xc471b5b5U, 0xaacc6666U,
  957. 0xd8904848U, 0x05060303U, 0x01f7f6f6U, 0x121c0e0eU,
  958. 0xa3c26161U, 0x5f6a3535U, 0xf9ae5757U, 0xd069b9b9U,
  959. 0x91178686U, 0x5899c1c1U, 0x273a1d1dU, 0xb9279e9eU,
  960. 0x38d9e1e1U, 0x13ebf8f8U, 0xb32b9898U, 0x33221111U,
  961. 0xbbd26969U, 0x70a9d9d9U, 0x89078e8eU, 0xa7339494U,
  962. 0xb62d9b9bU, 0x223c1e1eU, 0x92158787U, 0x20c9e9e9U,
  963. 0x4987ceceU, 0xffaa5555U, 0x78502828U, 0x7aa5dfdfU,
  964. 0x8f038c8cU, 0xf859a1a1U, 0x80098989U, 0x171a0d0dU,
  965. 0xda65bfbfU, 0x31d7e6e6U, 0xc6844242U, 0xb8d06868U,
  966. 0xc3824141U, 0xb0299999U, 0x775a2d2dU, 0x111e0f0fU,
  967. 0xcb7bb0b0U, 0xfca85454U, 0xd66dbbbbU, 0x3a2c1616U,
  968. },
  969. {
  970. 0x63a5c663U, 0x7c84f87cU, 0x7799ee77U, 0x7b8df67bU,
  971. 0xf20dfff2U, 0x6bbdd66bU, 0x6fb1de6fU, 0xc55491c5U,
  972. 0x30506030U, 0x01030201U, 0x67a9ce67U, 0x2b7d562bU,
  973. 0xfe19e7feU, 0xd762b5d7U, 0xabe64dabU, 0x769aec76U,
  974. 0xca458fcaU, 0x829d1f82U, 0xc94089c9U, 0x7d87fa7dU,
  975. 0xfa15effaU, 0x59ebb259U, 0x47c98e47U, 0xf00bfbf0U,
  976. 0xadec41adU, 0xd467b3d4U, 0xa2fd5fa2U, 0xafea45afU,
  977. 0x9cbf239cU, 0xa4f753a4U, 0x7296e472U, 0xc05b9bc0U,
  978. 0xb7c275b7U, 0xfd1ce1fdU, 0x93ae3d93U, 0x266a4c26U,
  979. 0x365a6c36U, 0x3f417e3fU, 0xf702f5f7U, 0xcc4f83ccU,
  980. 0x345c6834U, 0xa5f451a5U, 0xe534d1e5U, 0xf108f9f1U,
  981. 0x7193e271U, 0xd873abd8U, 0x31536231U, 0x153f2a15U,
  982. 0x040c0804U, 0xc75295c7U, 0x23654623U, 0xc35e9dc3U,
  983. 0x18283018U, 0x96a13796U, 0x050f0a05U, 0x9ab52f9aU,
  984. 0x07090e07U, 0x12362412U, 0x809b1b80U, 0xe23ddfe2U,
  985. 0xeb26cdebU, 0x27694e27U, 0xb2cd7fb2U, 0x759fea75U,
  986. 0x091b1209U, 0x839e1d83U, 0x2c74582cU, 0x1a2e341aU,
  987. 0x1b2d361bU, 0x6eb2dc6eU, 0x5aeeb45aU, 0xa0fb5ba0U,
  988. 0x52f6a452U, 0x3b4d763bU, 0xd661b7d6U, 0xb3ce7db3U,
  989. 0x297b5229U, 0xe33edde3U, 0x2f715e2fU, 0x84971384U,
  990. 0x53f5a653U, 0xd168b9d1U, 0x00000000U, 0xed2cc1edU,
  991. 0x20604020U, 0xfc1fe3fcU, 0xb1c879b1U, 0x5bedb65bU,
  992. 0x6abed46aU, 0xcb468dcbU, 0xbed967beU, 0x394b7239U,
  993. 0x4ade944aU, 0x4cd4984cU, 0x58e8b058U, 0xcf4a85cfU,
  994. 0xd06bbbd0U, 0xef2ac5efU, 0xaae54faaU, 0xfb16edfbU,
  995. 0x43c58643U, 0x4dd79a4dU, 0x33556633U, 0x85941185U,
  996. 0x45cf8a45U, 0xf910e9f9U, 0x02060402U, 0x7f81fe7fU,
  997. 0x50f0a050U, 0x3c44783cU, 0x9fba259fU, 0xa8e34ba8U,
  998. 0x51f3a251U, 0xa3fe5da3U, 0x40c08040U, 0x8f8a058fU,
  999. 0x92ad3f92U, 0x9dbc219dU, 0x38487038U, 0xf504f1f5U,
  1000. 0xbcdf63bcU, 0xb6c177b6U, 0xda75afdaU, 0x21634221U,
  1001. 0x10302010U, 0xff1ae5ffU, 0xf30efdf3U, 0xd26dbfd2U,
  1002. 0xcd4c81cdU, 0x0c14180cU, 0x13352613U, 0xec2fc3ecU,
  1003. 0x5fe1be5fU, 0x97a23597U, 0x44cc8844U, 0x17392e17U,
  1004. 0xc45793c4U, 0xa7f255a7U, 0x7e82fc7eU, 0x3d477a3dU,
  1005. 0x64acc864U, 0x5de7ba5dU, 0x192b3219U, 0x7395e673U,
  1006. 0x60a0c060U, 0x81981981U, 0x4fd19e4fU, 0xdc7fa3dcU,
  1007. 0x22664422U, 0x2a7e542aU, 0x90ab3b90U, 0x88830b88U,
  1008. 0x46ca8c46U, 0xee29c7eeU, 0xb8d36bb8U, 0x143c2814U,
  1009. 0xde79a7deU, 0x5ee2bc5eU, 0x0b1d160bU, 0xdb76addbU,
  1010. 0xe03bdbe0U, 0x32566432U, 0x3a4e743aU, 0x0a1e140aU,
  1011. 0x49db9249U, 0x060a0c06U, 0x246c4824U, 0x5ce4b85cU,
  1012. 0xc25d9fc2U, 0xd36ebdd3U, 0xacef43acU, 0x62a6c462U,
  1013. 0x91a83991U, 0x95a43195U, 0xe437d3e4U, 0x798bf279U,
  1014. 0xe732d5e7U, 0xc8438bc8U, 0x37596e37U, 0x6db7da6dU,
  1015. 0x8d8c018dU, 0xd564b1d5U, 0x4ed29c4eU, 0xa9e049a9U,
  1016. 0x6cb4d86cU, 0x56faac56U, 0xf407f3f4U, 0xea25cfeaU,
  1017. 0x65afca65U, 0x7a8ef47aU, 0xaee947aeU, 0x08181008U,
  1018. 0xbad56fbaU, 0x7888f078U, 0x256f4a25U, 0x2e725c2eU,
  1019. 0x1c24381cU, 0xa6f157a6U, 0xb4c773b4U, 0xc65197c6U,
  1020. 0xe823cbe8U, 0xdd7ca1ddU, 0x749ce874U, 0x1f213e1fU,
  1021. 0x4bdd964bU, 0xbddc61bdU, 0x8b860d8bU, 0x8a850f8aU,
  1022. 0x7090e070U, 0x3e427c3eU, 0xb5c471b5U, 0x66aacc66U,
  1023. 0x48d89048U, 0x03050603U, 0xf601f7f6U, 0x0e121c0eU,
  1024. 0x61a3c261U, 0x355f6a35U, 0x57f9ae57U, 0xb9d069b9U,
  1025. 0x86911786U, 0xc15899c1U, 0x1d273a1dU, 0x9eb9279eU,
  1026. 0xe138d9e1U, 0xf813ebf8U, 0x98b32b98U, 0x11332211U,
  1027. 0x69bbd269U, 0xd970a9d9U, 0x8e89078eU, 0x94a73394U,
  1028. 0x9bb62d9bU, 0x1e223c1eU, 0x87921587U, 0xe920c9e9U,
  1029. 0xce4987ceU, 0x55ffaa55U, 0x28785028U, 0xdf7aa5dfU,
  1030. 0x8c8f038cU, 0xa1f859a1U, 0x89800989U, 0x0d171a0dU,
  1031. 0xbfda65bfU, 0xe631d7e6U, 0x42c68442U, 0x68b8d068U,
  1032. 0x41c38241U, 0x99b02999U, 0x2d775a2dU, 0x0f111e0fU,
  1033. 0xb0cb7bb0U, 0x54fca854U, 0xbbd66dbbU, 0x163a2c16U,
  1034. },
  1035. {
  1036. 0x6363a5c6U, 0x7c7c84f8U, 0x777799eeU, 0x7b7b8df6U,
  1037. 0xf2f20dffU, 0x6b6bbdd6U, 0x6f6fb1deU, 0xc5c55491U,
  1038. 0x30305060U, 0x01010302U, 0x6767a9ceU, 0x2b2b7d56U,
  1039. 0xfefe19e7U, 0xd7d762b5U, 0xababe64dU, 0x76769aecU,
  1040. 0xcaca458fU, 0x82829d1fU, 0xc9c94089U, 0x7d7d87faU,
  1041. 0xfafa15efU, 0x5959ebb2U, 0x4747c98eU, 0xf0f00bfbU,
  1042. 0xadadec41U, 0xd4d467b3U, 0xa2a2fd5fU, 0xafafea45U,
  1043. 0x9c9cbf23U, 0xa4a4f753U, 0x727296e4U, 0xc0c05b9bU,
  1044. 0xb7b7c275U, 0xfdfd1ce1U, 0x9393ae3dU, 0x26266a4cU,
  1045. 0x36365a6cU, 0x3f3f417eU, 0xf7f702f5U, 0xcccc4f83U,
  1046. 0x34345c68U, 0xa5a5f451U, 0xe5e534d1U, 0xf1f108f9U,
  1047. 0x717193e2U, 0xd8d873abU, 0x31315362U, 0x15153f2aU,
  1048. 0x04040c08U, 0xc7c75295U, 0x23236546U, 0xc3c35e9dU,
  1049. 0x18182830U, 0x9696a137U, 0x05050f0aU, 0x9a9ab52fU,
  1050. 0x0707090eU, 0x12123624U, 0x80809b1bU, 0xe2e23ddfU,
  1051. 0xebeb26cdU, 0x2727694eU, 0xb2b2cd7fU, 0x75759feaU,
  1052. 0x09091b12U, 0x83839e1dU, 0x2c2c7458U, 0x1a1a2e34U,
  1053. 0x1b1b2d36U, 0x6e6eb2dcU, 0x5a5aeeb4U, 0xa0a0fb5bU,
  1054. 0x5252f6a4U, 0x3b3b4d76U, 0xd6d661b7U, 0xb3b3ce7dU,
  1055. 0x29297b52U, 0xe3e33eddU, 0x2f2f715eU, 0x84849713U,
  1056. 0x5353f5a6U, 0xd1d168b9U, 0x00000000U, 0xeded2cc1U,
  1057. 0x20206040U, 0xfcfc1fe3U, 0xb1b1c879U, 0x5b5bedb6U,
  1058. 0x6a6abed4U, 0xcbcb468dU, 0xbebed967U, 0x39394b72U,
  1059. 0x4a4ade94U, 0x4c4cd498U, 0x5858e8b0U, 0xcfcf4a85U,
  1060. 0xd0d06bbbU, 0xefef2ac5U, 0xaaaae54fU, 0xfbfb16edU,
  1061. 0x4343c586U, 0x4d4dd79aU, 0x33335566U, 0x85859411U,
  1062. 0x4545cf8aU, 0xf9f910e9U, 0x02020604U, 0x7f7f81feU,
  1063. 0x5050f0a0U, 0x3c3c4478U, 0x9f9fba25U, 0xa8a8e34bU,
  1064. 0x5151f3a2U, 0xa3a3fe5dU, 0x4040c080U, 0x8f8f8a05U,
  1065. 0x9292ad3fU, 0x9d9dbc21U, 0x38384870U, 0xf5f504f1U,
  1066. 0xbcbcdf63U, 0xb6b6c177U, 0xdada75afU, 0x21216342U,
  1067. 0x10103020U, 0xffff1ae5U, 0xf3f30efdU, 0xd2d26dbfU,
  1068. 0xcdcd4c81U, 0x0c0c1418U, 0x13133526U, 0xecec2fc3U,
  1069. 0x5f5fe1beU, 0x9797a235U, 0x4444cc88U, 0x1717392eU,
  1070. 0xc4c45793U, 0xa7a7f255U, 0x7e7e82fcU, 0x3d3d477aU,
  1071. 0x6464acc8U, 0x5d5de7baU, 0x19192b32U, 0x737395e6U,
  1072. 0x6060a0c0U, 0x81819819U, 0x4f4fd19eU, 0xdcdc7fa3U,
  1073. 0x22226644U, 0x2a2a7e54U, 0x9090ab3bU, 0x8888830bU,
  1074. 0x4646ca8cU, 0xeeee29c7U, 0xb8b8d36bU, 0x14143c28U,
  1075. 0xdede79a7U, 0x5e5ee2bcU, 0x0b0b1d16U, 0xdbdb76adU,
  1076. 0xe0e03bdbU, 0x32325664U, 0x3a3a4e74U, 0x0a0a1e14U,
  1077. 0x4949db92U, 0x06060a0cU, 0x24246c48U, 0x5c5ce4b8U,
  1078. 0xc2c25d9fU, 0xd3d36ebdU, 0xacacef43U, 0x6262a6c4U,
  1079. 0x9191a839U, 0x9595a431U, 0xe4e437d3U, 0x79798bf2U,
  1080. 0xe7e732d5U, 0xc8c8438bU, 0x3737596eU, 0x6d6db7daU,
  1081. 0x8d8d8c01U, 0xd5d564b1U, 0x4e4ed29cU, 0xa9a9e049U,
  1082. 0x6c6cb4d8U, 0x5656faacU, 0xf4f407f3U, 0xeaea25cfU,
  1083. 0x6565afcaU, 0x7a7a8ef4U, 0xaeaee947U, 0x08081810U,
  1084. 0xbabad56fU, 0x787888f0U, 0x25256f4aU, 0x2e2e725cU,
  1085. 0x1c1c2438U, 0xa6a6f157U, 0xb4b4c773U, 0xc6c65197U,
  1086. 0xe8e823cbU, 0xdddd7ca1U, 0x74749ce8U, 0x1f1f213eU,
  1087. 0x4b4bdd96U, 0xbdbddc61U, 0x8b8b860dU, 0x8a8a850fU,
  1088. 0x707090e0U, 0x3e3e427cU, 0xb5b5c471U, 0x6666aaccU,
  1089. 0x4848d890U, 0x03030506U, 0xf6f601f7U, 0x0e0e121cU,
  1090. 0x6161a3c2U, 0x35355f6aU, 0x5757f9aeU, 0xb9b9d069U,
  1091. 0x86869117U, 0xc1c15899U, 0x1d1d273aU, 0x9e9eb927U,
  1092. 0xe1e138d9U, 0xf8f813ebU, 0x9898b32bU, 0x11113322U,
  1093. 0x6969bbd2U, 0xd9d970a9U, 0x8e8e8907U, 0x9494a733U,
  1094. 0x9b9bb62dU, 0x1e1e223cU, 0x87879215U, 0xe9e920c9U,
  1095. 0xcece4987U, 0x5555ffaaU, 0x28287850U, 0xdfdf7aa5U,
  1096. 0x8c8c8f03U, 0xa1a1f859U, 0x89898009U, 0x0d0d171aU,
  1097. 0xbfbfda65U, 0xe6e631d7U, 0x4242c684U, 0x6868b8d0U,
  1098. 0x4141c382U, 0x9999b029U, 0x2d2d775aU, 0x0f0f111eU,
  1099. 0xb0b0cb7bU, 0x5454fca8U, 0xbbbbd66dU, 0x16163a2cU,
  1100. }
  1101. };
  1102. #if defined(HAVE_AES_DECRYPT) && !defined(WOLFSSL_SILABS_SE_ACCEL)
/* AES decryption lookup tables (standard "Td" tables): Td[0][x] is the
 * combined InvSubBytes + InvMixColumns transform of byte x packed into a
 * word32; Td[1..3] hold the same values byte-rotated so each table feeds a
 * different output byte position (compare 0x51f4a750 / 0x5051f4a7 / ...).
 * Only compiled when decryption is enabled and not offloaded to the
 * Silicon Labs accelerator (see the surrounding #if). */
static const FLASH_QUALIFIER word32 Td[4][256] = {
{
    0x51f4a750U, 0x7e416553U, 0x1a17a4c3U, 0x3a275e96U,
    0x3bab6bcbU, 0x1f9d45f1U, 0xacfa58abU, 0x4be30393U,
    0x2030fa55U, 0xad766df6U, 0x88cc7691U, 0xf5024c25U,
    0x4fe5d7fcU, 0xc52acbd7U, 0x26354480U, 0xb562a38fU,
    0xdeb15a49U, 0x25ba1b67U, 0x45ea0e98U, 0x5dfec0e1U,
    0xc32f7502U, 0x814cf012U, 0x8d4697a3U, 0x6bd3f9c6U,
    0x038f5fe7U, 0x15929c95U, 0xbf6d7aebU, 0x955259daU,
    0xd4be832dU, 0x587421d3U, 0x49e06929U, 0x8ec9c844U,
    0x75c2896aU, 0xf48e7978U, 0x99583e6bU, 0x27b971ddU,
    0xbee14fb6U, 0xf088ad17U, 0xc920ac66U, 0x7dce3ab4U,
    0x63df4a18U, 0xe51a3182U, 0x97513360U, 0x62537f45U,
    0xb16477e0U, 0xbb6bae84U, 0xfe81a01cU, 0xf9082b94U,
    0x70486858U, 0x8f45fd19U, 0x94de6c87U, 0x527bf8b7U,
    0xab73d323U, 0x724b02e2U, 0xe31f8f57U, 0x6655ab2aU,
    0xb2eb2807U, 0x2fb5c203U, 0x86c57b9aU, 0xd33708a5U,
    0x302887f2U, 0x23bfa5b2U, 0x02036abaU, 0xed16825cU,
    0x8acf1c2bU, 0xa779b492U, 0xf307f2f0U, 0x4e69e2a1U,
    0x65daf4cdU, 0x0605bed5U, 0xd134621fU, 0xc4a6fe8aU,
    0x342e539dU, 0xa2f355a0U, 0x058ae132U, 0xa4f6eb75U,
    0x0b83ec39U, 0x4060efaaU, 0x5e719f06U, 0xbd6e1051U,
    0x3e218af9U, 0x96dd063dU, 0xdd3e05aeU, 0x4de6bd46U,
    0x91548db5U, 0x71c45d05U, 0x0406d46fU, 0x605015ffU,
    0x1998fb24U, 0xd6bde997U, 0x894043ccU, 0x67d99e77U,
    0xb0e842bdU, 0x07898b88U, 0xe7195b38U, 0x79c8eedbU,
    0xa17c0a47U, 0x7c420fe9U, 0xf8841ec9U, 0x00000000U,
    0x09808683U, 0x322bed48U, 0x1e1170acU, 0x6c5a724eU,
    0xfd0efffbU, 0x0f853856U, 0x3daed51eU, 0x362d3927U,
    0x0a0fd964U, 0x685ca621U, 0x9b5b54d1U, 0x24362e3aU,
    0x0c0a67b1U, 0x9357e70fU, 0xb4ee96d2U, 0x1b9b919eU,
    0x80c0c54fU, 0x61dc20a2U, 0x5a774b69U, 0x1c121a16U,
    0xe293ba0aU, 0xc0a02ae5U, 0x3c22e043U, 0x121b171dU,
    0x0e090d0bU, 0xf28bc7adU, 0x2db6a8b9U, 0x141ea9c8U,
    0x57f11985U, 0xaf75074cU, 0xee99ddbbU, 0xa37f60fdU,
    0xf701269fU, 0x5c72f5bcU, 0x44663bc5U, 0x5bfb7e34U,
    0x8b432976U, 0xcb23c6dcU, 0xb6edfc68U, 0xb8e4f163U,
    0xd731dccaU, 0x42638510U, 0x13972240U, 0x84c61120U,
    0x854a247dU, 0xd2bb3df8U, 0xaef93211U, 0xc729a16dU,
    0x1d9e2f4bU, 0xdcb230f3U, 0x0d8652ecU, 0x77c1e3d0U,
    0x2bb3166cU, 0xa970b999U, 0x119448faU, 0x47e96422U,
    0xa8fc8cc4U, 0xa0f03f1aU, 0x567d2cd8U, 0x223390efU,
    0x87494ec7U, 0xd938d1c1U, 0x8ccaa2feU, 0x98d40b36U,
    0xa6f581cfU, 0xa57ade28U, 0xdab78e26U, 0x3fadbfa4U,
    0x2c3a9de4U, 0x5078920dU, 0x6a5fcc9bU, 0x547e4662U,
    0xf68d13c2U, 0x90d8b8e8U, 0x2e39f75eU, 0x82c3aff5U,
    0x9f5d80beU, 0x69d0937cU, 0x6fd52da9U, 0xcf2512b3U,
    0xc8ac993bU, 0x10187da7U, 0xe89c636eU, 0xdb3bbb7bU,
    0xcd267809U, 0x6e5918f4U, 0xec9ab701U, 0x834f9aa8U,
    0xe6956e65U, 0xaaffe67eU, 0x21bccf08U, 0xef15e8e6U,
    0xbae79bd9U, 0x4a6f36ceU, 0xea9f09d4U, 0x29b07cd6U,
    0x31a4b2afU, 0x2a3f2331U, 0xc6a59430U, 0x35a266c0U,
    0x744ebc37U, 0xfc82caa6U, 0xe090d0b0U, 0x33a7d815U,
    0xf104984aU, 0x41ecdaf7U, 0x7fcd500eU, 0x1791f62fU,
    0x764dd68dU, 0x43efb04dU, 0xccaa4d54U, 0xe49604dfU,
    0x9ed1b5e3U, 0x4c6a881bU, 0xc12c1fb8U, 0x4665517fU,
    0x9d5eea04U, 0x018c355dU, 0xfa877473U, 0xfb0b412eU,
    0xb3671d5aU, 0x92dbd252U, 0xe9105633U, 0x6dd64713U,
    0x9ad7618cU, 0x37a10c7aU, 0x59f8148eU, 0xeb133c89U,
    0xcea927eeU, 0xb761c935U, 0xe11ce5edU, 0x7a47b13cU,
    0x9cd2df59U, 0x55f2733fU, 0x1814ce79U, 0x73c737bfU,
    0x53f7cdeaU, 0x5ffdaa5bU, 0xdf3d6f14U, 0x7844db86U,
    0xcaaff381U, 0xb968c43eU, 0x3824342cU, 0xc2a3405fU,
    0x161dc372U, 0xbce2250cU, 0x283c498bU, 0xff0d9541U,
    0x39a80171U, 0x080cb3deU, 0xd8b4e49cU, 0x6456c190U,
    0x7bcb8461U, 0xd532b670U, 0x486c5c74U, 0xd0b85742U,
},
{
    0x5051f4a7U, 0x537e4165U, 0xc31a17a4U, 0x963a275eU,
    0xcb3bab6bU, 0xf11f9d45U, 0xabacfa58U, 0x934be303U,
    0x552030faU, 0xf6ad766dU, 0x9188cc76U, 0x25f5024cU,
    0xfc4fe5d7U, 0xd7c52acbU, 0x80263544U, 0x8fb562a3U,
    0x49deb15aU, 0x6725ba1bU, 0x9845ea0eU, 0xe15dfec0U,
    0x02c32f75U, 0x12814cf0U, 0xa38d4697U, 0xc66bd3f9U,
    0xe7038f5fU, 0x9515929cU, 0xebbf6d7aU, 0xda955259U,
    0x2dd4be83U, 0xd3587421U, 0x2949e069U, 0x448ec9c8U,
    0x6a75c289U, 0x78f48e79U, 0x6b99583eU, 0xdd27b971U,
    0xb6bee14fU, 0x17f088adU, 0x66c920acU, 0xb47dce3aU,
    0x1863df4aU, 0x82e51a31U, 0x60975133U, 0x4562537fU,
    0xe0b16477U, 0x84bb6baeU, 0x1cfe81a0U, 0x94f9082bU,
    0x58704868U, 0x198f45fdU, 0x8794de6cU, 0xb7527bf8U,
    0x23ab73d3U, 0xe2724b02U, 0x57e31f8fU, 0x2a6655abU,
    0x07b2eb28U, 0x032fb5c2U, 0x9a86c57bU, 0xa5d33708U,
    0xf2302887U, 0xb223bfa5U, 0xba02036aU, 0x5ced1682U,
    0x2b8acf1cU, 0x92a779b4U, 0xf0f307f2U, 0xa14e69e2U,
    0xcd65daf4U, 0xd50605beU, 0x1fd13462U, 0x8ac4a6feU,
    0x9d342e53U, 0xa0a2f355U, 0x32058ae1U, 0x75a4f6ebU,
    0x390b83ecU, 0xaa4060efU, 0x065e719fU, 0x51bd6e10U,
    0xf93e218aU, 0x3d96dd06U, 0xaedd3e05U, 0x464de6bdU,
    0xb591548dU, 0x0571c45dU, 0x6f0406d4U, 0xff605015U,
    0x241998fbU, 0x97d6bde9U, 0xcc894043U, 0x7767d99eU,
    0xbdb0e842U, 0x8807898bU, 0x38e7195bU, 0xdb79c8eeU,
    0x47a17c0aU, 0xe97c420fU, 0xc9f8841eU, 0x00000000U,
    0x83098086U, 0x48322bedU, 0xac1e1170U, 0x4e6c5a72U,
    0xfbfd0effU, 0x560f8538U, 0x1e3daed5U, 0x27362d39U,
    0x640a0fd9U, 0x21685ca6U, 0xd19b5b54U, 0x3a24362eU,
    0xb10c0a67U, 0x0f9357e7U, 0xd2b4ee96U, 0x9e1b9b91U,
    0x4f80c0c5U, 0xa261dc20U, 0x695a774bU, 0x161c121aU,
    0x0ae293baU, 0xe5c0a02aU, 0x433c22e0U, 0x1d121b17U,
    0x0b0e090dU, 0xadf28bc7U, 0xb92db6a8U, 0xc8141ea9U,
    0x8557f119U, 0x4caf7507U, 0xbbee99ddU, 0xfda37f60U,
    0x9ff70126U, 0xbc5c72f5U, 0xc544663bU, 0x345bfb7eU,
    0x768b4329U, 0xdccb23c6U, 0x68b6edfcU, 0x63b8e4f1U,
    0xcad731dcU, 0x10426385U, 0x40139722U, 0x2084c611U,
    0x7d854a24U, 0xf8d2bb3dU, 0x11aef932U, 0x6dc729a1U,
    0x4b1d9e2fU, 0xf3dcb230U, 0xec0d8652U, 0xd077c1e3U,
    0x6c2bb316U, 0x99a970b9U, 0xfa119448U, 0x2247e964U,
    0xc4a8fc8cU, 0x1aa0f03fU, 0xd8567d2cU, 0xef223390U,
    0xc787494eU, 0xc1d938d1U, 0xfe8ccaa2U, 0x3698d40bU,
    0xcfa6f581U, 0x28a57adeU, 0x26dab78eU, 0xa43fadbfU,
    0xe42c3a9dU, 0x0d507892U, 0x9b6a5fccU, 0x62547e46U,
    0xc2f68d13U, 0xe890d8b8U, 0x5e2e39f7U, 0xf582c3afU,
    0xbe9f5d80U, 0x7c69d093U, 0xa96fd52dU, 0xb3cf2512U,
    0x3bc8ac99U, 0xa710187dU, 0x6ee89c63U, 0x7bdb3bbbU,
    0x09cd2678U, 0xf46e5918U, 0x01ec9ab7U, 0xa8834f9aU,
    0x65e6956eU, 0x7eaaffe6U, 0x0821bccfU, 0xe6ef15e8U,
    0xd9bae79bU, 0xce4a6f36U, 0xd4ea9f09U, 0xd629b07cU,
    0xaf31a4b2U, 0x312a3f23U, 0x30c6a594U, 0xc035a266U,
    0x37744ebcU, 0xa6fc82caU, 0xb0e090d0U, 0x1533a7d8U,
    0x4af10498U, 0xf741ecdaU, 0x0e7fcd50U, 0x2f1791f6U,
    0x8d764dd6U, 0x4d43efb0U, 0x54ccaa4dU, 0xdfe49604U,
    0xe39ed1b5U, 0x1b4c6a88U, 0xb8c12c1fU, 0x7f466551U,
    0x049d5eeaU, 0x5d018c35U, 0x73fa8774U, 0x2efb0b41U,
    0x5ab3671dU, 0x5292dbd2U, 0x33e91056U, 0x136dd647U,
    0x8c9ad761U, 0x7a37a10cU, 0x8e59f814U, 0x89eb133cU,
    0xeecea927U, 0x35b761c9U, 0xede11ce5U, 0x3c7a47b1U,
    0x599cd2dfU, 0x3f55f273U, 0x791814ceU, 0xbf73c737U,
    0xea53f7cdU, 0x5b5ffdaaU, 0x14df3d6fU, 0x867844dbU,
    0x81caaff3U, 0x3eb968c4U, 0x2c382434U, 0x5fc2a340U,
    0x72161dc3U, 0x0cbce225U, 0x8b283c49U, 0x41ff0d95U,
    0x7139a801U, 0xde080cb3U, 0x9cd8b4e4U, 0x906456c1U,
    0x617bcb84U, 0x70d532b6U, 0x74486c5cU, 0x42d0b857U,
},
{
    0xa75051f4U, 0x65537e41U, 0xa4c31a17U, 0x5e963a27U,
    0x6bcb3babU, 0x45f11f9dU, 0x58abacfaU, 0x03934be3U,
    0xfa552030U, 0x6df6ad76U, 0x769188ccU, 0x4c25f502U,
    0xd7fc4fe5U, 0xcbd7c52aU, 0x44802635U, 0xa38fb562U,
    0x5a49deb1U, 0x1b6725baU, 0x0e9845eaU, 0xc0e15dfeU,
    0x7502c32fU, 0xf012814cU, 0x97a38d46U, 0xf9c66bd3U,
    0x5fe7038fU, 0x9c951592U, 0x7aebbf6dU, 0x59da9552U,
    0x832dd4beU, 0x21d35874U, 0x692949e0U, 0xc8448ec9U,
    0x896a75c2U, 0x7978f48eU, 0x3e6b9958U, 0x71dd27b9U,
    0x4fb6bee1U, 0xad17f088U, 0xac66c920U, 0x3ab47dceU,
    0x4a1863dfU, 0x3182e51aU, 0x33609751U, 0x7f456253U,
    0x77e0b164U, 0xae84bb6bU, 0xa01cfe81U, 0x2b94f908U,
    0x68587048U, 0xfd198f45U, 0x6c8794deU, 0xf8b7527bU,
    0xd323ab73U, 0x02e2724bU, 0x8f57e31fU, 0xab2a6655U,
    0x2807b2ebU, 0xc2032fb5U, 0x7b9a86c5U, 0x08a5d337U,
    0x87f23028U, 0xa5b223bfU, 0x6aba0203U, 0x825ced16U,
    0x1c2b8acfU, 0xb492a779U, 0xf2f0f307U, 0xe2a14e69U,
    0xf4cd65daU, 0xbed50605U, 0x621fd134U, 0xfe8ac4a6U,
    0x539d342eU, 0x55a0a2f3U, 0xe132058aU, 0xeb75a4f6U,
    0xec390b83U, 0xefaa4060U, 0x9f065e71U, 0x1051bd6eU,
    0x8af93e21U, 0x063d96ddU, 0x05aedd3eU, 0xbd464de6U,
    0x8db59154U, 0x5d0571c4U, 0xd46f0406U, 0x15ff6050U,
    0xfb241998U, 0xe997d6bdU, 0x43cc8940U, 0x9e7767d9U,
    0x42bdb0e8U, 0x8b880789U, 0x5b38e719U, 0xeedb79c8U,
    0x0a47a17cU, 0x0fe97c42U, 0x1ec9f884U, 0x00000000U,
    0x86830980U, 0xed48322bU, 0x70ac1e11U, 0x724e6c5aU,
    0xfffbfd0eU, 0x38560f85U, 0xd51e3daeU, 0x3927362dU,
    0xd9640a0fU, 0xa621685cU, 0x54d19b5bU, 0x2e3a2436U,
    0x67b10c0aU, 0xe70f9357U, 0x96d2b4eeU, 0x919e1b9bU,
    0xc54f80c0U, 0x20a261dcU, 0x4b695a77U, 0x1a161c12U,
    0xba0ae293U, 0x2ae5c0a0U, 0xe0433c22U, 0x171d121bU,
    0x0d0b0e09U, 0xc7adf28bU, 0xa8b92db6U, 0xa9c8141eU,
    0x198557f1U, 0x074caf75U, 0xddbbee99U, 0x60fda37fU,
    0x269ff701U, 0xf5bc5c72U, 0x3bc54466U, 0x7e345bfbU,
    0x29768b43U, 0xc6dccb23U, 0xfc68b6edU, 0xf163b8e4U,
    0xdccad731U, 0x85104263U, 0x22401397U, 0x112084c6U,
    0x247d854aU, 0x3df8d2bbU, 0x3211aef9U, 0xa16dc729U,
    0x2f4b1d9eU, 0x30f3dcb2U, 0x52ec0d86U, 0xe3d077c1U,
    0x166c2bb3U, 0xb999a970U, 0x48fa1194U, 0x642247e9U,
    0x8cc4a8fcU, 0x3f1aa0f0U, 0x2cd8567dU, 0x90ef2233U,
    0x4ec78749U, 0xd1c1d938U, 0xa2fe8ccaU, 0x0b3698d4U,
    0x81cfa6f5U, 0xde28a57aU, 0x8e26dab7U, 0xbfa43fadU,
    0x9de42c3aU, 0x920d5078U, 0xcc9b6a5fU, 0x4662547eU,
    0x13c2f68dU, 0xb8e890d8U, 0xf75e2e39U, 0xaff582c3U,
    0x80be9f5dU, 0x937c69d0U, 0x2da96fd5U, 0x12b3cf25U,
    0x993bc8acU, 0x7da71018U, 0x636ee89cU, 0xbb7bdb3bU,
    0x7809cd26U, 0x18f46e59U, 0xb701ec9aU, 0x9aa8834fU,
    0x6e65e695U, 0xe67eaaffU, 0xcf0821bcU, 0xe8e6ef15U,
    0x9bd9bae7U, 0x36ce4a6fU, 0x09d4ea9fU, 0x7cd629b0U,
    0xb2af31a4U, 0x23312a3fU, 0x9430c6a5U, 0x66c035a2U,
    0xbc37744eU, 0xcaa6fc82U, 0xd0b0e090U, 0xd81533a7U,
    0x984af104U, 0xdaf741ecU, 0x500e7fcdU, 0xf62f1791U,
    0xd68d764dU, 0xb04d43efU, 0x4d54ccaaU, 0x04dfe496U,
    0xb5e39ed1U, 0x881b4c6aU, 0x1fb8c12cU, 0x517f4665U,
    0xea049d5eU, 0x355d018cU, 0x7473fa87U, 0x412efb0bU,
    0x1d5ab367U, 0xd25292dbU, 0x5633e910U, 0x47136dd6U,
    0x618c9ad7U, 0x0c7a37a1U, 0x148e59f8U, 0x3c89eb13U,
    0x27eecea9U, 0xc935b761U, 0xe5ede11cU, 0xb13c7a47U,
    0xdf599cd2U, 0x733f55f2U, 0xce791814U, 0x37bf73c7U,
    0xcdea53f7U, 0xaa5b5ffdU, 0x6f14df3dU, 0xdb867844U,
    0xf381caafU, 0xc43eb968U, 0x342c3824U, 0x405fc2a3U,
    0xc372161dU, 0x250cbce2U, 0x498b283cU, 0x9541ff0dU,
    0x017139a8U, 0xb3de080cU, 0xe49cd8b4U, 0xc1906456U,
    0x84617bcbU, 0xb670d532U, 0x5c74486cU, 0x5742d0b8U,
},
{
    0xf4a75051U, 0x4165537eU, 0x17a4c31aU, 0x275e963aU,
    0xab6bcb3bU, 0x9d45f11fU, 0xfa58abacU, 0xe303934bU,
    0x30fa5520U, 0x766df6adU, 0xcc769188U, 0x024c25f5U,
    0xe5d7fc4fU, 0x2acbd7c5U, 0x35448026U, 0x62a38fb5U,
    0xb15a49deU, 0xba1b6725U, 0xea0e9845U, 0xfec0e15dU,
    0x2f7502c3U, 0x4cf01281U, 0x4697a38dU, 0xd3f9c66bU,
    0x8f5fe703U, 0x929c9515U, 0x6d7aebbfU, 0x5259da95U,
    0xbe832dd4U, 0x7421d358U, 0xe0692949U, 0xc9c8448eU,
    0xc2896a75U, 0x8e7978f4U, 0x583e6b99U, 0xb971dd27U,
    0xe14fb6beU, 0x88ad17f0U, 0x20ac66c9U, 0xce3ab47dU,
    0xdf4a1863U, 0x1a3182e5U, 0x51336097U, 0x537f4562U,
    0x6477e0b1U, 0x6bae84bbU, 0x81a01cfeU, 0x082b94f9U,
    0x48685870U, 0x45fd198fU, 0xde6c8794U, 0x7bf8b752U,
    0x73d323abU, 0x4b02e272U, 0x1f8f57e3U, 0x55ab2a66U,
    0xeb2807b2U, 0xb5c2032fU, 0xc57b9a86U, 0x3708a5d3U,
    0x2887f230U, 0xbfa5b223U, 0x036aba02U, 0x16825cedU,
    0xcf1c2b8aU, 0x79b492a7U, 0x07f2f0f3U, 0x69e2a14eU,
    0xdaf4cd65U, 0x05bed506U, 0x34621fd1U, 0xa6fe8ac4U,
    0x2e539d34U, 0xf355a0a2U, 0x8ae13205U, 0xf6eb75a4U,
    0x83ec390bU, 0x60efaa40U, 0x719f065eU, 0x6e1051bdU,
    0x218af93eU, 0xdd063d96U, 0x3e05aeddU, 0xe6bd464dU,
    0x548db591U, 0xc45d0571U, 0x06d46f04U, 0x5015ff60U,
    0x98fb2419U, 0xbde997d6U, 0x4043cc89U, 0xd99e7767U,
    0xe842bdb0U, 0x898b8807U, 0x195b38e7U, 0xc8eedb79U,
    0x7c0a47a1U, 0x420fe97cU, 0x841ec9f8U, 0x00000000U,
    0x80868309U, 0x2bed4832U, 0x1170ac1eU, 0x5a724e6cU,
    0x0efffbfdU, 0x8538560fU, 0xaed51e3dU, 0x2d392736U,
    0x0fd9640aU, 0x5ca62168U, 0x5b54d19bU, 0x362e3a24U,
    0x0a67b10cU, 0x57e70f93U, 0xee96d2b4U, 0x9b919e1bU,
    0xc0c54f80U, 0xdc20a261U, 0x774b695aU, 0x121a161cU,
    0x93ba0ae2U, 0xa02ae5c0U, 0x22e0433cU, 0x1b171d12U,
    0x090d0b0eU, 0x8bc7adf2U, 0xb6a8b92dU, 0x1ea9c814U,
    0xf1198557U, 0x75074cafU, 0x99ddbbeeU, 0x7f60fda3U,
    0x01269ff7U, 0x72f5bc5cU, 0x663bc544U, 0xfb7e345bU,
    0x4329768bU, 0x23c6dccbU, 0xedfc68b6U, 0xe4f163b8U,
    0x31dccad7U, 0x63851042U, 0x97224013U, 0xc6112084U,
    0x4a247d85U, 0xbb3df8d2U, 0xf93211aeU, 0x29a16dc7U,
    0x9e2f4b1dU, 0xb230f3dcU, 0x8652ec0dU, 0xc1e3d077U,
    0xb3166c2bU, 0x70b999a9U, 0x9448fa11U, 0xe9642247U,
    0xfc8cc4a8U, 0xf03f1aa0U, 0x7d2cd856U, 0x3390ef22U,
    0x494ec787U, 0x38d1c1d9U, 0xcaa2fe8cU, 0xd40b3698U,
    0xf581cfa6U, 0x7ade28a5U, 0xb78e26daU, 0xadbfa43fU,
    0x3a9de42cU, 0x78920d50U, 0x5fcc9b6aU, 0x7e466254U,
    0x8d13c2f6U, 0xd8b8e890U, 0x39f75e2eU, 0xc3aff582U,
    0x5d80be9fU, 0xd0937c69U, 0xd52da96fU, 0x2512b3cfU,
    0xac993bc8U, 0x187da710U, 0x9c636ee8U, 0x3bbb7bdbU,
    0x267809cdU, 0x5918f46eU, 0x9ab701ecU, 0x4f9aa883U,
    0x956e65e6U, 0xffe67eaaU, 0xbccf0821U, 0x15e8e6efU,
    0xe79bd9baU, 0x6f36ce4aU, 0x9f09d4eaU, 0xb07cd629U,
    0xa4b2af31U, 0x3f23312aU, 0xa59430c6U, 0xa266c035U,
    0x4ebc3774U, 0x82caa6fcU, 0x90d0b0e0U, 0xa7d81533U,
    0x04984af1U, 0xecdaf741U, 0xcd500e7fU, 0x91f62f17U,
    0x4dd68d76U, 0xefb04d43U, 0xaa4d54ccU, 0x9604dfe4U,
    0xd1b5e39eU, 0x6a881b4cU, 0x2c1fb8c1U, 0x65517f46U,
    0x5eea049dU, 0x8c355d01U, 0x877473faU, 0x0b412efbU,
    0x671d5ab3U, 0xdbd25292U, 0x105633e9U, 0xd647136dU,
    0xd7618c9aU, 0xa10c7a37U, 0xf8148e59U, 0x133c89ebU,
    0xa927eeceU, 0x61c935b7U, 0x1ce5ede1U, 0x47b13c7aU,
    0xd2df599cU, 0xf2733f55U, 0x14ce7918U, 0xc737bf73U,
    0xf7cdea53U, 0xfdaa5b5fU, 0x3d6f14dfU, 0x44db8678U,
    0xaff381caU, 0x68c43eb9U, 0x24342c38U, 0xa3405fc2U,
    0x1dc37216U, 0xe2250cbcU, 0x3c498b28U, 0x0d9541ffU,
    0xa8017139U, 0x0cb3de08U, 0xb4e49cd8U, 0x56c19064U,
    0xcb84617bU, 0x32b670d5U, 0x6c5c7448U, 0xb85742d0U,
}
};
  1369. #endif /* HAVE_AES_DECRYPT */
  1370. #endif /* WOLFSSL_AES_SMALL_TABLES */
  1371. #ifdef HAVE_AES_DECRYPT
  1372. #if (defined(HAVE_AES_CBC) && !defined(WOLFSSL_DEVCRYPTO_CBC) && \
  1373. !defined(WOLFSSL_SILABS_SE_ACCEL)) || \
  1374. defined(WOLFSSL_AES_DIRECT)
/* AES inverse S-box (InvSubBytes) as single bytes; applied in the final
 * decryption round where no InvMixColumns is performed. Compiled only for
 * the software CBC-decrypt / direct-AES paths (see enclosing #if). */
static const FLASH_QUALIFIER byte Td4[256] =
{
    0x52U, 0x09U, 0x6aU, 0xd5U, 0x30U, 0x36U, 0xa5U, 0x38U,
    0xbfU, 0x40U, 0xa3U, 0x9eU, 0x81U, 0xf3U, 0xd7U, 0xfbU,
    0x7cU, 0xe3U, 0x39U, 0x82U, 0x9bU, 0x2fU, 0xffU, 0x87U,
    0x34U, 0x8eU, 0x43U, 0x44U, 0xc4U, 0xdeU, 0xe9U, 0xcbU,
    0x54U, 0x7bU, 0x94U, 0x32U, 0xa6U, 0xc2U, 0x23U, 0x3dU,
    0xeeU, 0x4cU, 0x95U, 0x0bU, 0x42U, 0xfaU, 0xc3U, 0x4eU,
    0x08U, 0x2eU, 0xa1U, 0x66U, 0x28U, 0xd9U, 0x24U, 0xb2U,
    0x76U, 0x5bU, 0xa2U, 0x49U, 0x6dU, 0x8bU, 0xd1U, 0x25U,
    0x72U, 0xf8U, 0xf6U, 0x64U, 0x86U, 0x68U, 0x98U, 0x16U,
    0xd4U, 0xa4U, 0x5cU, 0xccU, 0x5dU, 0x65U, 0xb6U, 0x92U,
    0x6cU, 0x70U, 0x48U, 0x50U, 0xfdU, 0xedU, 0xb9U, 0xdaU,
    0x5eU, 0x15U, 0x46U, 0x57U, 0xa7U, 0x8dU, 0x9dU, 0x84U,
    0x90U, 0xd8U, 0xabU, 0x00U, 0x8cU, 0xbcU, 0xd3U, 0x0aU,
    0xf7U, 0xe4U, 0x58U, 0x05U, 0xb8U, 0xb3U, 0x45U, 0x06U,
    0xd0U, 0x2cU, 0x1eU, 0x8fU, 0xcaU, 0x3fU, 0x0fU, 0x02U,
    0xc1U, 0xafU, 0xbdU, 0x03U, 0x01U, 0x13U, 0x8aU, 0x6bU,
    0x3aU, 0x91U, 0x11U, 0x41U, 0x4fU, 0x67U, 0xdcU, 0xeaU,
    0x97U, 0xf2U, 0xcfU, 0xceU, 0xf0U, 0xb4U, 0xe6U, 0x73U,
    0x96U, 0xacU, 0x74U, 0x22U, 0xe7U, 0xadU, 0x35U, 0x85U,
    0xe2U, 0xf9U, 0x37U, 0xe8U, 0x1cU, 0x75U, 0xdfU, 0x6eU,
    0x47U, 0xf1U, 0x1aU, 0x71U, 0x1dU, 0x29U, 0xc5U, 0x89U,
    0x6fU, 0xb7U, 0x62U, 0x0eU, 0xaaU, 0x18U, 0xbeU, 0x1bU,
    0xfcU, 0x56U, 0x3eU, 0x4bU, 0xc6U, 0xd2U, 0x79U, 0x20U,
    0x9aU, 0xdbU, 0xc0U, 0xfeU, 0x78U, 0xcdU, 0x5aU, 0xf4U,
    0x1fU, 0xddU, 0xa8U, 0x33U, 0x88U, 0x07U, 0xc7U, 0x31U,
    0xb1U, 0x12U, 0x10U, 0x59U, 0x27U, 0x80U, 0xecU, 0x5fU,
    0x60U, 0x51U, 0x7fU, 0xa9U, 0x19U, 0xb5U, 0x4aU, 0x0dU,
    0x2dU, 0xe5U, 0x7aU, 0x9fU, 0x93U, 0xc9U, 0x9cU, 0xefU,
    0xa0U, 0xe0U, 0x3bU, 0x4dU, 0xaeU, 0x2aU, 0xf5U, 0xb0U,
    0xc8U, 0xebU, 0xbbU, 0x3cU, 0x83U, 0x53U, 0x99U, 0x61U,
    0x17U, 0x2bU, 0x04U, 0x7eU, 0xbaU, 0x77U, 0xd6U, 0x26U,
    0xe1U, 0x69U, 0x14U, 0x63U, 0x55U, 0x21U, 0x0cU, 0x7dU,
};
  1410. #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT */
  1411. #endif /* HAVE_AES_DECRYPT */
/* Extract byte y of word32 x (y = 0 selects the least-significant byte),
 * widened back to word32 for use in table indexing / XOR chains. */
#define GETBYTE(x, y) (word32)((byte)((x) >> (8 * (y))))
  1413. #ifdef WOLFSSL_AES_SMALL_TABLES
/* AES forward S-box (SubBytes) as single bytes. Used by the small-table
 * build (WOLFSSL_AES_SMALL_TABLES) in place of the 4 x 1KB word tables. */
static const byte Tsbox[256] = {
    0x63U, 0x7cU, 0x77U, 0x7bU, 0xf2U, 0x6bU, 0x6fU, 0xc5U,
    0x30U, 0x01U, 0x67U, 0x2bU, 0xfeU, 0xd7U, 0xabU, 0x76U,
    0xcaU, 0x82U, 0xc9U, 0x7dU, 0xfaU, 0x59U, 0x47U, 0xf0U,
    0xadU, 0xd4U, 0xa2U, 0xafU, 0x9cU, 0xa4U, 0x72U, 0xc0U,
    0xb7U, 0xfdU, 0x93U, 0x26U, 0x36U, 0x3fU, 0xf7U, 0xccU,
    0x34U, 0xa5U, 0xe5U, 0xf1U, 0x71U, 0xd8U, 0x31U, 0x15U,
    0x04U, 0xc7U, 0x23U, 0xc3U, 0x18U, 0x96U, 0x05U, 0x9aU,
    0x07U, 0x12U, 0x80U, 0xe2U, 0xebU, 0x27U, 0xb2U, 0x75U,
    0x09U, 0x83U, 0x2cU, 0x1aU, 0x1bU, 0x6eU, 0x5aU, 0xa0U,
    0x52U, 0x3bU, 0xd6U, 0xb3U, 0x29U, 0xe3U, 0x2fU, 0x84U,
    0x53U, 0xd1U, 0x00U, 0xedU, 0x20U, 0xfcU, 0xb1U, 0x5bU,
    0x6aU, 0xcbU, 0xbeU, 0x39U, 0x4aU, 0x4cU, 0x58U, 0xcfU,
    0xd0U, 0xefU, 0xaaU, 0xfbU, 0x43U, 0x4dU, 0x33U, 0x85U,
    0x45U, 0xf9U, 0x02U, 0x7fU, 0x50U, 0x3cU, 0x9fU, 0xa8U,
    0x51U, 0xa3U, 0x40U, 0x8fU, 0x92U, 0x9dU, 0x38U, 0xf5U,
    0xbcU, 0xb6U, 0xdaU, 0x21U, 0x10U, 0xffU, 0xf3U, 0xd2U,
    0xcdU, 0x0cU, 0x13U, 0xecU, 0x5fU, 0x97U, 0x44U, 0x17U,
    0xc4U, 0xa7U, 0x7eU, 0x3dU, 0x64U, 0x5dU, 0x19U, 0x73U,
    0x60U, 0x81U, 0x4fU, 0xdcU, 0x22U, 0x2aU, 0x90U, 0x88U,
    0x46U, 0xeeU, 0xb8U, 0x14U, 0xdeU, 0x5eU, 0x0bU, 0xdbU,
    0xe0U, 0x32U, 0x3aU, 0x0aU, 0x49U, 0x06U, 0x24U, 0x5cU,
    0xc2U, 0xd3U, 0xacU, 0x62U, 0x91U, 0x95U, 0xe4U, 0x79U,
    0xe7U, 0xc8U, 0x37U, 0x6dU, 0x8dU, 0xd5U, 0x4eU, 0xa9U,
    0x6cU, 0x56U, 0xf4U, 0xeaU, 0x65U, 0x7aU, 0xaeU, 0x08U,
    0xbaU, 0x78U, 0x25U, 0x2eU, 0x1cU, 0xa6U, 0xb4U, 0xc6U,
    0xe8U, 0xddU, 0x74U, 0x1fU, 0x4bU, 0xbdU, 0x8bU, 0x8aU,
    0x70U, 0x3eU, 0xb5U, 0x66U, 0x48U, 0x03U, 0xf6U, 0x0eU,
    0x61U, 0x35U, 0x57U, 0xb9U, 0x86U, 0xc1U, 0x1dU, 0x9eU,
    0xe1U, 0xf8U, 0x98U, 0x11U, 0x69U, 0xd9U, 0x8eU, 0x94U,
    0x9bU, 0x1eU, 0x87U, 0xe9U, 0xceU, 0x55U, 0x28U, 0xdfU,
    0x8cU, 0xa1U, 0x89U, 0x0dU, 0xbfU, 0xe6U, 0x42U, 0x68U,
    0x41U, 0x99U, 0x2dU, 0x0fU, 0xb0U, 0x54U, 0xbbU, 0x16U
};
/* GF(2^8) multiplication by 2 ("xtime"): left-shift, then conditionally XOR
 * the AES reduction polynomial 0x1b. (0 - ((x) >> 7)) forms an all-ones mask
 * when the top bit is set, so there is no data-dependent branch. */
#define AES_XTIME(x) ((byte)((byte)((x) << 1) ^ ((0 - ((x) >> 7)) & 0x1b)))
  1449. static WARN_UNUSED_RESULT word32 col_mul(
  1450. word32 t, int i2, int i3, int ia, int ib)
  1451. {
  1452. byte t3 = GETBYTE(t, i3);
  1453. byte tm = AES_XTIME(GETBYTE(t, i2) ^ t3);
  1454. return GETBYTE(t, ia) ^ GETBYTE(t, ib) ^ t3 ^ tm;
  1455. }
  1456. #if defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT)
  1457. static WARN_UNUSED_RESULT word32 inv_col_mul(
  1458. word32 t, int i9, int ib, int id, int ie)
  1459. {
  1460. byte t9 = GETBYTE(t, i9);
  1461. byte tb = GETBYTE(t, ib);
  1462. byte td = GETBYTE(t, id);
  1463. byte te = GETBYTE(t, ie);
  1464. byte t0 = t9 ^ tb ^ td;
  1465. return t0 ^ AES_XTIME(AES_XTIME(AES_XTIME(t0 ^ te) ^ td ^ te) ^ tb ^ te);
  1466. }
  1467. #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT */
  1468. #endif /* WOLFSSL_AES_SMALL_TABLES */
  1469. #endif
  1470. #if defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT) || \
  1471. defined(HAVE_AESCCM) || defined(HAVE_AESGCM)
  1472. #ifndef WC_AES_BITSLICED
  1473. #ifndef WC_CACHE_LINE_SZ
  1474. #if defined(__x86_64__) || defined(_M_X64) || \
  1475. (defined(__ILP32__) && (__ILP32__ >= 1))
  1476. #define WC_CACHE_LINE_SZ 64
  1477. #else
  1478. /* default cache line size */
  1479. #define WC_CACHE_LINE_SZ 32
  1480. #endif
  1481. #endif
  1482. #ifndef WC_NO_CACHE_RESISTANT
  1483. #if defined(__riscv) && !defined(WOLFSSL_AES_TOUCH_LINES)
  1484. #define WOLFSSL_AES_TOUCH_LINES
  1485. #endif
  1486. #ifndef WOLFSSL_AES_SMALL_TABLES
  1487. /* load 4 Te Tables into cache by cache line stride */
  1488. static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchTe(void)
  1489. {
  1490. #ifndef WOLFSSL_AES_TOUCH_LINES
  1491. word32 x = 0;
  1492. int i,j;
  1493. for (i = 0; i < 4; i++) {
  1494. /* 256 elements, each one is 4 bytes */
  1495. for (j = 0; j < 256; j += WC_CACHE_LINE_SZ/4) {
  1496. x &= Te[i][j];
  1497. }
  1498. }
  1499. return x;
  1500. #else
  1501. return 0;
  1502. #endif
  1503. }
  1504. #else
  1505. /* load sbox into cache by cache line stride */
  1506. static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchSBox(void)
  1507. {
  1508. #ifndef WOLFSSL_AES_TOUCH_LINES
  1509. word32 x = 0;
  1510. int i;
  1511. for (i = 0; i < 256; i += WC_CACHE_LINE_SZ/4) {
  1512. x &= Tsbox[i];
  1513. }
  1514. return x;
  1515. #else
  1516. return 0;
  1517. #endif
  1518. }
  1519. #endif
  1520. #endif
  1521. #ifdef WOLFSSL_AES_TOUCH_LINES
  1522. #if WC_CACHE_LINE_SZ == 128
  1523. #define WC_CACHE_LINE_BITS 5
  1524. #define WC_CACHE_LINE_MASK_HI 0xe0
  1525. #define WC_CACHE_LINE_MASK_LO 0x1f
  1526. #define WC_CACHE_LINE_ADD 0x20
  1527. #elif WC_CACHE_LINE_SZ == 64
  1528. #define WC_CACHE_LINE_BITS 4
  1529. #define WC_CACHE_LINE_MASK_HI 0xf0
  1530. #define WC_CACHE_LINE_MASK_LO 0x0f
  1531. #define WC_CACHE_LINE_ADD 0x10
  1532. #elif WC_CACHE_LINE_SZ == 32
  1533. #define WC_CACHE_LINE_BITS 3
  1534. #define WC_CACHE_LINE_MASK_HI 0xf8
  1535. #define WC_CACHE_LINE_MASK_LO 0x07
  1536. #define WC_CACHE_LINE_ADD 0x08
  1537. #elif WC_CACHE_LINE_SZ == 16
  1538. #define WC_CACHE_LINE_BITS 2
  1539. #define WC_CACHE_LINE_MASK_HI 0xfc
  1540. #define WC_CACHE_LINE_MASK_LO 0x03
  1541. #define WC_CACHE_LINE_ADD 0x04
  1542. #else
  1543. #error Cache line size not supported
  1544. #endif
  1545. #ifndef WOLFSSL_AES_SMALL_TABLES
/* Constant-time lookup of t[o] in a 256-entry word32 table: one entry is
 * read from EVERY cache line, and all but the wanted entry are masked to
 * zero, so the set of lines touched does not depend on the secret index o.
 *
 * Mask trick: ((word32)0 - (((word32)hi - 0x01) >> 31)) is all-ones exactly
 * when hi == 0 (hi - 1 underflows, setting bit 31) and zero otherwise.
 * 'hi' is decremented by one line-width per step, so exactly one term
 * survives the OR chain. */
static word32 GetTable(const word32* t, byte o)
{
#if WC_CACHE_LINE_SZ == 64
    /* 64-byte lines hold 16 word32 entries -> 16 unrolled probes. */
    word32 e;
    byte hi = o & 0xf0;
    byte lo = o & 0x0f;
    e = t[lo + 0x00] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x10] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x20] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x30] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x40] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x50] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x60] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x70] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x80] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x90] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xa0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xb0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xc0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xd0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xe0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xf0] & ((word32)0 - (((word32)hi - 0x01) >> 31));
    return e;
#else
    /* Generic path: one masked probe per cache line, driven by the
     * WC_CACHE_LINE_* parameters for the configured line size. */
    word32 e = 0;
    int i;
    byte hi = o & WC_CACHE_LINE_MASK_HI;
    byte lo = o & WC_CACHE_LINE_MASK_LO;
    for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
        e |= t[lo + i] & ((word32)0 - (((word32)hi - 0x01) >> 31));
        hi -= WC_CACHE_LINE_ADD;
    }
    return e;
#endif
}
  1581. #endif
  1582. #ifdef WOLFSSL_AES_SMALL_TABLES
/* Constant-time lookup of a byte table entry (Tsbox).
 *
 * Same masking scheme as GetTable(): every stride of the 256-entry table is
 * read so the access pattern is independent of the secret index o.
 *
 * @param [in] t  Table of 256 byte entries.
 * @param [in] o  Secret index to look up.
 * @return  t[o].
 */
static byte GetTable8(const byte* t, byte o)
{
#if WC_CACHE_LINE_SZ == 64
    byte e;
    byte hi = o & 0xf0;
    byte lo = o & 0x0f;

    /* Mask selects only the stride where hi has counted down to 0. */
    e  = t[lo + 0x00] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x10] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x20] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x30] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x40] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x50] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x60] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x70] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x80] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x90] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xa0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xb0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xc0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xd0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xe0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xf0] & ((word32)0 - (((word32)hi - 0x01) >> 31));
    return e;
#else
    byte e = 0;
    int i;
    byte hi = o & WC_CACHE_LINE_MASK_HI;
    byte lo = o & WC_CACHE_LINE_MASK_LO;

    for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
        e |= t[lo + i] & ((word32)0 - (((word32)hi - 0x01) >> 31));
        hi -= WC_CACHE_LINE_ADD;
    }
    return e;
#endif
}
  1618. #endif
  1619. #ifndef WOLFSSL_AES_SMALL_TABLES
/* Constant-time lookup of four 32-bit entries from one table.
 *
 * A single pass over the table serves all four secret indices, halving the
 * table traffic versus four GetTable() calls while keeping the access
 * pattern independent of the indices.
 *
 * @param [in]  t   Table of 256 32-bit entries.
 * @param [out] t0  Receives t[o0].
 * @param [in]  o0  First secret index.
 * @param [out] t1  Receives t[o1].
 * @param [in]  o1  Second secret index.
 * @param [out] t2  Receives t[o2].
 * @param [in]  o2  Third secret index.
 * @param [out] t3  Receives t[o3].
 * @param [in]  o3  Fourth secret index.
 */
static void GetTable_Multi(const word32* t, word32* t0, byte o0,
    word32* t1, byte o1, word32* t2, byte o2, word32* t3, byte o3)
{
    word32 e0 = 0;
    word32 e1 = 0;
    word32 e2 = 0;
    word32 e3 = 0;
    byte hi0 = o0 & WC_CACHE_LINE_MASK_HI;
    byte lo0 = o0 & WC_CACHE_LINE_MASK_LO;
    byte hi1 = o1 & WC_CACHE_LINE_MASK_HI;
    byte lo1 = o1 & WC_CACHE_LINE_MASK_LO;
    byte hi2 = o2 & WC_CACHE_LINE_MASK_HI;
    byte lo2 = o2 & WC_CACHE_LINE_MASK_LO;
    byte hi3 = o3 & WC_CACHE_LINE_MASK_HI;
    byte lo3 = o3 & WC_CACHE_LINE_MASK_LO;
    int i;

    for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
        /* Each mask is all-ones only on the stride holding that entry. */
        e0 |= t[lo0 + i] & ((word32)0 - (((word32)hi0 - 0x01) >> 31));
        hi0 -= WC_CACHE_LINE_ADD;
        e1 |= t[lo1 + i] & ((word32)0 - (((word32)hi1 - 0x01) >> 31));
        hi1 -= WC_CACHE_LINE_ADD;
        e2 |= t[lo2 + i] & ((word32)0 - (((word32)hi2 - 0x01) >> 31));
        hi2 -= WC_CACHE_LINE_ADD;
        e3 |= t[lo3 + i] & ((word32)0 - (((word32)hi3 - 0x01) >> 31));
        hi3 -= WC_CACHE_LINE_ADD;
    }

    *t0 = e0;
    *t1 = e1;
    *t2 = e2;
    *t3 = e3;
}
  1651. static void XorTable_Multi(const word32* t, word32* t0, byte o0,
  1652. word32* t1, byte o1, word32* t2, byte o2, word32* t3, byte o3)
  1653. {
  1654. word32 e0 = 0;
  1655. word32 e1 = 0;
  1656. word32 e2 = 0;
  1657. word32 e3 = 0;
  1658. byte hi0 = o0 & 0xf0;
  1659. byte lo0 = o0 & 0x0f;
  1660. byte hi1 = o1 & 0xf0;
  1661. byte lo1 = o1 & 0x0f;
  1662. byte hi2 = o2 & 0xf0;
  1663. byte lo2 = o2 & 0x0f;
  1664. byte hi3 = o3 & 0xf0;
  1665. byte lo3 = o3 & 0x0f;
  1666. int i;
  1667. for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
  1668. e0 |= t[lo0 + i] & ((word32)0 - (((word32)hi0 - 0x01) >> 31));
  1669. hi0 -= WC_CACHE_LINE_ADD;
  1670. e1 |= t[lo1 + i] & ((word32)0 - (((word32)hi1 - 0x01) >> 31));
  1671. hi1 -= WC_CACHE_LINE_ADD;
  1672. e2 |= t[lo2 + i] & ((word32)0 - (((word32)hi2 - 0x01) >> 31));
  1673. hi2 -= WC_CACHE_LINE_ADD;
  1674. e3 |= t[lo3 + i] & ((word32)0 - (((word32)hi3 - 0x01) >> 31));
  1675. hi3 -= WC_CACHE_LINE_ADD;
  1676. }
  1677. *t0 ^= e0;
  1678. *t1 ^= e1;
  1679. *t2 ^= e2;
  1680. *t3 ^= e3;
  1681. }
/* Constant-time lookup of four byte-table entries, packed into one word.
 *
 * One pass over the 256-entry byte table serves all four secret indices;
 * the bytes are packed big-endian: t[o0] is the most significant byte.
 *
 * @param [in] t   Table of 256 byte entries.
 * @param [in] o0  Index of the byte for bits 31..24.
 * @param [in] o1  Index of the byte for bits 23..16.
 * @param [in] o2  Index of the byte for bits 15..8.
 * @param [in] o3  Index of the byte for bits 7..0.
 * @return  (t[o0] << 24) | (t[o1] << 16) | (t[o2] << 8) | t[o3].
 */
static word32 GetTable8_4(const byte* t, byte o0, byte o1, byte o2, byte o3)
{
    word32 e = 0;
    int i;
    byte hi0 = o0 & WC_CACHE_LINE_MASK_HI;
    byte lo0 = o0 & WC_CACHE_LINE_MASK_LO;
    byte hi1 = o1 & WC_CACHE_LINE_MASK_HI;
    byte lo1 = o1 & WC_CACHE_LINE_MASK_LO;
    byte hi2 = o2 & WC_CACHE_LINE_MASK_HI;
    byte lo2 = o2 & WC_CACHE_LINE_MASK_LO;
    byte hi3 = o3 & WC_CACHE_LINE_MASK_HI;
    byte lo3 = o3 & WC_CACHE_LINE_MASK_LO;

    for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
        /* Each mask is all-ones only on the stride holding that entry. */
        e |= (word32)(t[lo0 + i] & ((word32)0 - (((word32)hi0 - 0x01) >> 31)))
             << 24;
        hi0 -= WC_CACHE_LINE_ADD;
        e |= (word32)(t[lo1 + i] & ((word32)0 - (((word32)hi1 - 0x01) >> 31)))
             << 16;
        hi1 -= WC_CACHE_LINE_ADD;
        e |= (word32)(t[lo2 + i] & ((word32)0 - (((word32)hi2 - 0x01) >> 31)))
             << 8;
        hi2 -= WC_CACHE_LINE_ADD;
        e |= (word32)(t[lo3 + i] & ((word32)0 - (((word32)hi3 - 0x01) >> 31)))
             << 0;
        hi3 -= WC_CACHE_LINE_ADD;
    }
    return e;
}
  1710. #endif
#else
/* WOLFSSL_AES_TOUCH_LINES not defined: plain (not cache-timing hardened)
 * table lookups with the same interface as the constant-time functions. */
#define GetTable(t, o)  t[o]
#define GetTable8(t, o) t[o]
#define GetTable_Multi(t, t0, o0, t1, o1, t2, o2, t3, o3) \
    *(t0) = (t)[o0]; *(t1) = (t)[o1]; *(t2) = (t)[o2]; *(t3) = (t)[o3]
#define XorTable_Multi(t, t0, o0, t1, o1, t2, o2, t3, o3) \
    *(t0) ^= (t)[o0]; *(t1) ^= (t)[o1]; *(t2) ^= (t)[o2]; *(t3) ^= (t)[o3]
/* Pack four byte-table entries big-endian into one 32-bit word. */
#define GetTable8_4(t, o0, o1, o2, o3) \
    (((word32)(t)[o0] << 24) | ((word32)(t)[o1] << 16) | \
     ((word32)(t)[o2] <<  8) | ((word32)(t)[o3] <<  0))
#endif
/* Encrypt a block using AES.
 *
 * @param [in]  aes       AES object.
 * @param [in]  inBlock   Block to encrypt.
 * @param [out] outBlock  Encrypted block.
 * @param [in]  r         Rounds divided by 2.
 */
static void AesEncrypt_C(Aes* aes, const byte* inBlock, byte* outBlock,
    word32 r)
{
    word32 s0, s1, s2, s3;
    word32 t0, t1, t2, t3;
    const word32* rk;

#ifdef WC_AES_C_DYNAMIC_FALLBACK
    /* Round keys expanded for the C fallback path. */
    rk = aes->key_C_fallback;
#else
    rk = aes->key;
#endif

    /*
     * map byte array block to cipher state
     * and add initial round key:
     */
    XMEMCPY(&s0, inBlock, sizeof(s0));
    XMEMCPY(&s1, inBlock + sizeof(s0), sizeof(s1));
    XMEMCPY(&s2, inBlock + 2 * sizeof(s0), sizeof(s2));
    XMEMCPY(&s3, inBlock + 3 * sizeof(s0), sizeof(s3));
#ifdef LITTLE_ENDIAN_ORDER
    /* State words are handled big-endian: byte 3 is the first input byte. */
    s0 = ByteReverseWord32(s0);
    s1 = ByteReverseWord32(s1);
    s2 = ByteReverseWord32(s2);
    s3 = ByteReverseWord32(s3);
#endif

    /* AddRoundKey */
    s0 ^= rk[0];
    s1 ^= rk[1];
    s2 ^= rk[2];
    s3 ^= rk[3];

#ifndef WOLFSSL_AES_SMALL_TABLES
#ifndef WC_NO_CACHE_RESISTANT
    /* PreFetchTe() returns 0 (see its definition), so this only warms the
     * cache without changing the state. */
    s0 |= PreFetchTe();
#endif

#ifndef WOLFSSL_AES_TOUCH_LINES
/* One full round, state s -> t, using round keys rk[o+4..o+7]. */
#define ENC_ROUND_T_S(o) \
    t0 = GetTable(Te[0], GETBYTE(s0, 3)) ^ GetTable(Te[1], GETBYTE(s1, 2)) ^ \
         GetTable(Te[2], GETBYTE(s2, 1)) ^ GetTable(Te[3], GETBYTE(s3, 0)) ^ \
         rk[(o)+4]; \
    t1 = GetTable(Te[0], GETBYTE(s1, 3)) ^ GetTable(Te[1], GETBYTE(s2, 2)) ^ \
         GetTable(Te[2], GETBYTE(s3, 1)) ^ GetTable(Te[3], GETBYTE(s0, 0)) ^ \
         rk[(o)+5]; \
    t2 = GetTable(Te[0], GETBYTE(s2, 3)) ^ GetTable(Te[1], GETBYTE(s3, 2)) ^ \
         GetTable(Te[2], GETBYTE(s0, 1)) ^ GetTable(Te[3], GETBYTE(s1, 0)) ^ \
         rk[(o)+6]; \
    t3 = GetTable(Te[0], GETBYTE(s3, 3)) ^ GetTable(Te[1], GETBYTE(s0, 2)) ^ \
         GetTable(Te[2], GETBYTE(s1, 1)) ^ GetTable(Te[3], GETBYTE(s2, 0)) ^ \
         rk[(o)+7]
/* One full round, state t -> s, using round keys rk[o+0..o+3]. */
#define ENC_ROUND_S_T(o) \
    s0 = GetTable(Te[0], GETBYTE(t0, 3)) ^ GetTable(Te[1], GETBYTE(t1, 2)) ^ \
         GetTable(Te[2], GETBYTE(t2, 1)) ^ GetTable(Te[3], GETBYTE(t3, 0)) ^ \
         rk[(o)+0]; \
    s1 = GetTable(Te[0], GETBYTE(t1, 3)) ^ GetTable(Te[1], GETBYTE(t2, 2)) ^ \
         GetTable(Te[2], GETBYTE(t3, 1)) ^ GetTable(Te[3], GETBYTE(t0, 0)) ^ \
         rk[(o)+1]; \
    s2 = GetTable(Te[0], GETBYTE(t2, 3)) ^ GetTable(Te[1], GETBYTE(t3, 2)) ^ \
         GetTable(Te[2], GETBYTE(t0, 1)) ^ GetTable(Te[3], GETBYTE(t1, 0)) ^ \
         rk[(o)+2]; \
    s3 = GetTable(Te[0], GETBYTE(t3, 3)) ^ GetTable(Te[1], GETBYTE(t0, 2)) ^ \
         GetTable(Te[2], GETBYTE(t1, 1)) ^ GetTable(Te[3], GETBYTE(t2, 0)) ^ \
         rk[(o)+3]
#else
/* Touch-lines variants: one constant-time pass per Te table serves all
 * four state words. */
#define ENC_ROUND_T_S(o) \
    GetTable_Multi(Te[0], &t0, GETBYTE(s0, 3), &t1, GETBYTE(s1, 3), \
                          &t2, GETBYTE(s2, 3), &t3, GETBYTE(s3, 3)); \
    XorTable_Multi(Te[1], &t0, GETBYTE(s1, 2), &t1, GETBYTE(s2, 2), \
                          &t2, GETBYTE(s3, 2), &t3, GETBYTE(s0, 2)); \
    XorTable_Multi(Te[2], &t0, GETBYTE(s2, 1), &t1, GETBYTE(s3, 1), \
                          &t2, GETBYTE(s0, 1), &t3, GETBYTE(s1, 1)); \
    XorTable_Multi(Te[3], &t0, GETBYTE(s3, 0), &t1, GETBYTE(s0, 0), \
                          &t2, GETBYTE(s1, 0), &t3, GETBYTE(s2, 0)); \
    t0 ^= rk[(o)+4]; t1 ^= rk[(o)+5]; t2 ^= rk[(o)+6]; t3 ^= rk[(o)+7];
#define ENC_ROUND_S_T(o) \
    GetTable_Multi(Te[0], &s0, GETBYTE(t0, 3), &s1, GETBYTE(t1, 3), \
                          &s2, GETBYTE(t2, 3), &s3, GETBYTE(t3, 3)); \
    XorTable_Multi(Te[1], &s0, GETBYTE(t1, 2), &s1, GETBYTE(t2, 2), \
                          &s2, GETBYTE(t3, 2), &s3, GETBYTE(t0, 2)); \
    XorTable_Multi(Te[2], &s0, GETBYTE(t2, 1), &s1, GETBYTE(t3, 1), \
                          &s2, GETBYTE(t0, 1), &s3, GETBYTE(t1, 1)); \
    XorTable_Multi(Te[3], &s0, GETBYTE(t3, 0), &s1, GETBYTE(t0, 0), \
                          &s2, GETBYTE(t1, 0), &s3, GETBYTE(t2, 0)); \
    s0 ^= rk[(o)+0]; s1 ^= rk[(o)+1]; s2 ^= rk[(o)+2]; s3 ^= rk[(o)+3];
#endif

#ifndef WOLFSSL_AES_NO_UNROLL
    /* Unroll the loop. r is rounds/2: 5 rounds-pairs always run; the extra
     * pairs run for the larger key sizes. */
    ENC_ROUND_T_S( 0);
    ENC_ROUND_S_T( 8); ENC_ROUND_T_S( 8);
    ENC_ROUND_S_T(16); ENC_ROUND_T_S(16);
    ENC_ROUND_S_T(24); ENC_ROUND_T_S(24);
    ENC_ROUND_S_T(32); ENC_ROUND_T_S(32);
    if (r > 5) {
        ENC_ROUND_S_T(40); ENC_ROUND_T_S(40);
        if (r > 6) {
            ENC_ROUND_S_T(48); ENC_ROUND_T_S(48);
        }
    }
    /* Leave rk pointing at the last round's keys. */
    rk += r * 8;
#else
    /*
     * Nr - 1 full rounds:
     */
    for (;;) {
        ENC_ROUND_T_S(0);
        rk += 8;
        if (--r == 0) {
            break;
        }
        ENC_ROUND_S_T(0);
    }
#endif

    /*
     * apply last round and
     * map cipher state to byte array block:
     */
#ifndef WOLFSSL_AES_TOUCH_LINES
    /* Final round uses only the S-box byte of each Te entry, selected with
     * the byte masks (no MixColumns in the last round). */
    s0 =
        (GetTable(Te[2], GETBYTE(t0, 3)) & 0xff000000) ^
        (GetTable(Te[3], GETBYTE(t1, 2)) & 0x00ff0000) ^
        (GetTable(Te[0], GETBYTE(t2, 1)) & 0x0000ff00) ^
        (GetTable(Te[1], GETBYTE(t3, 0)) & 0x000000ff) ^
        rk[0];
    s1 =
        (GetTable(Te[2], GETBYTE(t1, 3)) & 0xff000000) ^
        (GetTable(Te[3], GETBYTE(t2, 2)) & 0x00ff0000) ^
        (GetTable(Te[0], GETBYTE(t3, 1)) & 0x0000ff00) ^
        (GetTable(Te[1], GETBYTE(t0, 0)) & 0x000000ff) ^
        rk[1];
    s2 =
        (GetTable(Te[2], GETBYTE(t2, 3)) & 0xff000000) ^
        (GetTable(Te[3], GETBYTE(t3, 2)) & 0x00ff0000) ^
        (GetTable(Te[0], GETBYTE(t0, 1)) & 0x0000ff00) ^
        (GetTable(Te[1], GETBYTE(t1, 0)) & 0x000000ff) ^
        rk[2];
    s3 =
        (GetTable(Te[2], GETBYTE(t3, 3)) & 0xff000000) ^
        (GetTable(Te[3], GETBYTE(t0, 2)) & 0x00ff0000) ^
        (GetTable(Te[0], GETBYTE(t1, 1)) & 0x0000ff00) ^
        (GetTable(Te[1], GETBYTE(t2, 0)) & 0x000000ff) ^
        rk[3];
#else
    {
        word32 u0;
        word32 u1;
        word32 u2;
        word32 u3;

        /* Same final round with the multi-lookup helpers. */
        s0 = rk[0]; s1 = rk[1]; s2 = rk[2]; s3 = rk[3];
        GetTable_Multi(Te[2], &u0, GETBYTE(t0, 3), &u1, GETBYTE(t1, 3),
                              &u2, GETBYTE(t2, 3), &u3, GETBYTE(t3, 3));
        s0 ^= u0 & 0xff000000; s1 ^= u1 & 0xff000000;
        s2 ^= u2 & 0xff000000; s3 ^= u3 & 0xff000000;
        GetTable_Multi(Te[3], &u0, GETBYTE(t1, 2), &u1, GETBYTE(t2, 2),
                              &u2, GETBYTE(t3, 2), &u3, GETBYTE(t0, 2));
        s0 ^= u0 & 0x00ff0000; s1 ^= u1 & 0x00ff0000;
        s2 ^= u2 & 0x00ff0000; s3 ^= u3 & 0x00ff0000;
        GetTable_Multi(Te[0], &u0, GETBYTE(t2, 1), &u1, GETBYTE(t3, 1),
                              &u2, GETBYTE(t0, 1), &u3, GETBYTE(t1, 1));
        s0 ^= u0 & 0x0000ff00; s1 ^= u1 & 0x0000ff00;
        s2 ^= u2 & 0x0000ff00; s3 ^= u3 & 0x0000ff00;
        GetTable_Multi(Te[1], &u0, GETBYTE(t3, 0), &u1, GETBYTE(t0, 0),
                              &u2, GETBYTE(t1, 0), &u3, GETBYTE(t2, 0));
        s0 ^= u0 & 0x000000ff; s1 ^= u1 & 0x000000ff;
        s2 ^= u2 & 0x000000ff; s3 ^= u3 & 0x000000ff;
    }
#endif
#else
    /* WOLFSSL_AES_SMALL_TABLES: S-box only, MixColumns done via col_mul. */
#ifndef WC_NO_CACHE_RESISTANT
    /* PreFetchSBox() returns 0; warms the cache without changing state. */
    s0 |= PreFetchSBox();
#endif
    /* Convert rounds/2 back to the round count. */
    r *= 2;
    /* Two rounds at a time */
    for (rk += 4; r > 1; r--, rk += 4) {
        /* SubBytes + ShiftRows into t0..t3. */
        t0 =
            ((word32)GetTable8(Tsbox, GETBYTE(s0, 3)) << 24) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s1, 2)) << 16) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s2, 1)) <<  8) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s3, 0)));
        t1 =
            ((word32)GetTable8(Tsbox, GETBYTE(s1, 3)) << 24) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s2, 2)) << 16) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s3, 1)) <<  8) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s0, 0)));
        t2 =
            ((word32)GetTable8(Tsbox, GETBYTE(s2, 3)) << 24) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s3, 2)) << 16) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s0, 1)) <<  8) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s1, 0)));
        t3 =
            ((word32)GetTable8(Tsbox, GETBYTE(s3, 3)) << 24) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s0, 2)) << 16) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s1, 1)) <<  8) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s2, 0)));
        /* MixColumns + AddRoundKey back into s0..s3. */
        s0 =
            (col_mul(t0, 3, 2, 0, 1) << 24) ^
            (col_mul(t0, 2, 1, 0, 3) << 16) ^
            (col_mul(t0, 1, 0, 2, 3) <<  8) ^
            (col_mul(t0, 0, 3, 2, 1)      ) ^
            rk[0];
        s1 =
            (col_mul(t1, 3, 2, 0, 1) << 24) ^
            (col_mul(t1, 2, 1, 0, 3) << 16) ^
            (col_mul(t1, 1, 0, 2, 3) <<  8) ^
            (col_mul(t1, 0, 3, 2, 1)      ) ^
            rk[1];
        s2 =
            (col_mul(t2, 3, 2, 0, 1) << 24) ^
            (col_mul(t2, 2, 1, 0, 3) << 16) ^
            (col_mul(t2, 1, 0, 2, 3) <<  8) ^
            (col_mul(t2, 0, 3, 2, 1)      ) ^
            rk[2];
        s3 =
            (col_mul(t3, 3, 2, 0, 1) << 24) ^
            (col_mul(t3, 2, 1, 0, 3) << 16) ^
            (col_mul(t3, 1, 0, 2, 3) <<  8) ^
            (col_mul(t3, 0, 3, 2, 1)      ) ^
            rk[3];
    }

    /* Final round: SubBytes + ShiftRows + AddRoundKey (no MixColumns). */
    t0 =
        ((word32)GetTable8(Tsbox, GETBYTE(s0, 3)) << 24) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s1, 2)) << 16) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s2, 1)) <<  8) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s3, 0)));
    t1 =
        ((word32)GetTable8(Tsbox, GETBYTE(s1, 3)) << 24) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s2, 2)) << 16) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s3, 1)) <<  8) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s0, 0)));
    t2 =
        ((word32)GetTable8(Tsbox, GETBYTE(s2, 3)) << 24) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s3, 2)) << 16) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s0, 1)) <<  8) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s1, 0)));
    t3 =
        ((word32)GetTable8(Tsbox, GETBYTE(s3, 3)) << 24) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s0, 2)) << 16) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s1, 1)) <<  8) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s2, 0)));
    s0 = t0 ^ rk[0];
    s1 = t1 ^ rk[1];
    s2 = t2 ^ rk[2];
    s3 = t3 ^ rk[3];
#endif

    /* write out */
#ifdef LITTLE_ENDIAN_ORDER
    s0 = ByteReverseWord32(s0);
    s1 = ByteReverseWord32(s1);
    s2 = ByteReverseWord32(s2);
    s3 = ByteReverseWord32(s3);
#endif
    XMEMCPY(outBlock, &s0, sizeof(s0));
    XMEMCPY(outBlock + sizeof(s0), &s1, sizeof(s1));
    XMEMCPY(outBlock + 2 * sizeof(s0), &s2, sizeof(s2));
    XMEMCPY(outBlock + 3 * sizeof(s0), &s3, sizeof(s3));
}
  1982. #if defined(HAVE_AES_ECB) && !(defined(WOLFSSL_IMX6_CAAM) && \
  1983. !defined(NO_IMX6_CAAM_AES) && !defined(WOLFSSL_QNX_CAAM))
  1984. /* Encrypt a number of blocks using AES.
  1985. *
  1986. * @param [in] aes AES object.
  1987. * @param [in] in Block to encrypt.
  1988. * @param [out] out Encrypted block.
  1989. * @param [in] sz Number of blocks to encrypt.
  1990. */
  1991. static void AesEncryptBlocks_C(Aes* aes, const byte* in, byte* out, word32 sz)
  1992. {
  1993. word32 i;
  1994. for (i = 0; i < sz; i += AES_BLOCK_SIZE) {
  1995. AesEncrypt_C(aes, in, out, aes->rounds >> 1);
  1996. in += AES_BLOCK_SIZE;
  1997. out += AES_BLOCK_SIZE;
  1998. }
  1999. }
  2000. #endif
  2001. #else
  2002. /* Bit-sliced implementation based on work by "circuit minimization team" (CMT):
  2003. * http://cs-www.cs.yale.edu/homes/peralta/CircuitStuff/CMT.html
  2004. */
  2005. /* http://cs-www.cs.yale.edu/homes/peralta/CircuitStuff/SLP_AES_113.txt */
/* Bit-sliced AES S-box on 8 bit-planes.
 *
 * Straight-line boolean circuit (113 gates) from the "circuit minimization
 * team" SLP - do not reorder: each line depends on earlier temporaries.
 *
 * @param [in,out] u  Eight bit-plane words; u[0] is the most significant
 *                    input bit-plane (mapped to U0..U7 below) and is
 *                    replaced by the corresponding output plane S7..S0.
 */
static void bs_sub_bytes(bs_word u[8])
{
    bs_word y1, y2, y3, y4, y5, y6, y7, y8, y9;
    bs_word y10, y11, y12, y13, y14, y15, y16, y17, y18, y19;
    bs_word y20, y21;
    bs_word t0, t1, t2, t3, t4, t5, t6, t7, t8, t9;
    bs_word t10, t11, t12, t13, t14, t15, t16, t17, t18, t19;
    bs_word t20, t21, t22, t23, t24, t25, t26, t27, t28, t29;
    bs_word t30, t31, t32, t33, t34, t35, t36, t37, t38, t39;
    bs_word t40, t41, t42, t43, t44, t45;
    bs_word z0, z1, z2, z3, z4, z5, z6, z7, z8, z9;
    bs_word z10, z11, z12, z13, z14, z15, z16, z17;
    bs_word tc1, tc2, tc3, tc4, tc5, tc6, tc7, tc8, tc9;
    bs_word tc10, tc11, tc12, tc13, tc14, tc16, tc17, tc18;
    bs_word tc20, tc21, tc26;
    bs_word U0, U1, U2, U3, U4, U5, U6, U7;
    bs_word S0, S1, S2, S3, S4, S5, S6, S7;

    /* Map plane order to the circuit's input naming. */
    U0 = u[7];
    U1 = u[6];
    U2 = u[5];
    U3 = u[4];
    U4 = u[3];
    U5 = u[2];
    U6 = u[1];
    U7 = u[0];

    /* Top linear layer. */
    y14 = U3 ^ U5;
    y13 = U0 ^ U6;
    y9 = U0 ^ U3;
    y8 = U0 ^ U5;
    t0 = U1 ^ U2;
    y1 = t0 ^ U7;
    y4 = y1 ^ U3;
    y12 = y13 ^ y14;
    y2 = y1 ^ U0;
    y5 = y1 ^ U6;
    y3 = y5 ^ y8;
    t1 = U4 ^ y12;
    y15 = t1 ^ U5;
    y20 = t1 ^ U1;
    y6 = y15 ^ U7;
    y10 = y15 ^ t0;
    y11 = y20 ^ y9;
    y7 = U7 ^ y11;
    y17 = y10 ^ y11;
    y19 = y10 ^ y8;
    y16 = t0 ^ y11;
    y21 = y13 ^ y16;
    y18 = U0 ^ y16;

    /* Non-linear middle layer (GF(2^4) inversion). */
    t2 = y12 & y15;
    t3 = y3 & y6;
    t4 = t3 ^ t2;
    t5 = y4 & U7;
    t6 = t5 ^ t2;
    t7 = y13 & y16;
    t8 = y5 & y1;
    t9 = t8 ^ t7;
    t10 = y2 & y7;
    t11 = t10 ^ t7;
    t12 = y9 & y11;
    t13 = y14 & y17;
    t14 = t13 ^ t12;
    t15 = y8 & y10;
    t16 = t15 ^ t12;
    t17 = t4 ^ y20;
    t18 = t6 ^ t16;
    t19 = t9 ^ t14;
    t20 = t11 ^ t16;
    t21 = t17 ^ t14;
    t22 = t18 ^ y19;
    t23 = t19 ^ y21;
    t24 = t20 ^ y18;
    t25 = t21 ^ t22;
    t26 = t21 & t23;
    t27 = t24 ^ t26;
    t28 = t25 & t27;
    t29 = t28 ^ t22;
    t30 = t23 ^ t24;
    t31 = t22 ^ t26;
    t32 = t31 & t30;
    t33 = t32 ^ t24;
    t34 = t23 ^ t33;
    t35 = t27 ^ t33;
    t36 = t24 & t35;
    t37 = t36 ^ t34;
    t38 = t27 ^ t36;
    t39 = t29 & t38;
    t40 = t25 ^ t39;
    t41 = t40 ^ t37;
    t42 = t29 ^ t33;
    t43 = t29 ^ t40;
    t44 = t33 ^ t37;
    t45 = t42 ^ t41;
    z0 = t44 & y15;
    z1 = t37 & y6;
    z2 = t33 & U7;
    z3 = t43 & y16;
    z4 = t40 & y1;
    z5 = t29 & y7;
    z6 = t42 & y11;
    z7 = t45 & y17;
    z8 = t41 & y10;
    z9 = t44 & y12;
    z10 = t37 & y3;
    z11 = t33 & y4;
    z12 = t43 & y13;
    z13 = t40 & y5;
    z14 = t29 & y2;
    z15 = t42 & y9;
    z16 = t45 & y14;
    z17 = t41 & y8;

    /* Bottom linear layer (includes the affine transform's inversions). */
    tc1 = z15 ^ z16;
    tc2 = z10 ^ tc1;
    tc3 = z9 ^ tc2;
    tc4 = z0 ^ z2;
    tc5 = z1 ^ z0;
    tc6 = z3 ^ z4;
    tc7 = z12 ^ tc4;
    tc8 = z7 ^ tc6;
    tc9 = z8 ^ tc7;
    tc10 = tc8 ^ tc9;
    tc11 = tc6 ^ tc5;
    tc12 = z3 ^ z5;
    tc13 = z13 ^ tc1;
    tc14 = tc4 ^ tc12;
    S3 = tc3 ^ tc11;
    tc16 = z6 ^ tc8;
    tc17 = z14 ^ tc10;
    tc18 = tc13 ^ tc14;
    S7 = ~(z12 ^ tc18);
    tc20 = z15 ^ tc16;
    tc21 = tc2 ^ z11;
    S0 = tc3 ^ tc16;
    S6 = ~(tc10 ^ tc18);
    S4 = tc14 ^ S3;
    S1 = ~(S3 ^ tc16);
    tc26 = tc17 ^ tc20;
    S2 = ~(tc26 ^ z17);
    S5 = tc21 ^ tc17;

    /* Map outputs back to plane order. */
    u[0] = S7;
    u[1] = S6;
    u[2] = S5;
    u[3] = S4;
    u[4] = S3;
    u[5] = S2;
    u[6] = S1;
    u[7] = S0;
}
/* Expands to bmask when bit j of w is set, all-zero otherwise (branch
 * free). */
#define BS_MASK_BIT_SET(w, j, bmask) \
    (((bs_word)0 - (((w) >> (j)) & (bs_word)1)) & (bmask))

/* Scatter 8 consecutive bits of w (starting at bit s) into 8 consecutive
 * bit-plane words of t at offset o. */
#define BS_TRANS_8(t, o, w, bmask, s) \
    t[o + s + 0] |= BS_MASK_BIT_SET(w, s + 0, bmask); \
    t[o + s + 1] |= BS_MASK_BIT_SET(w, s + 1, bmask); \
    t[o + s + 2] |= BS_MASK_BIT_SET(w, s + 2, bmask); \
    t[o + s + 3] |= BS_MASK_BIT_SET(w, s + 3, bmask); \
    t[o + s + 4] |= BS_MASK_BIT_SET(w, s + 4, bmask); \
    t[o + s + 5] |= BS_MASK_BIT_SET(w, s + 5, bmask); \
    t[o + s + 6] |= BS_MASK_BIT_SET(w, s + 6, bmask); \
    t[o + s + 7] |= BS_MASK_BIT_SET(w, s + 7, bmask)
/* Transpose blocks into bit-sliced form.
 *
 * Block i contributes bit i of each bit-plane word: bit k of input word j
 * of block i becomes bit i of t[j * BS_WORD_SIZE + k].
 *
 * @param [out] t       Bit-sliced output, AES_BLOCK_BITS words (zeroed
 *                      first).
 * @param [in]  blocks  BS_WORD_SIZE normal-form blocks.
 */
static void bs_transpose(bs_word* t, bs_word* blocks)
{
    bs_word bmask = 1;
    int i;

    XMEMSET(t, 0, sizeof(bs_word) * AES_BLOCK_BITS);

    /* One pass per block; bmask marks this block's lane bit. */
    for (i = 0; i < BS_WORD_SIZE; i++) {
        int j;
        int o = 0;

        for (j = 0; j < BS_BLOCK_WORDS; j++) {
#ifdef LITTLE_ENDIAN_ORDER
            bs_word w = blocks[i * BS_BLOCK_WORDS + j];
#else
            /* Normalize to little-endian bit numbering before slicing. */
            bs_word w = bs_bswap(blocks[i * BS_BLOCK_WORDS + j]);
#endif
#ifdef WOLFSSL_AES_NO_UNROLL
            int k;
            for (k = 0; k < BS_WORD_SIZE; k++) {
                t[o + k] |= BS_MASK_BIT_SET(w, k, bmask);
            }
#else
            BS_TRANS_8(t, o, w, bmask, 0);
#if BS_WORD_SIZE >= 16
            BS_TRANS_8(t, o, w, bmask, 8);
#endif
#if BS_WORD_SIZE >= 32
            BS_TRANS_8(t, o, w, bmask, 16);
            BS_TRANS_8(t, o, w, bmask, 24);
#endif
#if BS_WORD_SIZE >= 64
            BS_TRANS_8(t, o, w, bmask, 32);
            BS_TRANS_8(t, o, w, bmask, 40);
            BS_TRANS_8(t, o, w, bmask, 48);
            BS_TRANS_8(t, o, w, bmask, 56);
#endif
#endif
            o += BS_WORD_SIZE;
        }
        bmask <<= 1;
    }
}
/* Gather bits s..s+7 of bit-plane word w back into 8 normal-form block
 * words (inverse of BS_TRANS_8). */
#define BS_INV_TRANS_8(t, o, w, bmask, s) \
    t[o + (s + 0) * BS_BLOCK_WORDS] |= BS_MASK_BIT_SET(w, s + 0, bmask); \
    t[o + (s + 1) * BS_BLOCK_WORDS] |= BS_MASK_BIT_SET(w, s + 1, bmask); \
    t[o + (s + 2) * BS_BLOCK_WORDS] |= BS_MASK_BIT_SET(w, s + 2, bmask); \
    t[o + (s + 3) * BS_BLOCK_WORDS] |= BS_MASK_BIT_SET(w, s + 3, bmask); \
    t[o + (s + 4) * BS_BLOCK_WORDS] |= BS_MASK_BIT_SET(w, s + 4, bmask); \
    t[o + (s + 5) * BS_BLOCK_WORDS] |= BS_MASK_BIT_SET(w, s + 5, bmask); \
    t[o + (s + 6) * BS_BLOCK_WORDS] |= BS_MASK_BIT_SET(w, s + 6, bmask); \
    t[o + (s + 7) * BS_BLOCK_WORDS] |= BS_MASK_BIT_SET(w, s + 7, bmask)
/* Transpose bit-sliced data back into normal-form blocks.
 *
 * Inverse of bs_transpose(): bit i of a bit-plane word is routed back to
 * block i.
 *
 * @param [out] t       BS_WORD_SIZE normal-form blocks (zeroed first).
 * @param [in]  blocks  Bit-sliced input, AES_BLOCK_BITS words.
 */
static void bs_inv_transpose(bs_word* t, bs_word* blocks)
{
    int o;

    XMEMSET(t, 0, sizeof(bs_word) * AES_BLOCK_BITS);

    for (o = 0; o < BS_BLOCK_WORDS; o++) {
        int i;

        for (i = 0; i < BS_WORD_SIZE; i++) {
#ifdef LITTLE_ENDIAN_ORDER
            bs_word bmask = (bs_word)1 << i;
#else
            /* Output bytes go back to the block's native byte order. */
            bs_word bmask = bs_bswap((bs_word)1 << i);
#endif
            bs_word w = blocks[(o << BS_WORD_SHIFT) + i];
#ifdef WOLFSSL_AES_NO_UNROLL
            int j;
            for (j = 0; j < BS_WORD_SIZE; j++) {
                t[j * BS_BLOCK_WORDS + o] |= BS_MASK_BIT_SET(w, j, bmask);
            }
#else
            BS_INV_TRANS_8(t, o, w, bmask, 0);
#if BS_WORD_SIZE >= 16
            BS_INV_TRANS_8(t, o, w, bmask, 8);
#endif
#if BS_WORD_SIZE >= 32
            BS_INV_TRANS_8(t, o, w, bmask, 16);
            BS_INV_TRANS_8(t, o, w, bmask, 24);
#endif
#if BS_WORD_SIZE >= 64
            BS_INV_TRANS_8(t, o, w, bmask, 32);
            BS_INV_TRANS_8(t, o, w, bmask, 40);
            BS_INV_TRANS_8(t, o, w, bmask, 48);
            BS_INV_TRANS_8(t, o, w, bmask, 56);
#endif
#endif
        }
    }
}
/* Bit offsets of the four state rows within a bit-sliced block (32 bits,
 * i.e. one row of four bytes, per row). */
#define BS_ROW_OFF_0      0
#define BS_ROW_OFF_1     32
#define BS_ROW_OFF_2     64
#define BS_ROW_OFF_3     96
#define BS_ROW_ADD       (AES_BLOCK_BITS / 16 + AES_BLOCK_BITS / 4)
/* Wrap indices back into the 128-bit block. */
#define BS_IDX_MASK      0x7f

/* Copy 8 consecutive bit-plane words from s[os..] to d[od..]. */
#define BS_ASSIGN_8(d, od, s, os) \
    d[(od) + 0] = s[(os) + 0];    \
    d[(od) + 1] = s[(os) + 1];    \
    d[(od) + 2] = s[(os) + 2];    \
    d[(od) + 3] = s[(os) + 3];    \
    d[(od) + 4] = s[(os) + 4];    \
    d[(od) + 5] = s[(os) + 5];    \
    d[(od) + 6] = s[(os) + 6];    \
    d[(od) + 7] = s[(os) + 7]
  2265. static void bs_shift_rows(bs_word* t, bs_word* b)
  2266. {
  2267. int i;
  2268. for (i = 0; i < 128; i += 32) {
  2269. BS_ASSIGN_8(t, i + 0, b, ( 0 + i) & BS_IDX_MASK);
  2270. BS_ASSIGN_8(t, i + 8, b, ( 40 + i) & BS_IDX_MASK);
  2271. BS_ASSIGN_8(t, i + 16, b, ( 80 + i) & BS_IDX_MASK);
  2272. BS_ASSIGN_8(t, i + 24, b, (120 + i) & BS_IDX_MASK);
  2273. }
  2274. }
/* Per-row byte shifts applied while mixing (ShiftRows folded into
 * MixColumns). */
#define BS_SHIFT_OFF_0    0
#define BS_SHIFT_OFF_1    8
#define BS_SHIFT_OFF_2   16
#define BS_SHIFT_OFF_3   24

/* Shift rows and mix columns.
 * See https://eprint.iacr.org/2009/129.pdf - Appendix A
 *
 * Computes one output byte (8 bit-planes) of MixColumns; 'of' carries the
 * xtime overflow bit (bit 7 of 2*b) that feeds the 0x1b reduction taps.
 */
#define BS_SHIFT_MIX_8(t, o, br0, br1, br2, br3, of) \
    of = br0[7] ^ br1[7];                                     \
    t[o+0] = br1[0] ^ br2[0] ^ br3[0] ^ of;                   \
    t[o+1] = br0[0] ^ br1[0] ^ br1[1] ^ br2[1] ^ br3[1] ^ of; \
    t[o+2] = br0[1] ^ br1[1] ^ br1[2] ^ br2[2] ^ br3[2];      \
    t[o+3] = br0[2] ^ br1[2] ^ br1[3] ^ br2[3] ^ br3[3] ^ of; \
    t[o+4] = br0[3] ^ br1[3] ^ br1[4] ^ br2[4] ^ br3[4] ^ of; \
    t[o+5] = br0[4] ^ br1[4] ^ br1[5] ^ br2[5] ^ br3[5];      \
    t[o+6] = br0[5] ^ br1[5] ^ br1[6] ^ br2[6] ^ br3[6];      \
    t[o+7] = br0[6] ^ br1[6] ^ br1[7] ^ br2[7] ^ br3[7]
/* Combined ShiftRows + MixColumns on a bit-sliced block.
 *
 * Row pointers start at each row's ShiftRows offset and advance one column
 * per iteration (wrapping via BS_IDX_MASK), so the row rotation falls out
 * of the pointer arithmetic.
 *
 * @param [out] t  Resulting bit-sliced block.
 * @param [in]  b  Source bit-sliced block.
 */
static void bs_shift_mix(bs_word* t, bs_word* b)
{
    int i;
    word8 or0 = BS_ROW_OFF_0 + BS_SHIFT_OFF_0;
    word8 or1 = BS_ROW_OFF_1 + BS_SHIFT_OFF_1;
    word8 or2 = BS_ROW_OFF_2 + BS_SHIFT_OFF_2;
    word8 or3 = BS_ROW_OFF_3 + BS_SHIFT_OFF_3;

    for (i = 0; i < AES_BLOCK_BITS; i += AES_BLOCK_BITS / 4) {
        bs_word* br0 = b + or0;
        bs_word* br1 = b + or1;
        bs_word* br2 = b + or2;
        bs_word* br3 = b + or3;
        bs_word of;

        /* Four output bytes of this column; row roles rotate per byte. */
        BS_SHIFT_MIX_8(t, i +  0, br0, br1, br2, br3, of);
        BS_SHIFT_MIX_8(t, i +  8, br1, br2, br3, br0, of);
        BS_SHIFT_MIX_8(t, i + 16, br2, br3, br0, br1, of);
        BS_SHIFT_MIX_8(t, i + 24, br3, br0, br1, br2, of);

        or0 = (or0 + AES_BLOCK_BITS / 4) & BS_IDX_MASK;
        or1 = (or1 + AES_BLOCK_BITS / 4) & BS_IDX_MASK;
        or2 = (or2 + AES_BLOCK_BITS / 4) & BS_IDX_MASK;
        or3 = (or3 + AES_BLOCK_BITS / 4) & BS_IDX_MASK;
    }
}
/* AddRoundKey on a bit-sliced block: out = b XOR rk.
 *
 * @param [out] out  Result (callers may pass out == b).
 * @param [in]  b    Bit-sliced state.
 * @param [in]  rk   Bit-sliced round key.
 */
static void bs_add_round_key(bs_word* out, bs_word* b, bs_word* rk)
{
    xorbufout((byte*)out, (byte*)b, (byte*)rk, BS_BLOCK_SIZE);
}
  2319. static void bs_sub_bytes_blocks(bs_word* b)
  2320. {
  2321. int i;
  2322. for (i = 0; i < AES_BLOCK_BITS; i += 8) {
  2323. bs_sub_bytes(b + i);
  2324. }
  2325. }
/* AES key-schedule round constants (Rcon). */
static const FLASH_QUALIFIER byte bs_rcon[] = {
    0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1B, 0x36,
    /* for 128-bit blocks, Rijndael never uses more than 10 rcon values */
};
/* Key-expansion SubWord: apply the AES S-box to 4 bytes using the
 * bit-sliced S-box circuit.
 *
 * Places the word alone in a zeroed block, runs the sliced S-box and
 * transposes back; only the first 4 bytes of the result are meaningful.
 *
 * @param [out] out  4 substituted bytes (may alias in).
 * @param [in]  in   4 input bytes.
 */
static void bs_ke_sub_bytes(unsigned char* out, unsigned char *in) {
    bs_word block[AES_BLOCK_BITS];
    bs_word trans[AES_BLOCK_BITS];

    XMEMSET(block, 0, sizeof(block));
    XMEMCPY(block, in, 4);

    bs_transpose(trans, block);
    bs_sub_bytes_blocks(trans);
    bs_inv_transpose(block, trans);

    XMEMCPY(out, block, 4);
}
/* Key-expansion core transform: RotWord, SubWord, then XOR the round
 * constant into the first byte.
 *
 * NOTE(review): reads/writes a word32 through unsigned char* - assumes the
 * buffers are suitably aligned for word32 access (matches surrounding key
 * expansion code; confirm for strict-alignment targets).
 *
 * @param [out] out  Transformed 4-byte word.
 * @param [in]  in   Source 4-byte word.
 * @param [in]  i    Round constant index into bs_rcon.
 */
static void bs_ke_transform(unsigned char* out, unsigned char *in, word8 i) {
    /* Rotate the input 8 bits to the left */
#ifdef LITTLE_ENDIAN_ORDER
    *(word32*)out = rotrFixed(*(word32*)in, 8);
#else
    *(word32*)out = rotlFixed(*(word32*)in, 8);
#endif
    bs_ke_sub_bytes(out, out);
    /* On just the first byte, add 2^i to the byte */
    out[0] ^= bs_rcon[i];
}
/* Expand an AES key schedule in place (normal, non-sliced form).
 *
 * The first keyLen bytes of 'in' hold the cipher key; the remaining bytes
 * up to sz are filled with the expanded round keys.
 *
 * NOTE(review): word32 accesses through unsigned char* assume suitable
 * alignment of 'in' (see bs_set_key's byte buffer) - confirm for
 * strict-alignment targets.
 *
 * @param [in,out] in  Key schedule buffer.
 * @param [in]     sz  Schedule size in bytes: 176 (AES-128), 208 (AES-192)
 *                     or 240 (AES-256).
 */
static void bs_expand_key(unsigned char *in, word32 sz) {
    unsigned char t[4];
    word32 o;
    word8 i = 0;

    if (sz == 176) {
        /* Total of 11 rounds - AES-128. */
        for (o = 16; o < sz; o += 16) {
            bs_ke_transform(t, in + o - 4, i);
            i++;
            *(word32*)(in + o +  0) = *(word32*)(in + o - 16) ^
                                      *(word32*) t;
            *(word32*)(in + o +  4) = *(word32*)(in + o - 12) ^
                                      *(word32*)(in + o +  0);
            *(word32*)(in + o +  8) = *(word32*)(in + o -  8) ^
                                      *(word32*)(in + o +  4);
            *(word32*)(in + o + 12) = *(word32*)(in + o -  4) ^
                                      *(word32*)(in + o +  8);
        }
    }
    else if (sz == 208) {
        /* Total of 13 rounds - AES-192. */
        /* NOTE(review): the final iteration (o == 192) writes up to byte
         * 215, past sz; the caller's buffer (15 * AES_BLOCK_SIZE in
         * bs_set_key) accommodates this. */
        for (o = 24; o < sz; o += 24) {
            bs_ke_transform(t, in + o - 4, i);
            i++;
            *(word32*)(in + o +  0) = *(word32*)(in + o - 24) ^
                                      *(word32*) t;
            *(word32*)(in + o +  4) = *(word32*)(in + o - 20) ^
                                      *(word32*)(in + o +  0);
            *(word32*)(in + o +  8) = *(word32*)(in + o - 16) ^
                                      *(word32*)(in + o +  4);
            *(word32*)(in + o + 12) = *(word32*)(in + o - 12) ^
                                      *(word32*)(in + o +  8);
            *(word32*)(in + o + 16) = *(word32*)(in + o -  8) ^
                                      *(word32*)(in + o + 12);
            *(word32*)(in + o + 20) = *(word32*)(in + o -  4) ^
                                      *(word32*)(in + o + 16);
        }
    }
    else if (sz == 240) {
        /* Total of 15 rounds - AES-256. */
        for (o = 32; o < sz; o += 16) {
            /* Every other 16-byte group uses the full transform (RotWord +
             * SubWord + Rcon); the alternate group uses SubWord only. */
            if ((o & 0x1f) == 0) {
                bs_ke_transform(t, in + o - 4, i);
                i++;
            }
            else {
                bs_ke_sub_bytes(t, in + o - 4);
            }
            *(word32*)(in + o +  0) = *(word32*)(in + o - 32) ^
                                      *(word32*) t;
            *(word32*)(in + o +  4) = *(word32*)(in + o - 28) ^
                                      *(word32*)(in + o +  0);
            *(word32*)(in + o +  8) = *(word32*)(in + o - 24) ^
                                      *(word32*)(in + o +  4);
            *(word32*)(in + o + 12) = *(word32*)(in + o - 20) ^
                                      *(word32*)(in + o +  8);
        }
    }
}
/* Set up the bit-sliced round keys.
 *
 * Expands the user key, then replicates each round key into every parallel
 * block slot and transposes it into bit-sliced form.
 *
 * @param [out] rk      Bit-sliced round keys; (rounds + 1) * AES_BLOCK_BITS
 *                      words are written.
 * @param [in]  key     User key.
 * @param [in]  keyLen  Key length in bytes: 16, 24 or 32.
 * @param [in]  rounds  Number of AES rounds (10, 12 or 14).
 */
static void bs_set_key(bs_word* rk, const byte* key, word32 keyLen,
    word32 rounds)
{
    int i;
    /* Max schedule: 15 round keys for AES-256. */
    byte bs_key[15 * AES_BLOCK_SIZE];
    int ksSz = (rounds + 1) * AES_BLOCK_SIZE;
    bs_word block[AES_BLOCK_BITS];

    /* First round. */
    XMEMCPY(bs_key, key, keyLen);
    bs_expand_key(bs_key, ksSz);
    for (i = 0; i < ksSz; i += AES_BLOCK_SIZE) {
        int k;

        XMEMCPY(block, bs_key + i, AES_BLOCK_SIZE);
        /* Duplicate this round key into every parallel block slot so all
         * bit-sliced blocks see the same key. */
        for (k = BS_BLOCK_WORDS; k < AES_BLOCK_BITS; k += BS_BLOCK_WORDS) {
            int l;
            for (l = 0; l < BS_BLOCK_WORDS; l++) {
                block[k + l] = block[l];
            }
        }
        bs_transpose(rk, block);
        rk += AES_BLOCK_BITS;
    }
}
/* Encrypt a batch of blocks in bit-sliced form.
 *
 * @param [in,out] state  Blocks to encrypt; replaced with ciphertext.
 * @param [in]     rk     Bit-sliced round keys.
 * @param [in]     r      Number of rounds.
 */
static void bs_encrypt(bs_word* state, bs_word* rk, word32 r)
{
    word32 i;
    bs_word trans[AES_BLOCK_BITS];

    /* Move into bit-sliced representation and add the initial round key. */
    bs_transpose(trans, state);

    bs_add_round_key(trans, trans, rk);
    for (i = 1; i < r; i++) {
        /* SubBytes, then combined ShiftRows + MixColumns, then AddRoundKey. */
        bs_sub_bytes_blocks(trans);
        bs_shift_mix(state, trans);
        rk += AES_BLOCK_BITS;
        bs_add_round_key(trans, state, rk);
    }
    /* Final round omits MixColumns. */
    bs_sub_bytes_blocks(trans);
    bs_shift_rows(state, trans);
    rk += AES_BLOCK_BITS;
    bs_add_round_key(trans, state, rk);
    /* Back to normal byte ordering. */
    bs_inv_transpose(state, trans);
}
  2451. /* Encrypt a block using AES.
  2452. *
  2453. * @param [in] aes AES object.
  2454. * @param [in] inBlock Block to encrypt.
  2455. * @param [out] outBlock Encrypted block.
  2456. * @param [in] r Rounds divided by 2.
  2457. */
  2458. static void AesEncrypt_C(Aes* aes, const byte* inBlock, byte* outBlock,
  2459. word32 r)
  2460. {
  2461. bs_word state[AES_BLOCK_BITS];
  2462. (void)r;
  2463. XMEMCPY(state, inBlock, AES_BLOCK_SIZE);
  2464. XMEMSET(((byte*)state) + AES_BLOCK_SIZE, 0, sizeof(state) - AES_BLOCK_SIZE);
  2465. bs_encrypt(state, aes->bs_key, aes->rounds);
  2466. XMEMCPY(outBlock, state, AES_BLOCK_SIZE);
  2467. }
  2468. #if defined(HAVE_AES_ECB) && !(defined(WOLFSSL_IMX6_CAAM) && \
  2469. !defined(NO_IMX6_CAAM_AES) && !defined(WOLFSSL_QNX_CAAM))
  2470. /* Encrypt a number of blocks using AES.
  2471. *
  2472. * @param [in] aes AES object.
  2473. * @param [in] in Block to encrypt.
  2474. * @param [out] out Encrypted block.
  2475. * @param [in] sz Number of blocks to encrypt.
  2476. */
  2477. static void AesEncryptBlocks_C(Aes* aes, const byte* in, byte* out, word32 sz)
  2478. {
  2479. bs_word state[AES_BLOCK_BITS];
  2480. while (sz >= BS_BLOCK_SIZE) {
  2481. XMEMCPY(state, in, BS_BLOCK_SIZE);
  2482. bs_encrypt(state, aes->bs_key, aes->rounds);
  2483. XMEMCPY(out, state, BS_BLOCK_SIZE);
  2484. sz -= BS_BLOCK_SIZE;
  2485. in += BS_BLOCK_SIZE;
  2486. out += BS_BLOCK_SIZE;
  2487. }
  2488. if (sz > 0) {
  2489. XMEMCPY(state, in, sz);
  2490. XMEMSET(((byte*)state) + sz, 0, sizeof(state) - sz);
  2491. bs_encrypt(state, aes->bs_key, aes->rounds);
  2492. XMEMCPY(out, state, sz);
  2493. }
  2494. }
  2495. #endif
  2496. #endif /* !WC_AES_BITSLICED */
/* Software AES block encrypt. NOTE: this function is also built and used
 * when AES-192 is disabled (NO_AES_192); the round count comes from
 * aes->rounds, not from a compile-time key size. */
/* Encrypt one AES block, dispatching to hardware when available and
 * falling back to the software implementation.
 *
 * @param [in]  aes      AES object; key must already be set.
 * @param [in]  inBlock  Block to encrypt (AES_BLOCK_SIZE bytes).
 * @param [out] outBlock Encrypted block (AES_BLOCK_SIZE bytes).
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL.
 * @return  KEYUSAGE_E when the round count is invalid (key not set).
 * @return  MEMORY_E / BAD_ALIGN_E on AES-NI alignment handling failure.
 */
static WARN_UNUSED_RESULT int wc_AesEncrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    word32 r;

    if (aes == NULL) {
        return BAD_FUNC_ARG;
    }

#ifdef WC_DEBUG_CIPHER_LIFECYCLE
    {
        int ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
        if (ret < 0)
            return ret;
    }
#endif

    /* Valid round counts are 10/12/14, so r is 5, 6 or 7. */
    r = aes->rounds >> 1;

    if (r > 7 || r == 0) {
        WOLFSSL_ERROR_VERBOSE(KEYUSAGE_E);
        return KEYUSAGE_E;
    }

#ifdef WOLFSSL_AESNI
    if (aes->use_aesni) {
        ASSERT_SAVED_VECTOR_REGISTERS();

#ifdef DEBUG_AESNI
        printf("about to aes encrypt\n");
        printf("in  = %p\n", inBlock);
        printf("out = %p\n", outBlock);
        printf("aes->key = %p\n", aes->key);
        printf("aes->rounds = %d\n", aes->rounds);
        printf("sz = %d\n", AES_BLOCK_SIZE);
#endif

        /* check alignment, decrypt doesn't need alignment */
        if ((wc_ptr_t)inBlock % AESNI_ALIGN) {
#ifndef NO_WOLFSSL_ALLOC_ALIGN
            /* Over-allocate and round up to an aligned scratch block. */
            byte* tmp = (byte*)XMALLOC(AES_BLOCK_SIZE + AESNI_ALIGN, aes->heap,
                                       DYNAMIC_TYPE_TMP_BUFFER);
            byte* tmp_align;
            if (tmp == NULL)
                return MEMORY_E;

            tmp_align = tmp + (AESNI_ALIGN - ((wc_ptr_t)tmp % AESNI_ALIGN));

            XMEMCPY(tmp_align, inBlock, AES_BLOCK_SIZE);
            AES_ECB_encrypt_AESNI(tmp_align, tmp_align, AES_BLOCK_SIZE,
                                  (byte*)aes->key, (int)aes->rounds);
            XMEMCPY(outBlock, tmp_align, AES_BLOCK_SIZE);
            XFREE(tmp, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
            return 0;
#else
            WOLFSSL_MSG("AES-ECB encrypt with bad alignment");
            WOLFSSL_ERROR_VERBOSE(BAD_ALIGN_E);
            return BAD_ALIGN_E;
#endif
        }

        AES_ECB_encrypt_AESNI(inBlock, outBlock, AES_BLOCK_SIZE, (byte*)aes->key,
                              (int)aes->rounds);

        return 0;
    }
    else {
#ifdef DEBUG_AESNI
        printf("Skipping AES-NI\n");
#endif
    }
#endif /* WOLFSSL_AESNI */

#if defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
    AES_ECB_encrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
    return 0;
#endif

#if defined(WOLFSSL_IMXRT_DCP)
    /* DCP hardware path only supports 128-bit keys. */
    if (aes->keylen == 16) {
        DCPAesEcbEncrypt(aes, outBlock, inBlock, AES_BLOCK_SIZE);
        return 0;
    }
#endif

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    if (aes->useSWCrypt == 0) {
        return se050_aes_crypt(aes, inBlock, outBlock, AES_BLOCK_SIZE,
                               AES_ENCRYPTION, kAlgorithm_SSS_AES_ECB);
    }
#endif

#if defined(WOLFSSL_ESPIDF) && defined(NEED_AES_HW_FALLBACK)
    ESP_LOGV(TAG, "wc_AesEncrypt fallback check");
    if (wc_esp32AesSupportedKeyLen(aes)) {
        return wc_esp32AesEncrypt(aes, inBlock, outBlock);
    }
    else {
        /* For example, the ESP32-S3 does not support HW for len = 24,
         * so fall back to SW */
    #ifdef DEBUG_WOLFSSL
        ESP_LOGW(TAG, "wc_AesEncrypt HW Falling back, unsupported keylen = %d",
                 aes->keylen);
    #endif
    }
#endif

    /* Software fallback. */
    AesEncrypt_C(aes, inBlock, outBlock, r);

    return 0;
} /* wc_AesEncrypt */
  2593. #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT || HAVE_AESGCM */
  2594. #if defined(HAVE_AES_DECRYPT)
  2595. #if (defined(HAVE_AES_CBC) && !defined(WOLFSSL_DEVCRYPTO_CBC) && \
  2596. !defined(WOLFSSL_SILABS_SE_ACCEL)) || \
  2597. defined(WOLFSSL_AES_DIRECT)
  2598. #ifndef WC_AES_BITSLICED
  2599. #ifndef WC_NO_CACHE_RESISTANT
  2600. #ifndef WOLFSSL_AES_SMALL_TABLES
  2601. /* load 4 Td Tables into cache by cache line stride */
  2602. static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchTd(void)
  2603. {
  2604. word32 x = 0;
  2605. int i,j;
  2606. for (i = 0; i < 4; i++) {
  2607. /* 256 elements, each one is 4 bytes */
  2608. for (j = 0; j < 256; j += WC_CACHE_LINE_SZ/4) {
  2609. x &= Td[i][j];
  2610. }
  2611. }
  2612. return x;
  2613. }
  2614. #endif
  2615. /* load Td Table4 into cache by cache line stride */
  2616. static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchTd4(void)
  2617. {
  2618. #ifndef WOLFSSL_AES_TOUCH_LINES
  2619. word32 x = 0;
  2620. int i;
  2621. for (i = 0; i < 256; i += WC_CACHE_LINE_SZ) {
  2622. x &= (word32)Td4[i];
  2623. }
  2624. return x;
  2625. #else
  2626. return 0;
  2627. #endif
  2628. }
  2629. #endif
/* Decrypt a block using AES (table-based software implementation).
 *
 * @param [in]  aes      AES object.
 * @param [in]  inBlock  Block to decrypt.
 * @param [out] outBlock Decrypted block.
 * @param [in]  r        Rounds divided by 2.
 */
static void AesDecrypt_C(Aes* aes, const byte* inBlock, byte* outBlock,
    word32 r)
{
    word32 s0, s1, s2, s3;
    word32 t0, t1, t2, t3;
    const word32* rk;

#ifdef WC_AES_C_DYNAMIC_FALLBACK
    rk = aes->key_C_fallback;
#else
    rk = aes->key;
#endif

    /*
     * map byte array block to cipher state
     * and add initial round key:
     */
    XMEMCPY(&s0, inBlock, sizeof(s0));
    XMEMCPY(&s1, inBlock + sizeof(s0), sizeof(s1));
    XMEMCPY(&s2, inBlock + 2 * sizeof(s0), sizeof(s2));
    XMEMCPY(&s3, inBlock + 3 * sizeof(s0), sizeof(s3));

#ifdef LITTLE_ENDIAN_ORDER
    /* State words are operated on big-endian. */
    s0 = ByteReverseWord32(s0);
    s1 = ByteReverseWord32(s1);
    s2 = ByteReverseWord32(s2);
    s3 = ByteReverseWord32(s3);
#endif

    s0 ^= rk[0];
    s1 ^= rk[1];
    s2 ^= rk[2];
    s3 ^= rk[3];

#ifndef WOLFSSL_AES_SMALL_TABLES
#ifndef WC_NO_CACHE_RESISTANT
    /* Touch the tables so the lookups below are cache-timing neutral.
     * PreFetchTd() returns 0, so the OR does not change s0. */
    s0 |= PreFetchTd();
#endif

#ifndef WOLFSSL_AES_TOUCH_LINES
/* Unroll the loop. */
/* One inverse round from state s into temp t (key offset o). */
#define DEC_ROUND_T_S(o) \
    t0 = GetTable(Td[0], GETBYTE(s0, 3)) ^ GetTable(Td[1], GETBYTE(s3, 2)) ^ \
         GetTable(Td[2], GETBYTE(s2, 1)) ^ GetTable(Td[3], GETBYTE(s1, 0)) ^ rk[(o)+4]; \
    t1 = GetTable(Td[0], GETBYTE(s1, 3)) ^ GetTable(Td[1], GETBYTE(s0, 2)) ^ \
         GetTable(Td[2], GETBYTE(s3, 1)) ^ GetTable(Td[3], GETBYTE(s2, 0)) ^ rk[(o)+5]; \
    t2 = GetTable(Td[0], GETBYTE(s2, 3)) ^ GetTable(Td[1], GETBYTE(s1, 2)) ^ \
         GetTable(Td[2], GETBYTE(s0, 1)) ^ GetTable(Td[3], GETBYTE(s3, 0)) ^ rk[(o)+6]; \
    t3 = GetTable(Td[0], GETBYTE(s3, 3)) ^ GetTable(Td[1], GETBYTE(s2, 2)) ^ \
         GetTable(Td[2], GETBYTE(s1, 1)) ^ GetTable(Td[3], GETBYTE(s0, 0)) ^ rk[(o)+7]
/* One inverse round from temp t back into state s (key offset o). */
#define DEC_ROUND_S_T(o) \
    s0 = GetTable(Td[0], GETBYTE(t0, 3)) ^ GetTable(Td[1], GETBYTE(t3, 2)) ^ \
         GetTable(Td[2], GETBYTE(t2, 1)) ^ GetTable(Td[3], GETBYTE(t1, 0)) ^ rk[(o)+0]; \
    s1 = GetTable(Td[0], GETBYTE(t1, 3)) ^ GetTable(Td[1], GETBYTE(t0, 2)) ^ \
         GetTable(Td[2], GETBYTE(t3, 1)) ^ GetTable(Td[3], GETBYTE(t2, 0)) ^ rk[(o)+1]; \
    s2 = GetTable(Td[0], GETBYTE(t2, 3)) ^ GetTable(Td[1], GETBYTE(t1, 2)) ^ \
         GetTable(Td[2], GETBYTE(t0, 1)) ^ GetTable(Td[3], GETBYTE(t3, 0)) ^ rk[(o)+2]; \
    s3 = GetTable(Td[0], GETBYTE(t3, 3)) ^ GetTable(Td[1], GETBYTE(t2, 2)) ^ \
         GetTable(Td[2], GETBYTE(t1, 1)) ^ GetTable(Td[3], GETBYTE(t0, 0)) ^ rk[(o)+3]
#else
/* Cache-line-touching variants: each lookup walks whole table lines. */
#define DEC_ROUND_T_S(o) \
    GetTable_Multi(Td[0], &t0, GETBYTE(s0, 3), &t1, GETBYTE(s1, 3), \
                          &t2, GETBYTE(s2, 3), &t3, GETBYTE(s3, 3)); \
    XorTable_Multi(Td[1], &t0, GETBYTE(s3, 2), &t1, GETBYTE(s0, 2), \
                          &t2, GETBYTE(s1, 2), &t3, GETBYTE(s2, 2)); \
    XorTable_Multi(Td[2], &t0, GETBYTE(s2, 1), &t1, GETBYTE(s3, 1), \
                          &t2, GETBYTE(s0, 1), &t3, GETBYTE(s1, 1)); \
    XorTable_Multi(Td[3], &t0, GETBYTE(s1, 0), &t1, GETBYTE(s2, 0), \
                          &t2, GETBYTE(s3, 0), &t3, GETBYTE(s0, 0)); \
    t0 ^= rk[(o)+4]; t1 ^= rk[(o)+5]; t2 ^= rk[(o)+6]; t3 ^= rk[(o)+7];
#define DEC_ROUND_S_T(o) \
    GetTable_Multi(Td[0], &s0, GETBYTE(t0, 3), &s1, GETBYTE(t1, 3), \
                          &s2, GETBYTE(t2, 3), &s3, GETBYTE(t3, 3)); \
    XorTable_Multi(Td[1], &s0, GETBYTE(t3, 2), &s1, GETBYTE(t0, 2), \
                          &s2, GETBYTE(t1, 2), &s3, GETBYTE(t2, 2)); \
    XorTable_Multi(Td[2], &s0, GETBYTE(t2, 1), &s1, GETBYTE(t3, 1), \
                          &s2, GETBYTE(t0, 1), &s3, GETBYTE(t1, 1)); \
    XorTable_Multi(Td[3], &s0, GETBYTE(t1, 0), &s1, GETBYTE(t2, 0), \
                          &s2, GETBYTE(t3, 0), &s3, GETBYTE(t0, 0)); \
    s0 ^= rk[(o)+0]; s1 ^= rk[(o)+1]; s2 ^= rk[(o)+2]; s3 ^= rk[(o)+3];
#endif

#ifndef WOLFSSL_AES_NO_UNROLL
    /* Unrolled: r is 5/6/7 for 10/12/14 rounds. */
    DEC_ROUND_T_S( 0);
    DEC_ROUND_S_T( 8); DEC_ROUND_T_S( 8);
    DEC_ROUND_S_T(16); DEC_ROUND_T_S(16);
    DEC_ROUND_S_T(24); DEC_ROUND_T_S(24);
    DEC_ROUND_S_T(32); DEC_ROUND_T_S(32);
    if (r > 5) {
        DEC_ROUND_S_T(40); DEC_ROUND_T_S(40);
        if (r > 6) {
            DEC_ROUND_S_T(48); DEC_ROUND_T_S(48);
        }
    }
    rk += r * 8;
#else
    /*
     * Nr - 1 full rounds:
     */
    for (;;) {
        DEC_ROUND_T_S(0);

        rk += 8;
        if (--r == 0) {
            break;
        }

        DEC_ROUND_S_T(0);
    }
#endif

    /*
     * apply last round and
     * map cipher state to byte array block:
     */
#ifndef WC_NO_CACHE_RESISTANT
    /* PreFetchTd4() returns 0; OR is a no-op on t0. */
    t0 |= PreFetchTd4();
#endif

    s0 = GetTable8_4(Td4, GETBYTE(t0, 3), GETBYTE(t3, 2),
                          GETBYTE(t2, 1), GETBYTE(t1, 0)) ^ rk[0];
    s1 = GetTable8_4(Td4, GETBYTE(t1, 3), GETBYTE(t0, 2),
                          GETBYTE(t3, 1), GETBYTE(t2, 0)) ^ rk[1];
    s2 = GetTable8_4(Td4, GETBYTE(t2, 3), GETBYTE(t1, 2),
                          GETBYTE(t0, 1), GETBYTE(t3, 0)) ^ rk[2];
    s3 = GetTable8_4(Td4, GETBYTE(t3, 3), GETBYTE(t2, 2),
                          GETBYTE(t1, 1), GETBYTE(t0, 0)) ^ rk[3];
#else
    /* Small-tables build: inverse S-box via Td4 plus explicit inverse
     * MixColumns via inv_col_mul(). */
#ifndef WC_NO_CACHE_RESISTANT
    s0 |= PreFetchTd4();
#endif

    r *= 2;
    /* Inverse SubBytes/ShiftRows then inverse MixColumns each round. */
    for (rk += 4; r > 1; r--, rk += 4) {
        t0 =
            ((word32)GetTable8(Td4, GETBYTE(s0, 3)) << 24) ^
            ((word32)GetTable8(Td4, GETBYTE(s3, 2)) << 16) ^
            ((word32)GetTable8(Td4, GETBYTE(s2, 1)) <<  8) ^
            ((word32)GetTable8(Td4, GETBYTE(s1, 0))) ^
            rk[0];
        t1 =
            ((word32)GetTable8(Td4, GETBYTE(s1, 3)) << 24) ^
            ((word32)GetTable8(Td4, GETBYTE(s0, 2)) << 16) ^
            ((word32)GetTable8(Td4, GETBYTE(s3, 1)) <<  8) ^
            ((word32)GetTable8(Td4, GETBYTE(s2, 0))) ^
            rk[1];
        t2 =
            ((word32)GetTable8(Td4, GETBYTE(s2, 3)) << 24) ^
            ((word32)GetTable8(Td4, GETBYTE(s1, 2)) << 16) ^
            ((word32)GetTable8(Td4, GETBYTE(s0, 1)) <<  8) ^
            ((word32)GetTable8(Td4, GETBYTE(s3, 0))) ^
            rk[2];
        t3 =
            ((word32)GetTable8(Td4, GETBYTE(s3, 3)) << 24) ^
            ((word32)GetTable8(Td4, GETBYTE(s2, 2)) << 16) ^
            ((word32)GetTable8(Td4, GETBYTE(s1, 1)) <<  8) ^
            ((word32)GetTable8(Td4, GETBYTE(s0, 0))) ^
            rk[3];

        s0 =
            (inv_col_mul(t0, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t0, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t0, 2, 0, 3, 1) <<  8) ^
            (inv_col_mul(t0, 1, 3, 2, 0)      );
        s1 =
            (inv_col_mul(t1, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t1, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t1, 2, 0, 3, 1) <<  8) ^
            (inv_col_mul(t1, 1, 3, 2, 0)      );
        s2 =
            (inv_col_mul(t2, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t2, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t2, 2, 0, 3, 1) <<  8) ^
            (inv_col_mul(t2, 1, 3, 2, 0)      );
        s3 =
            (inv_col_mul(t3, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t3, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t3, 2, 0, 3, 1) <<  8) ^
            (inv_col_mul(t3, 1, 3, 2, 0)      );
    }

    /* Final round: inverse SubBytes/ShiftRows only, no MixColumns. */
    t0 =
        ((word32)GetTable8(Td4, GETBYTE(s0, 3)) << 24) ^
        ((word32)GetTable8(Td4, GETBYTE(s3, 2)) << 16) ^
        ((word32)GetTable8(Td4, GETBYTE(s2, 1)) <<  8) ^
        ((word32)GetTable8(Td4, GETBYTE(s1, 0)));
    t1 =
        ((word32)GetTable8(Td4, GETBYTE(s1, 3)) << 24) ^
        ((word32)GetTable8(Td4, GETBYTE(s0, 2)) << 16) ^
        ((word32)GetTable8(Td4, GETBYTE(s3, 1)) <<  8) ^
        ((word32)GetTable8(Td4, GETBYTE(s2, 0)));
    t2 =
        ((word32)GetTable8(Td4, GETBYTE(s2, 3)) << 24) ^
        ((word32)GetTable8(Td4, GETBYTE(s1, 2)) << 16) ^
        ((word32)GetTable8(Td4, GETBYTE(s0, 1)) <<  8) ^
        ((word32)GetTable8(Td4, GETBYTE(s3, 0)));
    t3 =
        ((word32)GetTable8(Td4, GETBYTE(s3, 3)) << 24) ^
        ((word32)GetTable8(Td4, GETBYTE(s2, 2)) << 16) ^
        ((word32)GetTable8(Td4, GETBYTE(s1, 1)) <<  8) ^
        ((word32)GetTable8(Td4, GETBYTE(s0, 0)));
    s0 = t0 ^ rk[0];
    s1 = t1 ^ rk[1];
    s2 = t2 ^ rk[2];
    s3 = t3 ^ rk[3];
#endif

    /* write out */
#ifdef LITTLE_ENDIAN_ORDER
    s0 = ByteReverseWord32(s0);
    s1 = ByteReverseWord32(s1);
    s2 = ByteReverseWord32(s2);
    s3 = ByteReverseWord32(s3);
#endif

    XMEMCPY(outBlock, &s0, sizeof(s0));
    XMEMCPY(outBlock + sizeof(s0), &s1, sizeof(s1));
    XMEMCPY(outBlock + 2 * sizeof(s0), &s2, sizeof(s2));
    XMEMCPY(outBlock + 3 * sizeof(s0), &s3, sizeof(s3));
}
  2841. #if defined(HAVE_AES_ECB) && !(defined(WOLFSSL_IMX6_CAAM) && \
  2842. !defined(NO_IMX6_CAAM_AES) && !defined(WOLFSSL_QNX_CAAM))
  2843. /* Decrypt a number of blocks using AES.
  2844. *
  2845. * @param [in] aes AES object.
  2846. * @param [in] in Block to encrypt.
  2847. * @param [out] out Encrypted block.
  2848. * @param [in] sz Number of blocks to encrypt.
  2849. */
  2850. static void AesDecryptBlocks_C(Aes* aes, const byte* in, byte* out, word32 sz)
  2851. {
  2852. word32 i;
  2853. for (i = 0; i < sz; i += AES_BLOCK_SIZE) {
  2854. AesDecrypt_C(aes, in, out, aes->rounds >> 1);
  2855. in += AES_BLOCK_SIZE;
  2856. out += AES_BLOCK_SIZE;
  2857. }
  2858. }
  2859. #endif
  2860. #else
/* http://cs-www.cs.yale.edu/homes/peralta/CircuitStuff/Sinv.txt */
/* Bit-sliced AES inverse S-box as a fixed Boolean circuit (see URL above).
 * u[0..7] holds the 8 bit-planes of the input bytes, u[0] being the most
 * significant bit; they are replaced in place with the inverse S-box
 * output. The variable names and operation order follow the published
 * circuit and must not be reordered. */
static void bs_inv_sub_bytes(bs_word u[8])
{
    bs_word U0, U1, U2, U3, U4, U5, U6, U7;
    bs_word Y0, Y1, Y2, Y3, Y4, Y5, Y6, Y7;
    bs_word RTL0, RTL1, RTL2;
    bs_word sa0, sa1;
    bs_word sb0, sb1;
    bs_word ab0, ab1, ab2, ab3;
    bs_word ab20, ab21, ab22, ab23;
    bs_word al, ah, aa, bl, bh, bb;
    bs_word abcd1, abcd2, abcd3, abcd4, abcd5, abcd6;
    bs_word ph11, ph12, ph13, ph01, ph02, ph03;
    bs_word pl01, pl02, pl03, pl11, pl12, pl13;
    bs_word r1, r2, r3, r4, r5, r6, r7, r8, r9;
    bs_word rr1, rr2;
    bs_word r10, r11;
    bs_word cp1, cp2, cp3, cp4;
    bs_word vr1, vr2, vr3;
    bs_word pr1, pr2, pr3;
    bs_word wr1, wr2, wr3;
    bs_word qr1, qr2, qr3;
    bs_word tinv1, tinv2, tinv3, tinv4, tinv5, tinv6, tinv7, tinv8, tinv9;
    bs_word tinv10, tinv11, tinv12, tinv13;
    bs_word t01, t02;
    bs_word d0, d1, d2, d3;
    bs_word dl, dd, dh;
    bs_word sd0, sd1;
    bs_word p0, p1, p2, p3, p4, p6, p7;
    bs_word X11, X13, X14, X16, X18, X19;
    bs_word S0, S1, S2, S3, S4, S5, S6, S7;

    /* Load bit-planes; circuit indexes bits in the opposite order. */
    U0 = u[7];
    U1 = u[6];
    U2 = u[5];
    U3 = u[4];
    U4 = u[3];
    U5 = u[2];
    U6 = u[1];
    U7 = u[0];

    /* Input linear layer (top linear transform of the circuit). */
    Y0 = U0 ^ U3;
    Y2 = ~(U1 ^ U3);
    Y4 = U0 ^ Y2;
    RTL0 = U6 ^ U7;
    Y1 = Y2 ^ RTL0;
    Y7 = ~(U2 ^ Y1);
    RTL1 = U3 ^ U4;
    Y6 = ~(U7 ^ RTL1);
    Y3 = Y1 ^ RTL1;
    RTL2 = ~(U0 ^ U2);
    Y5 = U5 ^ RTL2;
    sa1 = Y0 ^ Y2;
    sa0 = Y1 ^ Y3;
    sb1 = Y4 ^ Y6;
    sb0 = Y5 ^ Y7;
    ah = Y0 ^ Y1;
    al = Y2 ^ Y3;
    aa = sa0 ^ sa1;
    bh = Y4 ^ Y5;
    bl = Y6 ^ Y7;
    bb = sb0 ^ sb1;
    ab20 = sa0 ^ sb0;
    ab22 = al ^ bl;
    ab23 = Y3 ^ Y7;
    ab21 = sa1 ^ sb1;

    /* Nonlinear middle section (GF(2^4)/GF(2^2) inversion tower). */
    abcd1 = ah & bh;
    rr1 = Y0 & Y4;
    ph11 = ab20 ^ abcd1;
    t01 = Y1 & Y5;
    ph01 = t01 ^ abcd1;
    abcd2 = al & bl;
    r1 = Y2 & Y6;
    pl11 = ab22 ^ abcd2;
    r2 = Y3 & Y7;
    pl01 = r2 ^ abcd2;
    r3 = sa0 & sb0;
    vr1 = aa & bb;
    pr1 = vr1 ^ r3;
    wr1 = sa1 & sb1;
    qr1 = wr1 ^ r3;
    ab0 = ph11 ^ rr1;
    ab1 = ph01 ^ ab21;
    ab2 = pl11 ^ r1;
    ab3 = pl01 ^ qr1;
    cp1 = ab0 ^ pr1;
    cp2 = ab1 ^ qr1;
    cp3 = ab2 ^ pr1;
    cp4 = ab3 ^ ab23;
    tinv1 = cp3 ^ cp4;
    tinv2 = cp3 & cp1;
    tinv3 = cp2 ^ tinv2;
    tinv4 = cp1 ^ cp2;
    tinv5 = cp4 ^ tinv2;
    tinv6 = tinv5 & tinv4;
    tinv7 = tinv3 & tinv1;
    d2 = cp4 ^ tinv7;
    d0 = cp2 ^ tinv6;
    tinv8 = cp1 & cp4;
    tinv9 = tinv4 & tinv8;
    tinv10 = tinv4 ^ tinv2;
    d1 = tinv9 ^ tinv10;
    tinv11 = cp2 & cp3;
    tinv12 = tinv1 & tinv11;
    tinv13 = tinv1 ^ tinv2;
    d3 = tinv12 ^ tinv13;
    sd1 = d1 ^ d3;
    sd0 = d0 ^ d2;
    dl = d0 ^ d1;
    dh = d2 ^ d3;
    dd = sd0 ^ sd1;
    abcd3 = dh & bh;
    rr2 = d3 & Y4;
    t02 = d2 & Y5;
    abcd4 = dl & bl;
    r4 = d1 & Y6;
    r5 = d0 & Y7;
    r6 = sd0 & sb0;
    vr2 = dd & bb;
    wr2 = sd1 & sb1;
    abcd5 = dh & ah;
    r7 = d3 & Y0;
    r8 = d2 & Y1;
    abcd6 = dl & al;
    r9 = d1 & Y2;
    r10 = d0 & Y3;
    r11 = sd0 & sa0;
    vr3 = dd & aa;
    wr3 = sd1 & sa1;
    ph12 = rr2 ^ abcd3;
    ph02 = t02 ^ abcd3;
    pl12 = r4 ^ abcd4;
    pl02 = r5 ^ abcd4;
    pr2 = vr2 ^ r6;
    qr2 = wr2 ^ r6;
    p0 = ph12 ^ pr2;
    p1 = ph02 ^ qr2;
    p2 = pl12 ^ pr2;
    p3 = pl02 ^ qr2;
    ph13 = r7 ^ abcd5;
    ph03 = r8 ^ abcd5;
    pl13 = r9 ^ abcd6;
    pl03 = r10 ^ abcd6;
    pr3 = vr3 ^ r11;
    qr3 = wr3 ^ r11;
    p4 = ph13 ^ pr3;
    S7 = ph03 ^ qr3;
    p6 = pl13 ^ pr3;
    p7 = pl03 ^ qr3;

    /* Output linear layer (bottom linear transform of the circuit). */
    S3 = p1 ^ p6;
    S6 = p2 ^ p6;
    S0 = p3 ^ p6;
    X11 = p0 ^ p2;
    S5 = S0 ^ X11;
    X13 = p4 ^ p7;
    X14 = X11 ^ X13;
    S1 = S3 ^ X14;
    X16 = p1 ^ S7;
    S2 = X14 ^ X16;
    X18 = p0 ^ p4;
    X19 = S5 ^ X16;
    S4 = X18 ^ X19;

    /* Store results back, again with reversed bit indexing. */
    u[0] = S7;
    u[1] = S6;
    u[2] = S5;
    u[3] = S4;
    u[4] = S3;
    u[5] = S2;
    u[6] = S1;
    u[7] = S0;
}
/* Inverse ShiftRows on a bit-sliced block.
 *
 * Offsets are in bits (8 bits per byte of the block) and wrap modulo the
 * block size via BS_IDX_MASK; each of the four rows is rotated by a
 * different byte count.
 *
 * @param [in,out] b  Bit-sliced block; shifted in place.
 */
static void bs_inv_shift_rows(bs_word* b)
{
    bs_word t[AES_BLOCK_BITS];
    int i;

    for (i = 0; i < 128; i += 32) {
        BS_ASSIGN_8(t, i +  0, b, (  0 + i) & BS_IDX_MASK);
        BS_ASSIGN_8(t, i +  8, b, (104 + i) & BS_IDX_MASK);
        BS_ASSIGN_8(t, i + 16, b, ( 80 + i) & BS_IDX_MASK);
        BS_ASSIGN_8(t, i + 24, b, ( 56 + i) & BS_IDX_MASK);
    }
    XMEMCPY(b, t, sizeof(t));
}
/* Bit offsets of the four bytes of a column within a bit-sliced block. */
#define O0 0
#define O1 8
#define O2 16
#define O3 24

/* Inverse MixColumns on one output byte (8 bit-planes) of a column.
 * br receives the result; b supplies the four input bytes at bit offsets
 * O0..O3. of0/of1/of2 collect the high bit-planes folded back in by the
 * GF(2^8) modular reduction (NOTE(review): inferred from the structure —
 * high planes XORed into low outputs — confirm against the generator). */
#define BS_INV_MIX_SHIFT_8(br, b, O0, O1, O2, O3, of0, of1, of2)    \
    of0 = b[O0+7] ^ b[O0+6] ^ b[O0+5] ^ b[O1 + 7] ^ b[O1+5] ^      \
          b[O2+6] ^ b[O2+5] ^ b[O3+5];                             \
    of1 = b[O0+7] ^ b[O0+6] ^ b[O1+6] ^                            \
          b[O2+7] ^ b[O2+6] ^ b[O3+6];                             \
    of2 = b[O0+7] ^ b[O1+7] ^                                      \
          b[O2+7] ^ b[O3+7];                                       \
                                                                   \
    br[0] = b[O1+0] ^                                              \
            b[O2+0] ^ b[O3+0] ^ of0;                               \
    br[1] = b[O0+0] ^ b[O1+0] ^ b[O1+1] ^                          \
            b[O2+1] ^ b[O3+1] ^ of0 ^ of1;                         \
    br[2] = b[O0+1] ^ b[O0+0] ^ b[O1+1] ^ b[O1+2] ^                \
            b[O2+2] ^ b[O2+0] ^ b[O3+2] ^ of1 ^ of2;               \
    br[3] = b[O0+2] ^ b[O0+1] ^ b[O0+0] ^ b[O1+0] ^ b[O1+2] ^ b[O1+3] ^ \
            b[O2+3] ^ b[O2+1] ^ b[O2+0] ^ b[O3+3] ^ b[O3+0] ^ of0 ^ of2; \
    br[4] = b[O0+3] ^ b[O0+2] ^ b[O0+1] ^ b[O1+1] ^ b[O1+3] ^ b[O1+4] ^ \
            b[O2+4] ^ b[O2+2] ^ b[O2+1] ^ b[O3+4] ^ b[O3+1] ^ of0 ^ of1; \
    br[5] = b[O0+4] ^ b[O0+3] ^ b[O0+2] ^ b[O1+2] ^ b[O1+4] ^ b[O1+5] ^ \
            b[O2+5] ^ b[O2+3] ^ b[O2+2] ^ b[O3+5] ^ b[O3+2] ^ of1 ^ of2; \
    br[6] = b[O0+5] ^ b[O0+4] ^ b[O0+3] ^ b[O1+3] ^ b[O1+5] ^ b[O1+6] ^ \
            b[O2+6] ^ b[O2+4] ^ b[O2+3] ^ b[O3+6] ^ b[O3+3] ^ of2; \
    br[7] = b[O0+6] ^ b[O0+5] ^ b[O0+4] ^ b[O1+4] ^ b[O1+6] ^ b[O1+7] ^ \
            b[O2+7] ^ b[O2+5] ^ b[O2+4] ^ b[O3+7] ^ b[O3+4]
/* Inverse mix columns and shift rows. */
/* Applies inverse MixColumns to each column of b while writing the result
 * into t at row offsets rotated for inverse ShiftRows.
 *
 * @param [out] t  Destination bit-sliced block.
 * @param [in]  b  Source bit-sliced block.
 */
static void bs_inv_mix_shift(bs_word* t, bs_word* b)
{
    bs_word* bp = b;
    /* Per-row output offsets: row base plus the inverse-shift rotation. */
    word8 or0 = BS_ROW_OFF_0 + BS_SHIFT_OFF_0;
    word8 or1 = BS_ROW_OFF_1 + BS_SHIFT_OFF_1;
    word8 or2 = BS_ROW_OFF_2 + BS_SHIFT_OFF_2;
    word8 or3 = BS_ROW_OFF_3 + BS_SHIFT_OFF_3;
    int i;

    for (i = 0; i < AES_BLOCK_BITS / 4; i += AES_BLOCK_BITS / 16) {
        bs_word* br;
        bs_word of0;
        bs_word of1;
        bs_word of2;

        /* Each of the four bytes of the column, with the input byte order
         * rotated one position per output row. */
        br = t + or0;
        BS_INV_MIX_SHIFT_8(br, bp, O0, O1, O2, O3, of0, of1, of2);
        br = t + or1;
        BS_INV_MIX_SHIFT_8(br, bp, O1, O2, O3, O0, of0, of1, of2);
        br = t + or2;
        BS_INV_MIX_SHIFT_8(br, bp, O2, O3, O0, O1, of0, of1, of2);
        br = t + or3;
        BS_INV_MIX_SHIFT_8(br, bp, O3, O0, O1, O2, of0, of1, of2);

        /* Advance to the next column, wrapping offsets around the block. */
        or0 = (or0 + AES_BLOCK_BITS / 4) & BS_IDX_MASK;
        or1 = (or1 + AES_BLOCK_BITS / 4) & BS_IDX_MASK;
        or2 = (or2 + AES_BLOCK_BITS / 4) & BS_IDX_MASK;
        or3 = (or3 + AES_BLOCK_BITS / 4) & BS_IDX_MASK;

        bp += AES_BLOCK_BITS / 4;
    }
}
  3099. static void bs_inv_sub_bytes_blocks(bs_word* b)
  3100. {
  3101. int i;
  3102. for (i = 0; i < AES_BLOCK_BITS; i += 8) {
  3103. bs_inv_sub_bytes(b + i);
  3104. }
  3105. }
/* Decrypt a batch of blocks in bit-sliced form.
 *
 * Walks the round keys from last to first, undoing each encryption round.
 *
 * @param [in,out] state  Blocks to decrypt; replaced with plaintext.
 * @param [in]     rk     Bit-sliced round keys.
 * @param [in]     r      Number of rounds.
 */
static void bs_decrypt(bs_word* state, bs_word* rk, word32 r)
{
    int i;
    bs_word trans[AES_BLOCK_BITS];

    bs_transpose(trans, state);

    /* Start at the final round key. */
    rk += r * AES_BLOCK_BITS;
    bs_add_round_key(trans, trans, rk);
    /* Undo the last (MixColumns-free) round. */
    bs_inv_shift_rows(trans);
    bs_inv_sub_bytes_blocks(trans);
    rk -= AES_BLOCK_BITS;
    bs_add_round_key(trans, trans, rk);
    for (i = (int)r - 2; i >= 0; i--) {
        /* Combined inverse MixColumns + ShiftRows, then inverse SubBytes. */
        bs_inv_mix_shift(state, trans);
        bs_inv_sub_bytes_blocks(state);
        rk -= AES_BLOCK_BITS;
        bs_add_round_key(trans, state, rk);
    }
    bs_inv_transpose(state, trans);
}
  3125. #ifdef WOLFSSL_AES_DIRECT
  3126. /* Decrypt a block using AES.
  3127. *
  3128. * @param [in] aes AES object.
  3129. * @param [in] inBlock Block to encrypt.
  3130. * @param [out] outBlock Encrypted block.
  3131. * @param [in] r Rounds divided by 2.
  3132. */
  3133. static void AesDecrypt_C(Aes* aes, const byte* inBlock, byte* outBlock,
  3134. word32 r)
  3135. {
  3136. bs_word state[AES_BLOCK_BITS];
  3137. (void)r;
  3138. XMEMCPY(state, inBlock, AES_BLOCK_SIZE);
  3139. XMEMSET(((byte*)state) + AES_BLOCK_SIZE, 0, sizeof(state) - AES_BLOCK_SIZE);
  3140. bs_decrypt(state, aes->bs_key, aes->rounds);
  3141. XMEMCPY(outBlock, state, AES_BLOCK_SIZE);
  3142. }
  3143. #endif
  3144. #if defined(HAVE_AES_ECB) && !(defined(WOLFSSL_IMX6_CAAM) && \
  3145. !defined(NO_IMX6_CAAM_AES) && !defined(WOLFSSL_QNX_CAAM))
  3146. /* Decrypt a number of blocks using AES.
  3147. *
  3148. * @param [in] aes AES object.
  3149. * @param [in] in Block to encrypt.
  3150. * @param [out] out Encrypted block.
  3151. * @param [in] sz Number of blocks to encrypt.
  3152. */
  3153. static void AesDecryptBlocks_C(Aes* aes, const byte* in, byte* out, word32 sz)
  3154. {
  3155. bs_word state[AES_BLOCK_BITS];
  3156. while (sz >= BS_BLOCK_SIZE) {
  3157. XMEMCPY(state, in, BS_BLOCK_SIZE);
  3158. bs_decrypt(state, aes->bs_key, aes->rounds);
  3159. XMEMCPY(out, state, BS_BLOCK_SIZE);
  3160. sz -= BS_BLOCK_SIZE;
  3161. in += BS_BLOCK_SIZE;
  3162. out += BS_BLOCK_SIZE;
  3163. }
  3164. if (sz > 0) {
  3165. XMEMCPY(state, in, sz);
  3166. XMEMSET(((byte*)state) + sz, 0, sizeof(state) - sz);
  3167. bs_decrypt(state, aes->bs_key, aes->rounds);
  3168. XMEMCPY(out, state, sz);
  3169. }
  3170. }
  3171. #endif
  3172. #endif
  3173. #if !defined(WC_AES_BITSLICED) || defined(WOLFSSL_AES_DIRECT)
/* Software AES - ECB Decrypt */
/* Decrypt one AES block, dispatching to hardware when available and
 * falling back to the software implementation.
 *
 * @param [in]  aes      AES object; key must already be set.
 * @param [in]  inBlock  Block to decrypt (AES_BLOCK_SIZE bytes).
 * @param [out] outBlock Decrypted block (AES_BLOCK_SIZE bytes).
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL.
 * @return  KEYUSAGE_E when the round count is invalid (key not set).
 */
static WARN_UNUSED_RESULT int wc_AesDecrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    word32 r;

    if (aes == NULL) {
        return BAD_FUNC_ARG;
    }

#ifdef WC_DEBUG_CIPHER_LIFECYCLE
    {
        int ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
        if (ret < 0)
            return ret;
    }
#endif

    /* Valid round counts are 10/12/14, so r is 5, 6 or 7. */
    r = aes->rounds >> 1;

    if (r > 7 || r == 0) {
        WOLFSSL_ERROR_VERBOSE(KEYUSAGE_E);
        return KEYUSAGE_E;
    }

#ifdef WOLFSSL_AESNI
    if (aes->use_aesni) {
        ASSERT_SAVED_VECTOR_REGISTERS();

#ifdef DEBUG_AESNI
        printf("about to aes decrypt\n");
        printf("in  = %p\n", inBlock);
        printf("out = %p\n", outBlock);
        printf("aes->key = %p\n", aes->key);
        printf("aes->rounds = %d\n", aes->rounds);
        printf("sz = %d\n", AES_BLOCK_SIZE);
#endif

        /* if input and output same will overwrite input iv */
        if ((const byte*)aes->tmp != inBlock)
            XMEMCPY(aes->tmp, inBlock, AES_BLOCK_SIZE);
        AES_ECB_decrypt_AESNI(inBlock, outBlock, AES_BLOCK_SIZE, (byte*)aes->key,
                              (int)aes->rounds);
        return 0;
    }
    else {
#ifdef DEBUG_AESNI
        printf("Skipping AES-NI\n");
#endif
    }
#endif /* WOLFSSL_AESNI */

#if defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
    return AES_ECB_decrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
#endif

#if defined(WOLFSSL_IMXRT_DCP)
    /* DCP hardware path only supports 128-bit keys. */
    if (aes->keylen == 16) {
        DCPAesEcbDecrypt(aes, outBlock, inBlock, AES_BLOCK_SIZE);
        return 0;
    }
#endif

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    if (aes->useSWCrypt == 0) {
        return se050_aes_crypt(aes, inBlock, outBlock, AES_BLOCK_SIZE,
                               AES_DECRYPTION, kAlgorithm_SSS_AES_ECB);
    }
#endif

#if defined(WOLFSSL_ESPIDF) && defined(NEED_AES_HW_FALLBACK)
    if (wc_esp32AesSupportedKeyLen(aes)) {
        return wc_esp32AesDecrypt(aes, inBlock, outBlock);
    }
    else {
        /* For example, the ESP32-S3 does not support HW for len = 24,
         * so fall back to SW */
    #ifdef DEBUG_WOLFSSL
        ESP_LOGW(TAG, "wc_AesDecrypt HW Falling back, "
                      "unsupported keylen = %d", aes->keylen);
    #endif
    } /* else !wc_esp32AesSupportedKeyLen for ESP32 */
#endif

    /* Software fallback. */
    AesDecrypt_C(aes, inBlock, outBlock, r);

    return 0;
} /* wc_AesDecrypt[_SW]() */
  3249. #endif /* !WC_AES_BITSLICED || WOLFSSL_AES_DIRECT */
  3250. #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT */
  3251. #endif /* HAVE_AES_DECRYPT */
  3252. #endif /* NEED_AES_TABLES */
  3253. /* wc_AesSetKey */
  3254. #if defined(STM32_CRYPTO)
/* Set the AES key (STM32 hardware crypto build).
 *
 * @param [in] aes      AES object.
 * @param [in] userKey  Key bytes.
 * @param [in] keylen   Key length: 16, 24 (with WOLFSSL_AES_192) or 32.
 * @param [in] iv       Initialization vector; may be NULL.
 * @param [in] dir      Direction (unused on this platform).
 * @return  0 on success, BAD_FUNC_ARG on invalid arguments.
 */
int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
    const byte* iv, int dir)
{
    word32 *rk;

    (void)dir;

    if (aes == NULL || (keylen != 16 &&
#ifdef WOLFSSL_AES_192
        keylen != 24 &&
#endif
        keylen != 32)) {
        return BAD_FUNC_ARG;
    }

#ifdef WC_DEBUG_CIPHER_LIFECYCLE
    {
        int ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
        if (ret < 0)
            return ret;
    }
#endif

    rk = aes->key;
    aes->keylen = keylen;
    /* Rounds: 10/12/14 for 16/24/32-byte keys. */
    aes->rounds = keylen/4 + 6;

    XMEMCPY(rk, userKey, keylen);
#if !defined(WOLFSSL_STM32_CUBEMX) || defined(STM32_HAL_V2)
    /* Byte-reverse the key words for the hardware on these builds. */
    ByteReverseWords(rk, rk, keylen);
#endif
#if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
    defined(WOLFSSL_AES_OFB)
    /* Reset the stream-mode leftover-byte count. */
    aes->left = 0;
#endif

    return wc_AesSetIV(aes, iv);
}
  3287. #if defined(WOLFSSL_AES_DIRECT)
  3288. int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
  3289. const byte* iv, int dir)
  3290. {
  3291. return wc_AesSetKey(aes, userKey, keylen, iv, dir);
  3292. }
  3293. #endif
  3294. #elif defined(HAVE_COLDFIRE_SEC)
#if defined (HAVE_THREADX)
    #include "memory_pools.h"
    extern TX_BYTE_POOL mp_ncached;  /* Non Cached memory pool */
#endif

/* Size of the shared staging buffers used for SEC DMA transfers. */
#define AES_BUFFER_SIZE (AES_BLOCK_SIZE * 64)

/* Module-wide buffers allocated once (from the non-cached pool on ThreadX)
 * and shared by all Aes contexts; access is serialized by Mutex_AesSEC. */
static unsigned char *AESBuffIn  = NULL;   /* DMA input staging buffer */
static unsigned char *AESBuffOut = NULL;   /* DMA output staging buffer */
static byte *secReg;                       /* IV/register block for SEC */
static byte *secKey;                       /* key block for SEC */
static volatile SECdescriptorType *secDesc; /* SEC operation descriptor */

static wolfSSL_Mutex Mutex_AesSEC;

/* SEC descriptor header words selecting AES-CBC encrypt/decrypt. */
#define SEC_DESC_AES_CBC_ENCRYPT 0x60300010
#define SEC_DESC_AES_CBC_DECRYPT 0x60200010

/* Base address of the Coldfire peripheral register space. */
extern volatile unsigned char __MBAR[];
/* Set the AES key for the Coldfire SEC build.
 *
 * On first use this also allocates the shared non-cached DMA buffers and
 * initializes the module mutex.
 * NOTE(review): the first-call allocation below is not itself protected by
 * a mutex; concurrent first calls could race - confirm single-threaded init
 * is guaranteed by callers.
 *
 * @param [in] aes     AES object.
 * @param [in] userKey Raw key bytes (16, 24 or 32).
 * @param [in] iv      Optional IV; copied into aes->reg when non-NULL.
 * @param [in] dir     Unused here; direction is encoded in the SEC
 *                     descriptor at crypt time.
 * @return 0 on success, BAD_FUNC_ARG on bad arguments or allocation failure.
 */
int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
    const byte* iv, int dir)
{
    if (AESBuffIn == NULL) {
    #if defined (HAVE_THREADX)
        int s1, s2, s3, s4, s5;

        /* All SEC buffers must live in non-cached memory for DMA. */
        s5 = tx_byte_allocate(&mp_ncached,(void *)&secDesc,
                              sizeof(SECdescriptorType), TX_NO_WAIT);
        s1 = tx_byte_allocate(&mp_ncached, (void *)&AESBuffIn,
                              AES_BUFFER_SIZE, TX_NO_WAIT);
        s2 = tx_byte_allocate(&mp_ncached, (void *)&AESBuffOut,
                              AES_BUFFER_SIZE, TX_NO_WAIT);
        s3 = tx_byte_allocate(&mp_ncached, (void *)&secKey,
                              AES_BLOCK_SIZE*2, TX_NO_WAIT);
        s4 = tx_byte_allocate(&mp_ncached, (void *)&secReg,
                              AES_BLOCK_SIZE, TX_NO_WAIT);

        if (s1 || s2 || s3 || s4 || s5)
            return BAD_FUNC_ARG;
    #else
        #warning "Allocate non-Cache buffers"
    #endif

        wc_InitMutex(&Mutex_AesSEC);
    }

    if (!((keylen == 16) || (keylen == 24) || (keylen == 32)))
        return BAD_FUNC_ARG;

    if (aes == NULL)
        return BAD_FUNC_ARG;

#ifdef WC_DEBUG_CIPHER_LIFECYCLE
    {
        int ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
        if (ret < 0)
            return ret;
    }
#endif

    aes->keylen = keylen;
    aes->rounds = keylen/4 + 6;
    XMEMCPY(aes->key, userKey, keylen);

    if (iv)
        XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE);

#if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
    defined(WOLFSSL_AES_OFB)
    aes->left = 0;
#endif

    return 0;
}
  3354. #elif defined(FREESCALE_LTC)
  3355. int wc_AesSetKeyLocal(Aes* aes, const byte* userKey, word32 keylen,
  3356. const byte* iv, int dir, int checkKeyLen)
  3357. {
  3358. if (aes == NULL)
  3359. return BAD_FUNC_ARG;
  3360. #ifdef WC_DEBUG_CIPHER_LIFECYCLE
  3361. {
  3362. int ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
  3363. if (ret < 0)
  3364. return ret;
  3365. }
  3366. #endif
  3367. if (checkKeyLen) {
  3368. if (!((keylen == 16) || (keylen == 24) || (keylen == 32)))
  3369. return BAD_FUNC_ARG;
  3370. }
  3371. (void)dir;
  3372. aes->rounds = keylen/4 + 6;
  3373. XMEMCPY(aes->key, userKey, keylen);
  3374. #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
  3375. defined(WOLFSSL_AES_OFB)
  3376. aes->left = 0;
  3377. #endif
  3378. return wc_AesSetIV(aes, iv);
  3379. }
  3380. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
  3381. const byte* iv, int dir)
  3382. {
  3383. return wc_AesSetKeyLocal(aes, userKey, keylen, iv, dir, 1);
  3384. }
  3385. int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
  3386. const byte* iv, int dir)
  3387. {
  3388. return wc_AesSetKey(aes, userKey, keylen, iv, dir);
  3389. }
  3390. #elif defined(FREESCALE_MMCAU)
  3391. int wc_AesSetKeyLocal(Aes* aes, const byte* userKey, word32 keylen,
  3392. const byte* iv, int dir, int checkKeyLen)
  3393. {
  3394. int ret;
  3395. byte* rk;
  3396. byte* tmpKey = (byte*)userKey;
  3397. int tmpKeyDynamic = 0;
  3398. word32 alignOffset = 0;
  3399. (void)dir;
  3400. if (aes == NULL)
  3401. return BAD_FUNC_ARG;
  3402. #ifdef WC_DEBUG_CIPHER_LIFECYCLE
  3403. {
  3404. int ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
  3405. if (ret < 0)
  3406. return ret;
  3407. }
  3408. #endif
  3409. if (checkKeyLen) {
  3410. if (!((keylen == 16) || (keylen == 24) || (keylen == 32)))
  3411. return BAD_FUNC_ARG;
  3412. }
  3413. rk = (byte*)aes->key;
  3414. if (rk == NULL)
  3415. return BAD_FUNC_ARG;
  3416. #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
  3417. defined(WOLFSSL_AES_OFB)
  3418. aes->left = 0;
  3419. #endif
  3420. aes->rounds = keylen/4 + 6;
  3421. #ifdef FREESCALE_MMCAU_CLASSIC
  3422. if ((wc_ptr_t)userKey % WOLFSSL_MMCAU_ALIGNMENT) {
  3423. #ifndef NO_WOLFSSL_ALLOC_ALIGN
  3424. byte* tmp = (byte*)XMALLOC(keylen + WOLFSSL_MMCAU_ALIGNMENT,
  3425. aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
  3426. if (tmp == NULL) {
  3427. return MEMORY_E;
  3428. }
  3429. alignOffset = WOLFSSL_MMCAU_ALIGNMENT -
  3430. ((wc_ptr_t)tmp % WOLFSSL_MMCAU_ALIGNMENT);
  3431. tmpKey = tmp + alignOffset;
  3432. XMEMCPY(tmpKey, userKey, keylen);
  3433. tmpKeyDynamic = 1;
  3434. #else
  3435. WOLFSSL_MSG("Bad cau_aes_set_key alignment");
  3436. return BAD_ALIGN_E;
  3437. #endif
  3438. }
  3439. #endif
  3440. ret = wolfSSL_CryptHwMutexLock();
  3441. if(ret == 0) {
  3442. #ifdef FREESCALE_MMCAU_CLASSIC
  3443. cau_aes_set_key(tmpKey, keylen*8, rk);
  3444. #else
  3445. MMCAU_AES_SetKey(tmpKey, keylen, rk);
  3446. #endif
  3447. wolfSSL_CryptHwMutexUnLock();
  3448. ret = wc_AesSetIV(aes, iv);
  3449. }
  3450. if (tmpKeyDynamic == 1) {
  3451. XFREE(tmpKey - alignOffset, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
  3452. }
  3453. return ret;
  3454. }
  3455. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
  3456. const byte* iv, int dir)
  3457. {
  3458. return wc_AesSetKeyLocal(aes, userKey, keylen, iv, dir, 1);
  3459. }
/* Direct-mode key setup for MMCAU; identical to the standard path. */
int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
    const byte* iv, int dir)
{
    return wc_AesSetKey(aes, userKey, keylen, iv, dir);
}
  3465. #elif defined(WOLFSSL_NRF51_AES)
/* Set the AES key for the Nordic NRF51 build.
 *
 * The NRF51 AES peripheral only supports AES-128, so keylen must be 16.
 * The key is both cached in the context and loaded into the peripheral.
 *
 * @param [in] aes     AES object.
 * @param [in] userKey Raw 16-byte key.
 * @param [in] keylen  Must be 16.
 * @param [in] iv      Unused by this port.
 * @param [in] dir     Unused by this port.
 * @return 0 on success, BAD_FUNC_ARG on bad arguments, else the
 *         nrf51_aes_set_key() result.
 */
int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
    const byte* iv, int dir)
{
    int ret;

    (void)dir;
    (void)iv;

    /* HW supports AES-128 only. */
    if (aes == NULL || keylen != 16)
        return BAD_FUNC_ARG;

#ifdef WC_DEBUG_CIPHER_LIFECYCLE
    ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
    if (ret < 0)
        return ret;
#endif

    aes->keylen = keylen;
    aes->rounds = keylen/4 + 6;
    XMEMCPY(aes->key, userKey, keylen);
    ret = nrf51_aes_set_key(userKey);

#if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
    defined(WOLFSSL_AES_OFB)
    aes->left = 0;
#endif

    return ret;
}
  3489. int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
  3490. const byte* iv, int dir)
  3491. {
  3492. return wc_AesSetKey(aes, userKey, keylen, iv, dir);
  3493. }
  3494. #elif defined(WOLFSSL_ESP32_CRYPT) && !defined(NO_WOLFSSL_ESP32_CRYPT_AES)
  3495. /* This is the only definition for HW only.
  3496. * but needs to be renamed when fallback needed.
  3497. * See call in wc_AesSetKey() */
/* Set the AES key for ESP32 hardware crypto.
 *
 * Only key sizes whose WOLFSSL_AES_NNN option is compiled in are accepted;
 * the HW consumes the raw key, so no software expansion is done here.
 *
 * @param [in] aes     AES object.
 * @param [in] userKey Raw key bytes (16, 24 or 32).
 * @param [in] keylen  Key length in bytes.
 * @param [in] iv      Optional IV; NULL selects an all-zero IV.
 * @param [in] dir     Unused; direction is chosen at crypt time.
 * @return 0 on success, BAD_FUNC_ARG on bad arguments.
 */
int wc_AesSetKey_for_ESP32(Aes* aes, const byte* userKey, word32 keylen,
    const byte* iv, int dir)
{
    (void)dir;
    (void)iv;
    ESP_LOGV(TAG, "wc_AesSetKey_for_ESP32");

    if (aes == NULL || (keylen != 16 && keylen != 24 && keylen != 32)) {
        return BAD_FUNC_ARG;
    }

#ifdef WC_DEBUG_CIPHER_LIFECYCLE
    {
        int ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
        if (ret < 0)
            return ret;
    }
#endif

    /* Reject key sizes that are disabled at build time. */
#if !defined(WOLFSSL_AES_128)
    if (keylen == 16) {
        return BAD_FUNC_ARG;
    }
#endif

#if !defined(WOLFSSL_AES_192)
    if (keylen == 24) {
        return BAD_FUNC_ARG;
    }
#endif

#if !defined(WOLFSSL_AES_256)
    if (keylen == 32) {
        return BAD_FUNC_ARG;
    }
#endif

    aes->keylen = keylen;
    aes->rounds = keylen/4 + 6;
    XMEMCPY(aes->key, userKey, keylen);

#if defined(WOLFSSL_AES_COUNTER)
    /* No partial CTR block buffered for a fresh key. (CFB/OFB reset their
     * 'left' via wc_AesSetIV() below.) */
    aes->left = 0;
#endif

    return wc_AesSetIV(aes, iv);
} /* wc_AesSetKey */
  3537. /* end #elif ESP32 */
  3538. #elif defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
  3539. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, const byte* iv,
  3540. int dir)
  3541. {
  3542. SaSiError_t ret = SASI_OK;
  3543. SaSiAesIv_t iv_aes;
  3544. if (aes == NULL ||
  3545. (keylen != AES_128_KEY_SIZE &&
  3546. keylen != AES_192_KEY_SIZE &&
  3547. keylen != AES_256_KEY_SIZE)) {
  3548. return BAD_FUNC_ARG;
  3549. }
  3550. #ifdef WC_DEBUG_CIPHER_LIFECYCLE
  3551. {
  3552. int ret2 =
  3553. wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
  3554. if (ret2 < 0)
  3555. return ret2;
  3556. }
  3557. #endif
  3558. #if defined(AES_MAX_KEY_SIZE)
  3559. if (keylen > (AES_MAX_KEY_SIZE/8)) {
  3560. return BAD_FUNC_ARG;
  3561. }
  3562. #endif
  3563. if (dir != AES_ENCRYPTION &&
  3564. dir != AES_DECRYPTION) {
  3565. return BAD_FUNC_ARG;
  3566. }
  3567. if (dir == AES_ENCRYPTION) {
  3568. aes->ctx.mode = SASI_AES_ENCRYPT;
  3569. SaSi_AesInit(&aes->ctx.user_ctx,
  3570. SASI_AES_ENCRYPT,
  3571. SASI_AES_MODE_CBC,
  3572. SASI_AES_PADDING_NONE);
  3573. }
  3574. else {
  3575. aes->ctx.mode = SASI_AES_DECRYPT;
  3576. SaSi_AesInit(&aes->ctx.user_ctx,
  3577. SASI_AES_DECRYPT,
  3578. SASI_AES_MODE_CBC,
  3579. SASI_AES_PADDING_NONE);
  3580. }
  3581. aes->keylen = keylen;
  3582. aes->rounds = keylen/4 + 6;
  3583. XMEMCPY(aes->key, userKey, keylen);
  3584. aes->ctx.key.pKey = (byte*)aes->key;
  3585. aes->ctx.key.keySize= keylen;
  3586. ret = SaSi_AesSetKey(&aes->ctx.user_ctx,
  3587. SASI_AES_USER_KEY,
  3588. &aes->ctx.key,
  3589. sizeof(aes->ctx.key));
  3590. if (ret != SASI_OK) {
  3591. return BAD_FUNC_ARG;
  3592. }
  3593. ret = wc_AesSetIV(aes, iv);
  3594. if (iv)
  3595. XMEMCPY(iv_aes, iv, AES_BLOCK_SIZE);
  3596. else
  3597. XMEMSET(iv_aes, 0, AES_BLOCK_SIZE);
  3598. ret = SaSi_AesSetIv(&aes->ctx.user_ctx, iv_aes);
  3599. if (ret != SASI_OK) {
  3600. return ret;
  3601. }
  3602. return ret;
  3603. }
  3604. #if defined(WOLFSSL_AES_DIRECT)
  3605. int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
  3606. const byte* iv, int dir)
  3607. {
  3608. return wc_AesSetKey(aes, userKey, keylen, iv, dir);
  3609. }
  3610. #endif
  3611. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) \
  3612. && !defined(WOLFSSL_QNX_CAAM)
  3613. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  3614. #elif defined(WOLFSSL_AFALG)
  3615. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  3616. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  3617. /* implemented in wolfcrypt/src/port/devcrypto/devcrypto_aes.c */
  3618. #elif defined(WOLFSSL_SILABS_SE_ACCEL)
  3619. /* implemented in wolfcrypt/src/port/silabs/silabs_aes.c */
  3620. #elif defined(WOLFSSL_RENESAS_FSPSM_CRYPTONLY) && \
  3621. !defined(NO_WOLFSSL_RENESAS_FSPSM_AES)
  3622. /* implemented in wolfcrypt/src/port/renesas/renesas_fspsm_aes.c */
  3623. #else
  3624. #define NEED_SOFTWARE_AES_SETKEY
  3625. #endif
/* Either we fell through with no HW support at all,
 * or perhaps there's HW support for *some* key lengths
 * and we need both HW and SW. */
  3629. #ifdef NEED_SOFTWARE_AES_SETKEY
  3630. #ifdef NEED_AES_TABLES
  3631. #ifndef WC_AES_BITSLICED
  3632. /* Set the AES key and expand.
  3633. *
  3634. * @param [in] aes AES object.
  3635. * @param [in] key Block to encrypt.
  3636. * @param [in] keySz Number of bytes in key.
  3637. * @param [in] dir Direction of crypt: AES_ENCRYPTION or AES_DECRYPTION.
  3638. */
static void AesSetKey_C(Aes* aes, const byte* key, word32 keySz, int dir)
{
    /* Round keys go to the C fallback schedule when dynamic fallback is
     * enabled; otherwise directly into the context's key schedule. */
#ifdef WC_AES_C_DYNAMIC_FALLBACK
    word32* rk = aes->key_C_fallback;
#else
    word32* rk = aes->key;
#endif
    word32 temp;            /* scratch word for the key-expansion core */
    unsigned int i = 0;     /* round-constant index */

    XMEMCPY(rk, key, keySz);

#if defined(LITTLE_ENDIAN_ORDER) && !defined(WOLFSSL_PIC32MZ_CRYPT) && \
    (!defined(WOLFSSL_ESP32_CRYPT) || defined(NO_WOLFSSL_ESP32_CRYPT_AES))
    /* Always reverse words when using only SW */
    {
        ByteReverseWords(rk, rk, keySz);
    }
#else
    /* Sometimes reverse words when using supported HW */
#if defined(WOLFSSL_ESPIDF)
    /* Some platforms may need SW fallback (e.g. AES192) */
#if defined(NEED_AES_HW_FALLBACK)
    {
        ESP_LOGV(TAG, "wc_AesEncrypt fallback check");
        if (wc_esp32AesSupportedKeyLen(aes)) {
            /* don't reverse for HW supported key lengths */
        }
        else {
            ByteReverseWords(rk, rk, keySz);
        }
    }
#else
    /* If we don't need SW fallback, don't need to reverse words. */
#endif /* NEED_AES_HW_FALLBACK */
#endif /* WOLFSSL_ESPIDF */
#endif /* LITTLE_ENDIAN_ORDER, etc */

    /* Key expansion per key size. Each iteration derives one
     * round-key group from the previous one using the S-box (via the
     * Te tables or Tsbox) and the round constant rcon[i]. */
    switch (keySz) {
#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 128 && \
        defined(WOLFSSL_AES_128)
    case 16:
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        temp = (word32)-1;
        wc_MemZero_Add("wc_AesSetKeyLocal temp", &temp, sizeof(temp));
    #endif
        /* AES-128: 10 rounds, 4 words generated per iteration. */
        while (1)
        {
            temp  = rk[3];
            rk[4] = rk[0] ^
    #ifndef WOLFSSL_AES_SMALL_TABLES
                /* SubWord+RotWord via the large encrypt tables. */
                (GetTable(Te[2], GETBYTE(temp, 2)) & 0xff000000) ^
                (GetTable(Te[3], GETBYTE(temp, 1)) & 0x00ff0000) ^
                (GetTable(Te[0], GETBYTE(temp, 0)) & 0x0000ff00) ^
                (GetTable(Te[1], GETBYTE(temp, 3)) & 0x000000ff) ^
    #else
                /* SubWord+RotWord via the plain S-box. */
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 2)) << 24) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 1)) << 16) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 0)) <<  8) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 3))) ^
    #endif
                rcon[i];
            rk[5] = rk[1] ^ rk[4];
            rk[6] = rk[2] ^ rk[5];
            rk[7] = rk[3] ^ rk[6];
            if (++i == 10)
                break;
            rk += 4;
        }
        break;
#endif /* 128 */

#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 192 && \
        defined(WOLFSSL_AES_192)
    case 24:
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        temp = (word32)-1;
        wc_MemZero_Add("wc_AesSetKeyLocal temp", &temp, sizeof(temp));
    #endif
        /* AES-192: 6 words per iteration, 8 iterations. */
        /* for (;;) here triggers a bug in VC60 SP4 w/ Pro Pack */
        while (1)
        {
            temp = rk[ 5];
            rk[ 6] = rk[ 0] ^
    #ifndef WOLFSSL_AES_SMALL_TABLES
                (GetTable(Te[2], GETBYTE(temp, 2)) & 0xff000000) ^
                (GetTable(Te[3], GETBYTE(temp, 1)) & 0x00ff0000) ^
                (GetTable(Te[0], GETBYTE(temp, 0)) & 0x0000ff00) ^
                (GetTable(Te[1], GETBYTE(temp, 3)) & 0x000000ff) ^
    #else
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 2)) << 24) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 1)) << 16) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 0)) <<  8) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 3))) ^
    #endif
                rcon[i];
            rk[ 7] = rk[ 1] ^ rk[ 6];
            rk[ 8] = rk[ 2] ^ rk[ 7];
            rk[ 9] = rk[ 3] ^ rk[ 8];
            if (++i == 8)
                break;
            rk[10] = rk[ 4] ^ rk[ 9];
            rk[11] = rk[ 5] ^ rk[10];
            rk += 6;
        }
        break;
#endif /* 192 */

#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 256 && \
        defined(WOLFSSL_AES_256)
    case 32:
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        temp = (word32)-1;
        wc_MemZero_Add("wc_AesSetKeyLocal temp", &temp, sizeof(temp));
    #endif
        /* AES-256: 8 words per iteration; the second half applies
         * SubWord without the rotation, per FIPS-197. */
        while (1)
        {
            temp = rk[ 7];
            rk[ 8] = rk[ 0] ^
    #ifndef WOLFSSL_AES_SMALL_TABLES
                (GetTable(Te[2], GETBYTE(temp, 2)) & 0xff000000) ^
                (GetTable(Te[3], GETBYTE(temp, 1)) & 0x00ff0000) ^
                (GetTable(Te[0], GETBYTE(temp, 0)) & 0x0000ff00) ^
                (GetTable(Te[1], GETBYTE(temp, 3)) & 0x000000ff) ^
    #else
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 2)) << 24) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 1)) << 16) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 0)) <<  8) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 3))) ^
    #endif
                rcon[i];
            rk[ 9] = rk[ 1] ^ rk[ 8];
            rk[10] = rk[ 2] ^ rk[ 9];
            rk[11] = rk[ 3] ^ rk[10];
            if (++i == 7)
                break;
            temp = rk[11];
            rk[12] = rk[ 4] ^
    #ifndef WOLFSSL_AES_SMALL_TABLES
                (GetTable(Te[2], GETBYTE(temp, 3)) & 0xff000000) ^
                (GetTable(Te[3], GETBYTE(temp, 2)) & 0x00ff0000) ^
                (GetTable(Te[0], GETBYTE(temp, 1)) & 0x0000ff00) ^
                (GetTable(Te[1], GETBYTE(temp, 0)) & 0x000000ff);
    #else
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 3)) << 24) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 2)) << 16) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 1)) <<  8) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 0)));
    #endif
            rk[13] = rk[ 5] ^ rk[12];
            rk[14] = rk[ 6] ^ rk[13];
            rk[15] = rk[ 7] ^ rk[14];

            rk += 8;
        }
        break;
#endif /* 256 */
    } /* switch */
    ForceZero(&temp, sizeof(temp));

#if defined(HAVE_AES_DECRYPT)
    if (dir == AES_DECRYPTION) {
        unsigned int j;

    #ifdef WC_AES_C_DYNAMIC_FALLBACK
        rk = aes->key_C_fallback;
    #else
        rk = aes->key;
    #endif

        /* invert the order of the round keys: */
        for (i = 0, j = 4 * aes->rounds; i < j; i += 4, j -= 4) {
            temp = rk[i    ]; rk[i    ] = rk[j    ]; rk[j    ] = temp;
            temp = rk[i + 1]; rk[i + 1] = rk[j + 1]; rk[j + 1] = temp;
            temp = rk[i + 2]; rk[i + 2] = rk[j + 2]; rk[j + 2] = temp;
            temp = rk[i + 3]; rk[i + 3] = rk[j + 3]; rk[j + 3] = temp;
        }
        ForceZero(&temp, sizeof(temp));
    #if !defined(WOLFSSL_AES_SMALL_TABLES)
        /* apply the inverse MixColumn transform to all round keys but the
           first and the last: */
        /* (Te[1] composed with Td[] gives InvMixColumns of SubBytes^-1.) */
        for (i = 1; i < aes->rounds; i++) {
            rk += 4;
            rk[0] =
                GetTable(Td[0], GetTable(Te[1], GETBYTE(rk[0], 3)) & 0xff) ^
                GetTable(Td[1], GetTable(Te[1], GETBYTE(rk[0], 2)) & 0xff) ^
                GetTable(Td[2], GetTable(Te[1], GETBYTE(rk[0], 1)) & 0xff) ^
                GetTable(Td[3], GetTable(Te[1], GETBYTE(rk[0], 0)) & 0xff);
            rk[1] =
                GetTable(Td[0], GetTable(Te[1], GETBYTE(rk[1], 3)) & 0xff) ^
                GetTable(Td[1], GetTable(Te[1], GETBYTE(rk[1], 2)) & 0xff) ^
                GetTable(Td[2], GetTable(Te[1], GETBYTE(rk[1], 1)) & 0xff) ^
                GetTable(Td[3], GetTable(Te[1], GETBYTE(rk[1], 0)) & 0xff);
            rk[2] =
                GetTable(Td[0], GetTable(Te[1], GETBYTE(rk[2], 3)) & 0xff) ^
                GetTable(Td[1], GetTable(Te[1], GETBYTE(rk[2], 2)) & 0xff) ^
                GetTable(Td[2], GetTable(Te[1], GETBYTE(rk[2], 1)) & 0xff) ^
                GetTable(Td[3], GetTable(Te[1], GETBYTE(rk[2], 0)) & 0xff);
            rk[3] =
                GetTable(Td[0], GetTable(Te[1], GETBYTE(rk[3], 3)) & 0xff) ^
                GetTable(Td[1], GetTable(Te[1], GETBYTE(rk[3], 2)) & 0xff) ^
                GetTable(Td[2], GetTable(Te[1], GETBYTE(rk[3], 1)) & 0xff) ^
                GetTable(Td[3], GetTable(Te[1], GETBYTE(rk[3], 0)) & 0xff);
        }
    #endif
    }
#else
    (void)dir;
#endif /* HAVE_AES_DECRYPT */

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(&temp, sizeof(temp));
#else
    (void)temp;
#endif
}
  3845. #else /* WC_AES_BITSLICED */
  3846. /* Set the AES key and expand.
  3847. *
  3848. * @param [in] aes AES object.
  3849. * @param [in] key Block to encrypt.
  3850. * @param [in] keySz Number of bytes in key.
  3851. * @param [in] dir Direction of crypt: AES_ENCRYPTION or AES_DECRYPTION.
  3852. */
static void AesSetKey_C(Aes* aes, const byte* key, word32 keySz, int dir)
{
    /* No need to invert when decrypting. */
    (void)dir;

    /* NOTE: reads aes->rounds, so the caller must have set it already
     * (wc_AesSetKeyLocal does, before calling AesSetKey_C). */
    bs_set_key(aes->bs_key, key, keySz, aes->rounds);
}
  3859. #endif /* WC_AES_BITSLICED */
  3860. #endif /* NEED_AES_TABLES */
  3861. /* Software AES - SetKey */
/* Set the AES key (software path, possibly assisted by HW backends).
 *
 * Validates the key size, dispatches to any compile-time-selected hardware
 * backend (MAXQ, CAAM blob/SECO, crypto callbacks, AES-NI, KCAPI, PSA,
 * SE050, ESP32, DCP, SCE), and otherwise expands the key in software via
 * AesSetKey_C() and sets the IV.
 *
 * @param [in] aes         AES object.
 * @param [in] userKey     Raw key bytes.
 * @param [in] keylen      Key length in bytes.
 * @param [in] iv          Optional IV; NULL selects an all-zero IV.
 * @param [in] dir         AES_ENCRYPTION or AES_DECRYPTION.
 * @param [in] checkKeyLen When non-zero, restrict keylen to 16/24/32.
 * @return 0 on success, negative wolfCrypt error code on failure.
 */
static WARN_UNUSED_RESULT int wc_AesSetKeyLocal(
    Aes* aes, const byte* userKey, word32 keylen, const byte* iv, int dir,
    int checkKeyLen)
{
    int ret;
#ifdef WOLFSSL_IMX6_CAAM_BLOB
    byte   local[32];       /* unwrapped key from the CAAM blob */
    word32 localSz = 32;
#endif

    if (aes == NULL)
        return BAD_FUNC_ARG;

#ifdef WC_DEBUG_CIPHER_LIFECYCLE
    ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
    if (ret < 0)
        return ret;
#endif

    /* Accept only key sizes enabled at build time. */
    switch (keylen) {
#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 128 && \
        defined(WOLFSSL_AES_128)
    case 16:
#endif
#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 192 && \
        defined(WOLFSSL_AES_192)
    case 24:
#endif
#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 256 && \
        defined(WOLFSSL_AES_256)
    case 32:
#endif
        break;
    default:
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_MAXQ10XX_CRYPTO
    if (wc_MAXQ10XX_AesSetKey(aes, userKey, keylen) != 0) {
        return WC_HW_E;
    }
#endif

#ifdef WOLFSSL_IMX6_CAAM_BLOB
    /* A blob-wrapped key is larger by WC_CAAM_BLOB_SZ; unwrap it first. */
    if (keylen == (16 + WC_CAAM_BLOB_SZ) ||
        keylen == (24 + WC_CAAM_BLOB_SZ) ||
        keylen == (32 + WC_CAAM_BLOB_SZ)) {
        if (wc_caamOpenBlob((byte*)userKey, keylen, local, &localSz) != 0) {
            return BAD_FUNC_ARG;
        }
        /* set local values */
        userKey = local;
        keylen = localSz;
    }
#endif

#ifdef WOLFSSL_SECO_CAAM
    /* if set to use hardware than import the key */
    if (aes->devId == WOLFSSL_SECO_DEVID) {
        int keyGroup = 1; /* group one was chosen arbitrarily */
        unsigned int keyIdOut;
        byte importiv[GCM_NONCE_MID_SZ];
        int importivSz = GCM_NONCE_MID_SZ;
        int keyType = 0;
        WC_RNG rng;

        /* A random IV is needed for the key-wrap import. */
        if (wc_InitRng(&rng) != 0) {
            WOLFSSL_MSG("RNG init for IV failed");
            return WC_HW_E;
        }

        if (wc_RNG_GenerateBlock(&rng, importiv, importivSz) != 0) {
            WOLFSSL_MSG("Generate IV failed");
            wc_FreeRng(&rng);
            return WC_HW_E;
        }
        wc_FreeRng(&rng);

        if (iv)
            XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE);
        else
            XMEMSET(aes->reg, 0, AES_BLOCK_SIZE);

        switch (keylen) {
            case AES_128_KEY_SIZE: keyType = CAAM_KEYTYPE_AES128; break;
            case AES_192_KEY_SIZE: keyType = CAAM_KEYTYPE_AES192; break;
            case AES_256_KEY_SIZE: keyType = CAAM_KEYTYPE_AES256; break;
        }

        keyIdOut = wc_SECO_WrapKey(0, (byte*)userKey, keylen, importiv,
            importivSz, keyType, CAAM_KEY_TRANSIENT, keyGroup);
        if (keyIdOut == 0) {
            return WC_HW_E;
        }
        aes->blackKey = keyIdOut;
        return 0;
    }
#endif

#if defined(WOLF_CRYPTO_CB) || (defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))) || \
    (defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES))
    #ifdef WOLF_CRYPTO_CB
    /* Crypto callbacks keep a raw copy of the key for the device. */
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        if (keylen > sizeof(aes->devKey)) {
            return BAD_FUNC_ARG;
        }
        XMEMCPY(aes->devKey, userKey, keylen);
    }
#endif

    if (checkKeyLen) {
        if (keylen != 16 && keylen != 24 && keylen != 32) {
            return BAD_FUNC_ARG;
        }
#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE < 256
        /* Check key length only when AES_MAX_KEY_SIZE doesn't allow
         * all key sizes. Otherwise this condition is never true. */
        if (keylen > (AES_MAX_KEY_SIZE / 8)) {
            return BAD_FUNC_ARG;
        }
#endif
    }

#if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
    defined(WOLFSSL_AES_OFB)
    /* Discard any partial stream-mode block from a previous key. */
    aes->left = 0;
#endif

    aes->keylen = (int)keylen;
    aes->rounds = (keylen/4) + 6;

    ret = wc_AesSetIV(aes, iv);
    if (ret != 0)
        return ret;

#ifdef WC_AES_C_DYNAMIC_FALLBACK
#ifdef NEED_AES_TABLES
    /* Always populate the C fallback schedule so SW can take over if
     * vector-register save fails at crypt time. */
    AesSetKey_C(aes, userKey, keylen, dir);
#endif /* NEED_AES_TABLES */
#endif /* WC_AES_C_DYNAMIC_FALLBACK */

#ifdef WOLFSSL_AESNI
    aes->use_aesni = 0;
    /* One-time CPU feature detection. */
    if (checkedAESNI == 0) {
        haveAESNI  = Check_CPU_support_AES();
        checkedAESNI = 1;
    }
    if (haveAESNI) {
    #ifdef WOLFSSL_LINUXKM
        /* runtime alignment check */
        if ((wc_ptr_t)&aes->key & (wc_ptr_t)0xf) {
            return BAD_ALIGN_E;
        }
    #endif /* WOLFSSL_LINUXKM */
        ret = SAVE_VECTOR_REGISTERS2();
        if (ret == 0) {
            if (dir == AES_ENCRYPTION)
                ret = AES_set_encrypt_key_AESNI(userKey, (int)keylen * 8, aes);
        #ifdef HAVE_AES_DECRYPT
            else
                ret = AES_set_decrypt_key_AESNI(userKey, (int)keylen * 8, aes);
        #endif
            RESTORE_VECTOR_REGISTERS();
            if (ret == 0)
                aes->use_aesni = 1;
            else {
            #ifdef WC_AES_C_DYNAMIC_FALLBACK
                /* C schedule was already set up above; fall back silently. */
                ret = 0;
            #endif
            }
            return ret;
        } else {
        #ifdef WC_AES_C_DYNAMIC_FALLBACK
            return 0;
        #else
            return ret;
        #endif
        }
    }
#endif /* WOLFSSL_AESNI */

#ifdef WOLFSSL_KCAPI_AES
    XMEMCPY(aes->devKey, userKey, keylen);
    /* Re-keying invalidates any open kernel-crypto handle. */
    if (aes->init != 0) {
        kcapi_cipher_destroy(aes->handle);
        aes->handle = NULL;
        aes->init = 0;
    }
    (void)dir;
#endif

    if (keylen > sizeof(aes->key)) {
        return BAD_FUNC_ARG;
    }

#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    return wc_psa_aes_set_key(aes, userKey, keylen, (uint8_t*)iv,
        ((psa_algorithm_t)0), dir);
#endif

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    /* wolfSSL HostCrypto in SE05x SDK can request to use SW crypto
     * instead of SE05x crypto by setting useSWCrypt */
    if (aes->useSWCrypt == 0) {
        ret = se050_aes_set_key(aes, userKey, keylen, iv, dir);
        if (ret == 0) {
            ret = wc_AesSetIV(aes, iv);
        }
        return ret;
    }
#endif

    XMEMCPY(aes->key, userKey, keylen);

#ifndef WC_AES_BITSLICED
#if defined(LITTLE_ENDIAN_ORDER) && !defined(WOLFSSL_PIC32MZ_CRYPT) && \
    (!defined(WOLFSSL_ESP32_CRYPT) || \
      defined(NO_WOLFSSL_ESP32_CRYPT_AES))
    /* software */
    ByteReverseWords(aes->key, aes->key, keylen);
#elif defined(WOLFSSL_ESP32_CRYPT) && !defined(NO_WOLFSSL_ESP32_CRYPT_AES)
    if (wc_esp32AesSupportedKeyLen(aes)) {
        /* supported lengths don't get reversed */
        ESP_LOGV(TAG, "wc_AesSetKeyLocal (no ByteReverseWords)");
    }
    else {
        word32* rk = aes->key;

        /* For example, the ESP32-S3 does not support HW for len = 24,
         * so fall back to SW */
    #ifdef DEBUG_WOLFSSL
        ESP_LOGW(TAG, "wc_AesSetKeyLocal ByteReverseWords");
    #endif
        XMEMCPY(rk, userKey, keylen);
        /* When not ESP32 HW, we need to reverse endianness */
        ByteReverseWords(rk, rk, keylen);
    }
#endif

#ifdef WOLFSSL_IMXRT_DCP
    {
        /* Implemented in wolfcrypt/src/port/nxp/dcp_port.c */
        word32 temp = 0;
        if (keylen == 16)
            temp = DCPAesSetKey(aes, userKey, keylen, iv, dir);
        if (temp != 0)
            return WC_HW_E;
    }
#endif
#endif /* !WC_AES_BITSLICED */

#ifdef NEED_AES_TABLES
    AesSetKey_C(aes, userKey, keylen, dir);
#endif /* NEED_AES_TABLES */

#if defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
    XMEMCPY((byte*)aes->key, userKey, keylen);
    if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag == CRYPTO_WORD_ENDIAN_BIG) {
        ByteReverseWords(aes->key, aes->key, 32);
    }
#endif

    ret = wc_AesSetIV(aes, iv);

#if defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))
    /* Mark the /dev/crypto session descriptor as not yet opened. */
    aes->ctx.cfd = -1;
#endif
#ifdef WOLFSSL_IMX6_CAAM_BLOB
    /* Scrub the unwrapped key material from the stack. */
    ForceZero(local, sizeof(local));
#endif
    return ret;
} /* wc_AesSetKeyLocal */
/* Public AES key setup (software path with optional HW fast path).
 *
 * @param [in] aes     AES object.
 * @param [in] userKey Raw key bytes.
 * @param [in] keylen  Key length in bytes (16, 24 or 32 enforced below).
 * @param [in] iv      Optional IV; NULL selects an all-zero IV.
 * @param [in] dir     AES_ENCRYPTION or AES_DECRYPTION.
 * @return 0 on success, negative wolfCrypt error code on failure.
 */
int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
    const byte* iv, int dir)
{
    if (aes == NULL) {
        return BAD_FUNC_ARG;
    }
    if (keylen > sizeof(aes->key)) {
        return BAD_FUNC_ARG;
    }

    /* sometimes hardware may not support all keylengths (e.g. ESP32-S3) */
#if defined(WOLFSSL_ESPIDF) && defined(NEED_AES_HW_FALLBACK)
    ESP_LOGV(TAG, "wc_AesSetKey fallback check %d", keylen);
    if (wc_esp32AesSupportedKeyLenValue(keylen)) {
        ESP_LOGV(TAG, "wc_AesSetKey calling wc_AesSetKey_for_ESP32");
        return wc_AesSetKey_for_ESP32(aes, userKey, keylen, iv, dir);
    }
    else {
    #if defined(WOLFSSL_HW_METRICS)
        /* It is interesting to know how many times we could not complete
         * AES in hardware due to unsupported lengths. */
        wc_esp32AesUnupportedLengthCountAdd();
    #endif
    #ifdef DEBUG_WOLFSSL
        ESP_LOGW(TAG, "wc_AesSetKey HW Fallback, unsupported keylen = %d",
            keylen);
    #endif
    }
#endif /* WOLFSSL_ESPIDF && NEED_AES_HW_FALLBACK */

    /* checkKeyLen = 1: only 128/192/256-bit keys are accepted. */
    return wc_AesSetKeyLocal(aes, userKey, keylen, iv, dir, 1);
} /* wc_AesSetKey() */
  4138. #if defined(WOLFSSL_AES_DIRECT) || defined(WOLFSSL_AES_COUNTER)
  4139. /* AES-CTR and AES-DIRECT need to use this for key setup */
  4140. /* This function allows key sizes that are not 128/192/256 bits */
  4141. int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
  4142. const byte* iv, int dir)
  4143. {
  4144. if (aes == NULL) {
  4145. return BAD_FUNC_ARG;
  4146. }
  4147. if (keylen > sizeof(aes->key)) {
  4148. return BAD_FUNC_ARG;
  4149. }
  4150. return wc_AesSetKeyLocal(aes, userKey, keylen, iv, dir, 0);
  4151. }
  4152. #endif /* WOLFSSL_AES_DIRECT || WOLFSSL_AES_COUNTER */
  4153. #endif /* wc_AesSetKey block */
  4154. /* wc_AesSetIV is shared between software and hardware */
  4155. int wc_AesSetIV(Aes* aes, const byte* iv)
  4156. {
  4157. if (aes == NULL)
  4158. return BAD_FUNC_ARG;
  4159. #ifdef WC_DEBUG_CIPHER_LIFECYCLE
  4160. {
  4161. int ret = wc_debug_CipherLifecycleCheck(aes->CipherLifecycleTag, 0);
  4162. if (ret < 0)
  4163. return ret;
  4164. }
  4165. #endif
  4166. if (iv)
  4167. XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE);
  4168. else
  4169. XMEMSET(aes->reg, 0, AES_BLOCK_SIZE);
  4170. #if defined(WOLFSSL_AES_COUNTER) || defined(WOLFSSL_AES_CFB) || \
  4171. defined(WOLFSSL_AES_OFB) || defined(WOLFSSL_AES_XTS)
  4172. /* Clear any unused bytes from last cipher op. */
  4173. aes->left = 0;
  4174. #endif
  4175. return 0;
  4176. }
#ifdef WOLFSSL_AESNI

/* VECTOR_REGISTERS_PUSH/POP bracket code that may run AES-NI instructions.
 * PUSH opens a scope (note the deliberately unbalanced '{', closed by POP)
 * and saves vector registers when AES-NI is in use; POP restores them.
 * Both expand to statements terminated by the caller's ';' via
 * WC_DO_NOTHING. */
#ifdef WC_AES_C_DYNAMIC_FALLBACK

/* Dynamic-fallback variant: if saving the vector registers fails, drop to
 * the C implementation for the bracketed region by clearing use_aesni,
 * then restore the original flag in POP. */
#define VECTOR_REGISTERS_PUSH { \
        int orig_use_aesni = aes->use_aesni; \
        if (aes->use_aesni && (SAVE_VECTOR_REGISTERS2() != 0)) { \
            aes->use_aesni = 0; \
        } \
        WC_DO_NOTHING

#define VECTOR_REGISTERS_POP \
        if (aes->use_aesni) \
            RESTORE_VECTOR_REGISTERS(); \
        else \
            aes->use_aesni = orig_use_aesni; \
    } \
    WC_DO_NOTHING

#else

/* Strict variant: a failed register save aborts the enclosing function
 * (requires an 'int ret' and an 'aes' pointer in the caller's scope). */
#define VECTOR_REGISTERS_PUSH { \
        if (aes->use_aesni && ((ret = SAVE_VECTOR_REGISTERS2()) != 0)) { \
            return ret; \
        } \
        WC_DO_NOTHING

#define VECTOR_REGISTERS_POP \
        if (aes->use_aesni) { \
            RESTORE_VECTOR_REGISTERS(); \
        } \
    } \
    WC_DO_NOTHING

#endif

#else /* !WOLFSSL_AESNI */

/* No AES-NI: the macros reduce to an empty scope. */
#define VECTOR_REGISTERS_PUSH { WC_DO_NOTHING
#define VECTOR_REGISTERS_POP } WC_DO_NOTHING

#endif /* !WOLFSSL_AESNI */
/* AES-DIRECT */
#if defined(WOLFSSL_AES_DIRECT)
    #if defined(HAVE_COLDFIRE_SEC)
        #error "Coldfire SEC doesn't yet support AES direct"
    #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
        !defined(WOLFSSL_QNX_CAAM)
        /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
    #elif defined(WOLFSSL_AFALG)
        /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
    #elif defined(WOLFSSL_DEVCRYPTO_AES)
        /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
    #else

    /* Allow direct access to one block encrypt.
     * Public single-block ECB-style encrypt: encrypts exactly one
     * AES_BLOCK_SIZE block from `in` into `out`.  Wraps the internal
     * wc_AesEncrypt() with vector-register save/restore so AES-NI state is
     * handled for external callers. */
    int wc_AesEncryptDirect(Aes* aes, byte* out, const byte* in)
    {
        int ret;
        if (aes == NULL)
            return BAD_FUNC_ARG;
        VECTOR_REGISTERS_PUSH;
        ret = wc_AesEncrypt(aes, in, out);
        VECTOR_REGISTERS_POP;
        return ret;
    }

    /* vector reg save/restore is explicit in all below calls to
     * wc_Aes{En,De}cryptDirect(), so bypass the public version with a
     * macro.
     * NOTE: the macro swaps the argument order — wc_AesEncrypt() takes
     * (aes, in, out) while the public direct API takes (aes, out, in). */
    #define wc_AesEncryptDirect(aes, out, in) wc_AesEncrypt(aes, in, out)

    #ifdef HAVE_AES_DECRYPT
    /* Allow direct access to one block decrypt.
     * Single-block decrypt counterpart of wc_AesEncryptDirect(). */
    int wc_AesDecryptDirect(Aes* aes, byte* out, const byte* in)
    {
        int ret;
        if (aes == NULL)
            return BAD_FUNC_ARG;
        VECTOR_REGISTERS_PUSH;
        ret = wc_AesDecrypt(aes, in, out);
        VECTOR_REGISTERS_POP;
        return ret;
    }
    /* Same shadowing trick (and argument swap) as the encrypt macro above. */
    #define wc_AesDecryptDirect(aes, out, in) wc_AesDecrypt(aes, in, out)
    #endif /* HAVE_AES_DECRYPT */

    #endif /* AES direct block */
#endif /* WOLFSSL_AES_DIRECT */
/* AES-CBC */
#ifdef HAVE_AES_CBC
#if defined(STM32_CRYPTO)

#ifdef WOLFSSL_STM32_CUBEMX
/* AES-CBC encrypt using the STM32 CubeMX HAL CRYP peripheral.
 * Processes only whole blocks (a trailing partial block is silently
 * ignored unless WOLFSSL_AES_CBC_LENGTH_CHECKS rejects it first).
 * Returns 0, BAD_LENGTH_E, a mutex/init error, or WC_TIMEOUT_E. */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret = 0;
    CRYP_HandleTypeDef hcryp;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0)
        return ret;

    /* serialize access to the single CRYP peripheral */
    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_CBC;
    /* HAL v2 expects the IV words byte-reversed in place */
    ByteReverseWords(aes->reg, aes->reg, AES_BLOCK_SIZE);
#elif defined(STM32_CRYPTO_AES_ONLY)
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
    hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_CBC;
    hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
#endif
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
    HAL_CRYP_Init(&hcryp);

#if defined(STM32_HAL_V2)
    ret = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in, blocks * AES_BLOCK_SIZE,
        (uint32_t*)out, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
    ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)in, blocks * AES_BLOCK_SIZE,
        out, STM32_HAL_TIMEOUT);
#else
    ret = HAL_CRYP_AESCBC_Encrypt(&hcryp, (uint8_t*)in,
        blocks * AES_BLOCK_SIZE,
        out, STM32_HAL_TIMEOUT);
#endif
    if (ret != HAL_OK) {
        ret = WC_TIMEOUT_E;
    }

    /* store iv for next call: last ciphertext block becomes the next IV.
     * NOTE(review): this copy happens even when the HAL reported a timeout —
     * confirm callers treat the context as invalid on error. */
    XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

    HAL_CRYP_DeInit(&hcryp);
    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();

    return ret;
}
#ifdef HAVE_AES_DECRYPT
/* AES-CBC decrypt using the STM32 CubeMX HAL CRYP peripheral.
 * Mirrors wc_AesCbcEncrypt() above; the next-call IV is the last
 * ciphertext block, captured into aes->tmp BEFORE decryption because
 * in and out may alias. */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret = 0;
    CRYP_HandleTypeDef hcryp;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0)
        return ret;

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

    /* if input and output same will overwrite input iv */
    XMEMCPY(aes->tmp, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_CBC;
    /* HAL v2 expects the IV words byte-reversed in place */
    ByteReverseWords(aes->reg, aes->reg, AES_BLOCK_SIZE);
#elif defined(STM32_CRYPTO_AES_ONLY)
    /* this HW derives the decrypt key schedule itself */
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_KEYDERIVATION_DECRYPT;
    hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_CBC;
    hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
#endif
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
    HAL_CRYP_Init(&hcryp);

#if defined(STM32_HAL_V2)
    ret = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)in, blocks * AES_BLOCK_SIZE,
        (uint32_t*)out, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
    ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)in, blocks * AES_BLOCK_SIZE,
        out, STM32_HAL_TIMEOUT);
#else
    ret = HAL_CRYP_AESCBC_Decrypt(&hcryp, (uint8_t*)in,
        blocks * AES_BLOCK_SIZE,
        out, STM32_HAL_TIMEOUT);
#endif
    if (ret != HAL_OK) {
        ret = WC_TIMEOUT_E;
    }

    /* store iv for next call */
    XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);

    HAL_CRYP_DeInit(&hcryp);
    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();

    return ret;
}
#endif /* HAVE_AES_DECRYPT */
#else /* Standard Peripheral Library */
/* AES-CBC encrypt using the STM32 Standard Peripheral Library CRYP engine:
 * program key and IV, then feed/drain the FIFOs one block at a time.
 * Returns 0, BAD_LENGTH_E, or an init/mutex error. */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret;
    word32 *iv;
    CRYP_InitTypeDef cryptInit;
    CRYP_KeyInitTypeDef keyInit;
    CRYP_IVInitTypeDef ivInit;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
    if (ret != 0)
        return ret;

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

    /* reset registers to their default values */
    CRYP_DeInit();

    /* set key */
    CRYP_KeyInit(&keyInit);

    /* set iv (the peripheral wants the words byte-reversed) */
    iv = aes->reg;
    CRYP_IVStructInit(&ivInit);
    ByteReverseWords(iv, iv, AES_BLOCK_SIZE);
    ivInit.CRYP_IV0Left  = iv[0];
    ivInit.CRYP_IV0Right = iv[1];
    ivInit.CRYP_IV1Left  = iv[2];
    ivInit.CRYP_IV1Right = iv[3];
    CRYP_IVInit(&ivInit);

    /* set direction and mode */
    cryptInit.CRYP_AlgoDir  = CRYP_AlgoDir_Encrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_CBC;
    CRYP_Init(&cryptInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    while (blocks--) {
        /* flush IN/OUT FIFOs */
        CRYP_FIFOFlush();

        CRYP_DataIn(*(uint32_t*)&in[0]);
        CRYP_DataIn(*(uint32_t*)&in[4]);
        CRYP_DataIn(*(uint32_t*)&in[8]);
        CRYP_DataIn(*(uint32_t*)&in[12]);

        /* wait until the complete message has been processed */
        while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

        *(uint32_t*)&out[0]  = CRYP_DataOut();
        *(uint32_t*)&out[4]  = CRYP_DataOut();
        *(uint32_t*)&out[8]  = CRYP_DataOut();
        *(uint32_t*)&out[12] = CRYP_DataOut();

        /* store iv for next call.
         * NOTE(review): `out + sz - AES_BLOCK_SIZE` evaluates to the same
         * (final-block) address on every iteration because out advances as
         * sz shrinks; only the last iteration copies valid data, the earlier
         * copies are redundant — confirm before hoisting out of the loop. */
        XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

        sz  -= AES_BLOCK_SIZE;
        in  += AES_BLOCK_SIZE;
        out += AES_BLOCK_SIZE;
    }

    /* disable crypto processor */
    CRYP_Cmd(DISABLE);
    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();

    return ret;
}
#ifdef HAVE_AES_DECRYPT
/* AES-CBC decrypt using the STM32 Standard Peripheral Library CRYP engine.
 * Runs a key-derivation pass (CRYP_AlgoMode_AES_Key) before switching the
 * engine into CBC decrypt mode, then feeds/drains the FIFOs per block. */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret;
    word32 *iv;
    CRYP_InitTypeDef cryptInit;
    CRYP_KeyInitTypeDef keyInit;
    CRYP_IVInitTypeDef ivInit;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
    if (ret != 0)
        return ret;

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

    /* if input and output same will overwrite input iv */
    XMEMCPY(aes->tmp, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

    /* reset registers to their default values */
    CRYP_DeInit();

    /* set direction and key */
    CRYP_KeyInit(&keyInit);
    cryptInit.CRYP_AlgoDir  = CRYP_AlgoDir_Decrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_Key;
    CRYP_Init(&cryptInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    /* wait until key has been prepared */
    while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

    /* set direction and mode */
    cryptInit.CRYP_AlgoDir  = CRYP_AlgoDir_Decrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_CBC;
    CRYP_Init(&cryptInit);

    /* set iv (words must be byte-reversed for the peripheral) */
    iv = aes->reg;
    CRYP_IVStructInit(&ivInit);
    ByteReverseWords(iv, iv, AES_BLOCK_SIZE);
    ivInit.CRYP_IV0Left  = iv[0];
    ivInit.CRYP_IV0Right = iv[1];
    ivInit.CRYP_IV1Left  = iv[2];
    ivInit.CRYP_IV1Right = iv[3];
    CRYP_IVInit(&ivInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    while (blocks--) {
        /* flush IN/OUT FIFOs */
        CRYP_FIFOFlush();

        CRYP_DataIn(*(uint32_t*)&in[0]);
        CRYP_DataIn(*(uint32_t*)&in[4]);
        CRYP_DataIn(*(uint32_t*)&in[8]);
        CRYP_DataIn(*(uint32_t*)&in[12]);

        /* wait until the complete message has been processed */
        while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

        *(uint32_t*)&out[0]  = CRYP_DataOut();
        *(uint32_t*)&out[4]  = CRYP_DataOut();
        *(uint32_t*)&out[8]  = CRYP_DataOut();
        *(uint32_t*)&out[12] = CRYP_DataOut();

        /* store iv for next call (saved last ciphertext block) */
        XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);

        in  += AES_BLOCK_SIZE;
        out += AES_BLOCK_SIZE;
    }

    /* disable crypto processor */
    CRYP_Cmd(DISABLE);
    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();

    return ret;
}
#endif /* HAVE_AES_DECRYPT */
#endif /* WOLFSSL_STM32_CUBEMX */
#elif defined(HAVE_COLDFIRE_SEC)
/* Shared AES-CBC worker for the Coldfire SEC engine.
 * Builds a SEC DMA descriptor (IV, key, in/out buffers), kicks the channel,
 * and busy-polls until the descriptor header reports completion.  Data is
 * staged through AESBuffIn/AESBuffOut in AES_BUFFER_SIZE chunks.
 * descHeader selects encrypt vs. decrypt.  Returns 0 or BAD_FUNC_ARG. */
static WARN_UNUSED_RESULT int wc_AesCbcCrypt(
    Aes* aes, byte* po, const byte* pi, word32 sz, word32 descHeader)
{
#ifdef DEBUG_WOLFSSL
    int i; int stat1, stat2; int ret;
#endif
    int size;
    volatile int v;

    if ((pi == NULL) || (po == NULL))
        return BAD_FUNC_ARG;    /*wrong pointer*/

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif

    wc_LockMutex(&Mutex_AesSEC);

    /* Set descriptor for SEC */
    secDesc->length1 = 0x0;
    secDesc->pointer1 = NULL;

    secDesc->length2 = AES_BLOCK_SIZE;
    secDesc->pointer2 = (byte *)secReg; /* Initial Vector */

    /* key length follows the round count set at key-setup time */
    switch(aes->rounds) {
        case 10: secDesc->length3 = 16; break;
        case 12: secDesc->length3 = 24; break;
        case 14: secDesc->length3 = 32; break;
    }
    XMEMCPY(secKey, aes->key, secDesc->length3);

    secDesc->pointer3 = (byte *)secKey;
    secDesc->pointer4 = AESBuffIn;
    secDesc->pointer5 = AESBuffOut;
    secDesc->length6 = 0x0;
    secDesc->pointer6 = NULL;
    secDesc->length7 = 0x0;
    secDesc->pointer7 = NULL;
    secDesc->nextDescriptorPtr = NULL;

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    /* NOTE(review): with length checks enabled, every chunk is assumed to be
     * a full AES_BUFFER_SIZE and sz is decremented unconditionally below —
     * this presumes sz is a multiple of AES_BUFFER_SIZE; verify callers. */
    size = AES_BUFFER_SIZE;
#endif
    while (sz) {
        secDesc->header = descHeader;
        XMEMCPY(secReg, aes->reg, AES_BLOCK_SIZE);
#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
        sz -= AES_BUFFER_SIZE;
#else
        if (sz < AES_BUFFER_SIZE) {
            size = sz;
            sz = 0;
        } else {
            size = AES_BUFFER_SIZE;
            sz -= AES_BUFFER_SIZE;
        }
#endif

        secDesc->length4 = size;
        secDesc->length5 = size;

        XMEMCPY(AESBuffIn, pi, size);
        if(descHeader == SEC_DESC_AES_CBC_DECRYPT) {
            /* save last ciphertext block now; pi may alias po */
            XMEMCPY((void*)aes->tmp, (void*)&(pi[size-AES_BLOCK_SIZE]),
                    AES_BLOCK_SIZE);
        }

        /* Point SEC to the location of the descriptor */
        MCF_SEC_FR0 = (uint32)secDesc;
        /* Initialize SEC and wait for encryption to complete */
        MCF_SEC_CCCR0 = 0x0000001a;
        /* poll SISR to determine when channel is complete */
        v=0;
        while ((secDesc->header>> 24) != 0xff) v++;

#ifdef DEBUG_WOLFSSL
        ret = MCF_SEC_SISRH;
        stat1 = MCF_SEC_AESSR;
        stat2 = MCF_SEC_AESISR;
        if (ret & 0xe0000000) {
            /* NOTE(review): `i` appears to be used uninitialized here —
             * confirm; it is declared but never assigned in this function. */
            db_printf("Aes_Cbc(i=%d):ISRH=%08x, AESSR=%08x, "
                      "AESISR=%08x\n", i, ret, stat1, stat2);
        }
#endif

        XMEMCPY(po, AESBuffOut, size);

        if (descHeader == SEC_DESC_AES_CBC_ENCRYPT) {
            /* next IV = last ciphertext block just produced */
            XMEMCPY((void*)aes->reg, (void*)&(po[size-AES_BLOCK_SIZE]),
                    AES_BLOCK_SIZE);
        } else {
            /* next IV = last ciphertext block saved above */
            XMEMCPY((void*)aes->reg, (void*)aes->tmp, AES_BLOCK_SIZE);
        }

        pi += size;
        po += size;
    }

    wc_UnLockMutex(&Mutex_AesSEC);
    return 0;
}
  4594. int wc_AesCbcEncrypt(Aes* aes, byte* po, const byte* pi, word32 sz)
  4595. {
  4596. return (wc_AesCbcCrypt(aes, po, pi, sz, SEC_DESC_AES_CBC_ENCRYPT));
  4597. }
  4598. #ifdef HAVE_AES_DECRYPT
  4599. int wc_AesCbcDecrypt(Aes* aes, byte* po, const byte* pi, word32 sz)
  4600. {
  4601. return (wc_AesCbcCrypt(aes, po, pi, sz, SEC_DESC_AES_CBC_DECRYPT));
  4602. }
  4603. #endif /* HAVE_AES_DECRYPT */
#elif defined(FREESCALE_LTC)
/* AES-CBC encrypt via the NXP/Freescale LTC accelerator.
 * Whole blocks only; on success the last ciphertext block is kept in
 * aes->reg as the next-call IV.  Returns 0 on success, a key-size or
 * mutex error, or -1 on a generic LTC failure. */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 keySize;
    status_t status;
    byte *iv, *enc_key;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    iv      = (byte*)aes->reg;
    enc_key = (byte*)aes->key;

    status = wc_AesGetKeySize(aes, &keySize);
    if (status != 0) {
        return status;
    }

    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    status = LTC_AES_EncryptCbc(LTC_BASE, in, out, blocks * AES_BLOCK_SIZE,
        iv, enc_key, keySize);
    wolfSSL_CryptHwMutexUnLock();

    /* store iv for next call */
    if (status == kStatus_Success) {
        XMEMCPY(iv, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
    }
    return (status == kStatus_Success) ? 0 : -1;
}
#ifdef HAVE_AES_DECRYPT
/* AES-CBC decrypt via the NXP/Freescale LTC accelerator.
 * The last ciphertext block is captured before the HW call (in/out may
 * alias) and becomes the next-call IV on success. */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 keySize;
    status_t status;
    byte* iv, *dec_key;
    byte temp_block[AES_BLOCK_SIZE];
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    iv      = (byte*)aes->reg;
    dec_key = (byte*)aes->key;

    status = wc_AesGetKeySize(aes, &keySize);
    if (status != 0) {
        return status;
    }

    /* get IV for next call */
    XMEMCPY(temp_block, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    /* kLTC_EncryptKey: the stored key schedule is the encrypt-form key */
    status = LTC_AES_DecryptCbc(LTC_BASE, in, out, blocks * AES_BLOCK_SIZE,
        iv, dec_key, keySize, kLTC_EncryptKey);
    wolfSSL_CryptHwMutexUnLock();

    /* store IV for next call */
    if (status == kStatus_Success) {
        XMEMCPY(iv, temp_block, AES_BLOCK_SIZE);
    }
    return (status == kStatus_Success) ? 0 : -1;
}
#endif /* HAVE_AES_DECRYPT */
  4672. #elif defined(FREESCALE_MMCAU)
  4673. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  4674. {
  4675. int offset = 0;
  4676. byte *iv;
  4677. byte temp_block[AES_BLOCK_SIZE];
  4678. word32 blocks = (sz / AES_BLOCK_SIZE);
  4679. int ret;
  4680. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  4681. if (sz % AES_BLOCK_SIZE) {
  4682. return BAD_LENGTH_E;
  4683. }
  4684. #endif
  4685. if (blocks == 0)
  4686. return 0;
  4687. iv = (byte*)aes->reg;
  4688. while (blocks--) {
  4689. XMEMCPY(temp_block, in + offset, AES_BLOCK_SIZE);
  4690. /* XOR block with IV for CBC */
  4691. xorbuf(temp_block, iv, AES_BLOCK_SIZE);
  4692. ret = wc_AesEncrypt(aes, temp_block, out + offset);
  4693. if (ret != 0)
  4694. return ret;
  4695. offset += AES_BLOCK_SIZE;
  4696. /* store IV for next block */
  4697. XMEMCPY(iv, out + offset - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  4698. }
  4699. return 0;
  4700. }
  4701. #ifdef HAVE_AES_DECRYPT
  4702. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  4703. {
  4704. int ret;
  4705. int offset = 0;
  4706. byte* iv;
  4707. byte temp_block[AES_BLOCK_SIZE];
  4708. word32 blocks = (sz / AES_BLOCK_SIZE);
  4709. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  4710. if (sz % AES_BLOCK_SIZE) {
  4711. return BAD_LENGTH_E;
  4712. }
  4713. #endif
  4714. if (blocks == 0)
  4715. return 0;
  4716. iv = (byte*)aes->reg;
  4717. while (blocks--) {
  4718. XMEMCPY(temp_block, in + offset, AES_BLOCK_SIZE);
  4719. ret = wc_AesDecrypt(aes, in + offset, out + offset);
  4720. if (ret != 0)
  4721. return ret;
  4722. /* XOR block with IV for CBC */
  4723. xorbuf(out + offset, iv, AES_BLOCK_SIZE);
  4724. /* store IV for next block */
  4725. XMEMCPY(iv, temp_block, AES_BLOCK_SIZE);
  4726. offset += AES_BLOCK_SIZE;
  4727. }
  4728. return 0;
  4729. }
  4730. #endif /* HAVE_AES_DECRYPT */
  4731. #elif defined(WOLFSSL_PIC32MZ_CRYPT)
  4732. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  4733. {
  4734. int ret;
  4735. if (sz == 0)
  4736. return 0;
  4737. /* hardware fails on input that is not a multiple of AES block size */
  4738. if (sz % AES_BLOCK_SIZE != 0) {
  4739. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  4740. return BAD_LENGTH_E;
  4741. #else
  4742. return BAD_FUNC_ARG;
  4743. #endif
  4744. }
  4745. ret = wc_Pic32AesCrypt(
  4746. aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
  4747. out, in, sz, PIC32_ENCRYPTION,
  4748. PIC32_ALGO_AES, PIC32_CRYPTOALGO_RCBC);
  4749. /* store iv for next call */
  4750. if (ret == 0) {
  4751. XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  4752. }
  4753. return ret;
  4754. }
  4755. #ifdef HAVE_AES_DECRYPT
  4756. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  4757. {
  4758. int ret;
  4759. byte scratch[AES_BLOCK_SIZE];
  4760. if (sz == 0)
  4761. return 0;
  4762. /* hardware fails on input that is not a multiple of AES block size */
  4763. if (sz % AES_BLOCK_SIZE != 0) {
  4764. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  4765. return BAD_LENGTH_E;
  4766. #else
  4767. return BAD_FUNC_ARG;
  4768. #endif
  4769. }
  4770. XMEMCPY(scratch, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  4771. ret = wc_Pic32AesCrypt(
  4772. aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
  4773. out, in, sz, PIC32_DECRYPTION,
  4774. PIC32_ALGO_AES, PIC32_CRYPTOALGO_RCBC);
  4775. /* store iv for next call */
  4776. if (ret == 0) {
  4777. XMEMCPY((byte*)aes->reg, scratch, AES_BLOCK_SIZE);
  4778. }
  4779. return ret;
  4780. }
  4781. #endif /* HAVE_AES_DECRYPT */
#elif defined(WOLFSSL_ESP32_CRYPT) && \
    !defined(NO_WOLFSSL_ESP32_CRYPT_AES)
    /* We'll use SW for fall back:
     *   unsupported key lengths
     *   hardware busy
     * i.e. ESP32 compiles BOTH the software path below and its own HW
     * CBC functions, choosing at run time. */
    #define NEED_SW_AESCBC
    #define NEED_AESCBC_HW_FALLBACK

#elif defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
    /* CryptoCell: the SaSi context was configured at key-setup time and
     * carries mode/direction/IV state internally, so both directions reduce
     * to the same SaSi_AesBlock() call. */
    int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
    {
        return SaSi_AesBlock(&aes->ctx.user_ctx, (uint8_t*)in, sz, out);
    }
    int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
    {
        return SaSi_AesBlock(&aes->ctx.user_ctx, (uint8_t*)in, sz, out);
    }
#elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
    !defined(WOLFSSL_QNX_CAAM)
    /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
#elif defined(WOLFSSL_AFALG)
    /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
#elif defined(WOLFSSL_KCAPI_AES) && !defined(WOLFSSL_NO_KCAPI_AES_CBC)
    /* implemented in wolfcrypt/src/port/kcapi/kcapi_aes.c */
#elif defined(WOLFSSL_DEVCRYPTO_CBC)
    /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
#elif defined(WOLFSSL_SILABS_SE_ACCEL)
    /* implemented in wolfcrypt/src/port/silabs/silabs_aes.c */
#elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    /* implemented in wolfcrypt/src/port/psa/psa_aes.c */
#else
    /* Reminder: Some HW implementations may also define this as needed.
     * (e.g. for unsupported key length fallback) */
    #define NEED_SW_AESCBC
#endif
#ifdef NEED_SW_AESCBC
/* Software AES - CBC Encrypt.
 * Dispatch order: IMXRT DCP (128-bit keys) -> crypto callback -> async ->
 * SE050 / ESP32-HW / AES-NI -> generic C loop.  Whole blocks only; with
 * WOLFSSL_AES_CBC_LENGTH_CHECKS a ragged length returns BAD_LENGTH_E,
 * otherwise the trailing partial block is ignored.  aes->reg holds the
 * chaining IV across calls. */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 blocks;
    int ret;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    if (sz == 0) {
        return 0;
    }

    blocks = sz / AES_BLOCK_SIZE;
#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        WOLFSSL_ERROR_VERBOSE(BAD_LENGTH_E);
        return BAD_LENGTH_E;
    }
#endif

#ifdef WOLFSSL_IMXRT_DCP
    /* Implemented in wolfcrypt/src/port/nxp/dcp_port.c */
    if (aes->keylen == 16)
        return DCPAesCbcEncrypt(aes, out, in, sz);
#endif

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret = wc_CryptoCb_AesCbcEncrypt(aes, out, in, sz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    /* if async and byte count above threshold */
    if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
            sz >= WC_ASYNC_THRESH_AES_CBC) {
    #if defined(HAVE_CAVIUM)
        return NitroxAesCbcEncrypt(aes, out, in, sz);
    #elif defined(HAVE_INTEL_QA)
        return IntelQaSymAesCbcEncrypt(&aes->asyncDev, out, in, sz,
            (const byte*)aes->devKey, aes->keylen,
            (byte*)aes->reg, AES_BLOCK_SIZE);
    #elif defined(WOLFSSL_ASYNC_CRYPT_SW)
        if (wc_AsyncSwInit(&aes->asyncDev, ASYNC_SW_AES_CBC_ENCRYPT)) {
            WC_ASYNC_SW* sw = &aes->asyncDev.sw;
            sw->aes.aes = aes;
            sw->aes.out = out;
            sw->aes.in = in;
            sw->aes.sz = sz;
            return WC_PENDING_E;
        }
    #endif
    }
#endif /* WOLFSSL_ASYNC_CRYPT */

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    /* Implemented in wolfcrypt/src/port/nxp/se050_port.c */
    if (aes->useSWCrypt == 0) {
        return se050_aes_crypt(aes, in, out, sz, AES_ENCRYPTION,
                               kAlgorithm_SSS_AES_CBC);
    }
    else
#elif defined(WOLFSSL_ESPIDF) && defined(NEED_AESCBC_HW_FALLBACK)
    if (wc_esp32AesSupportedKeyLen(aes)) {
        ESP_LOGV(TAG, "wc_AesCbcEncrypt calling wc_esp32AesCbcEncrypt");
        return wc_esp32AesCbcEncrypt(aes, out, in, sz);
    }
    else {
        /* For example, the ESP32-S3 does not support HW for len = 24,
         * so fall back to SW */
    #ifdef DEBUG_WOLFSSL
        ESP_LOGW(TAG, "wc_AesCbcEncrypt HW Falling back, "
                      "unsupported keylen = %d", aes->keylen);
    #endif
    }
#elif defined(WOLFSSL_AESNI)
    /* NOTE(review): PUSH is compiled only in this #elif arm, while the
     * matching POP below is guarded by a plain #ifdef WOLFSSL_AESNI —
     * presumably the SE050/ESPIDF configs are never combined with AESNI;
     * confirm, since combining them would unbalance the macro braces. */
    VECTOR_REGISTERS_PUSH;
    if (aes->use_aesni) {
#ifdef DEBUG_AESNI
        printf("about to aes cbc encrypt\n");
        printf("in  = %p\n", in);
        printf("out = %p\n", out);
        printf("aes->key = %p\n", aes->key);
        printf("aes->reg = %p\n", aes->reg);
        printf("aes->rounds = %d\n", aes->rounds);
        printf("sz = %d\n", sz);
#endif
        /* check alignment, decrypt doesn't need alignment */
        if ((wc_ptr_t)in % AESNI_ALIGN) {
#ifndef NO_WOLFSSL_ALLOC_ALIGN
            /* stage through an AESNI_ALIGN-aligned bounce buffer */
            byte* tmp = (byte*)XMALLOC(sz + AES_BLOCK_SIZE + AESNI_ALIGN,
                                       aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
            byte* tmp_align;
            if (tmp == NULL)
                ret = MEMORY_E;
            else {
                tmp_align = tmp + (AESNI_ALIGN - ((wc_ptr_t)tmp % AESNI_ALIGN));
                XMEMCPY(tmp_align, in, sz);
                AES_CBC_encrypt_AESNI(tmp_align, tmp_align, (byte*)aes->reg, sz,
                                      (byte*)aes->key, (int)aes->rounds);
                /* store iv for next call */
                XMEMCPY(aes->reg, tmp_align + sz - AES_BLOCK_SIZE,
                        AES_BLOCK_SIZE);
                XMEMCPY(out, tmp_align, sz);
                XFREE(tmp, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
                ret = 0;
            }
#else
            WOLFSSL_MSG("AES-CBC encrypt with bad alignment");
            WOLFSSL_ERROR_VERBOSE(BAD_ALIGN_E);
            ret = BAD_ALIGN_E;
#endif
        } else {
            AES_CBC_encrypt_AESNI(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
                                  (int)aes->rounds);
            /* store iv for next call */
            XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
            ret = 0;
        }
    }
    else
#endif
    {
        /* generic C path: encrypt in aes->reg so the ciphertext block is
         * already in place as the next IV */
        ret = 0;
        while (blocks--) {
            xorbuf((byte*)aes->reg, in, AES_BLOCK_SIZE);
            ret = wc_AesEncrypt(aes, (byte*)aes->reg, (byte*)aes->reg);
            if (ret != 0)
                break;
            XMEMCPY(out, aes->reg, AES_BLOCK_SIZE);

            out += AES_BLOCK_SIZE;
            in  += AES_BLOCK_SIZE;
        }
    }

#ifdef WOLFSSL_AESNI
    VECTOR_REGISTERS_POP;
#endif

    return ret;
} /* wc_AesCbcEncrypt */
  4956. #ifdef HAVE_AES_DECRYPT
  4957. /* Software AES - CBC Decrypt */
  4958. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  4959. {
  4960. word32 blocks;
  4961. int ret;
  4962. if (aes == NULL || out == NULL || in == NULL) {
  4963. return BAD_FUNC_ARG;
  4964. }
  4965. if (sz == 0) {
  4966. return 0;
  4967. }
  4968. #if defined(WOLFSSL_ESPIDF) && defined(NEED_AESCBC_HW_FALLBACK)
  4969. if (wc_esp32AesSupportedKeyLen(aes)) {
  4970. ESP_LOGV(TAG, "wc_AesCbcDecrypt calling wc_esp32AesCbcDecrypt");
  4971. return wc_esp32AesCbcDecrypt(aes, out, in, sz);
  4972. }
  4973. else {
  4974. /* For example, the ESP32-S3 does not support HW for len = 24,
  4975. * so fall back to SW */
  4976. #ifdef DEBUG_WOLFSSL
  4977. ESP_LOGW(TAG, "wc_AesCbcDecrypt HW Falling back, "
  4978. "unsupported keylen = %d", aes->keylen);
  4979. #endif
  4980. }
  4981. #endif
  4982. blocks = sz / AES_BLOCK_SIZE;
  4983. if (sz % AES_BLOCK_SIZE) {
  4984. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  4985. return BAD_LENGTH_E;
  4986. #else
  4987. return BAD_FUNC_ARG;
  4988. #endif
  4989. }
  4990. #ifdef WOLFSSL_IMXRT_DCP
  4991. /* Implemented in wolfcrypt/src/port/nxp/dcp_port.c */
  4992. if (aes->keylen == 16)
  4993. return DCPAesCbcDecrypt(aes, out, in, sz);
  4994. #endif
  4995. #ifdef WOLF_CRYPTO_CB
  4996. #ifndef WOLF_CRYPTO_CB_FIND
  4997. if (aes->devId != INVALID_DEVID)
  4998. #endif
  4999. {
  5000. int crypto_cb_ret = wc_CryptoCb_AesCbcDecrypt(aes, out, in, sz);
  5001. if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
  5002. return crypto_cb_ret;
  5003. /* fall-through when unavailable */
  5004. }
  5005. #endif
  5006. #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
  5007. /* if async and byte count above threshold */
  5008. if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
  5009. sz >= WC_ASYNC_THRESH_AES_CBC) {
  5010. #if defined(HAVE_CAVIUM)
  5011. return NitroxAesCbcDecrypt(aes, out, in, sz);
  5012. #elif defined(HAVE_INTEL_QA)
  5013. return IntelQaSymAesCbcDecrypt(&aes->asyncDev, out, in, sz,
  5014. (const byte*)aes->devKey, aes->keylen,
  5015. (byte*)aes->reg, AES_BLOCK_SIZE);
  5016. #elif defined(WOLFSSL_ASYNC_CRYPT_SW)
  5017. if (wc_AsyncSwInit(&aes->asyncDev, ASYNC_SW_AES_CBC_DECRYPT)) {
  5018. WC_ASYNC_SW* sw = &aes->asyncDev.sw;
  5019. sw->aes.aes = aes;
  5020. sw->aes.out = out;
  5021. sw->aes.in = in;
  5022. sw->aes.sz = sz;
  5023. return WC_PENDING_E;
  5024. }
  5025. #endif
  5026. }
  5027. #endif
  5028. #if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
  5029. /* Implemented in wolfcrypt/src/port/nxp/se050_port.c */
  5030. if (aes->useSWCrypt == 0) {
  5031. return se050_aes_crypt(aes, in, out, sz, AES_DECRYPTION,
  5032. kAlgorithm_SSS_AES_CBC);
  5033. }
  5034. #endif
  5035. VECTOR_REGISTERS_PUSH;
  5036. #ifdef WOLFSSL_AESNI
  5037. if (aes->use_aesni) {
  5038. #ifdef DEBUG_AESNI
  5039. printf("about to aes cbc decrypt\n");
  5040. printf("in = %p\n", in);
  5041. printf("out = %p\n", out);
  5042. printf("aes->key = %p\n", aes->key);
  5043. printf("aes->reg = %p\n", aes->reg);
  5044. printf("aes->rounds = %d\n", aes->rounds);
  5045. printf("sz = %d\n", sz);
  5046. #endif
  5047. /* if input and output same will overwrite input iv */
  5048. XMEMCPY(aes->tmp, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  5049. #if defined(WOLFSSL_AESNI_BY4) || defined(WOLFSSL_X86_BUILD)
  5050. AES_CBC_decrypt_AESNI_by4(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
  5051. aes->rounds);
  5052. #elif defined(WOLFSSL_AESNI_BY6)
  5053. AES_CBC_decrypt_AESNI_by6(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
  5054. aes->rounds);
  5055. #else /* WOLFSSL_AESNI_BYx */
  5056. AES_CBC_decrypt_AESNI_by8(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
  5057. (int)aes->rounds);
  5058. #endif /* WOLFSSL_AESNI_BYx */
  5059. /* store iv for next call */
  5060. XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
  5061. ret = 0;
  5062. }
  5063. else
  5064. #endif
  5065. {
  5066. ret = 0;
  5067. #ifdef WC_AES_BITSLICED
  5068. if (in != out) {
  5069. unsigned char dec[AES_BLOCK_SIZE * BS_WORD_SIZE];
  5070. while (blocks > BS_WORD_SIZE) {
  5071. AesDecryptBlocks_C(aes, in, dec, AES_BLOCK_SIZE * BS_WORD_SIZE);
  5072. xorbufout(out, dec, aes->reg, AES_BLOCK_SIZE);
  5073. xorbufout(out + AES_BLOCK_SIZE, dec + AES_BLOCK_SIZE, in,
  5074. AES_BLOCK_SIZE * (BS_WORD_SIZE - 1));
  5075. XMEMCPY(aes->reg, in + (AES_BLOCK_SIZE * (BS_WORD_SIZE - 1)),
  5076. AES_BLOCK_SIZE);
  5077. in += AES_BLOCK_SIZE * BS_WORD_SIZE;
  5078. out += AES_BLOCK_SIZE * BS_WORD_SIZE;
  5079. blocks -= BS_WORD_SIZE;
  5080. }
  5081. if (blocks > 0) {
  5082. AesDecryptBlocks_C(aes, in, dec, blocks * AES_BLOCK_SIZE);
  5083. xorbufout(out, dec, aes->reg, AES_BLOCK_SIZE);
  5084. xorbufout(out + AES_BLOCK_SIZE, dec + AES_BLOCK_SIZE, in,
  5085. AES_BLOCK_SIZE * (blocks - 1));
  5086. XMEMCPY(aes->reg, in + (AES_BLOCK_SIZE * (blocks - 1)),
  5087. AES_BLOCK_SIZE);
  5088. blocks = 0;
  5089. }
  5090. }
  5091. else {
  5092. unsigned char dec[AES_BLOCK_SIZE * BS_WORD_SIZE];
  5093. int i;
  5094. while (blocks > BS_WORD_SIZE) {
  5095. AesDecryptBlocks_C(aes, in, dec, AES_BLOCK_SIZE * BS_WORD_SIZE);
  5096. XMEMCPY(aes->tmp, in + (BS_WORD_SIZE - 1) * AES_BLOCK_SIZE,
  5097. AES_BLOCK_SIZE);
  5098. for (i = BS_WORD_SIZE-1; i >= 1; i--) {
  5099. xorbufout(out + i * AES_BLOCK_SIZE,
  5100. dec + i * AES_BLOCK_SIZE, in + (i - 1) * AES_BLOCK_SIZE,
  5101. AES_BLOCK_SIZE);
  5102. }
  5103. xorbufout(out, dec, aes->reg, AES_BLOCK_SIZE);
  5104. XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
  5105. in += AES_BLOCK_SIZE * BS_WORD_SIZE;
  5106. out += AES_BLOCK_SIZE * BS_WORD_SIZE;
  5107. blocks -= BS_WORD_SIZE;
  5108. }
  5109. if (blocks > 0) {
  5110. AesDecryptBlocks_C(aes, in, dec, blocks * AES_BLOCK_SIZE);
  5111. XMEMCPY(aes->tmp, in + (blocks - 1) * AES_BLOCK_SIZE,
  5112. AES_BLOCK_SIZE);
  5113. for (i = blocks-1; i >= 1; i--) {
  5114. xorbufout(out + i * AES_BLOCK_SIZE,
  5115. dec + i * AES_BLOCK_SIZE, in + (i - 1) * AES_BLOCK_SIZE,
  5116. AES_BLOCK_SIZE);
  5117. }
  5118. xorbufout(out, dec, aes->reg, AES_BLOCK_SIZE);
  5119. XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
  5120. blocks = 0;
  5121. }
  5122. }
  5123. #else
  5124. while (blocks--) {
  5125. XMEMCPY(aes->tmp, in, AES_BLOCK_SIZE);
  5126. ret = wc_AesDecrypt(aes, in, out);
  5127. if (ret != 0)
  5128. return ret;
  5129. xorbuf(out, (byte*)aes->reg, AES_BLOCK_SIZE);
  5130. /* store iv for next call */
  5131. XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
  5132. out += AES_BLOCK_SIZE;
  5133. in += AES_BLOCK_SIZE;
  5134. }
  5135. #endif
  5136. }
  5137. VECTOR_REGISTERS_POP;
  5138. return ret;
  5139. }
  5140. #endif /* HAVE_AES_DECRYPT */
  5141. #endif /* AES-CBC block */
  5142. #endif /* HAVE_AES_CBC */
  5143. /* AES-CTR */
  5144. #if defined(WOLFSSL_AES_COUNTER)
  5145. #ifdef STM32_CRYPTO
  5146. #define NEED_AES_CTR_SOFT
  5147. #define XTRANSFORM_AESCTRBLOCK wc_AesCtrEncryptBlock
/* Encrypt one AES block (AES_BLOCK_SIZE bytes) in CTR mode using the STM32
 * CRYP/AES hardware peripheral.
 *
 * aes  AES context; key material and current counter block (aes->reg).
 * out  destination for one keystream-encrypted block.
 * in   source block.
 *
 * Returns 0 on success, an error from init/mutex-lock, or WC_TIMEOUT_E when
 * the HAL call does not return HAL_OK.
 *
 * NOTE: the counter in aes->reg is advanced by the software CTR loop that
 * invokes this routine (via XTRANSFORM_AESCTRBLOCK), not here.
 */
int wc_AesCtrEncryptBlock(Aes* aes, byte* out, const byte* in)
{
    int ret = 0;
#ifdef WOLFSSL_STM32_CUBEMX
    CRYP_HandleTypeDef hcryp;
#ifdef STM32_HAL_V2
    /* HAL v2 takes a word-reversed copy of the counter */
    word32 iv[AES_BLOCK_SIZE/sizeof(word32)];
#endif
#else
    word32 *iv;
    CRYP_InitTypeDef cryptInit;
    CRYP_KeyInitTypeDef keyInit;
    CRYP_IVInitTypeDef ivInit;
#endif

#ifdef WOLFSSL_STM32_CUBEMX
    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0) {
        return ret;
    }
    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_CTR;
    /* aes->reg holds the big-endian counter; swap words for the peripheral */
    ByteReverseWords(iv, aes->reg, AES_BLOCK_SIZE);
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)iv;
#elif defined(STM32_CRYPTO_AES_ONLY)
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
    hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_CTR;
    hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
#else
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
#endif
    HAL_CRYP_Init(&hcryp);

#if defined(STM32_HAL_V2)
    ret = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in, AES_BLOCK_SIZE,
        (uint32_t*)out, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
    ret = HAL_CRYPEx_AES(&hcryp, (byte*)in, AES_BLOCK_SIZE,
        out, STM32_HAL_TIMEOUT);
#else
    ret = HAL_CRYP_AESCTR_Encrypt(&hcryp, (byte*)in, AES_BLOCK_SIZE,
        out, STM32_HAL_TIMEOUT);
#endif
    if (ret != HAL_OK) {
        /* map any HAL failure (busy/timeout/error) to a timeout error */
        ret = WC_TIMEOUT_E;
    }
    HAL_CRYP_DeInit(&hcryp);

#else /* Standard Peripheral Library */
    ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
    if (ret != 0) {
        return ret;
    }
    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

    /* reset registers to their default values */
    CRYP_DeInit();

    /* set key */
    CRYP_KeyInit(&keyInit);

    /* set iv */
    iv = aes->reg;
    CRYP_IVStructInit(&ivInit);
    /* counter is stored big-endian; peripheral registers want host order */
    ivInit.CRYP_IV0Left  = ByteReverseWord32(iv[0]);
    ivInit.CRYP_IV0Right = ByteReverseWord32(iv[1]);
    ivInit.CRYP_IV1Left  = ByteReverseWord32(iv[2]);
    ivInit.CRYP_IV1Right = ByteReverseWord32(iv[3]);
    CRYP_IVInit(&ivInit);

    /* set direction and mode */
    cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Encrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_CTR;
    CRYP_Init(&cryptInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    /* flush IN/OUT FIFOs */
    CRYP_FIFOFlush();

    /* feed one 16-byte block, 32 bits at a time */
    CRYP_DataIn(*(uint32_t*)&in[0]);
    CRYP_DataIn(*(uint32_t*)&in[4]);
    CRYP_DataIn(*(uint32_t*)&in[8]);
    CRYP_DataIn(*(uint32_t*)&in[12]);

    /* wait until the complete message has been processed */
    while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

    *(uint32_t*)&out[0]  = CRYP_DataOut();
    *(uint32_t*)&out[4]  = CRYP_DataOut();
    *(uint32_t*)&out[8]  = CRYP_DataOut();
    *(uint32_t*)&out[12] = CRYP_DataOut();

    /* disable crypto processor */
    CRYP_Cmd(DISABLE);
#endif /* WOLFSSL_STM32_CUBEMX */

    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();
    return ret;
}
  5244. #elif defined(WOLFSSL_PIC32MZ_CRYPT)
  5245. #define NEED_AES_CTR_SOFT
  5246. #define XTRANSFORM_AESCTRBLOCK wc_AesCtrEncryptBlock
  5247. int wc_AesCtrEncryptBlock(Aes* aes, byte* out, const byte* in)
  5248. {
  5249. word32 tmpIv[AES_BLOCK_SIZE / sizeof(word32)];
  5250. XMEMCPY(tmpIv, aes->reg, AES_BLOCK_SIZE);
  5251. return wc_Pic32AesCrypt(
  5252. aes->key, aes->keylen, tmpIv, AES_BLOCK_SIZE,
  5253. out, in, AES_BLOCK_SIZE,
  5254. PIC32_ENCRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_RCTR);
  5255. }
  5256. #elif defined(HAVE_COLDFIRE_SEC)
  5257. #error "Coldfire SEC doesn't currently support AES-CTR mode"
  5258. #elif defined(FREESCALE_LTC)
  5259. int wc_AesCtrEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  5260. {
  5261. int ret = 0;
  5262. word32 keySize;
  5263. byte *iv, *enc_key;
  5264. byte* tmp;
  5265. if (aes == NULL || out == NULL || in == NULL) {
  5266. return BAD_FUNC_ARG;
  5267. }
  5268. /* consume any unused bytes left in aes->tmp */
  5269. tmp = (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left;
  5270. while (aes->left && sz) {
  5271. *(out++) = *(in++) ^ *(tmp++);
  5272. aes->left--;
  5273. sz--;
  5274. }
  5275. if (sz) {
  5276. iv = (byte*)aes->reg;
  5277. enc_key = (byte*)aes->key;
  5278. ret = wc_AesGetKeySize(aes, &keySize);
  5279. if (ret != 0)
  5280. return ret;
  5281. ret = wolfSSL_CryptHwMutexLock();
  5282. if (ret != 0)
  5283. return ret;
  5284. LTC_AES_CryptCtr(LTC_BASE, in, out, sz,
  5285. iv, enc_key, keySize, (byte*)aes->tmp,
  5286. (uint32_t*)&aes->left);
  5287. wolfSSL_CryptHwMutexUnLock();
  5288. }
  5289. return ret;
  5290. }
  5291. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  5292. !defined(WOLFSSL_QNX_CAAM)
  5293. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  5294. #elif defined(WOLFSSL_AFALG)
  5295. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  5296. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  5297. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  5298. #elif defined(WOLFSSL_ESP32_CRYPT) && \
  5299. !defined(NO_WOLFSSL_ESP32_CRYPT_AES)
/* esp32 doesn't support CTR mode in hardware. */
/* use AES encryption plus software implementation */
  5302. #define NEED_AES_CTR_SOFT
  5303. #elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
  5304. /* implemented in wolfcrypt/src/port/psa/psa_aes.c */
  5305. #else
  5306. /* Use software based AES counter */
  5307. #define NEED_AES_CTR_SOFT
  5308. #endif
  5309. #ifdef NEED_AES_CTR_SOFT
  5310. /* Increment AES counter */
  5311. static WC_INLINE void IncrementAesCounter(byte* inOutCtr)
  5312. {
  5313. /* in network byte order so start at end and work back */
  5314. int i;
  5315. for (i = AES_BLOCK_SIZE - 1; i >= 0; i--) {
  5316. if (++inOutCtr[i]) /* we're done unless we overflow */
  5317. return;
  5318. }
  5319. }
  5320. /* Software AES - CTR Encrypt */
  5321. int wc_AesCtrEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  5322. {
  5323. byte scratch[AES_BLOCK_SIZE];
  5324. int ret = 0;
  5325. word32 processed;
  5326. if (aes == NULL || out == NULL || in == NULL) {
  5327. return BAD_FUNC_ARG;
  5328. }
  5329. #ifdef WOLF_CRYPTO_CB
  5330. #ifndef WOLF_CRYPTO_CB_FIND
  5331. if (aes->devId != INVALID_DEVID)
  5332. #endif
  5333. {
  5334. int crypto_cb_ret = wc_CryptoCb_AesCtrEncrypt(aes, out, in, sz);
  5335. if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
  5336. return crypto_cb_ret;
  5337. /* fall-through when unavailable */
  5338. }
  5339. #endif
  5340. /* consume any unused bytes left in aes->tmp */
  5341. processed = min(aes->left, sz);
  5342. xorbufout(out, in, (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left,
  5343. processed);
  5344. out += processed;
  5345. in += processed;
  5346. aes->left -= processed;
  5347. sz -= processed;
  5348. VECTOR_REGISTERS_PUSH;
  5349. #if defined(HAVE_AES_ECB) && !defined(WOLFSSL_PIC32MZ_CRYPT) && \
  5350. !defined(XTRANSFORM_AESCTRBLOCK)
  5351. if (in != out && sz >= AES_BLOCK_SIZE) {
  5352. word32 blocks = sz / AES_BLOCK_SIZE;
  5353. byte* counter = (byte*)aes->reg;
  5354. byte* c = out;
  5355. while (blocks--) {
  5356. XMEMCPY(c, counter, AES_BLOCK_SIZE);
  5357. c += AES_BLOCK_SIZE;
  5358. IncrementAesCounter(counter);
  5359. }
  5360. /* reset number of blocks and then do encryption */
  5361. blocks = sz / AES_BLOCK_SIZE;
  5362. wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
  5363. xorbuf(out, in, AES_BLOCK_SIZE * blocks);
  5364. in += AES_BLOCK_SIZE * blocks;
  5365. out += AES_BLOCK_SIZE * blocks;
  5366. sz -= blocks * AES_BLOCK_SIZE;
  5367. }
  5368. else
  5369. #endif
  5370. {
  5371. #ifdef WOLFSSL_CHECK_MEM_ZERO
  5372. wc_MemZero_Add("wc_AesCtrEncrypt scratch", scratch,
  5373. AES_BLOCK_SIZE);
  5374. #endif
  5375. /* do as many block size ops as possible */
  5376. while (sz >= AES_BLOCK_SIZE) {
  5377. #ifdef XTRANSFORM_AESCTRBLOCK
  5378. XTRANSFORM_AESCTRBLOCK(aes, out, in);
  5379. #else
  5380. ret = wc_AesEncrypt(aes, (byte*)aes->reg, scratch);
  5381. if (ret != 0)
  5382. break;
  5383. xorbuf(scratch, in, AES_BLOCK_SIZE);
  5384. XMEMCPY(out, scratch, AES_BLOCK_SIZE);
  5385. #endif
  5386. IncrementAesCounter((byte*)aes->reg);
  5387. out += AES_BLOCK_SIZE;
  5388. in += AES_BLOCK_SIZE;
  5389. sz -= AES_BLOCK_SIZE;
  5390. aes->left = 0;
  5391. }
  5392. ForceZero(scratch, AES_BLOCK_SIZE);
  5393. }
  5394. /* handle non block size remaining and store unused byte count in left */
  5395. if ((ret == 0) && sz) {
  5396. ret = wc_AesEncrypt(aes, (byte*)aes->reg, (byte*)aes->tmp);
  5397. if (ret == 0) {
  5398. IncrementAesCounter((byte*)aes->reg);
  5399. aes->left = AES_BLOCK_SIZE - sz;
  5400. xorbufout(out, in, aes->tmp, sz);
  5401. }
  5402. }
  5403. if (ret < 0)
  5404. ForceZero(scratch, AES_BLOCK_SIZE);
  5405. #ifdef WOLFSSL_CHECK_MEM_ZERO
  5406. wc_MemZero_Check(scratch, AES_BLOCK_SIZE);
  5407. #endif
  5408. VECTOR_REGISTERS_POP;
  5409. return ret;
  5410. }
  5411. int wc_AesCtrSetKey(Aes* aes, const byte* key, word32 len,
  5412. const byte* iv, int dir)
  5413. {
  5414. if (aes == NULL) {
  5415. return BAD_FUNC_ARG;
  5416. }
  5417. if (len > sizeof(aes->key)) {
  5418. return BAD_FUNC_ARG;
  5419. }
  5420. return wc_AesSetKeyLocal(aes, key, len, iv, dir, 0);
  5421. }
  5422. #endif /* NEED_AES_CTR_SOFT */
  5423. #endif /* WOLFSSL_AES_COUNTER */
  5424. #endif /* !WOLFSSL_ARMASM */
  5425. /*
  5426. * The IV for AES GCM and CCM, stored in struct Aes's member reg, is comprised
  5427. * of two parts in order:
  5428. * 1. The fixed field which may be 0 or 4 bytes long. In TLS, this is set
  5429. * to the implicit IV.
  5430. * 2. The explicit IV is generated by wolfCrypt. It needs to be managed
  5431. * by wolfCrypt to ensure the IV is unique for each call to encrypt.
  5432. * The IV may be a 96-bit random value, or the 32-bit fixed value and a
  5433. * 64-bit set of 0 or random data. The final 32-bits of reg is used as a
  5434. * block counter during the encryption.
  5435. */
  5436. #if (defined(HAVE_AESGCM) && !defined(WC_NO_RNG)) || defined(HAVE_AESCCM)
  5437. static WC_INLINE void IncCtr(byte* ctr, word32 ctrSz)
  5438. {
  5439. int i;
  5440. for (i = (int)ctrSz - 1; i >= 0; i--) {
  5441. if (++ctr[i])
  5442. break;
  5443. }
  5444. }
  5445. #endif /* HAVE_AESGCM || HAVE_AESCCM */
  5446. #ifdef HAVE_AESGCM
  5447. #ifdef WOLFSSL_AESGCM_STREAM
  5448. /* Access initialization counter data. */
  5449. #define AES_INITCTR(aes) ((aes)->streamData + 0 * AES_BLOCK_SIZE)
  5450. /* Access counter data. */
  5451. #define AES_COUNTER(aes) ((aes)->streamData + 1 * AES_BLOCK_SIZE)
  5452. /* Access tag data. */
  5453. #define AES_TAG(aes) ((aes)->streamData + 2 * AES_BLOCK_SIZE)
  5454. /* Access last GHASH block. */
  5455. #define AES_LASTGBLOCK(aes) ((aes)->streamData + 3 * AES_BLOCK_SIZE)
  5456. /* Access last encrypted block. */
  5457. #define AES_LASTBLOCK(aes) ((aes)->streamData + 4 * AES_BLOCK_SIZE)
  5458. #endif
  5459. #if defined(HAVE_COLDFIRE_SEC)
  5460. #error "Coldfire SEC doesn't currently support AES-GCM mode"
  5461. #endif
  5462. #ifdef WOLFSSL_ARMASM
  5463. /* implementation is located in wolfcrypt/src/port/arm/armv8-aes.c */
  5464. #elif defined(WOLFSSL_AFALG)
  5465. /* implemented in wolfcrypt/src/port/afalg/afalg_aes.c */
  5466. #elif defined(WOLFSSL_KCAPI_AES)
  5467. /* implemented in wolfcrypt/src/port/kcapi/kcapi_aes.c */
  5468. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  5469. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  5470. #else /* software + AESNI implementation */
  5471. #if !defined(FREESCALE_LTC_AES_GCM)
  5472. static WC_INLINE void IncrementGcmCounter(byte* inOutCtr)
  5473. {
  5474. int i;
  5475. /* in network byte order so start at end and work back */
  5476. for (i = AES_BLOCK_SIZE - 1; i >= AES_BLOCK_SIZE - CTR_SZ; i--) {
  5477. if (++inOutCtr[i]) /* we're done unless we overflow */
  5478. return;
  5479. }
  5480. }
  5481. #endif /* !FREESCALE_LTC_AES_GCM */
  5482. #if defined(GCM_SMALL) || defined(GCM_TABLE) || defined(GCM_TABLE_4BIT)
  5483. static WC_INLINE void FlattenSzInBits(byte* buf, word32 sz)
  5484. {
  5485. /* Multiply the sz by 8 */
  5486. word32 szHi = (sz >> (8*sizeof(sz) - 3));
  5487. sz <<= 3;
  5488. /* copy over the words of the sz into the destination buffer */
  5489. buf[0] = (byte)(szHi >> 24);
  5490. buf[1] = (byte)(szHi >> 16);
  5491. buf[2] = (byte)(szHi >> 8);
  5492. buf[3] = (byte)szHi;
  5493. buf[4] = (byte)(sz >> 24);
  5494. buf[5] = (byte)(sz >> 16);
  5495. buf[6] = (byte)(sz >> 8);
  5496. buf[7] = (byte)sz;
  5497. }
/* Right-shift the 128-bit value x by one bit with GF(2^128) reduction, as
 * used by GHASH (multiply by the field element x).
 * If the bit shifted out of the end (lsb of x[15]) is set, the reduction
 * constant 0xE1 (top byte of the GCM polynomial R) is folded back into
 * x[0]. The borrow mask is computed branch-free from that lsb. */
static WC_INLINE void RIGHTSHIFTX(byte* x)
{
    int i;
    int carryIn = 0;
    /* 0xE1 if x[15] has its low bit set, else 0x00 */
    byte borrow = (0x00 - (x[15] & 0x01)) & 0xE1;

    /* shift each byte right by one, carrying the dropped bit into the
     * next byte's msb */
    for (i = 0; i < AES_BLOCK_SIZE; i++) {
        int carryOut = (x[i] & 0x01) << 7;
        x[i] = (byte) ((x[i] >> 1) | carryIn);
        carryIn = carryOut;
    }
    /* apply the reduction to the top byte */
    x[0] ^= borrow;
}
  5510. #endif /* defined(GCM_SMALL) || defined(GCM_TABLE) || defined(GCM_TABLE_4BIT) */
  5511. #ifdef GCM_TABLE
/* Precompute the 256-entry GHASH multiplication table M0 from the hash key
 * H (GCM_TABLE build): entry m[b] is the GF(2^128) product b*H, so a full
 * byte multiply becomes one table lookup.
 *
 * gcm  GCM state whose H has already been set; M0 is filled in place. */
void GenerateM0(Gcm* gcm)
{
    int i, j;
    byte (*m)[AES_BLOCK_SIZE] = gcm->M0;

    /* m[0x80] = H; each halving of the index shifts H right once */
    XMEMCPY(m[128], gcm->H, AES_BLOCK_SIZE);

    for (i = 64; i > 0; i /= 2) {
        XMEMCPY(m[i], m[i*2], AES_BLOCK_SIZE);
        RIGHTSHIFTX(m[i]);
    }

    /* all remaining entries are XOR combinations of the power-of-two
     * entries: m[i+j] = m[i] ^ m[j] for j < i */
    for (i = 2; i < 256; i *= 2) {
        for (j = 1; j < i; j++) {
            XMEMCPY(m[i+j], m[i], AES_BLOCK_SIZE);
            xorbuf(m[i+j], m[j], AES_BLOCK_SIZE);
        }
    }

    /* 0 * H = 0 */
    XMEMSET(m[0], 0, AES_BLOCK_SIZE);
}
  5529. #elif defined(GCM_TABLE_4BIT)
  5530. #if !defined(BIG_ENDIAN_ORDER) && !defined(WC_16BIT_CPU)
  5531. static WC_INLINE void Shift4_M0(byte *r8, byte *z8)
  5532. {
  5533. int i;
  5534. for (i = 15; i > 0; i--)
  5535. r8[i] = (byte)(z8[i-1] << 4) | (byte)(z8[i] >> 4);
  5536. r8[0] = (byte)(z8[0] >> 4);
  5537. }
  5538. #endif
  5539. void GenerateM0(Gcm* gcm)
  5540. {
  5541. #if !defined(BIG_ENDIAN_ORDER) && !defined(WC_16BIT_CPU)
  5542. int i;
  5543. #endif
  5544. byte (*m)[AES_BLOCK_SIZE] = gcm->M0;
  5545. /* 0 times -> 0x0 */
  5546. XMEMSET(m[0x0], 0, AES_BLOCK_SIZE);
  5547. /* 1 times -> 0x8 */
  5548. XMEMCPY(m[0x8], gcm->H, AES_BLOCK_SIZE);
  5549. /* 2 times -> 0x4 */
  5550. XMEMCPY(m[0x4], m[0x8], AES_BLOCK_SIZE);
  5551. RIGHTSHIFTX(m[0x4]);
  5552. /* 4 times -> 0x2 */
  5553. XMEMCPY(m[0x2], m[0x4], AES_BLOCK_SIZE);
  5554. RIGHTSHIFTX(m[0x2]);
  5555. /* 8 times -> 0x1 */
  5556. XMEMCPY(m[0x1], m[0x2], AES_BLOCK_SIZE);
  5557. RIGHTSHIFTX(m[0x1]);
  5558. /* 0x3 */
  5559. XMEMCPY(m[0x3], m[0x2], AES_BLOCK_SIZE);
  5560. xorbuf (m[0x3], m[0x1], AES_BLOCK_SIZE);
  5561. /* 0x5 -> 0x7 */
  5562. XMEMCPY(m[0x5], m[0x4], AES_BLOCK_SIZE);
  5563. xorbuf (m[0x5], m[0x1], AES_BLOCK_SIZE);
  5564. XMEMCPY(m[0x6], m[0x4], AES_BLOCK_SIZE);
  5565. xorbuf (m[0x6], m[0x2], AES_BLOCK_SIZE);
  5566. XMEMCPY(m[0x7], m[0x4], AES_BLOCK_SIZE);
  5567. xorbuf (m[0x7], m[0x3], AES_BLOCK_SIZE);
  5568. /* 0x9 -> 0xf */
  5569. XMEMCPY(m[0x9], m[0x8], AES_BLOCK_SIZE);
  5570. xorbuf (m[0x9], m[0x1], AES_BLOCK_SIZE);
  5571. XMEMCPY(m[0xa], m[0x8], AES_BLOCK_SIZE);
  5572. xorbuf (m[0xa], m[0x2], AES_BLOCK_SIZE);
  5573. XMEMCPY(m[0xb], m[0x8], AES_BLOCK_SIZE);
  5574. xorbuf (m[0xb], m[0x3], AES_BLOCK_SIZE);
  5575. XMEMCPY(m[0xc], m[0x8], AES_BLOCK_SIZE);
  5576. xorbuf (m[0xc], m[0x4], AES_BLOCK_SIZE);
  5577. XMEMCPY(m[0xd], m[0x8], AES_BLOCK_SIZE);
  5578. xorbuf (m[0xd], m[0x5], AES_BLOCK_SIZE);
  5579. XMEMCPY(m[0xe], m[0x8], AES_BLOCK_SIZE);
  5580. xorbuf (m[0xe], m[0x6], AES_BLOCK_SIZE);
  5581. XMEMCPY(m[0xf], m[0x8], AES_BLOCK_SIZE);
  5582. xorbuf (m[0xf], m[0x7], AES_BLOCK_SIZE);
  5583. #if !defined(BIG_ENDIAN_ORDER) && !defined(WC_16BIT_CPU)
  5584. for (i = 0; i < 16; i++) {
  5585. Shift4_M0(m[16+i], m[i]);
  5586. }
  5587. #endif
  5588. }
  5589. #endif /* GCM_TABLE */
  5590. /* Software AES - GCM SetKey */
/* Software AES - GCM SetKey */
/* Set the AES-GCM key: load the AES key schedule, derive the GHASH key
 * H = AES_K(0^128), and (table builds) precompute the GHASH tables.
 *
 * aes  AES context to initialize.
 * key  raw key bytes (optionally a CAAM blob on IMX6 builds).
 * len  key length; must be 16, 24 or 32 after any blob decode.
 *
 * Returns 0 on success, BAD_FUNC_ARG on bad length/context, or an error
 * from key setup / block encryption. Several hardware back ends return
 * early because they do not need H or the tables. */
int wc_AesGcmSetKey(Aes* aes, const byte* key, word32 len)
{
    int ret;
    byte iv[AES_BLOCK_SIZE];

#ifdef WOLFSSL_IMX6_CAAM_BLOB
    byte local[32];
    word32 localSz = 32;

    /* key may arrive wrapped as a CAAM blob; unwrap it into local[] */
    if (len == (16 + WC_CAAM_BLOB_SZ) ||
        len == (24 + WC_CAAM_BLOB_SZ) ||
        len == (32 + WC_CAAM_BLOB_SZ)) {
        if (wc_caamOpenBlob((byte*)key, len, local, &localSz) != 0) {
            return BAD_FUNC_ARG;
        }

        /* set local values */
        key = local;
        len = localSz;
    }
#endif

    if (!((len == 16) || (len == 24) || (len == 32)))
        return BAD_FUNC_ARG;

    if (aes == NULL) {
#ifdef WOLFSSL_IMX6_CAAM_BLOB
        ForceZero(local, sizeof(local));
#endif
        return BAD_FUNC_ARG;
    }

#ifdef OPENSSL_EXTRA
    /* reset the running AAD state kept for the OpenSSL-compat layer */
    XMEMSET(aes->gcm.aadH, 0, sizeof(aes->gcm.aadH));
    aes->gcm.aadLen = 0;
#endif
    XMEMSET(iv, 0, AES_BLOCK_SIZE);
    ret = wc_AesSetKey(aes, key, len, iv, AES_ENCRYPTION);
#ifdef WOLFSSL_AESGCM_STREAM
    aes->gcmKeySet = 1;
#endif

#if defined(WOLFSSL_SECO_CAAM)
    /* SECO hardware keeps the key; no H/table derivation needed */
    if (aes->devId == WOLFSSL_SECO_DEVID) {
        return ret;
    }
#endif /* WOLFSSL_SECO_CAAM */

#if defined(WOLFSSL_RENESAS_FSPSM_CRYPTONLY) && \
    !defined(NO_WOLFSSL_RENESAS_FSPSM_AES)
    return ret;
#endif /* WOLFSSL_RENESAS_RSIP && WOLFSSL_RENESAS_FSPSM_CRYPTONLY*/

#if !defined(FREESCALE_LTC_AES_GCM)
    if (ret == 0) {
        VECTOR_REGISTERS_PUSH;
        /* AES-NI code generates its own H value, but generate it here too, to
         * assure pure-C fallback is always usable.
         */
        ret = wc_AesEncrypt(aes, iv, aes->gcm.H);
        VECTOR_REGISTERS_POP;
    }
    if (ret == 0) {
    #if defined(GCM_TABLE) || defined(GCM_TABLE_4BIT)
        GenerateM0(&aes->gcm);
    #endif /* GCM_TABLE */
    }
#endif /* FREESCALE_LTC_AES_GCM */

#if defined(WOLFSSL_XILINX_CRYPT) || defined(WOLFSSL_AFALG_XILINX_AES)
    wc_AesGcmSetKey_ex(aes, key, len, WOLFSSL_XILINX_AES_KEY_SRC);
#endif

#ifdef WOLF_CRYPTO_CB
    /* keep a copy for crypto-callback devices that need the raw key */
    if (aes->devId != INVALID_DEVID) {
        XMEMCPY(aes->devKey, key, len);
    }
#endif

#ifdef WOLFSSL_IMX6_CAAM_BLOB
    ForceZero(local, sizeof(local));
#endif
    return ret;
}
#ifdef WOLFSSL_AESNI
/* Prototypes for the one-shot AES-GCM assembly routines (implemented in
 * the AES-NI .S files, bound via XASM_LINK). Each encrypt variant consumes
 * plaintext `in`/`nbytes`, AAD `addt`/`abytes`, IV `ivec`/`ibytes`, and
 * writes the ciphertext plus a `tbytes` tag; decrypt variants verify the
 * tag and report the result through `res`. AVX1/AVX2 variants are only
 * declared when USE_INTEL_SPEEDUP enables them. */
#if defined(USE_INTEL_SPEEDUP)
    #define HAVE_INTEL_AVX1
    #define HAVE_INTEL_AVX2
#endif /* USE_INTEL_SPEEDUP */

void AES_GCM_encrypt_aesni(const unsigned char *in, unsigned char *out,
                           const unsigned char* addt, const unsigned char* ivec,
                           unsigned char *tag, word32 nbytes,
                           word32 abytes, word32 ibytes,
                           word32 tbytes, const unsigned char* key, int nr)
                           XASM_LINK("AES_GCM_encrypt_aesni");
#ifdef HAVE_INTEL_AVX1
void AES_GCM_encrypt_avx1(const unsigned char *in, unsigned char *out,
                          const unsigned char* addt, const unsigned char* ivec,
                          unsigned char *tag, word32 nbytes,
                          word32 abytes, word32 ibytes,
                          word32 tbytes, const unsigned char* key,
                          int nr)
                          XASM_LINK("AES_GCM_encrypt_avx1");
#ifdef HAVE_INTEL_AVX2
void AES_GCM_encrypt_avx2(const unsigned char *in, unsigned char *out,
                          const unsigned char* addt, const unsigned char* ivec,
                          unsigned char *tag, word32 nbytes,
                          word32 abytes, word32 ibytes,
                          word32 tbytes, const unsigned char* key,
                          int nr)
                          XASM_LINK("AES_GCM_encrypt_avx2");
#endif /* HAVE_INTEL_AVX2 */
#endif /* HAVE_INTEL_AVX1 */

#ifdef HAVE_AES_DECRYPT
void AES_GCM_decrypt_aesni(const unsigned char *in, unsigned char *out,
                           const unsigned char* addt, const unsigned char* ivec,
                           const unsigned char *tag, word32 nbytes, word32 abytes,
                           word32 ibytes, word32 tbytes, const unsigned char* key,
                           int nr, int* res)
                           XASM_LINK("AES_GCM_decrypt_aesni");
#ifdef HAVE_INTEL_AVX1
void AES_GCM_decrypt_avx1(const unsigned char *in, unsigned char *out,
                          const unsigned char* addt, const unsigned char* ivec,
                          const unsigned char *tag, word32 nbytes,
                          word32 abytes, word32 ibytes, word32 tbytes,
                          const unsigned char* key, int nr, int* res)
                          XASM_LINK("AES_GCM_decrypt_avx1");
#ifdef HAVE_INTEL_AVX2
void AES_GCM_decrypt_avx2(const unsigned char *in, unsigned char *out,
                          const unsigned char* addt, const unsigned char* ivec,
                          const unsigned char *tag, word32 nbytes,
                          word32 abytes, word32 ibytes, word32 tbytes,
                          const unsigned char* key, int nr, int* res)
                          XASM_LINK("AES_GCM_decrypt_avx2");
#endif /* HAVE_INTEL_AVX2 */
#endif /* HAVE_INTEL_AVX1 */
#endif /* HAVE_AES_DECRYPT */
#endif /* WOLFSSL_AESNI */
  5717. #if defined(GCM_SMALL)
  5718. static void GMULT(byte* X, byte* Y)
  5719. {
  5720. byte Z[AES_BLOCK_SIZE];
  5721. byte V[AES_BLOCK_SIZE];
  5722. int i, j;
  5723. XMEMSET(Z, 0, AES_BLOCK_SIZE);
  5724. XMEMCPY(V, X, AES_BLOCK_SIZE);
  5725. for (i = 0; i < AES_BLOCK_SIZE; i++)
  5726. {
  5727. byte y = Y[i];
  5728. for (j = 0; j < 8; j++)
  5729. {
  5730. if (y & 0x80) {
  5731. xorbuf(Z, V, AES_BLOCK_SIZE);
  5732. }
  5733. RIGHTSHIFTX(V);
  5734. y = y << 1;
  5735. }
  5736. }
  5737. XMEMCPY(X, Z, AES_BLOCK_SIZE);
  5738. }
  5739. void GHASH(Gcm* gcm, const byte* a, word32 aSz, const byte* c,
  5740. word32 cSz, byte* s, word32 sSz)
  5741. {
  5742. byte x[AES_BLOCK_SIZE];
  5743. byte scratch[AES_BLOCK_SIZE];
  5744. word32 blocks, partial;
  5745. byte* h;
  5746. if (gcm == NULL) {
  5747. return;
  5748. }
  5749. h = gcm->H;
  5750. XMEMSET(x, 0, AES_BLOCK_SIZE);
  5751. /* Hash in A, the Additional Authentication Data */
  5752. if (aSz != 0 && a != NULL) {
  5753. blocks = aSz / AES_BLOCK_SIZE;
  5754. partial = aSz % AES_BLOCK_SIZE;
  5755. while (blocks--) {
  5756. xorbuf(x, a, AES_BLOCK_SIZE);
  5757. GMULT(x, h);
  5758. a += AES_BLOCK_SIZE;
  5759. }
  5760. if (partial != 0) {
  5761. XMEMSET(scratch, 0, AES_BLOCK_SIZE);
  5762. XMEMCPY(scratch, a, partial);
  5763. xorbuf(x, scratch, AES_BLOCK_SIZE);
  5764. GMULT(x, h);
  5765. }
  5766. }
  5767. /* Hash in C, the Ciphertext */
  5768. if (cSz != 0 && c != NULL) {
  5769. blocks = cSz / AES_BLOCK_SIZE;
  5770. partial = cSz % AES_BLOCK_SIZE;
  5771. while (blocks--) {
  5772. xorbuf(x, c, AES_BLOCK_SIZE);
  5773. GMULT(x, h);
  5774. c += AES_BLOCK_SIZE;
  5775. }
  5776. if (partial != 0) {
  5777. XMEMSET(scratch, 0, AES_BLOCK_SIZE);
  5778. XMEMCPY(scratch, c, partial);
  5779. xorbuf(x, scratch, AES_BLOCK_SIZE);
  5780. GMULT(x, h);
  5781. }
  5782. }
  5783. /* Hash in the lengths of A and C in bits */
  5784. FlattenSzInBits(&scratch[0], aSz);
  5785. FlattenSzInBits(&scratch[8], cSz);
  5786. xorbuf(x, scratch, AES_BLOCK_SIZE);
  5787. GMULT(x, h);
  5788. /* Copy the result into s. */
  5789. XMEMCPY(s, x, sSz);
  5790. }
  5791. #ifdef WOLFSSL_AESGCM_STREAM
  5792. /* No extra initialization for small implementation.
  5793. *
  5794. * @param [in] aes AES GCM object.
  5795. */
  5796. #define GHASH_INIT_EXTRA(aes) WC_DO_NOTHING
  5797. /* GHASH one block of data..
  5798. *
  5799. * XOR block into tag and GMULT with H.
  5800. *
  5801. * @param [in, out] aes AES GCM object.
  5802. * @param [in] block Block of AAD or cipher text.
  5803. */
  5804. #define GHASH_ONE_BLOCK(aes, block) \
  5805. do { \
  5806. xorbuf(AES_TAG(aes), block, AES_BLOCK_SIZE); \
  5807. GMULT(AES_TAG(aes), aes->gcm.H); \
  5808. } \
  5809. while (0)
  5810. #endif /* WOLFSSL_AESGCM_STREAM */
  5811. /* end GCM_SMALL */
  5812. #elif defined(GCM_TABLE)
/* Reduction lookup table for the byte-serial table GMULT below: R[b] is
 * the two-byte value folded into the top of the product when the byte b
 * is shifted out of the bottom of the 128-bit state. */
ALIGN16 static const byte R[256][2] = {
    {0x00, 0x00}, {0x01, 0xc2}, {0x03, 0x84}, {0x02, 0x46},
    {0x07, 0x08}, {0x06, 0xca}, {0x04, 0x8c}, {0x05, 0x4e},
    {0x0e, 0x10}, {0x0f, 0xd2}, {0x0d, 0x94}, {0x0c, 0x56},
    {0x09, 0x18}, {0x08, 0xda}, {0x0a, 0x9c}, {0x0b, 0x5e},
    {0x1c, 0x20}, {0x1d, 0xe2}, {0x1f, 0xa4}, {0x1e, 0x66},
    {0x1b, 0x28}, {0x1a, 0xea}, {0x18, 0xac}, {0x19, 0x6e},
    {0x12, 0x30}, {0x13, 0xf2}, {0x11, 0xb4}, {0x10, 0x76},
    {0x15, 0x38}, {0x14, 0xfa}, {0x16, 0xbc}, {0x17, 0x7e},
    {0x38, 0x40}, {0x39, 0x82}, {0x3b, 0xc4}, {0x3a, 0x06},
    {0x3f, 0x48}, {0x3e, 0x8a}, {0x3c, 0xcc}, {0x3d, 0x0e},
    {0x36, 0x50}, {0x37, 0x92}, {0x35, 0xd4}, {0x34, 0x16},
    {0x31, 0x58}, {0x30, 0x9a}, {0x32, 0xdc}, {0x33, 0x1e},
    {0x24, 0x60}, {0x25, 0xa2}, {0x27, 0xe4}, {0x26, 0x26},
    {0x23, 0x68}, {0x22, 0xaa}, {0x20, 0xec}, {0x21, 0x2e},
    {0x2a, 0x70}, {0x2b, 0xb2}, {0x29, 0xf4}, {0x28, 0x36},
    {0x2d, 0x78}, {0x2c, 0xba}, {0x2e, 0xfc}, {0x2f, 0x3e},
    {0x70, 0x80}, {0x71, 0x42}, {0x73, 0x04}, {0x72, 0xc6},
    {0x77, 0x88}, {0x76, 0x4a}, {0x74, 0x0c}, {0x75, 0xce},
    {0x7e, 0x90}, {0x7f, 0x52}, {0x7d, 0x14}, {0x7c, 0xd6},
    {0x79, 0x98}, {0x78, 0x5a}, {0x7a, 0x1c}, {0x7b, 0xde},
    {0x6c, 0xa0}, {0x6d, 0x62}, {0x6f, 0x24}, {0x6e, 0xe6},
    {0x6b, 0xa8}, {0x6a, 0x6a}, {0x68, 0x2c}, {0x69, 0xee},
    {0x62, 0xb0}, {0x63, 0x72}, {0x61, 0x34}, {0x60, 0xf6},
    {0x65, 0xb8}, {0x64, 0x7a}, {0x66, 0x3c}, {0x67, 0xfe},
    {0x48, 0xc0}, {0x49, 0x02}, {0x4b, 0x44}, {0x4a, 0x86},
    {0x4f, 0xc8}, {0x4e, 0x0a}, {0x4c, 0x4c}, {0x4d, 0x8e},
    {0x46, 0xd0}, {0x47, 0x12}, {0x45, 0x54}, {0x44, 0x96},
    {0x41, 0xd8}, {0x40, 0x1a}, {0x42, 0x5c}, {0x43, 0x9e},
    {0x54, 0xe0}, {0x55, 0x22}, {0x57, 0x64}, {0x56, 0xa6},
    {0x53, 0xe8}, {0x52, 0x2a}, {0x50, 0x6c}, {0x51, 0xae},
    {0x5a, 0xf0}, {0x5b, 0x32}, {0x59, 0x74}, {0x58, 0xb6},
    {0x5d, 0xf8}, {0x5c, 0x3a}, {0x5e, 0x7c}, {0x5f, 0xbe},
    {0xe1, 0x00}, {0xe0, 0xc2}, {0xe2, 0x84}, {0xe3, 0x46},
    {0xe6, 0x08}, {0xe7, 0xca}, {0xe5, 0x8c}, {0xe4, 0x4e},
    {0xef, 0x10}, {0xee, 0xd2}, {0xec, 0x94}, {0xed, 0x56},
    {0xe8, 0x18}, {0xe9, 0xda}, {0xeb, 0x9c}, {0xea, 0x5e},
    {0xfd, 0x20}, {0xfc, 0xe2}, {0xfe, 0xa4}, {0xff, 0x66},
    {0xfa, 0x28}, {0xfb, 0xea}, {0xf9, 0xac}, {0xf8, 0x6e},
    {0xf3, 0x30}, {0xf2, 0xf2}, {0xf0, 0xb4}, {0xf1, 0x76},
    {0xf4, 0x38}, {0xf5, 0xfa}, {0xf7, 0xbc}, {0xf6, 0x7e},
    {0xd9, 0x40}, {0xd8, 0x82}, {0xda, 0xc4}, {0xdb, 0x06},
    {0xde, 0x48}, {0xdf, 0x8a}, {0xdd, 0xcc}, {0xdc, 0x0e},
    {0xd7, 0x50}, {0xd6, 0x92}, {0xd4, 0xd4}, {0xd5, 0x16},
    {0xd0, 0x58}, {0xd1, 0x9a}, {0xd3, 0xdc}, {0xd2, 0x1e},
    {0xc5, 0x60}, {0xc4, 0xa2}, {0xc6, 0xe4}, {0xc7, 0x26},
    {0xc2, 0x68}, {0xc3, 0xaa}, {0xc1, 0xec}, {0xc0, 0x2e},
    {0xcb, 0x70}, {0xca, 0xb2}, {0xc8, 0xf4}, {0xc9, 0x36},
    {0xcc, 0x78}, {0xcd, 0xba}, {0xcf, 0xfc}, {0xce, 0x3e},
    {0x91, 0x80}, {0x90, 0x42}, {0x92, 0x04}, {0x93, 0xc6},
    {0x96, 0x88}, {0x97, 0x4a}, {0x95, 0x0c}, {0x94, 0xce},
    {0x9f, 0x90}, {0x9e, 0x52}, {0x9c, 0x14}, {0x9d, 0xd6},
    {0x98, 0x98}, {0x99, 0x5a}, {0x9b, 0x1c}, {0x9a, 0xde},
    {0x8d, 0xa0}, {0x8c, 0x62}, {0x8e, 0x24}, {0x8f, 0xe6},
    {0x8a, 0xa8}, {0x8b, 0x6a}, {0x89, 0x2c}, {0x88, 0xee},
    {0x83, 0xb0}, {0x82, 0x72}, {0x80, 0x34}, {0x81, 0xf6},
    {0x84, 0xb8}, {0x85, 0x7a}, {0x87, 0x3c}, {0x86, 0xfe},
    {0xa9, 0xc0}, {0xa8, 0x02}, {0xaa, 0x44}, {0xab, 0x86},
    {0xae, 0xc8}, {0xaf, 0x0a}, {0xad, 0x4c}, {0xac, 0x8e},
    {0xa7, 0xd0}, {0xa6, 0x12}, {0xa4, 0x54}, {0xa5, 0x96},
    {0xa0, 0xd8}, {0xa1, 0x1a}, {0xa3, 0x5c}, {0xa2, 0x9e},
    {0xb5, 0xe0}, {0xb4, 0x22}, {0xb6, 0x64}, {0xb7, 0xa6},
    {0xb2, 0xe8}, {0xb3, 0x2a}, {0xb1, 0x6c}, {0xb0, 0xae},
    {0xbb, 0xf0}, {0xba, 0x32}, {0xb8, 0x74}, {0xb9, 0xb6},
    {0xbc, 0xf8}, {0xbd, 0x3a}, {0xbf, 0x7c}, {0xbe, 0xbe} };
/* GF(2^128) multiply for GHASH (GCM_TABLE build): x = x * H using the
 * precomputed 256-entry table m (one lookup per input byte) and the
 * reduction table R. Three variants: a portable byte-wise version, and
 * word32/word64 versions that shift via unaligned word loads/stores over
 * a double-size scratch buffer.
 * NOTE(review): the word variants cast byte pointers to word32*/word64* at
 * odd offsets — assumes the target tolerates unaligned access and this
 * aliasing; presumably guarded by the build configuration. */
static void GMULT(byte *x, byte m[256][AES_BLOCK_SIZE])
{
#if !defined(WORD64_AVAILABLE) || defined(BIG_ENDIAN_ORDER)
    int i, j;
    byte Z[AES_BLOCK_SIZE];
    byte a;

    XMEMSET(Z, 0, sizeof(Z));

    /* process bytes 15..1: accumulate table entry, then shift the whole
     * state right one byte, folding the dropped byte back in via R */
    for (i = 15; i > 0; i--) {
        xorbuf(Z, m[x[i]], AES_BLOCK_SIZE);
        a = Z[15];

        for (j = 15; j > 0; j--) {
            Z[j] = Z[j-1];
        }

        Z[0] = R[a][0];
        Z[1] ^= R[a][1];
    }

    xorbuf(Z, m[x[0]], AES_BLOCK_SIZE);
    XMEMCPY(x, Z, AES_BLOCK_SIZE);
#elif defined(WC_32BIT_CPU)
    /* double-size scratch: the byte-shift is done by writing the table
     * entry one byte further left each round instead of moving data */
    byte Z[AES_BLOCK_SIZE + AES_BLOCK_SIZE];
    byte a;
    word32* pZ;
    word32* pm;
    word32* px = (word32*)(x);
    int i;

    pZ = (word32*)(Z + 15 + 1);
    pm = (word32*)(m[x[15]]);
    pZ[0] = pm[0];
    pZ[1] = pm[1];
    pZ[2] = pm[2];
    pZ[3] = pm[3];
    a = Z[16 + 15];
    Z[15] = R[a][0];
    Z[16] ^= R[a][1];

    for (i = 14; i > 0; i--) {
        pZ = (word32*)(Z + i + 1);
        pm = (word32*)(m[x[i]]);
        pZ[0] ^= pm[0];
        pZ[1] ^= pm[1];
        pZ[2] ^= pm[2];
        pZ[3] ^= pm[3];
        a = Z[16 + i];
        Z[i] = R[a][0];
        Z[i+1] ^= R[a][1];
    }

    pZ = (word32*)(Z + 1);
    pm = (word32*)(m[x[0]]);
    px[0] = pZ[0] ^ pm[0]; px[1] = pZ[1] ^ pm[1];
    px[2] = pZ[2] ^ pm[2]; px[3] = pZ[3] ^ pm[3];
#else
    /* same scheme with 64-bit words */
    byte Z[AES_BLOCK_SIZE + AES_BLOCK_SIZE];
    byte a;
    word64* pZ;
    word64* pm;
    word64* px = (word64*)(x);
    int i;

    pZ = (word64*)(Z + 15 + 1);
    pm = (word64*)(m[x[15]]);
    pZ[0] = pm[0];
    pZ[1] = pm[1];
    a = Z[16 + 15];
    Z[15] = R[a][0];
    Z[16] ^= R[a][1];

    for (i = 14; i > 0; i--) {
        pZ = (word64*)(Z + i + 1);
        pm = (word64*)(m[x[i]]);
        pZ[0] ^= pm[0];
        pZ[1] ^= pm[1];
        a = Z[16 + i];
        Z[i] = R[a][0];
        Z[i+1] ^= R[a][1];
    }

    pZ = (word64*)(Z + 1);
    pm = (word64*)(m[x[0]]);
    px[0] = pZ[0] ^ pm[0]; px[1] = pZ[1] ^ pm[1];
#endif
}
  5955. void GHASH(Gcm* gcm, const byte* a, word32 aSz, const byte* c,
  5956. word32 cSz, byte* s, word32 sSz)
  5957. {
  5958. byte x[AES_BLOCK_SIZE];
  5959. byte scratch[AES_BLOCK_SIZE];
  5960. word32 blocks, partial;
  5961. if (gcm == NULL) {
  5962. return;
  5963. }
  5964. XMEMSET(x, 0, AES_BLOCK_SIZE);
  5965. /* Hash in A, the Additional Authentication Data */
  5966. if (aSz != 0 && a != NULL) {
  5967. blocks = aSz / AES_BLOCK_SIZE;
  5968. partial = aSz % AES_BLOCK_SIZE;
  5969. while (blocks--) {
  5970. xorbuf(x, a, AES_BLOCK_SIZE);
  5971. GMULT(x, gcm->M0);
  5972. a += AES_BLOCK_SIZE;
  5973. }
  5974. if (partial != 0) {
  5975. XMEMSET(scratch, 0, AES_BLOCK_SIZE);
  5976. XMEMCPY(scratch, a, partial);
  5977. xorbuf(x, scratch, AES_BLOCK_SIZE);
  5978. GMULT(x, gcm->M0);
  5979. }
  5980. }
  5981. /* Hash in C, the Ciphertext */
  5982. if (cSz != 0 && c != NULL) {
  5983. blocks = cSz / AES_BLOCK_SIZE;
  5984. partial = cSz % AES_BLOCK_SIZE;
  5985. while (blocks--) {
  5986. xorbuf(x, c, AES_BLOCK_SIZE);
  5987. GMULT(x, gcm->M0);
  5988. c += AES_BLOCK_SIZE;
  5989. }
  5990. if (partial != 0) {
  5991. XMEMSET(scratch, 0, AES_BLOCK_SIZE);
  5992. XMEMCPY(scratch, c, partial);
  5993. xorbuf(x, scratch, AES_BLOCK_SIZE);
  5994. GMULT(x, gcm->M0);
  5995. }
  5996. }
  5997. /* Hash in the lengths of A and C in bits */
  5998. FlattenSzInBits(&scratch[0], aSz);
  5999. FlattenSzInBits(&scratch[8], cSz);
  6000. xorbuf(x, scratch, AES_BLOCK_SIZE);
  6001. GMULT(x, gcm->M0);
  6002. /* Copy the result into s. */
  6003. XMEMCPY(s, x, sSz);
  6004. }
  6005. #ifdef WOLFSSL_AESGCM_STREAM
  6006. /* No extra initialization for table implementation.
  6007. *
  6008. * @param [in] aes AES GCM object.
  6009. */
  6010. #define GHASH_INIT_EXTRA(aes) WC_DO_NOTHING
  6011. /* GHASH one block of data..
  6012. *
  6013. * XOR block into tag and GMULT with H using pre-computed table.
  6014. *
  6015. * @param [in, out] aes AES GCM object.
  6016. * @param [in] block Block of AAD or cipher text.
  6017. */
  6018. #define GHASH_ONE_BLOCK(aes, block) \
  6019. do { \
  6020. xorbuf(AES_TAG(aes), block, AES_BLOCK_SIZE); \
  6021. GMULT(AES_TAG(aes), aes->gcm.M0); \
  6022. } \
  6023. while (0)
  6024. #endif /* WOLFSSL_AESGCM_STREAM */
  6025. /* end GCM_TABLE */
  6026. #elif defined(GCM_TABLE_4BIT)
  6027. /* remainder = x^7 + x^2 + x^1 + 1 => 0xe1
  6028. * R shifts right a reverse bit pair of bytes such that:
  6029. * R(b0, b1) => b1 = (b1 >> 1) | (b0 << 7); b0 >>= 1
  6030. * 0 => 0, 0, 0, 0 => R(R(R(00,00) ^ 00,00) ^ 00,00) ^ 00,00 = 00,00
  6031. * 8 => 0, 0, 0, 1 => R(R(R(00,00) ^ 00,00) ^ 00,00) ^ e1,00 = e1,00
  6032. * 4 => 0, 0, 1, 0 => R(R(R(00,00) ^ 00,00) ^ e1,00) ^ 00,00 = 70,80
  6033. * 2 => 0, 1, 0, 0 => R(R(R(00,00) ^ e1,00) ^ 00,00) ^ 00,00 = 38,40
  6034. * 1 => 1, 0, 0, 0 => R(R(R(e1,00) ^ 00,00) ^ 00,00) ^ 00,00 = 1c,20
6035. * To calculate the rest, XOR the result for each bit.
  6036. * e.g. 6 = 4 ^ 2 => 48,c0
  6037. *
  6038. * Second half is same values rotated by 4-bits.
  6039. */
#if defined(BIG_ENDIAN_ORDER) || defined(WC_16BIT_CPU)
/* Remainders of nibble * x^128, reduced by x^7 + x^2 + x^1 + 1 (0xe1),
 * stored as byte pairs and indexed by the 4-bit value shifted out. */
static const byte R[16][2] = {
    {0x00, 0x00}, {0x1c, 0x20}, {0x38, 0x40}, {0x24, 0x60},
    {0x70, 0x80}, {0x6c, 0xa0}, {0x48, 0xc0}, {0x54, 0xe0},
    {0xe1, 0x00}, {0xfd, 0x20}, {0xd9, 0x40}, {0xc5, 0x60},
    {0x91, 0x80}, {0x8d, 0xa0}, {0xa9, 0xc0}, {0xb5, 0xe0},
};
#else
/* Same remainders packed into 16-bit words (little-endian byte order).
 * Entries [16..31] are the first 16 entries pre-rotated by 4 bits, used
 * when the accumulator has already been shifted by a whole byte. */
static const word16 R[32] = {
    0x0000, 0x201c, 0x4038, 0x6024,
    0x8070, 0xa06c, 0xc048, 0xe054,
    0x00e1, 0x20fd, 0x40d9, 0x60c5,
    0x8091, 0xa08d, 0xc0a9, 0xe0b5,
    0x0000, 0xc201, 0x8403, 0x4602,
    0x0807, 0xca06, 0x8c04, 0x4e05,
    0x100e, 0xd20f, 0x940d, 0x560c,
    0x1809, 0xda08, 0x9c0a, 0x5e0b,
};
#endif
  6059. /* Multiply in GF(2^128) defined by polynomial:
  6060. * x^128 + x^7 + x^2 + x^1 + 1.
  6061. *
  6062. * H: hash key = encrypt(key, 0)
  6063. * x = x * H in field
  6064. *
  6065. * x: cumulative result
  6066. * m: 4-bit table
  6067. * [0..15] * H
  6068. */
  6069. #if defined(BIG_ENDIAN_ORDER) || defined(WC_16BIT_CPU)
/* Multiply x by H in GF(2^128) using the 4-bit pre-computed table.
 *
 * x: 16-byte cumulative GHASH value; replaced with x * H on return.
 * m: table of [0..15] * H.
 *
 * Processes one nibble per inner step: fold in (nibble * H), then
 * multiply the accumulator by x^4 (a 4-bit right shift across the
 * block) and reduce the shifted-out nibble via R.
 */
static void GMULT(byte *x, byte m[16][AES_BLOCK_SIZE])
{
    int i, j, n;
    byte Z[AES_BLOCK_SIZE];
    byte a;
    XMEMSET(Z, 0, sizeof(Z));
    for (i = 15; i >= 0; i--) {
        /* n == 0: low nibble of x[i]; n == 1: high nibble. */
        for (n = 0; n < 2; n++) {
            if (n == 0)
                xorbuf(Z, m[x[i] & 0xf], AES_BLOCK_SIZE);
            else {
                xorbuf(Z, m[x[i] >> 4], AES_BLOCK_SIZE);
                /* Final nibble - no trailing shift needed. */
                if (i == 0)
                    break;
            }
            /* Nibble about to be shifted out - used for reduction. */
            a = Z[15] & 0xf;
            /* Shift Z right by 4 bits across all 16 bytes. */
            for (j = 15; j > 0; j--)
                Z[j] = (Z[j-1] << 4) | (Z[j] >> 4);
            Z[0] >>= 4;
            /* Fold the shifted-out nibble back in via R. */
            Z[0] ^= R[a][0];
            Z[1] ^= R[a][1];
        }
    }
    XMEMCPY(x, Z, AES_BLOCK_SIZE);
}
  6095. #elif defined(WC_32BIT_CPU)
/* Multiply x by H in GF(2^128) using the 4-bit tables - 32-bit words.
 *
 * x: 16-byte cumulative GHASH value; replaced with x * H on return.
 * m: [0..15] * H in entries 0-15; the same products pre-rotated by
 *    4 bits in entries 16-31.
 *
 * Handles a whole byte per loop iteration: one 8-bit rotate plus two
 * table lookups (one plain, one pre-rotated), with remainders folded in
 * from the packed R table.
 * NOTE(review): word32* casts over byte buffers assume acceptable
 * alignment/aliasing on this target - confirm for new platforms.
 */
static WC_INLINE void GMULT(byte *x, byte m[32][AES_BLOCK_SIZE])
{
    int i;
    word32 z8[4] = {0, 0, 0, 0};
    byte a;
    word32* x8 = (word32*)x;
    word32* m8;
    byte xi;
    word32 n7, n6, n5, n4, n3, n2, n1, n0;
    for (i = 15; i > 0; i--) {
        xi = x[i];
        /* XOR in (msn * H) */
        m8 = (word32*)m[xi & 0xf];
        z8[0] ^= m8[0]; z8[1] ^= m8[1]; z8[2] ^= m8[2]; z8[3] ^= m8[3];
        /* Cache top byte for remainder calculations - lost in rotate. */
        a = (byte)(z8[3] >> 24);
        /* Rotate Z by 8-bits */
        z8[3] = (z8[2] >> 24) | (z8[3] << 8);
        z8[2] = (z8[1] >> 24) | (z8[2] << 8);
        z8[1] = (z8[0] >> 24) | (z8[1] << 8);
        z8[0] <<= 8;
        /* XOR in (msn * remainder) [pre-rotated by 4 bits] */
        z8[0] ^= (word32)R[16 + (a & 0xf)];
        xi >>= 4;
        /* XOR in next significant nibble (XORed with H) * remainder */
        m8 = (word32*)m[xi];
        a ^= (byte)(m8[3] >> 20);
        z8[0] ^= (word32)R[a >> 4];
        /* XOR in (next significant nibble * H) [pre-rotated by 4 bits] */
        m8 = (word32*)m[16 + xi];
        z8[0] ^= m8[0]; z8[1] ^= m8[1];
        z8[2] ^= m8[2]; z8[3] ^= m8[3];
    }
    /* Last byte: explicit 4-bit rotates instead of the byte rotate. */
    xi = x[0];
    /* XOR in most significant nibble * H */
    m8 = (word32*)m[xi & 0xf];
    z8[0] ^= m8[0]; z8[1] ^= m8[1]; z8[2] ^= m8[2]; z8[3] ^= m8[3];
    /* Cache top byte for remainder calculations - lost in rotate. */
    a = (z8[3] >> 24) & 0xf;
    /* Rotate z by 4-bits */
    n7 = z8[3] & 0xf0f0f0f0ULL;
    n6 = z8[3] & 0x0f0f0f0fULL;
    n5 = z8[2] & 0xf0f0f0f0ULL;
    n4 = z8[2] & 0x0f0f0f0fULL;
    n3 = z8[1] & 0xf0f0f0f0ULL;
    n2 = z8[1] & 0x0f0f0f0fULL;
    n1 = z8[0] & 0xf0f0f0f0ULL;
    n0 = z8[0] & 0x0f0f0f0fULL;
    z8[3] = (n7 >> 4) | (n6 << 12) | (n4 >> 20);
    z8[2] = (n5 >> 4) | (n4 << 12) | (n2 >> 20);
    z8[1] = (n3 >> 4) | (n2 << 12) | (n0 >> 20);
    z8[0] = (n1 >> 4) | (n0 << 12);
    /* XOR in most significant nibble * remainder */
    z8[0] ^= (word32)R[a];
    /* XOR in next significant nibble * H */
    m8 = (word32*)m[xi >> 4];
    z8[0] ^= m8[0]; z8[1] ^= m8[1]; z8[2] ^= m8[2]; z8[3] ^= m8[3];
    /* Write back result. */
    x8[0] = z8[0]; x8[1] = z8[1]; x8[2] = z8[2]; x8[3] = z8[3];
}
  6156. #else
/* Multiply x by H in GF(2^128) using the 4-bit tables - 64-bit words.
 *
 * x: 16-byte cumulative GHASH value; replaced with x * H on return.
 * m: [0..15] * H in entries 0-15; the same products pre-rotated by
 *    4 bits in entries 16-31.
 *
 * Same byte-per-iteration scheme as the 32-bit variant, using two
 * 64-bit words for the accumulator.
 * NOTE(review): word64* casts over byte buffers assume acceptable
 * alignment/aliasing on this target - confirm for new platforms.
 */
static WC_INLINE void GMULT(byte *x, byte m[32][AES_BLOCK_SIZE])
{
    int i;
    word64 z8[2] = {0, 0};
    byte a;
    word64* x8 = (word64*)x;
    word64* m8;
    word64 n0, n1, n2, n3;
    byte xi;
    for (i = 15; i > 0; i--) {
        xi = x[i];
        /* XOR in (msn * H) */
        m8 = (word64*)m[xi & 0xf];
        z8[0] ^= m8[0];
        z8[1] ^= m8[1];
        /* Cache top byte for remainder calculations - lost in rotate. */
        a = (byte)(z8[1] >> 56);
        /* Rotate Z by 8-bits */
        z8[1] = (z8[0] >> 56) | (z8[1] << 8);
        z8[0] <<= 8;
        /* XOR in (next significant nibble * H) [pre-rotated by 4 bits] */
        m8 = (word64*)m[16 + (xi >> 4)];
        z8[0] ^= m8[0];
        z8[1] ^= m8[1];
        /* XOR in (msn * remainder) [pre-rotated by 4 bits] */
        z8[0] ^= (word64)R[16 + (a & 0xf)];
        /* XOR in next significant nibble (XORed with H) * remainder */
        m8 = (word64*)m[xi >> 4];
        a ^= (byte)(m8[1] >> 52);
        z8[0] ^= (word64)R[a >> 4];
    }
    /* Last byte: explicit 4-bit rotate instead of the byte rotate. */
    xi = x[0];
    /* XOR in most significant nibble * H */
    m8 = (word64*)m[xi & 0xf];
    z8[0] ^= m8[0];
    z8[1] ^= m8[1];
    /* Cache top byte for remainder calculations - lost in rotate. */
    a = (z8[1] >> 56) & 0xf;
    /* Rotate z by 4-bits */
    n3 = z8[1] & W64LIT(0xf0f0f0f0f0f0f0f0);
    n2 = z8[1] & W64LIT(0x0f0f0f0f0f0f0f0f);
    n1 = z8[0] & W64LIT(0xf0f0f0f0f0f0f0f0);
    n0 = z8[0] & W64LIT(0x0f0f0f0f0f0f0f0f);
    z8[1] = (n3 >> 4) | (n2 << 12) | (n0 >> 52);
    z8[0] = (n1 >> 4) | (n0 << 12);
    /* XOR in next significant nibble * H */
    m8 = (word64*)m[xi >> 4];
    z8[0] ^= m8[0];
    z8[1] ^= m8[1];
    /* XOR in most significant nibble * remainder */
    z8[0] ^= (word64)R[a];
    /* Write back result. */
    x8[0] = z8[0];
    x8[1] = z8[1];
}
  6212. #endif
  6213. void GHASH(Gcm* gcm, const byte* a, word32 aSz, const byte* c,
  6214. word32 cSz, byte* s, word32 sSz)
  6215. {
  6216. byte x[AES_BLOCK_SIZE];
  6217. byte scratch[AES_BLOCK_SIZE];
  6218. word32 blocks, partial;
  6219. if (gcm == NULL) {
  6220. return;
  6221. }
  6222. XMEMSET(x, 0, AES_BLOCK_SIZE);
  6223. /* Hash in A, the Additional Authentication Data */
  6224. if (aSz != 0 && a != NULL) {
  6225. blocks = aSz / AES_BLOCK_SIZE;
  6226. partial = aSz % AES_BLOCK_SIZE;
  6227. while (blocks--) {
  6228. xorbuf(x, a, AES_BLOCK_SIZE);
  6229. GMULT(x, gcm->M0);
  6230. a += AES_BLOCK_SIZE;
  6231. }
  6232. if (partial != 0) {
  6233. XMEMSET(scratch, 0, AES_BLOCK_SIZE);
  6234. XMEMCPY(scratch, a, partial);
  6235. xorbuf(x, scratch, AES_BLOCK_SIZE);
  6236. GMULT(x, gcm->M0);
  6237. }
  6238. }
  6239. /* Hash in C, the Ciphertext */
  6240. if (cSz != 0 && c != NULL) {
  6241. blocks = cSz / AES_BLOCK_SIZE;
  6242. partial = cSz % AES_BLOCK_SIZE;
  6243. while (blocks--) {
  6244. xorbuf(x, c, AES_BLOCK_SIZE);
  6245. GMULT(x, gcm->M0);
  6246. c += AES_BLOCK_SIZE;
  6247. }
  6248. if (partial != 0) {
  6249. XMEMSET(scratch, 0, AES_BLOCK_SIZE);
  6250. XMEMCPY(scratch, c, partial);
  6251. xorbuf(x, scratch, AES_BLOCK_SIZE);
  6252. GMULT(x, gcm->M0);
  6253. }
  6254. }
  6255. /* Hash in the lengths of A and C in bits */
  6256. FlattenSzInBits(&scratch[0], aSz);
  6257. FlattenSzInBits(&scratch[8], cSz);
  6258. xorbuf(x, scratch, AES_BLOCK_SIZE);
  6259. GMULT(x, gcm->M0);
  6260. /* Copy the result into s. */
  6261. XMEMCPY(s, x, sSz);
  6262. }
#ifdef WOLFSSL_AESGCM_STREAM
/* No extra initialization for 4-bit table implementation.
 *
 * @param [in] aes  AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) WC_DO_NOTHING
/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H using pre-computed table.
 *
 * @param [in, out] aes    AES GCM object.
 * @param [in]      block  Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                     \
    do {                                                \
        xorbuf(AES_TAG(aes), block, AES_BLOCK_SIZE);    \
        GMULT(AES_TAG(aes), (aes)->gcm.M0);             \
    }                                                   \
    while (0)
#endif /* WOLFSSL_AESGCM_STREAM */
  6283. #elif defined(WORD64_AVAILABLE) && !defined(GCM_WORD32)
  6284. #if !defined(FREESCALE_LTC_AES_GCM)
/* Bit-serial multiplication in GF(2^128) defined by the polynomial
 * x^128 + x^7 + x^2 + x^1 + 1 - 64-bit word version, no tables.
 *
 * X: 128-bit value as two big-endian word64s; replaced with X * Y.
 * Y: 128-bit multiplier as two big-endian word64s (unchanged).
 */
static void GMULT(word64* X, word64* Y)
{
    word64 Z[2] = {0,0};
    word64 V[2];
    int i, j;
    word64 v1;
    V[0] = X[0]; V[1] = X[1];
    for (i = 0; i < 2; i++)
    {
        word64 y = Y[i];
        for (j = 0; j < 64; j++)
        {
#ifndef AES_GCM_GMULT_NCT
            /* Constant-time: build an all-ones/all-zeros mask from the
             * top bit of y rather than branching on secret data. */
            word64 mask = 0 - (y >> 63);
            Z[0] ^= V[0] & mask;
            Z[1] ^= V[1] & mask;
#else
            /* Non-constant-time variant (AES_GCM_GMULT_NCT): branches
             * on the secret bit - faster but leaks timing. */
            if (y & 0x8000000000000000ULL) {
                Z[0] ^= V[0];
                Z[1] ^= V[1];
            }
#endif
            /* V *= x: shift right one bit; if a bit fell off the end,
             * reduce by XORing in the polynomial constant 0xE1... */
            v1 = (0 - (V[1] & 1)) & 0xE100000000000000ULL;
            V[1] >>= 1;
            V[1] |= V[0] << 63;
            V[0] >>= 1;
            V[0] ^= v1;
            y <<= 1;
        }
    }
    X[0] = Z[0];
    X[1] = Z[1];
}
/* Compute the GHASH authentication value over AAD and cipher text using
 * the bit-serial 64-bit GMULT (no pre-computed tables).
 *
 * gcm: GCM state holding the hash key H.
 * a:   additional authentication data (may be NULL when aSz is 0).
 * aSz: number of AAD bytes.
 * c:   cipher text (may be NULL when cSz is 0).
 * cSz: number of cipher text bytes.
 * s:   output buffer for the tag.
 * sSz: number of tag bytes to write out.
 */
void GHASH(Gcm* gcm, const byte* a, word32 aSz, const byte* c,
    word32 cSz, byte* s, word32 sSz)
{
    word64 x[2] = {0,0};
    word32 blocks, partial;
    word64 bigH[2];
    if (gcm == NULL) {
        return;
    }
    /* Load H as big-endian 64-bit words for GMULT. */
    XMEMCPY(bigH, gcm->H, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
    ByteReverseWords64(bigH, bigH, AES_BLOCK_SIZE);
#endif
    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        word64 bigA[2];
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        while (blocks--) {
            XMEMCPY(bigA, a, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigA, bigA, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigA[0];
            x[1] ^= bigA[1];
            GMULT(x, bigH);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Zero-pad the trailing partial block before hashing. */
            XMEMSET(bigA, 0, AES_BLOCK_SIZE);
            XMEMCPY(bigA, a, partial);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigA, bigA, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigA[0];
            x[1] ^= bigA[1];
            GMULT(x, bigH);
        }
#ifdef OPENSSL_EXTRA
        /* Store AAD partial tag as four 32-bit words so a later call can
         * resume from it when gcm->aadLen is set (set by the OpenSSL
         * compatibility layer - NOTE(review): confirm against callers). */
        gcm->aadH[0] = (word32)((x[0] & 0xFFFFFFFF00000000ULL) >> 32);
        gcm->aadH[1] = (word32)(x[0] & 0xFFFFFFFF);
        gcm->aadH[2] = (word32)((x[1] & 0xFFFFFFFF00000000ULL) >> 32);
        gcm->aadH[3] = (word32)(x[1] & 0xFFFFFFFF);
#endif
    }
    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        word64 bigC[2];
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
#ifdef OPENSSL_EXTRA
        /* Start from last AAD partial tag */
        if(gcm->aadLen) {
            x[0] = ((word64)gcm->aadH[0]) << 32 | gcm->aadH[1];
            x[1] = ((word64)gcm->aadH[2]) << 32 | gcm->aadH[3];
        }
#endif
        while (blocks--) {
            XMEMCPY(bigC, c, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigC, bigC, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigC[0];
            x[1] ^= bigC[1];
            GMULT(x, bigH);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Zero-pad the trailing partial block before hashing. */
            XMEMSET(bigC, 0, AES_BLOCK_SIZE);
            XMEMCPY(bigC, c, partial);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigC, bigC, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigC[0];
            x[1] ^= bigC[1];
            GMULT(x, bigH);
        }
    }
    /* Hash in the lengths in bits of A and C */
    {
        word64 len[2];
        len[0] = aSz; len[1] = cSz;
#ifdef OPENSSL_EXTRA
        /* Use the externally tracked AAD length when set. */
        if (gcm->aadLen)
            len[0] = (word64)gcm->aadLen;
#endif
        /* Lengths are in bytes. Convert to bits. */
        len[0] *= 8;
        len[1] *= 8;
        x[0] ^= len[0];
        x[1] ^= len[1];
        GMULT(x, bigH);
    }
    /* Convert the tag back to byte order before output. */
#ifdef LITTLE_ENDIAN_ORDER
    ByteReverseWords64(x, x, AES_BLOCK_SIZE);
#endif
    XMEMCPY(s, x, sSz);
}
  6417. #endif /* !FREESCALE_LTC_AES_GCM */
  6418. #ifdef WOLFSSL_AESGCM_STREAM
#ifdef LITTLE_ENDIAN_ORDER
/* Convert H into big-endian 64-bit words so GMULT can operate on
 * word64 values directly.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) \
    ByteReverseWords64((word64*)aes->gcm.H, (word64*)aes->gcm.H, AES_BLOCK_SIZE)
/* GHASH one block of data.
 *
 * XOR block, converted to big-endian words, into tag and GMULT with H.
 *
 * @param [in, out] aes    AES GCM object.
 * @param [in]      block  Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                             \
    do {                                                        \
        word64* x = (word64*)AES_TAG(aes);                      \
        word64* h = (word64*)aes->gcm.H;                        \
        word64 block64[2];                                      \
        XMEMCPY(block64, block, AES_BLOCK_SIZE);                \
        ByteReverseWords64(block64, block64, AES_BLOCK_SIZE);   \
        x[0] ^= block64[0];                                     \
        x[1] ^= block64[1];                                     \
        GMULT(x, h);                                            \
    }                                                           \
    while (0)
#ifdef OPENSSL_EXTRA
/* GHASH in AAD and cipher text lengths in bits.
 *
 * Uses gcm.aadLen for the AAD length when set (OpenSSL compatibility).
 * Convert tag back to little-endian.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                            \
    do {                                                \
        word64* x = (word64*)AES_TAG(aes);              \
        word64* h = (word64*)aes->gcm.H;                \
        word64 len[2];                                  \
        len[0] = aes->aSz; len[1] = aes->cSz;           \
        if (aes->gcm.aadLen)                            \
            len[0] = (word64)aes->gcm.aadLen;           \
        /* Lengths are in bytes. Convert to bits. */    \
        len[0] *= 8;                                    \
        len[1] *= 8;                                    \
                                                        \
        x[0] ^= len[0];                                 \
        x[1] ^= len[1];                                 \
        GMULT(x, h);                                    \
        ByteReverseWords64(x, x, AES_BLOCK_SIZE);       \
    }                                                   \
    while (0)
#else
/* GHASH in AAD and cipher text lengths in bits.
 *
 * Convert tag back to little-endian.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                            \
    do {                                                \
        word64* x = (word64*)AES_TAG(aes);              \
        word64* h = (word64*)aes->gcm.H;                \
        word64 len[2];                                  \
        len[0] = aes->aSz; len[1] = aes->cSz;           \
        /* Lengths are in bytes. Convert to bits. */    \
        len[0] *= 8;                                    \
        len[1] *= 8;                                    \
                                                        \
        x[0] ^= len[0];                                 \
        x[1] ^= len[1];                                 \
        GMULT(x, h);                                    \
        ByteReverseWords64(x, x, AES_BLOCK_SIZE);       \
    }                                                   \
    while (0)
#endif
#else
/* No extra initialization: bytes are already in big-endian word order
 * on this platform.
 *
 * @param [in] aes  AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) WC_DO_NOTHING
/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H.
 *
 * @param [in, out] aes    AES GCM object.
 * @param [in]      block  Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                     \
    do {                                                \
        word64* x = (word64*)AES_TAG(aes);              \
        word64* h = (word64*)aes->gcm.H;                \
        word64 block64[2];                              \
        XMEMCPY(block64, block, AES_BLOCK_SIZE);        \
        x[0] ^= block64[0];                             \
        x[1] ^= block64[1];                             \
        GMULT(x, h);                                    \
    }                                                   \
    while (0)
#ifdef OPENSSL_EXTRA
/* GHASH in AAD and cipher text lengths in bits.
 *
 * Uses gcm.aadLen for the AAD length when set (OpenSSL compatibility).
 * No byte reversal is needed on big-endian platforms.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                            \
    do {                                                \
        word64* x = (word64*)AES_TAG(aes);              \
        word64* h = (word64*)aes->gcm.H;                \
        word64 len[2];                                  \
        len[0] = aes->aSz; len[1] = aes->cSz;           \
        if (aes->gcm.aadLen)                            \
            len[0] = (word64)aes->gcm.aadLen;           \
        /* Lengths are in bytes. Convert to bits. */    \
        len[0] *= 8;                                    \
        len[1] *= 8;                                    \
                                                        \
        x[0] ^= len[0];                                 \
        x[1] ^= len[1];                                 \
        GMULT(x, h);                                    \
    }                                                   \
    while (0)
#else
/* GHASH in AAD and cipher text lengths in bits.
 *
 * No byte reversal is needed on big-endian platforms.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                            \
    do {                                                \
        word64* x = (word64*)AES_TAG(aes);              \
        word64* h = (word64*)aes->gcm.H;                \
        word64 len[2];                                  \
        len[0] = aes->aSz; len[1] = aes->cSz;           \
        /* Lengths are in bytes. Convert to bits. */    \
        len[0] *= 8;                                    \
        len[1] *= 8;                                    \
                                                        \
        x[0] ^= len[0];                                 \
        x[1] ^= len[1];                                 \
        GMULT(x, h);                                    \
    }                                                   \
    while (0)
#endif
#endif /* !LITTLE_ENDIAN_ORDER */
  6566. #endif /* WOLFSSL_AESGCM_STREAM */
  6567. /* end defined(WORD64_AVAILABLE) && !defined(GCM_WORD32) */
  6568. #else /* GCM_WORD32 */
  6569. static void GMULT(word32* X, word32* Y)
  6570. {
  6571. word32 Z[4] = {0,0,0,0};
  6572. word32 V[4];
  6573. int i, j;
  6574. V[0] = X[0]; V[1] = X[1]; V[2] = X[2]; V[3] = X[3];
  6575. for (i = 0; i < 4; i++)
  6576. {
  6577. word32 y = Y[i];
  6578. for (j = 0; j < 32; j++)
  6579. {
  6580. if (y & 0x80000000) {
  6581. Z[0] ^= V[0];
  6582. Z[1] ^= V[1];
  6583. Z[2] ^= V[2];
  6584. Z[3] ^= V[3];
  6585. }
  6586. if (V[3] & 0x00000001) {
  6587. V[3] >>= 1;
  6588. V[3] |= ((V[2] & 0x00000001) ? 0x80000000 : 0);
  6589. V[2] >>= 1;
  6590. V[2] |= ((V[1] & 0x00000001) ? 0x80000000 : 0);
  6591. V[1] >>= 1;
  6592. V[1] |= ((V[0] & 0x00000001) ? 0x80000000 : 0);
  6593. V[0] >>= 1;
  6594. V[0] ^= 0xE1000000;
  6595. } else {
  6596. V[3] >>= 1;
  6597. V[3] |= ((V[2] & 0x00000001) ? 0x80000000 : 0);
  6598. V[2] >>= 1;
  6599. V[2] |= ((V[1] & 0x00000001) ? 0x80000000 : 0);
  6600. V[1] >>= 1;
  6601. V[1] |= ((V[0] & 0x00000001) ? 0x80000000 : 0);
  6602. V[0] >>= 1;
  6603. }
  6604. y <<= 1;
  6605. }
  6606. }
  6607. X[0] = Z[0];
  6608. X[1] = Z[1];
  6609. X[2] = Z[2];
  6610. X[3] = Z[3];
  6611. }
/* Compute the GHASH authentication value over AAD and cipher text using
 * the bit-serial 32-bit GMULT (no pre-computed tables).
 *
 * gcm: GCM state holding the hash key H.
 * a:   additional authentication data (may be NULL when aSz is 0).
 * aSz: number of AAD bytes.
 * c:   cipher text (may be NULL when cSz is 0).
 * cSz: number of cipher text bytes.
 * s:   output buffer for the tag.
 * sSz: number of tag bytes to write out.
 */
void GHASH(Gcm* gcm, const byte* a, word32 aSz, const byte* c,
    word32 cSz, byte* s, word32 sSz)
{
    word32 x[4] = {0,0,0,0};
    word32 blocks, partial;
    word32 bigH[4];
    if (gcm == NULL) {
        return;
    }
    /* Load H as big-endian 32-bit words for GMULT. */
    XMEMCPY(bigH, gcm->H, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
    ByteReverseWords(bigH, bigH, AES_BLOCK_SIZE);
#endif
    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        word32 bigA[4];
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        while (blocks--) {
            XMEMCPY(bigA, a, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords(bigA, bigA, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigA[0];
            x[1] ^= bigA[1];
            x[2] ^= bigA[2];
            x[3] ^= bigA[3];
            GMULT(x, bigH);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Zero-pad the trailing partial block before hashing. */
            XMEMSET(bigA, 0, AES_BLOCK_SIZE);
            XMEMCPY(bigA, a, partial);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords(bigA, bigA, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigA[0];
            x[1] ^= bigA[1];
            x[2] ^= bigA[2];
            x[3] ^= bigA[3];
            GMULT(x, bigH);
        }
    }
    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        word32 bigC[4];
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        while (blocks--) {
            XMEMCPY(bigC, c, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords(bigC, bigC, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigC[0];
            x[1] ^= bigC[1];
            x[2] ^= bigC[2];
            x[3] ^= bigC[3];
            GMULT(x, bigH);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Zero-pad the trailing partial block before hashing. */
            XMEMSET(bigC, 0, AES_BLOCK_SIZE);
            XMEMCPY(bigC, c, partial);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords(bigC, bigC, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigC[0];
            x[1] ^= bigC[1];
            x[2] ^= bigC[2];
            x[3] ^= bigC[3];
            GMULT(x, bigH);
        }
    }
    /* Hash in the lengths in bits of A and C */
    {
        word32 len[4];
        /* Lengths are in bytes. Convert to bits: the 64-bit bit length
         * is split into a high and a low 32-bit word. */
        len[0] = (aSz >> (8*sizeof(aSz) - 3));
        len[1] = aSz << 3;
        len[2] = (cSz >> (8*sizeof(cSz) - 3));
        len[3] = cSz << 3;
        x[0] ^= len[0];
        x[1] ^= len[1];
        x[2] ^= len[2];
        x[3] ^= len[3];
        GMULT(x, bigH);
    }
    /* Convert the tag back to byte order before output. */
#ifdef LITTLE_ENDIAN_ORDER
    ByteReverseWords(x, x, AES_BLOCK_SIZE);
#endif
    XMEMCPY(s, x, sSz);
}
  6704. #ifdef WOLFSSL_AESGCM_STREAM
#ifdef LITTLE_ENDIAN_ORDER
/* Little-endian 32-bit word implementation requires byte reversal of H.
 *
 * H is all-zeros block encrypted with key.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) \
    ByteReverseWords((word32*)aes->gcm.H, (word32*)aes->gcm.H, AES_BLOCK_SIZE)
/* GHASH one block of data.
 *
 * XOR block, in big-endian form, into tag and GMULT with H.
 *
 * @param [in, out] aes    AES GCM object.
 * @param [in]      block  Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                         \
    do {                                                    \
        word32* x = (word32*)AES_TAG(aes);                  \
        word32* h = (word32*)aes->gcm.H;                    \
        word32 bigEnd[4];                                   \
        XMEMCPY(bigEnd, block, AES_BLOCK_SIZE);             \
        ByteReverseWords(bigEnd, bigEnd, AES_BLOCK_SIZE);   \
        x[0] ^= bigEnd[0];                                  \
        x[1] ^= bigEnd[1];                                  \
        x[2] ^= bigEnd[2];                                  \
        x[3] ^= bigEnd[3];                                  \
        GMULT(x, h);                                        \
    }                                                       \
    while (0)
/* GHASH in AAD and cipher text lengths in bits.
 *
 * Convert tag back to little-endian.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                                \
    do {                                                    \
        word32 len[4];                                      \
        word32* x = (word32*)AES_TAG(aes);                  \
        word32* h = (word32*)aes->gcm.H;                    \
        /* 64-bit bit lengths split into high/low words. */ \
        len[0] = (aes->aSz >> (8*sizeof(aes->aSz) - 3));    \
        len[1] = aes->aSz << 3;                             \
        len[2] = (aes->cSz >> (8*sizeof(aes->cSz) - 3));    \
        len[3] = aes->cSz << 3;                             \
        x[0] ^= len[0];                                     \
        x[1] ^= len[1];                                     \
        x[2] ^= len[2];                                     \
        x[3] ^= len[3];                                     \
        GMULT(x, h);                                        \
        ByteReverseWords(x, x, AES_BLOCK_SIZE);             \
    }                                                       \
    while (0)
#else
/* No extra initialization for 32-bit word implementation.
 *
 * @param [in] aes  AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) WC_DO_NOTHING
/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H.
 *
 * @param [in, out] aes    AES GCM object.
 * @param [in]      block  Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                 \
    do {                                            \
        word32* x = (word32*)AES_TAG(aes);          \
        word32* h = (word32*)aes->gcm.H;            \
        word32 block32[4];                          \
        XMEMCPY(block32, block, AES_BLOCK_SIZE);    \
        x[0] ^= block32[0];                         \
        x[1] ^= block32[1];                         \
        x[2] ^= block32[2];                         \
        x[3] ^= block32[3];                         \
        GMULT(x, h);                                \
    }                                               \
    while (0)
/* GHASH in AAD and cipher text lengths in bits.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                                \
    do {                                                    \
        word32 len[4];                                      \
        word32* x = (word32*)AES_TAG(aes);                  \
        word32* h = (word32*)aes->gcm.H;                    \
        /* 64-bit bit lengths split into high/low words. */ \
        len[0] = (aes->aSz >> (8*sizeof(aes->aSz) - 3));    \
        len[1] = aes->aSz << 3;                             \
        len[2] = (aes->cSz >> (8*sizeof(aes->cSz) - 3));    \
        len[3] = aes->cSz << 3;                             \
        x[0] ^= len[0];                                     \
        x[1] ^= len[1];                                     \
        x[2] ^= len[2];                                     \
        x[3] ^= len[3];                                     \
        GMULT(x, h);                                        \
    }                                                       \
    while (0)
#endif /* LITTLE_ENDIAN_ORDER */
  6805. #endif /* WOLFSSL_AESGCM_STREAM */
  6806. #endif /* end GCM_WORD32 */
  6807. #if !defined(WOLFSSL_XILINX_CRYPT) && !defined(WOLFSSL_AFALG_XILINX_AES)
  6808. #ifdef WOLFSSL_AESGCM_STREAM
#ifndef GHASH_LEN_BLOCK
/* Hash in the lengths of the AAD and cipher text in bits.
 *
 * Default implementation: flatten both 64-bit bit lengths into one
 * block and GHASH it like any other block.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                        \
    do {                                            \
        byte scratch[AES_BLOCK_SIZE];               \
        FlattenSzInBits(&scratch[0], (aes)->aSz);   \
        FlattenSzInBits(&scratch[8], (aes)->cSz);   \
        GHASH_ONE_BLOCK(aes, scratch);              \
    }                                               \
    while (0)
#endif
  6825. /* Initialize a GHASH for streaming operations.
  6826. *
  6827. * @param [in, out] aes AES GCM object.
  6828. */
  6829. static void GHASH_INIT(Aes* aes) {
  6830. /* Set tag to all zeros as initial value. */
  6831. XMEMSET(AES_TAG(aes), 0, AES_BLOCK_SIZE);
  6832. /* Reset counts of AAD and cipher text. */
  6833. aes->aOver = 0;
  6834. aes->cOver = 0;
  6835. /* Extra initialization based on implementation. */
  6836. GHASH_INIT_EXTRA(aes);
  6837. }
  6838. /* Update the GHASH with AAD and/or cipher text.
  6839. *
  6840. * @param [in,out] aes AES GCM object.
  6841. * @param [in] a Additional authentication data buffer.
  6842. * @param [in] aSz Size of data in AAD buffer.
  6843. * @param [in] c Cipher text buffer.
  6844. * @param [in] cSz Size of data in cipher text buffer.
  6845. */
/* Update the streaming GHASH with AAD and/or cipher text, buffering any
 * trailing partial block in AES_LASTGBLOCK until more data arrives.
 *
 * @param [in, out] aes  AES GCM object.
 * @param [in]      a    Additional authentication data buffer.
 * @param [in]      aSz  Size of data in AAD buffer.
 * @param [in]      c    Cipher text buffer.
 * @param [in]      cSz  Size of data in cipher text buffer.
 */
static void GHASH_UPDATE(Aes* aes, const byte* a, word32 aSz, const byte* c,
    word32 cSz)
{
    word32 blocks;
    word32 partial;
    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        /* Update count of AAD we have hashed. */
        aes->aSz += aSz;
        /* Check if we have unprocessed data. */
        if (aes->aOver > 0) {
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->aOver;
            if (sz > aSz) {
                sz = (byte)aSz;
            }
            /* Copy extra into last GHASH block array and update count. */
            XMEMCPY(AES_LASTGBLOCK(aes) + aes->aOver, a, sz);
            aes->aOver += sz;
            if (aes->aOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
                GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
                /* Reset count. */
                aes->aOver = 0;
            }
            /* Used up some data. */
            aSz -= sz;
            a += sz;
        }
        /* Calculate number of blocks of AAD and the leftover. */
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        /* GHASH full blocks now. */
        while (blocks--) {
            GHASH_ONE_BLOCK(aes, a);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Cache the partial block. */
            XMEMCPY(AES_LASTGBLOCK(aes), a, partial);
            aes->aOver = (byte)partial;
        }
    }
    /* Cipher text is starting: AAD is complete, so a buffered partial
     * AAD block must be flushed (zero-padded) before hashing C. */
    if (aes->aOver > 0 && cSz > 0 && c != NULL) {
        /* No more AAD coming and we have a partial block. */
        /* Fill the rest of the block with zeros. */
        byte sz = AES_BLOCK_SIZE - aes->aOver;
        XMEMSET(AES_LASTGBLOCK(aes) + aes->aOver, 0, sz);
        /* GHASH last AAD block. */
        GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
        /* Clear partial count for next time through. */
        aes->aOver = 0;
    }
    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        /* Update count of cipher text we have hashed. */
        aes->cSz += cSz;
        if (aes->cOver > 0) {
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->cOver;
            if (sz > cSz) {
                sz = (byte)cSz;
            }
            XMEMCPY(AES_LASTGBLOCK(aes) + aes->cOver, c, sz);
            /* Update count of unused encrypted counter. */
            aes->cOver += sz;
            if (aes->cOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
                GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
                /* Reset count. */
                aes->cOver = 0;
            }
            /* Used up some data. */
            cSz -= sz;
            c += sz;
        }
        /* Calculate number of blocks of cipher text and the leftover. */
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        /* GHASH full blocks now. */
        while (blocks--) {
            GHASH_ONE_BLOCK(aes, c);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Cache the partial block. */
            XMEMCPY(AES_LASTGBLOCK(aes), c, partial);
            aes->cOver = (byte)partial;
        }
    }
}
  6937. /* Finalize the GHASH calculation.
  6938. *
  6939. * Complete hashing cipher text and hash the AAD and cipher text lengths.
  6940. *
  6941. * @param [in, out] aes AES GCM object.
  6942. * @param [out] s Authentication tag.
  6943. * @param [in] sSz Size of authentication tag required.
  6944. */
  6945. static void GHASH_FINAL(Aes* aes, byte* s, word32 sSz)
  6946. {
  6947. /* AAD block incomplete when > 0 */
  6948. byte over = aes->aOver;
  6949. if (aes->cOver > 0) {
  6950. /* Cipher text block incomplete. */
  6951. over = aes->cOver;
  6952. }
  6953. if (over > 0) {
  6954. /* Zeroize the unused part of the block. */
  6955. XMEMSET(AES_LASTGBLOCK(aes) + over, 0, AES_BLOCK_SIZE - over);
  6956. /* Hash the last block of cipher text. */
  6957. GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
  6958. }
  6959. /* Hash in the lengths of AAD and cipher text in bits */
  6960. GHASH_LEN_BLOCK(aes);
  6961. /* Copy the result into s. */
  6962. XMEMCPY(s, AES_TAG(aes), sSz);
  6963. }
  6964. #endif /* WOLFSSL_AESGCM_STREAM */
  6965. #ifdef FREESCALE_LTC_AES_GCM
  6966. int wc_AesGcmEncrypt(Aes* aes, byte* out, const byte* in, word32 sz,
  6967. const byte* iv, word32 ivSz,
  6968. byte* authTag, word32 authTagSz,
  6969. const byte* authIn, word32 authInSz)
  6970. {
  6971. status_t status;
  6972. word32 keySize;
  6973. /* argument checks */
  6974. if (aes == NULL || authTagSz > AES_BLOCK_SIZE || ivSz == 0) {
  6975. return BAD_FUNC_ARG;
  6976. }
  6977. if (authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ) {
  6978. WOLFSSL_MSG("GcmEncrypt authTagSz too small error");
  6979. return BAD_FUNC_ARG;
  6980. }
  6981. status = wc_AesGetKeySize(aes, &keySize);
  6982. if (status)
  6983. return status;
  6984. status = wolfSSL_CryptHwMutexLock();
  6985. if (status != 0)
  6986. return status;
  6987. status = LTC_AES_EncryptTagGcm(LTC_BASE, in, out, sz, iv, ivSz,
  6988. authIn, authInSz, (byte*)aes->key, keySize, authTag, authTagSz);
  6989. wolfSSL_CryptHwMutexUnLock();
  6990. return (status == kStatus_Success) ? 0 : AES_GCM_AUTH_E;
  6991. }
  6992. #else
  6993. #ifdef STM32_CRYPTO_AES_GCM
  6994. /* this function supports inline encrypt */
  6995. /* define STM32_AESGCM_PARTIAL for STM HW that does not support authentication
  6996. * on byte multiples (see CRYP_HEADERWIDTHUNIT_BYTE) */
  6997. static WARN_UNUSED_RESULT int wc_AesGcmEncrypt_STM32(
  6998. Aes* aes, byte* out, const byte* in, word32 sz,
  6999. const byte* iv, word32 ivSz,
  7000. byte* authTag, word32 authTagSz,
  7001. const byte* authIn, word32 authInSz)
  7002. {
  7003. int ret;
  7004. #ifdef WOLFSSL_STM32_CUBEMX
  7005. CRYP_HandleTypeDef hcryp;
  7006. #else
  7007. word32 keyCopy[AES_256_KEY_SIZE/sizeof(word32)];
  7008. #endif
  7009. word32 keySize;
  7010. #ifdef WOLFSSL_STM32_CUBEMX
  7011. int status = HAL_OK;
  7012. word32 blocks = sz / AES_BLOCK_SIZE;
  7013. word32 partialBlock[AES_BLOCK_SIZE/sizeof(word32)];
  7014. #else
  7015. int status = SUCCESS;
  7016. #endif
  7017. word32 partial = sz % AES_BLOCK_SIZE;
  7018. word32 tag[AES_BLOCK_SIZE/sizeof(word32)];
  7019. word32 ctrInit[AES_BLOCK_SIZE/sizeof(word32)];
  7020. word32 ctr[AES_BLOCK_SIZE/sizeof(word32)];
  7021. word32 authhdr[AES_BLOCK_SIZE/sizeof(word32)];
  7022. byte* authInPadded = NULL;
  7023. int authPadSz, wasAlloc = 0, useSwGhash = 0;
  7024. ret = wc_AesGetKeySize(aes, &keySize);
  7025. if (ret != 0)
  7026. return ret;
  7027. #ifdef WOLFSSL_STM32_CUBEMX
  7028. ret = wc_Stm32_Aes_Init(aes, &hcryp);
  7029. if (ret != 0)
  7030. return ret;
  7031. #endif
  7032. XMEMSET(ctr, 0, AES_BLOCK_SIZE);
  7033. if (ivSz == GCM_NONCE_MID_SZ) {
  7034. byte* pCtr = (byte*)ctr;
  7035. XMEMCPY(ctr, iv, ivSz);
  7036. pCtr[AES_BLOCK_SIZE - 1] = 1;
  7037. }
  7038. else {
  7039. GHASH(&aes->gcm, NULL, 0, iv, ivSz, (byte*)ctr, AES_BLOCK_SIZE);
  7040. }
  7041. XMEMCPY(ctrInit, ctr, sizeof(ctr)); /* save off initial counter for GMAC */
  7042. /* Authentication buffer - must be 4-byte multiple zero padded */
  7043. authPadSz = authInSz % sizeof(word32);
  7044. if (authPadSz != 0) {
  7045. authPadSz = authInSz + sizeof(word32) - authPadSz;
  7046. if (authPadSz <= sizeof(authhdr)) {
  7047. authInPadded = (byte*)authhdr;
  7048. }
  7049. else {
  7050. authInPadded = (byte*)XMALLOC(authPadSz, aes->heap,
  7051. DYNAMIC_TYPE_TMP_BUFFER);
  7052. if (authInPadded == NULL) {
  7053. wolfSSL_CryptHwMutexUnLock();
  7054. return MEMORY_E;
  7055. }
  7056. wasAlloc = 1;
  7057. }
  7058. XMEMSET(authInPadded, 0, authPadSz);
  7059. XMEMCPY(authInPadded, authIn, authInSz);
  7060. } else {
  7061. authPadSz = authInSz;
  7062. authInPadded = (byte*)authIn;
  7063. }
  7064. /* for cases where hardware cannot be used for authTag calculate it */
  7065. /* if IV is not 12 calculate GHASH using software */
  7066. if (ivSz != GCM_NONCE_MID_SZ
  7067. #ifndef CRYP_HEADERWIDTHUNIT_BYTE
  7068. /* or hardware that does not support partial block */
  7069. || sz == 0 || partial != 0
  7070. #endif
  7071. #if !defined(CRYP_HEADERWIDTHUNIT_BYTE) && !defined(STM32_AESGCM_PARTIAL)
  7072. /* or authIn is not a multiple of 4 */
  7073. || authPadSz != authInSz
  7074. #endif
  7075. ) {
  7076. useSwGhash = 1;
  7077. }
  7078. /* Hardware requires counter + 1 */
  7079. IncrementGcmCounter((byte*)ctr);
  7080. ret = wolfSSL_CryptHwMutexLock();
  7081. if (ret != 0) {
  7082. return ret;
  7083. }
  7084. #ifdef WOLFSSL_STM32_CUBEMX
  7085. hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
  7086. hcryp.Init.Header = (STM_CRYPT_TYPE*)authInPadded;
  7087. #if defined(STM32_HAL_V2)
  7088. hcryp.Init.Algorithm = CRYP_AES_GCM;
  7089. #ifdef CRYP_HEADERWIDTHUNIT_BYTE
  7090. /* V2 with CRYP_HEADERWIDTHUNIT_BYTE uses byte size for header */
  7091. hcryp.Init.HeaderSize = authInSz;
  7092. #else
  7093. hcryp.Init.HeaderSize = authPadSz/sizeof(word32);
  7094. #endif
  7095. #ifdef CRYP_KEYIVCONFIG_ONCE
  7096. /* allows repeated calls to HAL_CRYP_Encrypt */
  7097. hcryp.Init.KeyIVConfigSkip = CRYP_KEYIVCONFIG_ONCE;
  7098. #endif
  7099. ByteReverseWords(ctr, ctr, AES_BLOCK_SIZE);
  7100. hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
  7101. HAL_CRYP_Init(&hcryp);
  7102. #ifndef CRYP_KEYIVCONFIG_ONCE
  7103. /* GCM payload phase - can handle partial blocks */
  7104. status = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in,
  7105. (blocks * AES_BLOCK_SIZE) + partial, (uint32_t*)out, STM32_HAL_TIMEOUT);
  7106. #else
  7107. /* GCM payload phase - blocks */
  7108. if (blocks) {
  7109. status = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in,
  7110. (blocks * AES_BLOCK_SIZE), (uint32_t*)out, STM32_HAL_TIMEOUT);
  7111. }
  7112. /* GCM payload phase - partial remainder */
  7113. if (status == HAL_OK && (partial != 0 || blocks == 0)) {
  7114. XMEMSET(partialBlock, 0, sizeof(partialBlock));
  7115. XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
  7116. status = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)partialBlock, partial,
  7117. (uint32_t*)partialBlock, STM32_HAL_TIMEOUT);
  7118. XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
  7119. }
  7120. #endif
  7121. if (status == HAL_OK && !useSwGhash) {
  7122. /* Compute the authTag */
  7123. status = HAL_CRYPEx_AESGCM_GenerateAuthTAG(&hcryp, (uint32_t*)tag,
  7124. STM32_HAL_TIMEOUT);
  7125. }
  7126. #elif defined(STM32_CRYPTO_AES_ONLY)
  7127. /* Set the CRYP parameters */
  7128. hcryp.Init.HeaderSize = authPadSz;
  7129. if (authPadSz == 0)
  7130. hcryp.Init.Header = NULL; /* cannot pass pointer here when authIn == 0 */
  7131. hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_GCM_GMAC;
  7132. hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
  7133. hcryp.Init.GCMCMACPhase = CRYP_INIT_PHASE;
  7134. HAL_CRYP_Init(&hcryp);
  7135. /* GCM init phase */
  7136. status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
  7137. if (status == HAL_OK) {
  7138. /* GCM header phase */
  7139. hcryp.Init.GCMCMACPhase = CRYP_HEADER_PHASE;
  7140. status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
  7141. }
  7142. if (status == HAL_OK) {
  7143. /* GCM payload phase - blocks */
  7144. hcryp.Init.GCMCMACPhase = CRYP_PAYLOAD_PHASE;
  7145. if (blocks) {
  7146. status = HAL_CRYPEx_AES_Auth(&hcryp, (byte*)in,
  7147. (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
  7148. }
  7149. }
  7150. if (status == HAL_OK && (partial != 0 || (sz > 0 && blocks == 0))) {
  7151. /* GCM payload phase - partial remainder */
  7152. XMEMSET(partialBlock, 0, sizeof(partialBlock));
  7153. XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
  7154. status = HAL_CRYPEx_AES_Auth(&hcryp, (uint8_t*)partialBlock, partial,
  7155. (uint8_t*)partialBlock, STM32_HAL_TIMEOUT);
  7156. XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
  7157. }
  7158. if (status == HAL_OK && !useSwGhash) {
  7159. /* GCM final phase */
  7160. hcryp.Init.GCMCMACPhase = CRYP_FINAL_PHASE;
  7161. status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, sz, (uint8_t*)tag, STM32_HAL_TIMEOUT);
  7162. }
  7163. #else
  7164. hcryp.Init.HeaderSize = authPadSz;
  7165. HAL_CRYP_Init(&hcryp);
  7166. if (blocks) {
  7167. /* GCM payload phase - blocks */
  7168. status = HAL_CRYPEx_AESGCM_Encrypt(&hcryp, (byte*)in,
  7169. (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
  7170. }
  7171. if (status == HAL_OK && (partial != 0 || blocks == 0)) {
  7172. /* GCM payload phase - partial remainder */
  7173. XMEMSET(partialBlock, 0, sizeof(partialBlock));
  7174. XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
  7175. status = HAL_CRYPEx_AESGCM_Encrypt(&hcryp, (uint8_t*)partialBlock, partial,
  7176. (uint8_t*)partialBlock, STM32_HAL_TIMEOUT);
  7177. XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
  7178. }
  7179. if (status == HAL_OK && !useSwGhash) {
  7180. /* Compute the authTag */
  7181. status = HAL_CRYPEx_AESGCM_Finish(&hcryp, sz, (uint8_t*)tag, STM32_HAL_TIMEOUT);
  7182. }
  7183. #endif
  7184. if (status != HAL_OK)
  7185. ret = AES_GCM_AUTH_E;
  7186. HAL_CRYP_DeInit(&hcryp);
  7187. #else /* Standard Peripheral Library */
  7188. ByteReverseWords(keyCopy, (word32*)aes->key, keySize);
  7189. status = CRYP_AES_GCM(MODE_ENCRYPT, (uint8_t*)ctr,
  7190. (uint8_t*)keyCopy, keySize * 8,
  7191. (uint8_t*)in, sz,
  7192. (uint8_t*)authInPadded, authInSz,
  7193. (uint8_t*)out, (uint8_t*)tag);
  7194. if (status != SUCCESS)
  7195. ret = AES_GCM_AUTH_E;
  7196. #endif /* WOLFSSL_STM32_CUBEMX */
  7197. wolfSSL_CryptHwMutexUnLock();
  7198. wc_Stm32_Aes_Cleanup();
  7199. if (ret == 0) {
  7200. /* return authTag */
  7201. if (authTag) {
  7202. if (useSwGhash) {
  7203. GHASH(&aes->gcm, authIn, authInSz, out, sz, authTag, authTagSz);
  7204. ret = wc_AesEncrypt(aes, (byte*)ctrInit, (byte*)tag);
  7205. if (ret == 0) {
  7206. xorbuf(authTag, tag, authTagSz);
  7207. }
  7208. }
  7209. else {
  7210. /* use hardware calculated tag */
  7211. XMEMCPY(authTag, tag, authTagSz);
  7212. }
  7213. }
  7214. }
  7215. /* Free memory */
  7216. if (wasAlloc) {
  7217. XFREE(authInPadded, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
  7218. }
  7219. return ret;
  7220. }
  7221. #endif /* STM32_CRYPTO_AES_GCM */
  7222. #ifdef WOLFSSL_AESNI
  7223. /* For performance reasons, this code needs to be not inlined. */
  7224. WARN_UNUSED_RESULT int AES_GCM_encrypt_C(
  7225. Aes* aes, byte* out, const byte* in, word32 sz,
  7226. const byte* iv, word32 ivSz,
  7227. byte* authTag, word32 authTagSz,
  7228. const byte* authIn, word32 authInSz);
  7229. #else
  7230. static
  7231. #endif
  7232. WARN_UNUSED_RESULT int AES_GCM_encrypt_C(
  7233. Aes* aes, byte* out, const byte* in, word32 sz,
  7234. const byte* iv, word32 ivSz,
  7235. byte* authTag, word32 authTagSz,
  7236. const byte* authIn, word32 authInSz)
  7237. {
  7238. int ret = 0;
  7239. word32 blocks = sz / AES_BLOCK_SIZE;
  7240. word32 partial = sz % AES_BLOCK_SIZE;
  7241. const byte* p = in;
  7242. byte* c = out;
  7243. ALIGN16 byte counter[AES_BLOCK_SIZE];
  7244. ALIGN16 byte initialCounter[AES_BLOCK_SIZE];
  7245. ALIGN16 byte scratch[AES_BLOCK_SIZE];
  7246. if (ivSz == GCM_NONCE_MID_SZ) {
  7247. /* Counter is IV with bottom 4 bytes set to: 0x00,0x00,0x00,0x01. */
  7248. XMEMCPY(counter, iv, ivSz);
  7249. XMEMSET(counter + GCM_NONCE_MID_SZ, 0,
  7250. AES_BLOCK_SIZE - GCM_NONCE_MID_SZ - 1);
  7251. counter[AES_BLOCK_SIZE - 1] = 1;
  7252. }
  7253. else {
  7254. /* Counter is GHASH of IV. */
  7255. #ifdef OPENSSL_EXTRA
  7256. word32 aadTemp = aes->gcm.aadLen;
  7257. aes->gcm.aadLen = 0;
  7258. #endif
  7259. GHASH(&aes->gcm, NULL, 0, iv, ivSz, counter, AES_BLOCK_SIZE);
  7260. #ifdef OPENSSL_EXTRA
  7261. aes->gcm.aadLen = aadTemp;
  7262. #endif
  7263. }
  7264. XMEMCPY(initialCounter, counter, AES_BLOCK_SIZE);
  7265. #ifdef WOLFSSL_PIC32MZ_CRYPT
  7266. if (blocks) {
  7267. /* use initial IV for HW, but don't use it below */
  7268. XMEMCPY(aes->reg, counter, AES_BLOCK_SIZE);
  7269. ret = wc_Pic32AesCrypt(
  7270. aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
  7271. out, in, (blocks * AES_BLOCK_SIZE),
  7272. PIC32_ENCRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_AES_GCM);
  7273. if (ret != 0)
  7274. return ret;
  7275. }
  7276. /* process remainder using partial handling */
  7277. #endif
  7278. #if defined(HAVE_AES_ECB) && !defined(WOLFSSL_PIC32MZ_CRYPT)
  7279. /* some hardware acceleration can gain performance from doing AES encryption
  7280. * of the whole buffer at once */
  7281. if (c != p && blocks > 0) { /* can not handle inline encryption */
  7282. while (blocks--) {
  7283. IncrementGcmCounter(counter);
  7284. XMEMCPY(c, counter, AES_BLOCK_SIZE);
  7285. c += AES_BLOCK_SIZE;
  7286. }
  7287. /* reset number of blocks and then do encryption */
  7288. blocks = sz / AES_BLOCK_SIZE;
  7289. wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
  7290. xorbuf(out, p, AES_BLOCK_SIZE * blocks);
  7291. p += AES_BLOCK_SIZE * blocks;
  7292. }
  7293. else
  7294. #endif /* HAVE_AES_ECB && !WOLFSSL_PIC32MZ_CRYPT */
  7295. {
  7296. while (blocks--) {
  7297. IncrementGcmCounter(counter);
  7298. #if !defined(WOLFSSL_PIC32MZ_CRYPT)
  7299. ret = wc_AesEncrypt(aes, counter, scratch);
  7300. if (ret != 0)
  7301. return ret;
  7302. xorbufout(c, scratch, p, AES_BLOCK_SIZE);
  7303. #endif
  7304. p += AES_BLOCK_SIZE;
  7305. c += AES_BLOCK_SIZE;
  7306. }
  7307. }
  7308. if (partial != 0) {
  7309. IncrementGcmCounter(counter);
  7310. ret = wc_AesEncrypt(aes, counter, scratch);
  7311. if (ret != 0)
  7312. return ret;
  7313. xorbufout(c, scratch, p, partial);
  7314. }
  7315. if (authTag) {
  7316. GHASH(&aes->gcm, authIn, authInSz, out, sz, authTag, authTagSz);
  7317. ret = wc_AesEncrypt(aes, initialCounter, scratch);
  7318. if (ret != 0)
  7319. return ret;
  7320. xorbuf(authTag, scratch, authTagSz);
  7321. #ifdef OPENSSL_EXTRA
  7322. if (!in && !sz)
  7323. /* store AAD size for next call */
  7324. aes->gcm.aadLen = authInSz;
  7325. #endif
  7326. }
  7327. return ret;
  7328. }
  7329. /* Software AES - GCM Encrypt */
int wc_AesGcmEncrypt(Aes* aes, byte* out, const byte* in, word32 sz,
                   const byte* iv, word32 ivSz,
                   byte* authTag, word32 authTagSz,
                   const byte* authIn, word32 authInSz)
{
    int ret;

    /* argument checks */
    if (aes == NULL || authTagSz > AES_BLOCK_SIZE || ivSz == 0) {
        return BAD_FUNC_ARG;
    }
    if (authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ) {
        WOLFSSL_MSG("GcmEncrypt authTagSz too small error");
        return BAD_FUNC_ARG;
    }

    /* Dispatch order: crypto callback, async hardware, SiLabs, STM32,
     * AES-NI, then the portable C implementation. */
#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret =
            wc_CryptoCb_AesGcmEncrypt(aes, out, in, sz, iv, ivSz, authTag,
                                      authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    /* if async and byte count above threshold */
    /* only 12-byte IV is supported in HW */
    if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
                    sz >= WC_ASYNC_THRESH_AES_GCM && ivSz == GCM_NONCE_MID_SZ) {
    #if defined(HAVE_CAVIUM)
        #ifdef HAVE_CAVIUM_V
        if (authInSz == 20) { /* Nitrox V GCM is only working with 20 byte AAD */
            return NitroxAesGcmEncrypt(aes, out, in, sz,
                (const byte*)aes->devKey, aes->keylen, iv, ivSz,
                authTag, authTagSz, authIn, authInSz);
        }
        #endif
    #elif defined(HAVE_INTEL_QA)
        return IntelQaSymAesGcmEncrypt(&aes->asyncDev, out, in, sz,
            (const byte*)aes->devKey, aes->keylen, iv, ivSz,
            authTag, authTagSz, authIn, authInSz);
    #elif defined(WOLFSSL_ASYNC_CRYPT_SW)
        if (wc_AsyncSwInit(&aes->asyncDev, ASYNC_SW_AES_GCM_ENCRYPT)) {
            WC_ASYNC_SW* sw = &aes->asyncDev.sw;
            sw->aes.aes = aes;
            sw->aes.out = out;
            sw->aes.in = in;
            sw->aes.sz = sz;
            sw->aes.iv = iv;
            sw->aes.ivSz = ivSz;
            sw->aes.authTag = authTag;
            sw->aes.authTagSz = authTagSz;
            sw->aes.authIn = authIn;
            sw->aes.authInSz = authInSz;
            return WC_PENDING_E;
        }
    #endif
    }
#endif /* WOLFSSL_ASYNC_CRYPT */

#ifdef WOLFSSL_SILABS_SE_ACCEL
    return wc_AesGcmEncrypt_silabs(
        aes, out, in, sz,
        iv, ivSz,
        authTag, authTagSz,
        authIn, authInSz);
#endif

#ifdef STM32_CRYPTO_AES_GCM
    return wc_AesGcmEncrypt_STM32(
        aes, out, in, sz, iv, ivSz,
        authTag, authTagSz, authIn, authInSz);
#endif /* STM32_CRYPTO_AES_GCM */

    /* Save/restore SIMD registers around the AES-NI assembly paths. */
    VECTOR_REGISTERS_PUSH;
#ifdef WOLFSSL_AESNI
    if (aes->use_aesni) {
#ifdef HAVE_INTEL_AVX2
        if (IS_INTEL_AVX2(intel_flags)) {
            AES_GCM_encrypt_avx2(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
                authTagSz, (const byte*)aes->key, (int)aes->rounds);
            ret = 0;
        }
        else
#endif
#if defined(HAVE_INTEL_AVX1)
        if (IS_INTEL_AVX1(intel_flags)) {
            AES_GCM_encrypt_avx1(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
                authTagSz, (const byte*)aes->key, (int)aes->rounds);
            ret = 0;
        } else
#endif
        {
            AES_GCM_encrypt_aesni(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
                authTagSz, (const byte*)aes->key, (int)aes->rounds);
            ret = 0;
        }
    }
    else
#endif /* WOLFSSL_AESNI */
    {
        /* Portable software fallback. */
        ret = AES_GCM_encrypt_C(aes, out, in, sz, iv, ivSz, authTag, authTagSz,
            authIn, authInSz);
    }
    VECTOR_REGISTERS_POP;

    return ret;
}
  7437. #endif
  7438. /* AES GCM Decrypt */
  7439. #if defined(HAVE_AES_DECRYPT) || defined(HAVE_AESGCM_DECRYPT)
  7440. #ifdef FREESCALE_LTC_AES_GCM
  7441. int wc_AesGcmDecrypt(Aes* aes, byte* out, const byte* in, word32 sz,
  7442. const byte* iv, word32 ivSz,
  7443. const byte* authTag, word32 authTagSz,
  7444. const byte* authIn, word32 authInSz)
  7445. {
  7446. int ret;
  7447. word32 keySize;
  7448. status_t status;
  7449. /* argument checks */
  7450. /* If the sz is non-zero, both in and out must be set. If sz is 0,
  7451. * in and out are don't cares, as this is is the GMAC case. */
  7452. if (aes == NULL || iv == NULL || (sz != 0 && (in == NULL || out == NULL)) ||
  7453. authTag == NULL || authTagSz > AES_BLOCK_SIZE || authTagSz == 0 ||
  7454. ivSz == 0) {
  7455. return BAD_FUNC_ARG;
  7456. }
  7457. ret = wc_AesGetKeySize(aes, &keySize);
  7458. if (ret != 0) {
  7459. return ret;
  7460. }
  7461. status = wolfSSL_CryptHwMutexLock();
  7462. if (status != 0)
  7463. return status;
  7464. status = LTC_AES_DecryptTagGcm(LTC_BASE, in, out, sz, iv, ivSz,
  7465. authIn, authInSz, (byte*)aes->key, keySize, authTag, authTagSz);
  7466. wolfSSL_CryptHwMutexUnLock();
  7467. return (status == kStatus_Success) ? 0 : AES_GCM_AUTH_E;
  7468. }
  7469. #else
  7470. #ifdef STM32_CRYPTO_AES_GCM
  7471. /* this function supports inline decrypt */
  7472. static WARN_UNUSED_RESULT int wc_AesGcmDecrypt_STM32(
  7473. Aes* aes, byte* out,
  7474. const byte* in, word32 sz,
  7475. const byte* iv, word32 ivSz,
  7476. const byte* authTag, word32 authTagSz,
  7477. const byte* authIn, word32 authInSz)
  7478. {
  7479. int ret;
  7480. #ifdef WOLFSSL_STM32_CUBEMX
  7481. int status = HAL_OK;
  7482. CRYP_HandleTypeDef hcryp;
  7483. word32 blocks = sz / AES_BLOCK_SIZE;
  7484. #else
  7485. int status = SUCCESS;
  7486. word32 keyCopy[AES_256_KEY_SIZE/sizeof(word32)];
  7487. #endif
  7488. word32 keySize;
  7489. word32 partial = sz % AES_BLOCK_SIZE;
  7490. word32 tag[AES_BLOCK_SIZE/sizeof(word32)];
  7491. word32 tagExpected[AES_BLOCK_SIZE/sizeof(word32)];
  7492. word32 partialBlock[AES_BLOCK_SIZE/sizeof(word32)];
  7493. word32 ctr[AES_BLOCK_SIZE/sizeof(word32)];
  7494. word32 authhdr[AES_BLOCK_SIZE/sizeof(word32)];
  7495. byte* authInPadded = NULL;
  7496. int authPadSz, wasAlloc = 0, tagComputed = 0;
  7497. ret = wc_AesGetKeySize(aes, &keySize);
  7498. if (ret != 0)
  7499. return ret;
  7500. #ifdef WOLFSSL_STM32_CUBEMX
  7501. ret = wc_Stm32_Aes_Init(aes, &hcryp);
  7502. if (ret != 0)
  7503. return ret;
  7504. #endif
  7505. XMEMSET(ctr, 0, AES_BLOCK_SIZE);
  7506. if (ivSz == GCM_NONCE_MID_SZ) {
  7507. byte* pCtr = (byte*)ctr;
  7508. XMEMCPY(ctr, iv, ivSz);
  7509. pCtr[AES_BLOCK_SIZE - 1] = 1;
  7510. }
  7511. else {
  7512. GHASH(&aes->gcm, NULL, 0, iv, ivSz, (byte*)ctr, AES_BLOCK_SIZE);
  7513. }
  7514. /* Make copy of expected authTag, which could get corrupted in some
  7515. * Cube HAL versions without proper partial block support.
  7516. * For TLS blocks the authTag is after the output buffer, so save it */
  7517. XMEMCPY(tagExpected, authTag, authTagSz);
  7518. /* Authentication buffer - must be 4-byte multiple zero padded */
  7519. authPadSz = authInSz % sizeof(word32);
  7520. if (authPadSz != 0) {
  7521. authPadSz = authInSz + sizeof(word32) - authPadSz;
  7522. }
  7523. else {
  7524. authPadSz = authInSz;
  7525. }
  7526. /* for cases where hardware cannot be used for authTag calculate it */
  7527. /* if IV is not 12 calculate GHASH using software */
  7528. if (ivSz != GCM_NONCE_MID_SZ
  7529. #ifndef CRYP_HEADERWIDTHUNIT_BYTE
  7530. /* or hardware that does not support partial block */
  7531. || sz == 0 || partial != 0
  7532. #endif
  7533. #if !defined(CRYP_HEADERWIDTHUNIT_BYTE) && !defined(STM32_AESGCM_PARTIAL)
  7534. /* or authIn is not a multiple of 4 */
  7535. || authPadSz != authInSz
  7536. #endif
  7537. ) {
  7538. GHASH(&aes->gcm, authIn, authInSz, in, sz, (byte*)tag, sizeof(tag));
  7539. ret = wc_AesEncrypt(aes, (byte*)ctr, (byte*)partialBlock);
  7540. if (ret != 0)
  7541. return ret;
  7542. xorbuf(tag, partialBlock, sizeof(tag));
  7543. tagComputed = 1;
  7544. }
  7545. /* if using hardware for authentication tag make sure its aligned and zero padded */
  7546. if (authPadSz != authInSz && !tagComputed) {
  7547. if (authPadSz <= sizeof(authhdr)) {
  7548. authInPadded = (byte*)authhdr;
  7549. }
  7550. else {
  7551. authInPadded = (byte*)XMALLOC(authPadSz, aes->heap,
  7552. DYNAMIC_TYPE_TMP_BUFFER);
  7553. if (authInPadded == NULL) {
  7554. wolfSSL_CryptHwMutexUnLock();
  7555. return MEMORY_E;
  7556. }
  7557. wasAlloc = 1;
  7558. }
  7559. XMEMSET(authInPadded, 0, authPadSz);
  7560. XMEMCPY(authInPadded, authIn, authInSz);
  7561. } else {
  7562. authInPadded = (byte*)authIn;
  7563. }
  7564. /* Hardware requires counter + 1 */
  7565. IncrementGcmCounter((byte*)ctr);
  7566. ret = wolfSSL_CryptHwMutexLock();
  7567. if (ret != 0) {
  7568. return ret;
  7569. }
  7570. #ifdef WOLFSSL_STM32_CUBEMX
  7571. hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
  7572. hcryp.Init.Header = (STM_CRYPT_TYPE*)authInPadded;
  7573. #if defined(STM32_HAL_V2)
  7574. hcryp.Init.Algorithm = CRYP_AES_GCM;
  7575. #ifdef CRYP_HEADERWIDTHUNIT_BYTE
  7576. /* V2 with CRYP_HEADERWIDTHUNIT_BYTE uses byte size for header */
  7577. hcryp.Init.HeaderSize = authInSz;
  7578. #else
  7579. hcryp.Init.HeaderSize = authPadSz/sizeof(word32);
  7580. #endif
  7581. #ifdef CRYP_KEYIVCONFIG_ONCE
  7582. /* allows repeated calls to HAL_CRYP_Decrypt */
  7583. hcryp.Init.KeyIVConfigSkip = CRYP_KEYIVCONFIG_ONCE;
  7584. #endif
  7585. ByteReverseWords(ctr, ctr, AES_BLOCK_SIZE);
  7586. hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
  7587. HAL_CRYP_Init(&hcryp);
  7588. #ifndef CRYP_KEYIVCONFIG_ONCE
  7589. status = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)in,
  7590. (blocks * AES_BLOCK_SIZE) + partial, (uint32_t*)out, STM32_HAL_TIMEOUT);
  7591. #else
  7592. /* GCM payload phase - blocks */
  7593. if (blocks) {
  7594. status = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)in,
  7595. (blocks * AES_BLOCK_SIZE), (uint32_t*)out, STM32_HAL_TIMEOUT);
  7596. }
  7597. /* GCM payload phase - partial remainder */
  7598. if (status == HAL_OK && (partial != 0 || blocks == 0)) {
  7599. XMEMSET(partialBlock, 0, sizeof(partialBlock));
  7600. XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
  7601. status = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)partialBlock, partial,
  7602. (uint32_t*)partialBlock, STM32_HAL_TIMEOUT);
  7603. XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
  7604. }
  7605. #endif
  7606. if (status == HAL_OK && !tagComputed) {
  7607. /* Compute the authTag */
  7608. status = HAL_CRYPEx_AESGCM_GenerateAuthTAG(&hcryp, (uint32_t*)tag,
  7609. STM32_HAL_TIMEOUT);
  7610. }
  7611. #elif defined(STM32_CRYPTO_AES_ONLY)
  7612. /* Set the CRYP parameters */
  7613. hcryp.Init.HeaderSize = authPadSz;
  7614. if (authPadSz == 0)
  7615. hcryp.Init.Header = NULL; /* cannot pass pointer when authIn == 0 */
  7616. hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_GCM_GMAC;
  7617. hcryp.Init.OperatingMode = CRYP_ALGOMODE_DECRYPT;
  7618. hcryp.Init.GCMCMACPhase = CRYP_INIT_PHASE;
  7619. HAL_CRYP_Init(&hcryp);
  7620. /* GCM init phase */
  7621. status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
  7622. if (status == HAL_OK) {
  7623. /* GCM header phase */
  7624. hcryp.Init.GCMCMACPhase = CRYP_HEADER_PHASE;
  7625. status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
  7626. }
  7627. if (status == HAL_OK) {
  7628. /* GCM payload phase - blocks */
  7629. hcryp.Init.GCMCMACPhase = CRYP_PAYLOAD_PHASE;
  7630. if (blocks) {
  7631. status = HAL_CRYPEx_AES_Auth(&hcryp, (byte*)in,
  7632. (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
  7633. }
  7634. }
  7635. if (status == HAL_OK && (partial != 0 || (sz > 0 && blocks == 0))) {
  7636. /* GCM payload phase - partial remainder */
  7637. XMEMSET(partialBlock, 0, sizeof(partialBlock));
  7638. XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
  7639. status = HAL_CRYPEx_AES_Auth(&hcryp, (byte*)partialBlock, partial,
  7640. (byte*)partialBlock, STM32_HAL_TIMEOUT);
  7641. XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
  7642. }
  7643. if (status == HAL_OK && tagComputed == 0) {
  7644. /* GCM final phase */
  7645. hcryp.Init.GCMCMACPhase = CRYP_FINAL_PHASE;
  7646. status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, sz, (byte*)tag, STM32_HAL_TIMEOUT);
  7647. }
  7648. #else
  7649. hcryp.Init.HeaderSize = authPadSz;
  7650. HAL_CRYP_Init(&hcryp);
  7651. if (blocks) {
  7652. /* GCM payload phase - blocks */
  7653. status = HAL_CRYPEx_AESGCM_Decrypt(&hcryp, (byte*)in,
  7654. (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
  7655. }
  7656. if (status == HAL_OK && (partial != 0 || blocks == 0)) {
  7657. /* GCM payload phase - partial remainder */
  7658. XMEMSET(partialBlock, 0, sizeof(partialBlock));
  7659. XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
  7660. status = HAL_CRYPEx_AESGCM_Decrypt(&hcryp, (byte*)partialBlock, partial,
  7661. (byte*)partialBlock, STM32_HAL_TIMEOUT);
  7662. XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
  7663. }
  7664. if (status == HAL_OK && tagComputed == 0) {
  7665. /* Compute the authTag */
  7666. status = HAL_CRYPEx_AESGCM_Finish(&hcryp, sz, (byte*)tag, STM32_HAL_TIMEOUT);
  7667. }
  7668. #endif
  7669. if (status != HAL_OK)
  7670. ret = AES_GCM_AUTH_E;
  7671. HAL_CRYP_DeInit(&hcryp);
  7672. #else /* Standard Peripheral Library */
  7673. ByteReverseWords(keyCopy, (word32*)aes->key, aes->keylen);
  7674. /* Input size and auth size need to be the actual sizes, even though
  7675. * they are not block aligned, because this length (in bits) is used
  7676. * in the final GHASH. */
  7677. XMEMSET(partialBlock, 0, sizeof(partialBlock)); /* use this to get tag */
  7678. status = CRYP_AES_GCM(MODE_DECRYPT, (uint8_t*)ctr,
  7679. (uint8_t*)keyCopy, keySize * 8,
  7680. (uint8_t*)in, sz,
  7681. (uint8_t*)authInPadded, authInSz,
  7682. (uint8_t*)out, (uint8_t*)partialBlock);
  7683. if (status != SUCCESS)
  7684. ret = AES_GCM_AUTH_E;
  7685. if (tagComputed == 0)
  7686. XMEMCPY(tag, partialBlock, authTagSz);
  7687. #endif /* WOLFSSL_STM32_CUBEMX */
  7688. wolfSSL_CryptHwMutexUnLock();
  7689. wc_Stm32_Aes_Cleanup();
  7690. /* Check authentication tag */
  7691. if (ConstantCompare((const byte*)tagExpected, (byte*)tag, authTagSz) != 0) {
  7692. ret = AES_GCM_AUTH_E;
  7693. }
  7694. /* Free memory */
  7695. if (wasAlloc) {
  7696. XFREE(authInPadded, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
  7697. }
  7698. return ret;
  7699. }
  7700. #endif /* STM32_CRYPTO_AES_GCM */
  7701. #ifdef WOLFSSL_AESNI
  7702. /* For performance reasons, this code needs to be not inlined. */
  7703. int WARN_UNUSED_RESULT AES_GCM_decrypt_C(
  7704. Aes* aes, byte* out, const byte* in, word32 sz,
  7705. const byte* iv, word32 ivSz,
  7706. const byte* authTag, word32 authTagSz,
  7707. const byte* authIn, word32 authInSz);
  7708. #else
  7709. static
  7710. #endif
  7711. int WARN_UNUSED_RESULT AES_GCM_decrypt_C(
  7712. Aes* aes, byte* out, const byte* in, word32 sz,
  7713. const byte* iv, word32 ivSz,
  7714. const byte* authTag, word32 authTagSz,
  7715. const byte* authIn, word32 authInSz)
  7716. {
  7717. int ret;
  7718. word32 blocks = sz / AES_BLOCK_SIZE;
  7719. word32 partial = sz % AES_BLOCK_SIZE;
  7720. const byte* c = in;
  7721. byte* p = out;
  7722. ALIGN16 byte counter[AES_BLOCK_SIZE];
  7723. ALIGN16 byte scratch[AES_BLOCK_SIZE];
  7724. ALIGN16 byte Tprime[AES_BLOCK_SIZE];
  7725. ALIGN16 byte EKY0[AES_BLOCK_SIZE];
  7726. sword32 res;
  7727. if (ivSz == GCM_NONCE_MID_SZ) {
  7728. /* Counter is IV with bottom 4 bytes set to: 0x00,0x00,0x00,0x01. */
  7729. XMEMCPY(counter, iv, ivSz);
  7730. XMEMSET(counter + GCM_NONCE_MID_SZ, 0,
  7731. AES_BLOCK_SIZE - GCM_NONCE_MID_SZ - 1);
  7732. counter[AES_BLOCK_SIZE - 1] = 1;
  7733. }
  7734. else {
  7735. /* Counter is GHASH of IV. */
  7736. #ifdef OPENSSL_EXTRA
  7737. word32 aadTemp = aes->gcm.aadLen;
  7738. aes->gcm.aadLen = 0;
  7739. #endif
  7740. GHASH(&aes->gcm, NULL, 0, iv, ivSz, counter, AES_BLOCK_SIZE);
  7741. #ifdef OPENSSL_EXTRA
  7742. aes->gcm.aadLen = aadTemp;
  7743. #endif
  7744. }
  7745. /* Calc the authTag again using received auth data and the cipher text */
  7746. GHASH(&aes->gcm, authIn, authInSz, in, sz, Tprime, sizeof(Tprime));
  7747. ret = wc_AesEncrypt(aes, counter, EKY0);
  7748. if (ret != 0)
  7749. return ret;
  7750. xorbuf(Tprime, EKY0, sizeof(Tprime));
  7751. #ifdef WC_AES_GCM_DEC_AUTH_EARLY
  7752. /* ConstantCompare returns the cumulative bitwise or of the bitwise xor of
  7753. * the pairwise bytes in the strings.
  7754. */
  7755. res = ConstantCompare(authTag, Tprime, authTagSz);
  7756. /* convert positive retval from ConstantCompare() to all-1s word, in
  7757. * constant time.
  7758. */
  7759. res = 0 - (sword32)(((word32)(0 - res)) >> 31U);
  7760. ret = res & AES_GCM_AUTH_E;
  7761. if (ret != 0)
  7762. return ret;
  7763. #endif
  7764. #ifdef OPENSSL_EXTRA
  7765. if (!out) {
  7766. /* authenticated, non-confidential data */
  7767. /* store AAD size for next call */
  7768. aes->gcm.aadLen = authInSz;
  7769. }
  7770. #endif
  7771. #if defined(WOLFSSL_PIC32MZ_CRYPT)
  7772. if (blocks) {
  7773. /* use initial IV for HW, but don't use it below */
  7774. XMEMCPY(aes->reg, counter, AES_BLOCK_SIZE);
  7775. ret = wc_Pic32AesCrypt(
  7776. aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
  7777. out, in, (blocks * AES_BLOCK_SIZE),
  7778. PIC32_DECRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_AES_GCM);
  7779. if (ret != 0)
  7780. return ret;
  7781. }
  7782. /* process remainder using partial handling */
  7783. #endif
  7784. #if defined(HAVE_AES_ECB) && !defined(WOLFSSL_PIC32MZ_CRYPT)
  7785. /* some hardware acceleration can gain performance from doing AES encryption
  7786. * of the whole buffer at once */
  7787. if (c != p && blocks > 0) { /* can not handle inline decryption */
  7788. while (blocks--) {
  7789. IncrementGcmCounter(counter);
  7790. XMEMCPY(p, counter, AES_BLOCK_SIZE);
  7791. p += AES_BLOCK_SIZE;
  7792. }
  7793. /* reset number of blocks and then do encryption */
  7794. blocks = sz / AES_BLOCK_SIZE;
  7795. wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
  7796. xorbuf(out, c, AES_BLOCK_SIZE * blocks);
  7797. c += AES_BLOCK_SIZE * blocks;
  7798. }
  7799. else
  7800. #endif /* HAVE_AES_ECB && !PIC32MZ */
  7801. {
  7802. while (blocks--) {
  7803. IncrementGcmCounter(counter);
  7804. #if !defined(WOLFSSL_PIC32MZ_CRYPT)
  7805. ret = wc_AesEncrypt(aes, counter, scratch);
  7806. if (ret != 0)
  7807. return ret;
  7808. xorbufout(p, scratch, c, AES_BLOCK_SIZE);
  7809. #endif
  7810. p += AES_BLOCK_SIZE;
  7811. c += AES_BLOCK_SIZE;
  7812. }
  7813. }
  7814. if (partial != 0) {
  7815. IncrementGcmCounter(counter);
  7816. ret = wc_AesEncrypt(aes, counter, scratch);
  7817. if (ret != 0)
  7818. return ret;
  7819. xorbuf(scratch, c, partial);
  7820. XMEMCPY(p, scratch, partial);
  7821. }
  7822. #ifndef WC_AES_GCM_DEC_AUTH_EARLY
  7823. /* ConstantCompare returns the cumulative bitwise or of the bitwise xor of
  7824. * the pairwise bytes in the strings.
  7825. */
  7826. res = ConstantCompare(authTag, Tprime, (int)authTagSz);
  7827. /* convert positive retval from ConstantCompare() to all-1s word, in
  7828. * constant time.
  7829. */
  7830. res = 0 - (sword32)(((word32)(0 - res)) >> 31U);
  7831. /* now use res as a mask for constant time return of ret, unless tag
  7832. * mismatch, whereupon AES_GCM_AUTH_E is returned.
  7833. */
  7834. ret = (ret & ~res) | (res & AES_GCM_AUTH_E);
  7835. #endif
  7836. return ret;
  7837. }
/* Software AES - GCM Decrypt */
/* Decrypt cipher text with AES-GCM and verify the authentication tag.
 *
 * Dispatches to crypto callbacks, async back-ends or hardware acceleration
 * when compiled in; otherwise uses the AES-NI assembly (when enabled at
 * runtime) or the C implementation AES_GCM_decrypt_C().
 *
 * @param [in, out] aes       AES object with the key set.
 * @param [out]     out       Buffer to hold plaintext. Don't-care when sz is 0
 *                            (GMAC-only use, per the argument check below).
 * @param [in]      in        Cipher text. Don't-care when sz is 0.
 * @param [in]      sz        Length of cipher text/plaintext in bytes.
 * @param [in]      iv        IV/nonce buffer. Must not be NULL.
 * @param [in]      ivSz      Length of IV/nonce in bytes. Must be non-zero.
 * @param [in]      authTag   Authentication tag to verify. Must not be NULL.
 * @param [in]      authTagSz Tag length; must be 1..AES_BLOCK_SIZE bytes.
 * @param [in]      authIn    Additional authenticated data (AAD).
 * @param [in]      authInSz  Length of AAD in bytes.
 * @return 0 on success, BAD_FUNC_ARG on invalid arguments, AES_GCM_AUTH_E on
 *         authentication failure, or another negative error code.
 */
int wc_AesGcmDecrypt(Aes* aes, byte* out, const byte* in, word32 sz,
                     const byte* iv, word32 ivSz,
                     const byte* authTag, word32 authTagSz,
                     const byte* authIn, word32 authInSz)
{
    int ret;
#ifdef WOLFSSL_AESNI
    /* Result flag written by the AES-NI assembly; 0 indicates tag mismatch
     * (see the res == 0 checks below). */
    int res = AES_GCM_AUTH_E;
#endif

    /* argument checks */
    /* If the sz is non-zero, both in and out must be set. If sz is 0,
     * in and out are don't cares, as this is is the GMAC case. */
    if (aes == NULL || iv == NULL || (sz != 0 && (in == NULL || out == NULL)) ||
        authTag == NULL || authTagSz > AES_BLOCK_SIZE || authTagSz == 0 ||
        ivSz == 0) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret =
            wc_CryptoCb_AesGcmDecrypt(aes, out, in, sz, iv, ivSz,
                authTag, authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    /* if async and byte count above threshold */
    /* only 12-byte IV is supported in HW */
    if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
        sz >= WC_ASYNC_THRESH_AES_GCM && ivSz == GCM_NONCE_MID_SZ) {
    #if defined(HAVE_CAVIUM)
        #ifdef HAVE_CAVIUM_V
        if (authInSz == 20) { /* Nitrox V GCM is only working with 20 byte AAD */
            return NitroxAesGcmDecrypt(aes, out, in, sz,
                (const byte*)aes->devKey, aes->keylen, iv, ivSz,
                authTag, authTagSz, authIn, authInSz);
        }
        #endif
    #elif defined(HAVE_INTEL_QA)
        return IntelQaSymAesGcmDecrypt(&aes->asyncDev, out, in, sz,
            (const byte*)aes->devKey, aes->keylen, iv, ivSz,
            authTag, authTagSz, authIn, authInSz);
    #elif defined(WOLFSSL_ASYNC_CRYPT_SW)
        if (wc_AsyncSwInit(&aes->asyncDev, ASYNC_SW_AES_GCM_DECRYPT)) {
            WC_ASYNC_SW* sw = &aes->asyncDev.sw;
            sw->aes.aes = aes;
            sw->aes.out = out;
            sw->aes.in = in;
            sw->aes.sz = sz;
            sw->aes.iv = iv;
            sw->aes.ivSz = ivSz;
            sw->aes.authTag = (byte*)authTag;
            sw->aes.authTagSz = authTagSz;
            sw->aes.authIn = authIn;
            sw->aes.authInSz = authInSz;
            return WC_PENDING_E;
        }
    #endif
    }
#endif /* WOLFSSL_ASYNC_CRYPT */

#ifdef WOLFSSL_SILABS_SE_ACCEL
    /* Hardware path handles the whole operation; no fall-through. */
    return wc_AesGcmDecrypt_silabs(
        aes, out, in, sz, iv, ivSz,
        authTag, authTagSz, authIn, authInSz);
#endif

#ifdef STM32_CRYPTO_AES_GCM
    /* The STM standard peripheral library API's doesn't support partial blocks */
    return wc_AesGcmDecrypt_STM32(
        aes, out, in, sz, iv, ivSz,
        authTag, authTagSz, authIn, authInSz);
#endif /* STM32_CRYPTO_AES_GCM */

    /* Save SIMD registers around the AES-NI assembly / C fallback. */
    VECTOR_REGISTERS_PUSH;

#ifdef WOLFSSL_AESNI
    if (aes->use_aesni) {
#ifdef HAVE_INTEL_AVX2
        if (IS_INTEL_AVX2(intel_flags)) {
            AES_GCM_decrypt_avx2(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
                authTagSz, (byte*)aes->key, (int)aes->rounds, &res);
            if (res == 0)
                ret = AES_GCM_AUTH_E;
            else
                ret = 0;
        }
        else
#endif
#if defined(HAVE_INTEL_AVX1)
        if (IS_INTEL_AVX1(intel_flags)) {
            AES_GCM_decrypt_avx1(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
                authTagSz, (byte*)aes->key, (int)aes->rounds, &res);
            if (res == 0)
                ret = AES_GCM_AUTH_E;
            else
                ret = 0;
        }
        else
#endif
        {
            AES_GCM_decrypt_aesni(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
                authTagSz, (byte*)aes->key, (int)aes->rounds, &res);
            if (res == 0)
                ret = AES_GCM_AUTH_E;
            else
                ret = 0;
        }
    }
    else
#endif /* WOLFSSL_AESNI */
    {
        /* Portable C fallback. */
        ret = AES_GCM_decrypt_C(aes, out, in, sz, iv, ivSz, authTag, authTagSz,
            authIn, authInSz);
    }

    VECTOR_REGISTERS_POP;

    return ret;
}
  7958. #endif
  7959. #endif /* HAVE_AES_DECRYPT || HAVE_AESGCM_DECRYPT */
  7960. #ifdef WOLFSSL_AESGCM_STREAM
  7961. #if defined(WC_AES_C_DYNAMIC_FALLBACK) && defined(WOLFSSL_AESNI)
  7962. #error "AES-GCM streaming with AESNI is incompatible with WC_AES_C_DYNAMIC_FALLBACK."
  7963. #endif
/* Initialize the AES GCM cipher with an IV. C implementation.
 *
 * Derives the initial counter block Y0 from the IV (direct copy for 12-byte
 * nonces, GHASH otherwise), caches it for the CTR cipher, and pre-encrypts
 * it for the final tag computation.
 *
 * @param [in, out] aes   AES object.
 * @param [in]      iv    IV/nonce buffer.
 * @param [in]      ivSz  Length of IV/nonce data.
 * @return 0 on success, or an error code from wc_AesEncrypt().
 */
static WARN_UNUSED_RESULT int AesGcmInit_C(Aes* aes, const byte* iv, word32 ivSz)
{
    ALIGN32 byte counter[AES_BLOCK_SIZE];
    int ret;

#ifdef WOLFSSL_AESNI
    /* This init selects the C implementation for subsequent calls. */
    aes->use_aesni = 0;
#endif

    if (ivSz == GCM_NONCE_MID_SZ) {
        /* Counter is IV with bottom 4 bytes set to: 0x00,0x00,0x00,0x01. */
        XMEMCPY(counter, iv, ivSz);
        XMEMSET(counter + GCM_NONCE_MID_SZ, 0,
                AES_BLOCK_SIZE - GCM_NONCE_MID_SZ - 1);
        counter[AES_BLOCK_SIZE - 1] = 1;
    }
    else {
        /* Counter is GHASH of IV. */
#ifdef OPENSSL_EXTRA
        word32 aadTemp = aes->gcm.aadLen;
        aes->gcm.aadLen = 0;
#endif
        GHASH(&aes->gcm, NULL, 0, iv, ivSz, counter, AES_BLOCK_SIZE);
#ifdef OPENSSL_EXTRA
        aes->gcm.aadLen = aadTemp;
#endif
    }

    /* Copy in the counter for use with cipher. */
    XMEMCPY(AES_COUNTER(aes), counter, AES_BLOCK_SIZE);
    /* Encrypt initial counter into a buffer for GCM. */
    ret = wc_AesEncrypt(aes, counter, AES_INITCTR(aes));
    if (ret != 0)
        return ret;
    /* Reset state fields. */
    aes->over = 0;
    aes->aSz = 0;
    aes->cSz = 0;
    /* Initialization for GHASH. */
    GHASH_INIT(aes);

    return 0;
}
  8009. /* Update the AES GCM cipher with data. C implementation.
  8010. *
  8011. * Only enciphers data.
  8012. *
  8013. * @param [in, out] aes AES object.
  8014. * @param [in] out Cipher text or plaintext buffer.
  8015. * @param [in] in Plaintext or cipher text buffer.
  8016. * @param [in] sz Length of data.
  8017. */
  8018. static WARN_UNUSED_RESULT int AesGcmCryptUpdate_C(
  8019. Aes* aes, byte* out, const byte* in, word32 sz)
  8020. {
  8021. word32 blocks;
  8022. word32 partial;
  8023. int ret;
  8024. /* Check if previous encrypted block was not used up. */
  8025. if (aes->over > 0) {
  8026. byte pSz = AES_BLOCK_SIZE - aes->over;
  8027. if (pSz > sz) pSz = (byte)sz;
  8028. /* Use some/all of last encrypted block. */
  8029. xorbufout(out, AES_LASTBLOCK(aes) + aes->over, in, pSz);
  8030. aes->over = (aes->over + pSz) & (AES_BLOCK_SIZE - 1);
  8031. /* Some data used. */
  8032. sz -= pSz;
  8033. in += pSz;
  8034. out += pSz;
  8035. }
  8036. /* Calculate the number of blocks needing to be encrypted and any leftover.
  8037. */
  8038. blocks = sz / AES_BLOCK_SIZE;
  8039. partial = sz & (AES_BLOCK_SIZE - 1);
  8040. #if defined(HAVE_AES_ECB)
  8041. /* Some hardware acceleration can gain performance from doing AES encryption
  8042. * of the whole buffer at once.
  8043. * Overwrites the cipher text before using plaintext - no inline encryption.
  8044. */
  8045. if ((out != in) && blocks > 0) {
  8046. word32 b;
  8047. /* Place incrementing counter blocks into cipher text. */
  8048. for (b = 0; b < blocks; b++) {
  8049. IncrementGcmCounter(AES_COUNTER(aes));
  8050. XMEMCPY(out + b * AES_BLOCK_SIZE, AES_COUNTER(aes), AES_BLOCK_SIZE);
  8051. }
  8052. /* Encrypt counter blocks. */
  8053. wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
  8054. /* XOR in plaintext. */
  8055. xorbuf(out, in, AES_BLOCK_SIZE * blocks);
  8056. /* Skip over processed data. */
  8057. in += AES_BLOCK_SIZE * blocks;
  8058. out += AES_BLOCK_SIZE * blocks;
  8059. }
  8060. else
  8061. #endif /* HAVE_AES_ECB */
  8062. {
  8063. /* Encrypt block by block. */
  8064. while (blocks--) {
  8065. ALIGN32 byte scratch[AES_BLOCK_SIZE];
  8066. IncrementGcmCounter(AES_COUNTER(aes));
  8067. /* Encrypt counter into a buffer. */
  8068. ret = wc_AesEncrypt(aes, AES_COUNTER(aes), scratch);
  8069. if (ret != 0)
  8070. return ret;
  8071. /* XOR plain text into encrypted counter into cipher text buffer. */
  8072. xorbufout(out, scratch, in, AES_BLOCK_SIZE);
  8073. /* Data complete. */
  8074. in += AES_BLOCK_SIZE;
  8075. out += AES_BLOCK_SIZE;
  8076. }
  8077. }
  8078. if (partial != 0) {
  8079. /* Generate an extra block and use up as much as needed. */
  8080. IncrementGcmCounter(AES_COUNTER(aes));
  8081. /* Encrypt counter into cache. */
  8082. ret = wc_AesEncrypt(aes, AES_COUNTER(aes), AES_LASTBLOCK(aes));
  8083. if (ret != 0)
  8084. return ret;
  8085. /* XOR plain text into encrypted counter into cipher text buffer. */
  8086. xorbufout(out, AES_LASTBLOCK(aes), in, partial);
  8087. /* Keep amount of encrypted block used. */
  8088. aes->over = (byte)partial;
  8089. }
  8090. return 0;
  8091. }
/* Calculates authentication tag for AES GCM. C implementation.
 *
 * Finalizes the GHASH with the accumulated lengths and XORs in the encrypted
 * initial counter block E(K, Y0) computed at init.
 *
 * @param [in, out] aes       AES object.
 * @param [out]     authTag   Buffer to store authentication tag in.
 * @param [in]      authTagSz Length of tag to create.
 * @return 0 (always succeeds).
 */
static WARN_UNUSED_RESULT int AesGcmFinal_C(
    Aes* aes, byte* authTag, word32 authTagSz)
{
    /* Calculate authentication tag. */
    GHASH_FINAL(aes, authTag, authTagSz);
    /* XOR in as much of encrypted counter as is required. */
    xorbuf(authTag, AES_INITCTR(aes), authTagSz);
#ifdef OPENSSL_EXTRA
    /* store AAD size for next call */
    aes->gcm.aadLen = aes->aSz;
#endif
    /* Zeroize last block to protect sensitive data. */
    ForceZero(AES_LASTBLOCK(aes), AES_BLOCK_SIZE);

    return 0;
}
  8113. #ifdef WOLFSSL_AESNI
  8114. #ifdef __cplusplus
  8115. extern "C" {
  8116. #endif
  8117. /* Assembly code implementations in: aes_gcm_asm.S */
  8118. #ifdef HAVE_INTEL_AVX2
  8119. extern void AES_GCM_init_avx2(const unsigned char* key, int nr,
  8120. const unsigned char* ivec, unsigned int ibytes, unsigned char* h,
  8121. unsigned char* counter, unsigned char* initCtr);
  8122. extern void AES_GCM_aad_update_avx2(const unsigned char* addt,
  8123. unsigned int abytes, unsigned char* tag, unsigned char* h);
  8124. extern void AES_GCM_encrypt_block_avx2(const unsigned char* key, int nr,
  8125. unsigned char* out, const unsigned char* in, unsigned char* counter);
  8126. extern void AES_GCM_ghash_block_avx2(const unsigned char* data,
  8127. unsigned char* tag, unsigned char* h);
  8128. extern void AES_GCM_encrypt_update_avx2(const unsigned char* key, int nr,
  8129. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  8130. unsigned char* tag, unsigned char* h, unsigned char* counter);
  8131. extern void AES_GCM_encrypt_final_avx2(unsigned char* tag,
  8132. unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  8133. unsigned int abytes, unsigned char* h, unsigned char* initCtr);
  8134. #endif
  8135. #ifdef HAVE_INTEL_AVX1
  8136. extern void AES_GCM_init_avx1(const unsigned char* key, int nr,
  8137. const unsigned char* ivec, unsigned int ibytes, unsigned char* h,
  8138. unsigned char* counter, unsigned char* initCtr);
  8139. extern void AES_GCM_aad_update_avx1(const unsigned char* addt,
  8140. unsigned int abytes, unsigned char* tag, unsigned char* h);
  8141. extern void AES_GCM_encrypt_block_avx1(const unsigned char* key, int nr,
  8142. unsigned char* out, const unsigned char* in, unsigned char* counter);
  8143. extern void AES_GCM_ghash_block_avx1(const unsigned char* data,
  8144. unsigned char* tag, unsigned char* h);
  8145. extern void AES_GCM_encrypt_update_avx1(const unsigned char* key, int nr,
  8146. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  8147. unsigned char* tag, unsigned char* h, unsigned char* counter);
  8148. extern void AES_GCM_encrypt_final_avx1(unsigned char* tag,
  8149. unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  8150. unsigned int abytes, unsigned char* h, unsigned char* initCtr);
  8151. #endif
  8152. extern void AES_GCM_init_aesni(const unsigned char* key, int nr,
  8153. const unsigned char* ivec, unsigned int ibytes, unsigned char* h,
  8154. unsigned char* counter, unsigned char* initCtr);
  8155. extern void AES_GCM_aad_update_aesni(const unsigned char* addt,
  8156. unsigned int abytes, unsigned char* tag, unsigned char* h);
  8157. extern void AES_GCM_encrypt_block_aesni(const unsigned char* key, int nr,
  8158. unsigned char* out, const unsigned char* in, unsigned char* counter);
  8159. extern void AES_GCM_ghash_block_aesni(const unsigned char* data,
  8160. unsigned char* tag, unsigned char* h);
  8161. extern void AES_GCM_encrypt_update_aesni(const unsigned char* key, int nr,
  8162. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  8163. unsigned char* tag, unsigned char* h, unsigned char* counter);
  8164. extern void AES_GCM_encrypt_final_aesni(unsigned char* tag,
  8165. unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  8166. unsigned int abytes, unsigned char* h, unsigned char* initCtr);
  8167. #ifdef __cplusplus
  8168. } /* extern "C" */
  8169. #endif
/* Initialize the AES GCM cipher with an IV. AES-NI implementations.
 *
 * Resets the streaming state and calls the AVX2, AVX1 or plain AES-NI
 * assembly to compute H, the initial counter and E(K, Y0), then marks the
 * object so later update/final calls use the AES-NI path.
 *
 * @param [in, out] aes   AES object.
 * @param [in]      iv    IV/nonce buffer.
 * @param [in]      ivSz  Length of IV/nonce data.
 * @return 0 (always succeeds).
 */
static WARN_UNUSED_RESULT int AesGcmInit_aesni(
    Aes* aes, const byte* iv, word32 ivSz)
{
    ASSERT_SAVED_VECTOR_REGISTERS();

    /* Reset state fields. */
    aes->aSz = 0;
    aes->cSz = 0;
    /* Set tag to all zeros as initial value. */
    XMEMSET(AES_TAG(aes), 0, AES_BLOCK_SIZE);
    /* Reset counts of AAD and cipher text. */
    aes->aOver = 0;
    aes->cOver = 0;

#ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        AES_GCM_init_avx2((byte*)aes->key, (int)aes->rounds, iv, ivSz,
                          aes->gcm.H, AES_COUNTER(aes), AES_INITCTR(aes));
    }
    else
#endif
#ifdef HAVE_INTEL_AVX1
    if (IS_INTEL_AVX1(intel_flags)) {
        AES_GCM_init_avx1((byte*)aes->key, (int)aes->rounds, iv, ivSz,
                          aes->gcm.H, AES_COUNTER(aes), AES_INITCTR(aes));
    }
    else
#endif
    {
        AES_GCM_init_aesni((byte*)aes->key, (int)aes->rounds, iv, ivSz,
                           aes->gcm.H, AES_COUNTER(aes), AES_INITCTR(aes));
    }
    /* Route subsequent streaming calls through the AES-NI implementations. */
    aes->use_aesni = 1;

    return 0;
}
/* Update the AES GCM for encryption with authentication data.
 *
 * Implementation uses AVX2, AVX1 or straight AES-NI optimized assembly code.
 *
 * Buffers a partial AAD block in AES_LASTGBLOCK(aes) across calls; when endA
 * is set, any remaining partial block is zero-padded and hashed.
 *
 * @param [in, out] aes   AES object.
 * @param [in]      a     Buffer holding authentication data.
 * @param [in]      aSz   Length of authentication data in bytes.
 * @param [in]      endA  Whether no more authentication data is expected.
 * @return 0 (always succeeds).
 */
static WARN_UNUSED_RESULT int AesGcmAadUpdate_aesni(
    Aes* aes, const byte* a, word32 aSz, int endA)
{
    word32 blocks;
    int partial;

    ASSERT_SAVED_VECTOR_REGISTERS();

    if (aSz != 0 && a != NULL) {
        /* Total count of AAD updated. */
        aes->aSz += aSz;
        /* Check if we have unprocessed data. */
        if (aes->aOver > 0) {
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->aOver;
            if (sz > aSz) {
                sz = (byte)aSz;
            }
            /* Copy extra into last GHASH block array and update count. */
            XMEMCPY(AES_LASTGBLOCK(aes) + aes->aOver, a, sz);
            aes->aOver += sz;
            if (aes->aOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
            #ifdef HAVE_INTEL_AVX2
                if (IS_INTEL_AVX2(intel_flags)) {
                    AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                             aes->gcm.H);
                }
                else
            #endif
            #ifdef HAVE_INTEL_AVX1
                if (IS_INTEL_AVX1(intel_flags)) {
                    AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                             aes->gcm.H);
                }
                else
            #endif
                {
                    AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                              aes->gcm.H);
                }
                /* Reset count. */
                aes->aOver = 0;
            }
            /* Used up some data. */
            aSz -= sz;
            a += sz;
        }

        /* Calculate number of blocks of AAD and the leftover. */
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        if (blocks > 0) {
            /* GHASH full blocks now. */
        #ifdef HAVE_INTEL_AVX2
            if (IS_INTEL_AVX2(intel_flags)) {
                AES_GCM_aad_update_avx2(a, blocks * AES_BLOCK_SIZE,
                                        AES_TAG(aes), aes->gcm.H);
            }
            else
        #endif
        #ifdef HAVE_INTEL_AVX1
            if (IS_INTEL_AVX1(intel_flags)) {
                AES_GCM_aad_update_avx1(a, blocks * AES_BLOCK_SIZE,
                                        AES_TAG(aes), aes->gcm.H);
            }
            else
        #endif
            {
                AES_GCM_aad_update_aesni(a, blocks * AES_BLOCK_SIZE,
                                         AES_TAG(aes), aes->gcm.H);
            }
            /* Skip over to end of AAD blocks. */
            a += blocks * AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Cache the partial block. */
            XMEMCPY(AES_LASTGBLOCK(aes), a, (size_t)partial);
            aes->aOver = (byte)partial;
        }
    }
    if (endA && (aes->aOver > 0)) {
        /* No more AAD coming and we have a partial block. */
        /* Fill the rest of the block with zeros. */
        XMEMSET(AES_LASTGBLOCK(aes) + aes->aOver, 0,
                AES_BLOCK_SIZE - aes->aOver);
        /* GHASH last AAD block. */
    #ifdef HAVE_INTEL_AVX2
        if (IS_INTEL_AVX2(intel_flags)) {
            AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                     aes->gcm.H);
        }
        else
    #endif
    #ifdef HAVE_INTEL_AVX1
        if (IS_INTEL_AVX1(intel_flags)) {
            AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                     aes->gcm.H);
        }
        else
    #endif
        {
            AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                      aes->gcm.H);
        }
        /* Clear partial count for next time through. */
        aes->aOver = 0;
    }

    return 0;
}
/* Update the AES GCM for encryption with data and/or authentication data.
 *
 * Implementation uses AVX2, AVX1 or straight AES-NI optimized assembly code.
 *
 * AAD is hashed first via AesGcmAadUpdate_aesni(); then the plaintext is
 * encrypted and the resulting cipher text GHASHed. A partial cipher-text
 * block (key stream already applied) is carried in AES_LASTGBLOCK(aes).
 *
 * @param [in, out] aes  AES object.
 * @param [out]     c    Buffer to hold cipher text.
 * @param [in]      p    Buffer holding plaintext.
 * @param [in]      cSz  Length of cipher text/plaintext in bytes.
 * @param [in]      a    Buffer holding authentication data.
 * @param [in]      aSz  Length of authentication data in bytes.
 * @return 0 on success, or an error from AesGcmAadUpdate_aesni().
 */
static WARN_UNUSED_RESULT int AesGcmEncryptUpdate_aesni(
    Aes* aes, byte* c, const byte* p, word32 cSz, const byte* a, word32 aSz)
{
    word32 blocks;
    int partial;
    int ret;

    ASSERT_SAVED_VECTOR_REGISTERS();

    /* Hash in A, the Authentication Data */
    /* Passing cipher text presence as endA: AAD must be complete before any
     * cipher text is processed. */
    ret = AesGcmAadUpdate_aesni(aes, a, aSz, (cSz > 0) && (c != NULL));
    if (ret != 0)
        return ret;

    /* Encrypt plaintext and Hash in C, the Cipher text */
    if (cSz != 0 && c != NULL) {
        /* Update count of cipher text we have hashed. */
        aes->cSz += cSz;
        if (aes->cOver > 0) {
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->cOver;
            if (sz > cSz) {
                sz = (byte)cSz;
            }
            /* Encrypt some of the plaintext. */
            xorbuf(AES_LASTGBLOCK(aes) + aes->cOver, p, sz);
            XMEMCPY(c, AES_LASTGBLOCK(aes) + aes->cOver, sz);
            /* Update count of unused encrypted counter. */
            aes->cOver += sz;
            if (aes->cOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
            #ifdef HAVE_INTEL_AVX2
                if (IS_INTEL_AVX2(intel_flags)) {
                    AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                             aes->gcm.H);
                }
                else
            #endif
            #ifdef HAVE_INTEL_AVX1
                if (IS_INTEL_AVX1(intel_flags)) {
                    AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                             aes->gcm.H);
                }
                else
            #endif
                {
                    AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                              aes->gcm.H);
                }
                /* Reset count. */
                aes->cOver = 0;
            }
            /* Used up some data. */
            cSz -= sz;
            p += sz;
            c += sz;
        }

        /* Calculate number of blocks of plaintext and the leftover. */
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        if (blocks > 0) {
            /* Encrypt and GHASH full blocks now. */
        #ifdef HAVE_INTEL_AVX2
            if (IS_INTEL_AVX2(intel_flags)) {
                AES_GCM_encrypt_update_avx2((byte*)aes->key, (int)aes->rounds,
                    c, p, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->gcm.H,
                    AES_COUNTER(aes));
            }
            else
        #endif
        #ifdef HAVE_INTEL_AVX1
            if (IS_INTEL_AVX1(intel_flags)) {
                AES_GCM_encrypt_update_avx1((byte*)aes->key, (int)aes->rounds,
                    c, p, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->gcm.H,
                    AES_COUNTER(aes));
            }
            else
        #endif
            {
                AES_GCM_encrypt_update_aesni((byte*)aes->key, (int)aes->rounds,
                    c, p, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->gcm.H,
                    AES_COUNTER(aes));
            }
            /* Skip over to end of blocks. */
            p += blocks * AES_BLOCK_SIZE;
            c += blocks * AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Encrypt the counter - XOR in zeros as proxy for plaintext. */
            XMEMSET(AES_LASTGBLOCK(aes), 0, AES_BLOCK_SIZE);
        #ifdef HAVE_INTEL_AVX2
            if (IS_INTEL_AVX2(intel_flags)) {
                AES_GCM_encrypt_block_avx2((byte*)aes->key, (int)aes->rounds,
                    AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
            }
            else
        #endif
        #ifdef HAVE_INTEL_AVX1
            if (IS_INTEL_AVX1(intel_flags)) {
                AES_GCM_encrypt_block_avx1((byte*)aes->key, (int)aes->rounds,
                    AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
            }
            else
        #endif
            {
                AES_GCM_encrypt_block_aesni((byte*)aes->key, (int)aes->rounds,
                    AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
            }
            /* XOR the remaining plaintext to calculate cipher text.
             * Keep cipher text for GHASH of last partial block.
             */
            xorbuf(AES_LASTGBLOCK(aes), p, (word32)partial);
            XMEMCPY(c, AES_LASTGBLOCK(aes), (size_t)partial);
            /* Update count of the block used. */
            aes->cOver = (byte)partial;
        }
    }

    return 0;
}
/* Finalize the AES GCM for encryption and calculate the authentication tag.
 *
 * Calls AVX2, AVX1 or straight AES-NI optimized assembly code.
 *
 * Any buffered partial block (AAD or cipher text — at most one can be
 * pending, since AAD completes before cipher text starts) is zero-padded
 * and hashed before the final tag computation.
 *
 * @param [in, out] aes       AES object.
 * @param [in]      authTag   Buffer to hold authentication tag.
 * @param [in]      authTagSz Length of authentication tag in bytes.
 * @return 0 on success.
 */
static WARN_UNUSED_RESULT int AesGcmEncryptFinal_aesni(
    Aes* aes, byte* authTag, word32 authTagSz)
{
    /* AAD block incomplete when > 0 */
    byte over = aes->aOver;

    ASSERT_SAVED_VECTOR_REGISTERS();

    if (aes->cOver > 0) {
        /* Cipher text block incomplete. */
        over = aes->cOver;
    }
    if (over > 0) {
        /* Fill the rest of the block with zeros. */
        XMEMSET(AES_LASTGBLOCK(aes) + over, 0, AES_BLOCK_SIZE - over);
        /* GHASH last cipher block. */
    #ifdef HAVE_INTEL_AVX2
        if (IS_INTEL_AVX2(intel_flags)) {
            AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                     aes->gcm.H);
        }
        else
    #endif
    #ifdef HAVE_INTEL_AVX1
        if (IS_INTEL_AVX1(intel_flags)) {
            AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                     aes->gcm.H);
        }
        else
    #endif
        {
            AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
                                      aes->gcm.H);
        }
    }
    /* Calculate the authentication tag. */
#ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        AES_GCM_encrypt_final_avx2(AES_TAG(aes), authTag, authTagSz, aes->cSz,
                                   aes->aSz, aes->gcm.H, AES_INITCTR(aes));
    }
    else
#endif
#ifdef HAVE_INTEL_AVX1
    if (IS_INTEL_AVX1(intel_flags)) {
        AES_GCM_encrypt_final_avx1(AES_TAG(aes), authTag, authTagSz, aes->cSz,
                                   aes->aSz, aes->gcm.H, AES_INITCTR(aes));
    }
    else
#endif
    {
        AES_GCM_encrypt_final_aesni(AES_TAG(aes), authTag, authTagSz, aes->cSz,
                                    aes->aSz, aes->gcm.H, AES_INITCTR(aes));
    }
    return 0;
}
  8515. #if defined(HAVE_AES_DECRYPT) || defined(HAVE_AESGCM_DECRYPT)
  8516. #ifdef __cplusplus
  8517. extern "C" {
  8518. #endif
  8519. /* Assembly code implementations in: aes_gcm_asm.S and aes_gcm_x86_asm.S */
  8520. #ifdef HAVE_INTEL_AVX2
  8521. extern void AES_GCM_decrypt_update_avx2(const unsigned char* key, int nr,
  8522. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  8523. unsigned char* tag, unsigned char* h, unsigned char* counter);
  8524. extern void AES_GCM_decrypt_final_avx2(unsigned char* tag,
  8525. const unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  8526. unsigned int abytes, unsigned char* h, unsigned char* initCtr, int* res);
  8527. #endif
  8528. #ifdef HAVE_INTEL_AVX1
  8529. extern void AES_GCM_decrypt_update_avx1(const unsigned char* key, int nr,
  8530. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  8531. unsigned char* tag, unsigned char* h, unsigned char* counter);
  8532. extern void AES_GCM_decrypt_final_avx1(unsigned char* tag,
  8533. const unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  8534. unsigned int abytes, unsigned char* h, unsigned char* initCtr, int* res);
  8535. #endif
  8536. extern void AES_GCM_decrypt_update_aesni(const unsigned char* key, int nr,
  8537. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  8538. unsigned char* tag, unsigned char* h, unsigned char* counter);
  8539. extern void AES_GCM_decrypt_final_aesni(unsigned char* tag,
  8540. const unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  8541. unsigned int abytes, unsigned char* h, unsigned char* initCtr, int* res);
  8542. #ifdef __cplusplus
  8543. } /* extern "C" */
  8544. #endif
/* Update the AES GCM for decryption with data and/or authentication data.
 *
 * Calls AVX2, AVX1 or straight AES-NI optimized assembly code.
 * All AAD must be supplied before any cipher text; the AAD update call below
 * is told whether cipher text follows so it can finalize the AAD block.
 *
 * Streaming state used: aes->cSz (total cipher text hashed), aes->cOver
 * (bytes buffered in an incomplete block), AES_LASTBLOCK (cipher text bytes
 * pending GHASH) and AES_LASTGBLOCK (encrypted counter bytes not yet used).
 *
 * @param [in, out] aes  AES object.
 * @param [out]     p    Buffer to hold plaintext.
 * @param [in]      c    Buffer holding cipher text.
 * @param [in]      cSz  Length of cipher text/plaintext in bytes.
 * @param [in]      a    Buffer holding authentication data.
 * @param [in]      aSz  Length of authentication data in bytes.
 * @return  0 on success.
 * @return  Error code from AAD update on failure.
 */
static WARN_UNUSED_RESULT int AesGcmDecryptUpdate_aesni(
    Aes* aes, byte* p, const byte* c, word32 cSz, const byte* a, word32 aSz)
{
    word32 blocks;
    int partial;
    int ret;

    ASSERT_SAVED_VECTOR_REGISTERS();

    /* Hash in A, the Authentication Data */
    ret = AesGcmAadUpdate_aesni(aes, a, aSz, (cSz > 0) && (c != NULL));
    if (ret != 0)
        return ret;

    /* Hash in C, the Cipher text, and decrypt. */
    if (cSz != 0 && p != NULL) {
        /* Update count of cipher text we have hashed. */
        aes->cSz += cSz;
        if (aes->cOver > 0) {
            /* A previous update left a partial block; fill it first. */
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->cOver;
            if (sz > cSz) {
                sz = (byte)cSz;
            }
            /* Keep a copy of the cipher text for GHASH. */
            XMEMCPY(AES_LASTBLOCK(aes) + aes->cOver, c, sz);
            /* Decrypt some of the cipher text: XOR with the leftover
             * encrypted-counter bytes cached in AES_LASTGBLOCK. */
            xorbuf(AES_LASTGBLOCK(aes) + aes->cOver, c, sz);
            XMEMCPY(p, AES_LASTGBLOCK(aes) + aes->cOver, sz);
            /* Update count of unused encrypted counter. */
            aes->cOver += sz;
            if (aes->cOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
#ifdef HAVE_INTEL_AVX2
                if (IS_INTEL_AVX2(intel_flags)) {
                    AES_GCM_ghash_block_avx2(AES_LASTBLOCK(aes), AES_TAG(aes),
                        aes->gcm.H);
                }
                else
#endif
#ifdef HAVE_INTEL_AVX1
                if (IS_INTEL_AVX1(intel_flags)) {
                    AES_GCM_ghash_block_avx1(AES_LASTBLOCK(aes), AES_TAG(aes),
                        aes->gcm.H);
                }
                else
#endif
                {
                    AES_GCM_ghash_block_aesni(AES_LASTBLOCK(aes), AES_TAG(aes),
                        aes->gcm.H);
                }
                /* Reset count. */
                aes->cOver = 0;
            }
            /* Used up some data. */
            cSz -= sz;
            c += sz;
            p += sz;
        }

        /* Calculate number of blocks of plaintext and the leftover. */
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        if (blocks > 0) {
            /* Decrypt and GHASH full blocks now. */
#ifdef HAVE_INTEL_AVX2
            if (IS_INTEL_AVX2(intel_flags)) {
                AES_GCM_decrypt_update_avx2((byte*)aes->key, (int)aes->rounds,
                    p, c, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->gcm.H,
                    AES_COUNTER(aes));
            }
            else
#endif
#ifdef HAVE_INTEL_AVX1
            if (IS_INTEL_AVX1(intel_flags)) {
                AES_GCM_decrypt_update_avx1((byte*)aes->key, (int)aes->rounds,
                    p, c, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->gcm.H,
                    AES_COUNTER(aes));
            }
            else
#endif
            {
                AES_GCM_decrypt_update_aesni((byte*)aes->key, (int)aes->rounds,
                    p, c, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->gcm.H,
                    AES_COUNTER(aes));
            }
            /* Skip over to end of blocks. */
            c += blocks * AES_BLOCK_SIZE;
            p += blocks * AES_BLOCK_SIZE;
        }

        if (partial != 0) {
            /* Encrypt the counter - XOR in zeros as proxy for cipher text.
             * The full encrypted counter block is cached in AES_LASTGBLOCK
             * so a following update (or final) can consume the rest. */
            XMEMSET(AES_LASTGBLOCK(aes), 0, AES_BLOCK_SIZE);
#ifdef HAVE_INTEL_AVX2
            if (IS_INTEL_AVX2(intel_flags)) {
                AES_GCM_encrypt_block_avx2((byte*)aes->key, (int)aes->rounds,
                    AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
            }
            else
#endif
#ifdef HAVE_INTEL_AVX1
            if (IS_INTEL_AVX1(intel_flags)) {
                AES_GCM_encrypt_block_avx1((byte*)aes->key, (int)aes->rounds,
                    AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
            }
            else
#endif
            {
                AES_GCM_encrypt_block_aesni((byte*)aes->key, (int)aes->rounds,
                    AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
            }
            /* Keep cipher text for GHASH of last partial block. */
            XMEMCPY(AES_LASTBLOCK(aes), c, (size_t)partial);
            /* XOR the remaining cipher text to calculate plaintext. */
            xorbuf(AES_LASTGBLOCK(aes), c, (word32)partial);
            XMEMCPY(p, AES_LASTGBLOCK(aes), (size_t)partial);
            /* Update count of the block used. */
            aes->cOver = (byte)partial;
        }
    }

    return 0;
}
/* Finalize the AES GCM for decryption and check the authentication tag.
 *
 * Calls AVX2, AVX1 or straight AES-NI optimized assembly code.
 *
 * Any buffered partial block (AAD or cipher text) is zero-padded and hashed
 * before the lengths block is folded in and the tag compared.
 *
 * @param [in, out] aes        AES object.
 * @param [in]      authTag    Buffer holding authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 * @return  0 on success.
 * @return  AES_GCM_AUTH_E when authentication tag doesn't match calculated
 *          value.
 */
static WARN_UNUSED_RESULT int AesGcmDecryptFinal_aesni(
    Aes* aes, const byte* authTag, word32 authTagSz)
{
    int ret = 0;
    /* Comparison result from the assembly final: 0 means tag mismatch. */
    int res;
    /* AAD block incomplete when > 0 */
    byte over = aes->aOver;
    byte *lastBlock = AES_LASTGBLOCK(aes);

    ASSERT_SAVED_VECTOR_REGISTERS();

    if (aes->cOver > 0) {
        /* Cipher text block incomplete - it takes precedence since cipher
         * text always follows AAD. */
        over = aes->cOver;
        lastBlock = AES_LASTBLOCK(aes);
    }
    if (over > 0) {
        /* Zeroize the unused part of the block. */
        XMEMSET(lastBlock + over, 0, AES_BLOCK_SIZE - over);
        /* Hash the last block of cipher text. */
#ifdef HAVE_INTEL_AVX2
        if (IS_INTEL_AVX2(intel_flags)) {
            AES_GCM_ghash_block_avx2(lastBlock, AES_TAG(aes), aes->gcm.H);
        }
        else
#endif
#ifdef HAVE_INTEL_AVX1
        if (IS_INTEL_AVX1(intel_flags)) {
            AES_GCM_ghash_block_avx1(lastBlock, AES_TAG(aes), aes->gcm.H);
        }
        else
#endif
        {
            AES_GCM_ghash_block_aesni(lastBlock, AES_TAG(aes), aes->gcm.H);
        }
    }
    /* Calculate and compare the authentication tag. */
#ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        AES_GCM_decrypt_final_avx2(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->gcm.H, AES_INITCTR(aes), &res);
    }
    else
#endif
#ifdef HAVE_INTEL_AVX1
    if (IS_INTEL_AVX1(intel_flags)) {
        AES_GCM_decrypt_final_avx1(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->gcm.H, AES_INITCTR(aes), &res);
    }
    else
#endif
    {
        AES_GCM_decrypt_final_aesni(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->gcm.H, AES_INITCTR(aes), &res);
    }
    /* Return error code when calculated doesn't match input. */
    if (res == 0) {
        ret = AES_GCM_AUTH_E;
    }
    return ret;
}
  8742. #endif /* HAVE_AES_DECRYPT || HAVE_AESGCM_DECRYPT */
  8743. #endif /* WOLFSSL_AESNI */
/* Initialize an AES GCM cipher for encryption or decryption.
 *
 * Must call wc_AesInit() before calling this function.
 * Call wc_AesGcmSetIV() before calling this function to generate part of IV.
 * Call wc_AesGcmSetExtIV() before calling this function to cache IV.
 *
 * @param [in, out] aes   AES object.
 * @param [in]      key   Buffer holding key. NULL with len 0 keeps the
 *                        previously set key.
 * @param [in]      len   Length of key in bytes.
 * @param [in]      iv    Buffer holding IV/nonce. NULL uses the cached IV
 *                        when one has been set.
 * @param [in]      ivSz  Length of IV/nonce in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL, or the IV is NULL and no previous IV has been set.
 * @return  MEMORY_E when dynamic memory allocation fails. (WOLFSSL_SMALL_STACK)
 */
int wc_AesGcmInit(Aes* aes, const byte* key, word32 len, const byte* iv,
    word32 ivSz)
{
    int ret = 0;

    /* Check validity of parameters. */
    if ((aes == NULL) || ((len > 0) && (key == NULL)) ||
            ((ivSz == 0) && (iv != NULL)) ||
            ((ivSz > 0) && (iv == NULL))) {
        ret = BAD_FUNC_ARG;
    }

#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_AESNI)
    if ((ret == 0) && (aes->streamData == NULL)) {
        /* Allocate buffers for streaming. */
        aes->streamData = (byte*)XMALLOC(5 * AES_BLOCK_SIZE, aes->heap,
            DYNAMIC_TYPE_AES);
        if (aes->streamData == NULL) {
            ret = MEMORY_E;
        }
    }
#endif

    /* Set the key if passed in. */
    if ((ret == 0) && (key != NULL)) {
        ret = wc_AesGcmSetKey(aes, key, len);
    }

    if (ret == 0) {
        /* Set the IV passed in if it is smaller than a block. */
        if ((iv != NULL) && (ivSz <= AES_BLOCK_SIZE)) {
            /* XMEMMOVE rather than XMEMCPY - presumably because iv may alias
             * aes->reg (the cached IV); confirm against callers. */
            XMEMMOVE((byte*)aes->reg, iv, ivSz);
            aes->nonceSz = ivSz;
        }
        /* No IV passed in, check for cached IV. */
        if ((iv == NULL) && (aes->nonceSz != 0)) {
            /* Use the cached copy. */
            iv = (byte*)aes->reg;
            ivSz = aes->nonceSz;
        }
        if (iv != NULL) {
            /* Initialize with the IV. */
            VECTOR_REGISTERS_PUSH;
#ifdef WOLFSSL_AESNI
            if (aes->use_aesni) {
                ret = AesGcmInit_aesni(aes, iv, ivSz);
            }
            else
#endif
            {
                ret = AesGcmInit_C(aes, iv, ivSz);
            }
            VECTOR_REGISTERS_POP;
            if (ret == 0)
                aes->nonceSet = 1;
        }
    }

    return ret;
}
  8815. /* Initialize an AES GCM cipher for encryption.
  8816. *
  8817. * Must call wc_AesInit() before calling this function.
  8818. *
  8819. * @param [in, out] aes AES object.
  8820. * @param [in] key Buffer holding key.
  8821. * @param [in] len Length of key in bytes.
  8822. * @param [in] iv Buffer holding IV/nonce.
  8823. * @param [in] ivSz Length of IV/nonce in bytes.
  8824. * @return 0 on success.
  8825. * @return BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
  8826. * is NULL, or the IV is NULL and no previous IV has been set.
  8827. */
  8828. int wc_AesGcmEncryptInit(Aes* aes, const byte* key, word32 len, const byte* iv,
  8829. word32 ivSz)
  8830. {
  8831. return wc_AesGcmInit(aes, key, len, iv, ivSz);
  8832. }
  8833. /* Initialize an AES GCM cipher for encryption. Get IV.
  8834. *
  8835. * Must call wc_AesGcmSetIV() to generate part of IV before calling this
  8836. * function.
  8837. * Must call wc_AesInit() before calling this function.
  8838. *
  8839. * See wc_AesGcmEncrypt_ex() for non-streaming version of getting IV out.
  8840. *
  8841. * @param [in, out] aes AES object.
  8842. * @param [in] key Buffer holding key.
  8843. * @param [in] len Length of key in bytes.
  8844. * @param [in] iv Buffer holding IV/nonce.
  8845. * @param [in] ivSz Length of IV/nonce in bytes.
  8846. * @return 0 on success.
  8847. * @return BAD_FUNC_ARG when aes is NULL, key length is non-zero but key
  8848. * is NULL, or the IV is NULL or ivOutSz is not the same as cached
  8849. * nonce size.
  8850. */
  8851. int wc_AesGcmEncryptInit_ex(Aes* aes, const byte* key, word32 len, byte* ivOut,
  8852. word32 ivOutSz)
  8853. {
  8854. int ret;
  8855. /* Check validity of parameters. */
  8856. if ((aes == NULL) || (ivOut == NULL) || (ivOutSz != aes->nonceSz)) {
  8857. ret = BAD_FUNC_ARG;
  8858. }
  8859. else {
  8860. /* Copy out the IV including generated part for decryption. */
  8861. XMEMCPY(ivOut, aes->reg, ivOutSz);
  8862. /* Initialize AES GCM cipher with key and cached Iv. */
  8863. ret = wc_AesGcmInit(aes, key, len, NULL, 0);
  8864. }
  8865. return ret;
  8866. }
/* Update the AES GCM for encryption with data and/or authentication data.
 *
 * All the AAD must be passed to update before the plaintext.
 * Last part of AAD can be passed with first part of plaintext.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes       AES object.
 * @param [out]     out       Buffer to hold cipher text.
 * @param [in]      in        Buffer holding plaintext.
 * @param [in]      sz        Length of plaintext in bytes.
 * @param [in]      authIn    Buffer holding authentication data.
 * @param [in]      authInSz  Length of authentication data in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL.
 * @return  MISSING_KEY / MISSING_IV when key or nonce not yet set.
 * @return  AES_GCM_OVERFLOW_E when the invocation counter wraps.
 */
int wc_AesGcmEncryptUpdate(Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* authIn, word32 authInSz)
{
    int ret = 0;

    /* Check validity of parameters. */
    if ((aes == NULL) || ((authInSz > 0) && (authIn == NULL)) || ((sz > 0) &&
            ((out == NULL) || (in == NULL)))) {
        ret = BAD_FUNC_ARG;
    }
    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }
    /* First update of this operation (no AAD or cipher text hashed yet) with
     * an internally managed counter: bump the 64-bit invocation counter and
     * detect wrap-around. */
    if ((ret == 0) && aes->ctrSet && (aes->aSz == 0) && (aes->cSz == 0)) {
        aes->invokeCtr[0]++;
        if (aes->invokeCtr[0] == 0) {
            aes->invokeCtr[1]++;
            if (aes->invokeCtr[1] == 0)
                ret = AES_GCM_OVERFLOW_E;
        }
    }
    if (ret == 0) {
        /* Encrypt with AAD and/or plaintext. */
        VECTOR_REGISTERS_PUSH;
#ifdef WOLFSSL_AESNI
        if (aes->use_aesni) {
            ret = AesGcmEncryptUpdate_aesni(aes, out, in, sz, authIn, authInSz);
        }
        else
#endif
        {
            /* Encrypt the plaintext. */
            ret = AesGcmCryptUpdate_C(aes, out, in, sz);
            if (ret == 0) {
                /* Update the authentication tag with any authentication data
                 * and the new cipher text. For encryption the cipher text is
                 * produced first and then hashed. */
                GHASH_UPDATE(aes, authIn, authInSz, out, sz);
            }
        }
        VECTOR_REGISTERS_POP;
    }

    return ret;
}
/* Finalize the AES GCM for encryption and return the authentication tag.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes        AES object.
 * @param [out]     authTag    Buffer to hold authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 *                             Must be 1..AES_BLOCK_SIZE.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes or authTag is NULL or tag size invalid.
 * @return  MISSING_KEY / MISSING_IV when key or nonce not yet set.
 */
int wc_AesGcmEncryptFinal(Aes* aes, byte* authTag, word32 authTagSz)
{
    int ret = 0;

    /* Check validity of parameters. */
    if ((aes == NULL) || (authTag == NULL) || (authTagSz > AES_BLOCK_SIZE) ||
            (authTagSz == 0)) {
        ret = BAD_FUNC_ARG;
    }
    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }
    if (ret == 0) {
        /* Calculate authentication tag. */
        VECTOR_REGISTERS_PUSH;
#ifdef WOLFSSL_AESNI
        if (aes->use_aesni) {
            ret = AesGcmEncryptFinal_aesni(aes, authTag, authTagSz);
        }
        else
#endif
        {
            ret = AesGcmFinal_C(aes, authTag, authTagSz);
        }
        VECTOR_REGISTERS_POP;
    }
    if ((ret == 0) && aes->ctrSet) {
        /* Advance the cached nonce so a subsequent operation using the
         * internal counter gets a fresh IV. */
        IncCtr((byte*)aes->reg, aes->nonceSz);
    }

    return ret;
}
  8977. #if defined(HAVE_AES_DECRYPT) || defined(HAVE_AESGCM_DECRYPT)
  8978. /* Initialize an AES GCM cipher for decryption.
  8979. *
  8980. * Must call wc_AesInit() before calling this function.
  8981. *
  8982. * Call wc_AesGcmSetExtIV() before calling this function to use FIPS external IV
  8983. * instead.
  8984. *
  8985. * @param [in, out] aes AES object.
  8986. * @param [in] key Buffer holding key.
  8987. * @param [in] len Length of key in bytes.
  8988. * @param [in] iv Buffer holding IV/nonce.
  8989. * @param [in] ivSz Length of IV/nonce in bytes.
  8990. * @return 0 on success.
  8991. * @return BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
  8992. * is NULL, or the IV is NULL and no previous IV has been set.
  8993. */
  8994. int wc_AesGcmDecryptInit(Aes* aes, const byte* key, word32 len, const byte* iv,
  8995. word32 ivSz)
  8996. {
  8997. return wc_AesGcmInit(aes, key, len, iv, ivSz);
  8998. }
/* Update the AES GCM for decryption with data and/or authentication data.
 *
 * All the AAD must be passed to update before the cipher text.
 * Last part of AAD can be passed with first part of cipher text.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes       AES object.
 * @param [out]     out       Buffer to hold plaintext.
 * @param [in]      in        Buffer holding cipher text.
 * @param [in]      sz        Length of cipher text in bytes.
 * @param [in]      authIn    Buffer holding authentication data.
 * @param [in]      authInSz  Length of authentication data in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL.
 * @return  MISSING_KEY / MISSING_IV when key or nonce not yet set.
 */
int wc_AesGcmDecryptUpdate(Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* authIn, word32 authInSz)
{
    int ret = 0;

    /* Check validity of parameters. */
    if ((aes == NULL) || ((authInSz > 0) && (authIn == NULL)) || ((sz > 0) &&
            ((out == NULL) || (in == NULL)))) {
        ret = BAD_FUNC_ARG;
    }
    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }
    if (ret == 0) {
        /* Decrypt with AAD and/or cipher text. */
        VECTOR_REGISTERS_PUSH;
#ifdef WOLFSSL_AESNI
        if (aes->use_aesni) {
            ret = AesGcmDecryptUpdate_aesni(aes, out, in, sz, authIn, authInSz);
        }
        else
#endif
        {
            /* Update the authentication tag with any authentication data and
             * cipher text. Note: for decryption the GHASH input is the cipher
             * text, so hashing happens before decrypting (the reverse order
             * of the encrypt path). */
            GHASH_UPDATE(aes, authIn, authInSz, in, sz);
            /* Decrypt the cipher text. */
            ret = AesGcmCryptUpdate_C(aes, out, in, sz);
        }
        VECTOR_REGISTERS_POP;
    }

    return ret;
}
/* Finalize the AES GCM for decryption and check the authentication tag.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes        AES object.
 * @param [in]      authTag    Buffer holding authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 *                             Must be 1..AES_BLOCK_SIZE.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes or authTag is NULL or tag size invalid.
 * @return  MISSING_KEY / MISSING_IV when key or nonce not yet set.
 * @return  AES_GCM_AUTH_E when authentication tag doesn't match calculated
 *          value.
 */
int wc_AesGcmDecryptFinal(Aes* aes, const byte* authTag, word32 authTagSz)
{
    int ret = 0;

    /* Check validity of parameters. */
    if ((aes == NULL) || (authTag == NULL) || (authTagSz > AES_BLOCK_SIZE) ||
            (authTagSz == 0)) {
        ret = BAD_FUNC_ARG;
    }
    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }
    if (ret == 0) {
        /* Calculate authentication tag and compare with one passed in. */
        VECTOR_REGISTERS_PUSH;
#ifdef WOLFSSL_AESNI
        if (aes->use_aesni) {
            ret = AesGcmDecryptFinal_aesni(aes, authTag, authTagSz);
        }
        else
#endif
        {
            ALIGN32 byte calcTag[AES_BLOCK_SIZE];
            /* Calculate authentication tag. */
            ret = AesGcmFinal_C(aes, calcTag, authTagSz);
            if (ret == 0) {
                /* Check calculated tag matches the one passed in.
                 * ConstantCompare keeps the comparison timing-independent. */
                if (ConstantCompare(authTag, calcTag, (int)authTagSz) != 0) {
                    ret = AES_GCM_AUTH_E;
                }
            }
        }
        VECTOR_REGISTERS_POP;
    }

    return ret;
}
  9104. #endif /* HAVE_AES_DECRYPT || HAVE_AESGCM_DECRYPT */
  9105. #endif /* WOLFSSL_AESGCM_STREAM */
  9106. #endif /* WOLFSSL_XILINX_CRYPT */
  9107. #endif /* end of block for AESGCM implementation selection */
  9108. /* Common to all, abstract functions that build off of lower level AESGCM
  9109. * functions */
  9110. #ifndef WC_NO_RNG
  9111. static WARN_UNUSED_RESULT WC_INLINE int CheckAesGcmIvSize(int ivSz) {
  9112. return (ivSz == GCM_NONCE_MIN_SZ ||
  9113. ivSz == GCM_NONCE_MID_SZ ||
  9114. ivSz == GCM_NONCE_MAX_SZ);
  9115. }
  9116. int wc_AesGcmSetExtIV(Aes* aes, const byte* iv, word32 ivSz)
  9117. {
  9118. int ret = 0;
  9119. if (aes == NULL || iv == NULL || !CheckAesGcmIvSize((int)ivSz)) {
  9120. ret = BAD_FUNC_ARG;
  9121. }
  9122. if (ret == 0) {
  9123. XMEMCPY((byte*)aes->reg, iv, ivSz);
  9124. /* If the IV is 96, allow for a 2^64 invocation counter.
  9125. * For any other size for the nonce, limit the invocation
  9126. * counter to 32-bits. (SP 800-38D 8.3) */
  9127. aes->invokeCtr[0] = 0;
  9128. aes->invokeCtr[1] = (ivSz == GCM_NONCE_MID_SZ) ? 0 : 0xFFFFFFFF;
  9129. #ifdef WOLFSSL_AESGCM_STREAM
  9130. aes->ctrSet = 1;
  9131. #endif
  9132. aes->nonceSz = ivSz;
  9133. }
  9134. return ret;
  9135. }
  9136. int wc_AesGcmSetIV(Aes* aes, word32 ivSz,
  9137. const byte* ivFixed, word32 ivFixedSz,
  9138. WC_RNG* rng)
  9139. {
  9140. int ret = 0;
  9141. if (aes == NULL || rng == NULL || !CheckAesGcmIvSize((int)ivSz) ||
  9142. (ivFixed == NULL && ivFixedSz != 0) ||
  9143. (ivFixed != NULL && ivFixedSz != AES_IV_FIXED_SZ)) {
  9144. ret = BAD_FUNC_ARG;
  9145. }
  9146. if (ret == 0) {
  9147. byte* iv = (byte*)aes->reg;
  9148. if (ivFixedSz)
  9149. XMEMCPY(iv, ivFixed, ivFixedSz);
  9150. ret = wc_RNG_GenerateBlock(rng, iv + ivFixedSz, ivSz - ivFixedSz);
  9151. }
  9152. if (ret == 0) {
  9153. /* If the IV is 96, allow for a 2^64 invocation counter.
  9154. * For any other size for the nonce, limit the invocation
  9155. * counter to 32-bits. (SP 800-38D 8.3) */
  9156. aes->invokeCtr[0] = 0;
  9157. aes->invokeCtr[1] = (ivSz == GCM_NONCE_MID_SZ) ? 0 : 0xFFFFFFFF;
  9158. #ifdef WOLFSSL_AESGCM_STREAM
  9159. aes->ctrSet = 1;
  9160. #endif
  9161. aes->nonceSz = ivSz;
  9162. }
  9163. return ret;
  9164. }
/* One-shot AES-GCM encrypt that also returns the IV used.
 *
 * The IV must have been set up with wc_AesGcmSetIV()/wc_AesGcmSetExtIV();
 * it is copied to ivOut, used for the encryption, then incremented for the
 * next invocation. The invocation counter enforces the SP 800-38D limit.
 *
 * @param [in, out] aes        AES object.
 * @param [out]     out        Buffer to hold cipher text (may be NULL when
 *                             sz is 0, e.g. GMAC-only use).
 * @param [in]      in         Buffer holding plaintext.
 * @param [in]      sz         Length of plaintext in bytes.
 * @param [out]     ivOut      Buffer receiving the IV used.
 * @param [in]      ivOutSz    Must equal the cached nonce size.
 * @param [out]     authTag    Buffer to hold authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 * @param [in]      authIn     Buffer holding authentication data.
 * @param [in]      authInSz   Length of authentication data in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG on invalid arguments.
 * @return  AES_GCM_OVERFLOW_E when the invocation counter is exhausted.
 */
int wc_AesGcmEncrypt_ex(Aes* aes, byte* out, const byte* in, word32 sz,
                        byte* ivOut, word32 ivOutSz,
                        byte* authTag, word32 authTagSz,
                        const byte* authIn, word32 authInSz)
{
    int ret = 0;

    if (aes == NULL || (sz != 0 && (in == NULL || out == NULL)) ||
            ivOut == NULL || ivOutSz != aes->nonceSz ||
            (authIn == NULL && authInSz != 0)) {
        ret = BAD_FUNC_ARG;
    }
    if (ret == 0) {
        /* Bump the 64-bit invocation counter; fail when it wraps. */
        aes->invokeCtr[0]++;
        if (aes->invokeCtr[0] == 0) {
            aes->invokeCtr[1]++;
            if (aes->invokeCtr[1] == 0)
                ret = AES_GCM_OVERFLOW_E;
        }
    }
    if (ret == 0) {
        /* Hand the IV back to the caller for transmission/decryption. */
        XMEMCPY(ivOut, aes->reg, ivOutSz);
        ret = wc_AesGcmEncrypt(aes, out, in, sz,
                               (byte*)aes->reg, ivOutSz,
                               authTag, authTagSz,
                               authIn, authInSz);
        if (ret == 0)
            /* Increment the cached IV for the next invocation. */
            IncCtr((byte*)aes->reg, ivOutSz);
    }

    return ret;
}
/* Compute a GMAC (GCM with no plaintext) over authIn, generating the IV.
 *
 * A fresh AES context is created, the IV is generated with the supplied RNG,
 * and a zero-length GCM encryption produces the tag. The generated IV is
 * written back to the caller through iv.
 *
 * @param [in]  key        Buffer holding key.
 * @param [in]  keySz      Length of key in bytes.
 * @param [out] iv         Buffer receiving the generated IV/nonce.
 * @param [in]  ivSz       Length of IV/nonce in bytes.
 * @param [in]  authIn     Buffer holding authentication data.
 * @param [in]  authInSz   Length of authentication data in bytes.
 * @param [out] authTag    Buffer to hold authentication tag.
 * @param [in]  authTagSz  Length of authentication tag in bytes.
 * @param [in]  rng        Random number generator for the IV.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG on invalid arguments.
 * @return  MEMORY_E when allocation fails. (WOLFSSL_SMALL_STACK)
 */
int wc_Gmac(const byte* key, word32 keySz, byte* iv, word32 ivSz,
            const byte* authIn, word32 authInSz,
            byte* authTag, word32 authTagSz, WC_RNG* rng)
{
#ifdef WOLFSSL_SMALL_STACK
    Aes *aes = NULL;
#else
    Aes aes[1];
#endif
    int ret;

    if (key == NULL || iv == NULL || (authIn == NULL && authInSz != 0) ||
            authTag == NULL || authTagSz == 0 || rng == NULL) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_SMALL_STACK
    if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
                              DYNAMIC_TYPE_AES)) == NULL)
        return MEMORY_E;
#endif

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret == 0) {
        ret = wc_AesGcmSetKey(aes, key, keySz);
        if (ret == 0)
            ret = wc_AesGcmSetIV(aes, ivSz, NULL, 0, rng);
        if (ret == 0)
            /* Zero-length encrypt: tag-only operation (GMAC). */
            ret = wc_AesGcmEncrypt_ex(aes, NULL, NULL, 0, iv, ivSz,
                                      authTag, authTagSz, authIn, authInSz);
        wc_AesFree(aes);
    }
    /* Scrub key material from the context before releasing it. */
    ForceZero(aes, sizeof *aes);
#ifdef WOLFSSL_SMALL_STACK
    XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif

    return ret;
}
/* Verify a GMAC tag over authIn with the given key and IV.
 *
 * Implemented as a zero-length GCM decryption, which performs the tag check.
 * Requires HAVE_AES_DECRYPT; otherwise returns NOT_COMPILED_IN.
 *
 * @param [in] key        Buffer holding key.
 * @param [in] keySz      Length of key in bytes.
 * @param [in] iv         Buffer holding IV/nonce.
 * @param [in] ivSz       Length of IV/nonce in bytes.
 * @param [in] authIn     Buffer holding authentication data.
 * @param [in] authInSz   Length of authentication data in bytes.
 * @param [in] authTag    Buffer holding authentication tag to verify.
 * @param [in] authTagSz  Length of authentication tag in bytes
 *                        (1..AES_BLOCK_SIZE).
 * @return  0 on success (tag verified).
 * @return  BAD_FUNC_ARG on invalid arguments.
 * @return  MEMORY_E when allocation fails. (WOLFSSL_SMALL_STACK)
 * @return  AES_GCM_AUTH_E (via wc_AesGcmDecrypt) when the tag is wrong.
 */
int wc_GmacVerify(const byte* key, word32 keySz,
                  const byte* iv, word32 ivSz,
                  const byte* authIn, word32 authInSz,
                  const byte* authTag, word32 authTagSz)
{
    int ret;
#ifdef HAVE_AES_DECRYPT
#ifdef WOLFSSL_SMALL_STACK
    Aes *aes = NULL;
#else
    Aes aes[1];
#endif

    if (key == NULL || iv == NULL || (authIn == NULL && authInSz != 0) ||
            authTag == NULL || authTagSz == 0 || authTagSz > AES_BLOCK_SIZE) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_SMALL_STACK
    if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
                              DYNAMIC_TYPE_AES)) == NULL)
        return MEMORY_E;
#endif

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret == 0) {
        ret = wc_AesGcmSetKey(aes, key, keySz);
        if (ret == 0)
            /* Zero-length decrypt: performs only the tag comparison. */
            ret = wc_AesGcmDecrypt(aes, NULL, NULL, 0, iv, ivSz,
                                   authTag, authTagSz, authIn, authInSz);
        wc_AesFree(aes);
    }
    /* Scrub key material from the context before releasing it. */
    ForceZero(aes, sizeof *aes);
#ifdef WOLFSSL_SMALL_STACK
    XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif
#else
    (void)key;
    (void)keySz;
    (void)iv;
    (void)ivSz;
    (void)authIn;
    (void)authInSz;
    (void)authTag;
    (void)authTagSz;
    ret = NOT_COMPILED_IN;
#endif

    return ret;
}
  9276. #endif /* WC_NO_RNG */
  9277. WOLFSSL_API int wc_GmacSetKey(Gmac* gmac, const byte* key, word32 len)
  9278. {
  9279. if (gmac == NULL || key == NULL) {
  9280. return BAD_FUNC_ARG;
  9281. }
  9282. return wc_AesGcmSetKey(&gmac->aes, key, len);
  9283. }
  9284. WOLFSSL_API int wc_GmacUpdate(Gmac* gmac, const byte* iv, word32 ivSz,
  9285. const byte* authIn, word32 authInSz,
  9286. byte* authTag, word32 authTagSz)
  9287. {
  9288. if (gmac == NULL) {
  9289. return BAD_FUNC_ARG;
  9290. }
  9291. return wc_AesGcmEncrypt(&gmac->aes, NULL, NULL, 0, iv, ivSz,
  9292. authTag, authTagSz, authIn, authInSz);
  9293. }
  9294. #endif /* HAVE_AESGCM */
  9295. #ifdef HAVE_AESCCM
  9296. int wc_AesCcmSetKey(Aes* aes, const byte* key, word32 keySz)
  9297. {
  9298. if (!((keySz == 16) || (keySz == 24) || (keySz == 32)))
  9299. return BAD_FUNC_ARG;
  9300. return wc_AesSetKey(aes, key, keySz, NULL, AES_ENCRYPTION);
  9301. }
  9302. /* Checks if the tag size is an accepted value based on RFC 3610 section 2
  9303. * returns 0 if tag size is ok
  9304. */
  9305. int wc_AesCcmCheckTagSize(int sz)
  9306. {
  9307. /* values here are from RFC 3610 section 2 */
  9308. if (sz != 4 && sz != 6 && sz != 8 && sz != 10 && sz != 12 && sz != 14
  9309. && sz != 16) {
  9310. WOLFSSL_MSG("Bad auth tag size AES-CCM");
  9311. return BAD_FUNC_ARG;
  9312. }
  9313. return 0;
  9314. }
  9315. #ifdef WOLFSSL_ARMASM
  9316. /* implementation located in wolfcrypt/src/port/arm/armv8-aes.c */
  9317. #elif defined(HAVE_COLDFIRE_SEC)
  9318. #error "Coldfire SEC doesn't currently support AES-CCM mode"
  9319. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  9320. !defined(WOLFSSL_QNX_CAAM)
  9321. /* implemented in wolfcrypt/src/port/caam_aes.c */
  9322. #elif defined(WOLFSSL_SILABS_SE_ACCEL)
  9323. /* implemented in wolfcrypt/src/port/silabs/silabs_aes.c */
  9324. int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
  9325. const byte* nonce, word32 nonceSz,
  9326. byte* authTag, word32 authTagSz,
  9327. const byte* authIn, word32 authInSz)
  9328. {
  9329. return wc_AesCcmEncrypt_silabs(
  9330. aes, out, in, inSz,
  9331. nonce, nonceSz,
  9332. authTag, authTagSz,
  9333. authIn, authInSz);
  9334. }
  9335. #ifdef HAVE_AES_DECRYPT
  9336. int wc_AesCcmDecrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
  9337. const byte* nonce, word32 nonceSz,
  9338. const byte* authTag, word32 authTagSz,
  9339. const byte* authIn, word32 authInSz)
  9340. {
  9341. return wc_AesCcmDecrypt_silabs(
  9342. aes, out, in, inSz,
  9343. nonce, nonceSz,
  9344. authTag, authTagSz,
  9345. authIn, authInSz);
  9346. }
  9347. #endif
  9348. #elif defined(FREESCALE_LTC)
/* AES-CCM encrypt using the Freescale LTC hardware module.
 *
 * @param [in]  aes        AES object with key already set.
 * @param [out] out        Buffer to hold cipher text.
 * @param [in]  in         Buffer holding plaintext.
 * @param [in]  inSz       Length of plaintext in bytes.
 * @param [in]  nonce      Buffer holding nonce (7..13 bytes per RFC 3610).
 * @param [in]  nonceSz    Length of nonce in bytes.
 * @param [out] authTag    Buffer to hold authentication tag.
 * @param [in]  authTagSz  Length of authentication tag in bytes.
 * @param [in]  authIn     Buffer holding authentication data.
 * @param [in]  authInSz   Length of authentication data in bytes.
 * @return  0 on success, BAD_FUNC_ARG on bad arguments or LTC failure.
 */
int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
                     const byte* nonce, word32 nonceSz,
                     byte* authTag, word32 authTagSz,
                     const byte* authIn, word32 authInSz)
{
    byte *key;
    word32 keySize;
    status_t status;

    /* sanity check on arguments */
    /* note, LTC_AES_EncryptTagCcm() doesn't allow null src or dst
     * ptrs even if inSz is zero (ltc_aes_ccm_check_input_args()), so
     * don't allow it here either.
     */
    if (aes == NULL || out == NULL || in == NULL || nonce == NULL
            || authTag == NULL || nonceSz < 7 || nonceSz > 13) {
        return BAD_FUNC_ARG;
    }

    if (wc_AesCcmCheckTagSize(authTagSz) != 0) {
        return BAD_FUNC_ARG;
    }

    key = (byte*)aes->key;

    status = wc_AesGetKeySize(aes, &keySize);
    if (status != 0) {
        return status;
    }

    /* Serialize access to the shared LTC hardware. */
    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    status = LTC_AES_EncryptTagCcm(LTC_BASE, in, out, inSz,
        nonce, nonceSz, authIn, authInSz, key, keySize, authTag, authTagSz);
    wolfSSL_CryptHwMutexUnLock();

    return (kStatus_Success == status) ? 0 : BAD_FUNC_ARG;
}
  9383. #ifdef HAVE_AES_DECRYPT
/* AES-CCM decrypt using the Freescale LTC hardware module.
 *
 * On authentication failure the output buffer is zeroized so unauthenticated
 * plaintext is never released.
 *
 * NOTE(review): unlike the encrypt path above, the tag size is not checked
 * with wc_AesCcmCheckTagSize() here - presumably the LTC driver rejects bad
 * sizes; confirm against the driver.
 *
 * @param [in]  aes        AES object with key already set.
 * @param [out] out        Buffer to hold plaintext.
 * @param [in]  in         Buffer holding cipher text.
 * @param [in]  inSz       Length of cipher text in bytes.
 * @param [in]  nonce      Buffer holding nonce (7..13 bytes per RFC 3610).
 * @param [in]  nonceSz    Length of nonce in bytes.
 * @param [in]  authTag    Buffer holding authentication tag.
 * @param [in]  authTagSz  Length of authentication tag in bytes.
 * @param [in]  authIn     Buffer holding authentication data.
 * @param [in]  authInSz   Length of authentication data in bytes.
 * @return  0 on success, BAD_FUNC_ARG on bad arguments,
 *          AES_CCM_AUTH_E when the tag does not verify.
 */
int wc_AesCcmDecrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
                     const byte* nonce, word32 nonceSz,
                     const byte* authTag, word32 authTagSz,
                     const byte* authIn, word32 authInSz)
{
    byte *key;
    word32 keySize;
    status_t status;

    /* sanity check on arguments */
    if (aes == NULL || out == NULL || in == NULL || nonce == NULL
            || authTag == NULL || nonceSz < 7 || nonceSz > 13) {
        return BAD_FUNC_ARG;
    }

    key = (byte*)aes->key;

    status = wc_AesGetKeySize(aes, &keySize);
    if (status != 0) {
        return status;
    }

    /* Serialize access to the shared LTC hardware. */
    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    status = LTC_AES_DecryptTagCcm(LTC_BASE, in, out, inSz,
        nonce, nonceSz, authIn, authInSz, key, keySize, authTag, authTagSz);
    wolfSSL_CryptHwMutexUnLock();

    if (status != kStatus_Success) {
        /* Tag mismatch: scrub any partially decrypted output. */
        XMEMSET(out, 0, inSz);
        return AES_CCM_AUTH_E;
    }
    return 0;
}
  9414. #endif /* HAVE_AES_DECRYPT */
  9415. #else
  9416. /* Software CCM */
  9417. static WARN_UNUSED_RESULT int roll_x(
  9418. Aes* aes, const byte* in, word32 inSz, byte* out)
  9419. {
  9420. int ret;
  9421. /* process the bulk of the data */
  9422. while (inSz >= AES_BLOCK_SIZE) {
  9423. xorbuf(out, in, AES_BLOCK_SIZE);
  9424. in += AES_BLOCK_SIZE;
  9425. inSz -= AES_BLOCK_SIZE;
  9426. ret = wc_AesEncrypt(aes, out, out);
  9427. if (ret != 0)
  9428. return ret;
  9429. }
  9430. /* process remainder of the data */
  9431. if (inSz > 0) {
  9432. xorbuf(out, in, inSz);
  9433. ret = wc_AesEncrypt(aes, out, out);
  9434. if (ret != 0)
  9435. return ret;
  9436. }
  9437. return 0;
  9438. }
/* Fold the CCM authentication data into the MAC accumulator.
 *
 * Per RFC 3610 section 2.2 the AAD is prefixed with its encoded length:
 * a 2-byte big-endian length for sizes up to 0xFEFF, or the marker bytes
 * 0xFF 0xFE followed by a 4-byte big-endian length for larger sizes.
 *
 * @param [in]      aes   AES object with encryption key set.
 * @param [in]      in    Authentication data.
 * @param [in]      inSz  Length of authentication data in bytes.
 * @param [in, out] out   AES_BLOCK_SIZE accumulator block, updated in place.
 * @return  0 on success, else the error from wc_AesEncrypt().
 */
static WARN_UNUSED_RESULT int roll_auth(
    Aes* aes, const byte* in, word32 inSz, byte* out)
{
    word32 authLenSz;
    word32 remainder;
    int ret;

    /* encode the length in */
    if (inSz <= 0xFEFF) {
        authLenSz = 2;
        out[0] ^= (byte)(inSz >> 8);
        out[1] ^= (byte)inSz;
    }
    else {
        authLenSz = 6;
        out[0] ^= 0xFF;
        out[1] ^= 0xFE;
        out[2] ^= (byte)(inSz >> 24);
        out[3] ^= (byte)(inSz >> 16);
        out[4] ^= (byte)(inSz >> 8);
        out[5] ^= (byte)inSz;
    }
    /* Note, the protocol handles auth data up to 2^64, but we are
     * using 32-bit sizes right now, so the bigger data isn't handled
     * else {}
     */

    /* start fill out the rest of the first block */
    remainder = AES_BLOCK_SIZE - authLenSz;
    if (inSz >= remainder) {
        /* plenty of bulk data to fill the remainder of this block */
        xorbuf(out + authLenSz, in, remainder);
        inSz -= remainder;
        in += remainder;
    }
    else {
        /* not enough bulk data, copy what is available, and pad zero */
        xorbuf(out + authLenSz, in, inSz);
        inSz = 0;
    }
    ret = wc_AesEncrypt(aes, out, out);

    /* Fold in the rest of the AAD a block at a time. */
    if ((ret == 0) && (inSz > 0)) {
        ret = roll_x(aes, in, inSz, out);
    }
    return ret;
}
  9483. static WC_INLINE void AesCcmCtrInc(byte* B, word32 lenSz)
  9484. {
  9485. word32 i;
  9486. for (i = 0; i < lenSz; i++) {
  9487. if (++B[AES_BLOCK_SIZE - 1 - i] != 0) return;
  9488. }
  9489. }
#ifdef WOLFSSL_AESNI
/* Expand counter block B into four consecutive counter blocks
 * B, B+1, B+2, B+3 (each AES_BLOCK_SIZE bytes) for 4-wide AES-NI ECB
 * encryption. lenSz is the number of trailing counter bytes that take
 * part in the big-endian increment/carry. */
static WC_INLINE void AesCcmCtrIncSet4(byte* B, word32 lenSz)
{
    word32 i;

    /* B+1 = B */
    XMEMCPY(B + AES_BLOCK_SIZE * 1, B, AES_BLOCK_SIZE);
    /* B+2,B+3 = B,B+1 */
    XMEMCPY(B + AES_BLOCK_SIZE * 2, B, AES_BLOCK_SIZE * 2);

    /* second block = B + 1 (ripple-carry increment) */
    for (i = 0; i < lenSz; i++) {
        if (++B[AES_BLOCK_SIZE * 2 - 1 - i] != 0) break;
    }
    /* third block = B + 2; low byte wrap detected via unsigned compare */
    B[AES_BLOCK_SIZE * 3 - 1] += 2;
    if (B[AES_BLOCK_SIZE * 3 - 1] < 2) {
        for (i = 1; i < lenSz; i++) {
            if (++B[AES_BLOCK_SIZE * 3 - 1 - i] != 0) break;
        }
    }
    /* fourth block = B + 3 */
    B[AES_BLOCK_SIZE * 4 - 1] += 3;
    if (B[AES_BLOCK_SIZE * 4 - 1] < 3) {
        for (i = 1; i < lenSz; i++) {
            if (++B[AES_BLOCK_SIZE * 4 - 1 - i] != 0) break;
        }
    }
}

/* Advance counter block B by 4 after a 4-wide batch, with carry
 * propagation through the lenSz counter bytes. */
static WC_INLINE void AesCcmCtrInc4(byte* B, word32 lenSz)
{
    word32 i;

    B[AES_BLOCK_SIZE - 1] += 4;
    if (B[AES_BLOCK_SIZE - 1] < 4) {
        /* low byte wrapped: propagate the carry upward */
        for (i = 1; i < lenSz; i++) {
            if (++B[AES_BLOCK_SIZE - 1 - i] != 0) break;
        }
    }
}
#endif
/* Software AES - CCM Encrypt */
/* Authenticated encryption (RFC 3610 / NIST SP 800-38C): computes the
 * CBC-MAC over B0, the optional AAD and the plaintext, then CTR-encrypts
 * the plaintext and masks the tag with E(K, A0).
 * return 0 on success */
int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
    const byte* nonce, word32 nonceSz,
    byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
#ifdef WOLFSSL_AESNI
    /* 4 contiguous blocks so the AES-NI path can run 4 counters at once */
    ALIGN128 byte A[AES_BLOCK_SIZE * 4];
    ALIGN128 byte B[AES_BLOCK_SIZE * 4];
#else
    byte A[AES_BLOCK_SIZE];
    byte B[AES_BLOCK_SIZE];
#endif
    byte lenSz;        /* bytes left in the block for the message length */
    word32 i;
    byte mask = 0xFF;
    const word32 wordSz = (word32)sizeof(word32);
    int ret;

    /* sanity check on arguments */
    if (aes == NULL || (inSz != 0 && (in == NULL || out == NULL)) ||
        nonce == NULL || authTag == NULL || nonceSz < 7 || nonceSz > 13 ||
        authTagSz > AES_BLOCK_SIZE)
        return BAD_FUNC_ARG;

    /* sanity check on tag size */
    if (wc_AesCcmCheckTagSize((int)authTagSz) != 0) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret =
            wc_CryptoCb_AesCcmEncrypt(aes, out, in, inSz, nonce, nonceSz,
                authTag, authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

    XMEMSET(A, 0, sizeof(A));
    /* build B0: flags byte, nonce, big-endian message length */
    XMEMCPY(B+1, nonce, nonceSz);
    lenSz = AES_BLOCK_SIZE - 1 - (byte)nonceSz;
    /* flags: bit6 = AAD present, bits 5..3 = (tagSz-2)/2, bits 2..0 = L-1 */
    B[0] = (byte)((authInSz > 0 ? 64 : 0)
         + (8 * (((byte)authTagSz - 2) / 2))
         + (lenSz - 1));
    /* big-endian message length; mask drops shifts past the width of
     * word32 (lenSz can exceed sizeof(word32)) */
    for (i = 0; i < lenSz; i++) {
        if (mask && i >= wordSz)
            mask = 0x00;
        B[AES_BLOCK_SIZE - 1 - i] = (byte)((inSz >> ((8 * i) & mask)) & mask);
    }

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Add("wc_AesCcmEncrypt B", B, sizeof(B));
#endif

    VECTOR_REGISTERS_PUSH;

    /* CBC-MAC: encrypt B0, then fold in AAD and plaintext */
    ret = wc_AesEncrypt(aes, B, A);
#ifdef WOLFSSL_CHECK_MEM_ZERO
    if (ret == 0)
        wc_MemZero_Add("wc_AesCcmEncrypt A", A, sizeof(A));
#endif
    if ((ret == 0) && (authInSz > 0))
        ret = roll_auth(aes, authIn, authInSz, A);
    if ((ret == 0) && (inSz > 0))
        ret = roll_x(aes, in, inSz, A);
    if (ret == 0) {
        /* save raw MAC, then compute E(K, A0): flags = L-1, counter = 0 */
        XMEMCPY(authTag, A, authTagSz);
        B[0] = lenSz - 1;
        for (i = 0; i < lenSz; i++)
            B[AES_BLOCK_SIZE - 1 - i] = 0;
        ret = wc_AesEncrypt(aes, B, A);
    }
    if (ret == 0) {
        /* tag = MAC XOR E(K, A0); start CTR encryption at counter 1 */
        xorbuf(authTag, A, authTagSz);
        B[15] = 1;
    }
#ifdef WOLFSSL_AESNI
    if ((ret == 0) && aes->use_aesni) {
        /* 4-wide CTR encryption of whole blocks */
        while (inSz >= AES_BLOCK_SIZE * 4) {
            AesCcmCtrIncSet4(B, lenSz);
            AES_ECB_encrypt_AESNI(B, A, AES_BLOCK_SIZE * 4, (byte*)aes->key,
                                  (int)aes->rounds);
            xorbuf(A, in, AES_BLOCK_SIZE * 4);
            XMEMCPY(out, A, AES_BLOCK_SIZE * 4);
            inSz -= AES_BLOCK_SIZE * 4;
            in += AES_BLOCK_SIZE * 4;
            out += AES_BLOCK_SIZE * 4;
            AesCcmCtrInc4(B, lenSz);
        }
    }
#endif
    if (ret == 0) {
        /* CTR encrypt remaining whole blocks one at a time */
        while (inSz >= AES_BLOCK_SIZE) {
            ret = wc_AesEncrypt(aes, B, A);
            if (ret != 0)
                break;
            xorbuf(A, in, AES_BLOCK_SIZE);
            XMEMCPY(out, A, AES_BLOCK_SIZE);
            AesCcmCtrInc(B, lenSz);
            inSz -= AES_BLOCK_SIZE;
            in += AES_BLOCK_SIZE;
            out += AES_BLOCK_SIZE;
        }
    }
    /* final partial block */
    if ((ret == 0) && (inSz > 0)) {
        ret = wc_AesEncrypt(aes, B, A);
    }
    if ((ret == 0) && (inSz > 0)) {
        xorbuf(A, in, inSz);
        XMEMCPY(out, A, inSz);
    }

    /* scrub key-stream/counter material from the stack */
    ForceZero(A, sizeof(A));
    ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(A, sizeof(A));
    wc_MemZero_Check(B, sizeof(B));
#endif

    VECTOR_REGISTERS_POP;

    return ret;
}
#ifdef HAVE_AES_DECRYPT
/* Software AES - CCM Decrypt */
/* Authenticated decryption (RFC 3610 / NIST SP 800-38C): CTR-decrypts
 * the ciphertext, recomputes the CBC-MAC over B0/AAD/plaintext, masks it
 * with E(K, A0), and compares against authTag in constant time.
 * Returns 0 on success, BAD_FUNC_ARG on bad arguments, AES_CCM_AUTH_E
 * (with out zeroed, unless ACVP vector testing) on tag mismatch. */
int wc_AesCcmDecrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
    const byte* nonce, word32 nonceSz,
    const byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
#ifdef WOLFSSL_AESNI
    ALIGN128 byte B[AES_BLOCK_SIZE * 4];
    ALIGN128 byte A[AES_BLOCK_SIZE * 4];
#else
    byte A[AES_BLOCK_SIZE];
    byte B[AES_BLOCK_SIZE];
#endif
    byte* o;          /* walking output pointer */
    byte lenSz;       /* bytes in block reserved for the length field */
    word32 i, oSz;    /* oSz: bytes of ciphertext still to process */
    byte mask = 0xFF;
    const word32 wordSz = (word32)sizeof(word32);
    int ret = 0;

    /* sanity check on arguments */
    if (aes == NULL || (inSz != 0 && (in == NULL || out == NULL)) ||
        nonce == NULL || authTag == NULL || nonceSz < 7 || nonceSz > 13 ||
        authTagSz > AES_BLOCK_SIZE)
        return BAD_FUNC_ARG;

    /* sanity check on tag size */
    if (wc_AesCcmCheckTagSize((int)authTagSz) != 0) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret =
            wc_CryptoCb_AesCcmDecrypt(aes, out, in, inSz, nonce, nonceSz,
                authTag, authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

    o = out;
    oSz = inSz;
    XMEMSET(A, 0, sizeof A);
    /* build A1: flags = L-1, nonce, counter = 1 */
    XMEMCPY(B+1, nonce, nonceSz);
    lenSz = AES_BLOCK_SIZE - 1 - (byte)nonceSz;
    B[0] = lenSz - 1;
    for (i = 0; i < lenSz; i++)
        B[AES_BLOCK_SIZE - 1 - i] = 0;
    B[15] = 1;
#ifdef WOLFSSL_CHECK_MEM_ZERO
    /* NOTE(review): tracking labels say "Encrypt" in the decrypt path;
     * presumably copy-paste of the tag names only */
    wc_MemZero_Add("wc_AesCcmEncrypt A", A, sizeof(A));
    wc_MemZero_Add("wc_AesCcmEncrypt B", B, sizeof(B));
#endif

    VECTOR_REGISTERS_PUSH;

#ifdef WOLFSSL_AESNI
    if (aes->use_aesni) {
        /* 4-wide CTR decryption of whole blocks */
        while (oSz >= AES_BLOCK_SIZE * 4) {
            AesCcmCtrIncSet4(B, lenSz);
            AES_ECB_encrypt_AESNI(B, A, AES_BLOCK_SIZE * 4, (byte*)aes->key,
                                  (int)aes->rounds);
            xorbuf(A, in, AES_BLOCK_SIZE * 4);
            XMEMCPY(o, A, AES_BLOCK_SIZE * 4);
            oSz -= AES_BLOCK_SIZE * 4;
            in += AES_BLOCK_SIZE * 4;
            o += AES_BLOCK_SIZE * 4;
            AesCcmCtrInc4(B, lenSz);
        }
    }
#endif
    /* CTR decrypt remaining whole blocks one at a time */
    while (oSz >= AES_BLOCK_SIZE) {
        ret = wc_AesEncrypt(aes, B, A);
        if (ret != 0)
            break;
        xorbuf(A, in, AES_BLOCK_SIZE);
        XMEMCPY(o, A, AES_BLOCK_SIZE);
        AesCcmCtrInc(B, lenSz);
        oSz -= AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        o += AES_BLOCK_SIZE;
    }
    /* final partial block (oSz bytes remain) */
    if ((ret == 0) && (inSz > 0))
        ret = wc_AesEncrypt(aes, B, A);
    if ((ret == 0) && (inSz > 0)) {
        xorbuf(A, in, oSz);
        XMEMCPY(o, A, oSz);
        /* reset counter bytes to zero (A0) */
        for (i = 0; i < lenSz; i++)
            B[AES_BLOCK_SIZE - 1 - i] = 0;
        /* NOTE(review): this encrypt result appears to be overwritten by
         * the B0 encrypt below before use — harmless; confirm intent */
        ret = wc_AesEncrypt(aes, B, A);
    }

    if (ret == 0) {
        /* recompute the CBC-MAC: rebuild B0 with flags + message length */
        o = out;
        oSz = inSz;

        /* flags: bit6 = AAD, bits 5..3 = (tagSz-2)/2, bits 2..0 = L-1 */
        B[0] = (byte)((authInSz > 0 ? 64 : 0)
             + (8 * (((byte)authTagSz - 2) / 2))
             + (lenSz - 1));
        /* big-endian length; mask avoids shifting past word32 width */
        for (i = 0; i < lenSz; i++) {
            if (mask && i >= wordSz)
                mask = 0x00;
            B[AES_BLOCK_SIZE - 1 - i] = (byte)((inSz >> ((8 * i) & mask)) & mask);
        }

        ret = wc_AesEncrypt(aes, B, A);
    }

    if (ret == 0) {
        if (authInSz > 0)
            ret = roll_auth(aes, authIn, authInSz, A);
    }
    if ((ret == 0) && (inSz > 0))
        ret = roll_x(aes, o, oSz, A);

    if (ret == 0) {
        /* compute E(K, A0) into B to mask the MAC */
        B[0] = lenSz - 1;
        for (i = 0; i < lenSz; i++)
            B[AES_BLOCK_SIZE - 1 - i] = 0;
        ret = wc_AesEncrypt(aes, B, B);
    }
    if (ret == 0)
        xorbuf(A, B, authTagSz);
    if (ret == 0) {
        /* timing-safe tag comparison */
        if (ConstantCompare(A, authTag, (int)authTagSz) != 0) {
            /* If the authTag check fails, don't keep the decrypted data.
             * Unfortunately, you need the decrypted data to calculate the
             * check value. */
            #if defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2) && \
                defined(ACVP_VECTOR_TESTING)
                WOLFSSL_MSG("Preserve output for vector responses");
            #else
                if (inSz > 0)
                    XMEMSET(out, 0, inSz);
            #endif
            ret = AES_CCM_AUTH_E;
        }
    }

    /* scrub key-stream/MAC material */
    ForceZero(A, sizeof(A));
    ForceZero(B, sizeof(B));
    o = NULL;

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(A, sizeof(A));
    wc_MemZero_Check(B, sizeof(B));
#endif

    VECTOR_REGISTERS_POP;

    return ret;
}
#endif /* HAVE_AES_DECRYPT */
  9789. #endif /* software CCM */
  9790. /* abstract functions that call lower level AESCCM functions */
  9791. #ifndef WC_NO_RNG
  9792. int wc_AesCcmSetNonce(Aes* aes, const byte* nonce, word32 nonceSz)
  9793. {
  9794. int ret = 0;
  9795. if (aes == NULL || nonce == NULL ||
  9796. nonceSz < CCM_NONCE_MIN_SZ || nonceSz > CCM_NONCE_MAX_SZ) {
  9797. ret = BAD_FUNC_ARG;
  9798. }
  9799. if (ret == 0) {
  9800. XMEMCPY(aes->reg, nonce, nonceSz);
  9801. aes->nonceSz = nonceSz;
  9802. /* Invocation counter should be 2^61 */
  9803. aes->invokeCtr[0] = 0;
  9804. aes->invokeCtr[1] = 0xE0000000;
  9805. }
  9806. return ret;
  9807. }
  9808. int wc_AesCcmEncrypt_ex(Aes* aes, byte* out, const byte* in, word32 sz,
  9809. byte* ivOut, word32 ivOutSz,
  9810. byte* authTag, word32 authTagSz,
  9811. const byte* authIn, word32 authInSz)
  9812. {
  9813. int ret = 0;
  9814. if (aes == NULL || out == NULL ||
  9815. (in == NULL && sz != 0) ||
  9816. ivOut == NULL ||
  9817. (authIn == NULL && authInSz != 0) ||
  9818. (ivOutSz != aes->nonceSz)) {
  9819. ret = BAD_FUNC_ARG;
  9820. }
  9821. if (ret == 0) {
  9822. aes->invokeCtr[0]++;
  9823. if (aes->invokeCtr[0] == 0) {
  9824. aes->invokeCtr[1]++;
  9825. if (aes->invokeCtr[1] == 0)
  9826. ret = AES_CCM_OVERFLOW_E;
  9827. }
  9828. }
  9829. if (ret == 0) {
  9830. ret = wc_AesCcmEncrypt(aes, out, in, sz,
  9831. (byte*)aes->reg, aes->nonceSz,
  9832. authTag, authTagSz,
  9833. authIn, authInSz);
  9834. if (ret == 0) {
  9835. XMEMCPY(ivOut, aes->reg, aes->nonceSz);
  9836. IncCtr((byte*)aes->reg, aes->nonceSz);
  9837. }
  9838. }
  9839. return ret;
  9840. }
  9841. #endif /* WC_NO_RNG */
  9842. #endif /* HAVE_AESCCM */
/* Initialize Aes for use with async hardware */
/* Prepare an Aes context: records heap/devId and resets the state of
 * each enabled hardware/software back end. Must be the first call on
 * the struct; pair with wc_AesFree().
 * Returns 0 on success or a negative back-end init error. */
int wc_AesInit(Aes* aes, void* heap, int devId)
{
    int ret = 0;

    if (aes == NULL)
        return BAD_FUNC_ARG;

    aes->heap = heap;

#ifdef WOLF_CRYPTO_CB
    aes->devId = devId;
    aes->devCtx = NULL;
#else
    (void)devId;
#endif
#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    ret = wolfAsync_DevCtxInit(&aes->asyncDev, WOLFSSL_ASYNC_MARKER_AES,
                               aes->heap, devId);
#endif /* WOLFSSL_ASYNC_CRYPT */
#if defined(WOLFSSL_AFALG) || defined(WOLFSSL_AFALG_XILINX_AES)
    /* AF_ALG sockets start unopened */
    aes->alFd = WC_SOCK_NOTSET;
    aes->rdFd = WC_SOCK_NOTSET;
#endif
#ifdef WOLFSSL_KCAPI_AES
    aes->handle = NULL;
    aes->init = 0;
#endif
#if defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))
    /* -1 marks the /dev/crypto session as not yet opened */
    aes->ctx.cfd = -1;
#endif
#if defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
    XMEMSET(&aes->ctx, 0, sizeof(aes->ctx));
#endif
#if defined(WOLFSSL_IMXRT_DCP)
    DCPAesInit(aes);
#endif
#ifdef WOLFSSL_MAXQ10XX_CRYPTO
    XMEMSET(&aes->maxq_ctx, 0, sizeof(aes->maxq_ctx));
#endif
#ifdef HAVE_AESGCM
#ifdef OPENSSL_EXTRA
    XMEMSET(aes->gcm.aadH, 0, sizeof(aes->gcm.aadH));
    aes->gcm.aadLen = 0;
#endif
#endif

#ifdef WOLFSSL_AESGCM_STREAM
#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_AESNI)
    aes->streamData = NULL;
#endif
    /* GCM streaming state starts unset */
    aes->keylen = 0;
    aes->nonceSz = 0;
    aes->gcmKeySet = 0;
    aes->nonceSet = 0;
    aes->ctrSet = 0;
#endif

#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    ret = wc_psa_aes_init(aes);
#endif

#if defined(WOLFSSL_RENESAS_FSPSM)
    XMEMSET(&aes->ctx, 0, sizeof(aes->ctx));
#endif

#ifdef WC_DEBUG_CIPHER_LIFECYCLE
    if (ret == 0)
        ret = wc_debug_CipherLifecycleInit(&aes->CipherLifecycleTag, aes->heap);
#endif

    return ret;
}
  9909. #ifdef WOLF_PRIVATE_KEY_ID
  9910. int wc_AesInit_Id(Aes* aes, unsigned char* id, int len, void* heap, int devId)
  9911. {
  9912. int ret = 0;
  9913. if (aes == NULL)
  9914. ret = BAD_FUNC_ARG;
  9915. if (ret == 0 && (len < 0 || len > AES_MAX_ID_LEN))
  9916. ret = BUFFER_E;
  9917. if (ret == 0)
  9918. ret = wc_AesInit(aes, heap, devId);
  9919. if (ret == 0) {
  9920. XMEMCPY(aes->id, id, (size_t)len);
  9921. aes->idLen = len;
  9922. aes->labelLen = 0;
  9923. }
  9924. return ret;
  9925. }
  9926. int wc_AesInit_Label(Aes* aes, const char* label, void* heap, int devId)
  9927. {
  9928. int ret = 0;
  9929. size_t labelLen = 0;
  9930. if (aes == NULL || label == NULL)
  9931. ret = BAD_FUNC_ARG;
  9932. if (ret == 0) {
  9933. labelLen = XSTRLEN(label);
  9934. if (labelLen == 0 || labelLen > AES_MAX_LABEL_LEN)
  9935. ret = BUFFER_E;
  9936. }
  9937. if (ret == 0)
  9938. ret = wc_AesInit(aes, heap, devId);
  9939. if (ret == 0) {
  9940. XMEMCPY(aes->label, label, labelLen);
  9941. aes->labelLen = (int)labelLen;
  9942. aes->idLen = 0;
  9943. }
  9944. return ret;
  9945. }
  9946. #endif
/* Free Aes from use with async hardware */
/* Release everything attached to an Aes context: async device context,
 * AF_ALG sockets, KCAPI handle, devcrypto session, port-specific state,
 * and cached key material. Safe to call with NULL; does not free the
 * Aes struct itself. */
void wc_AesFree(Aes* aes)
{
    if (aes == NULL)
        return;

#ifdef WC_DEBUG_CIPHER_LIFECYCLE
    (void)wc_debug_CipherLifecycleFree(&aes->CipherLifecycleTag, aes->heap, 1);
#endif
#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    wolfAsync_DevCtxFree(&aes->asyncDev, WOLFSSL_ASYNC_MARKER_AES);
#endif /* WOLFSSL_ASYNC_CRYPT */
#if defined(WOLFSSL_AFALG) || defined(WOLFSSL_AFALG_XILINX_AES)
    if (aes->rdFd > 0) { /* negative is error case */
        close(aes->rdFd);
        aes->rdFd = WC_SOCK_NOTSET;
    }
    if (aes->alFd > 0) {
        close(aes->alFd);
        aes->alFd = WC_SOCK_NOTSET;
    }
#endif /* WOLFSSL_AFALG */
#ifdef WOLFSSL_KCAPI_AES
    /* scrub the cached key before tearing down the kernel handle */
    ForceZero((byte*)aes->devKey, AES_MAX_KEY_SIZE/WOLFSSL_BIT_SIZE);
    if (aes->init == 1) {
        kcapi_cipher_destroy(aes->handle);
    }
    aes->init = 0;
    aes->handle = NULL;
#endif
#if defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))
    wc_DevCryptoFree(&aes->ctx);
#endif
#if defined(WOLF_CRYPTO_CB) || (defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))) || \
    (defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES))
    /* scrub key material cached for offload paths */
    ForceZero((byte*)aes->devKey, AES_MAX_KEY_SIZE/WOLFSSL_BIT_SIZE);
#endif
#if defined(WOLFSSL_IMXRT_DCP)
    DCPAesFree(aes);
#endif
#if defined(WOLFSSL_AESGCM_STREAM) && defined(WOLFSSL_SMALL_STACK) && \
    !defined(WOLFSSL_AESNI)
    if (aes->streamData != NULL) {
        XFREE(aes->streamData, aes->heap, DYNAMIC_TYPE_AES);
        aes->streamData = NULL;
    }
#endif
#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    /* only the hardware object path owns SE050 resources */
    if (aes->useSWCrypt == 0) {
        se050_aes_free(aes);
    }
#endif
#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    wc_psa_aes_free(aes);
#endif
#ifdef WOLFSSL_MAXQ10XX_CRYPTO
    wc_MAXQ10XX_AesFree(aes);
#endif
#if ((defined(WOLFSSL_RENESAS_FSPSM_TLS) || \
    defined(WOLFSSL_RENESAS_FSPSM_CRYPTONLY)) && \
    !defined(NO_WOLFSSL_RENESAS_FSPSM_AES))
    wc_fspsm_Aesfree(aes);
#endif
#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(aes, sizeof(Aes));
#endif
}
/* Report the configured AES key size in bytes, derived from the round
 * count (10/12/14 rounds -> 16/24/32 bytes) unless a hardware back end
 * tracks it directly.
 * Returns 0 on success, BAD_FUNC_ARG on NULL args or unknown rounds. */
int wc_AesGetKeySize(Aes* aes, word32* keySize)
{
    int ret = 0;

    if (aes == NULL || keySize == NULL) {
        return BAD_FUNC_ARG;
    }

#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    /* PSA keeps the key size itself */
    return wc_psa_aes_get_key_size(aes, keySize);
#endif
#if defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
    *keySize = aes->ctx.key.keySize;
    return ret;
#endif
    switch (aes->rounds) {
#ifdef WOLFSSL_AES_128
    case 10:
        *keySize = 16;
        break;
#endif
#ifdef WOLFSSL_AES_192
    case 12:
        *keySize = 24;
        break;
#endif
#ifdef WOLFSSL_AES_256
    case 14:
        *keySize = 32;
        break;
#endif
    default:
        /* key not set, or size compiled out */
        *keySize = 0;
        ret = BAD_FUNC_ARG;
    }

    return ret;
}
  10050. #endif /* !WOLFSSL_TI_CRYPT */
/* the earlier do-nothing default definitions for VECTOR_REGISTERS_{PUSH,POP}
 * are missed when WOLFSSL_TI_CRYPT or WOLFSSL_ARMASM is defined.
 */
  10054. #ifndef VECTOR_REGISTERS_PUSH
  10055. #define VECTOR_REGISTERS_PUSH { WC_DO_NOTHING
  10056. #endif
  10057. #ifndef VECTOR_REGISTERS_POP
  10058. #define VECTOR_REGISTERS_POP } WC_DO_NOTHING
  10059. #endif
  10060. #ifdef HAVE_AES_ECB
  10061. #if defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  10062. !defined(WOLFSSL_QNX_CAAM)
  10063. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  10064. #elif defined(WOLFSSL_AFALG)
  10065. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  10066. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  10067. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  10068. #elif defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
  10069. /* Software AES - ECB */
  10070. int wc_AesEcbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  10071. {
  10072. if ((in == NULL) || (out == NULL) || (aes == NULL))
  10073. return BAD_FUNC_ARG;
  10074. return AES_ECB_encrypt(aes, in, out, sz);
  10075. }
  10076. int wc_AesEcbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  10077. {
  10078. if ((in == NULL) || (out == NULL) || (aes == NULL))
  10079. return BAD_FUNC_ARG;
  10080. return AES_ECB_decrypt(aes, in, out, sz);
  10081. }
  10082. #else
/* Software AES - ECB */
/* ECB-encrypt sz bytes (whole blocks; length validated by the caller)
 * from in to out. Tries the crypto callback, then DCP hardware, then
 * AES-NI, falling back to the C (or ARM direct) block routine. */
static WARN_UNUSED_RESULT int _AesEcbEncrypt(
    Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret = 0;

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        ret = wc_CryptoCb_AesEcbEncrypt(aes, out, in, sz);
        if (ret != CRYPTOCB_UNAVAILABLE)
            return ret;
        ret = 0;
        /* fall-through when unavailable */
    }
#endif
#ifdef WOLFSSL_IMXRT_DCP
    /* DCP hardware only handles AES-128 */
    if (aes->keylen == 16)
        return DCPAesEcbEncrypt(aes, out, in, sz);
#endif

    VECTOR_REGISTERS_PUSH;

#ifdef WOLFSSL_AESNI
    if (aes->use_aesni) {
        AES_ECB_encrypt_AESNI(in, out, sz, (byte*)aes->key, (int)aes->rounds);
    }
    else
#endif
    {
#ifndef WOLFSSL_ARMASM
        AesEncryptBlocks_C(aes, in, out, sz);
#else
        /* ARM assembly path: one block at a time via the direct API */
        word32 i;

        for (i = 0; i < sz; i += AES_BLOCK_SIZE) {
            ret = wc_AesEncryptDirect(aes, out, in);
            if (ret != 0)
                break;
            in += AES_BLOCK_SIZE;
            out += AES_BLOCK_SIZE;
        }
#endif
    }

    VECTOR_REGISTERS_POP;

    return ret;
}
/* ECB-decrypt sz bytes (whole blocks; length validated by the caller)
 * from in to out. Same dispatch order as _AesEcbEncrypt: crypto
 * callback, DCP hardware, AES-NI, then the C / ARM direct routine. */
static WARN_UNUSED_RESULT int _AesEcbDecrypt(
    Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret = 0;

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        ret = wc_CryptoCb_AesEcbDecrypt(aes, out, in, sz);
        if (ret != CRYPTOCB_UNAVAILABLE)
            return ret;
        ret = 0;
        /* fall-through when unavailable */
    }
#endif
#ifdef WOLFSSL_IMXRT_DCP
    /* DCP hardware only handles AES-128 */
    if (aes->keylen == 16)
        return DCPAesEcbDecrypt(aes, out, in, sz);
#endif

    VECTOR_REGISTERS_PUSH;

#ifdef WOLFSSL_AESNI
    if (aes->use_aesni) {
        AES_ECB_decrypt_AESNI(in, out, sz, (byte*)aes->key, (int)aes->rounds);
    }
    else
#endif
    {
#ifndef WOLFSSL_ARMASM
        AesDecryptBlocks_C(aes, in, out, sz);
#else
        /* ARM assembly path: one block at a time via the direct API */
        word32 i;

        for (i = 0; i < sz; i += AES_BLOCK_SIZE) {
            ret = wc_AesDecryptDirect(aes, out, in);
            if (ret != 0)
                break;
            in += AES_BLOCK_SIZE;
            out += AES_BLOCK_SIZE;
        }
#endif
    }

    VECTOR_REGISTERS_POP;

    return ret;
}
  10172. int wc_AesEcbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  10173. {
  10174. if ((in == NULL) || (out == NULL) || (aes == NULL))
  10175. return BAD_FUNC_ARG;
  10176. if ((sz % AES_BLOCK_SIZE) != 0) {
  10177. return BAD_LENGTH_E;
  10178. }
  10179. return _AesEcbEncrypt(aes, out, in, sz);
  10180. }
  10181. int wc_AesEcbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  10182. {
  10183. if ((in == NULL) || (out == NULL) || (aes == NULL))
  10184. return BAD_FUNC_ARG;
  10185. if ((sz % AES_BLOCK_SIZE) != 0) {
  10186. return BAD_LENGTH_E;
  10187. }
  10188. return _AesEcbDecrypt(aes, out, in, sz);
  10189. }
  10190. #endif
  10191. #endif /* HAVE_AES_ECB */
  10192. #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_OFB)
/* Feedback AES mode
 *
 * aes  structure holding key to use for encryption
 * out  buffer to hold result of encryption (must be at least as large as
 *      the input buffer)
 * in   buffer to encrypt
 * sz   size of input buffer
 * mode flag to specify AES mode (AES_CFB_MODE or AES_OFB_MODE)
 *
 * Key-stream bytes left over from a previous partial-block call are
 * cached in aes->tmp (count in aes->left) so streaming calls of any
 * size chain correctly.
 *
 * returns 0 on success and negative error values on failure
 */
/* Software AES - CFB Encrypt */
static WARN_UNUSED_RESULT int wc_AesFeedbackEncrypt(
    Aes* aes, byte* out, const byte* in, word32 sz, byte mode)
{
    byte* tmp = NULL;
    int ret = 0;
    word32 processed;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    /* consume any unused bytes left in aes->tmp */
    processed = min(aes->left, sz);
    xorbufout(out, in, (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left, processed);
#ifdef WOLFSSL_AES_CFB
    if (mode == AES_CFB_MODE) {
        /* CFB feeds the produced ciphertext back into the register */
        XMEMCPY((byte*)aes->reg + AES_BLOCK_SIZE - aes->left, out, processed);
    }
#endif
    aes->left -= processed;
    out += processed;
    in += processed;
    sz -= processed;

    VECTOR_REGISTERS_PUSH;

    while (sz >= AES_BLOCK_SIZE) {
        /* Using aes->tmp here for inline case i.e. in=out */
        ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
        if (ret != 0)
            break;
#ifdef WOLFSSL_AES_OFB
        if (mode == AES_OFB_MODE) {
            /* OFB feeds the raw key stream back (before the XOR) */
            XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
        }
#endif
        xorbuf((byte*)aes->tmp, in, AES_BLOCK_SIZE);
#ifdef WOLFSSL_AES_CFB
        if (mode == AES_CFB_MODE) {
            /* CFB feeds the ciphertext (key stream XOR plaintext) back */
            XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
        }
#endif
        XMEMCPY(out, aes->tmp, AES_BLOCK_SIZE);
        out += AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        sz -= AES_BLOCK_SIZE;
        aes->left = 0;
    }

    /* encrypt left over data */
    if ((ret == 0) && sz) {
        ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
    }
    if ((ret == 0) && sz) {
        /* keep the unused key-stream bytes for the next call */
        aes->left = AES_BLOCK_SIZE;
        tmp = (byte*)aes->tmp;
#ifdef WOLFSSL_AES_OFB
        if (mode == AES_OFB_MODE) {
            XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
        }
#endif
        xorbufout(out, in, tmp, sz);
#ifdef WOLFSSL_AES_CFB
        if (mode == AES_CFB_MODE) {
            /* partial ciphertext goes into the front of the register */
            XMEMCPY(aes->reg, out, sz);
        }
#endif
        aes->left -= sz;
    }

    VECTOR_REGISTERS_POP;

    return ret;
}
  10272. #ifdef HAVE_AES_DECRYPT
/* CFB 128
 *
 * aes  structure holding key to use for decryption
 * out  buffer to hold result of decryption (must be at least as large as
 *      the input buffer)
 * in   buffer to decrypt
 * sz   size of input buffer
 * mode AES_CFB_MODE or AES_OFB_MODE
 *
 * Mirrors wc_AesFeedbackEncrypt but feeds incoming ciphertext (CFB) or
 * raw key stream (OFB) back into the register; partial key-stream bytes
 * are cached in aes->tmp / aes->left between calls.
 *
 * returns 0 on success and negative error values on failure
 */
/* Software AES - CFB Decrypt */
static WARN_UNUSED_RESULT int wc_AesFeedbackDecrypt(
    Aes* aes, byte* out, const byte* in, word32 sz, byte mode)
{
    int ret = 0;
    word32 processed;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_AES_CFB
    /* check if more input needs copied over to aes->reg */
    if (aes->left && sz && mode == AES_CFB_MODE) {
        word32 size = min(aes->left, sz);

        /* CFB decrypt: incoming ciphertext completes the register */
        XMEMCPY((byte*)aes->reg + AES_BLOCK_SIZE - aes->left, in, size);
    }
#endif
    /* consume any unused bytes left in aes->tmp */
    processed = min(aes->left, sz);
    xorbufout(out, in, (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left, processed);
    aes->left -= processed;
    out += processed;
    in += processed;
    sz -= processed;

    VECTOR_REGISTERS_PUSH;

    /* NOTE(review): strict '>' (encrypt uses '>='), so an exactly
     * block-sized tail is handled by the partial path below; presumably
     * intentional — verify against upstream */
    while (sz > AES_BLOCK_SIZE) {
        /* Using aes->tmp here for inline case i.e. in=out */
        ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
        if (ret != 0)
            break;
#ifdef WOLFSSL_AES_OFB
        if (mode == AES_OFB_MODE) {
            /* OFB feeds the raw key stream back */
            XMEMCPY((byte*)aes->reg, (byte*)aes->tmp, AES_BLOCK_SIZE);
        }
#endif
        xorbuf((byte*)aes->tmp, in, AES_BLOCK_SIZE);
#ifdef WOLFSSL_AES_CFB
        if (mode == AES_CFB_MODE) {
            /* CFB decrypt feeds the ciphertext back (copied before the
             * in==out overwrite, via aes->tmp staging) */
            XMEMCPY(aes->reg, in, AES_BLOCK_SIZE);
        }
#endif
        XMEMCPY(out, (byte*)aes->tmp, AES_BLOCK_SIZE);
        out += AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        sz -= AES_BLOCK_SIZE;
        aes->left = 0;
    }

    /* decrypt left over data */
    if ((ret == 0) && sz) {
        ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
    }
    if ((ret == 0) && sz) {
#ifdef WOLFSSL_AES_CFB
        if (mode == AES_CFB_MODE) {
            XMEMCPY(aes->reg, in, sz);
        }
#endif
#ifdef WOLFSSL_AES_OFB
        if (mode == AES_OFB_MODE) {
            XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
        }
#endif
        /* remember how many key-stream bytes remain for the next call */
        aes->left = AES_BLOCK_SIZE - sz;
        xorbufout(out, in, aes->tmp, sz);
    }

    VECTOR_REGISTERS_POP;

    return ret;
}
  10350. #endif /* HAVE_AES_DECRYPT */
  10351. #endif /* WOLFSSL_AES_CFB */
  10352. #ifdef WOLFSSL_AES_CFB
/* CFB 128
 *
 * aes structure holding key to use for encryption
 * out buffer to hold result of encryption (must be at least as large as
 *     the input buffer)
 * in  buffer to encrypt
 * sz  size of input buffer
 *
 * returns 0 on success and negative error values on failure
 */
/* Software AES - CFB Encrypt */
int wc_AesCfbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    /* thin wrapper: full-block (CFB-128) mode via the shared feedback core */
    return wc_AesFeedbackEncrypt(aes, out, in, sz, AES_CFB_MODE);
}
  10368. #ifdef HAVE_AES_DECRYPT
/* CFB 128
 *
 * aes structure holding key to use for decryption
 * out buffer to hold result of decryption (must be at least as large as
 *     the input buffer)
 * in  buffer to decrypt
 * sz  size of input buffer
 *
 * returns 0 on success and negative error values on failure
 */
/* Software AES - CFB Decrypt */
int wc_AesCfbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    /* thin wrapper: full-block (CFB-128) mode via the shared feedback core */
    return wc_AesFeedbackDecrypt(aes, out, in, sz, AES_CFB_MODE);
}
  10384. #endif /* HAVE_AES_DECRYPT */
  10385. /* shift the whole AES_BLOCK_SIZE array left by 8 or 1 bits */
  10386. static void shiftLeftArray(byte* ary, byte shift)
  10387. {
  10388. int i;
  10389. if (shift == WOLFSSL_BIT_SIZE) {
  10390. /* shifting over by 8 bits */
  10391. for (i = 0; i < AES_BLOCK_SIZE - 1; i++) {
  10392. ary[i] = ary[i+1];
  10393. }
  10394. ary[i] = 0;
  10395. }
  10396. else {
  10397. /* shifting over by 7 or less bits */
  10398. for (i = 0; i < AES_BLOCK_SIZE - 1; i++) {
  10399. byte carry = ary[i+1] & (0XFF << (WOLFSSL_BIT_SIZE - shift));
  10400. carry >>= (WOLFSSL_BIT_SIZE - shift);
  10401. ary[i] = (byte)((ary[i] << shift) + carry);
  10402. }
  10403. ary[i] = ary[i] << shift;
  10404. }
  10405. }
/* Encrypt or decrypt one byte at a time using AES CFB-8 mode.
 *
 * aes  AES key structure; aes->reg holds the feedback shift register (IV)
 *      and aes->tmp receives each raw block-cipher output
 * out  output buffer (at least sz bytes)
 * in   input buffer
 * sz   number of bytes to process
 * dir  AES_ENCRYPTION or AES_DECRYPTION
 *
 * returns 0 on success and negative values on failure */
static WARN_UNUSED_RESULT int wc_AesFeedbackCFB8(
    Aes* aes, byte* out, const byte* in, word32 sz, byte dir)
{
    byte *pt;
    int ret = 0;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    if (sz == 0) {
        return 0;
    }

    VECTOR_REGISTERS_PUSH;

    while (sz > 0) {
        /* encrypt the shift register into aes->tmp */
        ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
        if (ret != 0)
            break;
        if (dir == AES_DECRYPTION) {
            pt = (byte*)aes->reg;

            /* LSB + CAT: on decrypt, feed the incoming cipher-text byte
             * into the shift register before producing output */
            shiftLeftArray(pt, WOLFSSL_BIT_SIZE);
            pt[AES_BLOCK_SIZE - 1] = in[0];
        }

        /* MSB + XOR */
#ifdef BIG_ENDIAN_ORDER
        /* aes->tmp is a word32 array; reverse so tmp[0]'s low byte is the
         * first keystream byte, matching the little-endian path */
        ByteReverseWords(aes->tmp, aes->tmp, AES_BLOCK_SIZE);
#endif
        out[0] = (byte)(aes->tmp[0] ^ in[0]);
        if (dir == AES_ENCRYPTION) {
            pt = (byte*)aes->reg;

            /* LSB + CAT: on encrypt, feed the produced cipher-text byte
             * into the shift register */
            shiftLeftArray(pt, WOLFSSL_BIT_SIZE);
            pt[AES_BLOCK_SIZE - 1] = out[0];
        }

        out += 1;
        in  += 1;
        sz  -= 1;
    }

    VECTOR_REGISTERS_POP;

    return ret;
}
  10447. /* returns 0 on success and negative values on failure */
  10448. static WARN_UNUSED_RESULT int wc_AesFeedbackCFB1(
  10449. Aes* aes, byte* out, const byte* in, word32 sz, byte dir)
  10450. {
  10451. byte tmp;
  10452. byte cur = 0; /* hold current work in order to handle inline in=out */
  10453. byte* pt;
  10454. int bit = 7;
  10455. int ret = 0;
  10456. if (aes == NULL || out == NULL || in == NULL) {
  10457. return BAD_FUNC_ARG;
  10458. }
  10459. if (sz == 0) {
  10460. return 0;
  10461. }
  10462. VECTOR_REGISTERS_PUSH;
  10463. while (sz > 0) {
  10464. ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
  10465. if (ret != 0)
  10466. break;
  10467. if (dir == AES_DECRYPTION) {
  10468. pt = (byte*)aes->reg;
  10469. /* LSB + CAT */
  10470. tmp = (0X01 << bit) & in[0];
  10471. tmp = tmp >> bit;
  10472. tmp &= 0x01;
  10473. shiftLeftArray((byte*)aes->reg, 1);
  10474. pt[AES_BLOCK_SIZE - 1] |= tmp;
  10475. }
  10476. /* MSB + XOR */
  10477. tmp = (0X01 << bit) & in[0];
  10478. pt = (byte*)aes->tmp;
  10479. tmp = (pt[0] >> 7) ^ (tmp >> bit);
  10480. tmp &= 0x01;
  10481. cur |= (tmp << bit);
  10482. if (dir == AES_ENCRYPTION) {
  10483. pt = (byte*)aes->reg;
  10484. /* LSB + CAT */
  10485. shiftLeftArray((byte*)aes->reg, 1);
  10486. pt[AES_BLOCK_SIZE - 1] |= tmp;
  10487. }
  10488. bit--;
  10489. if (bit < 0) {
  10490. out[0] = cur;
  10491. out += 1;
  10492. in += 1;
  10493. sz -= 1;
  10494. bit = 7;
  10495. cur = 0;
  10496. }
  10497. else {
  10498. sz -= 1;
  10499. }
  10500. }
  10501. if (ret == 0) {
  10502. if (bit > 0 && bit < 7) {
  10503. out[0] = cur;
  10504. }
  10505. }
  10506. VECTOR_REGISTERS_POP;
  10507. return ret;
  10508. }
  10509. /* CFB 1
  10510. *
  10511. * aes structure holding key to use for encryption
  10512. * out buffer to hold result of encryption (must be at least as large as input
  10513. * buffer)
  10514. * in buffer to encrypt (packed to left, i.e. 101 is 0x90)
  10515. * sz size of input buffer in bits (0x1 would be size of 1 and 0xFF size of 8)
  10516. *
  10517. * returns 0 on success and negative values on failure
  10518. */
  10519. int wc_AesCfb1Encrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  10520. {
  10521. return wc_AesFeedbackCFB1(aes, out, in, sz, AES_ENCRYPTION);
  10522. }
  10523. /* CFB 8
  10524. *
  10525. * aes structure holding key to use for encryption
  10526. * out buffer to hold result of encryption (must be at least as large as input
  10527. * buffer)
  10528. * in buffer to encrypt
  10529. * sz size of input buffer
  10530. *
  10531. * returns 0 on success and negative values on failure
  10532. */
  10533. int wc_AesCfb8Encrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  10534. {
  10535. return wc_AesFeedbackCFB8(aes, out, in, sz, AES_ENCRYPTION);
  10536. }
  10537. #ifdef HAVE_AES_DECRYPT
  10538. /* CFB 1
  10539. *
  10540. * aes structure holding key to use for encryption
  10541. * out buffer to hold result of encryption (must be at least as large as input
  10542. * buffer)
  10543. * in buffer to encrypt
  10544. * sz size of input buffer in bits (0x1 would be size of 1 and 0xFF size of 8)
  10545. *
  10546. * returns 0 on success and negative values on failure
  10547. */
  10548. int wc_AesCfb1Decrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  10549. {
  10550. return wc_AesFeedbackCFB1(aes, out, in, sz, AES_DECRYPTION);
  10551. }
  10552. /* CFB 8
  10553. *
  10554. * aes structure holding key to use for encryption
  10555. * out buffer to hold result of encryption (must be at least as large as input
  10556. * buffer)
  10557. * in buffer to encrypt
  10558. * sz size of input buffer
  10559. *
  10560. * returns 0 on success and negative values on failure
  10561. */
  10562. int wc_AesCfb8Decrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  10563. {
  10564. return wc_AesFeedbackCFB8(aes, out, in, sz, AES_DECRYPTION);
  10565. }
  10566. #endif /* HAVE_AES_DECRYPT */
  10567. #endif /* WOLFSSL_AES_CFB */
  10568. #ifdef WOLFSSL_AES_OFB
  10569. /* OFB
  10570. *
  10571. * aes structure holding key to use for encryption
  10572. * out buffer to hold result of encryption (must be at least as large as input
  10573. * buffer)
  10574. * in buffer to encrypt
  10575. * sz size of input buffer
  10576. *
  10577. * returns 0 on success and negative error values on failure
  10578. */
  10579. /* Software AES - CFB Encrypt */
  10580. int wc_AesOfbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  10581. {
  10582. return wc_AesFeedbackEncrypt(aes, out, in, sz, AES_OFB_MODE);
  10583. }
  10584. #ifdef HAVE_AES_DECRYPT
  10585. /* OFB
  10586. *
  10587. * aes structure holding key to use for decryption
  10588. * out buffer to hold result of decryption (must be at least as large as input
  10589. * buffer)
  10590. * in buffer to decrypt
  10591. * sz size of input buffer
  10592. *
  10593. * returns 0 on success and negative error values on failure
  10594. */
  10595. /* Software AES - OFB Decrypt */
  10596. int wc_AesOfbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  10597. {
  10598. return wc_AesFeedbackDecrypt(aes, out, in, sz, AES_OFB_MODE);
  10599. }
  10600. #endif /* HAVE_AES_DECRYPT */
  10601. #endif /* WOLFSSL_AES_OFB */
  10602. #ifdef HAVE_AES_KEYWRAP
  10603. /* Initialize key wrap counter with value */
  10604. static WC_INLINE void InitKeyWrapCounter(byte* inOutCtr, word32 value)
  10605. {
  10606. word32 i;
  10607. word32 bytes;
  10608. bytes = sizeof(word32);
  10609. for (i = 0; i < sizeof(word32); i++) {
  10610. inOutCtr[i+sizeof(word32)] = (byte)(value >> ((bytes - 1) * 8));
  10611. bytes--;
  10612. }
  10613. }
  10614. /* Increment key wrap counter */
  10615. static WC_INLINE void IncrementKeyWrapCounter(byte* inOutCtr)
  10616. {
  10617. int i;
  10618. /* in network byte order so start at end and work back */
  10619. for (i = KEYWRAP_BLOCK_SIZE - 1; i >= 0; i--) {
  10620. if (++inOutCtr[i]) /* we're done unless we overflow */
  10621. return;
  10622. }
  10623. }
  10624. /* Decrement key wrap counter */
  10625. static WC_INLINE void DecrementKeyWrapCounter(byte* inOutCtr)
  10626. {
  10627. int i;
  10628. for (i = KEYWRAP_BLOCK_SIZE - 1; i >= 0; i--) {
  10629. if (--inOutCtr[i] != 0xFF) /* we're done unless we underflow */
  10630. return;
  10631. }
  10632. }
  10633. int wc_AesKeyWrap_ex(Aes *aes, const byte* in, word32 inSz, byte* out,
  10634. word32 outSz, const byte* iv)
  10635. {
  10636. word32 i;
  10637. byte* r;
  10638. int j;
  10639. int ret = 0;
  10640. byte t[KEYWRAP_BLOCK_SIZE];
  10641. byte tmp[AES_BLOCK_SIZE];
  10642. /* n must be at least 2 64-bit blocks, output size is (n + 1) 8 bytes (64-bit) */
  10643. if (aes == NULL || in == NULL || inSz < 2*KEYWRAP_BLOCK_SIZE ||
  10644. out == NULL || outSz < (inSz + KEYWRAP_BLOCK_SIZE))
  10645. return BAD_FUNC_ARG;
  10646. /* input must be multiple of 64-bits */
  10647. if (inSz % KEYWRAP_BLOCK_SIZE != 0)
  10648. return BAD_FUNC_ARG;
  10649. r = out + 8;
  10650. XMEMCPY(r, in, inSz);
  10651. XMEMSET(t, 0, sizeof(t));
  10652. /* user IV is optional */
  10653. if (iv == NULL) {
  10654. XMEMSET(tmp, 0xA6, KEYWRAP_BLOCK_SIZE);
  10655. } else {
  10656. XMEMCPY(tmp, iv, KEYWRAP_BLOCK_SIZE);
  10657. }
  10658. VECTOR_REGISTERS_PUSH;
  10659. for (j = 0; j <= 5; j++) {
  10660. for (i = 1; i <= inSz / KEYWRAP_BLOCK_SIZE; i++) {
  10661. /* load R[i] */
  10662. XMEMCPY(tmp + KEYWRAP_BLOCK_SIZE, r, KEYWRAP_BLOCK_SIZE);
  10663. ret = wc_AesEncryptDirect(aes, tmp, tmp);
  10664. if (ret != 0)
  10665. break;
  10666. /* calculate new A */
  10667. IncrementKeyWrapCounter(t);
  10668. xorbuf(tmp, t, KEYWRAP_BLOCK_SIZE);
  10669. /* save R[i] */
  10670. XMEMCPY(r, tmp + KEYWRAP_BLOCK_SIZE, KEYWRAP_BLOCK_SIZE);
  10671. r += KEYWRAP_BLOCK_SIZE;
  10672. }
  10673. if (ret != 0)
  10674. break;
  10675. r = out + KEYWRAP_BLOCK_SIZE;
  10676. }
  10677. VECTOR_REGISTERS_POP;
  10678. if (ret != 0)
  10679. return ret;
  10680. /* C[0] = A */
  10681. XMEMCPY(out, tmp, KEYWRAP_BLOCK_SIZE);
  10682. return (int)(inSz + KEYWRAP_BLOCK_SIZE);
  10683. }
/* Perform AES key wrap (RFC 3394) with a one-shot key: initializes a
 * temporary Aes context, sets the KEK for encryption, and delegates to
 * wc_AesKeyWrap_ex().
 *
 * key/keySz  the key-encryption key
 * in/inSz    key data to wrap
 * out/outSz  output buffer (needs inSz + 8 bytes)
 * iv         optional 8-byte integrity value; NULL uses the RFC 3394 default
 *
 * return out sz on success, negative on err */
int wc_AesKeyWrap(const byte* key, word32 keySz, const byte* in, word32 inSz,
                  byte* out, word32 outSz, const byte* iv)
{
#ifdef WOLFSSL_SMALL_STACK
    Aes *aes = NULL;
#else
    Aes aes[1];
#endif
    int ret;

    if (key == NULL)
        return BAD_FUNC_ARG;

#ifdef WOLFSSL_SMALL_STACK
    if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
                              DYNAMIC_TYPE_AES)) == NULL)
        return MEMORY_E;
#endif

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret != 0)
        goto out;

    /* key wrap uses only the AES-encrypt primitive */
    ret = wc_AesSetKey(aes, key, keySz, NULL, AES_ENCRYPTION);
    if (ret != 0) {
        wc_AesFree(aes);
        goto out;
    }

    ret = wc_AesKeyWrap_ex(aes, in, inSz, out, outSz, iv);

    wc_AesFree(aes);

  out:
#ifdef WOLFSSL_SMALL_STACK
    if (aes != NULL)
        XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif

    return ret;
}
/* AES key unwrap (RFC 3394) using an already-initialized Aes context.
 *
 * aes    initialized AES context holding the KEK, set for AES_DECRYPTION
 * in     wrapped data; length must be a multiple of 8 and >= 24 bytes
 * inSz   size of in, in bytes
 * out    output buffer; must hold inSz - KEYWRAP_BLOCK_SIZE bytes
 * outSz  size of out, in bytes
 * iv     optional expected 8-byte integrity value; NULL compares against
 *        the RFC 3394 default of 0xA6 repeated
 *
 * returns the unwrapped length (inSz - KEYWRAP_BLOCK_SIZE) on success,
 * negative on error (BAD_KEYWRAP_IV_E when the integrity check fails) */
int wc_AesKeyUnWrap_ex(Aes *aes, const byte* in, word32 inSz, byte* out,
                       word32 outSz, const byte* iv)
{
    byte* r;
    word32 i, n;
    int j;
    int ret = 0;

    byte t[KEYWRAP_BLOCK_SIZE];
    byte tmp[AES_BLOCK_SIZE];   /* A | R[i] working block for each step */

    const byte* expIv;
    const byte defaultIV[] = {  /* RFC 3394 default integrity value */
        0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6
    };

    if (aes == NULL || in == NULL || inSz < 3 * KEYWRAP_BLOCK_SIZE ||
        out == NULL || outSz < (inSz - KEYWRAP_BLOCK_SIZE))
        return BAD_FUNC_ARG;

    /* input must be multiple of 64-bits */
    if (inSz % KEYWRAP_BLOCK_SIZE != 0)
        return BAD_FUNC_ARG;

    /* user IV optional */
    if (iv != NULL)
        expIv = iv;
    else
        expIv = defaultIV;

    /* A = C[0], R[i] = C[i] */
    XMEMCPY(tmp, in, KEYWRAP_BLOCK_SIZE);
    XMEMCPY(out, in + KEYWRAP_BLOCK_SIZE, inSz - KEYWRAP_BLOCK_SIZE);
    XMEMSET(t, 0, sizeof(t));

    VECTOR_REGISTERS_PUSH;

    /* initialize counter to 6n; since inSz is a multiple of 8,
     * (inSz - 1) / 8 equals inSz / 8 - 1, i.e. the number of R blocks */
    n = (inSz - 1) / KEYWRAP_BLOCK_SIZE;
    InitKeyWrapCounter(t, 6 * n);

    /* RFC 3394 section 2.2.2: unwind the 6 wrap passes in reverse */
    for (j = 5; j >= 0; j--) {
        for (i = n; i >= 1; i--) {
            /* calculate A */
            xorbuf(tmp, t, KEYWRAP_BLOCK_SIZE);
            DecrementKeyWrapCounter(t);

            /* load R[i], starting at end of R */
            r = out + ((i - 1) * KEYWRAP_BLOCK_SIZE);
            XMEMCPY(tmp + KEYWRAP_BLOCK_SIZE, r, KEYWRAP_BLOCK_SIZE);
            ret = wc_AesDecryptDirect(aes, tmp, tmp);
            if (ret != 0)
                break;

            /* save R[i] */
            XMEMCPY(r, tmp + KEYWRAP_BLOCK_SIZE, KEYWRAP_BLOCK_SIZE);
        }
        if (ret != 0)
            break;
    }

    VECTOR_REGISTERS_POP;

    if (ret != 0)
        return ret;

    /* verify IV */
    if (XMEMCMP(tmp, expIv, KEYWRAP_BLOCK_SIZE) != 0)
        return BAD_KEYWRAP_IV_E;

    return (int)(inSz - KEYWRAP_BLOCK_SIZE);
}
  10775. int wc_AesKeyUnWrap(const byte* key, word32 keySz, const byte* in, word32 inSz,
  10776. byte* out, word32 outSz, const byte* iv)
  10777. {
  10778. #ifdef WOLFSSL_SMALL_STACK
  10779. Aes *aes = NULL;
  10780. #else
  10781. Aes aes[1];
  10782. #endif
  10783. int ret;
  10784. (void)iv;
  10785. if (key == NULL)
  10786. return BAD_FUNC_ARG;
  10787. #ifdef WOLFSSL_SMALL_STACK
  10788. if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
  10789. DYNAMIC_TYPE_AES)) == NULL)
  10790. return MEMORY_E;
  10791. #endif
  10792. ret = wc_AesInit(aes, NULL, INVALID_DEVID);
  10793. if (ret != 0)
  10794. goto out;
  10795. ret = wc_AesSetKey(aes, key, keySz, NULL, AES_DECRYPTION);
  10796. if (ret != 0) {
  10797. wc_AesFree(aes);
  10798. goto out;
  10799. }
  10800. ret = wc_AesKeyUnWrap_ex(aes, in, inSz, out, outSz, iv);
  10801. wc_AesFree(aes);
  10802. out:
  10803. #ifdef WOLFSSL_SMALL_STACK
  10804. if (aes)
  10805. XFREE(aes, NULL, DYNAMIC_TYPE_AES);
  10806. #endif
  10807. return ret;
  10808. }
  10809. #endif /* HAVE_AES_KEYWRAP */
  10810. #ifdef WOLFSSL_AES_XTS
/* Galois field polynomial constant used for XTS tweak multiplication */
  10812. #define GF_XTS 0x87
  10813. int wc_AesXtsInit(XtsAes* aes, void* heap, int devId)
  10814. {
  10815. int ret = 0;
  10816. if (aes == NULL) {
  10817. return BAD_FUNC_ARG;
  10818. }
  10819. if ((ret = wc_AesInit(&aes->tweak, heap, devId)) != 0) {
  10820. return ret;
  10821. }
  10822. if ((ret = wc_AesInit(&aes->aes, heap, devId)) != 0) {
  10823. return ret;
  10824. }
  10825. return 0;
  10826. }
/* This is to help with setting keys to correct encrypt or decrypt type.
 * The key buffer holds the data key followed by the tweak key, each of
 * len/2 bytes. The tweak key is always set for encryption, since the XTS
 * tweak is encrypted in both directions.
 *
 * tweak AES key for tweak in XTS
 * aes   AES key for encrypt/decrypt process
 * key   buffer holding aes key | tweak key
 * len   length of key buffer in bytes. Should be twice that of key size.
 *       i.e. 32 for a 16 byte key.
 * dir   direction, either AES_ENCRYPTION or AES_DECRYPTION
 *
 * return 0 on success
 */
int wc_AesXtsSetKeyNoInit(XtsAes* aes, const byte* key, word32 len, int dir)
{
    word32 keySz;
    int ret = 0;

    if (aes == NULL || key == NULL) {
        return BAD_FUNC_ARG;
    }

    /* each half of the buffer is one AES key; only AES-128 and AES-256
     * are supported for XTS */
    keySz = len/2;
    if (keySz != 16 && keySz != 32) {
        WOLFSSL_MSG("Unsupported key size");
        return WC_KEY_SIZE_E;
    }

    if ((ret = wc_AesSetKey(&aes->aes, key, keySz, NULL, dir)) == 0) {
        ret = wc_AesSetKey(&aes->tweak, key + keySz, keySz, NULL,
                AES_ENCRYPTION);
        if (ret != 0) {
            /* unwind the data-key setup on tweak-key failure */
            wc_AesFree(&aes->aes);
        }
#ifdef WOLFSSL_AESNI
        /* both contexts must agree on AES-NI use so the combined XTS
         * assembly path sees consistent key schedules; disable it on
         * whichever side enabled it alone */
        if (aes->aes.use_aesni != aes->tweak.use_aesni) {
            if (aes->aes.use_aesni)
                aes->aes.use_aesni = 0;
            else
                aes->tweak.use_aesni = 0;
        }
#endif
    }

    return ret;
}
  10869. /* Combined call to wc_AesXtsInit() and wc_AesXtsSetKeyNoInit().
  10870. *
  10871. * Note: is up to user to call wc_AesXtsFree when done.
  10872. *
  10873. * return 0 on success
  10874. */
  10875. int wc_AesXtsSetKey(XtsAes* aes, const byte* key, word32 len, int dir,
  10876. void* heap, int devId)
  10877. {
  10878. int ret = 0;
  10879. if (aes == NULL || key == NULL) {
  10880. return BAD_FUNC_ARG;
  10881. }
  10882. ret = wc_AesXtsInit(aes, heap, devId);
  10883. if (ret != 0)
  10884. return ret;
  10885. ret = wc_AesXtsSetKeyNoInit(aes, key, len, dir);
  10886. if (ret != 0)
  10887. wc_AesXtsFree(aes);
  10888. return ret;
  10889. }
  10890. /* This is used to free up resources used by Aes structs
  10891. *
  10892. * aes AES keys to free
  10893. *
  10894. * return 0 on success
  10895. */
  10896. int wc_AesXtsFree(XtsAes* aes)
  10897. {
  10898. if (aes != NULL) {
  10899. wc_AesFree(&aes->aes);
  10900. wc_AesFree(&aes->tweak);
  10901. }
  10902. return 0;
  10903. }
  10904. /* Same process as wc_AesXtsEncrypt but uses a word64 type as the tweak value
  10905. * instead of a byte array. This just converts the word64 to a byte array and
  10906. * calls wc_AesXtsEncrypt.
  10907. *
  10908. * aes AES keys to use for block encrypt/decrypt
  10909. * out output buffer to hold cipher text
  10910. * in input plain text buffer to encrypt
  10911. * sz size of both out and in buffers
  10912. * sector value to use for tweak
  10913. *
  10914. * returns 0 on success
  10915. */
  10916. int wc_AesXtsEncryptSector(XtsAes* aes, byte* out, const byte* in,
  10917. word32 sz, word64 sector)
  10918. {
  10919. byte* pt;
  10920. byte i[AES_BLOCK_SIZE];
  10921. XMEMSET(i, 0, AES_BLOCK_SIZE);
  10922. #ifdef BIG_ENDIAN_ORDER
  10923. sector = ByteReverseWord64(sector);
  10924. #endif
  10925. pt = (byte*)&sector;
  10926. XMEMCPY(i, pt, sizeof(word64));
  10927. return wc_AesXtsEncrypt(aes, out, in, sz, (const byte*)i, AES_BLOCK_SIZE);
  10928. }
  10929. /* Same process as wc_AesXtsDecrypt but uses a word64 type as the tweak value
  10930. * instead of a byte array. This just converts the word64 to a byte array.
  10931. *
  10932. * aes AES keys to use for block encrypt/decrypt
  10933. * out output buffer to hold plain text
  10934. * in input cipher text buffer to encrypt
  10935. * sz size of both out and in buffers
  10936. * sector value to use for tweak
  10937. *
  10938. * returns 0 on success
  10939. */
  10940. int wc_AesXtsDecryptSector(XtsAes* aes, byte* out, const byte* in, word32 sz,
  10941. word64 sector)
  10942. {
  10943. byte* pt;
  10944. byte i[AES_BLOCK_SIZE];
  10945. XMEMSET(i, 0, AES_BLOCK_SIZE);
  10946. #ifdef BIG_ENDIAN_ORDER
  10947. sector = ByteReverseWord64(sector);
  10948. #endif
  10949. pt = (byte*)&sector;
  10950. XMEMCPY(i, pt, sizeof(word64));
  10951. return wc_AesXtsDecrypt(aes, out, in, sz, (const byte*)i, AES_BLOCK_SIZE);
  10952. }
  10953. #ifdef WOLFSSL_AESNI
  10954. #if defined(USE_INTEL_SPEEDUP)
  10955. #define HAVE_INTEL_AVX1
  10956. #define HAVE_INTEL_AVX2
  10957. #endif /* USE_INTEL_SPEEDUP */
  10958. void AES_XTS_encrypt_aesni(const unsigned char *in, unsigned char *out, word32 sz,
  10959. const unsigned char* i, const unsigned char* key,
  10960. const unsigned char* key2, int nr)
  10961. XASM_LINK("AES_XTS_encrypt_aesni");
  10962. #ifdef HAVE_INTEL_AVX1
  10963. void AES_XTS_encrypt_avx1(const unsigned char *in, unsigned char *out,
  10964. word32 sz, const unsigned char* i,
  10965. const unsigned char* key, const unsigned char* key2,
  10966. int nr)
  10967. XASM_LINK("AES_XTS_encrypt_avx1");
  10968. #endif /* HAVE_INTEL_AVX1 */
  10969. #ifdef HAVE_AES_DECRYPT
  10970. void AES_XTS_decrypt_aesni(const unsigned char *in, unsigned char *out, word32 sz,
  10971. const unsigned char* i, const unsigned char* key,
  10972. const unsigned char* key2, int nr)
  10973. XASM_LINK("AES_XTS_decrypt_aesni");
  10974. #ifdef HAVE_INTEL_AVX1
  10975. void AES_XTS_decrypt_avx1(const unsigned char *in, unsigned char *out,
  10976. word32 sz, const unsigned char* i,
  10977. const unsigned char* key, const unsigned char* key2,
  10978. int nr)
  10979. XASM_LINK("AES_XTS_decrypt_avx1");
  10980. #endif /* HAVE_INTEL_AVX1 */
  10981. #endif /* HAVE_AES_DECRYPT */
  10982. #endif /* WOLFSSL_AESNI */
  10983. #if !defined(WOLFSSL_ARMASM) || defined(WOLFSSL_ARMASM_NO_HW_CRYPTO)
  10984. #ifdef HAVE_AES_ECB
/* Helper function for encrypting / decrypting a full buffer at once via ECB.
 *
 * Precondition: the caller has seeded out[0..AES_BLOCK_SIZE) with the
 * encrypted tweak T0. This routine fills each subsequent block of 'out' with
 * the previous block's tweak multiplied by x in GF(2^128) (shift-left with
 * GF_XTS reduction), XORs the whole tweak stream with 'in' (pre-whitening),
 * then runs a single ECB pass in place. The caller applies the per-block
 * post-whitening XOR afterwards.
 *
 * aes  AES context for the data key
 * out  output buffer seeded with T0; must NOT alias 'in'
 * sz   total byte count; only full blocks are processed
 * dir  AES_ENCRYPTION or AES_DECRYPTION
 *
 * returns 0 on success, negative on error */
static WARN_UNUSED_RESULT int _AesXtsHelper(
    Aes* aes, byte* out, const byte* in, word32 sz, int dir)
{
    word32 outSz = sz;
    word32 totalSz = (sz / AES_BLOCK_SIZE) * AES_BLOCK_SIZE; /* total bytes */
    byte* pt = out;

    /* first block already holds T0; generate the rest */
    outSz -= AES_BLOCK_SIZE;

    while (outSz > 0) {
        word32 j;
        byte carry = 0;

        /* multiply by shift left and propagate carry */
        for (j = 0; j < AES_BLOCK_SIZE && outSz > 0; j++, outSz--) {
            byte tmpC;

            tmpC = (pt[j] >> 7) & 0x01;
            pt[j+AES_BLOCK_SIZE] = (byte)((pt[j] << 1) + carry);
            carry = tmpC;
        }
        if (carry) {
            pt[AES_BLOCK_SIZE] ^= GF_XTS;   /* GF(2^128) reduction */
        }

        pt += AES_BLOCK_SIZE;
    }

    xorbuf(out, in, totalSz);
    if (dir == AES_ENCRYPTION) {
        return _AesEcbEncrypt(aes, out, out, totalSz);
    }
    else {
        return _AesEcbDecrypt(aes, out, out, totalSz);
    }
}
  11016. #endif /* HAVE_AES_ECB */
/* AES with XTS mode. (XTS) XEX encryption with Tweak and cipher text Stealing.
 *
 * xaes  AES keys to use for block encrypt/decrypt
 * out   output buffer to hold cipher text
 * in    input plain text buffer to encrypt
 * sz    size of both out and in buffers
 * i     value to use for tweak
 *
 * returns 0 on success
 */
/* Software AES - XTS Encrypt */
static int AesXtsEncrypt_sw(XtsAes* xaes, byte* out, const byte* in, word32 sz,
        const byte* i)
{
    int ret = 0;
    word32 blocks = (sz / AES_BLOCK_SIZE);
    Aes *aes = &xaes->aes;
    Aes *tweak = &xaes->tweak;
    byte tmp[AES_BLOCK_SIZE];

    XMEMSET(tmp, 0, AES_BLOCK_SIZE); /* set to 0's in case of improper AES
                                      * key setup passed to encrypt direct*/

    /* T0 = E_K2(i): the tweak is encrypted with the tweak key */
    ret = wc_AesEncryptDirect(tweak, tmp, i);
    if (ret != 0)
        return ret;

#ifdef HAVE_AES_ECB
    /* encrypt all of buffer at once when possible */
    if (in != out) { /* can not handle inline */
        XMEMCPY(out, tmp, AES_BLOCK_SIZE);
        if ((ret = _AesXtsHelper(aes, out, in, sz, AES_ENCRYPTION)) != 0)
            return ret;
    }
#endif

    while (blocks > 0) {
        word32 j;
        byte carry = 0;

#ifdef HAVE_AES_ECB
        if (in == out)
#endif
        { /* check for if inline */
            byte buf[AES_BLOCK_SIZE];

            XMEMCPY(buf, in, AES_BLOCK_SIZE);
            xorbuf(buf, tmp, AES_BLOCK_SIZE);       /* pre-whitening */
            ret = wc_AesEncryptDirect(aes, out, buf);
            if (ret != 0)
                return ret;
        }
        xorbuf(out, tmp, AES_BLOCK_SIZE);           /* post-whitening */

        /* multiply by shift left and propagate carry: advance the tweak
         * by x in GF(2^128) for the next block */
        for (j = 0; j < AES_BLOCK_SIZE; j++) {
            byte tmpC;

            tmpC = (tmp[j] >> 7) & 0x01;
            tmp[j] = (byte)((tmp[j] << 1) + carry);
            carry = tmpC;
        }
        if (carry) {
            tmp[0] ^= GF_XTS;
        }

        in += AES_BLOCK_SIZE;
        out += AES_BLOCK_SIZE;
        sz -= AES_BLOCK_SIZE;
        blocks--;
    }

    /* stealing operation of XTS to handle left overs */
    if (sz > 0) {
        byte buf[AES_BLOCK_SIZE];

        XMEMCPY(buf, out - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
        if (sz >= AES_BLOCK_SIZE) { /* extra sanity check before copy */
            return BUFFER_E;
        }
        if (in != out) {
            XMEMCPY(out, buf, sz);
            XMEMCPY(buf, in, sz);
        }
        else {
            /* in-place: stage through a second buffer so the partial plain
             * text is captured before being overwritten */
            byte buf2[AES_BLOCK_SIZE];

            XMEMCPY(buf2, buf, sz);
            XMEMCPY(buf, in, sz);
            XMEMCPY(out, buf2, sz);
        }

        xorbuf(buf, tmp, AES_BLOCK_SIZE);
        ret = wc_AesEncryptDirect(aes, out - AES_BLOCK_SIZE, buf);
        if (ret == 0)
            xorbuf(out - AES_BLOCK_SIZE, tmp, AES_BLOCK_SIZE);
    }

    return ret;
}
/* AES with XTS mode. (XTS) XEX encryption with Tweak and cipher text Stealing.
 *
 * xaes  AES keys to use for block encrypt/decrypt
 * out   output buffer to hold cipher text
 * in    input plain text buffer to encrypt
 * sz    size of both out and in buffers; must be at least AES_BLOCK_SIZE
 * i     value to use for tweak
 * iSz   size of i buffer, should always be AES_BLOCK_SIZE but having this
 *       input adds a sanity check on how the user calls the function.
 *
 * returns 0 on success
 */
int wc_AesXtsEncrypt(XtsAes* xaes, byte* out, const byte* in, word32 sz,
        const byte* i, word32 iSz)
{
    int ret;

    if (xaes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    if (iSz < AES_BLOCK_SIZE) {
        return BAD_FUNC_ARG;
    }

    if (sz < AES_BLOCK_SIZE) {
        WOLFSSL_MSG("Plain text input too small for encryption");
        return BAD_FUNC_ARG;
    }

    {
#ifdef WOLFSSL_AESNI
#ifdef WC_AES_C_DYNAMIC_FALLBACK
        int orig_use_aesni = xaes->aes.use_aesni;
#endif

        /* if vector registers can't be saved, either fall back to the C
         * implementation (dynamic fallback builds) or fail outright */
        if (xaes->aes.use_aesni && ((ret = SAVE_VECTOR_REGISTERS2()) != 0)) {
#ifdef WC_AES_C_DYNAMIC_FALLBACK
            xaes->aes.use_aesni = 0;
            xaes->tweak.use_aesni = 0;
#else
            return ret;
#endif
        }
        if (xaes->aes.use_aesni) {
#if defined(HAVE_INTEL_AVX1)
            if (IS_INTEL_AVX1(intel_flags)) {
                AES_XTS_encrypt_avx1(in, out, sz, i, (const byte*)xaes->aes.key,
                        (const byte*)xaes->tweak.key, (int)xaes->aes.rounds);
                ret = 0;
            }
            else
#endif
            {
                AES_XTS_encrypt_aesni(in, out, sz, i, (const byte*)xaes->aes.key,
                        (const byte*)xaes->tweak.key, (int)xaes->aes.rounds);
                ret = 0;
            }
        }
        else
#endif
        {
            ret = AesXtsEncrypt_sw(xaes, out, in, sz, i);
        }

#ifdef WOLFSSL_AESNI
        if (xaes->aes.use_aesni)
            RESTORE_VECTOR_REGISTERS();
#ifdef WC_AES_C_DYNAMIC_FALLBACK
        /* re-enable AES-NI on the contexts if it was cleared for fallback */
        else if (orig_use_aesni) {
            xaes->aes.use_aesni = orig_use_aesni;
            xaes->tweak.use_aesni = orig_use_aesni;
        }
#endif
#endif
    }

    return ret;
}
/* Same process as encryption but Aes key is AES_DECRYPTION type.
 *
 * xaes  AES keys to use for block encrypt/decrypt; note the tweak is still
 *       ENCRYPTED with the tweak key, even for decryption
 * out   output buffer to hold plain text
 * in    input cipher text buffer to decrypt
 * sz    size of both out and in buffers
 * i     value to use for tweak
 *
 * returns 0 on success
 */
/* Software AES - XTS Decrypt */
static int AesXtsDecrypt_sw(XtsAes* xaes, byte* out, const byte* in, word32 sz,
        const byte* i)
{
    int ret = 0;
    word32 blocks = (sz / AES_BLOCK_SIZE);
    Aes *aes = &xaes->aes;
    Aes *tweak = &xaes->tweak;
    word32 j;
    byte carry = 0;
    byte tmp[AES_BLOCK_SIZE];
    byte stl = (sz % AES_BLOCK_SIZE);   /* leftover (stolen) byte count */

    XMEMSET(tmp, 0, AES_BLOCK_SIZE); /* set to 0's in case of improper AES
                                      * key setup passed to decrypt direct*/

    /* T0 = E_K2(i) */
    ret = wc_AesEncryptDirect(tweak, tmp, i);
    if (ret != 0)
        return ret;

    /* if Stealing then break out of loop one block early to handle special
     * case */
    if (stl > 0) {
        blocks--;
    }

#ifdef HAVE_AES_ECB
    /* decrypt all of buffer at once when possible */
    if (in != out) { /* can not handle inline */
        XMEMCPY(out, tmp, AES_BLOCK_SIZE);
        if ((ret = _AesXtsHelper(aes, out, in, sz, AES_DECRYPTION)) != 0)
            return ret;
    }
#endif

    while (blocks > 0) {
#ifdef HAVE_AES_ECB
        if (in == out)
#endif
        { /* check for if inline */
            byte buf[AES_BLOCK_SIZE];

            XMEMCPY(buf, in, AES_BLOCK_SIZE);
            xorbuf(buf, tmp, AES_BLOCK_SIZE);       /* pre-whitening */
            ret = wc_AesDecryptDirect(aes, out, buf);
            if (ret != 0)
                return ret;
        }
        xorbuf(out, tmp, AES_BLOCK_SIZE);           /* post-whitening */

        /* multiply by shift left and propagate carry: advance the tweak
         * by x in GF(2^128) for the next block */
        for (j = 0; j < AES_BLOCK_SIZE; j++) {
            byte tmpC;

            tmpC = (tmp[j] >> 7) & 0x01;
            tmp[j] = (byte)((tmp[j] << 1) + carry);
            carry = tmpC;
        }
        if (carry) {
            tmp[0] ^= GF_XTS;
        }
        carry = 0;

        in += AES_BLOCK_SIZE;
        out += AES_BLOCK_SIZE;
        sz -= AES_BLOCK_SIZE;
        blocks--;
    }

    /* stealing operation of XTS to handle left overs */
    if (sz >= AES_BLOCK_SIZE) {
        byte buf[AES_BLOCK_SIZE];
        byte tmp2[AES_BLOCK_SIZE];

        /* multiply by shift left and propagate carry: tmp2 becomes the
         * tweak for the FINAL full block, which must be processed with the
         * later tweak before the partial block uses 'tmp' */
        for (j = 0; j < AES_BLOCK_SIZE; j++) {
            byte tmpC;

            tmpC = (tmp[j] >> 7) & 0x01;
            tmp2[j] = (byte)((tmp[j] << 1) + carry);
            carry = tmpC;
        }
        if (carry) {
            tmp2[0] ^= GF_XTS;
        }

        XMEMCPY(buf, in, AES_BLOCK_SIZE);
        xorbuf(buf, tmp2, AES_BLOCK_SIZE);
        ret = wc_AesDecryptDirect(aes, out, buf);
        if (ret != 0)
            return ret;
        xorbuf(out, tmp2, AES_BLOCK_SIZE);

        /* tmp2 holds partial | last */
        XMEMCPY(tmp2, out, AES_BLOCK_SIZE);
        in += AES_BLOCK_SIZE;
        out += AES_BLOCK_SIZE;
        sz -= AES_BLOCK_SIZE;

        /* Make buffer with end of cipher text | last */
        XMEMCPY(buf, tmp2, AES_BLOCK_SIZE);
        if (sz >= AES_BLOCK_SIZE) { /* extra sanity check before copy */
            return BUFFER_E;
        }
        XMEMCPY(buf, in, sz);
        XMEMCPY(out, tmp2, sz);

        xorbuf(buf, tmp, AES_BLOCK_SIZE);
        ret = wc_AesDecryptDirect(aes, tmp2, buf);
        if (ret != 0)
            return ret;
        xorbuf(tmp2, tmp, AES_BLOCK_SIZE);
        XMEMCPY(out - AES_BLOCK_SIZE, tmp2, AES_BLOCK_SIZE);
    }

    return ret;
}
/* Same process as encryption but Aes key is AES_DECRYPTION type.
 *
 * xaes AES keys to use for block encrypt/decrypt
 * out  output buffer to hold plain text
 * in   input cipher text buffer to decrypt
 * sz   size of both out and in buffers
 * i    value to use for tweak
 * iSz  size of i buffer, should always be AES_BLOCK_SIZE but having this input
 *      adds a sanity check on how the user calls the function.
 *
 * returns 0 on success
 */
int wc_AesXtsDecrypt(XtsAes* xaes, byte* out, const byte* in, word32 sz,
        const byte* i, word32 iSz)
{
    int ret;

    if (xaes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    if (iSz < AES_BLOCK_SIZE) {
        return BAD_FUNC_ARG;
    }

    /* XTS needs at least one full block of ciphertext; ciphertext stealing
     * only covers a partial trailing block, not inputs shorter than one
     * block. */
    if (sz < AES_BLOCK_SIZE) {
        WOLFSSL_MSG("Cipher text input too small for decryption");
        return BAD_FUNC_ARG;
    }

    {
#ifdef WOLFSSL_AESNI
#ifdef WC_AES_C_DYNAMIC_FALLBACK
        /* Remember the configured AESNI setting so it can be restored after a
         * temporary fallback to the C implementation. */
        int orig_use_aesni = xaes->aes.use_aesni;
#endif

        /* If the vector registers cannot be saved, disable AESNI on both the
         * data key and the tweak key and take the software path below. */
        if (xaes->aes.use_aesni && (SAVE_VECTOR_REGISTERS2() != 0)) {
            xaes->aes.use_aesni = 0;
            xaes->tweak.use_aesni = 0;
        }
        if (xaes->aes.use_aesni) {
#if defined(HAVE_INTEL_AVX1)
            if (IS_INTEL_AVX1(intel_flags)) {
                AES_XTS_decrypt_avx1(in, out, sz, i, (const byte*)xaes->aes.key,
                        (const byte*)xaes->tweak.key, (int)xaes->aes.rounds);
                ret = 0;
            }
            else
#endif
            {
                AES_XTS_decrypt_aesni(in, out, sz, i, (const byte*)xaes->aes.key,
                        (const byte*)xaes->tweak.key, (int)xaes->aes.rounds);
                ret = 0;
            }
        }
        else
#endif
        {
            ret = AesXtsDecrypt_sw(xaes, out, in, sz, i);
        }

#ifdef WOLFSSL_AESNI
        if (xaes->aes.use_aesni)
            RESTORE_VECTOR_REGISTERS();
#ifdef WC_AES_C_DYNAMIC_FALLBACK
        /* AESNI was disabled above for this call only; re-enable it for
         * subsequent calls. */
        else if (orig_use_aesni) {
            xaes->aes.use_aesni = orig_use_aesni;
            xaes->tweak.use_aesni = orig_use_aesni;
        }
#endif
#endif

        return ret;
    }
}
  11353. #endif /* !WOLFSSL_ARMASM || WOLFSSL_ARMASM_NO_HW_CRYPTO */
  11354. /* Same as wc_AesXtsEncryptSector but the sector gets incremented by one every
  11355. * sectorSz bytes
  11356. *
  11357. * xaes AES keys to use for block encrypt
  11358. * out output buffer to hold cipher text
  11359. * in input plain text buffer to encrypt
  11360. * sz size of both out and in buffers
  11361. * sector value to use for tweak
  11362. * sectorSz size of the sector
  11363. *
  11364. * returns 0 on success
  11365. */
  11366. int wc_AesXtsEncryptConsecutiveSectors(XtsAes* aes, byte* out, const byte* in,
  11367. word32 sz, word64 sector, word32 sectorSz)
  11368. {
  11369. int ret = 0;
  11370. word32 iter = 0;
  11371. word32 sectorCount;
  11372. word32 remainder;
  11373. if (aes == NULL || out == NULL || in == NULL || sectorSz == 0) {
  11374. return BAD_FUNC_ARG;
  11375. }
  11376. if (sz < AES_BLOCK_SIZE) {
  11377. WOLFSSL_MSG("Cipher text input too small for encryption");
  11378. return BAD_FUNC_ARG;
  11379. }
  11380. sectorCount = sz / sectorSz;
  11381. remainder = sz % sectorSz;
  11382. while (sectorCount) {
  11383. ret = wc_AesXtsEncryptSector(aes, out + (iter * sectorSz),
  11384. in + (iter * sectorSz), sectorSz, sector);
  11385. if (ret != 0)
  11386. break;
  11387. sectorCount--;
  11388. iter++;
  11389. sector++;
  11390. }
  11391. if (remainder && ret == 0)
  11392. ret = wc_AesXtsEncryptSector(aes, out + (iter * sectorSz),
  11393. in + (iter * sectorSz), remainder, sector);
  11394. return ret;
  11395. }
  11396. /* Same as wc_AesXtsEncryptConsecutiveSectors but Aes key is AES_DECRYPTION type
  11397. *
  11398. * xaes AES keys to use for block decrypt
  11399. * out output buffer to hold cipher text
  11400. * in input plain text buffer to encrypt
  11401. * sz size of both out and in buffers
  11402. * sector value to use for tweak
  11403. * sectorSz size of the sector
  11404. *
  11405. * returns 0 on success
  11406. */
  11407. int wc_AesXtsDecryptConsecutiveSectors(XtsAes* aes, byte* out, const byte* in,
  11408. word32 sz, word64 sector, word32 sectorSz)
  11409. {
  11410. int ret = 0;
  11411. word32 iter = 0;
  11412. word32 sectorCount;
  11413. word32 remainder;
  11414. if (aes == NULL || out == NULL || in == NULL || sectorSz == 0) {
  11415. return BAD_FUNC_ARG;
  11416. }
  11417. if (sz < AES_BLOCK_SIZE) {
  11418. WOLFSSL_MSG("Cipher text input too small for decryption");
  11419. return BAD_FUNC_ARG;
  11420. }
  11421. sectorCount = sz / sectorSz;
  11422. remainder = sz % sectorSz;
  11423. while (sectorCount) {
  11424. ret = wc_AesXtsDecryptSector(aes, out + (iter * sectorSz),
  11425. in + (iter * sectorSz), sectorSz, sector);
  11426. if (ret != 0)
  11427. break;
  11428. sectorCount--;
  11429. iter++;
  11430. sector++;
  11431. }
  11432. if (remainder && ret == 0)
  11433. ret = wc_AesXtsDecryptSector(aes, out + (iter * sectorSz),
  11434. in + (iter * sectorSz), remainder, sector);
  11435. return ret;
  11436. }
  11437. #endif /* WOLFSSL_AES_XTS */
  11438. #ifdef WOLFSSL_AES_SIV
/*
 * S2V ("string to vector") PRF: derives the synthetic IV from the associated
 * data, nonce, and message using chained AES-CMAC values and GF(2^128)
 * doubling. See RFC 5297 Section 2.4.
 *
 * key, keySz     CMAC key (the S2V half of the combined SIV key)
 * assoc, assocSz associated data to authenticate
 * nonce, nonceSz optional nonce; nonceSz == 0 means no nonce component
 * data, dataSz   message (plaintext on encrypt, recovered text on decrypt)
 * out            receives the AES_BLOCK_SIZE-byte result
 *
 * Returns 0 on success, negative wolfCrypt error code otherwise.
 *
 * tmp[0]/tmp[1] hold the running CMAC and chaining values; tmp[2] is scratch
 * used to pad messages shorter than one block.
 */
static WARN_UNUSED_RESULT int S2V(
    const byte* key, word32 keySz, const byte* assoc, word32 assocSz,
    const byte* nonce, word32 nonceSz, const byte* data,
    word32 dataSz, byte* out)
{
#ifdef WOLFSSL_SMALL_STACK
    byte* tmp[3] = {NULL, NULL, NULL};
    int i;
    Cmac* cmac;
#else
    byte tmp[3][AES_BLOCK_SIZE];
    Cmac cmac[1];
#endif
    word32 macSz = AES_BLOCK_SIZE;
    int ret = 0;
    word32 zeroBytes;

#ifdef WOLFSSL_SMALL_STACK
    /* Heap-allocate the three block-sized work buffers. */
    for (i = 0; i < 3; ++i) {
        tmp[i] = (byte*)XMALLOC(AES_BLOCK_SIZE, NULL, DYNAMIC_TYPE_TMP_BUFFER);
        if (tmp[i] == NULL) {
            ret = MEMORY_E;
            break;
        }
    }
    if (ret == 0)
#endif
    {
        XMEMSET(tmp[1], 0, AES_BLOCK_SIZE);
        XMEMSET(tmp[2], 0, AES_BLOCK_SIZE);

        /* D = CMAC(K, <zero block>) */
        ret = wc_AesCmacGenerate(tmp[0], &macSz, tmp[1], AES_BLOCK_SIZE,
                                 key, keySz);
        if (ret == 0) {
            /* Doubling step (dbl) from RFC 5297. */
            ShiftAndXorRb(tmp[1], tmp[0]);
            /* D = dbl(D) XOR CMAC(K, AD) */
            ret = wc_AesCmacGenerate(tmp[0], &macSz, assoc, assocSz, key,
                                     keySz);
            if (ret == 0) {
                xorbuf(tmp[1], tmp[0], AES_BLOCK_SIZE);
            }
        }
    }

    if (ret == 0) {
        if (nonceSz > 0) {
            /* D = dbl(D) XOR CMAC(K, nonce) */
            ShiftAndXorRb(tmp[0], tmp[1]);
            ret = wc_AesCmacGenerate(tmp[1], &macSz, nonce, nonceSz, key,
                                     keySz);
            if (ret == 0) {
                xorbuf(tmp[0], tmp[1], AES_BLOCK_SIZE);
            }
        }
        else {
            /* No nonce: carry D forward unchanged (result now in tmp[0]). */
            XMEMCPY(tmp[0], tmp[1], AES_BLOCK_SIZE);
        }
    }

    if (ret == 0) {
        if (dataSz >= AES_BLOCK_SIZE) {
            /* Long message: out = CMAC(K, data with D XORed into the last
             * block) -- the "xorend" construction. */
#ifdef WOLFSSL_SMALL_STACK
            cmac = (Cmac*)XMALLOC(sizeof(Cmac), NULL, DYNAMIC_TYPE_CMAC);
            if (cmac == NULL) {
                ret = MEMORY_E;
            }
            if (ret == 0)
#endif
            {
#ifdef WOLFSSL_CHECK_MEM_ZERO
                /* Aes part is checked by wc_AesFree. */
                wc_MemZero_Add("wc_AesCmacGenerate cmac",
                    ((unsigned char *)cmac) + sizeof(Aes),
                    sizeof(Cmac) - sizeof(Aes));
#endif
                /* XOR D into the final block of the message. */
                xorbuf(tmp[0], data + (dataSz - AES_BLOCK_SIZE),
                       AES_BLOCK_SIZE);
                ret = wc_InitCmac(cmac, key, keySz, WC_CMAC_AES, NULL);
                if (ret == 0) {
                    /* All but the final block, unmodified... */
                    ret = wc_CmacUpdate(cmac, data, dataSz - AES_BLOCK_SIZE);
                }
                if (ret == 0) {
                    /* ...then the final block with D folded in. */
                    ret = wc_CmacUpdate(cmac, tmp[0], AES_BLOCK_SIZE);
                }
                if (ret == 0) {
                    ret = wc_CmacFinal(cmac, out, &macSz);
                }
            }
#ifdef WOLFSSL_SMALL_STACK
            if (cmac != NULL) {
                XFREE(cmac, NULL, DYNAMIC_TYPE_CMAC);
            }
#elif defined(WOLFSSL_CHECK_MEM_ZERO)
            wc_MemZero_Check(cmac, sizeof(Cmac));
#endif
        }
        else {
            /* Short message: out = CMAC(K, dbl(D) XOR pad(data)), where
             * pad() appends 0x80 then zeros up to the block size. */
            XMEMCPY(tmp[2], data, dataSz);
            tmp[2][dataSz] |= 0x80;
            zeroBytes = AES_BLOCK_SIZE - (dataSz + 1);
            if (zeroBytes != 0) {
                XMEMSET(tmp[2] + dataSz + 1, 0, zeroBytes);
            }
            ShiftAndXorRb(tmp[1], tmp[0]);
            xorbuf(tmp[1], tmp[2], AES_BLOCK_SIZE);
            ret = wc_AesCmacGenerate(out, &macSz, tmp[1], AES_BLOCK_SIZE, key,
                                     keySz);
        }
    }

#ifdef WOLFSSL_SMALL_STACK
    for (i = 0; i < 3; ++i) {
        if (tmp[i] != NULL) {
            XFREE(tmp[i], NULL, DYNAMIC_TYPE_TMP_BUFFER);
        }
    }
#endif

    return ret;
}
  11554. static WARN_UNUSED_RESULT int AesSivCipher(
  11555. const byte* key, word32 keySz, const byte* assoc,
  11556. word32 assocSz, const byte* nonce, word32 nonceSz,
  11557. const byte* data, word32 dataSz, byte* siv, byte* out,
  11558. int enc)
  11559. {
  11560. int ret = 0;
  11561. #ifdef WOLFSSL_SMALL_STACK
  11562. Aes* aes = NULL;
  11563. #else
  11564. Aes aes[1];
  11565. #endif
  11566. byte sivTmp[AES_BLOCK_SIZE];
  11567. if (key == NULL || siv == NULL || out == NULL) {
  11568. WOLFSSL_MSG("Bad parameter");
  11569. ret = BAD_FUNC_ARG;
  11570. }
  11571. if (ret == 0 && keySz != 32 && keySz != 48 && keySz != 64) {
  11572. WOLFSSL_MSG("Bad key size. Must be 256, 384, or 512 bits.");
  11573. ret = BAD_FUNC_ARG;
  11574. }
  11575. if (ret == 0) {
  11576. if (enc == 1) {
  11577. ret = S2V(key, keySz / 2, assoc, assocSz, nonce, nonceSz, data,
  11578. dataSz, sivTmp);
  11579. if (ret != 0) {
  11580. WOLFSSL_MSG("S2V failed.");
  11581. }
  11582. else {
  11583. XMEMCPY(siv, sivTmp, AES_BLOCK_SIZE);
  11584. }
  11585. }
  11586. else {
  11587. XMEMCPY(sivTmp, siv, AES_BLOCK_SIZE);
  11588. }
  11589. }
  11590. #ifdef WOLFSSL_SMALL_STACK
  11591. if (ret == 0) {
  11592. aes = (Aes*)XMALLOC(sizeof(Aes), NULL, DYNAMIC_TYPE_AES);
  11593. if (aes == NULL) {
  11594. ret = MEMORY_E;
  11595. }
  11596. }
  11597. #endif
  11598. if (ret == 0) {
  11599. ret = wc_AesInit(aes, NULL, INVALID_DEVID);
  11600. if (ret != 0) {
  11601. WOLFSSL_MSG("Failed to initialized AES object.");
  11602. }
  11603. }
  11604. if (ret == 0 && dataSz > 0) {
  11605. sivTmp[12] &= 0x7f;
  11606. sivTmp[8] &= 0x7f;
  11607. ret = wc_AesSetKey(aes, key + keySz / 2, keySz / 2, sivTmp,
  11608. AES_ENCRYPTION);
  11609. if (ret != 0) {
  11610. WOLFSSL_MSG("Failed to set key for AES-CTR.");
  11611. }
  11612. else {
  11613. ret = wc_AesCtrEncrypt(aes, out, data, dataSz);
  11614. if (ret != 0) {
  11615. WOLFSSL_MSG("AES-CTR encryption failed.");
  11616. }
  11617. }
  11618. }
  11619. if (ret == 0 && enc == 0) {
  11620. ret = S2V(key, keySz / 2, assoc, assocSz, nonce, nonceSz, out, dataSz,
  11621. sivTmp);
  11622. if (ret != 0) {
  11623. WOLFSSL_MSG("S2V failed.");
  11624. }
  11625. if (XMEMCMP(siv, sivTmp, AES_BLOCK_SIZE) != 0) {
  11626. WOLFSSL_MSG("Computed SIV doesn't match received SIV.");
  11627. ret = AES_SIV_AUTH_E;
  11628. }
  11629. }
  11630. wc_AesFree(aes);
  11631. #ifdef WOLFSSL_SMALL_STACK
  11632. XFREE(aes, NULL, DYNAMIC_TYPE_AES);
  11633. #endif
  11634. return ret;
  11635. }
  11636. /*
  11637. * See RFC 5297 Section 2.6.
  11638. */
  11639. int wc_AesSivEncrypt(const byte* key, word32 keySz, const byte* assoc,
  11640. word32 assocSz, const byte* nonce, word32 nonceSz,
  11641. const byte* in, word32 inSz, byte* siv, byte* out)
  11642. {
  11643. return AesSivCipher(key, keySz, assoc, assocSz, nonce, nonceSz, in, inSz,
  11644. siv, out, 1);
  11645. }
  11646. /*
  11647. * See RFC 5297 Section 2.7.
  11648. */
  11649. int wc_AesSivDecrypt(const byte* key, word32 keySz, const byte* assoc,
  11650. word32 assocSz, const byte* nonce, word32 nonceSz,
  11651. const byte* in, word32 inSz, byte* siv, byte* out)
  11652. {
  11653. return AesSivCipher(key, keySz, assoc, assocSz, nonce, nonceSz, in, inSz,
  11654. siv, out, 0);
  11655. }
  11656. #endif /* WOLFSSL_AES_SIV */
  11657. #if defined(WOLFSSL_AES_EAX)
/*
 * AES EAX one-shot API
 * Encrypts input data and computes an auth tag over the input
 * auth data and ciphertext
 *
 * key, keySz        AES key
 * out               ciphertext output; must hold inSz bytes
 * in, inSz          plaintext input
 * nonce, nonceSz    nonce for this operation
 * authTag, authTagSz receives the computed tag (at most AES_BLOCK_SIZE bytes,
 *                    enforced by wc_AesEaxEncryptFinal)
 * authIn, authInSz  additional data to authenticate
 *
 * Returns 0 on success
 * Returns error code on failure
 */
int wc_AesEaxEncryptAuth(const byte* key, word32 keySz, byte* out,
                         const byte* in, word32 inSz,
                         const byte* nonce, word32 nonceSz,
                         /* output computed auth tag */
                         byte* authTag, word32 authTagSz,
                         /* input data to authenticate */
                         const byte* authIn, word32 authInSz)
{
#if defined(WOLFSSL_SMALL_STACK)
    AesEax *eax;
#else
    AesEax eax_mem;
    AesEax *eax = &eax_mem;
#endif
    int ret;
    int eaxInited = 0;

    if (key == NULL || out == NULL || in == NULL || nonce == NULL
            || authTag == NULL || authIn == NULL) {
        return BAD_FUNC_ARG;
    }

#if defined(WOLFSSL_SMALL_STACK)
    if ((eax = (AesEax *)XMALLOC(sizeof(AesEax),
                                 NULL,
                                 DYNAMIC_TYPE_AES_EAX)) == NULL) {
        return MEMORY_E;
    }
#endif

    /* One-shot = init, single encrypt update, then final to emit the tag. */
    if ((ret = wc_AesEaxInit(eax,
                             key, keySz,
                             nonce, nonceSz,
                             authIn, authInSz)) != 0) {
        goto cleanup;
    }
    eaxInited = 1;

    if ((ret = wc_AesEaxEncryptUpdate(eax, out, in, inSz, NULL, 0)) != 0) {
        goto cleanup;
    }

    if ((ret = wc_AesEaxEncryptFinal(eax, authTag, authTagSz)) != 0) {
        goto cleanup;
    }

cleanup:
    /* Free the EAX context only if init succeeded; always release the heap
     * allocation in small-stack builds. */
    if (eaxInited)
        wc_AesEaxFree(eax);
#if defined(WOLFSSL_SMALL_STACK)
    XFREE(eax, NULL, DYNAMIC_TYPE_AES_EAX);
#endif
    return ret;
}
/*
 * AES EAX one-shot API
 * Decrypts and authenticates data against a supplied auth tag
 *
 * key, keySz        AES key
 * out               plaintext output; must hold inSz bytes
 * in, inSz          ciphertext input
 * nonce, nonceSz    nonce used at encryption time
 * authTag, authTagSz expected tag to verify (at most AES_BLOCK_SIZE bytes,
 *                    enforced by wc_AesEaxDecryptFinal)
 * authIn, authInSz  additional data that was authenticated
 *
 * Returns 0 on success
 * Returns error code on failure (including tag-mismatch)
 */
int wc_AesEaxDecryptAuth(const byte* key, word32 keySz, byte* out,
                         const byte* in, word32 inSz,
                         const byte* nonce, word32 nonceSz,
                         /* auth tag to verify against */
                         const byte* authTag, word32 authTagSz,
                         /* input data to authenticate */
                         const byte* authIn, word32 authInSz)
{
#if defined(WOLFSSL_SMALL_STACK)
    AesEax *eax;
#else
    AesEax eax_mem;
    AesEax *eax = &eax_mem;
#endif
    int ret;
    int eaxInited = 0;

    if (key == NULL || out == NULL || in == NULL || nonce == NULL
            || authTag == NULL || authIn == NULL) {
        return BAD_FUNC_ARG;
    }

#if defined(WOLFSSL_SMALL_STACK)
    if ((eax = (AesEax *)XMALLOC(sizeof(AesEax),
                                 NULL,
                                 DYNAMIC_TYPE_AES_EAX)) == NULL) {
        return MEMORY_E;
    }
#endif

    /* One-shot = init, single decrypt update, then final to verify the tag. */
    if ((ret = wc_AesEaxInit(eax,
                             key, keySz,
                             nonce, nonceSz,
                             authIn, authInSz)) != 0) {
        goto cleanup;
    }
    eaxInited = 1;

    if ((ret = wc_AesEaxDecryptUpdate(eax, out, in, inSz, NULL, 0)) != 0) {
        goto cleanup;
    }

    if ((ret = wc_AesEaxDecryptFinal(eax, authTag, authTagSz)) != 0) {
        goto cleanup;
    }

cleanup:
    /* Free the EAX context only if init succeeded; always release the heap
     * allocation in small-stack builds. */
    if (eaxInited)
        wc_AesEaxFree(eax);
#if defined(WOLFSSL_SMALL_STACK)
    XFREE(eax, NULL, DYNAMIC_TYPE_AES_EAX);
#endif
    return ret;
}
  11769. /*
  11770. * AES EAX Incremental API:
  11771. * Initializes an AES EAX encryption or decryption operation. This must be
  11772. * called before any other EAX APIs are used on the AesEax struct
  11773. *
  11774. * Returns 0 on success
  11775. * Returns error code on failure
  11776. */
  11777. int wc_AesEaxInit(AesEax* eax,
  11778. const byte* key, word32 keySz,
  11779. const byte* nonce, word32 nonceSz,
  11780. const byte* authIn, word32 authInSz)
  11781. {
  11782. int ret = 0;
  11783. word32 cmacSize;
  11784. int aesInited = 0;
  11785. int nonceCmacInited = 0;
  11786. int aadCmacInited = 0;
  11787. if (eax == NULL || key == NULL || nonce == NULL) {
  11788. return BAD_FUNC_ARG;
  11789. }
  11790. XMEMSET(eax->prefixBuf, 0, sizeof(eax->prefixBuf));
  11791. if ((ret = wc_AesInit(&eax->aes, NULL, INVALID_DEVID)) != 0) {
  11792. goto out;
  11793. }
  11794. aesInited = 1;
  11795. if ((ret = wc_AesSetKey(&eax->aes,
  11796. key,
  11797. keySz,
  11798. NULL,
  11799. AES_ENCRYPTION)) != 0) {
  11800. goto out;
  11801. }
  11802. /*
  11803. * OMAC the nonce to use as the IV for CTR encryption and auth tag chunk
  11804. * N' = OMAC^0_K(N)
  11805. */
  11806. if ((ret = wc_InitCmac(&eax->nonceCmac,
  11807. key,
  11808. keySz,
  11809. WC_CMAC_AES,
  11810. NULL)) != 0) {
  11811. return ret;
  11812. }
  11813. nonceCmacInited = 1;
  11814. if ((ret = wc_CmacUpdate(&eax->nonceCmac,
  11815. eax->prefixBuf,
  11816. sizeof(eax->prefixBuf))) != 0) {
  11817. goto out;
  11818. }
  11819. if ((ret = wc_CmacUpdate(&eax->nonceCmac, nonce, nonceSz)) != 0) {
  11820. goto out;
  11821. }
  11822. cmacSize = AES_BLOCK_SIZE;
  11823. if ((ret = wc_CmacFinal(&eax->nonceCmac,
  11824. eax->nonceCmacFinal,
  11825. &cmacSize)) != 0) {
  11826. goto out;
  11827. }
  11828. if ((ret = wc_AesSetIV(&eax->aes, eax->nonceCmacFinal)) != 0) {
  11829. goto out;
  11830. }
  11831. /*
  11832. * start the OMAC used to build the auth tag chunk for the AD .
  11833. * This CMAC is continued in subsequent update calls when more auth data is
  11834. * provided
  11835. * H' = OMAC^1_K(H)
  11836. */
  11837. eax->prefixBuf[AES_BLOCK_SIZE-1] = 1;
  11838. if ((ret = wc_InitCmac(&eax->aadCmac,
  11839. key,
  11840. keySz,
  11841. WC_CMAC_AES,
  11842. NULL)) != 0) {
  11843. goto out;
  11844. }
  11845. aadCmacInited = 1;
  11846. if ((ret = wc_CmacUpdate(&eax->aadCmac,
  11847. eax->prefixBuf,
  11848. sizeof(eax->prefixBuf))) != 0) {
  11849. goto out;
  11850. }
  11851. if (authIn != NULL) {
  11852. if ((ret = wc_CmacUpdate(&eax->aadCmac, authIn, authInSz)) != 0) {
  11853. goto out;
  11854. }
  11855. }
  11856. /*
  11857. * start the OMAC to create auth tag chunk for ciphertext. This MAC will be
  11858. * updated in subsequent calls to encrypt/decrypt
  11859. * C' = OMAC^2_K(C)
  11860. */
  11861. eax->prefixBuf[AES_BLOCK_SIZE-1] = 2;
  11862. if ((ret = wc_InitCmac(&eax->ciphertextCmac,
  11863. key,
  11864. keySz,
  11865. WC_CMAC_AES,
  11866. NULL)) != 0) {
  11867. goto out;
  11868. }
  11869. if ((ret = wc_CmacUpdate(&eax->ciphertextCmac,
  11870. eax->prefixBuf,
  11871. sizeof(eax->prefixBuf))) != 0) {
  11872. goto out;
  11873. }
  11874. out:
  11875. if (ret != 0) {
  11876. if (aesInited)
  11877. wc_AesFree(&eax->aes);
  11878. if (nonceCmacInited)
  11879. wc_CmacFree(&eax->nonceCmac);
  11880. if (aadCmacInited)
  11881. wc_CmacFree(&eax->aadCmac);
  11882. }
  11883. return ret;
  11884. }
  11885. /*
  11886. * AES EAX Incremental API:
  11887. * Encrypts input plaintext using AES EAX mode, adding optional auth data to
  11888. * the authentication stream
  11889. *
  11890. * Returns 0 on success
  11891. * Returns error code on failure
  11892. */
  11893. int wc_AesEaxEncryptUpdate(AesEax* eax, byte* out,
  11894. const byte* in, word32 inSz,
  11895. const byte* authIn, word32 authInSz)
  11896. {
  11897. int ret;
  11898. if (eax == NULL || out == NULL || in == NULL) {
  11899. return BAD_FUNC_ARG;
  11900. }
  11901. /*
  11902. * Encrypt the plaintext using AES CTR
  11903. * C = CTR(M)
  11904. */
  11905. if ((ret = wc_AesCtrEncrypt(&eax->aes, out, in, inSz)) != 0) {
  11906. return ret;
  11907. }
  11908. /*
  11909. * update OMAC with new ciphertext
  11910. * C' = OMAC^2_K(C)
  11911. */
  11912. if ((ret = wc_CmacUpdate(&eax->ciphertextCmac, out, inSz)) != 0) {
  11913. return ret;
  11914. }
  11915. /* If there exists new auth data, update the OMAC for that as well */
  11916. if (authIn != NULL) {
  11917. if ((ret = wc_CmacUpdate(&eax->aadCmac, authIn, authInSz)) != 0) {
  11918. return ret;
  11919. }
  11920. }
  11921. return 0;
  11922. }
  11923. /*
  11924. * AES EAX Incremental API:
  11925. * Decrypts input ciphertext using AES EAX mode, adding optional auth data to
  11926. * the authentication stream
  11927. *
  11928. * Returns 0 on success
  11929. * Returns error code on failure
  11930. */
  11931. int wc_AesEaxDecryptUpdate(AesEax* eax, byte* out,
  11932. const byte* in, word32 inSz,
  11933. const byte* authIn, word32 authInSz)
  11934. {
  11935. int ret;
  11936. if (eax == NULL || out == NULL || in == NULL) {
  11937. return BAD_FUNC_ARG;
  11938. }
  11939. /*
  11940. * Decrypt the plaintext using AES CTR
  11941. * C = CTR(M)
  11942. */
  11943. if ((ret = wc_AesCtrEncrypt(&eax->aes, out, in, inSz)) != 0) {
  11944. return ret;
  11945. }
  11946. /*
  11947. * update OMAC with new ciphertext
  11948. * C' = OMAC^2_K(C)
  11949. */
  11950. if ((ret = wc_CmacUpdate(&eax->ciphertextCmac, in, inSz)) != 0) {
  11951. return ret;
  11952. }
  11953. /* If there exists new auth data, update the OMAC for that as well */
  11954. if (authIn != NULL) {
  11955. if ((ret = wc_CmacUpdate(&eax->aadCmac, authIn, authInSz)) != 0) {
  11956. return ret;
  11957. }
  11958. }
  11959. return 0;
  11960. }
  11961. /*
  11962. * AES EAX Incremental API:
  11963. * Provides additional auth data information to the authentication
  11964. * stream for an authenticated encryption or decryption operation
  11965. *
  11966. * Returns 0 on success
  11967. * Returns error code on failure
  11968. */
  11969. int wc_AesEaxAuthDataUpdate(AesEax* eax, const byte* authIn, word32 authInSz)
  11970. {
  11971. return wc_CmacUpdate(&eax->aadCmac, authIn, authInSz);
  11972. }
  11973. /*
  11974. * AES EAX Incremental API:
  11975. * Finalizes the authenticated encryption operation, computing the auth tag
  11976. * over previously supplied auth data and computed ciphertext
  11977. *
  11978. * Returns 0 on success
  11979. * Returns error code on failure
  11980. */
  11981. int wc_AesEaxEncryptFinal(AesEax* eax, byte* authTag, word32 authTagSz)
  11982. {
  11983. word32 cmacSize;
  11984. int ret;
  11985. word32 i;
  11986. if (eax == NULL || authTag == NULL || authTagSz > AES_BLOCK_SIZE) {
  11987. return BAD_FUNC_ARG;
  11988. }
  11989. /* Complete the OMAC for the ciphertext */
  11990. cmacSize = AES_BLOCK_SIZE;
  11991. if ((ret = wc_CmacFinalNoFree(&eax->ciphertextCmac,
  11992. eax->ciphertextCmacFinal,
  11993. &cmacSize)) != 0) {
  11994. return ret;
  11995. }
  11996. /* Complete the OMAC for auth data */
  11997. cmacSize = AES_BLOCK_SIZE;
  11998. if ((ret = wc_CmacFinalNoFree(&eax->aadCmac,
  11999. eax->aadCmacFinal,
  12000. &cmacSize)) != 0) {
  12001. return ret;
  12002. }
  12003. /*
  12004. * Concatenate all three auth tag chunks into the final tag, truncating
  12005. * at the specified tag length
  12006. * T = Tag [first authTagSz bytes]
  12007. */
  12008. for (i = 0; i < authTagSz; i++) {
  12009. authTag[i] = eax->nonceCmacFinal[i]
  12010. ^ eax->aadCmacFinal[i]
  12011. ^ eax->ciphertextCmacFinal[i];
  12012. }
  12013. return 0;
  12014. }
/*
 * AES EAX Incremental API:
 * Finalizes the authenticated decryption operation, computing the auth tag
 * for the previously supplied auth data and cipher text and validating it
 * against a provided auth tag
 *
 * Note: here authIn/authInSz is the EXPECTED auth tag to verify against
 * (at most AES_BLOCK_SIZE bytes), not additional auth data.
 *
 * Returns 0 on success
 * Return error code for failure
 */
int wc_AesEaxDecryptFinal(AesEax* eax,
                          const byte* authIn, word32 authInSz)
{
    int ret;
    word32 i;
    word32 cmacSize;

#if defined(WOLFSSL_SMALL_STACK)
    byte *authTag;
#else
    byte authTag[AES_BLOCK_SIZE];
#endif

    if (eax == NULL || authIn == NULL || authInSz > AES_BLOCK_SIZE) {
        return BAD_FUNC_ARG;
    }

    /* Complete the OMAC for the ciphertext */
    cmacSize = AES_BLOCK_SIZE;
    if ((ret = wc_CmacFinalNoFree(&eax->ciphertextCmac,
                                  eax->ciphertextCmacFinal,
                                  &cmacSize)) != 0) {
        return ret;
    }

    /* Complete the OMAC for auth data */
    cmacSize = AES_BLOCK_SIZE;
    if ((ret = wc_CmacFinalNoFree(&eax->aadCmac,
                                  eax->aadCmacFinal,
                                  &cmacSize)) != 0) {
        return ret;
    }

#if defined(WOLFSSL_SMALL_STACK)
    authTag = (byte*)XMALLOC(AES_BLOCK_SIZE, NULL, DYNAMIC_TYPE_TMP_BUFFER);
    if (authTag == NULL) {
        return MEMORY_E;
    }
#endif

    /*
     * Concatenate all three auth tag chunks into the final tag, truncating
     * at the specified tag length
     * T = Tag [first authInSz bytes]
     */
    for (i = 0; i < authInSz; i++) {
        authTag[i] = eax->nonceCmacFinal[i]
                   ^ eax->aadCmacFinal[i]
                   ^ eax->ciphertextCmacFinal[i];
    }

    /* Constant-time compare so tag verification does not leak via timing. */
    if (ConstantCompare((const byte*)authTag, authIn, (int)authInSz) != 0) {
        ret = AES_EAX_AUTH_E;
    }
    else {
        ret = 0;
    }

#if defined(WOLFSSL_SMALL_STACK)
    XFREE(authTag, NULL, DYNAMIC_TYPE_TMP_BUFFER);
#endif

    return ret;
}
  12079. /*
  12080. * Frees the underlying CMAC and AES contexts. Must be called when done using
  12081. * the AES EAX context structure.
  12082. *
  12083. * Returns 0 on success
  12084. * Returns error code on failure
  12085. */
  12086. int wc_AesEaxFree(AesEax* eax)
  12087. {
  12088. if (eax == NULL) {
  12089. return BAD_FUNC_ARG;
  12090. }
  12091. (void)wc_CmacFree(&eax->ciphertextCmac);
  12092. (void)wc_CmacFree(&eax->aadCmac);
  12093. wc_AesFree(&eax->aes);
  12094. return 0;
  12095. }
  12096. #endif /* WOLFSSL_AES_EAX */
  12097. #endif /* !NO_AES */