aes.c 393 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278
  1. /* aes.c
  2. *
  3. * Copyright (C) 2006-2023 wolfSSL Inc.
  4. *
  5. * This file is part of wolfSSL.
  6. *
  7. * wolfSSL is free software; you can redistribute it and/or modify
  8. * it under the terms of the GNU General Public License as published by
  9. * the Free Software Foundation; either version 2 of the License, or
  10. * (at your option) any later version.
  11. *
  12. * wolfSSL is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  15. * GNU General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU General Public License
  18. * along with this program; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA
  20. */
  21. /*
  22. DESCRIPTION
  23. This library provides the interfaces to the Advanced Encryption Standard (AES)
  24. for encrypting and decrypting data. AES is a symmetric block cipher
  25. standard that operates on fixed-size 128-bit blocks using keys of
  26. 128, 192, or 256 bits.
  27. */
  28. #ifdef HAVE_CONFIG_H
  29. #include <config.h>
  30. #endif
  31. #include <wolfssl/wolfcrypt/settings.h>
  32. #include <wolfssl/wolfcrypt/error-crypt.h>
  33. #if !defined(NO_AES)
  34. /* Tip: Locate the software cipher modes by searching for "Software AES" */
  35. #if defined(HAVE_FIPS) && \
  36. defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2)
  37. /* set NO_WRAPPERS before headers, use direct internal f()s not wrappers */
  38. #define FIPS_NO_WRAPPERS
  39. #ifdef USE_WINDOWS_API
  40. #pragma code_seg(".fipsA$g")
  41. #pragma const_seg(".fipsB$g")
  42. #endif
  43. #endif
  44. #include <wolfssl/wolfcrypt/aes.h>
  45. #ifdef WOLFSSL_AESNI
  46. #include <wmmintrin.h>
  47. #include <emmintrin.h>
  48. #include <smmintrin.h>
  49. #endif /* WOLFSSL_AESNI */
  50. #include <wolfssl/wolfcrypt/cpuid.h>
  51. #ifdef WOLF_CRYPTO_CB
  52. #include <wolfssl/wolfcrypt/cryptocb.h>
  53. #endif
  54. #ifdef WOLFSSL_SECO_CAAM
  55. #include <wolfssl/wolfcrypt/port/caam/wolfcaam.h>
  56. #endif
  57. #ifdef WOLFSSL_IMXRT_DCP
  58. #include <wolfssl/wolfcrypt/port/nxp/dcp_port.h>
  59. #endif
  60. #if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
  61. #include <wolfssl/wolfcrypt/port/nxp/se050_port.h>
  62. #endif
  63. #if defined(WOLFSSL_AES_SIV)
  64. #include <wolfssl/wolfcrypt/cmac.h>
  65. #endif /* WOLFSSL_AES_SIV */
  66. #if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
  67. #include <wolfssl/wolfcrypt/port/psa/psa.h>
  68. #endif
  69. #if defined(WOLFSSL_TI_CRYPT)
  70. #include <wolfcrypt/src/port/ti/ti-aes.c>
  71. #else
  72. #include <wolfssl/wolfcrypt/logging.h>
  73. #ifdef NO_INLINE
  74. #include <wolfssl/wolfcrypt/misc.h>
  75. #else
  76. #define WOLFSSL_MISC_INCLUDED
  77. #include <wolfcrypt/src/misc.c>
  78. #endif
  79. #ifndef WOLFSSL_ARMASM
  80. #ifdef WOLFSSL_IMX6_CAAM_BLOB
  81. /* case of possibly not using hardware acceleration for AES but using key
  82. blobs */
  83. #include <wolfssl/wolfcrypt/port/caam/wolfcaam.h>
  84. #endif
  85. #ifdef DEBUG_AESNI
  86. #include <stdio.h>
  87. #endif
  88. #ifdef _MSC_VER
  89. /* 4127 warning constant while(1) */
  90. #pragma warning(disable: 4127)
  91. #endif
  92. /* Define AES implementation includes and functions */
  93. #if defined(STM32_CRYPTO)
  94. /* STM32F2/F4/F7/L4/L5/H7/WB55 hardware AES support for ECB, CBC, CTR and GCM modes */
  95. #if defined(WOLFSSL_AES_DIRECT) || defined(HAVE_AESGCM) || defined(HAVE_AESCCM)
/* Encrypt one AES_BLOCK_SIZE (16-byte) block in ECB mode using the STM32
 * hardware crypto peripheral (CRYP).  Serves as the block primitive for
 * the direct/GCM/CCM modes guarded above.
 *
 * aes      - Aes context whose key is loaded into hardware by
 *            wc_Stm32_Aes_Init()
 * inBlock  - 16-byte plaintext input
 * outBlock - 16-byte ciphertext output
 *
 * Returns 0 on success, WC_TIMEOUT_E when the HAL call does not return
 * HAL_OK, or an error from wc_Stm32_Aes_Init()/wolfSSL_CryptHwMutexLock().
 *
 * NOTE(review): inBlock/outBlock are accessed through uint32_t casts in
 * both paths; this presumes 4-byte alignment of the caller's buffers —
 * confirm against callers. */
static WARN_UNUSED_RESULT int wc_AesEncrypt(
Aes* aes, const byte* inBlock, byte* outBlock)
{
int ret = 0;
#ifdef WOLFSSL_STM32_CUBEMX
CRYP_HandleTypeDef hcryp;
#else
CRYP_InitTypeDef cryptInit;
CRYP_KeyInitTypeDef keyInit;
#endif
#ifdef WOLFSSL_STM32_CUBEMX
/* no hardware touched yet, so a plain return is safe on failure */
ret = wc_Stm32_Aes_Init(aes, &hcryp);
if (ret != 0)
    return ret;
/* serialize access to the shared CRYP hardware */
ret = wolfSSL_CryptHwMutexLock();
if (ret != 0)
    return ret;
#if defined(STM32_HAL_V2)
hcryp.Init.Algorithm = CRYP_AES_ECB;
#elif defined(STM32_CRYPTO_AES_ONLY)
hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_ECB;
hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
#endif
HAL_CRYP_Init(&hcryp);
#if defined(STM32_HAL_V2)
ret = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)inBlock, AES_BLOCK_SIZE,
(uint32_t*)outBlock, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
outBlock, STM32_HAL_TIMEOUT);
#else
ret = HAL_CRYP_AESECB_Encrypt(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
outBlock, STM32_HAL_TIMEOUT);
#endif
/* any HAL failure is reported to the caller as a timeout */
if (ret != HAL_OK) {
ret = WC_TIMEOUT_E;
}
HAL_CRYP_DeInit(&hcryp);
#else /* Standard Peripheral Library */
ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
if (ret != 0)
    return ret;
ret = wolfSSL_CryptHwMutexLock();
if (ret != 0)
    return ret;
/* reset registers to their default values */
CRYP_DeInit();
/* setup key */
CRYP_KeyInit(&keyInit);
/* set direction and mode */
cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Encrypt;
cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_ECB;
CRYP_Init(&cryptInit);
/* enable crypto processor */
CRYP_Cmd(ENABLE);
/* flush IN/OUT FIFOs */
CRYP_FIFOFlush();
/* feed the 16-byte block into the input FIFO as four 32-bit words */
CRYP_DataIn(*(uint32_t*)&inBlock[0]);
CRYP_DataIn(*(uint32_t*)&inBlock[4]);
CRYP_DataIn(*(uint32_t*)&inBlock[8]);
CRYP_DataIn(*(uint32_t*)&inBlock[12]);
/* wait until the complete message has been processed */
while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}
/* drain the output FIFO: four 32-bit words = one ciphertext block */
*(uint32_t*)&outBlock[0] = CRYP_DataOut();
*(uint32_t*)&outBlock[4] = CRYP_DataOut();
*(uint32_t*)&outBlock[8] = CRYP_DataOut();
*(uint32_t*)&outBlock[12] = CRYP_DataOut();
/* disable crypto processor */
CRYP_Cmd(DISABLE);
#endif /* WOLFSSL_STM32_CUBEMX */
wolfSSL_CryptHwMutexUnLock();
wc_Stm32_Aes_Cleanup();
return ret;
}
  171. #endif /* WOLFSSL_AES_DIRECT || HAVE_AESGCM || HAVE_AESCCM */
  172. #ifdef HAVE_AES_DECRYPT
  173. #if defined(WOLFSSL_AES_DIRECT) || defined(HAVE_AESCCM)
/* Decrypt one AES_BLOCK_SIZE (16-byte) block in ECB mode using the STM32
 * hardware crypto peripheral (CRYP).
 *
 * aes      - Aes context whose key is loaded into hardware by
 *            wc_Stm32_Aes_Init()
 * inBlock  - 16-byte ciphertext input
 * outBlock - 16-byte plaintext output
 *
 * Returns 0 on success, WC_TIMEOUT_E when the HAL call does not return
 * HAL_OK, or an error from wc_Stm32_Aes_Init()/wolfSSL_CryptHwMutexLock().
 *
 * The hardware expands the decryption key schedule itself: the CUBEMX
 * AES-only variant uses CRYP_ALGOMODE_KEYDERIVATION_DECRYPT, and the SPL
 * path runs an explicit key-derivation pass (AlgoMode_AES_Key) before the
 * ECB decrypt pass.
 *
 * NOTE(review): inBlock/outBlock are accessed through uint32_t casts in
 * both paths; this presumes 4-byte alignment of the caller's buffers —
 * confirm against callers. */
static WARN_UNUSED_RESULT int wc_AesDecrypt(
Aes* aes, const byte* inBlock, byte* outBlock)
{
int ret = 0;
#ifdef WOLFSSL_STM32_CUBEMX
CRYP_HandleTypeDef hcryp;
#else
CRYP_InitTypeDef cryptInit;
CRYP_KeyInitTypeDef keyInit;
#endif
#ifdef WOLFSSL_STM32_CUBEMX
/* no hardware touched yet, so a plain return is safe on failure */
ret = wc_Stm32_Aes_Init(aes, &hcryp);
if (ret != 0)
    return ret;
/* serialize access to the shared CRYP hardware */
ret = wolfSSL_CryptHwMutexLock();
if (ret != 0)
    return ret;
#if defined(STM32_HAL_V2)
hcryp.Init.Algorithm = CRYP_AES_ECB;
#elif defined(STM32_CRYPTO_AES_ONLY)
hcryp.Init.OperatingMode = CRYP_ALGOMODE_KEYDERIVATION_DECRYPT;
hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_ECB;
hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
#endif
HAL_CRYP_Init(&hcryp);
#if defined(STM32_HAL_V2)
ret = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)inBlock, AES_BLOCK_SIZE,
(uint32_t*)outBlock, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
outBlock, STM32_HAL_TIMEOUT);
#else
ret = HAL_CRYP_AESECB_Decrypt(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
outBlock, STM32_HAL_TIMEOUT);
#endif
/* any HAL failure is reported to the caller as a timeout */
if (ret != HAL_OK) {
ret = WC_TIMEOUT_E;
}
HAL_CRYP_DeInit(&hcryp);
#else /* Standard Peripheral Library */
ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
if (ret != 0)
    return ret;
ret = wolfSSL_CryptHwMutexLock();
if (ret != 0)
    return ret;
/* reset registers to their default values */
CRYP_DeInit();
/* set direction and key */
CRYP_KeyInit(&keyInit);
/* phase 1: key-derivation mode expands the decrypt key schedule */
cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt;
cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_Key;
CRYP_Init(&cryptInit);
/* enable crypto processor */
CRYP_Cmd(ENABLE);
/* wait until decrypt key has been initialized */
while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}
/* phase 2: set direction and mode for the actual ECB decrypt */
cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt;
cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_ECB;
CRYP_Init(&cryptInit);
/* enable crypto processor */
CRYP_Cmd(ENABLE);
/* flush IN/OUT FIFOs */
CRYP_FIFOFlush();
/* feed the 16-byte block into the input FIFO as four 32-bit words */
CRYP_DataIn(*(uint32_t*)&inBlock[0]);
CRYP_DataIn(*(uint32_t*)&inBlock[4]);
CRYP_DataIn(*(uint32_t*)&inBlock[8]);
CRYP_DataIn(*(uint32_t*)&inBlock[12]);
/* wait until the complete message has been processed */
while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}
/* drain the output FIFO: four 32-bit words = one plaintext block */
*(uint32_t*)&outBlock[0] = CRYP_DataOut();
*(uint32_t*)&outBlock[4] = CRYP_DataOut();
*(uint32_t*)&outBlock[8] = CRYP_DataOut();
*(uint32_t*)&outBlock[12] = CRYP_DataOut();
/* disable crypto processor */
CRYP_Cmd(DISABLE);
#endif /* WOLFSSL_STM32_CUBEMX */
wolfSSL_CryptHwMutexUnLock();
wc_Stm32_Aes_Cleanup();
return ret;
}
  256. #endif /* WOLFSSL_AES_DIRECT || HAVE_AESCCM */
  257. #endif /* HAVE_AES_DECRYPT */
  258. #elif defined(HAVE_COLDFIRE_SEC)
  259. /* Freescale Coldfire SEC support for CBC mode.
  260. * NOTE: no support for AES-CTR/GCM/CCM/Direct */
  261. #include <wolfssl/wolfcrypt/types.h>
  262. #include "sec.h"
  263. #include "mcf5475_sec.h"
  264. #include "mcf5475_siu.h"
  265. #elif defined(FREESCALE_LTC)
  266. #include "fsl_ltc.h"
  267. #if defined(FREESCALE_LTC_AES_GCM)
  268. #undef NEED_AES_TABLES
  269. #undef GCM_TABLE
  270. #endif
  271. /* if LTC doesn't have GCM, use software with LTC AES ECB mode */
  272. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  273. Aes* aes, const byte* inBlock, byte* outBlock)
  274. {
  275. word32 keySize = 0;
  276. byte* key = (byte*)aes->key;
  277. int ret = wc_AesGetKeySize(aes, &keySize);
  278. if (ret != 0)
  279. return ret;
  280. if (wolfSSL_CryptHwMutexLock() == 0) {
  281. LTC_AES_EncryptEcb(LTC_BASE, inBlock, outBlock, AES_BLOCK_SIZE,
  282. key, keySize);
  283. wolfSSL_CryptHwMutexUnLock();
  284. }
  285. return 0;
  286. }
  287. #ifdef HAVE_AES_DECRYPT
  288. static WARN_UNUSED_RESULT int wc_AesDecrypt(
  289. Aes* aes, const byte* inBlock, byte* outBlock)
  290. {
  291. word32 keySize = 0;
  292. byte* key = (byte*)aes->key;
  293. int ret = wc_AesGetKeySize(aes, &keySize);
  294. if (ret != 0)
  295. return ret;
  296. if (wolfSSL_CryptHwMutexLock() == 0) {
  297. LTC_AES_DecryptEcb(LTC_BASE, inBlock, outBlock, AES_BLOCK_SIZE,
  298. key, keySize, kLTC_EncryptKey);
  299. wolfSSL_CryptHwMutexUnLock();
  300. }
  301. return 0;
  302. }
  303. #endif
  304. #elif defined(FREESCALE_MMCAU)
  305. /* Freescale mmCAU hardware AES support for Direct, CBC, CCM, GCM modes
  306. * through the CAU/mmCAU library. Documentation located in
  307. * ColdFire/ColdFire+ CAU and Kinetis mmCAU Software Library User
  308. * Guide (See note in README). */
  309. #ifdef FREESCALE_MMCAU_CLASSIC
  310. /* MMCAU 1.4 library used with non-KSDK / classic MQX builds */
  311. #include "cau_api.h"
  312. #else
  313. #include "fsl_mmcau.h"
  314. #endif
  315. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  316. Aes* aes, const byte* inBlock, byte* outBlock)
  317. {
  318. if (wolfSSL_CryptHwMutexLock() == 0) {
  319. #ifdef FREESCALE_MMCAU_CLASSIC
  320. if ((wc_ptr_t)outBlock % WOLFSSL_MMCAU_ALIGNMENT) {
  321. WOLFSSL_MSG("Bad cau_aes_encrypt alignment");
  322. return BAD_ALIGN_E;
  323. }
  324. cau_aes_encrypt(inBlock, (byte*)aes->key, aes->rounds, outBlock);
  325. #else
  326. MMCAU_AES_EncryptEcb(inBlock, (byte*)aes->key, aes->rounds,
  327. outBlock);
  328. #endif
  329. wolfSSL_CryptHwMutexUnLock();
  330. }
  331. return 0;
  332. }
  333. #ifdef HAVE_AES_DECRYPT
  334. static WARN_UNUSED_RESULT int wc_AesDecrypt(
  335. Aes* aes, const byte* inBlock, byte* outBlock)
  336. {
  337. if (wolfSSL_CryptHwMutexLock() == 0) {
  338. #ifdef FREESCALE_MMCAU_CLASSIC
  339. if ((wc_ptr_t)outBlock % WOLFSSL_MMCAU_ALIGNMENT) {
  340. WOLFSSL_MSG("Bad cau_aes_decrypt alignment");
  341. return BAD_ALIGN_E;
  342. }
  343. cau_aes_decrypt(inBlock, (byte*)aes->key, aes->rounds, outBlock);
  344. #else
  345. MMCAU_AES_DecryptEcb(inBlock, (byte*)aes->key, aes->rounds,
  346. outBlock);
  347. #endif
  348. wolfSSL_CryptHwMutexUnLock();
  349. }
  350. return 0;
  351. }
  352. #endif /* HAVE_AES_DECRYPT */
  353. #elif defined(WOLFSSL_PIC32MZ_CRYPT)
  354. #include <wolfssl/wolfcrypt/port/pic32/pic32mz-crypt.h>
  355. #if defined(HAVE_AESGCM) || defined(WOLFSSL_AES_DIRECT)
  356. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  357. Aes* aes, const byte* inBlock, byte* outBlock)
  358. {
  359. /* Thread mutex protection handled in Pic32Crypto */
  360. return wc_Pic32AesCrypt(aes->key, aes->keylen, NULL, 0,
  361. outBlock, inBlock, AES_BLOCK_SIZE,
  362. PIC32_ENCRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_RECB);
  363. }
  364. #endif
  365. #if defined(HAVE_AES_DECRYPT) && defined(WOLFSSL_AES_DIRECT)
  366. static WARN_UNUSED_RESULT int wc_AesDecrypt(
  367. Aes* aes, const byte* inBlock, byte* outBlock)
  368. {
  369. /* Thread mutex protection handled in Pic32Crypto */
  370. return wc_Pic32AesCrypt(aes->key, aes->keylen, NULL, 0,
  371. outBlock, inBlock, AES_BLOCK_SIZE,
  372. PIC32_DECRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_RECB);
  373. }
  374. #endif
  375. #elif defined(WOLFSSL_NRF51_AES)
  376. /* Use built-in AES hardware - AES 128 ECB Encrypt Only */
  377. #include "wolfssl/wolfcrypt/port/nrf51.h"
  378. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  379. Aes* aes, const byte* inBlock, byte* outBlock)
  380. {
  381. int ret;
  382. ret = wolfSSL_CryptHwMutexLock();
  383. if (ret == 0) {
  384. ret = nrf51_aes_encrypt(inBlock, (byte*)aes->key, aes->rounds,
  385. outBlock);
  386. wolfSSL_CryptHwMutexUnLock();
  387. }
  388. return ret;
  389. }
  390. #ifdef HAVE_AES_DECRYPT
  391. #error nRF51 AES Hardware does not support decrypt
  392. #endif /* HAVE_AES_DECRYPT */
  393. #elif defined(WOLFSSL_ESP32_CRYPT) && \
  394. !defined(NO_WOLFSSL_ESP32_CRYPT_AES)
  395. #include <esp_log.h>
  396. #include <wolfssl/wolfcrypt/port/Espressif/esp32-crypt.h>
/* Tag string passed to ESP-IDF esp_log output from this file.
 * NOTE(review): not declared 'static', so this exports a global symbol named
 * 'TAG' that can collide with other translation units at link time --
 * confirm no external references before narrowing the linkage. */
const char* TAG = "aes";
/* We'll use SW as a fallback for:
 *   unsupported key lengths (e.g. ESP32-S3)
 *   chipsets not implemented
 *   hardware busy */
  402. #define NEED_AES_TABLES
  403. #define NEED_AES_HW_FALLBACK
  404. #define NEED_SOFTWARE_AES_SETKEY
  405. #undef WOLFSSL_AES_DIRECT
  406. #define WOLFSSL_AES_DIRECT
  407. /* If we choose to never have a fallback to SW: */
  408. #if !defined(NEED_AES_HW_FALLBACK) && (defined(HAVE_AESGCM) || defined(WOLFSSL_AES_DIRECT))
  409. static WARN_UNUSED_RESULT int wc_AesEncrypt( /* calling this one when NO_AES_192 is defined */
  410. Aes* aes, const byte* inBlock, byte* outBlock)
  411. {
  412. int ret;
  413. /* Thread mutex protection handled in esp_aes_hw_InUse */
  414. #ifdef NEED_AES_HW_FALLBACK
  415. if (wc_esp32AesSupportedKeyLen(aes)) {
  416. ret = wc_esp32AesEncrypt(aes, inBlock, outBlock);
  417. }
  418. #else
  419. ret = wc_esp32AesEncrypt(aes, inBlock, outBlock);
  420. #endif
  421. return ret;
  422. }
  423. #endif
  424. /* If we choose to never have a fallback to SW */
  425. #if !defined(NEED_AES_HW_FALLBACK) && (defined(HAVE_AES_DECRYPT) && defined(WOLFSSL_AES_DIRECT))
static WARN_UNUSED_RESULT int wc_AesDecrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    /* Single-block decrypt on the ESP32 AES hardware accelerator, with an
     * optional software fallback for key lengths the chip's HW does not
     * support. Returns 0 on success or the underlying error code. */
    int ret = 0;
    /* Thread mutex protection handled in esp_aes_hw_InUse */
#ifdef NEED_AES_HW_FALLBACK
    /* use HW only when the key length is supported by this chipset */
    if (wc_esp32AesSupportedKeyLen(aes)) {
        ret = wc_esp32AesDecrypt(aes, inBlock, outBlock);
    }
    else {
        /* otherwise fall back to the software implementation */
        ret = wc_AesDecrypt_SW(aes, inBlock, outBlock);
    }
#else
    /* if we don't need fallback, always use HW */
    ret = wc_esp32AesDecrypt(aes, inBlock, outBlock);
#endif
    return ret;
}
  444. #endif
  445. #elif defined(WOLFSSL_AESNI)
  446. #define NEED_AES_TABLES
  447. /* Each platform needs to query info type 1 from cpuid to see if aesni is
  448. * supported. Also, let's setup a macro for proper linkage w/o ABI conflicts
  449. */
  450. #ifndef AESNI_ALIGN
  451. #define AESNI_ALIGN 16
  452. #endif
  453. static int checkAESNI = 0;
  454. static int haveAESNI = 0;
  455. static word32 intel_flags = 0;
  456. static WARN_UNUSED_RESULT int Check_CPU_support_AES(void)
  457. {
  458. intel_flags = cpuid_get_flags();
  459. return IS_INTEL_AESNI(intel_flags) != 0;
  460. }
  461. /* tell C compiler these are asm functions in case any mix up of ABI underscore
  462. prefix between clang/gcc/llvm etc */
  463. #ifdef HAVE_AES_CBC
  464. void AES_CBC_encrypt(const unsigned char* in, unsigned char* out,
  465. unsigned char* ivec, unsigned long length,
  466. const unsigned char* KS, int nr)
  467. XASM_LINK("AES_CBC_encrypt");
  468. #ifdef HAVE_AES_DECRYPT
  469. #if defined(WOLFSSL_AESNI_BY4) || defined(WOLFSSL_X86_BUILD)
  470. void AES_CBC_decrypt_by4(const unsigned char* in, unsigned char* out,
  471. unsigned char* ivec, unsigned long length,
  472. const unsigned char* KS, int nr)
  473. XASM_LINK("AES_CBC_decrypt_by4");
  474. #elif defined(WOLFSSL_AESNI_BY6)
  475. void AES_CBC_decrypt_by6(const unsigned char* in, unsigned char* out,
  476. unsigned char* ivec, unsigned long length,
  477. const unsigned char* KS, int nr)
  478. XASM_LINK("AES_CBC_decrypt_by6");
  479. #else /* WOLFSSL_AESNI_BYx */
  480. void AES_CBC_decrypt_by8(const unsigned char* in, unsigned char* out,
  481. unsigned char* ivec, unsigned long length,
  482. const unsigned char* KS, int nr)
  483. XASM_LINK("AES_CBC_decrypt_by8");
  484. #endif /* WOLFSSL_AESNI_BYx */
  485. #endif /* HAVE_AES_DECRYPT */
  486. #endif /* HAVE_AES_CBC */
  487. void AES_ECB_encrypt(const unsigned char* in, unsigned char* out,
  488. unsigned long length, const unsigned char* KS, int nr)
  489. XASM_LINK("AES_ECB_encrypt");
  490. #ifdef HAVE_AES_DECRYPT
  491. void AES_ECB_decrypt(const unsigned char* in, unsigned char* out,
  492. unsigned long length, const unsigned char* KS, int nr)
  493. XASM_LINK("AES_ECB_decrypt");
  494. #endif
  495. void AES_128_Key_Expansion(const unsigned char* userkey,
  496. unsigned char* key_schedule)
  497. XASM_LINK("AES_128_Key_Expansion");
  498. void AES_192_Key_Expansion(const unsigned char* userkey,
  499. unsigned char* key_schedule)
  500. XASM_LINK("AES_192_Key_Expansion");
  501. void AES_256_Key_Expansion(const unsigned char* userkey,
  502. unsigned char* key_schedule)
  503. XASM_LINK("AES_256_Key_Expansion");
  504. static WARN_UNUSED_RESULT int AES_set_encrypt_key(
  505. const unsigned char *userKey, const int bits, Aes* aes)
  506. {
  507. int ret;
  508. if (!userKey || !aes)
  509. return BAD_FUNC_ARG;
  510. switch (bits) {
  511. case 128:
  512. AES_128_Key_Expansion (userKey,(byte*)aes->key); aes->rounds = 10;
  513. return 0;
  514. case 192:
  515. AES_192_Key_Expansion (userKey,(byte*)aes->key); aes->rounds = 12;
  516. return 0;
  517. case 256:
  518. AES_256_Key_Expansion (userKey,(byte*)aes->key); aes->rounds = 14;
  519. return 0;
  520. default:
  521. ret = BAD_FUNC_ARG;
  522. }
  523. return ret;
  524. }
  525. #ifdef HAVE_AES_DECRYPT
static WARN_UNUSED_RESULT int AES_set_decrypt_key(
    const unsigned char* userKey, const int bits, Aes* aes)
{
    /* Build the AES-NI decryption key schedule: expand the encryption
     * schedule into scratch space, then store it in reverse round order
     * with AESIMC (InvMixColumns) applied to every round key except the
     * first and last, as required by the AESDEC instruction's "equivalent
     * inverse cipher" form. Returns 0 on success, BAD_FUNC_ARG on bad
     * arguments, or MEMORY_E if scratch allocation fails. */
    word32 nr;
#ifdef WOLFSSL_SMALL_STACK
    Aes *temp_key;
#else
    Aes temp_key[1];
#endif
    __m128i *Key_Schedule;
    __m128i *Temp_Key_Schedule;

    if (!userKey || !aes)
        return BAD_FUNC_ARG;

#ifdef WOLFSSL_SMALL_STACK
    /* heap-allocate the scratch schedule to keep stack usage small */
    if ((temp_key = (Aes *)XMALLOC(sizeof *aes, aes->heap,
                                   DYNAMIC_TYPE_AES)) == NULL)
        return MEMORY_E;
#endif

    /* expand the forward (encryption) schedule first */
    if (AES_set_encrypt_key(userKey,bits,temp_key) == BAD_FUNC_ARG) {
#ifdef WOLFSSL_SMALL_STACK
        XFREE(temp_key, aes->heap, DYNAMIC_TYPE_AES);
#endif
        return BAD_FUNC_ARG;
    }

    Key_Schedule = (__m128i*)aes->key;
    Temp_Key_Schedule = (__m128i*)temp_key->key;

    nr = temp_key->rounds;
    aes->rounds = nr;

    /* Vector registers must be saved around the intrinsics below; on
     * failure the macro runs the supplied cleanup statements and returns. */
#ifdef WOLFSSL_SMALL_STACK
    SAVE_VECTOR_REGISTERS(XFREE(temp_key, aes->heap, DYNAMIC_TYPE_AES); return _svr_ret;);
#else
    SAVE_VECTOR_REGISTERS(return _svr_ret;);
#endif

    /* last round key is applied first when decrypting */
    Key_Schedule[nr] = Temp_Key_Schedule[0];
    Key_Schedule[nr-1] = _mm_aesimc_si128(Temp_Key_Schedule[1]);
    Key_Schedule[nr-2] = _mm_aesimc_si128(Temp_Key_Schedule[2]);
    Key_Schedule[nr-3] = _mm_aesimc_si128(Temp_Key_Schedule[3]);
    Key_Schedule[nr-4] = _mm_aesimc_si128(Temp_Key_Schedule[4]);
    Key_Schedule[nr-5] = _mm_aesimc_si128(Temp_Key_Schedule[5]);
    Key_Schedule[nr-6] = _mm_aesimc_si128(Temp_Key_Schedule[6]);
    Key_Schedule[nr-7] = _mm_aesimc_si128(Temp_Key_Schedule[7]);
    Key_Schedule[nr-8] = _mm_aesimc_si128(Temp_Key_Schedule[8]);
    Key_Schedule[nr-9] = _mm_aesimc_si128(Temp_Key_Schedule[9]);

    /* extra rounds exist only for 192-bit (nr==12) and 256-bit (nr==14) */
    if (nr>10) {
        Key_Schedule[nr-10] = _mm_aesimc_si128(Temp_Key_Schedule[10]);
        Key_Schedule[nr-11] = _mm_aesimc_si128(Temp_Key_Schedule[11]);
    }

    if (nr>12) {
        Key_Schedule[nr-12] = _mm_aesimc_si128(Temp_Key_Schedule[12]);
        Key_Schedule[nr-13] = _mm_aesimc_si128(Temp_Key_Schedule[13]);
    }

    Key_Schedule[0] = Temp_Key_Schedule[nr];

    RESTORE_VECTOR_REGISTERS();

#ifdef WOLFSSL_SMALL_STACK
    XFREE(temp_key, aes->heap, DYNAMIC_TYPE_AES);
#endif
    return 0;
}
  584. #endif /* HAVE_AES_DECRYPT */
  585. #elif (defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) \
  586. && !defined(WOLFSSL_QNX_CAAM)) || \
  587. ((defined(WOLFSSL_AFALG) || defined(WOLFSSL_DEVCRYPTO_AES)) && \
  588. defined(HAVE_AESCCM))
  589. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  590. Aes* aes, const byte* inBlock, byte* outBlock)
  591. {
  592. return wc_AesEncryptDirect(aes, outBlock, inBlock);
  593. }
  594. #elif defined(WOLFSSL_AFALG)
  595. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  596. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  597. /* implemented in wolfcrypt/src/port/devcrypto/devcrypto_aes.c */
  598. #elif defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
  599. #include "hal_data.h"
  600. #ifndef WOLFSSL_SCE_AES256_HANDLE
  601. #define WOLFSSL_SCE_AES256_HANDLE g_sce_aes_256
  602. #endif
  603. #ifndef WOLFSSL_SCE_AES192_HANDLE
  604. #define WOLFSSL_SCE_AES192_HANDLE g_sce_aes_192
  605. #endif
  606. #ifndef WOLFSSL_SCE_AES128_HANDLE
  607. #define WOLFSSL_SCE_AES128_HANDLE g_sce_aes_128
  608. #endif
  609. static WARN_UNUSED_RESULT int AES_ECB_encrypt(
  610. Aes* aes, const byte* inBlock, byte* outBlock, int sz)
  611. {
  612. word32 ret;
  613. if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
  614. CRYPTO_WORD_ENDIAN_BIG) {
  615. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  616. }
  617. switch (aes->keylen) {
  618. #ifdef WOLFSSL_AES_128
  619. case AES_128_KEY_SIZE:
  620. ret = WOLFSSL_SCE_AES128_HANDLE.p_api->encrypt(
  621. WOLFSSL_SCE_AES128_HANDLE.p_ctrl, aes->key,
  622. NULL, (sz / sizeof(word32)), (word32*)inBlock,
  623. (word32*)outBlock);
  624. break;
  625. #endif
  626. #ifdef WOLFSSL_AES_192
  627. case AES_192_KEY_SIZE:
  628. ret = WOLFSSL_SCE_AES192_HANDLE.p_api->encrypt(
  629. WOLFSSL_SCE_AES192_HANDLE.p_ctrl, aes->key,
  630. NULL, (sz / sizeof(word32)), (word32*)inBlock,
  631. (word32*)outBlock);
  632. break;
  633. #endif
  634. #ifdef WOLFSSL_AES_256
  635. case AES_256_KEY_SIZE:
  636. ret = WOLFSSL_SCE_AES256_HANDLE.p_api->encrypt(
  637. WOLFSSL_SCE_AES256_HANDLE.p_ctrl, aes->key,
  638. NULL, (sz / sizeof(word32)), (word32*)inBlock,
  639. (word32*)outBlock);
  640. break;
  641. #endif
  642. default:
  643. WOLFSSL_MSG("Unknown key size");
  644. return BAD_FUNC_ARG;
  645. }
  646. if (ret != SSP_SUCCESS) {
  647. /* revert input */
  648. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  649. return WC_HW_E;
  650. }
  651. if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
  652. CRYPTO_WORD_ENDIAN_BIG) {
  653. ByteReverseWords((word32*)outBlock, (word32*)outBlock, sz);
  654. if (inBlock != outBlock) {
  655. /* revert input */
  656. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  657. }
  658. }
  659. return 0;
  660. }
  661. #if defined(HAVE_AES_DECRYPT)
  662. static WARN_UNUSED_RESULT int AES_ECB_decrypt(
  663. Aes* aes, const byte* inBlock, byte* outBlock, int sz)
  664. {
  665. word32 ret;
  666. if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
  667. CRYPTO_WORD_ENDIAN_BIG) {
  668. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  669. }
  670. switch (aes->keylen) {
  671. #ifdef WOLFSSL_AES_128
  672. case AES_128_KEY_SIZE:
  673. ret = WOLFSSL_SCE_AES128_HANDLE.p_api->decrypt(
  674. WOLFSSL_SCE_AES128_HANDLE.p_ctrl, aes->key, aes->reg,
  675. (sz / sizeof(word32)), (word32*)inBlock,
  676. (word32*)outBlock);
  677. break;
  678. #endif
  679. #ifdef WOLFSSL_AES_192
  680. case AES_192_KEY_SIZE:
  681. ret = WOLFSSL_SCE_AES192_HANDLE.p_api->decrypt(
  682. WOLFSSL_SCE_AES192_HANDLE.p_ctrl, aes->key, aes->reg,
  683. (sz / sizeof(word32)), (word32*)inBlock,
  684. (word32*)outBlock);
  685. break;
  686. #endif
  687. #ifdef WOLFSSL_AES_256
  688. case AES_256_KEY_SIZE:
  689. ret = WOLFSSL_SCE_AES256_HANDLE.p_api->decrypt(
  690. WOLFSSL_SCE_AES256_HANDLE.p_ctrl, aes->key, aes->reg,
  691. (sz / sizeof(word32)), (word32*)inBlock,
  692. (word32*)outBlock);
  693. break;
  694. #endif
  695. default:
  696. WOLFSSL_MSG("Unknown key size");
  697. return BAD_FUNC_ARG;
  698. }
  699. if (ret != SSP_SUCCESS) {
  700. return WC_HW_E;
  701. }
  702. if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
  703. CRYPTO_WORD_ENDIAN_BIG) {
  704. ByteReverseWords((word32*)outBlock, (word32*)outBlock, sz);
  705. if (inBlock != outBlock) {
  706. /* revert input */
  707. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  708. }
  709. }
  710. return 0;
  711. }
  712. #endif /* HAVE_AES_DECRYPT */
  713. #if defined(HAVE_AESGCM) || defined(WOLFSSL_AES_DIRECT)
  714. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  715. Aes* aes, const byte* inBlock, byte* outBlock)
  716. {
  717. return AES_ECB_encrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
  718. }
  719. #endif
  720. #if defined(HAVE_AES_DECRYPT) && defined(WOLFSSL_AES_DIRECT)
  721. static WARN_UNUSED_RESULT int wc_AesDecrypt(
  722. Aes* aes, const byte* inBlock, byte* outBlock)
  723. {
  724. return AES_ECB_decrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
  725. }
  726. #endif
  727. #elif defined(WOLFSSL_KCAPI_AES)
  728. /* Only CBC and GCM that are in wolfcrypt/src/port/kcapi/kcapi_aes.c */
  729. #if defined(WOLFSSL_AES_COUNTER) || defined(HAVE_AESCCM) || \
  730. defined(WOLFSSL_CMAC) || defined(WOLFSSL_AES_OFB) || \
  731. defined(WOLFSSL_AES_CFB) || defined(HAVE_AES_ECB) || \
  732. defined(WOLFSSL_AES_DIRECT) || \
  733. (defined(HAVE_AES_CBC) && defined(WOLFSSL_NO_KCAPI_AES_CBC))
  734. #define NEED_AES_TABLES
  735. #endif
  736. #elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
  737. /* implemented in wolfcrypt/src/port/psa/psa_aes.c */
  738. #else
  739. /* using wolfCrypt software implementation */
  740. #define NEED_AES_TABLES
  741. #endif
  742. #ifdef NEED_AES_TABLES
  743. #if (!defined(WOLFSSL_SILABS_SE_ACCEL) && \
  744. !defined(WOLFSSL_ESP32_CRYPT_RSA_PRI) \
  745. ) || \
  746. (defined(WOLFSSL_ESP32_CRYPT_RSA_PRI) && defined(NEED_AES_HW_FALLBACK))
/* AES key-schedule round constants, each placed in the most significant
 * byte of a word (successive doublings in GF(2^8): 0x01,0x02,...,0x80,
 * then 0x1B, 0x36 after reduction). */
static const FLASH_QUALIFIER word32 rcon[] = {
    0x01000000, 0x02000000, 0x04000000, 0x08000000,
    0x10000000, 0x20000000, 0x40000000, 0x80000000,
    0x1B000000, 0x36000000,
    /* for 128-bit blocks, Rijndael never uses more than 10 rcon values */
};
  753. #endif
  754. #ifndef WOLFSSL_AES_SMALL_TABLES
/* Te: precomputed AES encryption "T-tables" combining the S-box lookup with
 * the MixColumns multiplication. The four 256-entry tables hold the same
 * 32-bit words rotated by one byte each (compare Te[0][0]=0xc66363a5 with
 * Te[1][0]=0xa5c66363, etc.), so one encryption round can be computed with
 * four table lookups and XORs per output word. */
static const FLASH_QUALIFIER word32 Te[4][256] = {
{
    0xc66363a5U, 0xf87c7c84U, 0xee777799U, 0xf67b7b8dU,
    0xfff2f20dU, 0xd66b6bbdU, 0xde6f6fb1U, 0x91c5c554U,
    0x60303050U, 0x02010103U, 0xce6767a9U, 0x562b2b7dU,
    0xe7fefe19U, 0xb5d7d762U, 0x4dababe6U, 0xec76769aU,
    0x8fcaca45U, 0x1f82829dU, 0x89c9c940U, 0xfa7d7d87U,
    0xeffafa15U, 0xb25959ebU, 0x8e4747c9U, 0xfbf0f00bU,
    0x41adadecU, 0xb3d4d467U, 0x5fa2a2fdU, 0x45afafeaU,
    0x239c9cbfU, 0x53a4a4f7U, 0xe4727296U, 0x9bc0c05bU,
    0x75b7b7c2U, 0xe1fdfd1cU, 0x3d9393aeU, 0x4c26266aU,
    0x6c36365aU, 0x7e3f3f41U, 0xf5f7f702U, 0x83cccc4fU,
    0x6834345cU, 0x51a5a5f4U, 0xd1e5e534U, 0xf9f1f108U,
    0xe2717193U, 0xabd8d873U, 0x62313153U, 0x2a15153fU,
    0x0804040cU, 0x95c7c752U, 0x46232365U, 0x9dc3c35eU,
    0x30181828U, 0x379696a1U, 0x0a05050fU, 0x2f9a9ab5U,
    0x0e070709U, 0x24121236U, 0x1b80809bU, 0xdfe2e23dU,
    0xcdebeb26U, 0x4e272769U, 0x7fb2b2cdU, 0xea75759fU,
    0x1209091bU, 0x1d83839eU, 0x582c2c74U, 0x341a1a2eU,
    0x361b1b2dU, 0xdc6e6eb2U, 0xb45a5aeeU, 0x5ba0a0fbU,
    0xa45252f6U, 0x763b3b4dU, 0xb7d6d661U, 0x7db3b3ceU,
    0x5229297bU, 0xdde3e33eU, 0x5e2f2f71U, 0x13848497U,
    0xa65353f5U, 0xb9d1d168U, 0x00000000U, 0xc1eded2cU,
    0x40202060U, 0xe3fcfc1fU, 0x79b1b1c8U, 0xb65b5bedU,
    0xd46a6abeU, 0x8dcbcb46U, 0x67bebed9U, 0x7239394bU,
    0x944a4adeU, 0x984c4cd4U, 0xb05858e8U, 0x85cfcf4aU,
    0xbbd0d06bU, 0xc5efef2aU, 0x4faaaae5U, 0xedfbfb16U,
    0x864343c5U, 0x9a4d4dd7U, 0x66333355U, 0x11858594U,
    0x8a4545cfU, 0xe9f9f910U, 0x04020206U, 0xfe7f7f81U,
    0xa05050f0U, 0x783c3c44U, 0x259f9fbaU, 0x4ba8a8e3U,
    0xa25151f3U, 0x5da3a3feU, 0x804040c0U, 0x058f8f8aU,
    0x3f9292adU, 0x219d9dbcU, 0x70383848U, 0xf1f5f504U,
    0x63bcbcdfU, 0x77b6b6c1U, 0xafdada75U, 0x42212163U,
    0x20101030U, 0xe5ffff1aU, 0xfdf3f30eU, 0xbfd2d26dU,
    0x81cdcd4cU, 0x180c0c14U, 0x26131335U, 0xc3ecec2fU,
    0xbe5f5fe1U, 0x359797a2U, 0x884444ccU, 0x2e171739U,
    0x93c4c457U, 0x55a7a7f2U, 0xfc7e7e82U, 0x7a3d3d47U,
    0xc86464acU, 0xba5d5de7U, 0x3219192bU, 0xe6737395U,
    0xc06060a0U, 0x19818198U, 0x9e4f4fd1U, 0xa3dcdc7fU,
    0x44222266U, 0x542a2a7eU, 0x3b9090abU, 0x0b888883U,
    0x8c4646caU, 0xc7eeee29U, 0x6bb8b8d3U, 0x2814143cU,
    0xa7dede79U, 0xbc5e5ee2U, 0x160b0b1dU, 0xaddbdb76U,
    0xdbe0e03bU, 0x64323256U, 0x743a3a4eU, 0x140a0a1eU,
    0x924949dbU, 0x0c06060aU, 0x4824246cU, 0xb85c5ce4U,
    0x9fc2c25dU, 0xbdd3d36eU, 0x43acacefU, 0xc46262a6U,
    0x399191a8U, 0x319595a4U, 0xd3e4e437U, 0xf279798bU,
    0xd5e7e732U, 0x8bc8c843U, 0x6e373759U, 0xda6d6db7U,
    0x018d8d8cU, 0xb1d5d564U, 0x9c4e4ed2U, 0x49a9a9e0U,
    0xd86c6cb4U, 0xac5656faU, 0xf3f4f407U, 0xcfeaea25U,
    0xca6565afU, 0xf47a7a8eU, 0x47aeaee9U, 0x10080818U,
    0x6fbabad5U, 0xf0787888U, 0x4a25256fU, 0x5c2e2e72U,
    0x381c1c24U, 0x57a6a6f1U, 0x73b4b4c7U, 0x97c6c651U,
    0xcbe8e823U, 0xa1dddd7cU, 0xe874749cU, 0x3e1f1f21U,
    0x964b4bddU, 0x61bdbddcU, 0x0d8b8b86U, 0x0f8a8a85U,
    0xe0707090U, 0x7c3e3e42U, 0x71b5b5c4U, 0xcc6666aaU,
    0x904848d8U, 0x06030305U, 0xf7f6f601U, 0x1c0e0e12U,
    0xc26161a3U, 0x6a35355fU, 0xae5757f9U, 0x69b9b9d0U,
    0x17868691U, 0x99c1c158U, 0x3a1d1d27U, 0x279e9eb9U,
    0xd9e1e138U, 0xebf8f813U, 0x2b9898b3U, 0x22111133U,
    0xd26969bbU, 0xa9d9d970U, 0x078e8e89U, 0x339494a7U,
    0x2d9b9bb6U, 0x3c1e1e22U, 0x15878792U, 0xc9e9e920U,
    0x87cece49U, 0xaa5555ffU, 0x50282878U, 0xa5dfdf7aU,
    0x038c8c8fU, 0x59a1a1f8U, 0x09898980U, 0x1a0d0d17U,
    0x65bfbfdaU, 0xd7e6e631U, 0x844242c6U, 0xd06868b8U,
    0x824141c3U, 0x299999b0U, 0x5a2d2d77U, 0x1e0f0f11U,
    0x7bb0b0cbU, 0xa85454fcU, 0x6dbbbbd6U, 0x2c16163aU,
},
{
    0xa5c66363U, 0x84f87c7cU, 0x99ee7777U, 0x8df67b7bU,
    0x0dfff2f2U, 0xbdd66b6bU, 0xb1de6f6fU, 0x5491c5c5U,
    0x50603030U, 0x03020101U, 0xa9ce6767U, 0x7d562b2bU,
    0x19e7fefeU, 0x62b5d7d7U, 0xe64dababU, 0x9aec7676U,
    0x458fcacaU, 0x9d1f8282U, 0x4089c9c9U, 0x87fa7d7dU,
    0x15effafaU, 0xebb25959U, 0xc98e4747U, 0x0bfbf0f0U,
    0xec41adadU, 0x67b3d4d4U, 0xfd5fa2a2U, 0xea45afafU,
    0xbf239c9cU, 0xf753a4a4U, 0x96e47272U, 0x5b9bc0c0U,
    0xc275b7b7U, 0x1ce1fdfdU, 0xae3d9393U, 0x6a4c2626U,
    0x5a6c3636U, 0x417e3f3fU, 0x02f5f7f7U, 0x4f83ccccU,
    0x5c683434U, 0xf451a5a5U, 0x34d1e5e5U, 0x08f9f1f1U,
    0x93e27171U, 0x73abd8d8U, 0x53623131U, 0x3f2a1515U,
    0x0c080404U, 0x5295c7c7U, 0x65462323U, 0x5e9dc3c3U,
    0x28301818U, 0xa1379696U, 0x0f0a0505U, 0xb52f9a9aU,
    0x090e0707U, 0x36241212U, 0x9b1b8080U, 0x3ddfe2e2U,
    0x26cdebebU, 0x694e2727U, 0xcd7fb2b2U, 0x9fea7575U,
    0x1b120909U, 0x9e1d8383U, 0x74582c2cU, 0x2e341a1aU,
    0x2d361b1bU, 0xb2dc6e6eU, 0xeeb45a5aU, 0xfb5ba0a0U,
    0xf6a45252U, 0x4d763b3bU, 0x61b7d6d6U, 0xce7db3b3U,
    0x7b522929U, 0x3edde3e3U, 0x715e2f2fU, 0x97138484U,
    0xf5a65353U, 0x68b9d1d1U, 0x00000000U, 0x2cc1ededU,
    0x60402020U, 0x1fe3fcfcU, 0xc879b1b1U, 0xedb65b5bU,
    0xbed46a6aU, 0x468dcbcbU, 0xd967bebeU, 0x4b723939U,
    0xde944a4aU, 0xd4984c4cU, 0xe8b05858U, 0x4a85cfcfU,
    0x6bbbd0d0U, 0x2ac5efefU, 0xe54faaaaU, 0x16edfbfbU,
    0xc5864343U, 0xd79a4d4dU, 0x55663333U, 0x94118585U,
    0xcf8a4545U, 0x10e9f9f9U, 0x06040202U, 0x81fe7f7fU,
    0xf0a05050U, 0x44783c3cU, 0xba259f9fU, 0xe34ba8a8U,
    0xf3a25151U, 0xfe5da3a3U, 0xc0804040U, 0x8a058f8fU,
    0xad3f9292U, 0xbc219d9dU, 0x48703838U, 0x04f1f5f5U,
    0xdf63bcbcU, 0xc177b6b6U, 0x75afdadaU, 0x63422121U,
    0x30201010U, 0x1ae5ffffU, 0x0efdf3f3U, 0x6dbfd2d2U,
    0x4c81cdcdU, 0x14180c0cU, 0x35261313U, 0x2fc3ececU,
    0xe1be5f5fU, 0xa2359797U, 0xcc884444U, 0x392e1717U,
    0x5793c4c4U, 0xf255a7a7U, 0x82fc7e7eU, 0x477a3d3dU,
    0xacc86464U, 0xe7ba5d5dU, 0x2b321919U, 0x95e67373U,
    0xa0c06060U, 0x98198181U, 0xd19e4f4fU, 0x7fa3dcdcU,
    0x66442222U, 0x7e542a2aU, 0xab3b9090U, 0x830b8888U,
    0xca8c4646U, 0x29c7eeeeU, 0xd36bb8b8U, 0x3c281414U,
    0x79a7dedeU, 0xe2bc5e5eU, 0x1d160b0bU, 0x76addbdbU,
    0x3bdbe0e0U, 0x56643232U, 0x4e743a3aU, 0x1e140a0aU,
    0xdb924949U, 0x0a0c0606U, 0x6c482424U, 0xe4b85c5cU,
    0x5d9fc2c2U, 0x6ebdd3d3U, 0xef43acacU, 0xa6c46262U,
    0xa8399191U, 0xa4319595U, 0x37d3e4e4U, 0x8bf27979U,
    0x32d5e7e7U, 0x438bc8c8U, 0x596e3737U, 0xb7da6d6dU,
    0x8c018d8dU, 0x64b1d5d5U, 0xd29c4e4eU, 0xe049a9a9U,
    0xb4d86c6cU, 0xfaac5656U, 0x07f3f4f4U, 0x25cfeaeaU,
    0xafca6565U, 0x8ef47a7aU, 0xe947aeaeU, 0x18100808U,
    0xd56fbabaU, 0x88f07878U, 0x6f4a2525U, 0x725c2e2eU,
    0x24381c1cU, 0xf157a6a6U, 0xc773b4b4U, 0x5197c6c6U,
    0x23cbe8e8U, 0x7ca1ddddU, 0x9ce87474U, 0x213e1f1fU,
    0xdd964b4bU, 0xdc61bdbdU, 0x860d8b8bU, 0x850f8a8aU,
    0x90e07070U, 0x427c3e3eU, 0xc471b5b5U, 0xaacc6666U,
    0xd8904848U, 0x05060303U, 0x01f7f6f6U, 0x121c0e0eU,
    0xa3c26161U, 0x5f6a3535U, 0xf9ae5757U, 0xd069b9b9U,
    0x91178686U, 0x5899c1c1U, 0x273a1d1dU, 0xb9279e9eU,
    0x38d9e1e1U, 0x13ebf8f8U, 0xb32b9898U, 0x33221111U,
    0xbbd26969U, 0x70a9d9d9U, 0x89078e8eU, 0xa7339494U,
    0xb62d9b9bU, 0x223c1e1eU, 0x92158787U, 0x20c9e9e9U,
    0x4987ceceU, 0xffaa5555U, 0x78502828U, 0x7aa5dfdfU,
    0x8f038c8cU, 0xf859a1a1U, 0x80098989U, 0x171a0d0dU,
    0xda65bfbfU, 0x31d7e6e6U, 0xc6844242U, 0xb8d06868U,
    0xc3824141U, 0xb0299999U, 0x775a2d2dU, 0x111e0f0fU,
    0xcb7bb0b0U, 0xfca85454U, 0xd66dbbbbU, 0x3a2c1616U,
},
{
    0x63a5c663U, 0x7c84f87cU, 0x7799ee77U, 0x7b8df67bU,
    0xf20dfff2U, 0x6bbdd66bU, 0x6fb1de6fU, 0xc55491c5U,
    0x30506030U, 0x01030201U, 0x67a9ce67U, 0x2b7d562bU,
    0xfe19e7feU, 0xd762b5d7U, 0xabe64dabU, 0x769aec76U,
    0xca458fcaU, 0x829d1f82U, 0xc94089c9U, 0x7d87fa7dU,
    0xfa15effaU, 0x59ebb259U, 0x47c98e47U, 0xf00bfbf0U,
    0xadec41adU, 0xd467b3d4U, 0xa2fd5fa2U, 0xafea45afU,
    0x9cbf239cU, 0xa4f753a4U, 0x7296e472U, 0xc05b9bc0U,
    0xb7c275b7U, 0xfd1ce1fdU, 0x93ae3d93U, 0x266a4c26U,
    0x365a6c36U, 0x3f417e3fU, 0xf702f5f7U, 0xcc4f83ccU,
    0x345c6834U, 0xa5f451a5U, 0xe534d1e5U, 0xf108f9f1U,
    0x7193e271U, 0xd873abd8U, 0x31536231U, 0x153f2a15U,
    0x040c0804U, 0xc75295c7U, 0x23654623U, 0xc35e9dc3U,
    0x18283018U, 0x96a13796U, 0x050f0a05U, 0x9ab52f9aU,
    0x07090e07U, 0x12362412U, 0x809b1b80U, 0xe23ddfe2U,
    0xeb26cdebU, 0x27694e27U, 0xb2cd7fb2U, 0x759fea75U,
    0x091b1209U, 0x839e1d83U, 0x2c74582cU, 0x1a2e341aU,
    0x1b2d361bU, 0x6eb2dc6eU, 0x5aeeb45aU, 0xa0fb5ba0U,
    0x52f6a452U, 0x3b4d763bU, 0xd661b7d6U, 0xb3ce7db3U,
    0x297b5229U, 0xe33edde3U, 0x2f715e2fU, 0x84971384U,
    0x53f5a653U, 0xd168b9d1U, 0x00000000U, 0xed2cc1edU,
    0x20604020U, 0xfc1fe3fcU, 0xb1c879b1U, 0x5bedb65bU,
    0x6abed46aU, 0xcb468dcbU, 0xbed967beU, 0x394b7239U,
    0x4ade944aU, 0x4cd4984cU, 0x58e8b058U, 0xcf4a85cfU,
    0xd06bbbd0U, 0xef2ac5efU, 0xaae54faaU, 0xfb16edfbU,
    0x43c58643U, 0x4dd79a4dU, 0x33556633U, 0x85941185U,
    0x45cf8a45U, 0xf910e9f9U, 0x02060402U, 0x7f81fe7fU,
    0x50f0a050U, 0x3c44783cU, 0x9fba259fU, 0xa8e34ba8U,
    0x51f3a251U, 0xa3fe5da3U, 0x40c08040U, 0x8f8a058fU,
    0x92ad3f92U, 0x9dbc219dU, 0x38487038U, 0xf504f1f5U,
    0xbcdf63bcU, 0xb6c177b6U, 0xda75afdaU, 0x21634221U,
    0x10302010U, 0xff1ae5ffU, 0xf30efdf3U, 0xd26dbfd2U,
    0xcd4c81cdU, 0x0c14180cU, 0x13352613U, 0xec2fc3ecU,
    0x5fe1be5fU, 0x97a23597U, 0x44cc8844U, 0x17392e17U,
    0xc45793c4U, 0xa7f255a7U, 0x7e82fc7eU, 0x3d477a3dU,
    0x64acc864U, 0x5de7ba5dU, 0x192b3219U, 0x7395e673U,
    0x60a0c060U, 0x81981981U, 0x4fd19e4fU, 0xdc7fa3dcU,
    0x22664422U, 0x2a7e542aU, 0x90ab3b90U, 0x88830b88U,
    0x46ca8c46U, 0xee29c7eeU, 0xb8d36bb8U, 0x143c2814U,
    0xde79a7deU, 0x5ee2bc5eU, 0x0b1d160bU, 0xdb76addbU,
    0xe03bdbe0U, 0x32566432U, 0x3a4e743aU, 0x0a1e140aU,
    0x49db9249U, 0x060a0c06U, 0x246c4824U, 0x5ce4b85cU,
    0xc25d9fc2U, 0xd36ebdd3U, 0xacef43acU, 0x62a6c462U,
    0x91a83991U, 0x95a43195U, 0xe437d3e4U, 0x798bf279U,
    0xe732d5e7U, 0xc8438bc8U, 0x37596e37U, 0x6db7da6dU,
    0x8d8c018dU, 0xd564b1d5U, 0x4ed29c4eU, 0xa9e049a9U,
    0x6cb4d86cU, 0x56faac56U, 0xf407f3f4U, 0xea25cfeaU,
    0x65afca65U, 0x7a8ef47aU, 0xaee947aeU, 0x08181008U,
    0xbad56fbaU, 0x7888f078U, 0x256f4a25U, 0x2e725c2eU,
    0x1c24381cU, 0xa6f157a6U, 0xb4c773b4U, 0xc65197c6U,
    0xe823cbe8U, 0xdd7ca1ddU, 0x749ce874U, 0x1f213e1fU,
    0x4bdd964bU, 0xbddc61bdU, 0x8b860d8bU, 0x8a850f8aU,
    0x7090e070U, 0x3e427c3eU, 0xb5c471b5U, 0x66aacc66U,
    0x48d89048U, 0x03050603U, 0xf601f7f6U, 0x0e121c0eU,
    0x61a3c261U, 0x355f6a35U, 0x57f9ae57U, 0xb9d069b9U,
    0x86911786U, 0xc15899c1U, 0x1d273a1dU, 0x9eb9279eU,
    0xe138d9e1U, 0xf813ebf8U, 0x98b32b98U, 0x11332211U,
    0x69bbd269U, 0xd970a9d9U, 0x8e89078eU, 0x94a73394U,
    0x9bb62d9bU, 0x1e223c1eU, 0x87921587U, 0xe920c9e9U,
    0xce4987ceU, 0x55ffaa55U, 0x28785028U, 0xdf7aa5dfU,
    0x8c8f038cU, 0xa1f859a1U, 0x89800989U, 0x0d171a0dU,
    0xbfda65bfU, 0xe631d7e6U, 0x42c68442U, 0x68b8d068U,
    0x41c38241U, 0x99b02999U, 0x2d775a2dU, 0x0f111e0fU,
    0xb0cb7bb0U, 0x54fca854U, 0xbbd66dbbU, 0x163a2c16U,
},
{
    0x6363a5c6U, 0x7c7c84f8U, 0x777799eeU, 0x7b7b8df6U,
    0xf2f20dffU, 0x6b6bbdd6U, 0x6f6fb1deU, 0xc5c55491U,
    0x30305060U, 0x01010302U, 0x6767a9ceU, 0x2b2b7d56U,
    0xfefe19e7U, 0xd7d762b5U, 0xababe64dU, 0x76769aecU,
    0xcaca458fU, 0x82829d1fU, 0xc9c94089U, 0x7d7d87faU,
    0xfafa15efU, 0x5959ebb2U, 0x4747c98eU, 0xf0f00bfbU,
    0xadadec41U, 0xd4d467b3U, 0xa2a2fd5fU, 0xafafea45U,
    0x9c9cbf23U, 0xa4a4f753U, 0x727296e4U, 0xc0c05b9bU,
    0xb7b7c275U, 0xfdfd1ce1U, 0x9393ae3dU, 0x26266a4cU,
    0x36365a6cU, 0x3f3f417eU, 0xf7f702f5U, 0xcccc4f83U,
    0x34345c68U, 0xa5a5f451U, 0xe5e534d1U, 0xf1f108f9U,
    0x717193e2U, 0xd8d873abU, 0x31315362U, 0x15153f2aU,
    0x04040c08U, 0xc7c75295U, 0x23236546U, 0xc3c35e9dU,
    0x18182830U, 0x9696a137U, 0x05050f0aU, 0x9a9ab52fU,
    0x0707090eU, 0x12123624U, 0x80809b1bU, 0xe2e23ddfU,
    0xebeb26cdU, 0x2727694eU, 0xb2b2cd7fU, 0x75759feaU,
    0x09091b12U, 0x83839e1dU, 0x2c2c7458U, 0x1a1a2e34U,
    0x1b1b2d36U, 0x6e6eb2dcU, 0x5a5aeeb4U, 0xa0a0fb5bU,
    0x5252f6a4U, 0x3b3b4d76U, 0xd6d661b7U, 0xb3b3ce7dU,
    0x29297b52U, 0xe3e33eddU, 0x2f2f715eU, 0x84849713U,
    0x5353f5a6U, 0xd1d168b9U, 0x00000000U, 0xeded2cc1U,
    0x20206040U, 0xfcfc1fe3U, 0xb1b1c879U, 0x5b5bedb6U,
    0x6a6abed4U, 0xcbcb468dU, 0xbebed967U, 0x39394b72U,
    0x4a4ade94U, 0x4c4cd498U, 0x5858e8b0U, 0xcfcf4a85U,
    0xd0d06bbbU, 0xefef2ac5U, 0xaaaae54fU, 0xfbfb16edU,
    0x4343c586U, 0x4d4dd79aU, 0x33335566U, 0x85859411U,
    0x4545cf8aU, 0xf9f910e9U, 0x02020604U, 0x7f7f81feU,
    0x5050f0a0U, 0x3c3c4478U, 0x9f9fba25U, 0xa8a8e34bU,
    0x5151f3a2U, 0xa3a3fe5dU, 0x4040c080U, 0x8f8f8a05U,
    0x9292ad3fU, 0x9d9dbc21U, 0x38384870U, 0xf5f504f1U,
    0xbcbcdf63U, 0xb6b6c177U, 0xdada75afU, 0x21216342U,
    0x10103020U, 0xffff1ae5U, 0xf3f30efdU, 0xd2d26dbfU,
    0xcdcd4c81U, 0x0c0c1418U, 0x13133526U, 0xecec2fc3U,
    0x5f5fe1beU, 0x9797a235U, 0x4444cc88U, 0x1717392eU,
    0xc4c45793U, 0xa7a7f255U, 0x7e7e82fcU, 0x3d3d477aU,
    0x6464acc8U, 0x5d5de7baU, 0x19192b32U, 0x737395e6U,
    0x6060a0c0U, 0x81819819U, 0x4f4fd19eU, 0xdcdc7fa3U,
    0x22226644U, 0x2a2a7e54U, 0x9090ab3bU, 0x8888830bU,
    0x4646ca8cU, 0xeeee29c7U, 0xb8b8d36bU, 0x14143c28U,
    0xdede79a7U, 0x5e5ee2bcU, 0x0b0b1d16U, 0xdbdb76adU,
    0xe0e03bdbU, 0x32325664U, 0x3a3a4e74U, 0x0a0a1e14U,
    0x4949db92U, 0x06060a0cU, 0x24246c48U, 0x5c5ce4b8U,
    0xc2c25d9fU, 0xd3d36ebdU, 0xacacef43U, 0x6262a6c4U,
    0x9191a839U, 0x9595a431U, 0xe4e437d3U, 0x79798bf2U,
    0xe7e732d5U, 0xc8c8438bU, 0x3737596eU, 0x6d6db7daU,
    0x8d8d8c01U, 0xd5d564b1U, 0x4e4ed29cU, 0xa9a9e049U,
    0x6c6cb4d8U, 0x5656faacU, 0xf4f407f3U, 0xeaea25cfU,
    0x6565afcaU, 0x7a7a8ef4U, 0xaeaee947U, 0x08081810U,
    0xbabad56fU, 0x787888f0U, 0x25256f4aU, 0x2e2e725cU,
    0x1c1c2438U, 0xa6a6f157U, 0xb4b4c773U, 0xc6c65197U,
    0xe8e823cbU, 0xdddd7ca1U, 0x74749ce8U, 0x1f1f213eU,
    0x4b4bdd96U, 0xbdbddc61U, 0x8b8b860dU, 0x8a8a850fU,
    0x707090e0U, 0x3e3e427cU, 0xb5b5c471U, 0x6666aaccU,
    0x4848d890U, 0x03030506U, 0xf6f601f7U, 0x0e0e121cU,
    0x6161a3c2U, 0x35355f6aU, 0x5757f9aeU, 0xb9b9d069U,
    0x86869117U, 0xc1c15899U, 0x1d1d273aU, 0x9e9eb927U,
    0xe1e138d9U, 0xf8f813ebU, 0x9898b32bU, 0x11113322U,
    0x6969bbd2U, 0xd9d970a9U, 0x8e8e8907U, 0x9494a733U,
    0x9b9bb62dU, 0x1e1e223cU, 0x87879215U, 0xe9e920c9U,
    0xcece4987U, 0x5555ffaaU, 0x28287850U, 0xdfdf7aa5U,
    0x8c8c8f03U, 0xa1a1f859U, 0x89898009U, 0x0d0d171aU,
    0xbfbfda65U, 0xe6e631d7U, 0x4242c684U, 0x6868b8d0U,
    0x4141c382U, 0x9999b029U, 0x2d2d775aU, 0x0f0f111eU,
    0xb0b0cb7bU, 0x5454fca8U, 0xbbbbd66dU, 0x16163a2cU,
}
};
#if defined(HAVE_AES_DECRYPT) && !defined(WOLFSSL_SILABS_SE_ACCEL)
/* Inverse-cipher (decryption) round tables.
 * Each entry merges InvSubBytes and InvMixColumns for one input byte, so a
 * decryption round is four table lookups and XORs per output word.  The four
 * sub-tables hold the same 32-bit values byte-rotated by 0, 8, 16 and 24 bits
 * (e.g. Td[0][0] = 0x51f4a750, Td[1][0] = 0x5051f4a7, Td[2][0] = 0xa75051f4,
 * Td[3][0] = 0xf4a75051) to cover the four byte positions of the state word.
 * Values are the canonical Rijndael Td tables (FIPS-197). */
static const FLASH_QUALIFIER word32 Td[4][256] = {
/* Td[0] */
{
    0x51f4a750U, 0x7e416553U, 0x1a17a4c3U, 0x3a275e96U,
    0x3bab6bcbU, 0x1f9d45f1U, 0xacfa58abU, 0x4be30393U,
    0x2030fa55U, 0xad766df6U, 0x88cc7691U, 0xf5024c25U,
    0x4fe5d7fcU, 0xc52acbd7U, 0x26354480U, 0xb562a38fU,
    0xdeb15a49U, 0x25ba1b67U, 0x45ea0e98U, 0x5dfec0e1U,
    0xc32f7502U, 0x814cf012U, 0x8d4697a3U, 0x6bd3f9c6U,
    0x038f5fe7U, 0x15929c95U, 0xbf6d7aebU, 0x955259daU,
    0xd4be832dU, 0x587421d3U, 0x49e06929U, 0x8ec9c844U,
    0x75c2896aU, 0xf48e7978U, 0x99583e6bU, 0x27b971ddU,
    0xbee14fb6U, 0xf088ad17U, 0xc920ac66U, 0x7dce3ab4U,
    0x63df4a18U, 0xe51a3182U, 0x97513360U, 0x62537f45U,
    0xb16477e0U, 0xbb6bae84U, 0xfe81a01cU, 0xf9082b94U,
    0x70486858U, 0x8f45fd19U, 0x94de6c87U, 0x527bf8b7U,
    0xab73d323U, 0x724b02e2U, 0xe31f8f57U, 0x6655ab2aU,
    0xb2eb2807U, 0x2fb5c203U, 0x86c57b9aU, 0xd33708a5U,
    0x302887f2U, 0x23bfa5b2U, 0x02036abaU, 0xed16825cU,
    0x8acf1c2bU, 0xa779b492U, 0xf307f2f0U, 0x4e69e2a1U,
    0x65daf4cdU, 0x0605bed5U, 0xd134621fU, 0xc4a6fe8aU,
    0x342e539dU, 0xa2f355a0U, 0x058ae132U, 0xa4f6eb75U,
    0x0b83ec39U, 0x4060efaaU, 0x5e719f06U, 0xbd6e1051U,
    0x3e218af9U, 0x96dd063dU, 0xdd3e05aeU, 0x4de6bd46U,
    0x91548db5U, 0x71c45d05U, 0x0406d46fU, 0x605015ffU,
    0x1998fb24U, 0xd6bde997U, 0x894043ccU, 0x67d99e77U,
    0xb0e842bdU, 0x07898b88U, 0xe7195b38U, 0x79c8eedbU,
    0xa17c0a47U, 0x7c420fe9U, 0xf8841ec9U, 0x00000000U,
    0x09808683U, 0x322bed48U, 0x1e1170acU, 0x6c5a724eU,
    0xfd0efffbU, 0x0f853856U, 0x3daed51eU, 0x362d3927U,
    0x0a0fd964U, 0x685ca621U, 0x9b5b54d1U, 0x24362e3aU,
    0x0c0a67b1U, 0x9357e70fU, 0xb4ee96d2U, 0x1b9b919eU,
    0x80c0c54fU, 0x61dc20a2U, 0x5a774b69U, 0x1c121a16U,
    0xe293ba0aU, 0xc0a02ae5U, 0x3c22e043U, 0x121b171dU,
    0x0e090d0bU, 0xf28bc7adU, 0x2db6a8b9U, 0x141ea9c8U,
    0x57f11985U, 0xaf75074cU, 0xee99ddbbU, 0xa37f60fdU,
    0xf701269fU, 0x5c72f5bcU, 0x44663bc5U, 0x5bfb7e34U,
    0x8b432976U, 0xcb23c6dcU, 0xb6edfc68U, 0xb8e4f163U,
    0xd731dccaU, 0x42638510U, 0x13972240U, 0x84c61120U,
    0x854a247dU, 0xd2bb3df8U, 0xaef93211U, 0xc729a16dU,
    0x1d9e2f4bU, 0xdcb230f3U, 0x0d8652ecU, 0x77c1e3d0U,
    0x2bb3166cU, 0xa970b999U, 0x119448faU, 0x47e96422U,
    0xa8fc8cc4U, 0xa0f03f1aU, 0x567d2cd8U, 0x223390efU,
    0x87494ec7U, 0xd938d1c1U, 0x8ccaa2feU, 0x98d40b36U,
    0xa6f581cfU, 0xa57ade28U, 0xdab78e26U, 0x3fadbfa4U,
    0x2c3a9de4U, 0x5078920dU, 0x6a5fcc9bU, 0x547e4662U,
    0xf68d13c2U, 0x90d8b8e8U, 0x2e39f75eU, 0x82c3aff5U,
    0x9f5d80beU, 0x69d0937cU, 0x6fd52da9U, 0xcf2512b3U,
    0xc8ac993bU, 0x10187da7U, 0xe89c636eU, 0xdb3bbb7bU,
    0xcd267809U, 0x6e5918f4U, 0xec9ab701U, 0x834f9aa8U,
    0xe6956e65U, 0xaaffe67eU, 0x21bccf08U, 0xef15e8e6U,
    0xbae79bd9U, 0x4a6f36ceU, 0xea9f09d4U, 0x29b07cd6U,
    0x31a4b2afU, 0x2a3f2331U, 0xc6a59430U, 0x35a266c0U,
    0x744ebc37U, 0xfc82caa6U, 0xe090d0b0U, 0x33a7d815U,
    0xf104984aU, 0x41ecdaf7U, 0x7fcd500eU, 0x1791f62fU,
    0x764dd68dU, 0x43efb04dU, 0xccaa4d54U, 0xe49604dfU,
    0x9ed1b5e3U, 0x4c6a881bU, 0xc12c1fb8U, 0x4665517fU,
    0x9d5eea04U, 0x018c355dU, 0xfa877473U, 0xfb0b412eU,
    0xb3671d5aU, 0x92dbd252U, 0xe9105633U, 0x6dd64713U,
    0x9ad7618cU, 0x37a10c7aU, 0x59f8148eU, 0xeb133c89U,
    0xcea927eeU, 0xb761c935U, 0xe11ce5edU, 0x7a47b13cU,
    0x9cd2df59U, 0x55f2733fU, 0x1814ce79U, 0x73c737bfU,
    0x53f7cdeaU, 0x5ffdaa5bU, 0xdf3d6f14U, 0x7844db86U,
    0xcaaff381U, 0xb968c43eU, 0x3824342cU, 0xc2a3405fU,
    0x161dc372U, 0xbce2250cU, 0x283c498bU, 0xff0d9541U,
    0x39a80171U, 0x080cb3deU, 0xd8b4e49cU, 0x6456c190U,
    0x7bcb8461U, 0xd532b670U, 0x486c5c74U, 0xd0b85742U,
},
/* Td[1] = Td[0] rotated right by 8 bits */
{
    0x5051f4a7U, 0x537e4165U, 0xc31a17a4U, 0x963a275eU,
    0xcb3bab6bU, 0xf11f9d45U, 0xabacfa58U, 0x934be303U,
    0x552030faU, 0xf6ad766dU, 0x9188cc76U, 0x25f5024cU,
    0xfc4fe5d7U, 0xd7c52acbU, 0x80263544U, 0x8fb562a3U,
    0x49deb15aU, 0x6725ba1bU, 0x9845ea0eU, 0xe15dfec0U,
    0x02c32f75U, 0x12814cf0U, 0xa38d4697U, 0xc66bd3f9U,
    0xe7038f5fU, 0x9515929cU, 0xebbf6d7aU, 0xda955259U,
    0x2dd4be83U, 0xd3587421U, 0x2949e069U, 0x448ec9c8U,
    0x6a75c289U, 0x78f48e79U, 0x6b99583eU, 0xdd27b971U,
    0xb6bee14fU, 0x17f088adU, 0x66c920acU, 0xb47dce3aU,
    0x1863df4aU, 0x82e51a31U, 0x60975133U, 0x4562537fU,
    0xe0b16477U, 0x84bb6baeU, 0x1cfe81a0U, 0x94f9082bU,
    0x58704868U, 0x198f45fdU, 0x8794de6cU, 0xb7527bf8U,
    0x23ab73d3U, 0xe2724b02U, 0x57e31f8fU, 0x2a6655abU,
    0x07b2eb28U, 0x032fb5c2U, 0x9a86c57bU, 0xa5d33708U,
    0xf2302887U, 0xb223bfa5U, 0xba02036aU, 0x5ced1682U,
    0x2b8acf1cU, 0x92a779b4U, 0xf0f307f2U, 0xa14e69e2U,
    0xcd65daf4U, 0xd50605beU, 0x1fd13462U, 0x8ac4a6feU,
    0x9d342e53U, 0xa0a2f355U, 0x32058ae1U, 0x75a4f6ebU,
    0x390b83ecU, 0xaa4060efU, 0x065e719fU, 0x51bd6e10U,
    0xf93e218aU, 0x3d96dd06U, 0xaedd3e05U, 0x464de6bdU,
    0xb591548dU, 0x0571c45dU, 0x6f0406d4U, 0xff605015U,
    0x241998fbU, 0x97d6bde9U, 0xcc894043U, 0x7767d99eU,
    0xbdb0e842U, 0x8807898bU, 0x38e7195bU, 0xdb79c8eeU,
    0x47a17c0aU, 0xe97c420fU, 0xc9f8841eU, 0x00000000U,
    0x83098086U, 0x48322bedU, 0xac1e1170U, 0x4e6c5a72U,
    0xfbfd0effU, 0x560f8538U, 0x1e3daed5U, 0x27362d39U,
    0x640a0fd9U, 0x21685ca6U, 0xd19b5b54U, 0x3a24362eU,
    0xb10c0a67U, 0x0f9357e7U, 0xd2b4ee96U, 0x9e1b9b91U,
    0x4f80c0c5U, 0xa261dc20U, 0x695a774bU, 0x161c121aU,
    0x0ae293baU, 0xe5c0a02aU, 0x433c22e0U, 0x1d121b17U,
    0x0b0e090dU, 0xadf28bc7U, 0xb92db6a8U, 0xc8141ea9U,
    0x8557f119U, 0x4caf7507U, 0xbbee99ddU, 0xfda37f60U,
    0x9ff70126U, 0xbc5c72f5U, 0xc544663bU, 0x345bfb7eU,
    0x768b4329U, 0xdccb23c6U, 0x68b6edfcU, 0x63b8e4f1U,
    0xcad731dcU, 0x10426385U, 0x40139722U, 0x2084c611U,
    0x7d854a24U, 0xf8d2bb3dU, 0x11aef932U, 0x6dc729a1U,
    0x4b1d9e2fU, 0xf3dcb230U, 0xec0d8652U, 0xd077c1e3U,
    0x6c2bb316U, 0x99a970b9U, 0xfa119448U, 0x2247e964U,
    0xc4a8fc8cU, 0x1aa0f03fU, 0xd8567d2cU, 0xef223390U,
    0xc787494eU, 0xc1d938d1U, 0xfe8ccaa2U, 0x3698d40bU,
    0xcfa6f581U, 0x28a57adeU, 0x26dab78eU, 0xa43fadbfU,
    0xe42c3a9dU, 0x0d507892U, 0x9b6a5fccU, 0x62547e46U,
    0xc2f68d13U, 0xe890d8b8U, 0x5e2e39f7U, 0xf582c3afU,
    0xbe9f5d80U, 0x7c69d093U, 0xa96fd52dU, 0xb3cf2512U,
    0x3bc8ac99U, 0xa710187dU, 0x6ee89c63U, 0x7bdb3bbbU,
    0x09cd2678U, 0xf46e5918U, 0x01ec9ab7U, 0xa8834f9aU,
    0x65e6956eU, 0x7eaaffe6U, 0x0821bccfU, 0xe6ef15e8U,
    0xd9bae79bU, 0xce4a6f36U, 0xd4ea9f09U, 0xd629b07cU,
    0xaf31a4b2U, 0x312a3f23U, 0x30c6a594U, 0xc035a266U,
    0x37744ebcU, 0xa6fc82caU, 0xb0e090d0U, 0x1533a7d8U,
    0x4af10498U, 0xf741ecdaU, 0x0e7fcd50U, 0x2f1791f6U,
    0x8d764dd6U, 0x4d43efb0U, 0x54ccaa4dU, 0xdfe49604U,
    0xe39ed1b5U, 0x1b4c6a88U, 0xb8c12c1fU, 0x7f466551U,
    0x049d5eeaU, 0x5d018c35U, 0x73fa8774U, 0x2efb0b41U,
    0x5ab3671dU, 0x5292dbd2U, 0x33e91056U, 0x136dd647U,
    0x8c9ad761U, 0x7a37a10cU, 0x8e59f814U, 0x89eb133cU,
    0xeecea927U, 0x35b761c9U, 0xede11ce5U, 0x3c7a47b1U,
    0x599cd2dfU, 0x3f55f273U, 0x791814ceU, 0xbf73c737U,
    0xea53f7cdU, 0x5b5ffdaaU, 0x14df3d6fU, 0x867844dbU,
    0x81caaff3U, 0x3eb968c4U, 0x2c382434U, 0x5fc2a340U,
    0x72161dc3U, 0x0cbce225U, 0x8b283c49U, 0x41ff0d95U,
    0x7139a801U, 0xde080cb3U, 0x9cd8b4e4U, 0x906456c1U,
    0x617bcb84U, 0x70d532b6U, 0x74486c5cU, 0x42d0b857U,
},
/* Td[2] = Td[0] rotated right by 16 bits */
{
    0xa75051f4U, 0x65537e41U, 0xa4c31a17U, 0x5e963a27U,
    0x6bcb3babU, 0x45f11f9dU, 0x58abacfaU, 0x03934be3U,
    0xfa552030U, 0x6df6ad76U, 0x769188ccU, 0x4c25f502U,
    0xd7fc4fe5U, 0xcbd7c52aU, 0x44802635U, 0xa38fb562U,
    0x5a49deb1U, 0x1b6725baU, 0x0e9845eaU, 0xc0e15dfeU,
    0x7502c32fU, 0xf012814cU, 0x97a38d46U, 0xf9c66bd3U,
    0x5fe7038fU, 0x9c951592U, 0x7aebbf6dU, 0x59da9552U,
    0x832dd4beU, 0x21d35874U, 0x692949e0U, 0xc8448ec9U,
    0x896a75c2U, 0x7978f48eU, 0x3e6b9958U, 0x71dd27b9U,
    0x4fb6bee1U, 0xad17f088U, 0xac66c920U, 0x3ab47dceU,
    0x4a1863dfU, 0x3182e51aU, 0x33609751U, 0x7f456253U,
    0x77e0b164U, 0xae84bb6bU, 0xa01cfe81U, 0x2b94f908U,
    0x68587048U, 0xfd198f45U, 0x6c8794deU, 0xf8b7527bU,
    0xd323ab73U, 0x02e2724bU, 0x8f57e31fU, 0xab2a6655U,
    0x2807b2ebU, 0xc2032fb5U, 0x7b9a86c5U, 0x08a5d337U,
    0x87f23028U, 0xa5b223bfU, 0x6aba0203U, 0x825ced16U,
    0x1c2b8acfU, 0xb492a779U, 0xf2f0f307U, 0xe2a14e69U,
    0xf4cd65daU, 0xbed50605U, 0x621fd134U, 0xfe8ac4a6U,
    0x539d342eU, 0x55a0a2f3U, 0xe132058aU, 0xeb75a4f6U,
    0xec390b83U, 0xefaa4060U, 0x9f065e71U, 0x1051bd6eU,
    0x8af93e21U, 0x063d96ddU, 0x05aedd3eU, 0xbd464de6U,
    0x8db59154U, 0x5d0571c4U, 0xd46f0406U, 0x15ff6050U,
    0xfb241998U, 0xe997d6bdU, 0x43cc8940U, 0x9e7767d9U,
    0x42bdb0e8U, 0x8b880789U, 0x5b38e719U, 0xeedb79c8U,
    0x0a47a17cU, 0x0fe97c42U, 0x1ec9f884U, 0x00000000U,
    0x86830980U, 0xed48322bU, 0x70ac1e11U, 0x724e6c5aU,
    0xfffbfd0eU, 0x38560f85U, 0xd51e3daeU, 0x3927362dU,
    0xd9640a0fU, 0xa621685cU, 0x54d19b5bU, 0x2e3a2436U,
    0x67b10c0aU, 0xe70f9357U, 0x96d2b4eeU, 0x919e1b9bU,
    0xc54f80c0U, 0x20a261dcU, 0x4b695a77U, 0x1a161c12U,
    0xba0ae293U, 0x2ae5c0a0U, 0xe0433c22U, 0x171d121bU,
    0x0d0b0e09U, 0xc7adf28bU, 0xa8b92db6U, 0xa9c8141eU,
    0x198557f1U, 0x074caf75U, 0xddbbee99U, 0x60fda37fU,
    0x269ff701U, 0xf5bc5c72U, 0x3bc54466U, 0x7e345bfbU,
    0x29768b43U, 0xc6dccb23U, 0xfc68b6edU, 0xf163b8e4U,
    0xdccad731U, 0x85104263U, 0x22401397U, 0x112084c6U,
    0x247d854aU, 0x3df8d2bbU, 0x3211aef9U, 0xa16dc729U,
    0x2f4b1d9eU, 0x30f3dcb2U, 0x52ec0d86U, 0xe3d077c1U,
    0x166c2bb3U, 0xb999a970U, 0x48fa1194U, 0x642247e9U,
    0x8cc4a8fcU, 0x3f1aa0f0U, 0x2cd8567dU, 0x90ef2233U,
    0x4ec78749U, 0xd1c1d938U, 0xa2fe8ccaU, 0x0b3698d4U,
    0x81cfa6f5U, 0xde28a57aU, 0x8e26dab7U, 0xbfa43fadU,
    0x9de42c3aU, 0x920d5078U, 0xcc9b6a5fU, 0x4662547eU,
    0x13c2f68dU, 0xb8e890d8U, 0xf75e2e39U, 0xaff582c3U,
    0x80be9f5dU, 0x937c69d0U, 0x2da96fd5U, 0x12b3cf25U,
    0x993bc8acU, 0x7da71018U, 0x636ee89cU, 0xbb7bdb3bU,
    0x7809cd26U, 0x18f46e59U, 0xb701ec9aU, 0x9aa8834fU,
    0x6e65e695U, 0xe67eaaffU, 0xcf0821bcU, 0xe8e6ef15U,
    0x9bd9bae7U, 0x36ce4a6fU, 0x09d4ea9fU, 0x7cd629b0U,
    0xb2af31a4U, 0x23312a3fU, 0x9430c6a5U, 0x66c035a2U,
    0xbc37744eU, 0xcaa6fc82U, 0xd0b0e090U, 0xd81533a7U,
    0x984af104U, 0xdaf741ecU, 0x500e7fcdU, 0xf62f1791U,
    0xd68d764dU, 0xb04d43efU, 0x4d54ccaaU, 0x04dfe496U,
    0xb5e39ed1U, 0x881b4c6aU, 0x1fb8c12cU, 0x517f4665U,
    0xea049d5eU, 0x355d018cU, 0x7473fa87U, 0x412efb0bU,
    0x1d5ab367U, 0xd25292dbU, 0x5633e910U, 0x47136dd6U,
    0x618c9ad7U, 0x0c7a37a1U, 0x148e59f8U, 0x3c89eb13U,
    0x27eecea9U, 0xc935b761U, 0xe5ede11cU, 0xb13c7a47U,
    0xdf599cd2U, 0x733f55f2U, 0xce791814U, 0x37bf73c7U,
    0xcdea53f7U, 0xaa5b5ffdU, 0x6f14df3dU, 0xdb867844U,
    0xf381caafU, 0xc43eb968U, 0x342c3824U, 0x405fc2a3U,
    0xc372161dU, 0x250cbce2U, 0x498b283cU, 0x9541ff0dU,
    0x017139a8U, 0xb3de080cU, 0xe49cd8b4U, 0xc1906456U,
    0x84617bcbU, 0xb670d532U, 0x5c74486cU, 0x5742d0b8U,
},
/* Td[3] = Td[0] rotated right by 24 bits */
{
    0xf4a75051U, 0x4165537eU, 0x17a4c31aU, 0x275e963aU,
    0xab6bcb3bU, 0x9d45f11fU, 0xfa58abacU, 0xe303934bU,
    0x30fa5520U, 0x766df6adU, 0xcc769188U, 0x024c25f5U,
    0xe5d7fc4fU, 0x2acbd7c5U, 0x35448026U, 0x62a38fb5U,
    0xb15a49deU, 0xba1b6725U, 0xea0e9845U, 0xfec0e15dU,
    0x2f7502c3U, 0x4cf01281U, 0x4697a38dU, 0xd3f9c66bU,
    0x8f5fe703U, 0x929c9515U, 0x6d7aebbfU, 0x5259da95U,
    0xbe832dd4U, 0x7421d358U, 0xe0692949U, 0xc9c8448eU,
    0xc2896a75U, 0x8e7978f4U, 0x583e6b99U, 0xb971dd27U,
    0xe14fb6beU, 0x88ad17f0U, 0x20ac66c9U, 0xce3ab47dU,
    0xdf4a1863U, 0x1a3182e5U, 0x51336097U, 0x537f4562U,
    0x6477e0b1U, 0x6bae84bbU, 0x81a01cfeU, 0x082b94f9U,
    0x48685870U, 0x45fd198fU, 0xde6c8794U, 0x7bf8b752U,
    0x73d323abU, 0x4b02e272U, 0x1f8f57e3U, 0x55ab2a66U,
    0xeb2807b2U, 0xb5c2032fU, 0xc57b9a86U, 0x3708a5d3U,
    0x2887f230U, 0xbfa5b223U, 0x036aba02U, 0x16825cedU,
    0xcf1c2b8aU, 0x79b492a7U, 0x07f2f0f3U, 0x69e2a14eU,
    0xdaf4cd65U, 0x05bed506U, 0x34621fd1U, 0xa6fe8ac4U,
    0x2e539d34U, 0xf355a0a2U, 0x8ae13205U, 0xf6eb75a4U,
    0x83ec390bU, 0x60efaa40U, 0x719f065eU, 0x6e1051bdU,
    0x218af93eU, 0xdd063d96U, 0x3e05aeddU, 0xe6bd464dU,
    0x548db591U, 0xc45d0571U, 0x06d46f04U, 0x5015ff60U,
    0x98fb2419U, 0xbde997d6U, 0x4043cc89U, 0xd99e7767U,
    0xe842bdb0U, 0x898b8807U, 0x195b38e7U, 0xc8eedb79U,
    0x7c0a47a1U, 0x420fe97cU, 0x841ec9f8U, 0x00000000U,
    0x80868309U, 0x2bed4832U, 0x1170ac1eU, 0x5a724e6cU,
    0x0efffbfdU, 0x8538560fU, 0xaed51e3dU, 0x2d392736U,
    0x0fd9640aU, 0x5ca62168U, 0x5b54d19bU, 0x362e3a24U,
    0x0a67b10cU, 0x57e70f93U, 0xee96d2b4U, 0x9b919e1bU,
    0xc0c54f80U, 0xdc20a261U, 0x774b695aU, 0x121a161cU,
    0x93ba0ae2U, 0xa02ae5c0U, 0x22e0433cU, 0x1b171d12U,
    0x090d0b0eU, 0x8bc7adf2U, 0xb6a8b92dU, 0x1ea9c814U,
    0xf1198557U, 0x75074cafU, 0x99ddbbeeU, 0x7f60fda3U,
    0x01269ff7U, 0x72f5bc5cU, 0x663bc544U, 0xfb7e345bU,
    0x4329768bU, 0x23c6dccbU, 0xedfc68b6U, 0xe4f163b8U,
    0x31dccad7U, 0x63851042U, 0x97224013U, 0xc6112084U,
    0x4a247d85U, 0xbb3df8d2U, 0xf93211aeU, 0x29a16dc7U,
    0x9e2f4b1dU, 0xb230f3dcU, 0x8652ec0dU, 0xc1e3d077U,
    0xb3166c2bU, 0x70b999a9U, 0x9448fa11U, 0xe9642247U,
    0xfc8cc4a8U, 0xf03f1aa0U, 0x7d2cd856U, 0x3390ef22U,
    0x494ec787U, 0x38d1c1d9U, 0xcaa2fe8cU, 0xd40b3698U,
    0xf581cfa6U, 0x7ade28a5U, 0xb78e26daU, 0xadbfa43fU,
    0x3a9de42cU, 0x78920d50U, 0x5fcc9b6aU, 0x7e466254U,
    0x8d13c2f6U, 0xd8b8e890U, 0x39f75e2eU, 0xc3aff582U,
    0x5d80be9fU, 0xd0937c69U, 0xd52da96fU, 0x2512b3cfU,
    0xac993bc8U, 0x187da710U, 0x9c636ee8U, 0x3bbb7bdbU,
    0x267809cdU, 0x5918f46eU, 0x9ab701ecU, 0x4f9aa883U,
    0x956e65e6U, 0xffe67eaaU, 0xbccf0821U, 0x15e8e6efU,
    0xe79bd9baU, 0x6f36ce4aU, 0x9f09d4eaU, 0xb07cd629U,
    0xa4b2af31U, 0x3f23312aU, 0xa59430c6U, 0xa266c035U,
    0x4ebc3774U, 0x82caa6fcU, 0x90d0b0e0U, 0xa7d81533U,
    0x04984af1U, 0xecdaf741U, 0xcd500e7fU, 0x91f62f17U,
    0x4dd68d76U, 0xefb04d43U, 0xaa4d54ccU, 0x9604dfe4U,
    0xd1b5e39eU, 0x6a881b4cU, 0x2c1fb8c1U, 0x65517f46U,
    0x5eea049dU, 0x8c355d01U, 0x877473faU, 0x0b412efbU,
    0x671d5ab3U, 0xdbd25292U, 0x105633e9U, 0xd647136dU,
    0xd7618c9aU, 0xa10c7a37U, 0xf8148e59U, 0x133c89ebU,
    0xa927eeceU, 0x61c935b7U, 0x1ce5ede1U, 0x47b13c7aU,
    0xd2df599cU, 0xf2733f55U, 0x14ce7918U, 0xc737bf73U,
    0xf7cdea53U, 0xfdaa5b5fU, 0x3d6f14dfU, 0x44db8678U,
    0xaff381caU, 0x68c43eb9U, 0x24342c38U, 0xa3405fc2U,
    0x1dc37216U, 0xe2250cbcU, 0x3c498b28U, 0x0d9541ffU,
    0xa8017139U, 0x0cb3de08U, 0xb4e49cd8U, 0x56c19064U,
    0xcb84617bU, 0x32b670d5U, 0x6c5c7448U, 0xb85742d0U,
}
};
#endif /* HAVE_AES_DECRYPT */
  1289. #endif /* WOLFSSL_AES_SMALL_TABLES */
#ifdef HAVE_AES_DECRYPT
#if (defined(HAVE_AES_CBC) && !defined(WOLFSSL_DEVCRYPTO_CBC) && \
    !defined(WOLFSSL_SILABS_SE_ACCEL)) || \
    defined(WOLFSSL_AES_DIRECT)
/* AES inverse S-box (InvSubBytes), per FIPS-197.  Used byte-wise in the
 * final decryption round, where the merged Td word tables cannot be used. */
static const FLASH_QUALIFIER byte Td4[256] =
{
    0x52U, 0x09U, 0x6aU, 0xd5U, 0x30U, 0x36U, 0xa5U, 0x38U,
    0xbfU, 0x40U, 0xa3U, 0x9eU, 0x81U, 0xf3U, 0xd7U, 0xfbU,
    0x7cU, 0xe3U, 0x39U, 0x82U, 0x9bU, 0x2fU, 0xffU, 0x87U,
    0x34U, 0x8eU, 0x43U, 0x44U, 0xc4U, 0xdeU, 0xe9U, 0xcbU,
    0x54U, 0x7bU, 0x94U, 0x32U, 0xa6U, 0xc2U, 0x23U, 0x3dU,
    0xeeU, 0x4cU, 0x95U, 0x0bU, 0x42U, 0xfaU, 0xc3U, 0x4eU,
    0x08U, 0x2eU, 0xa1U, 0x66U, 0x28U, 0xd9U, 0x24U, 0xb2U,
    0x76U, 0x5bU, 0xa2U, 0x49U, 0x6dU, 0x8bU, 0xd1U, 0x25U,
    0x72U, 0xf8U, 0xf6U, 0x64U, 0x86U, 0x68U, 0x98U, 0x16U,
    0xd4U, 0xa4U, 0x5cU, 0xccU, 0x5dU, 0x65U, 0xb6U, 0x92U,
    0x6cU, 0x70U, 0x48U, 0x50U, 0xfdU, 0xedU, 0xb9U, 0xdaU,
    0x5eU, 0x15U, 0x46U, 0x57U, 0xa7U, 0x8dU, 0x9dU, 0x84U,
    0x90U, 0xd8U, 0xabU, 0x00U, 0x8cU, 0xbcU, 0xd3U, 0x0aU,
    0xf7U, 0xe4U, 0x58U, 0x05U, 0xb8U, 0xb3U, 0x45U, 0x06U,
    0xd0U, 0x2cU, 0x1eU, 0x8fU, 0xcaU, 0x3fU, 0x0fU, 0x02U,
    0xc1U, 0xafU, 0xbdU, 0x03U, 0x01U, 0x13U, 0x8aU, 0x6bU,
    0x3aU, 0x91U, 0x11U, 0x41U, 0x4fU, 0x67U, 0xdcU, 0xeaU,
    0x97U, 0xf2U, 0xcfU, 0xceU, 0xf0U, 0xb4U, 0xe6U, 0x73U,
    0x96U, 0xacU, 0x74U, 0x22U, 0xe7U, 0xadU, 0x35U, 0x85U,
    0xe2U, 0xf9U, 0x37U, 0xe8U, 0x1cU, 0x75U, 0xdfU, 0x6eU,
    0x47U, 0xf1U, 0x1aU, 0x71U, 0x1dU, 0x29U, 0xc5U, 0x89U,
    0x6fU, 0xb7U, 0x62U, 0x0eU, 0xaaU, 0x18U, 0xbeU, 0x1bU,
    0xfcU, 0x56U, 0x3eU, 0x4bU, 0xc6U, 0xd2U, 0x79U, 0x20U,
    0x9aU, 0xdbU, 0xc0U, 0xfeU, 0x78U, 0xcdU, 0x5aU, 0xf4U,
    0x1fU, 0xddU, 0xa8U, 0x33U, 0x88U, 0x07U, 0xc7U, 0x31U,
    0xb1U, 0x12U, 0x10U, 0x59U, 0x27U, 0x80U, 0xecU, 0x5fU,
    0x60U, 0x51U, 0x7fU, 0xa9U, 0x19U, 0xb5U, 0x4aU, 0x0dU,
    0x2dU, 0xe5U, 0x7aU, 0x9fU, 0x93U, 0xc9U, 0x9cU, 0xefU,
    0xa0U, 0xe0U, 0x3bU, 0x4dU, 0xaeU, 0x2aU, 0xf5U, 0xb0U,
    0xc8U, 0xebU, 0xbbU, 0x3cU, 0x83U, 0x53U, 0x99U, 0x61U,
    0x17U, 0x2bU, 0x04U, 0x7eU, 0xbaU, 0x77U, 0xd6U, 0x26U,
    0xe1U, 0x69U, 0x14U, 0x63U, 0x55U, 0x21U, 0x0cU, 0x7dU,
};
#endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT */
#endif /* HAVE_AES_DECRYPT */
/* Extract byte y of word x (y = 0 is the least-significant byte),
 * returned zero-extended as a word32. */
#define GETBYTE(x, y) (word32)((byte)((x) >> (8 * (y))))
  1332. #ifdef WOLFSSL_AES_SMALL_TABLES
/* AES S-box (SubBytes), per FIPS-197.  Used by the WOLFSSL_AES_SMALL_TABLES
 * code path in place of the large merged Te word tables. */
static const byte Tsbox[256] = {
    0x63U, 0x7cU, 0x77U, 0x7bU, 0xf2U, 0x6bU, 0x6fU, 0xc5U,
    0x30U, 0x01U, 0x67U, 0x2bU, 0xfeU, 0xd7U, 0xabU, 0x76U,
    0xcaU, 0x82U, 0xc9U, 0x7dU, 0xfaU, 0x59U, 0x47U, 0xf0U,
    0xadU, 0xd4U, 0xa2U, 0xafU, 0x9cU, 0xa4U, 0x72U, 0xc0U,
    0xb7U, 0xfdU, 0x93U, 0x26U, 0x36U, 0x3fU, 0xf7U, 0xccU,
    0x34U, 0xa5U, 0xe5U, 0xf1U, 0x71U, 0xd8U, 0x31U, 0x15U,
    0x04U, 0xc7U, 0x23U, 0xc3U, 0x18U, 0x96U, 0x05U, 0x9aU,
    0x07U, 0x12U, 0x80U, 0xe2U, 0xebU, 0x27U, 0xb2U, 0x75U,
    0x09U, 0x83U, 0x2cU, 0x1aU, 0x1bU, 0x6eU, 0x5aU, 0xa0U,
    0x52U, 0x3bU, 0xd6U, 0xb3U, 0x29U, 0xe3U, 0x2fU, 0x84U,
    0x53U, 0xd1U, 0x00U, 0xedU, 0x20U, 0xfcU, 0xb1U, 0x5bU,
    0x6aU, 0xcbU, 0xbeU, 0x39U, 0x4aU, 0x4cU, 0x58U, 0xcfU,
    0xd0U, 0xefU, 0xaaU, 0xfbU, 0x43U, 0x4dU, 0x33U, 0x85U,
    0x45U, 0xf9U, 0x02U, 0x7fU, 0x50U, 0x3cU, 0x9fU, 0xa8U,
    0x51U, 0xa3U, 0x40U, 0x8fU, 0x92U, 0x9dU, 0x38U, 0xf5U,
    0xbcU, 0xb6U, 0xdaU, 0x21U, 0x10U, 0xffU, 0xf3U, 0xd2U,
    0xcdU, 0x0cU, 0x13U, 0xecU, 0x5fU, 0x97U, 0x44U, 0x17U,
    0xc4U, 0xa7U, 0x7eU, 0x3dU, 0x64U, 0x5dU, 0x19U, 0x73U,
    0x60U, 0x81U, 0x4fU, 0xdcU, 0x22U, 0x2aU, 0x90U, 0x88U,
    0x46U, 0xeeU, 0xb8U, 0x14U, 0xdeU, 0x5eU, 0x0bU, 0xdbU,
    0xe0U, 0x32U, 0x3aU, 0x0aU, 0x49U, 0x06U, 0x24U, 0x5cU,
    0xc2U, 0xd3U, 0xacU, 0x62U, 0x91U, 0x95U, 0xe4U, 0x79U,
    0xe7U, 0xc8U, 0x37U, 0x6dU, 0x8dU, 0xd5U, 0x4eU, 0xa9U,
    0x6cU, 0x56U, 0xf4U, 0xeaU, 0x65U, 0x7aU, 0xaeU, 0x08U,
    0xbaU, 0x78U, 0x25U, 0x2eU, 0x1cU, 0xa6U, 0xb4U, 0xc6U,
    0xe8U, 0xddU, 0x74U, 0x1fU, 0x4bU, 0xbdU, 0x8bU, 0x8aU,
    0x70U, 0x3eU, 0xb5U, 0x66U, 0x48U, 0x03U, 0xf6U, 0x0eU,
    0x61U, 0x35U, 0x57U, 0xb9U, 0x86U, 0xc1U, 0x1dU, 0x9eU,
    0xe1U, 0xf8U, 0x98U, 0x11U, 0x69U, 0xd9U, 0x8eU, 0x94U,
    0x9bU, 0x1eU, 0x87U, 0xe9U, 0xceU, 0x55U, 0x28U, 0xdfU,
    0x8cU, 0xa1U, 0x89U, 0x0dU, 0xbfU, 0xe6U, 0x42U, 0x68U,
    0x41U, 0x99U, 0x2dU, 0x0fU, 0xb0U, 0x54U, 0xbbU, 0x16U
};
/* GF(2^8) "xtime": multiply x by 2 modulo the AES polynomial (0x11b).
 * (0 - ((x) >> 7)) is an all-ones mask exactly when bit 7 of x is set,
 * selecting the 0x1b reduction without a data-dependent branch. */
#define AES_XTIME(x) ((byte)((byte)((x) << 1) ^ ((0 - ((x) >> 7)) & 0x1b)))
  1368. static WARN_UNUSED_RESULT word32 col_mul(
  1369. word32 t, int i2, int i3, int ia, int ib)
  1370. {
  1371. byte t3 = GETBYTE(t, i3);
  1372. byte tm = AES_XTIME(GETBYTE(t, i2) ^ t3);
  1373. return GETBYTE(t, ia) ^ GETBYTE(t, ib) ^ t3 ^ tm;
  1374. }
  1375. #if defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT)
  1376. static WARN_UNUSED_RESULT word32 inv_col_mul(
  1377. word32 t, int i9, int ib, int id, int ie)
  1378. {
  1379. byte t9 = GETBYTE(t, i9);
  1380. byte tb = GETBYTE(t, ib);
  1381. byte td = GETBYTE(t, id);
  1382. byte te = GETBYTE(t, ie);
  1383. byte t0 = t9 ^ tb ^ td;
  1384. return t0 ^ AES_XTIME(AES_XTIME(AES_XTIME(t0 ^ te) ^ td ^ te) ^ tb ^ te);
  1385. }
  1386. #endif
  1387. #endif
  1388. #if defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT) || \
  1389. defined(HAVE_AESCCM) || defined(HAVE_AESGCM)
/* Data-cache line size (bytes) used to stride the prefetch loops and the
 * cache-attack-resistant lookups below; may be overridden at build time. */
#ifndef WC_CACHE_LINE_SZ
#if defined(__x86_64__) || defined(_M_X64) || \
   (defined(__ILP32__) && (__ILP32__ >= 1))
    #define WC_CACHE_LINE_SZ 64
#else
    /* default cache line size */
    #define WC_CACHE_LINE_SZ 32
#endif
#endif
  1399. #ifndef WC_NO_CACHE_RESISTANT
/* On RISC-V default to the touch-lines lookup variant.  NOTE(review):
 * presumably chosen to avoid data-dependent cache timing on these
 * targets -- confirm rationale against upstream commit history. */
#if defined(__riscv) && !defined(WOLFSSL_AES_TOUCH_LINES)
    #define WOLFSSL_AES_TOUCH_LINES
#endif
  1403. #ifndef WOLFSSL_AES_SMALL_TABLES
/* Load all 4 Te tables into the data cache by touching one entry per
 * cache line.  x starts at 0 and is only AND-accumulated, so the return
 * value is always 0; callers OR it into live state (e.g. s0) purely so
 * the loads cannot be optimized away.  With WOLFSSL_AES_TOUCH_LINES the
 * lookups themselves read every line, so no prefetch is needed. */
static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchTe(void)
{
#ifndef WOLFSSL_AES_TOUCH_LINES
    word32 x = 0;
    int i,j;
    for (i = 0; i < 4; i++) {
        /* 256 elements, each one is 4 bytes */
        for (j = 0; j < 256; j += WC_CACHE_LINE_SZ/4) {
            x &= Te[i][j];
        }
    }
    return x;
#else
    return 0;
#endif
}
  1421. #else
/* Load the Tsbox table into the data cache.  As with PreFetchTe, x stays 0
 * (AND-accumulated from 0) and is returned only to keep the loads alive.
 * NOTE(review): the stride is WC_CACHE_LINE_SZ/4 byte-sized entries, i.e.
 * a quarter cache line, so each line is touched four times -- harmless,
 * just slightly more loads than strictly necessary. */
static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchSBox(void)
{
#ifndef WOLFSSL_AES_TOUCH_LINES
    word32 x = 0;
    int i;
    for (i = 0; i < 256; i += WC_CACHE_LINE_SZ/4) {
        x &= Tsbox[i];
    }
    return x;
#else
    return 0;
#endif
}
  1436. #endif
  1437. #endif
  1438. #ifdef WOLFSSL_AES_TOUCH_LINES
  1439. #if WC_CACHE_LINE_SZ == 128
  1440. #define WC_CACHE_LINE_BITS 5
  1441. #define WC_CACHE_LINE_MASK_HI 0xe0
  1442. #define WC_CACHE_LINE_MASK_LO 0x1f
  1443. #define WC_CACHE_LINE_ADD 0x20
  1444. #elif WC_CACHE_LINE_SZ == 64
  1445. #define WC_CACHE_LINE_BITS 4
  1446. #define WC_CACHE_LINE_MASK_HI 0xf0
  1447. #define WC_CACHE_LINE_MASK_LO 0x0f
  1448. #define WC_CACHE_LINE_ADD 0x10
  1449. #elif WC_CACHE_LINE_SZ == 32
  1450. #define WC_CACHE_LINE_BITS 3
  1451. #define WC_CACHE_LINE_MASK_HI 0xf8
  1452. #define WC_CACHE_LINE_MASK_LO 0x07
  1453. #define WC_CACHE_LINE_ADD 0x08
  1454. #elif WC_CACHE_LINE_SZ = 16
  1455. #define WC_CACHE_LINE_BITS 2
  1456. #define WC_CACHE_LINE_MASK_HI 0xfc
  1457. #define WC_CACHE_LINE_MASK_LO 0x03
  1458. #define WC_CACHE_LINE_ADD 0x04
  1459. #else
  1460. #error Cache line size not supported
  1461. #endif
  1462. #ifndef WOLFSSL_AES_SMALL_TABLES
/* Cache-attack-resistant lookup of t[o] from a 256-entry word32 table.
 * One entry is read from every cache line regardless of o, so the memory
 * access pattern is independent of the (secret) index.  The mask
 * (word32)0 - (((word32)hi - 0x01) >> 31) is all-ones exactly on the
 * iteration where hi == 0, i.e. the line containing index o; all other
 * reads are masked to 0 before being OR-accumulated. */
static word32 GetTable(const word32* t, byte o)
{
#if WC_CACHE_LINE_SZ == 64
    /* Fully unrolled: 16 lines of 16 entries each. */
    word32 e;
    byte hi = o & 0xf0;
    byte lo = o & 0x0f;
    e = t[lo + 0x00] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x10] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x20] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x30] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x40] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x50] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x60] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x70] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x80] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x90] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xa0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xb0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xc0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xd0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xe0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xf0] & ((word32)0 - (((word32)hi - 0x01) >> 31));
    return e;
#else
    /* Generic loop using the WC_CACHE_LINE_* geometry macros. */
    word32 e = 0;
    int i;
    byte hi = o & WC_CACHE_LINE_MASK_HI;
    byte lo = o & WC_CACHE_LINE_MASK_LO;
    for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
        e |= t[lo + i] & ((word32)0 - (((word32)hi - 0x01) >> 31));
        hi -= WC_CACHE_LINE_ADD;
    }
    return e;
#endif
}
  1498. #endif
  1499. #ifdef WOLFSSL_AES_SMALL_TABLES
/* Cache-attack-resistant lookup of t[o] from a 256-entry byte table
 * (see GetTable for the masking scheme).  The mask arithmetic is done in
 * word32 and truncated on assignment; this is safe because non-masked
 * table entries are single bytes. */
static byte GetTable8(const byte* t, byte o)
{
#if WC_CACHE_LINE_SZ == 64
    /* Fully unrolled: 16 lines of 16 entries each. */
    byte e;
    byte hi = o & 0xf0;
    byte lo = o & 0x0f;
    e = t[lo + 0x00] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x10] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x20] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x30] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x40] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x50] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x60] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x70] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x80] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x90] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xa0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xb0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xc0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xd0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xe0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xf0] & ((word32)0 - (((word32)hi - 0x01) >> 31));
    return e;
#else
    /* Generic loop using the WC_CACHE_LINE_* geometry macros. */
    byte e = 0;
    int i;
    byte hi = o & WC_CACHE_LINE_MASK_HI;
    byte lo = o & WC_CACHE_LINE_MASK_LO;
    for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
        e |= t[lo + i] & ((word32)0 - (((word32)hi - 0x01) >> 31));
        hi -= WC_CACHE_LINE_ADD;
    }
    return e;
#endif
}
  1535. #endif
  1536. #ifndef WOLFSSL_AES_SMALL_TABLES
/* Cache-attack-resistant lookup of four entries of the same word32 table,
 * written to *t0..*t3.  A single pass reads one entry per cache line for
 * each of the four indices, so the access pattern is independent of
 * o0..o3; per index, only the iteration where its hi counter reaches 0
 * contributes (all-ones mask), everything else is masked to 0. */
static void GetTable_Multi(const word32* t, word32* t0, byte o0,
    word32* t1, byte o1, word32* t2, byte o2, word32* t3, byte o3)
{
    word32 e0 = 0;
    word32 e1 = 0;
    word32 e2 = 0;
    word32 e3 = 0;
    byte hi0 = o0 & WC_CACHE_LINE_MASK_HI;
    byte lo0 = o0 & WC_CACHE_LINE_MASK_LO;
    byte hi1 = o1 & WC_CACHE_LINE_MASK_HI;
    byte lo1 = o1 & WC_CACHE_LINE_MASK_LO;
    byte hi2 = o2 & WC_CACHE_LINE_MASK_HI;
    byte lo2 = o2 & WC_CACHE_LINE_MASK_LO;
    byte hi3 = o3 & WC_CACHE_LINE_MASK_HI;
    byte lo3 = o3 & WC_CACHE_LINE_MASK_LO;
    int i;
    for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
        e0 |= t[lo0 + i] & ((word32)0 - (((word32)hi0 - 0x01) >> 31));
        hi0 -= WC_CACHE_LINE_ADD;
        e1 |= t[lo1 + i] & ((word32)0 - (((word32)hi1 - 0x01) >> 31));
        hi1 -= WC_CACHE_LINE_ADD;
        e2 |= t[lo2 + i] & ((word32)0 - (((word32)hi2 - 0x01) >> 31));
        hi2 -= WC_CACHE_LINE_ADD;
        e3 |= t[lo3 + i] & ((word32)0 - (((word32)hi3 - 0x01) >> 31));
        hi3 -= WC_CACHE_LINE_ADD;
    }
    *t0 = e0;
    *t1 = e1;
    *t2 = e2;
    *t3 = e3;
}
  1568. static void XorTable_Multi(const word32* t, word32* t0, byte o0,
  1569. word32* t1, byte o1, word32* t2, byte o2, word32* t3, byte o3)
  1570. {
  1571. word32 e0 = 0;
  1572. word32 e1 = 0;
  1573. word32 e2 = 0;
  1574. word32 e3 = 0;
  1575. byte hi0 = o0 & 0xf0;
  1576. byte lo0 = o0 & 0x0f;
  1577. byte hi1 = o1 & 0xf0;
  1578. byte lo1 = o1 & 0x0f;
  1579. byte hi2 = o2 & 0xf0;
  1580. byte lo2 = o2 & 0x0f;
  1581. byte hi3 = o3 & 0xf0;
  1582. byte lo3 = o3 & 0x0f;
  1583. int i;
  1584. for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
  1585. e0 |= t[lo0 + i] & ((word32)0 - (((word32)hi0 - 0x01) >> 31));
  1586. hi0 -= WC_CACHE_LINE_ADD;
  1587. e1 |= t[lo1 + i] & ((word32)0 - (((word32)hi1 - 0x01) >> 31));
  1588. hi1 -= WC_CACHE_LINE_ADD;
  1589. e2 |= t[lo2 + i] & ((word32)0 - (((word32)hi2 - 0x01) >> 31));
  1590. hi2 -= WC_CACHE_LINE_ADD;
  1591. e3 |= t[lo3 + i] & ((word32)0 - (((word32)hi3 - 0x01) >> 31));
  1592. hi3 -= WC_CACHE_LINE_ADD;
  1593. }
  1594. *t0 ^= e0;
  1595. *t1 ^= e1;
  1596. *t2 ^= e2;
  1597. *t3 ^= e3;
  1598. }
/* Cache-attack-resistant lookup of four byte-table entries packed into a
 * big-endian word32: t[o0] in the top byte down to t[o3] in the bottom
 * byte.  One entry is read from every cache line for each index (see
 * GetTable for the masking scheme). */
static word32 GetTable8_4(const byte* t, byte o0, byte o1, byte o2, byte o3)
{
    word32 e = 0;
    int i;
    byte hi0 = o0 & WC_CACHE_LINE_MASK_HI;
    byte lo0 = o0 & WC_CACHE_LINE_MASK_LO;
    byte hi1 = o1 & WC_CACHE_LINE_MASK_HI;
    byte lo1 = o1 & WC_CACHE_LINE_MASK_LO;
    byte hi2 = o2 & WC_CACHE_LINE_MASK_HI;
    byte lo2 = o2 & WC_CACHE_LINE_MASK_LO;
    byte hi3 = o3 & WC_CACHE_LINE_MASK_HI;
    byte lo3 = o3 & WC_CACHE_LINE_MASK_LO;
    for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
        e |= (word32)(t[lo0 + i] & ((word32)0 - (((word32)hi0 - 0x01) >> 31)))
            << 24;
        hi0 -= WC_CACHE_LINE_ADD;
        e |= (word32)(t[lo1 + i] & ((word32)0 - (((word32)hi1 - 0x01) >> 31)))
            << 16;
        hi1 -= WC_CACHE_LINE_ADD;
        e |= (word32)(t[lo2 + i] & ((word32)0 - (((word32)hi2 - 0x01) >> 31)))
            << 8;
        hi2 -= WC_CACHE_LINE_ADD;
        e |= (word32)(t[lo3 + i] & ((word32)0 - (((word32)hi3 - 0x01) >> 31)))
            << 0;
        hi3 -= WC_CACHE_LINE_ADD;
    }
    return e;
}
  1627. #endif
  1628. #else
  1629. #define GetTable(t, o) t[o]
  1630. #define GetTable8(t, o) t[o]
  1631. #define GetTable_Multi(t, t0, o0, t1, o1, t2, o2, t3, o3) \
  1632. *(t0) = (t)[o0]; *(t1) = (t)[o1]; *(t2) = (t)[o2]; *(t3) = (t)[o3]
  1633. #define XorTable_Multi(t, t0, o0, t1, o1, t2, o2, t3, o3) \
  1634. *(t0) ^= (t)[o0]; *(t1) ^= (t)[o1]; *(t2) ^= (t)[o2]; *(t3) ^= (t)[o3]
  1635. #define GetTable8_4(t, o0, o1, o2, o3) \
  1636. (((word32)(t)[o0] << 24) | ((word32)(t)[o1] << 16) | \
  1637. ((word32)(t)[o2] << 8) | ((word32)(t)[o3] << 0))
  1638. #endif
  1639. /* this section disabled with NO_AES_192 */
  1640. static WARN_UNUSED_RESULT int wc_AesEncrypt( /* calling this one when missing NO_AES_192 */
  1641. Aes* aes, const byte* inBlock, byte* outBlock)
  1642. {
  1643. word32 s0, s1, s2, s3;
  1644. word32 t0, t1, t2, t3;
  1645. word32 r;
  1646. const word32* rk;
  1647. if (aes == NULL) {
  1648. return BAD_FUNC_ARG;
  1649. }
  1650. r = aes->rounds >> 1;
  1651. rk = aes->key;
  1652. if (r > 7 || r == 0) {
  1653. WOLFSSL_ERROR_VERBOSE(KEYUSAGE_E);
  1654. return KEYUSAGE_E;
  1655. }
  1656. #ifdef WOLFSSL_AESNI
  1657. if (haveAESNI && aes->use_aesni) {
  1658. #ifdef DEBUG_AESNI
  1659. printf("about to aes encrypt\n");
  1660. printf("in = %p\n", inBlock);
  1661. printf("out = %p\n", outBlock);
  1662. printf("aes->key = %p\n", aes->key);
  1663. printf("aes->rounds = %d\n", aes->rounds);
  1664. printf("sz = %d\n", AES_BLOCK_SIZE);
  1665. #endif
  1666. /* check alignment, decrypt doesn't need alignment */
  1667. if ((wc_ptr_t)inBlock % AESNI_ALIGN) {
  1668. #ifndef NO_WOLFSSL_ALLOC_ALIGN
  1669. byte* tmp = (byte*)XMALLOC(AES_BLOCK_SIZE + AESNI_ALIGN, aes->heap,
  1670. DYNAMIC_TYPE_TMP_BUFFER);
  1671. byte* tmp_align;
  1672. if (tmp == NULL)
  1673. return MEMORY_E;
  1674. tmp_align = tmp + (AESNI_ALIGN - ((wc_ptr_t)tmp % AESNI_ALIGN));
  1675. XMEMCPY(tmp_align, inBlock, AES_BLOCK_SIZE);
  1676. AES_ECB_encrypt(tmp_align, tmp_align, AES_BLOCK_SIZE,
  1677. (byte*)aes->key, (int)aes->rounds);
  1678. XMEMCPY(outBlock, tmp_align, AES_BLOCK_SIZE);
  1679. XFREE(tmp, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
  1680. return 0;
  1681. #else
  1682. WOLFSSL_MSG("AES-ECB encrypt with bad alignment");
  1683. WOLFSSL_ERROR_VERBOSE(BAD_ALIGN_E);
  1684. return BAD_ALIGN_E;
  1685. #endif
  1686. }
  1687. AES_ECB_encrypt(inBlock, outBlock, AES_BLOCK_SIZE, (byte*)aes->key,
  1688. (int)aes->rounds);
  1689. return 0;
  1690. }
  1691. else {
  1692. #ifdef DEBUG_AESNI
  1693. printf("Skipping AES-NI\n");
  1694. #endif
  1695. }
  1696. #endif
  1697. #if defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
  1698. AES_ECB_encrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
  1699. return 0;
  1700. #endif
  1701. #if defined(WOLFSSL_IMXRT_DCP)
  1702. if (aes->keylen == 16) {
  1703. DCPAesEcbEncrypt(aes, outBlock, inBlock, AES_BLOCK_SIZE);
  1704. return 0;
  1705. }
  1706. #endif
  1707. #if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
  1708. if (aes->useSWCrypt == 0) {
  1709. return se050_aes_crypt(aes, inBlock, outBlock, AES_BLOCK_SIZE,
  1710. AES_ENCRYPTION, kAlgorithm_SSS_AES_ECB);
  1711. }
  1712. #endif
  1713. #if defined(WOLFSSL_ESPIDF) && defined(NEED_AES_HW_FALLBACK)
  1714. ESP_LOGV(TAG, "wc_AesEncrypt fallback check");
  1715. if (wc_esp32AesSupportedKeyLen(aes)) {
  1716. return wc_esp32AesEncrypt(aes, inBlock, outBlock);
  1717. }
  1718. else {
  1719. /* For example, the ESP32-S3 does not support HW for len = 24,
  1720. * so fall back to SW */
  1721. #ifdef DEBUG_WOLFSSL
  1722. ESP_LOGW(TAG, "wc_AesEncrypt HW Falling back, unsupported keylen = %d",
  1723. aes->keylen);
  1724. #endif
  1725. }
  1726. #endif
  1727. /*
  1728. * map byte array block to cipher state
  1729. * and add initial round key:
  1730. */
  1731. XMEMCPY(&s0, inBlock, sizeof(s0));
  1732. XMEMCPY(&s1, inBlock + sizeof(s0), sizeof(s1));
  1733. XMEMCPY(&s2, inBlock + 2 * sizeof(s0), sizeof(s2));
  1734. XMEMCPY(&s3, inBlock + 3 * sizeof(s0), sizeof(s3));
  1735. #ifdef LITTLE_ENDIAN_ORDER
  1736. s0 = ByteReverseWord32(s0);
  1737. s1 = ByteReverseWord32(s1);
  1738. s2 = ByteReverseWord32(s2);
  1739. s3 = ByteReverseWord32(s3);
  1740. #endif
  1741. /* AddRoundKey */
  1742. s0 ^= rk[0];
  1743. s1 ^= rk[1];
  1744. s2 ^= rk[2];
  1745. s3 ^= rk[3];
  1746. #ifndef WOLFSSL_AES_SMALL_TABLES
  1747. #ifndef WC_NO_CACHE_RESISTANT
  1748. s0 |= PreFetchTe();
  1749. #endif
  1750. #ifndef WOLFSSL_AES_TOUCH_LINES
  1751. #define ENC_ROUND_T_S(o) \
  1752. t0 = GetTable(Te[0], GETBYTE(s0, 3)) ^ GetTable(Te[1], GETBYTE(s1, 2)) ^ \
  1753. GetTable(Te[2], GETBYTE(s2, 1)) ^ GetTable(Te[3], GETBYTE(s3, 0)) ^ \
  1754. rk[(o)+4]; \
  1755. t1 = GetTable(Te[0], GETBYTE(s1, 3)) ^ GetTable(Te[1], GETBYTE(s2, 2)) ^ \
  1756. GetTable(Te[2], GETBYTE(s3, 1)) ^ GetTable(Te[3], GETBYTE(s0, 0)) ^ \
  1757. rk[(o)+5]; \
  1758. t2 = GetTable(Te[0], GETBYTE(s2, 3)) ^ GetTable(Te[1], GETBYTE(s3, 2)) ^ \
  1759. GetTable(Te[2], GETBYTE(s0, 1)) ^ GetTable(Te[3], GETBYTE(s1, 0)) ^ \
  1760. rk[(o)+6]; \
  1761. t3 = GetTable(Te[0], GETBYTE(s3, 3)) ^ GetTable(Te[1], GETBYTE(s0, 2)) ^ \
  1762. GetTable(Te[2], GETBYTE(s1, 1)) ^ GetTable(Te[3], GETBYTE(s2, 0)) ^ \
  1763. rk[(o)+7]
  1764. #define ENC_ROUND_S_T(o) \
  1765. s0 = GetTable(Te[0], GETBYTE(t0, 3)) ^ GetTable(Te[1], GETBYTE(t1, 2)) ^ \
  1766. GetTable(Te[2], GETBYTE(t2, 1)) ^ GetTable(Te[3], GETBYTE(t3, 0)) ^ \
  1767. rk[(o)+0]; \
  1768. s1 = GetTable(Te[0], GETBYTE(t1, 3)) ^ GetTable(Te[1], GETBYTE(t2, 2)) ^ \
  1769. GetTable(Te[2], GETBYTE(t3, 1)) ^ GetTable(Te[3], GETBYTE(t0, 0)) ^ \
  1770. rk[(o)+1]; \
  1771. s2 = GetTable(Te[0], GETBYTE(t2, 3)) ^ GetTable(Te[1], GETBYTE(t3, 2)) ^ \
  1772. GetTable(Te[2], GETBYTE(t0, 1)) ^ GetTable(Te[3], GETBYTE(t1, 0)) ^ \
  1773. rk[(o)+2]; \
  1774. s3 = GetTable(Te[0], GETBYTE(t3, 3)) ^ GetTable(Te[1], GETBYTE(t0, 2)) ^ \
  1775. GetTable(Te[2], GETBYTE(t1, 1)) ^ GetTable(Te[3], GETBYTE(t2, 0)) ^ \
  1776. rk[(o)+3]
  1777. #else
  1778. #define ENC_ROUND_T_S(o) \
  1779. GetTable_Multi(Te[0], &t0, GETBYTE(s0, 3), &t1, GETBYTE(s1, 3), \
  1780. &t2, GETBYTE(s2, 3), &t3, GETBYTE(s3, 3)); \
  1781. XorTable_Multi(Te[1], &t0, GETBYTE(s1, 2), &t1, GETBYTE(s2, 2), \
  1782. &t2, GETBYTE(s3, 2), &t3, GETBYTE(s0, 2)); \
  1783. XorTable_Multi(Te[2], &t0, GETBYTE(s2, 1), &t1, GETBYTE(s3, 1), \
  1784. &t2, GETBYTE(s0, 1), &t3, GETBYTE(s1, 1)); \
  1785. XorTable_Multi(Te[3], &t0, GETBYTE(s3, 0), &t1, GETBYTE(s0, 0), \
  1786. &t2, GETBYTE(s1, 0), &t3, GETBYTE(s2, 0)); \
  1787. t0 ^= rk[(o)+4]; t1 ^= rk[(o)+5]; t2 ^= rk[(o)+6]; t3 ^= rk[(o)+7];
  1788. #define ENC_ROUND_S_T(o) \
  1789. GetTable_Multi(Te[0], &s0, GETBYTE(t0, 3), &s1, GETBYTE(t1, 3), \
  1790. &s2, GETBYTE(t2, 3), &s3, GETBYTE(t3, 3)); \
  1791. XorTable_Multi(Te[1], &s0, GETBYTE(t1, 2), &s1, GETBYTE(t2, 2), \
  1792. &s2, GETBYTE(t3, 2), &s3, GETBYTE(t0, 2)); \
  1793. XorTable_Multi(Te[2], &s0, GETBYTE(t2, 1), &s1, GETBYTE(t3, 1), \
  1794. &s2, GETBYTE(t0, 1), &s3, GETBYTE(t1, 1)); \
  1795. XorTable_Multi(Te[3], &s0, GETBYTE(t3, 0), &s1, GETBYTE(t0, 0), \
  1796. &s2, GETBYTE(t1, 0), &s3, GETBYTE(t2, 0)); \
  1797. s0 ^= rk[(o)+0]; s1 ^= rk[(o)+1]; s2 ^= rk[(o)+2]; s3 ^= rk[(o)+3];
  1798. #endif
  1799. #ifndef WOLFSSL_AES_NO_UNROLL
  1800. /* Unroll the loop. */
  1801. ENC_ROUND_T_S( 0);
  1802. ENC_ROUND_S_T( 8); ENC_ROUND_T_S( 8);
  1803. ENC_ROUND_S_T(16); ENC_ROUND_T_S(16);
  1804. ENC_ROUND_S_T(24); ENC_ROUND_T_S(24);
  1805. ENC_ROUND_S_T(32); ENC_ROUND_T_S(32);
  1806. if (r > 5) {
  1807. ENC_ROUND_S_T(40); ENC_ROUND_T_S(40);
  1808. if (r > 6) {
  1809. ENC_ROUND_S_T(48); ENC_ROUND_T_S(48);
  1810. }
  1811. }
  1812. rk += r * 8;
  1813. #else
  1814. /*
  1815. * Nr - 1 full rounds:
  1816. */
  1817. for (;;) {
  1818. ENC_ROUND_T_S(0);
  1819. rk += 8;
  1820. if (--r == 0) {
  1821. break;
  1822. }
  1823. ENC_ROUND_S_T(0);
  1824. }
  1825. #endif
  1826. /*
  1827. * apply last round and
  1828. * map cipher state to byte array block:
  1829. */
  1830. #ifndef WOLFSSL_AES_TOUCH_LINES
  1831. s0 =
  1832. (GetTable(Te[2], GETBYTE(t0, 3)) & 0xff000000) ^
  1833. (GetTable(Te[3], GETBYTE(t1, 2)) & 0x00ff0000) ^
  1834. (GetTable(Te[0], GETBYTE(t2, 1)) & 0x0000ff00) ^
  1835. (GetTable(Te[1], GETBYTE(t3, 0)) & 0x000000ff) ^
  1836. rk[0];
  1837. s1 =
  1838. (GetTable(Te[2], GETBYTE(t1, 3)) & 0xff000000) ^
  1839. (GetTable(Te[3], GETBYTE(t2, 2)) & 0x00ff0000) ^
  1840. (GetTable(Te[0], GETBYTE(t3, 1)) & 0x0000ff00) ^
  1841. (GetTable(Te[1], GETBYTE(t0, 0)) & 0x000000ff) ^
  1842. rk[1];
  1843. s2 =
  1844. (GetTable(Te[2], GETBYTE(t2, 3)) & 0xff000000) ^
  1845. (GetTable(Te[3], GETBYTE(t3, 2)) & 0x00ff0000) ^
  1846. (GetTable(Te[0], GETBYTE(t0, 1)) & 0x0000ff00) ^
  1847. (GetTable(Te[1], GETBYTE(t1, 0)) & 0x000000ff) ^
  1848. rk[2];
  1849. s3 =
  1850. (GetTable(Te[2], GETBYTE(t3, 3)) & 0xff000000) ^
  1851. (GetTable(Te[3], GETBYTE(t0, 2)) & 0x00ff0000) ^
  1852. (GetTable(Te[0], GETBYTE(t1, 1)) & 0x0000ff00) ^
  1853. (GetTable(Te[1], GETBYTE(t2, 0)) & 0x000000ff) ^
  1854. rk[3];
  1855. #else
  1856. {
  1857. word32 u0;
  1858. word32 u1;
  1859. word32 u2;
  1860. word32 u3;
  1861. s0 = rk[0]; s1 = rk[1]; s2 = rk[2]; s3 = rk[3];
  1862. GetTable_Multi(Te[2], &u0, GETBYTE(t0, 3), &u1, GETBYTE(t1, 3),
  1863. &u2, GETBYTE(t2, 3), &u3, GETBYTE(t3, 3));
  1864. s0 ^= u0 & 0xff000000; s1 ^= u1 & 0xff000000;
  1865. s2 ^= u2 & 0xff000000; s3 ^= u3 & 0xff000000;
  1866. GetTable_Multi(Te[3], &u0, GETBYTE(t1, 2), &u1, GETBYTE(t2, 2),
  1867. &u2, GETBYTE(t3, 2), &u3, GETBYTE(t0, 2));
  1868. s0 ^= u0 & 0x00ff0000; s1 ^= u1 & 0x00ff0000;
  1869. s2 ^= u2 & 0x00ff0000; s3 ^= u3 & 0x00ff0000;
  1870. GetTable_Multi(Te[0], &u0, GETBYTE(t2, 1), &u1, GETBYTE(t3, 1),
  1871. &u2, GETBYTE(t0, 1), &u3, GETBYTE(t1, 1));
  1872. s0 ^= u0 & 0x0000ff00; s1 ^= u1 & 0x0000ff00;
  1873. s2 ^= u2 & 0x0000ff00; s3 ^= u3 & 0x0000ff00;
  1874. GetTable_Multi(Te[1], &u0, GETBYTE(t3, 0), &u1, GETBYTE(t0, 0),
  1875. &u2, GETBYTE(t1, 0), &u3, GETBYTE(t2, 0));
  1876. s0 ^= u0 & 0x000000ff; s1 ^= u1 & 0x000000ff;
  1877. s2 ^= u2 & 0x000000ff; s3 ^= u3 & 0x000000ff;
  1878. }
  1879. #endif
  1880. #else
  1881. #ifndef WC_NO_CACHE_RESISTANT
  1882. s0 |= PreFetchSBox();
  1883. #endif
  1884. r *= 2;
  1885. /* Two rounds at a time */
  1886. for (rk += 4; r > 1; r--, rk += 4) {
  1887. t0 =
  1888. ((word32)GetTable8(Tsbox, GETBYTE(s0, 3)) << 24) ^
  1889. ((word32)GetTable8(Tsbox, GETBYTE(s1, 2)) << 16) ^
  1890. ((word32)GetTable8(Tsbox, GETBYTE(s2, 1)) << 8) ^
  1891. ((word32)GetTable8(Tsbox, GETBYTE(s3, 0)));
  1892. t1 =
  1893. ((word32)GetTable8(Tsbox, GETBYTE(s1, 3)) << 24) ^
  1894. ((word32)GetTable8(Tsbox, GETBYTE(s2, 2)) << 16) ^
  1895. ((word32)GetTable8(Tsbox, GETBYTE(s3, 1)) << 8) ^
  1896. ((word32)GetTable8(Tsbox, GETBYTE(s0, 0)));
  1897. t2 =
  1898. ((word32)GetTable8(Tsbox, GETBYTE(s2, 3)) << 24) ^
  1899. ((word32)GetTable8(Tsbox, GETBYTE(s3, 2)) << 16) ^
  1900. ((word32)GetTable8(Tsbox, GETBYTE(s0, 1)) << 8) ^
  1901. ((word32)GetTable8(Tsbox, GETBYTE(s1, 0)));
  1902. t3 =
  1903. ((word32)GetTable8(Tsbox, GETBYTE(s3, 3)) << 24) ^
  1904. ((word32)GetTable8(Tsbox, GETBYTE(s0, 2)) << 16) ^
  1905. ((word32)GetTable8(Tsbox, GETBYTE(s1, 1)) << 8) ^
  1906. ((word32)GetTable8(Tsbox, GETBYTE(s2, 0)));
  1907. s0 =
  1908. (col_mul(t0, 3, 2, 0, 1) << 24) ^
  1909. (col_mul(t0, 2, 1, 0, 3) << 16) ^
  1910. (col_mul(t0, 1, 0, 2, 3) << 8) ^
  1911. (col_mul(t0, 0, 3, 2, 1) ) ^
  1912. rk[0];
  1913. s1 =
  1914. (col_mul(t1, 3, 2, 0, 1) << 24) ^
  1915. (col_mul(t1, 2, 1, 0, 3) << 16) ^
  1916. (col_mul(t1, 1, 0, 2, 3) << 8) ^
  1917. (col_mul(t1, 0, 3, 2, 1) ) ^
  1918. rk[1];
  1919. s2 =
  1920. (col_mul(t2, 3, 2, 0, 1) << 24) ^
  1921. (col_mul(t2, 2, 1, 0, 3) << 16) ^
  1922. (col_mul(t2, 1, 0, 2, 3) << 8) ^
  1923. (col_mul(t2, 0, 3, 2, 1) ) ^
  1924. rk[2];
  1925. s3 =
  1926. (col_mul(t3, 3, 2, 0, 1) << 24) ^
  1927. (col_mul(t3, 2, 1, 0, 3) << 16) ^
  1928. (col_mul(t3, 1, 0, 2, 3) << 8) ^
  1929. (col_mul(t3, 0, 3, 2, 1) ) ^
  1930. rk[3];
  1931. }
  1932. t0 =
  1933. ((word32)GetTable8(Tsbox, GETBYTE(s0, 3)) << 24) ^
  1934. ((word32)GetTable8(Tsbox, GETBYTE(s1, 2)) << 16) ^
  1935. ((word32)GetTable8(Tsbox, GETBYTE(s2, 1)) << 8) ^
  1936. ((word32)GetTable8(Tsbox, GETBYTE(s3, 0)));
  1937. t1 =
  1938. ((word32)GetTable8(Tsbox, GETBYTE(s1, 3)) << 24) ^
  1939. ((word32)GetTable8(Tsbox, GETBYTE(s2, 2)) << 16) ^
  1940. ((word32)GetTable8(Tsbox, GETBYTE(s3, 1)) << 8) ^
  1941. ((word32)GetTable8(Tsbox, GETBYTE(s0, 0)));
  1942. t2 =
  1943. ((word32)GetTable8(Tsbox, GETBYTE(s2, 3)) << 24) ^
  1944. ((word32)GetTable8(Tsbox, GETBYTE(s3, 2)) << 16) ^
  1945. ((word32)GetTable8(Tsbox, GETBYTE(s0, 1)) << 8) ^
  1946. ((word32)GetTable8(Tsbox, GETBYTE(s1, 0)));
  1947. t3 =
  1948. ((word32)GetTable8(Tsbox, GETBYTE(s3, 3)) << 24) ^
  1949. ((word32)GetTable8(Tsbox, GETBYTE(s0, 2)) << 16) ^
  1950. ((word32)GetTable8(Tsbox, GETBYTE(s1, 1)) << 8) ^
  1951. ((word32)GetTable8(Tsbox, GETBYTE(s2, 0)));
  1952. s0 = t0 ^ rk[0];
  1953. s1 = t1 ^ rk[1];
  1954. s2 = t2 ^ rk[2];
  1955. s3 = t3 ^ rk[3];
  1956. #endif
  1957. /* write out */
  1958. #ifdef LITTLE_ENDIAN_ORDER
  1959. s0 = ByteReverseWord32(s0);
  1960. s1 = ByteReverseWord32(s1);
  1961. s2 = ByteReverseWord32(s2);
  1962. s3 = ByteReverseWord32(s3);
  1963. #endif
  1964. XMEMCPY(outBlock, &s0, sizeof(s0));
  1965. XMEMCPY(outBlock + sizeof(s0), &s1, sizeof(s1));
  1966. XMEMCPY(outBlock + 2 * sizeof(s0), &s2, sizeof(s2));
  1967. XMEMCPY(outBlock + 3 * sizeof(s0), &s3, sizeof(s3));
  1968. return 0;
  1969. } /* wc_AesEncrypt */
  1970. #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT || HAVE_AESGCM */
  1971. #if defined(HAVE_AES_DECRYPT)
  1972. #if (defined(HAVE_AES_CBC) && !defined(WOLFSSL_DEVCRYPTO_CBC) && \
  1973. !defined(WOLFSSL_SILABS_SE_ACCEL)) || \
  1974. defined(WOLFSSL_AES_DIRECT)
  1975. #ifndef WC_NO_CACHE_RESISTANT
  1976. #ifndef WOLFSSL_AES_SMALL_TABLES
  1977. /* load 4 Td Tables into cache by cache line stride */
static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchTd(void)
{
    /* Accumulator starts at 0 and is only AND-ed, so the return value is
     * always 0; the loads exist solely to pull the Td tables into cache
     * before key-dependent lookups (timing side-channel mitigation).
     * The caller ORs the result into live state so the compiler cannot
     * discard the reads. */
    word32 x = 0;
    int i,j;

    for (i = 0; i < 4; i++) {
        /* 256 elements, each one is 4 bytes; step by one cache line
         * (WC_CACHE_LINE_SZ bytes == WC_CACHE_LINE_SZ/4 words). */
        for (j = 0; j < 256; j += WC_CACHE_LINE_SZ/4) {
            x &= Td[i][j];
        }
    }

    return x;
}
  1990. #endif
  1991. /* load Td Table4 into cache by cache line stride */
static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchTd4(void)
{
#ifndef WOLFSSL_AES_TOUCH_LINES
    /* Touch one byte of Td4 per cache line so the final-round S-box
     * lookups below do not leak key-dependent cache-miss timing.
     * x stays 0 (initialized to 0, only AND-ed); callers OR it into
     * live state to keep the loads from being optimized away. */
    word32 x = 0;
    int i;

    for (i = 0; i < 256; i += WC_CACHE_LINE_SZ) {
        x &= (word32)Td4[i];
    }

    return x;
#else
    /* WOLFSSL_AES_TOUCH_LINES builds use line-touching table accessors
     * instead, so no prefetch is needed here. */
    return 0;
#endif
}
  2005. #endif
  2006. /* Software AES - ECB Decrypt */
/* Software AES - single-block (16-byte) ECB decrypt.
 *
 * Decrypts one AES_BLOCK_SIZE block from inBlock into outBlock using the
 * decryption key schedule in aes->key.  When a hardware back-end is
 * compiled in (AES-NI, Renesas SCE, i.MX RT DCP, SE050, ESP32) and usable
 * for this key, the work is delegated to it; otherwise the table-driven
 * software path below runs.
 *
 * aes      - initialized Aes context (round keys, aes->rounds)
 * inBlock  - 16-byte ciphertext input
 * outBlock - 16-byte plaintext output (may alias inBlock)
 *
 * Returns 0 on success, BAD_FUNC_ARG on NULL context, KEYUSAGE_E on an
 * invalid round count, or a hardware back-end error code.
 */
static WARN_UNUSED_RESULT int wc_AesDecrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    word32 s0, s1, s2, s3;   /* cipher state columns */
    word32 t0, t1, t2, t3;   /* per-round temporaries */
    word32 r;                /* number of round *pairs* */
    const word32* rk;        /* walking pointer into the key schedule */

    if (aes == NULL) {
        return BAD_FUNC_ARG;
    }

    /* r = rounds/2: 5, 6 or 7 for AES-128/192/256; anything else means
     * the context was never keyed (or is corrupt). */
    r = aes->rounds >> 1;
    rk = aes->key;
    if (r > 7 || r == 0) {
        WOLFSSL_ERROR_VERBOSE(KEYUSAGE_E);
        return KEYUSAGE_E;
    }

#ifdef WOLFSSL_AESNI
    if (haveAESNI && aes->use_aesni) {
#ifdef DEBUG_AESNI
        printf("about to aes decrypt\n");
        printf("in = %p\n", inBlock);
        printf("out = %p\n", outBlock);
        printf("aes->key = %p\n", aes->key);
        printf("aes->rounds = %d\n", aes->rounds);
        printf("sz = %d\n", AES_BLOCK_SIZE);
#endif

        /* if input and output same will overwrite input iv */
        if ((const byte*)aes->tmp != inBlock)
            XMEMCPY(aes->tmp, inBlock, AES_BLOCK_SIZE);
        AES_ECB_decrypt(inBlock, outBlock, AES_BLOCK_SIZE, (byte*)aes->key,
                        (int)aes->rounds);
        return 0;
    }
    else {
#ifdef DEBUG_AESNI
        printf("Skipping AES-NI\n");
#endif
    }
#endif /* WOLFSSL_AESNI */
#if defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
    return AES_ECB_decrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
#endif
#if defined(WOLFSSL_IMXRT_DCP)
    /* DCP hardware only handles 128-bit keys; larger keys fall through
     * to the software path. */
    if (aes->keylen == 16) {
        DCPAesEcbDecrypt(aes, outBlock, inBlock, AES_BLOCK_SIZE);
        return 0;
    }
#endif
#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    if (aes->useSWCrypt == 0) {
        return se050_aes_crypt(aes, inBlock, outBlock, AES_BLOCK_SIZE,
                               AES_DECRYPTION, kAlgorithm_SSS_AES_ECB);
    }
#endif
#if defined(WOLFSSL_ESPIDF) && defined(NEED_AES_HW_FALLBACK)
    if (wc_esp32AesSupportedKeyLen(aes)) {
        return wc_esp32AesDecrypt(aes, inBlock, outBlock);
    }
    else {
        /* For example, the ESP32-S3 does not support HW for len = 24,
         * so fall back to SW */
#ifdef DEBUG_WOLFSSL
        ESP_LOGW(TAG, "wc_AesDecrypt HW Falling back, "
                      "unsupported keylen = %d", aes->keylen);
#endif
    } /* else !wc_esp32AesSupportedKeyLen for ESP32 */
#endif

    /*
     * map byte array block to cipher state
     * and add initial round key:
     */
    XMEMCPY(&s0, inBlock, sizeof(s0));
    XMEMCPY(&s1, inBlock + sizeof(s0), sizeof(s1));
    XMEMCPY(&s2, inBlock + 2 * sizeof(s0), sizeof(s2));
    XMEMCPY(&s3, inBlock + 3 * sizeof(s0), sizeof(s3));

#ifdef LITTLE_ENDIAN_ORDER
    /* State words are processed big-endian; swap on LE hosts. */
    s0 = ByteReverseWord32(s0);
    s1 = ByteReverseWord32(s1);
    s2 = ByteReverseWord32(s2);
    s3 = ByteReverseWord32(s3);
#endif

    s0 ^= rk[0];
    s1 ^= rk[1];
    s2 ^= rk[2];
    s3 ^= rk[3];

#ifndef WOLFSSL_AES_SMALL_TABLES
#ifndef WC_NO_CACHE_RESISTANT
    /* PreFetchTd() returns 0; OR keeps the cache-warming loads alive. */
    s0 |= PreFetchTd();
#endif

#ifndef WOLFSSL_AES_TOUCH_LINES
/* Unroll the loop. */
/* One inverse round, state s* -> temporaries t*, consuming rk[(o)+4..7]. */
#define DEC_ROUND_T_S(o) \
    t0 = GetTable(Td[0], GETBYTE(s0, 3)) ^ GetTable(Td[1], GETBYTE(s3, 2)) ^ \
         GetTable(Td[2], GETBYTE(s2, 1)) ^ GetTable(Td[3], GETBYTE(s1, 0)) ^ rk[(o)+4]; \
    t1 = GetTable(Td[0], GETBYTE(s1, 3)) ^ GetTable(Td[1], GETBYTE(s0, 2)) ^ \
         GetTable(Td[2], GETBYTE(s3, 1)) ^ GetTable(Td[3], GETBYTE(s2, 0)) ^ rk[(o)+5]; \
    t2 = GetTable(Td[0], GETBYTE(s2, 3)) ^ GetTable(Td[1], GETBYTE(s1, 2)) ^ \
         GetTable(Td[2], GETBYTE(s0, 1)) ^ GetTable(Td[3], GETBYTE(s3, 0)) ^ rk[(o)+6]; \
    t3 = GetTable(Td[0], GETBYTE(s3, 3)) ^ GetTable(Td[1], GETBYTE(s2, 2)) ^ \
         GetTable(Td[2], GETBYTE(s1, 1)) ^ GetTable(Td[3], GETBYTE(s0, 0)) ^ rk[(o)+7]
/* One inverse round, temporaries t* -> state s*, consuming rk[(o)+0..3]. */
#define DEC_ROUND_S_T(o) \
    s0 = GetTable(Td[0], GETBYTE(t0, 3)) ^ GetTable(Td[1], GETBYTE(t3, 2)) ^ \
         GetTable(Td[2], GETBYTE(t2, 1)) ^ GetTable(Td[3], GETBYTE(t1, 0)) ^ rk[(o)+0]; \
    s1 = GetTable(Td[0], GETBYTE(t1, 3)) ^ GetTable(Td[1], GETBYTE(t0, 2)) ^ \
         GetTable(Td[2], GETBYTE(t3, 1)) ^ GetTable(Td[3], GETBYTE(t2, 0)) ^ rk[(o)+1]; \
    s2 = GetTable(Td[0], GETBYTE(t2, 3)) ^ GetTable(Td[1], GETBYTE(t1, 2)) ^ \
         GetTable(Td[2], GETBYTE(t0, 1)) ^ GetTable(Td[3], GETBYTE(t3, 0)) ^ rk[(o)+2]; \
    s3 = GetTable(Td[0], GETBYTE(t3, 3)) ^ GetTable(Td[1], GETBYTE(t2, 2)) ^ \
         GetTable(Td[2], GETBYTE(t1, 1)) ^ GetTable(Td[3], GETBYTE(t0, 0)) ^ rk[(o)+3]
#else
/* WOLFSSL_AES_TOUCH_LINES variant: the Multi accessors touch whole cache
 * lines per lookup to harden against cache-timing attacks. */
#define DEC_ROUND_T_S(o) \
    GetTable_Multi(Td[0], &t0, GETBYTE(s0, 3), &t1, GETBYTE(s1, 3), \
                          &t2, GETBYTE(s2, 3), &t3, GETBYTE(s3, 3)); \
    XorTable_Multi(Td[1], &t0, GETBYTE(s3, 2), &t1, GETBYTE(s0, 2), \
                          &t2, GETBYTE(s1, 2), &t3, GETBYTE(s2, 2)); \
    XorTable_Multi(Td[2], &t0, GETBYTE(s2, 1), &t1, GETBYTE(s3, 1), \
                          &t2, GETBYTE(s0, 1), &t3, GETBYTE(s1, 1)); \
    XorTable_Multi(Td[3], &t0, GETBYTE(s1, 0), &t1, GETBYTE(s2, 0), \
                          &t2, GETBYTE(s3, 0), &t3, GETBYTE(s0, 0)); \
    t0 ^= rk[(o)+4]; t1 ^= rk[(o)+5]; t2 ^= rk[(o)+6]; t3 ^= rk[(o)+7];
#define DEC_ROUND_S_T(o) \
    GetTable_Multi(Td[0], &s0, GETBYTE(t0, 3), &s1, GETBYTE(t1, 3), \
                          &s2, GETBYTE(t2, 3), &s3, GETBYTE(t3, 3)); \
    XorTable_Multi(Td[1], &s0, GETBYTE(t3, 2), &s1, GETBYTE(t0, 2), \
                          &s2, GETBYTE(t1, 2), &s3, GETBYTE(t2, 2)); \
    XorTable_Multi(Td[2], &s0, GETBYTE(t2, 1), &s1, GETBYTE(t3, 1), \
                          &s2, GETBYTE(t0, 1), &s3, GETBYTE(t1, 1)); \
    XorTable_Multi(Td[3], &s0, GETBYTE(t1, 0), &s1, GETBYTE(t2, 0), \
                          &s2, GETBYTE(t3, 0), &s3, GETBYTE(t0, 0)); \
    s0 ^= rk[(o)+0]; s1 ^= rk[(o)+1]; s2 ^= rk[(o)+2]; s3 ^= rk[(o)+3];
#endif

#ifndef WOLFSSL_AES_NO_UNROLL
    /* Unrolled: 5 round-pairs always (AES-128), plus one more pair for
     * AES-192 (r == 6) and two more for AES-256 (r == 7). */
    DEC_ROUND_T_S( 0);
    DEC_ROUND_S_T( 8); DEC_ROUND_T_S( 8);
    DEC_ROUND_S_T(16); DEC_ROUND_T_S(16);
    DEC_ROUND_S_T(24); DEC_ROUND_T_S(24);
    DEC_ROUND_S_T(32); DEC_ROUND_T_S(32);
    if (r > 5) {
        DEC_ROUND_S_T(40); DEC_ROUND_T_S(40);
        if (r > 6) {
            DEC_ROUND_S_T(48); DEC_ROUND_T_S(48);
        }
    }
    /* Leave rk pointing at the final round key (8 words per pair). */
    rk += r * 8;
#else
    /*
     * Nr - 1 full rounds:
     */
    for (;;) {
        DEC_ROUND_T_S(0);

        rk += 8;
        if (--r == 0) {
            break;
        }

        DEC_ROUND_S_T(0);
    }
#endif

    /*
     * apply last round and
     * map cipher state to byte array block:
     */
#ifndef WC_NO_CACHE_RESISTANT
    /* PreFetchTd4() returns 0; OR keeps the cache-warming loads alive. */
    t0 |= PreFetchTd4();
#endif

    s0 = GetTable8_4(Td4, GETBYTE(t0, 3), GETBYTE(t3, 2),
                          GETBYTE(t2, 1), GETBYTE(t1, 0)) ^ rk[0];
    s1 = GetTable8_4(Td4, GETBYTE(t1, 3), GETBYTE(t0, 2),
                          GETBYTE(t3, 1), GETBYTE(t2, 0)) ^ rk[1];
    s2 = GetTable8_4(Td4, GETBYTE(t2, 3), GETBYTE(t1, 2),
                          GETBYTE(t0, 1), GETBYTE(t3, 0)) ^ rk[2];
    s3 = GetTable8_4(Td4, GETBYTE(t3, 3), GETBYTE(t2, 2),
                          GETBYTE(t1, 1), GETBYTE(t0, 0)) ^ rk[3];
#else
    /* WOLFSSL_AES_SMALL_TABLES: only the 256-byte Td4 table is available,
     * so the column mixing is computed arithmetically (inv_col_mul). */
#ifndef WC_NO_CACHE_RESISTANT
    s0 |= PreFetchTd4();
#endif

    r *= 2;
    for (rk += 4; r > 1; r--, rk += 4) {
        t0 =
            ((word32)GetTable8(Td4, GETBYTE(s0, 3)) << 24) ^
            ((word32)GetTable8(Td4, GETBYTE(s3, 2)) << 16) ^
            ((word32)GetTable8(Td4, GETBYTE(s2, 1)) <<  8) ^
            ((word32)GetTable8(Td4, GETBYTE(s1, 0))) ^
            rk[0];
        t1 =
            ((word32)GetTable8(Td4, GETBYTE(s1, 3)) << 24) ^
            ((word32)GetTable8(Td4, GETBYTE(s0, 2)) << 16) ^
            ((word32)GetTable8(Td4, GETBYTE(s3, 1)) <<  8) ^
            ((word32)GetTable8(Td4, GETBYTE(s2, 0))) ^
            rk[1];
        t2 =
            ((word32)GetTable8(Td4, GETBYTE(s2, 3)) << 24) ^
            ((word32)GetTable8(Td4, GETBYTE(s1, 2)) << 16) ^
            ((word32)GetTable8(Td4, GETBYTE(s0, 1)) <<  8) ^
            ((word32)GetTable8(Td4, GETBYTE(s3, 0))) ^
            rk[2];
        t3 =
            ((word32)GetTable8(Td4, GETBYTE(s3, 3)) << 24) ^
            ((word32)GetTable8(Td4, GETBYTE(s2, 2)) << 16) ^
            ((word32)GetTable8(Td4, GETBYTE(s1, 1)) <<  8) ^
            ((word32)GetTable8(Td4, GETBYTE(s0, 0))) ^
            rk[3];

        s0 =
            (inv_col_mul(t0, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t0, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t0, 2, 0, 3, 1) <<  8) ^
            (inv_col_mul(t0, 1, 3, 2, 0)      );
        s1 =
            (inv_col_mul(t1, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t1, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t1, 2, 0, 3, 1) <<  8) ^
            (inv_col_mul(t1, 1, 3, 2, 0)      );
        s2 =
            (inv_col_mul(t2, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t2, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t2, 2, 0, 3, 1) <<  8) ^
            (inv_col_mul(t2, 1, 3, 2, 0)      );
        s3 =
            (inv_col_mul(t3, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t3, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t3, 2, 0, 3, 1) <<  8) ^
            (inv_col_mul(t3, 1, 3, 2, 0)      );
    }

    /* Final round: substitution only, no column mixing. */
    t0 =
        ((word32)GetTable8(Td4, GETBYTE(s0, 3)) << 24) ^
        ((word32)GetTable8(Td4, GETBYTE(s3, 2)) << 16) ^
        ((word32)GetTable8(Td4, GETBYTE(s2, 1)) <<  8) ^
        ((word32)GetTable8(Td4, GETBYTE(s1, 0)));
    t1 =
        ((word32)GetTable8(Td4, GETBYTE(s1, 3)) << 24) ^
        ((word32)GetTable8(Td4, GETBYTE(s0, 2)) << 16) ^
        ((word32)GetTable8(Td4, GETBYTE(s3, 1)) <<  8) ^
        ((word32)GetTable8(Td4, GETBYTE(s2, 0)));
    t2 =
        ((word32)GetTable8(Td4, GETBYTE(s2, 3)) << 24) ^
        ((word32)GetTable8(Td4, GETBYTE(s1, 2)) << 16) ^
        ((word32)GetTable8(Td4, GETBYTE(s0, 1)) <<  8) ^
        ((word32)GetTable8(Td4, GETBYTE(s3, 0)));
    t3 =
        ((word32)GetTable8(Td4, GETBYTE(s3, 3)) << 24) ^
        ((word32)GetTable8(Td4, GETBYTE(s2, 2)) << 16) ^
        ((word32)GetTable8(Td4, GETBYTE(s1, 1)) <<  8) ^
        ((word32)GetTable8(Td4, GETBYTE(s0, 0)));
    s0 = t0 ^ rk[0];
    s1 = t1 ^ rk[1];
    s2 = t2 ^ rk[2];
    s3 = t3 ^ rk[3];
#endif

    /* write out */
#ifdef LITTLE_ENDIAN_ORDER
    s0 = ByteReverseWord32(s0);
    s1 = ByteReverseWord32(s1);
    s2 = ByteReverseWord32(s2);
    s3 = ByteReverseWord32(s3);
#endif

    XMEMCPY(outBlock,                  &s0, sizeof(s0));
    XMEMCPY(outBlock +     sizeof(s0), &s1, sizeof(s1));
    XMEMCPY(outBlock + 2 * sizeof(s0), &s2, sizeof(s2));
    XMEMCPY(outBlock + 3 * sizeof(s0), &s3, sizeof(s3));

    return 0;
} /* wc_AesDecrypt[_SW]() */
  2268. #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT */
  2269. #endif /* HAVE_AES_DECRYPT */
  2270. #endif /* NEED_AES_TABLES */
  2271. /* wc_AesSetKey */
  2272. #if defined(STM32_CRYPTO)
  2273. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
  2274. const byte* iv, int dir)
  2275. {
  2276. word32 *rk;
  2277. (void)dir;
  2278. if (aes == NULL || (keylen != 16 &&
  2279. #ifdef WOLFSSL_AES_192
  2280. keylen != 24 &&
  2281. #endif
  2282. keylen != 32)) {
  2283. return BAD_FUNC_ARG;
  2284. }
  2285. rk = aes->key;
  2286. aes->keylen = keylen;
  2287. aes->rounds = keylen/4 + 6;
  2288. XMEMCPY(rk, userKey, keylen);
  2289. #if !defined(WOLFSSL_STM32_CUBEMX) || defined(STM32_HAL_V2)
  2290. ByteReverseWords(rk, rk, keylen);
  2291. #endif
  2292. #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
  2293. defined(WOLFSSL_AES_OFB)
  2294. aes->left = 0;
  2295. #endif
  2296. return wc_AesSetIV(aes, iv);
  2297. }
  2298. #if defined(WOLFSSL_AES_DIRECT)
/* AES direct (single-block) API uses the same key setup as the block
 * modes on this port; simply forward to wc_AesSetKey(). */
int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
                       const byte* iv, int dir)
{
    return wc_AesSetKey(aes, userKey, keylen, iv, dir);
}
  2304. #endif
  2305. #elif defined(HAVE_COLDFIRE_SEC)
  2306. #if defined (HAVE_THREADX)
  2307. #include "memory_pools.h"
  2308. extern TX_BYTE_POOL mp_ncached; /* Non Cached memory pool */
  2309. #endif
  2310. #define AES_BUFFER_SIZE (AES_BLOCK_SIZE * 64)
  2311. static unsigned char *AESBuffIn = NULL;
  2312. static unsigned char *AESBuffOut = NULL;
  2313. static byte *secReg;
  2314. static byte *secKey;
  2315. static volatile SECdescriptorType *secDesc;
  2316. static wolfSSL_Mutex Mutex_AesSEC;
  2317. #define SEC_DESC_AES_CBC_ENCRYPT 0x60300010
  2318. #define SEC_DESC_AES_CBC_DECRYPT 0x60200010
  2319. extern volatile unsigned char __MBAR[];
/* ColdFire SEC hardware AES - set key and IV.
 * On first use, lazily allocates the non-cached DMA buffers and the SEC
 * descriptor (ThreadX byte pool on HAVE_THREADX builds) and initializes
 * the SEC mutex.  Accepts 16/24/32-byte keys; the IV (if given) is
 * copied into aes->reg.  Returns 0 on success or BAD_FUNC_ARG. */
int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
                 const byte* iv, int dir)
{
    if (AESBuffIn == NULL) {
        /* One-time setup of non-cached buffers shared by all contexts.
         * NOTE(review): not thread-safe — two callers racing here could
         * both allocate; presumably first key setup happens before
         * threading starts. */
#if defined (HAVE_THREADX)
        int s1, s2, s3, s4, s5;

        s5 = tx_byte_allocate(&mp_ncached,(void *)&secDesc,
                              sizeof(SECdescriptorType), TX_NO_WAIT);
        s1 = tx_byte_allocate(&mp_ncached, (void *)&AESBuffIn,
                              AES_BUFFER_SIZE, TX_NO_WAIT);
        s2 = tx_byte_allocate(&mp_ncached, (void *)&AESBuffOut,
                              AES_BUFFER_SIZE, TX_NO_WAIT);
        s3 = tx_byte_allocate(&mp_ncached, (void *)&secKey,
                              AES_BLOCK_SIZE*2, TX_NO_WAIT);
        s4 = tx_byte_allocate(&mp_ncached, (void *)&secReg,
                              AES_BLOCK_SIZE, TX_NO_WAIT);

        if (s1 || s2 || s3 || s4 || s5)
            return BAD_FUNC_ARG;
#else
#warning "Allocate non-Cache buffers"
#endif

        wc_InitMutex(&Mutex_AesSEC);
    }

    if (!((keylen == 16) || (keylen == 24) || (keylen == 32)))
        return BAD_FUNC_ARG;

    if (aes == NULL)
        return BAD_FUNC_ARG;

    aes->keylen = keylen;
    aes->rounds = keylen/4 + 6;
    XMEMCPY(aes->key, userKey, keylen);

    if (iv)
        XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE);

#if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
    defined(WOLFSSL_AES_OFB)
    aes->left = 0;
#endif

    return 0;
}
  2358. #elif defined(FREESCALE_LTC)
  2359. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, const byte* iv,
  2360. int dir)
  2361. {
  2362. if (aes == NULL || !((keylen == 16) || (keylen == 24) || (keylen == 32)))
  2363. return BAD_FUNC_ARG;
  2364. aes->rounds = keylen/4 + 6;
  2365. XMEMCPY(aes->key, userKey, keylen);
  2366. #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
  2367. defined(WOLFSSL_AES_OFB)
  2368. aes->left = 0;
  2369. #endif
  2370. return wc_AesSetIV(aes, iv);
  2371. }
/* Direct (single-block) key setup is identical to the block-mode setup
 * on the LTC port; forward to wc_AesSetKey(). */
int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
                       const byte* iv, int dir)
{
    return wc_AesSetKey(aes, userKey, keylen, iv, dir);
}
  2377. #elif defined(FREESCALE_MMCAU)
  2378. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
  2379. const byte* iv, int dir)
  2380. {
  2381. int ret;
  2382. byte* rk;
  2383. byte* tmpKey = (byte*)userKey;
  2384. int tmpKeyDynamic = 0;
  2385. word32 alignOffset = 0;
  2386. (void)dir;
  2387. if (!((keylen == 16) || (keylen == 24) || (keylen == 32)))
  2388. return BAD_FUNC_ARG;
  2389. if (aes == NULL)
  2390. return BAD_FUNC_ARG;
  2391. rk = (byte*)aes->key;
  2392. if (rk == NULL)
  2393. return BAD_FUNC_ARG;
  2394. #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
  2395. defined(WOLFSSL_AES_OFB)
  2396. aes->left = 0;
  2397. #endif
  2398. aes->rounds = keylen/4 + 6;
  2399. #ifdef FREESCALE_MMCAU_CLASSIC
  2400. if ((wc_ptr_t)userKey % WOLFSSL_MMCAU_ALIGNMENT) {
  2401. #ifndef NO_WOLFSSL_ALLOC_ALIGN
  2402. byte* tmp = (byte*)XMALLOC(keylen + WOLFSSL_MMCAU_ALIGNMENT,
  2403. aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
  2404. if (tmp == NULL) {
  2405. return MEMORY_E;
  2406. }
  2407. alignOffset = WOLFSSL_MMCAU_ALIGNMENT -
  2408. ((wc_ptr_t)tmp % WOLFSSL_MMCAU_ALIGNMENT);
  2409. tmpKey = tmp + alignOffset;
  2410. XMEMCPY(tmpKey, userKey, keylen);
  2411. tmpKeyDynamic = 1;
  2412. #else
  2413. WOLFSSL_MSG("Bad cau_aes_set_key alignment");
  2414. return BAD_ALIGN_E;
  2415. #endif
  2416. }
  2417. #endif
  2418. ret = wolfSSL_CryptHwMutexLock();
  2419. if(ret == 0) {
  2420. #ifdef FREESCALE_MMCAU_CLASSIC
  2421. cau_aes_set_key(tmpKey, keylen*8, rk);
  2422. #else
  2423. MMCAU_AES_SetKey(tmpKey, keylen, rk);
  2424. #endif
  2425. wolfSSL_CryptHwMutexUnLock();
  2426. ret = wc_AesSetIV(aes, iv);
  2427. }
  2428. if (tmpKeyDynamic == 1) {
  2429. XFREE(tmpKey - alignOffset, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
  2430. }
  2431. return ret;
  2432. }
/* Direct (single-block) key setup is identical to the block-mode setup
 * on the mmCAU port; forward to wc_AesSetKey(). */
int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
                       const byte* iv, int dir)
{
    return wc_AesSetKey(aes, userKey, keylen, iv, dir);
}
  2438. #elif defined(WOLFSSL_NRF51_AES)
  2439. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
  2440. const byte* iv, int dir)
  2441. {
  2442. int ret;
  2443. (void)dir;
  2444. (void)iv;
  2445. if (aes == NULL || keylen != 16)
  2446. return BAD_FUNC_ARG;
  2447. aes->keylen = keylen;
  2448. aes->rounds = keylen/4 + 6;
  2449. XMEMCPY(aes->key, userKey, keylen);
  2450. ret = nrf51_aes_set_key(userKey);
  2451. #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
  2452. defined(WOLFSSL_AES_OFB)
  2453. aes->left = 0;
  2454. #endif
  2455. return ret;
  2456. }
/* Direct (single-block) key setup is identical to the block-mode setup
 * on the nRF51 port; forward to wc_AesSetKey(). */
int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
                       const byte* iv, int dir)
{
    return wc_AesSetKey(aes, userKey, keylen, iv, dir);
}
  2462. #elif defined(WOLFSSL_ESP32_CRYPT) && !defined(NO_WOLFSSL_ESP32_CRYPT_AES)
/* This is the only key-setup definition when hardware crypto is used
 * exclusively; it must be renamed (and called from wc_AesSetKey()) when a
 * software fallback is also needed. */
/* ESP32 hardware AES - set key.
 * Validates the key length against both the generic 16/24/32 set and the
 * per-size WOLFSSL_AES_128/192/256 build gates, caches the key in
 * aes->key, and stores the IV via wc_AesSetIV().  dir is unused here.
 * Returns 0 on success or BAD_FUNC_ARG on NULL context / disallowed
 * key length. */
int wc_AesSetKey_for_ESP32(Aes* aes, const byte* userKey, word32 keylen,
                           const byte* iv, int dir)
{
    (void)dir;
    (void)iv;

    ESP_LOGV(TAG, "wc_AesSetKey_for_ESP32");
    if (aes == NULL || (keylen != 16 && keylen != 24 && keylen != 32)) {
        return BAD_FUNC_ARG;
    }

    /* Reject key sizes whose support was compiled out. */
#if !defined(WOLFSSL_AES_128)
    if (keylen == 16) {
        return BAD_FUNC_ARG;
    }
#endif

#if !defined(WOLFSSL_AES_192)
    if (keylen == 24) {
        return BAD_FUNC_ARG;
    }
#endif

#if !defined(WOLFSSL_AES_256)
    if (keylen == 32) {
        return BAD_FUNC_ARG;
    }
#endif

    aes->keylen = keylen;
    aes->rounds = keylen/4 + 6;

    XMEMCPY(aes->key, userKey, keylen);
    #if defined(WOLFSSL_AES_COUNTER)
        aes->left = 0;
    #endif
    return wc_AesSetIV(aes, iv);
} /* wc_AesSetKey */
  2498. /* end #elif ESP32 */
  2499. #elif defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
  2500. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, const byte* iv,
  2501. int dir)
  2502. {
  2503. SaSiError_t ret = SASI_OK;
  2504. SaSiAesIv_t iv_aes;
  2505. if (aes == NULL ||
  2506. (keylen != AES_128_KEY_SIZE &&
  2507. keylen != AES_192_KEY_SIZE &&
  2508. keylen != AES_256_KEY_SIZE)) {
  2509. return BAD_FUNC_ARG;
  2510. }
  2511. #if defined(AES_MAX_KEY_SIZE)
  2512. if (keylen > (AES_MAX_KEY_SIZE/8)) {
  2513. return BAD_FUNC_ARG;
  2514. }
  2515. #endif
  2516. if (dir != AES_ENCRYPTION &&
  2517. dir != AES_DECRYPTION) {
  2518. return BAD_FUNC_ARG;
  2519. }
  2520. if (dir == AES_ENCRYPTION) {
  2521. aes->ctx.mode = SASI_AES_ENCRYPT;
  2522. SaSi_AesInit(&aes->ctx.user_ctx,
  2523. SASI_AES_ENCRYPT,
  2524. SASI_AES_MODE_CBC,
  2525. SASI_AES_PADDING_NONE);
  2526. }
  2527. else {
  2528. aes->ctx.mode = SASI_AES_DECRYPT;
  2529. SaSi_AesInit(&aes->ctx.user_ctx,
  2530. SASI_AES_DECRYPT,
  2531. SASI_AES_MODE_CBC,
  2532. SASI_AES_PADDING_NONE);
  2533. }
  2534. aes->keylen = keylen;
  2535. aes->rounds = keylen/4 + 6;
  2536. XMEMCPY(aes->key, userKey, keylen);
  2537. aes->ctx.key.pKey = (byte*)aes->key;
  2538. aes->ctx.key.keySize= keylen;
  2539. ret = SaSi_AesSetKey(&aes->ctx.user_ctx,
  2540. SASI_AES_USER_KEY,
  2541. &aes->ctx.key,
  2542. sizeof(aes->ctx.key));
  2543. if (ret != SASI_OK) {
  2544. return BAD_FUNC_ARG;
  2545. }
  2546. ret = wc_AesSetIV(aes, iv);
  2547. if (iv)
  2548. XMEMCPY(iv_aes, iv, AES_BLOCK_SIZE);
  2549. else
  2550. XMEMSET(iv_aes, 0, AES_BLOCK_SIZE);
  2551. ret = SaSi_AesSetIv(&aes->ctx.user_ctx, iv_aes);
  2552. if (ret != SASI_OK) {
  2553. return ret;
  2554. }
  2555. return ret;
  2556. }
  2557. #if defined(WOLFSSL_AES_DIRECT)
  2558. int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
  2559. const byte* iv, int dir)
  2560. {
  2561. return wc_AesSetKey(aes, userKey, keylen, iv, dir);
  2562. }
  2563. #endif
  2564. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) \
  2565. && !defined(WOLFSSL_QNX_CAAM)
  2566. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  2567. #elif defined(WOLFSSL_AFALG)
  2568. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  2569. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  2570. /* implemented in wolfcrypt/src/port/devcrypto/devcrypto_aes.c */
  2571. #elif defined(WOLFSSL_SILABS_SE_ACCEL)
  2572. /* implemented in wolfcrypt/src/port/silabs/silabs_aes.c */
  2573. #elif defined(WOLFSSL_RENESAS_FSPSM_CRYPTONLY) && \
  2574. !defined(NO_WOLFSSL_RENESAS_FSPSM_AES)
  2575. /* implemented in wolfcrypt/src/port/renesas/renesas_fspsm_aes.c */
  2576. #else
  2577. #define NEED_SOFTWARE_AES_SETKEY
  2578. #endif
  2579. /* Either we fell though with no HW support at all,
  2580. * or perhaps there's HW support for *some* keylengths
  2581. * and we need both HW and SW. */
  2582. #ifdef NEED_SOFTWARE_AES_SETKEY
  2583. /* Software AES - SetKey */
/* Software AES - SetKey (shared worker).
 *
 * Central key-setup routine: after routing the key to any enabled hardware
 * back end (MAXQ10xx, CAAM blob/SECO, crypto callbacks, AES-NI, KCAPI, PSA,
 * SE050, ESP32, IMXRT DCP, SCE), it expands the user key into the round-key
 * schedule in aes->key per FIPS-197 and stores the IV via wc_AesSetIV.
 *
 * aes         - AES object to initialize
 * userKey     - raw key bytes
 * keylen      - key length in bytes (16/24/32 when checkKeyLen is set)
 * iv          - optional AES_BLOCK_SIZE IV; zeros used when NULL
 * dir         - AES_ENCRYPTION or AES_DECRYPTION (decrypt additionally
 *               inverts the round-key order and applies InvMixColumns)
 * checkKeyLen - when non-zero, reject key lengths other than 16/24/32
 *
 * Returns 0 on success or a negative wolfCrypt error code.
 */
static WARN_UNUSED_RESULT int wc_AesSetKeyLocal(
    Aes* aes, const byte* userKey, word32 keylen, const byte* iv, int dir,
    int checkKeyLen)
{
    int ret;
    word32 *rk;
#ifdef NEED_AES_TABLES
    word32 temp;
    unsigned int i = 0;
#endif
#ifdef WOLFSSL_IMX6_CAAM_BLOB
    byte local[32];
    word32 localSz = 32;
#endif

#ifdef WOLFSSL_MAXQ10XX_CRYPTO
    /* Mirror the key into the MAXQ10xx device before software setup. */
    if (wc_MAXQ10XX_AesSetKey(aes, userKey, keylen) != 0) {
        return WC_HW_E;
    }
#endif

#ifdef WOLFSSL_IMX6_CAAM_BLOB
    /* Key may arrive wrapped as a CAAM blob; unwrap it into 'local' and
     * continue with the decapsulated key material. */
    if (keylen == (16 + WC_CAAM_BLOB_SZ) ||
        keylen == (24 + WC_CAAM_BLOB_SZ) ||
        keylen == (32 + WC_CAAM_BLOB_SZ)) {
        if (wc_caamOpenBlob((byte*)userKey, keylen, local, &localSz) != 0) {
            return BAD_FUNC_ARG;
        }

        /* set local values */
        userKey = local;
        keylen = localSz;
    }
#endif

#ifdef WOLFSSL_SECO_CAAM
    /* if set to use hardware than import the key */
    if (aes->devId == WOLFSSL_SECO_DEVID) {
        int keyGroup = 1; /* group one was chosen arbitrarily */
        unsigned int keyIdOut;
        byte importiv[GCM_NONCE_MID_SZ];
        int importivSz = GCM_NONCE_MID_SZ;
        int keyType = 0;
        WC_RNG rng;

        /* Random IV is only used to wrap the key for import into SECO. */
        if (wc_InitRng(&rng) != 0) {
            WOLFSSL_MSG("RNG init for IV failed");
            return WC_HW_E;
        }

        if (wc_RNG_GenerateBlock(&rng, importiv, importivSz) != 0) {
            WOLFSSL_MSG("Generate IV failed");
            wc_FreeRng(&rng);
            return WC_HW_E;
        }
        wc_FreeRng(&rng);

        if (iv)
            XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE);
        else
            XMEMSET(aes->reg, 0, AES_BLOCK_SIZE);

        switch (keylen) {
            case AES_128_KEY_SIZE: keyType = CAAM_KEYTYPE_AES128; break;
            case AES_192_KEY_SIZE: keyType = CAAM_KEYTYPE_AES192; break;
            case AES_256_KEY_SIZE: keyType = CAAM_KEYTYPE_AES256; break;
        }

        /* Hardware holds the key; only the returned handle is stored. */
        keyIdOut = wc_SECO_WrapKey(0, (byte*)userKey, keylen, importiv,
            importivSz, keyType, CAAM_KEY_TRANSIENT, keyGroup);
        if (keyIdOut == 0) {
            return WC_HW_E;
        }
        aes->blackKey = keyIdOut;
        return 0;
    }
#endif

#if defined(WOLF_CRYPTO_CB) || (defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))) || \
    (defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES))
    /* Keep a raw copy of the key for crypto-callback / devcrypto / async
     * back ends that need the original bytes rather than the schedule. */
    #ifdef WOLF_CRYPTO_CB
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        if (keylen > sizeof(aes->devKey)) {
            return BAD_FUNC_ARG;
        }
        XMEMCPY(aes->devKey, userKey, keylen);
    }
#endif

    if (checkKeyLen) {
        if (keylen != 16 && keylen != 24 && keylen != 32) {
            return BAD_FUNC_ARG;
        }
    #if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE < 256
        /* Check key length only when AES_MAX_KEY_SIZE doesn't allow
         * all key sizes. Otherwise this condition is never true. */
        if (keylen > (AES_MAX_KEY_SIZE / 8)) {
            return BAD_FUNC_ARG;
        }
    #endif
    }

#if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
    defined(WOLFSSL_AES_OFB)
    /* Discard any buffered partial-block keystream from a prior key. */
    aes->left = 0;
#endif

    aes->keylen = (int)keylen;
    aes->rounds = (keylen/4) + 6; /* 10/12/14 rounds for 16/24/32 bytes */

#ifdef WOLFSSL_AESNI
    aes->use_aesni = 0;
    /* Probe CPU support once and cache the result. */
    if (checkAESNI == 0) {
        haveAESNI = Check_CPU_support_AES();
        checkAESNI = 1;
    }
    if (haveAESNI) {
        #ifdef WOLFSSL_LINUXKM
        /* runtime alignment check */
        if ((wc_ptr_t)&aes->key & (wc_ptr_t)0xf) {
            return BAD_ALIGN_E;
        }
        #endif
        aes->use_aesni = 1;
        if (iv)
            XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE);
        else
            XMEMSET(aes->reg, 0, AES_BLOCK_SIZE);
        /* AES-NI builds its own schedule; skip the table-based expansion. */
        if (dir == AES_ENCRYPTION)
            return AES_set_encrypt_key(userKey, (int)keylen * 8, aes);
    #ifdef HAVE_AES_DECRYPT
        else
            return AES_set_decrypt_key(userKey, (int)keylen * 8, aes);
    #endif
    }
#endif /* WOLFSSL_AESNI */

#ifdef WOLFSSL_KCAPI_AES
    /* Kernel crypto API keeps the raw key; tear down any open handle so it
     * is re-created with the new key on next use. */
    XMEMCPY(aes->devKey, userKey, keylen);
    if (aes->init != 0) {
        kcapi_cipher_destroy(aes->handle);
        aes->handle = NULL;
        aes->init = 0;
    }
    (void)dir;
#endif

    if (keylen > sizeof(aes->key)) {
        return BAD_FUNC_ARG;
    }
#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    return wc_psa_aes_set_key(aes, userKey, keylen, (uint8_t*)iv,
        ((psa_algorithm_t)0), dir);
#endif

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    /* wolfSSL HostCrypto in SE05x SDK can request to use SW crypto
     * instead of SE05x crypto by setting useSWCrypt */
    if (aes->useSWCrypt == 0) {
        ret = se050_aes_set_key(aes, userKey, keylen, iv, dir);
        if (ret == 0) {
            ret = wc_AesSetIV(aes, iv);
        }
        return ret;
    }
#endif

    rk = aes->key;
    XMEMCPY(rk, userKey, keylen);
#if defined(LITTLE_ENDIAN_ORDER) && !defined(WOLFSSL_PIC32MZ_CRYPT) && \
    (!defined(WOLFSSL_ESP32_CRYPT) || \
      defined(NO_WOLFSSL_ESP32_CRYPT_AES))
    /* software: table expansion below operates on big-endian words */
    ByteReverseWords(rk, rk, keylen);
#elif defined(WOLFSSL_ESP32_CRYPT) && !defined(NO_WOLFSSL_ESP32_CRYPT_AES)
    if (wc_esp32AesSupportedKeyLen(aes)) {
        /* supported lengths don't get reversed */
        ESP_LOGV(TAG, "wc_AesSetKeyLocal (no ByteReverseWords)");
    }
    else {
        /* For example, the ESP32-S3 does not support HW for len = 24,
         * so fall back to SW */
    #ifdef DEBUG_WOLFSSL
        ESP_LOGW(TAG, "wc_AesSetKeyLocal ByteReverseWords");
    #endif
        /* When not ESP32 HW, we need to reverse endianess */
        ByteReverseWords(rk, rk, keylen);
    }
#endif

#ifdef WOLFSSL_IMXRT_DCP
    /* Implemented in wolfcrypt/src/port/nxp/dcp_port.c */
    /* NOTE(review): 'temp' is declared under NEED_AES_TABLES; this section
     * appears to assume NEED_AES_TABLES is defined in DCP builds — confirm. */
    temp = 0;
    if (keylen == 16)
        temp = DCPAesSetKey(aes, userKey, keylen, iv, dir);
    if (temp != 0)
        return WC_HW_E;
#endif

#ifdef NEED_AES_TABLES
    /* FIPS-197 key expansion: each case unrolls one Nk-word step of the
     * schedule; Te[]/Tsbox provide the SubWord lookup and rcon[] the round
     * constant. 'rk' walks forward through aes->key. */
    switch (keylen) {
#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 128 && \
    defined(WOLFSSL_AES_128)
    case 16:
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        temp = (word32)-1;
        wc_MemZero_Add("wc_AesSetKeyLocal temp", &temp, sizeof(temp));
    #endif
        while (1)
        {
            temp  = rk[3];
            rk[4] = rk[0] ^
#ifndef WOLFSSL_AES_SMALL_TABLES
                (GetTable(Te[2], GETBYTE(temp, 2)) & 0xff000000) ^
                (GetTable(Te[3], GETBYTE(temp, 1)) & 0x00ff0000) ^
                (GetTable(Te[0], GETBYTE(temp, 0)) & 0x0000ff00) ^
                (GetTable(Te[1], GETBYTE(temp, 3)) & 0x000000ff) ^
#else
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 2)) << 24) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 1)) << 16) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 0)) <<  8) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 3))) ^
#endif
                rcon[i];
            rk[5] = rk[1] ^ rk[4];
            rk[6] = rk[2] ^ rk[5];
            rk[7] = rk[3] ^ rk[6];
            if (++i == 10) /* AES-128: 10 expansion steps */
                break;
            rk += 4;
        }
        break;
#endif /* 128 */

#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 192 && \
    defined(WOLFSSL_AES_192)
    case 24:
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        temp = (word32)-1;
        wc_MemZero_Add("wc_AesSetKeyLocal temp", &temp, sizeof(temp));
    #endif
        /* for (;;) here triggers a bug in VC60 SP4 w/ Pro Pack */
        while (1)
        {
            temp = rk[ 5];
            rk[ 6] = rk[ 0] ^
#ifndef WOLFSSL_AES_SMALL_TABLES
                (GetTable(Te[2], GETBYTE(temp, 2)) & 0xff000000) ^
                (GetTable(Te[3], GETBYTE(temp, 1)) & 0x00ff0000) ^
                (GetTable(Te[0], GETBYTE(temp, 0)) & 0x0000ff00) ^
                (GetTable(Te[1], GETBYTE(temp, 3)) & 0x000000ff) ^
#else
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 2)) << 24) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 1)) << 16) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 0)) <<  8) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 3))) ^
#endif
                rcon[i];
            rk[ 7] = rk[ 1] ^ rk[ 6];
            rk[ 8] = rk[ 2] ^ rk[ 7];
            rk[ 9] = rk[ 3] ^ rk[ 8];
            if (++i == 8) /* AES-192: 8 expansion steps */
                break;
            rk[10] = rk[ 4] ^ rk[ 9];
            rk[11] = rk[ 5] ^ rk[10];
            rk += 6;
        }
        break;
#endif /* 192 */

#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 256 && \
    defined(WOLFSSL_AES_256)
    case 32:
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        temp = (word32)-1;
        wc_MemZero_Add("wc_AesSetKeyLocal temp", &temp, sizeof(temp));
    #endif
        while (1)
        {
            temp = rk[ 7];
            rk[ 8] = rk[ 0] ^
#ifndef WOLFSSL_AES_SMALL_TABLES
                (GetTable(Te[2], GETBYTE(temp, 2)) & 0xff000000) ^
                (GetTable(Te[3], GETBYTE(temp, 1)) & 0x00ff0000) ^
                (GetTable(Te[0], GETBYTE(temp, 0)) & 0x0000ff00) ^
                (GetTable(Te[1], GETBYTE(temp, 3)) & 0x000000ff) ^
#else
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 2)) << 24) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 1)) << 16) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 0)) <<  8) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 3))) ^
#endif
                rcon[i];
            rk[ 9] = rk[ 1] ^ rk[ 8];
            rk[10] = rk[ 2] ^ rk[ 9];
            rk[11] = rk[ 3] ^ rk[10];
            if (++i == 7) /* AES-256: 7 expansion steps */
                break;
            /* AES-256 applies SubWord (no rotate) mid-step as well */
            temp = rk[11];
            rk[12] = rk[ 4] ^
#ifndef WOLFSSL_AES_SMALL_TABLES
                (GetTable(Te[2], GETBYTE(temp, 3)) & 0xff000000) ^
                (GetTable(Te[3], GETBYTE(temp, 2)) & 0x00ff0000) ^
                (GetTable(Te[0], GETBYTE(temp, 1)) & 0x0000ff00) ^
                (GetTable(Te[1], GETBYTE(temp, 0)) & 0x000000ff);
#else
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 3)) << 24) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 2)) << 16) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 1)) <<  8) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 0)));
#endif
            rk[13] = rk[ 5] ^ rk[12];
            rk[14] = rk[ 6] ^ rk[13];
            rk[15] = rk[ 7] ^ rk[14];

            rk += 8;
        }
        break;
#endif /* 256 */

    default:
        return BAD_FUNC_ARG;
    } /* switch */
    ForceZero(&temp, sizeof(temp)); /* scrub key-derived intermediate */

#if defined(HAVE_AES_DECRYPT)
    if (dir == AES_DECRYPTION) {
        unsigned int j;
        rk = aes->key;

        /* invert the order of the round keys: */
        for (i = 0, j = 4* aes->rounds; i < j; i += 4, j -= 4) {
            temp = rk[i    ]; rk[i    ] = rk[j    ]; rk[j    ] = temp;
            temp = rk[i + 1]; rk[i + 1] = rk[j + 1]; rk[j + 1] = temp;
            temp = rk[i + 2]; rk[i + 2] = rk[j + 2]; rk[j + 2] = temp;
            temp = rk[i + 3]; rk[i + 3] = rk[j + 3]; rk[j + 3] = temp;
        }
        ForceZero(&temp, sizeof(temp));
#if !defined(WOLFSSL_AES_SMALL_TABLES)
        /* apply the inverse MixColumn transform to all round keys but the
           first and the last: */
        for (i = 1; i < aes->rounds; i++) {
            rk += 4;
            rk[0] =
                GetTable(Td[0], GetTable(Te[1], GETBYTE(rk[0], 3)) & 0xff) ^
                GetTable(Td[1], GetTable(Te[1], GETBYTE(rk[0], 2)) & 0xff) ^
                GetTable(Td[2], GetTable(Te[1], GETBYTE(rk[0], 1)) & 0xff) ^
                GetTable(Td[3], GetTable(Te[1], GETBYTE(rk[0], 0)) & 0xff);
            rk[1] =
                GetTable(Td[0], GetTable(Te[1], GETBYTE(rk[1], 3)) & 0xff) ^
                GetTable(Td[1], GetTable(Te[1], GETBYTE(rk[1], 2)) & 0xff) ^
                GetTable(Td[2], GetTable(Te[1], GETBYTE(rk[1], 1)) & 0xff) ^
                GetTable(Td[3], GetTable(Te[1], GETBYTE(rk[1], 0)) & 0xff);
            rk[2] =
                GetTable(Td[0], GetTable(Te[1], GETBYTE(rk[2], 3)) & 0xff) ^
                GetTable(Td[1], GetTable(Te[1], GETBYTE(rk[2], 2)) & 0xff) ^
                GetTable(Td[2], GetTable(Te[1], GETBYTE(rk[2], 1)) & 0xff) ^
                GetTable(Td[3], GetTable(Te[1], GETBYTE(rk[2], 0)) & 0xff);
            rk[3] =
                GetTable(Td[0], GetTable(Te[1], GETBYTE(rk[3], 3)) & 0xff) ^
                GetTable(Td[1], GetTable(Te[1], GETBYTE(rk[3], 2)) & 0xff) ^
                GetTable(Td[2], GetTable(Te[1], GETBYTE(rk[3], 1)) & 0xff) ^
                GetTable(Td[3], GetTable(Te[1], GETBYTE(rk[3], 0)) & 0xff);
        }
#endif
    }
#else
    (void)dir;
#endif /* HAVE_AES_DECRYPT */
    (void)temp;
#endif /* NEED_AES_TABLES */

#if defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
    /* SCE consumes the raw key, byte-reversed on big-endian configs. */
    XMEMCPY((byte*)aes->key, userKey, keylen);
    if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag == CRYPTO_WORD_ENDIAN_BIG) {
        ByteReverseWords(aes->key, aes->key, 32);
    }
#endif

    ret = wc_AesSetIV(aes, iv);

#if defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))
    /* mark the /dev/crypto session descriptor as not-yet-open */
    aes->ctx.cfd = -1;
#endif
#ifdef WOLFSSL_IMX6_CAAM_BLOB
    ForceZero(local, sizeof(local)); /* scrub unwrapped key copy */
#endif
#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(&temp, sizeof(temp));
#endif

    return ret;
} /* wc_AesSetKeyLocal */
  2951. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
  2952. const byte* iv, int dir)
  2953. {
  2954. if (aes == NULL) {
  2955. return BAD_FUNC_ARG;
  2956. }
  2957. if (keylen > sizeof(aes->key)) {
  2958. return BAD_FUNC_ARG;
  2959. }
  2960. /* sometimes hardware may not support all keylengths (e.g. ESP32-S3) */
  2961. #if defined(WOLFSSL_ESPIDF) && defined(NEED_AES_HW_FALLBACK)
  2962. ESP_LOGV(TAG, "wc_AesSetKey fallback check %d", keylen);
  2963. if (wc_esp32AesSupportedKeyLenValue(keylen)) {
  2964. ESP_LOGV(TAG, "wc_AesSetKey calling wc_AesSetKey_for_ESP32");
  2965. return wc_AesSetKey_for_ESP32(aes, userKey, keylen, iv, dir);
  2966. }
  2967. else {
  2968. #ifdef DEBUG_WOLFSSL
  2969. ESP_LOGW(TAG, "wc_AesSetKey HW Fallback, unsupported keylen = %d",
  2970. keylen);
  2971. #endif
  2972. }
  2973. #endif
  2974. return wc_AesSetKeyLocal(aes, userKey, keylen, iv, dir, 1);
  2975. } /* wc_AesSetKey() */
  2976. #if defined(WOLFSSL_AES_DIRECT) || defined(WOLFSSL_AES_COUNTER)
  2977. /* AES-CTR and AES-DIRECT need to use this for key setup */
  2978. /* This function allows key sizes that are not 128/192/256 bits */
  2979. int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
  2980. const byte* iv, int dir)
  2981. {
  2982. if (aes == NULL) {
  2983. return BAD_FUNC_ARG;
  2984. }
  2985. if (keylen > sizeof(aes->key)) {
  2986. return BAD_FUNC_ARG;
  2987. }
  2988. return wc_AesSetKeyLocal(aes, userKey, keylen, iv, dir, 0);
  2989. }
  2990. #endif /* WOLFSSL_AES_DIRECT || WOLFSSL_AES_COUNTER */
  2991. #endif /* wc_AesSetKey block */
  2992. /* wc_AesSetIV is shared between software and hardware */
  2993. int wc_AesSetIV(Aes* aes, const byte* iv)
  2994. {
  2995. if (aes == NULL)
  2996. return BAD_FUNC_ARG;
  2997. if (iv)
  2998. XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE);
  2999. else
  3000. XMEMSET(aes->reg, 0, AES_BLOCK_SIZE);
  3001. #if defined(WOLFSSL_AES_COUNTER) || defined(WOLFSSL_AES_CFB) || \
  3002. defined(WOLFSSL_AES_OFB) || defined(WOLFSSL_AES_XTS)
  3003. /* Clear any unused bytes from last cipher op. */
  3004. aes->left = 0;
  3005. #endif
  3006. return 0;
  3007. }
  3008. /* AES-DIRECT */
  3009. #if defined(WOLFSSL_AES_DIRECT)
  3010. #if defined(HAVE_COLDFIRE_SEC)
  3011. #error "Coldfire SEC doesn't yet support AES direct"
  3012. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  3013. !defined(WOLFSSL_QNX_CAAM)
  3014. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  3015. #elif defined(WOLFSSL_AFALG)
  3016. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  3017. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  3018. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  3019. #elif defined(WOLFSSL_LINUXKM) && defined(WOLFSSL_AESNI)
/* Single-block AES encrypt for Linux kernel builds with AES-NI.
 * Wraps wc_AesEncrypt with vector-register save/restore, required before
 * using SIMD instructions in kernel context. SAVE_VECTOR_REGISTERS expands
 * to code that may itself return (_svr_ret) on failure. */
WARN_UNUSED_RESULT int wc_AesEncryptDirect(
    Aes* aes, byte* out, const byte* in)
{
    int ret;
    /* only save/restore when the AES-NI path will actually run */
    if (haveAESNI && aes->use_aesni)
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
    ret = wc_AesEncrypt(aes, in, out);
    if (haveAESNI && aes->use_aesni)
        RESTORE_VECTOR_REGISTERS();
    return ret;
}
  3031. /* vector reg save/restore is explicit in all below calls to
  3032. * wc_Aes{En,De}cryptDirect(), so bypass the public version with a
  3033. * macro.
  3034. */
  3035. #define wc_AesEncryptDirect(aes, out, in) wc_AesEncrypt(aes, in, out)
  3036. #ifdef HAVE_AES_DECRYPT
  3037. /* Allow direct access to one block decrypt */
/* Single-block AES decrypt for Linux kernel builds with AES-NI.
 * Mirrors wc_AesEncryptDirect above: wraps wc_AesDecrypt with kernel
 * vector-register save/restore around the SIMD path. */
WARN_UNUSED_RESULT int wc_AesDecryptDirect(
    Aes* aes, byte* out, const byte* in)
{
    int ret;
    /* only save/restore when the AES-NI path will actually run */
    if (haveAESNI && aes->use_aesni)
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
    ret = wc_AesDecrypt(aes, in, out);
    if (haveAESNI && aes->use_aesni)
        RESTORE_VECTOR_REGISTERS();
    return ret;
}
  3049. #define wc_AesDecryptDirect(aes, out, in) wc_AesDecrypt(aes, in, out)
  3050. #endif /* HAVE_AES_DECRYPT */
  3051. #else
  3052. /* Allow direct access to one block encrypt */
  3053. int wc_AesEncryptDirect(Aes* aes, byte* out, const byte* in)
  3054. {
  3055. return wc_AesEncrypt(aes, in, out);
  3056. }
  3057. #ifdef HAVE_AES_DECRYPT
  3058. /* Allow direct access to one block decrypt */
  3059. int wc_AesDecryptDirect(Aes* aes, byte* out, const byte* in)
  3060. {
  3061. return wc_AesDecrypt(aes, in, out);
  3062. }
  3063. #endif /* HAVE_AES_DECRYPT */
  3064. #endif /* AES direct block */
  3065. #endif /* WOLFSSL_AES_DIRECT */
  3066. /* AES-CBC */
  3067. #ifdef HAVE_AES_CBC
  3068. #if defined(STM32_CRYPTO)
  3069. #ifdef WOLFSSL_STM32_CUBEMX
/* AES-CBC encrypt via STM32 CubeMX HAL.
 * Processes only whole AES blocks (any trailing partial block is ignored
 * unless WOLFSSL_AES_CBC_LENGTH_CHECKS rejects it), serializes hardware
 * access with the crypto mutex, and saves the final ciphertext block into
 * aes->reg as the chaining IV for the next call.
 * Returns 0 on success, a mutex/init error, or WC_TIMEOUT_E on HAL failure. */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret = 0;
    CRYP_HandleTypeDef hcryp;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0)
        return ret;

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_CBC;
    /* HAL v2 expects the IV words byte-reversed in place */
    ByteReverseWords(aes->reg, aes->reg, AES_BLOCK_SIZE);
#elif defined(STM32_CRYPTO_AES_ONLY)
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
    hcryp.Init.ChainingMode  = CRYP_CHAINMODE_AES_CBC;
    hcryp.Init.KeyWriteFlag  = CRYP_KEY_WRITE_ENABLE;
#endif
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
    HAL_CRYP_Init(&hcryp);

#if defined(STM32_HAL_V2)
    ret = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in, blocks * AES_BLOCK_SIZE,
        (uint32_t*)out, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
    ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)in, blocks * AES_BLOCK_SIZE,
        out, STM32_HAL_TIMEOUT);
#else
    ret = HAL_CRYP_AESCBC_Encrypt(&hcryp, (uint8_t*)in,
                                  blocks * AES_BLOCK_SIZE,
                                  out, STM32_HAL_TIMEOUT);
#endif
    if (ret != HAL_OK) {
        ret = WC_TIMEOUT_E;
    }

    /* store iv for next call */
    XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

    HAL_CRYP_DeInit(&hcryp);
    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();

    return ret;
}
  3120. #ifdef HAVE_AES_DECRYPT
/* AES-CBC decrypt via STM32 CubeMX HAL.
 * The last ciphertext block is stashed in aes->tmp *before* decrypting so
 * the chaining IV survives even when in and out alias, then copied into
 * aes->reg afterwards for the next call.
 * Returns 0 on success, a mutex/init error, or WC_TIMEOUT_E on HAL failure. */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret = 0;
    CRYP_HandleTypeDef hcryp;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0)
        return ret;

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

    /* if input and output same will overwrite input iv */
    XMEMCPY(aes->tmp, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_CBC;
    /* HAL v2 expects the IV words byte-reversed in place */
    ByteReverseWords(aes->reg, aes->reg, AES_BLOCK_SIZE);
#elif defined(STM32_CRYPTO_AES_ONLY)
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_KEYDERIVATION_DECRYPT;
    hcryp.Init.ChainingMode  = CRYP_CHAINMODE_AES_CBC;
    hcryp.Init.KeyWriteFlag  = CRYP_KEY_WRITE_ENABLE;
#endif
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
    HAL_CRYP_Init(&hcryp);

#if defined(STM32_HAL_V2)
    ret = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)in, blocks * AES_BLOCK_SIZE,
        (uint32_t*)out, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
    ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)in, blocks * AES_BLOCK_SIZE,
        out, STM32_HAL_TIMEOUT);
#else
    ret = HAL_CRYP_AESCBC_Decrypt(&hcryp, (uint8_t*)in,
                                  blocks * AES_BLOCK_SIZE,
                                  out, STM32_HAL_TIMEOUT);
#endif
    if (ret != HAL_OK) {
        ret = WC_TIMEOUT_E;
    }

    /* store iv for next call */
    XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);

    HAL_CRYP_DeInit(&hcryp);
    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();

    return ret;
}
  3173. #endif /* HAVE_AES_DECRYPT */
  3174. #else /* Standard Peripheral Library */
/* AES-CBC encrypt via STM32 Standard Peripheral Library.
 * Feeds the CRYP FIFO one 16-byte block at a time under the crypto mutex.
 * The IV words are byte-reversed in place (aes->reg) before being loaded
 * into the CRYP IV registers. */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret;
    word32 *iv;
    CRYP_InitTypeDef cryptInit;
    CRYP_KeyInitTypeDef keyInit;
    CRYP_IVInitTypeDef ivInit;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
    if (ret != 0)
        return ret;

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

    /* reset registers to their default values */
    CRYP_DeInit();

    /* set key */
    CRYP_KeyInit(&keyInit);

    /* set iv */
    iv = aes->reg;
    CRYP_IVStructInit(&ivInit);
    ByteReverseWords(iv, iv, AES_BLOCK_SIZE);
    ivInit.CRYP_IV0Left  = iv[0];
    ivInit.CRYP_IV0Right = iv[1];
    ivInit.CRYP_IV1Left  = iv[2];
    ivInit.CRYP_IV1Right = iv[3];
    CRYP_IVInit(&ivInit);

    /* set direction and mode */
    cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Encrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_CBC;
    CRYP_Init(&cryptInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    while (blocks--) {
        /* flush IN/OUT FIFOs */
        CRYP_FIFOFlush();

        /* push one 16-byte block into the IN FIFO as four words */
        CRYP_DataIn(*(uint32_t*)&in[0]);
        CRYP_DataIn(*(uint32_t*)&in[4]);
        CRYP_DataIn(*(uint32_t*)&in[8]);
        CRYP_DataIn(*(uint32_t*)&in[12]);

        /* wait until the complete message has been processed */
        while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

        *(uint32_t*)&out[0]  = CRYP_DataOut();
        *(uint32_t*)&out[4]  = CRYP_DataOut();
        *(uint32_t*)&out[8]  = CRYP_DataOut();
        *(uint32_t*)&out[12] = CRYP_DataOut();

        /* store iv for next call */
        /* NOTE(review): on non-final iterations out + sz - AES_BLOCK_SIZE
         * points at a block not yet written; the final iteration (sz ==
         * AES_BLOCK_SIZE) overwrites aes->reg with the true last ciphertext
         * block, so the end state looks correct — confirm intent. */
        XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

        sz  -= AES_BLOCK_SIZE;
        in  += AES_BLOCK_SIZE;
        out += AES_BLOCK_SIZE;
    }

    /* disable crypto processor */
    CRYP_Cmd(DISABLE);
    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();

    return ret;
}
  3241. #ifdef HAVE_AES_DECRYPT
/* AES-CBC decrypt via STM32 Standard Peripheral Library.
 * Performs the hardware's required two-phase sequence: first a key
 * preparation pass (CRYP_AlgoMode_AES_Key), then the CBC decrypt proper.
 * The chaining IV is captured from the input before decryption (aes->tmp)
 * so in/out aliasing is safe. */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret;
    word32 *iv;
    CRYP_InitTypeDef cryptInit;
    CRYP_KeyInitTypeDef keyInit;
    CRYP_IVInitTypeDef ivInit;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
    if (ret != 0)
        return ret;

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

    /* if input and output same will overwrite input iv */
    XMEMCPY(aes->tmp, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

    /* reset registers to their default values */
    CRYP_DeInit();

    /* set direction and key */
    CRYP_KeyInit(&keyInit);
    cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_Key;
    CRYP_Init(&cryptInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    /* wait until key has been prepared */
    while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

    /* set direction and mode */
    cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_CBC;
    CRYP_Init(&cryptInit);

    /* set iv */
    iv = aes->reg;
    CRYP_IVStructInit(&ivInit);
    ByteReverseWords(iv, iv, AES_BLOCK_SIZE);
    ivInit.CRYP_IV0Left  = iv[0];
    ivInit.CRYP_IV0Right = iv[1];
    ivInit.CRYP_IV1Left  = iv[2];
    ivInit.CRYP_IV1Right = iv[3];
    CRYP_IVInit(&ivInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    while (blocks--) {
        /* flush IN/OUT FIFOs */
        CRYP_FIFOFlush();

        /* push one 16-byte block into the IN FIFO as four words */
        CRYP_DataIn(*(uint32_t*)&in[0]);
        CRYP_DataIn(*(uint32_t*)&in[4]);
        CRYP_DataIn(*(uint32_t*)&in[8]);
        CRYP_DataIn(*(uint32_t*)&in[12]);

        /* wait until the complete message has been processed */
        while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

        *(uint32_t*)&out[0]  = CRYP_DataOut();
        *(uint32_t*)&out[4]  = CRYP_DataOut();
        *(uint32_t*)&out[8]  = CRYP_DataOut();
        *(uint32_t*)&out[12] = CRYP_DataOut();

        /* store iv for next call */
        XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);

        in  += AES_BLOCK_SIZE;
        out += AES_BLOCK_SIZE;
    }

    /* disable crypto processor */
    CRYP_Cmd(DISABLE);
    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();

    return ret;
}
  3316. #endif /* HAVE_AES_DECRYPT */
  3317. #endif /* WOLFSSL_STM32_CUBEMX */
  3318. #elif defined(HAVE_COLDFIRE_SEC)
/* Shared AES-CBC worker for the Coldfire SEC engine.
 * Builds a SEC descriptor (key, IV, in/out buffers), then processes the
 * data in AES_BUFFER_SIZE chunks, busy-polling the descriptor header for
 * completion. descHeader selects SEC_DESC_AES_CBC_ENCRYPT or
 * SEC_DESC_AES_CBC_DECRYPT; the chaining IV in aes->reg is updated from
 * the last ciphertext block of each chunk accordingly.
 * Returns 0 on success or BAD_FUNC_ARG / BAD_LENGTH_E on bad input. */
static WARN_UNUSED_RESULT int wc_AesCbcCrypt(
    Aes* aes, byte* po, const byte* pi, word32 sz, word32 descHeader)
{
#ifdef DEBUG_WOLFSSL
    int i; int stat1, stat2; int ret;
#endif
    int size;
    volatile int v;

    if ((pi == NULL) || (po == NULL))
        return BAD_FUNC_ARG;    /*wrong pointer*/

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif

    /* SEC descriptor, buffers and key/IV shadow registers are shared
     * globals, so the whole operation runs under the SEC mutex. */
    wc_LockMutex(&Mutex_AesSEC);

    /* Set descriptor for SEC */
    secDesc->length1 = 0x0;
    secDesc->pointer1 = NULL;

    secDesc->length2 = AES_BLOCK_SIZE;
    secDesc->pointer2 = (byte *)secReg; /* Initial Vector */

    /* derive the key byte length from the round count (10/12/14) */
    switch(aes->rounds) {
        case 10: secDesc->length3 = 16; break;
        case 12: secDesc->length3 = 24; break;
        case 14: secDesc->length3 = 32; break;
    }
    XMEMCPY(secKey, aes->key, secDesc->length3);

    secDesc->pointer3 = (byte *)secKey;
    secDesc->pointer4 = AESBuffIn;
    secDesc->pointer5 = AESBuffOut;
    secDesc->length6 = 0x0;
    secDesc->pointer6 = NULL;
    secDesc->length7 = 0x0;
    secDesc->pointer7 = NULL;
    secDesc->nextDescriptorPtr = NULL;

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    /* NOTE(review): with length checks on, every chunk is assumed to be a
     * full AES_BUFFER_SIZE — 'sz -= AES_BUFFER_SIZE' below would wrap if sz
     * were a block multiple but not a buffer multiple; confirm callers. */
    size = AES_BUFFER_SIZE;
#endif
    while (sz) {
        secDesc->header = descHeader;
        /* load the current chaining IV into the SEC IV shadow register */
        XMEMCPY(secReg, aes->reg, AES_BLOCK_SIZE);
#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
        sz -= AES_BUFFER_SIZE;
#else
        if (sz < AES_BUFFER_SIZE) {
            size = sz;
            sz = 0;
        } else {
            size = AES_BUFFER_SIZE;
            sz -= AES_BUFFER_SIZE;
        }
#endif

        secDesc->length4 = size;
        secDesc->length5 = size;

        XMEMCPY(AESBuffIn, pi, size);
        /* for decrypt, save the last ciphertext block before it may be
         * overwritten (in-place operation) — it becomes the next IV */
        if(descHeader == SEC_DESC_AES_CBC_DECRYPT) {
            XMEMCPY((void*)aes->tmp, (void*)&(pi[size-AES_BLOCK_SIZE]),
                    AES_BLOCK_SIZE);
        }

        /* Point SEC to the location of the descriptor */
        MCF_SEC_FR0 = (uint32)secDesc;
        /* Initialize SEC and wait for encryption to complete */
        MCF_SEC_CCCR0 = 0x0000001a;
        /* poll SISR to determine when channel is complete */
        v=0;
        while ((secDesc->header>> 24) != 0xff) v++;

#ifdef DEBUG_WOLFSSL
        ret = MCF_SEC_SISRH;
        stat1 = MCF_SEC_AESSR;
        stat2 = MCF_SEC_AESISR;
        if (ret & 0xe0000000) {
            db_printf("Aes_Cbc(i=%d):ISRH=%08x, AESSR=%08x, "
                      "AESISR=%08x\n", i, ret, stat1, stat2);
        }
#endif

        XMEMCPY(po, AESBuffOut, size);

        /* advance the chaining IV: last ciphertext block of this chunk */
        if (descHeader == SEC_DESC_AES_CBC_ENCRYPT) {
            XMEMCPY((void*)aes->reg, (void*)&(po[size-AES_BLOCK_SIZE]),
                    AES_BLOCK_SIZE);
        } else {
            XMEMCPY((void*)aes->reg, (void*)aes->tmp, AES_BLOCK_SIZE);
        }

        pi += size;
        po += size;
    }
    wc_UnLockMutex(&Mutex_AesSEC);
    return 0;
}
  3407. int wc_AesCbcEncrypt(Aes* aes, byte* po, const byte* pi, word32 sz)
  3408. {
  3409. return (wc_AesCbcCrypt(aes, po, pi, sz, SEC_DESC_AES_CBC_ENCRYPT));
  3410. }
  3411. #ifdef HAVE_AES_DECRYPT
  3412. int wc_AesCbcDecrypt(Aes* aes, byte* po, const byte* pi, word32 sz)
  3413. {
  3414. return (wc_AesCbcCrypt(aes, po, pi, sz, SEC_DESC_AES_CBC_DECRYPT));
  3415. }
  3416. #endif /* HAVE_AES_DECRYPT */
  3417. #elif defined(FREESCALE_LTC)
  3418. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3419. {
  3420. word32 keySize;
  3421. status_t status;
  3422. byte *iv, *enc_key;
  3423. word32 blocks = (sz / AES_BLOCK_SIZE);
  3424. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3425. if (sz % AES_BLOCK_SIZE) {
  3426. return BAD_LENGTH_E;
  3427. }
  3428. #endif
  3429. if (blocks == 0)
  3430. return 0;
  3431. iv = (byte*)aes->reg;
  3432. enc_key = (byte*)aes->key;
  3433. status = wc_AesGetKeySize(aes, &keySize);
  3434. if (status != 0) {
  3435. return status;
  3436. }
  3437. status = wolfSSL_CryptHwMutexLock();
  3438. if (status != 0)
  3439. return status;
  3440. status = LTC_AES_EncryptCbc(LTC_BASE, in, out, blocks * AES_BLOCK_SIZE,
  3441. iv, enc_key, keySize);
  3442. wolfSSL_CryptHwMutexUnLock();
  3443. /* store iv for next call */
  3444. if (status == kStatus_Success) {
  3445. XMEMCPY(iv, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3446. }
  3447. return (status == kStatus_Success) ? 0 : -1;
  3448. }
  3449. #ifdef HAVE_AES_DECRYPT
  3450. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3451. {
  3452. word32 keySize;
  3453. status_t status;
  3454. byte* iv, *dec_key;
  3455. byte temp_block[AES_BLOCK_SIZE];
  3456. word32 blocks = (sz / AES_BLOCK_SIZE);
  3457. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3458. if (sz % AES_BLOCK_SIZE) {
  3459. return BAD_LENGTH_E;
  3460. }
  3461. #endif
  3462. if (blocks == 0)
  3463. return 0;
  3464. iv = (byte*)aes->reg;
  3465. dec_key = (byte*)aes->key;
  3466. status = wc_AesGetKeySize(aes, &keySize);
  3467. if (status != 0) {
  3468. return status;
  3469. }
  3470. /* get IV for next call */
  3471. XMEMCPY(temp_block, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3472. status = wolfSSL_CryptHwMutexLock();
  3473. if (status != 0)
  3474. return status;
  3475. status = LTC_AES_DecryptCbc(LTC_BASE, in, out, blocks * AES_BLOCK_SIZE,
  3476. iv, dec_key, keySize, kLTC_EncryptKey);
  3477. wolfSSL_CryptHwMutexUnLock();
  3478. /* store IV for next call */
  3479. if (status == kStatus_Success) {
  3480. XMEMCPY(iv, temp_block, AES_BLOCK_SIZE);
  3481. }
  3482. return (status == kStatus_Success) ? 0 : -1;
  3483. }
  3484. #endif /* HAVE_AES_DECRYPT */
  3485. #elif defined(FREESCALE_MMCAU)
  3486. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3487. {
  3488. int offset = 0;
  3489. byte *iv;
  3490. byte temp_block[AES_BLOCK_SIZE];
  3491. word32 blocks = (sz / AES_BLOCK_SIZE);
  3492. int ret;
  3493. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3494. if (sz % AES_BLOCK_SIZE) {
  3495. return BAD_LENGTH_E;
  3496. }
  3497. #endif
  3498. if (blocks == 0)
  3499. return 0;
  3500. iv = (byte*)aes->reg;
  3501. while (blocks--) {
  3502. XMEMCPY(temp_block, in + offset, AES_BLOCK_SIZE);
  3503. /* XOR block with IV for CBC */
  3504. xorbuf(temp_block, iv, AES_BLOCK_SIZE);
  3505. ret = wc_AesEncrypt(aes, temp_block, out + offset);
  3506. if (ret != 0)
  3507. return ret;
  3508. offset += AES_BLOCK_SIZE;
  3509. /* store IV for next block */
  3510. XMEMCPY(iv, out + offset - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3511. }
  3512. return 0;
  3513. }
  3514. #ifdef HAVE_AES_DECRYPT
  3515. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3516. {
  3517. int offset = 0;
  3518. byte* iv;
  3519. byte temp_block[AES_BLOCK_SIZE];
  3520. word32 blocks = (sz / AES_BLOCK_SIZE);
  3521. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3522. if (sz % AES_BLOCK_SIZE) {
  3523. return BAD_LENGTH_E;
  3524. }
  3525. #endif
  3526. if (blocks == 0)
  3527. return 0;
  3528. iv = (byte*)aes->reg;
  3529. while (blocks--) {
  3530. XMEMCPY(temp_block, in + offset, AES_BLOCK_SIZE);
  3531. wc_AesDecrypt(aes, in + offset, out + offset);
  3532. /* XOR block with IV for CBC */
  3533. xorbuf(out + offset, iv, AES_BLOCK_SIZE);
  3534. /* store IV for next block */
  3535. XMEMCPY(iv, temp_block, AES_BLOCK_SIZE);
  3536. offset += AES_BLOCK_SIZE;
  3537. }
  3538. return 0;
  3539. }
  3540. #endif /* HAVE_AES_DECRYPT */
  3541. #elif defined(WOLFSSL_PIC32MZ_CRYPT)
  3542. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3543. {
  3544. int ret;
  3545. if (sz == 0)
  3546. return 0;
  3547. /* hardware fails on input that is not a multiple of AES block size */
  3548. if (sz % AES_BLOCK_SIZE != 0) {
  3549. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3550. return BAD_LENGTH_E;
  3551. #else
  3552. return BAD_FUNC_ARG;
  3553. #endif
  3554. }
  3555. ret = wc_Pic32AesCrypt(
  3556. aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
  3557. out, in, sz, PIC32_ENCRYPTION,
  3558. PIC32_ALGO_AES, PIC32_CRYPTOALGO_RCBC);
  3559. /* store iv for next call */
  3560. if (ret == 0) {
  3561. XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3562. }
  3563. return ret;
  3564. }
  3565. #ifdef HAVE_AES_DECRYPT
  3566. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3567. {
  3568. int ret;
  3569. byte scratch[AES_BLOCK_SIZE];
  3570. if (sz == 0)
  3571. return 0;
  3572. /* hardware fails on input that is not a multiple of AES block size */
  3573. if (sz % AES_BLOCK_SIZE != 0) {
  3574. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3575. return BAD_LENGTH_E;
  3576. #else
  3577. return BAD_FUNC_ARG;
  3578. #endif
  3579. }
  3580. XMEMCPY(scratch, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3581. ret = wc_Pic32AesCrypt(
  3582. aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
  3583. out, in, sz, PIC32_DECRYPTION,
  3584. PIC32_ALGO_AES, PIC32_CRYPTOALGO_RCBC);
  3585. /* store iv for next call */
  3586. if (ret == 0) {
  3587. XMEMCPY((byte*)aes->reg, scratch, AES_BLOCK_SIZE);
  3588. }
  3589. return ret;
  3590. }
  3591. #endif /* HAVE_AES_DECRYPT */
  3592. #elif defined(WOLFSSL_ESP32_CRYPT) && \
  3593. !defined(NO_WOLFSSL_ESP32_CRYPT_AES)
  3594. /* We'll use SW for fall back:
  3595. * unsupported key lengths
  3596. * hardware busy */
  3597. #define NEED_SW_AESCBC
  3598. #define NEED_AESCBC_HW_FALLBACK
  3599. #elif defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
  3600. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3601. {
  3602. return SaSi_AesBlock(&aes->ctx.user_ctx, (uint8_t*)in, sz, out);
  3603. }
  3604. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3605. {
  3606. return SaSi_AesBlock(&aes->ctx.user_ctx, (uint8_t*)in, sz, out);
  3607. }
  3608. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  3609. !defined(WOLFSSL_QNX_CAAM)
  3610. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  3611. #elif defined(WOLFSSL_AFALG)
  3612. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  3613. #elif defined(WOLFSSL_KCAPI_AES) && !defined(WOLFSSL_NO_KCAPI_AES_CBC)
  3614. /* implemented in wolfcrypt/src/port/kcapi/kcapi_aes.c */
  3615. #elif defined(WOLFSSL_DEVCRYPTO_CBC)
  3616. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  3617. #elif defined(WOLFSSL_SILABS_SE_ACCEL)
  3618. /* implemented in wolfcrypt/src/port/silabs/silabs_aes.c */
  3619. #elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
  3620. /* implemented in wolfcrypt/src/port/psa/psa_aes.c */
  3621. #else
  3622. /* Reminder: Some HW implementations may also define this as needed.
  3623. * (e.g. for unsupported key length fallback) */
  3624. #define NEED_SW_AESCBC
  3625. #endif
  3626. #ifdef NEED_SW_AESCBC
/* Software AES - CBC Encrypt */
/* Encrypt sz bytes from in to out in CBC mode. aes->reg holds the
 * chaining IV and is updated with the last ciphertext block so that
 * consecutive calls chain correctly. Depending on build options the
 * work may be delegated to DCP, a crypto callback, async hardware,
 * SE050, AES-NI, or ESP32 hardware before falling back to the pure
 * software loop at the bottom.
 * Returns 0 on success or a negative wolfCrypt error code. */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 blocks;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    if (sz == 0) {
        return 0;
    }

    blocks = sz / AES_BLOCK_SIZE;
#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    /* reject a trailing partial block; without this option it is
     * silently ignored (blocks was truncated above) */
    if (sz % AES_BLOCK_SIZE) {
        WOLFSSL_ERROR_VERBOSE(BAD_LENGTH_E);
        return BAD_LENGTH_E;
    }
#endif

#ifdef WOLFSSL_IMXRT_DCP
    /* Implemented in wolfcrypt/src/port/nxp/dcp_port.c */
    /* DCP path is only taken for 128-bit keys; other key sizes fall
     * through to the implementations below */
    if (aes->keylen == 16)
        return DCPAesCbcEncrypt(aes, out, in, sz);
#endif

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret = wc_CryptoCb_AesCbcEncrypt(aes, out, in, sz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    /* if async and byte count above threshold */
    if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
        sz >= WC_ASYNC_THRESH_AES_CBC) {
    #if defined(HAVE_CAVIUM)
        return NitroxAesCbcEncrypt(aes, out, in, sz);
    #elif defined(HAVE_INTEL_QA)
        return IntelQaSymAesCbcEncrypt(&aes->asyncDev, out, in, sz,
            (const byte*)aes->devKey, aes->keylen,
            (byte*)aes->reg, AES_BLOCK_SIZE);
    #elif defined(WOLFSSL_ASYNC_CRYPT_SW)
        /* queue the request for the software async worker; the caller
         * must retry until the pending operation completes */
        if (wc_AsyncSwInit(&aes->asyncDev, ASYNC_SW_AES_CBC_ENCRYPT)) {
            WC_ASYNC_SW* sw = &aes->asyncDev.sw;
            sw->aes.aes = aes;
            sw->aes.out = out;
            sw->aes.in = in;
            sw->aes.sz = sz;
            return WC_PENDING_E;
        }
    #endif
    }
#endif /* WOLFSSL_ASYNC_CRYPT */

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    /* Implemented in wolfcrypt/src/port/nxp/se050_port.c */
    if (aes->useSWCrypt == 0) {
        return se050_aes_crypt(aes, in, out, sz, AES_ENCRYPTION,
            kAlgorithm_SSS_AES_CBC);
    }
#endif

#ifdef WOLFSSL_AESNI
    if (haveAESNI) {
    #ifdef DEBUG_AESNI
        printf("about to aes cbc encrypt\n");
        printf("in = %p\n", in);
        printf("out = %p\n", out);
        printf("aes->key = %p\n", aes->key);
        printf("aes->reg = %p\n", aes->reg);
        printf("aes->rounds = %d\n", aes->rounds);
        printf("sz = %d\n", sz);
    #endif

        /* check alignment, decrypt doesn't need alignment */
        if ((wc_ptr_t)in % AESNI_ALIGN) {
    #ifndef NO_WOLFSSL_ALLOC_ALIGN
            /* misaligned input: copy into an AESNI_ALIGN-aligned scratch
             * buffer, encrypt in place, then copy the result out */
            byte* tmp = (byte*)XMALLOC(sz + AES_BLOCK_SIZE + AESNI_ALIGN,
                aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
            byte* tmp_align;
            if (tmp == NULL) return MEMORY_E;

            tmp_align = tmp + (AESNI_ALIGN - ((wc_ptr_t)tmp % AESNI_ALIGN));
            XMEMCPY(tmp_align, in, sz);
            /* free the scratch buffer if saving vector registers fails */
            SAVE_VECTOR_REGISTERS(XFREE(tmp, aes->heap, DYNAMIC_TYPE_TMP_BUFFER); return _svr_ret;);
            AES_CBC_encrypt(tmp_align, tmp_align, (byte*)aes->reg, sz,
                (byte*)aes->key, (int)aes->rounds);
            RESTORE_VECTOR_REGISTERS();
            /* store iv for next call */
            XMEMCPY(aes->reg, tmp_align + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

            XMEMCPY(out, tmp_align, sz);
            XFREE(tmp, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
            return 0;
    #else
            WOLFSSL_MSG("AES-CBC encrypt with bad alignment");
            WOLFSSL_ERROR_VERBOSE(BAD_ALIGN_E);
            return BAD_ALIGN_E;
    #endif
        }

        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_CBC_encrypt(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
            (int)aes->rounds);
        RESTORE_VECTOR_REGISTERS();
        /* store iv for next call */
        XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

        return 0;
    }
#endif

#if defined(WOLFSSL_ESPIDF) && defined(NEED_AESCBC_HW_FALLBACK)
    if (wc_esp32AesSupportedKeyLen(aes)) {
        ESP_LOGV(TAG, "wc_AesCbcEncrypt calling wc_esp32AesCbcEncrypt");
        return wc_esp32AesCbcEncrypt(aes, out, in, sz);
    }
    else {
        /* For example, the ESP32-S3 does not support HW for len = 24,
         * so fall back to SW */
    #ifdef DEBUG_WOLFSSL
        ESP_LOGW(TAG, "wc_AesCbcEncrypt HW Falling back, "
                      "unsupported keylen = %d", aes->keylen);
    #endif
    }
#endif

    /* pure software fallback: XOR each plaintext block into the running
     * IV held in aes->reg, encrypt it in place, and emit the result;
     * aes->reg naturally ends holding the final ciphertext block */
    while (blocks--) {
        int ret;
        xorbuf((byte*)aes->reg, in, AES_BLOCK_SIZE);
        ret = wc_AesEncrypt(aes, (byte*)aes->reg, (byte*)aes->reg);
        if (ret != 0)
            return ret;
        XMEMCPY(out, aes->reg, AES_BLOCK_SIZE);

        out += AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
    }

    return 0;
} /* wc_AesCbcEncrypt */
  3759. #ifdef HAVE_AES_DECRYPT
/* Software AES - CBC Decrypt */
/* Decrypt sz bytes from in to out in CBC mode. aes->reg holds the
 * chaining IV and is updated with the last input ciphertext block so
 * consecutive calls chain correctly. Hardware/async offload paths are
 * tried first depending on build options, with the software loop as
 * the final fallback.
 * Returns 0 on success or a negative wolfCrypt error code. */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 blocks;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    if (sz == 0) {
        return 0;
    }

#if defined(WOLFSSL_ESPIDF) && defined(NEED_AESCBC_HW_FALLBACK)
    /* checked before the block-multiple test: the ESP32 HW path does
     * its own length handling */
    if (wc_esp32AesSupportedKeyLen(aes)) {
        ESP_LOGV(TAG, "wc_AesCbcDecrypt calling wc_esp32AesCbcDecrypt");
        return wc_esp32AesCbcDecrypt(aes, out, in, sz);
    }
    else {
        /* For example, the ESP32-S3 does not support HW for len = 24,
         * so fall back to SW */
    #ifdef DEBUG_WOLFSSL
        ESP_LOGW(TAG, "wc_AesCbcDecrypt HW Falling back, "
                      "unsupported keylen = %d", aes->keylen);
    #endif
    }
#endif

    blocks = sz / AES_BLOCK_SIZE;
    /* unlike encrypt, decrypt always rejects a partial block; only the
     * error code depends on the length-checks option */
    if (sz % AES_BLOCK_SIZE) {
#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
        return BAD_LENGTH_E;
#else
        return BAD_FUNC_ARG;
#endif
    }

#ifdef WOLFSSL_IMXRT_DCP
    /* Implemented in wolfcrypt/src/port/nxp/dcp_port.c */
    /* DCP path is only taken for 128-bit keys */
    if (aes->keylen == 16)
        return DCPAesCbcDecrypt(aes, out, in, sz);
#endif

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret = wc_CryptoCb_AesCbcDecrypt(aes, out, in, sz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    /* if async and byte count above threshold */
    if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
        sz >= WC_ASYNC_THRESH_AES_CBC) {
    #if defined(HAVE_CAVIUM)
        return NitroxAesCbcDecrypt(aes, out, in, sz);
    #elif defined(HAVE_INTEL_QA)
        return IntelQaSymAesCbcDecrypt(&aes->asyncDev, out, in, sz,
            (const byte*)aes->devKey, aes->keylen,
            (byte*)aes->reg, AES_BLOCK_SIZE);
    #elif defined(WOLFSSL_ASYNC_CRYPT_SW)
        /* queue the request for the software async worker; the caller
         * must retry until the pending operation completes */
        if (wc_AsyncSwInit(&aes->asyncDev, ASYNC_SW_AES_CBC_DECRYPT)) {
            WC_ASYNC_SW* sw = &aes->asyncDev.sw;
            sw->aes.aes = aes;
            sw->aes.out = out;
            sw->aes.in = in;
            sw->aes.sz = sz;
            return WC_PENDING_E;
        }
    #endif
    }
#endif

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    /* Implemented in wolfcrypt/src/port/nxp/se050_port.c */
    if (aes->useSWCrypt == 0) {
        return se050_aes_crypt(aes, in, out, sz, AES_DECRYPTION,
            kAlgorithm_SSS_AES_CBC);
    }
#endif

#ifdef WOLFSSL_AESNI
    if (haveAESNI) {
    #ifdef DEBUG_AESNI
        printf("about to aes cbc decrypt\n");
        printf("in = %p\n", in);
        printf("out = %p\n", out);
        printf("aes->key = %p\n", aes->key);
        printf("aes->reg = %p\n", aes->reg);
        printf("aes->rounds = %d\n", aes->rounds);
        printf("sz = %d\n", sz);
    #endif

        /* if input and output same will overwrite input iv */
        XMEMCPY(aes->tmp, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
    #if defined(WOLFSSL_AESNI_BY4) || defined(WOLFSSL_X86_BUILD)
        AES_CBC_decrypt_by4(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
            aes->rounds);
    #elif defined(WOLFSSL_AESNI_BY6)
        AES_CBC_decrypt_by6(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
            aes->rounds);
    #else /* WOLFSSL_AESNI_BYx */
        AES_CBC_decrypt_by8(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
            (int)aes->rounds);
    #endif /* WOLFSSL_AESNI_BYx */
        /* store iv for next call */
        RESTORE_VECTOR_REGISTERS();
        XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
        return 0;
    }
#endif

    /* pure software fallback: decrypt each block, XOR with the previous
     * ciphertext block (the IV in aes->reg). aes->tmp keeps a copy of
     * the current ciphertext block so in-place decryption works. */
    while (blocks--) {
        int ret;
        XMEMCPY(aes->tmp, in, AES_BLOCK_SIZE);
        ret = wc_AesDecrypt(aes, (byte*)aes->tmp, out);
        if (ret != 0)
            return ret;
        xorbuf(out, (byte*)aes->reg, AES_BLOCK_SIZE);
        /* store iv for next call */
        XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);

        out += AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
    }

    return 0;
}
  3881. #endif /* HAVE_AES_DECRYPT */
  3882. #endif /* AES-CBC block */
  3883. #endif /* HAVE_AES_CBC */
  3884. /* AES-CTR */
  3885. #if defined(WOLFSSL_AES_COUNTER)
  3886. #ifdef STM32_CRYPTO
  3887. #define NEED_AES_CTR_SOFT
  3888. #define XTRANSFORM_AESCTRBLOCK wc_AesCtrEncryptBlock
/* Encrypt exactly one AES block in CTR mode using the STM32 crypto
 * peripheral. Used as XTRANSFORM_AESCTRBLOCK by the generic software
 * CTR driver, which handles counter increment and partial blocks.
 *
 * aes  initialized Aes context; aes->reg holds the current counter
 * out  destination for one AES_BLOCK_SIZE block of keystream XOR input
 * in   one AES_BLOCK_SIZE block of input
 *
 * Returns 0 on success or a negative wolfCrypt error code. */
int wc_AesCtrEncryptBlock(Aes* aes, byte* out, const byte* in)
{
    int ret = 0;
#ifdef WOLFSSL_STM32_CUBEMX
    CRYP_HandleTypeDef hcryp;
    #ifdef STM32_HAL_V2
    /* HAL v2 wants the IV as big-endian words, so a local copy is made */
    word32 iv[AES_BLOCK_SIZE/sizeof(word32)];
    #endif
#else
    word32 *iv;
    CRYP_InitTypeDef cryptInit;
    CRYP_KeyInitTypeDef keyInit;
    CRYP_IVInitTypeDef ivInit;
#endif

#ifdef WOLFSSL_STM32_CUBEMX
    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0) {
        return ret;
    }

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_CTR;
    /* convert the counter to the word order the peripheral expects */
    ByteReverseWords(iv, aes->reg, AES_BLOCK_SIZE);
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)iv;
#elif defined(STM32_CRYPTO_AES_ONLY)
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
    hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_CTR;
    hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
#else
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
#endif
    HAL_CRYP_Init(&hcryp);

    /* run a single block through the peripheral */
#if defined(STM32_HAL_V2)
    ret = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in, AES_BLOCK_SIZE,
        (uint32_t*)out, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
    ret = HAL_CRYPEx_AES(&hcryp, (byte*)in, AES_BLOCK_SIZE,
        out, STM32_HAL_TIMEOUT);
#else
    ret = HAL_CRYP_AESCTR_Encrypt(&hcryp, (byte*)in, AES_BLOCK_SIZE,
        out, STM32_HAL_TIMEOUT);
#endif
    if (ret != HAL_OK) {
        ret = WC_TIMEOUT_E;
    }
    HAL_CRYP_DeInit(&hcryp);

#else /* Standard Peripheral Library */
    ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
    if (ret != 0) {
        return ret;
    }

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

    /* reset registers to their default values */
    CRYP_DeInit();

    /* set key */
    CRYP_KeyInit(&keyInit);

    /* set iv */
    iv = aes->reg;
    CRYP_IVStructInit(&ivInit);
    /* peripheral registers take the counter words byte-reversed */
    ivInit.CRYP_IV0Left = ByteReverseWord32(iv[0]);
    ivInit.CRYP_IV0Right = ByteReverseWord32(iv[1]);
    ivInit.CRYP_IV1Left = ByteReverseWord32(iv[2]);
    ivInit.CRYP_IV1Right = ByteReverseWord32(iv[3]);
    CRYP_IVInit(&ivInit);

    /* set direction and mode */
    cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Encrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_CTR;
    CRYP_Init(&cryptInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    /* flush IN/OUT FIFOs */
    CRYP_FIFOFlush();

    /* feed one 16-byte block into the input FIFO, word by word */
    CRYP_DataIn(*(uint32_t*)&in[0]);
    CRYP_DataIn(*(uint32_t*)&in[4]);
    CRYP_DataIn(*(uint32_t*)&in[8]);
    CRYP_DataIn(*(uint32_t*)&in[12]);

    /* wait until the complete message has been processed */
    while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

    *(uint32_t*)&out[0] = CRYP_DataOut();
    *(uint32_t*)&out[4] = CRYP_DataOut();
    *(uint32_t*)&out[8] = CRYP_DataOut();
    *(uint32_t*)&out[12] = CRYP_DataOut();

    /* disable crypto processor */
    CRYP_Cmd(DISABLE);
#endif /* WOLFSSL_STM32_CUBEMX */

    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();
    return ret;
}
  3985. #elif defined(WOLFSSL_PIC32MZ_CRYPT)
  3986. #define NEED_AES_CTR_SOFT
  3987. #define XTRANSFORM_AESCTRBLOCK wc_AesCtrEncryptBlock
  3988. int wc_AesCtrEncryptBlock(Aes* aes, byte* out, const byte* in)
  3989. {
  3990. word32 tmpIv[AES_BLOCK_SIZE / sizeof(word32)];
  3991. XMEMCPY(tmpIv, aes->reg, AES_BLOCK_SIZE);
  3992. return wc_Pic32AesCrypt(
  3993. aes->key, aes->keylen, tmpIv, AES_BLOCK_SIZE,
  3994. out, in, AES_BLOCK_SIZE,
  3995. PIC32_ENCRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_RCTR);
  3996. }
  3997. #elif defined(HAVE_COLDFIRE_SEC)
  3998. #error "Coldfire SEC doesn't currently support AES-CTR mode"
  3999. #elif defined(FREESCALE_LTC)
  4000. int wc_AesCtrEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  4001. {
  4002. int ret = 0;
  4003. word32 keySize;
  4004. byte *iv, *enc_key;
  4005. byte* tmp;
  4006. if (aes == NULL || out == NULL || in == NULL) {
  4007. return BAD_FUNC_ARG;
  4008. }
  4009. /* consume any unused bytes left in aes->tmp */
  4010. tmp = (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left;
  4011. while (aes->left && sz) {
  4012. *(out++) = *(in++) ^ *(tmp++);
  4013. aes->left--;
  4014. sz--;
  4015. }
  4016. if (sz) {
  4017. iv = (byte*)aes->reg;
  4018. enc_key = (byte*)aes->key;
  4019. ret = wc_AesGetKeySize(aes, &keySize);
  4020. if (ret != 0)
  4021. return ret;
  4022. ret = wolfSSL_CryptHwMutexLock();
  4023. if (ret != 0)
  4024. return ret;
  4025. LTC_AES_CryptCtr(LTC_BASE, in, out, sz,
  4026. iv, enc_key, keySize, (byte*)aes->tmp,
  4027. (uint32_t*)&aes->left);
  4028. wolfSSL_CryptHwMutexUnLock();
  4029. }
  4030. return ret;
  4031. }
  4032. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  4033. !defined(WOLFSSL_QNX_CAAM)
  4034. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  4035. #elif defined(WOLFSSL_AFALG)
  4036. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  4037. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  4038. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  4039. #elif defined(WOLFSSL_ESP32_CRYPT) && \
  4040. !defined(NO_WOLFSSL_ESP32_CRYPT_AES)
  4041. /* esp32 doesn't support CRT mode by hw. */
  4042. /* use aes ecnryption plus sw implementation */
  4043. #define NEED_AES_CTR_SOFT
  4044. #elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
  4045. /* implemented in wolfcrypt/src/port/psa/psa_aes.c */
  4046. #else
  4047. /* Use software based AES counter */
  4048. #define NEED_AES_CTR_SOFT
  4049. #endif
  4050. #ifdef NEED_AES_CTR_SOFT
  4051. /* Increment AES counter */
  4052. static WC_INLINE void IncrementAesCounter(byte* inOutCtr)
  4053. {
  4054. /* in network byte order so start at end and work back */
  4055. int i;
  4056. for (i = AES_BLOCK_SIZE - 1; i >= 0; i--) {
  4057. if (++inOutCtr[i]) /* we're done unless we overflow */
  4058. return;
  4059. }
  4060. }
/* Software AES - CTR Encrypt */
/* Encrypt/decrypt sz bytes in CTR mode (the operation is symmetric).
 * aes->reg holds the big-endian counter; aes->tmp holds the unused tail
 * of the last keystream block and aes->left its byte count, so calls
 * with arbitrary (non block-aligned) sizes chain correctly.
 * Returns 0 on success or a negative wolfCrypt error code. */
int wc_AesCtrEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    byte scratch[AES_BLOCK_SIZE];
    int ret;
    word32 processed;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret = wc_CryptoCb_AesCtrEncrypt(aes, out, in, sz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

    /* consume any unused bytes left in aes->tmp */
    processed = min(aes->left, sz);
    xorbufout(out, in, (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left,
        processed);
    out += processed;
    in += processed;
    aes->left -= processed;
    sz -= processed;

#if defined(HAVE_AES_ECB) && !defined(WOLFSSL_PIC32MZ_CRYPT) && \
    !defined(XTRANSFORM_AESCTRBLOCK)
    /* batch path: only safe when in/out don't alias, because the counter
     * blocks are staged in the output buffer before encryption */
    if (in != out && sz >= AES_BLOCK_SIZE) {
        word32 blocks = sz / AES_BLOCK_SIZE;
        byte* counter = (byte*)aes->reg;
        byte* c = out;
        /* write successive counter values into out, then encrypt them
         * all with one ECB call and XOR the plaintext in */
        while (blocks--) {
            XMEMCPY(c, counter, AES_BLOCK_SIZE);
            c += AES_BLOCK_SIZE;
            IncrementAesCounter(counter);
        }

        /* reset number of blocks and then do encryption */
        blocks = sz / AES_BLOCK_SIZE;
        /* NOTE(review): wc_AesEcbEncrypt return value is not checked
         * here — confirm this matches intended error handling */
        wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
        xorbuf(out, in, AES_BLOCK_SIZE * blocks);
        in += AES_BLOCK_SIZE * blocks;
        out += AES_BLOCK_SIZE * blocks;
        sz -= blocks * AES_BLOCK_SIZE;
    }
    else
#endif
    {
        /* one-block-at-a-time path */
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Add("wc_AesCtrEncrypt scratch", scratch,
            AES_BLOCK_SIZE);
    #endif
        /* do as many block size ops as possible */
        while (sz >= AES_BLOCK_SIZE) {
        #ifdef XTRANSFORM_AESCTRBLOCK
            XTRANSFORM_AESCTRBLOCK(aes, out, in);
        #else
            /* keystream = E(counter); out = keystream XOR in */
            ret = wc_AesEncrypt(aes, (byte*)aes->reg, scratch);
            if (ret != 0) {
                ForceZero(scratch, AES_BLOCK_SIZE);
            #ifdef WOLFSSL_CHECK_MEM_ZERO
                wc_MemZero_Check(scratch, AES_BLOCK_SIZE);
            #endif
                return ret;
            }
            xorbuf(scratch, in, AES_BLOCK_SIZE);
            XMEMCPY(out, scratch, AES_BLOCK_SIZE);
        #endif
            IncrementAesCounter((byte*)aes->reg);

            out += AES_BLOCK_SIZE;
            in += AES_BLOCK_SIZE;
            sz -= AES_BLOCK_SIZE;
            aes->left = 0;
        }
        ForceZero(scratch, AES_BLOCK_SIZE);
    }

    /* handle non block size remaining and store unused byte count in left */
    if (sz) {
        /* produce one more keystream block into aes->tmp; the unused
         * tail stays there for the next call */
        ret = wc_AesEncrypt(aes, (byte*)aes->reg, (byte*)aes->tmp);
        if (ret != 0) {
            ForceZero(scratch, AES_BLOCK_SIZE);
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(scratch, AES_BLOCK_SIZE);
        #endif
            return ret;
        }
        IncrementAesCounter((byte*)aes->reg);

        aes->left = AES_BLOCK_SIZE - sz;
        xorbufout(out, in, aes->tmp, sz);
    }

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(scratch, AES_BLOCK_SIZE);
#endif
    return 0;
}
  4158. int wc_AesCtrSetKey(Aes* aes, const byte* key, word32 len,
  4159. const byte* iv, int dir)
  4160. {
  4161. if (aes == NULL) {
  4162. return BAD_FUNC_ARG;
  4163. }
  4164. if (len > sizeof(aes->key)) {
  4165. return BAD_FUNC_ARG;
  4166. }
  4167. return wc_AesSetKeyLocal(aes, key, len, iv, dir, 0);
  4168. }
  4169. #endif /* NEED_AES_CTR_SOFT */
  4170. #endif /* WOLFSSL_AES_COUNTER */
  4171. #endif /* !WOLFSSL_ARMASM */
  4172. /*
  4173. * The IV for AES GCM and CCM, stored in struct Aes's member reg, is comprised
  4174. * of two parts in order:
  4175. * 1. The fixed field which may be 0 or 4 bytes long. In TLS, this is set
  4176. * to the implicit IV.
  4177. * 2. The explicit IV is generated by wolfCrypt. It needs to be managed
  4178. * by wolfCrypt to ensure the IV is unique for each call to encrypt.
  4179. * The IV may be a 96-bit random value, or the 32-bit fixed value and a
  4180. * 64-bit set of 0 or random data. The final 32-bits of reg is used as a
  4181. * block counter during the encryption.
  4182. */
  4183. #if (defined(HAVE_AESGCM) && !defined(WC_NO_RNG)) || defined(HAVE_AESCCM)
  4184. static WC_INLINE void IncCtr(byte* ctr, word32 ctrSz)
  4185. {
  4186. int i;
  4187. for (i = (int)ctrSz - 1; i >= 0; i--) {
  4188. if (++ctr[i])
  4189. break;
  4190. }
  4191. }
  4192. #endif /* HAVE_AESGCM || HAVE_AESCCM */
  4193. #ifdef HAVE_AESGCM
  4194. #ifdef WOLFSSL_AESGCM_STREAM
  4195. /* Access initialization counter data. */
  4196. #define AES_INITCTR(aes) ((aes)->streamData + 0 * AES_BLOCK_SIZE)
  4197. /* Access counter data. */
  4198. #define AES_COUNTER(aes) ((aes)->streamData + 1 * AES_BLOCK_SIZE)
  4199. /* Access tag data. */
  4200. #define AES_TAG(aes) ((aes)->streamData + 2 * AES_BLOCK_SIZE)
  4201. /* Access last GHASH block. */
  4202. #define AES_LASTGBLOCK(aes) ((aes)->streamData + 3 * AES_BLOCK_SIZE)
  4203. /* Access last encrypted block. */
  4204. #define AES_LASTBLOCK(aes) ((aes)->streamData + 4 * AES_BLOCK_SIZE)
  4205. #endif
  4206. #if defined(HAVE_COLDFIRE_SEC)
  4207. #error "Coldfire SEC doesn't currently support AES-GCM mode"
  4208. #endif
  4209. #ifdef WOLFSSL_ARMASM
  4210. /* implementation is located in wolfcrypt/src/port/arm/armv8-aes.c */
  4211. #elif defined(WOLFSSL_AFALG)
  4212. /* implemented in wolfcrypt/src/port/afalg/afalg_aes.c */
  4213. #elif defined(WOLFSSL_KCAPI_AES)
  4214. /* implemented in wolfcrypt/src/port/kcapi/kcapi_aes.c */
  4215. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  4216. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  4217. #else /* software + AESNI implementation */
  4218. #if !defined(FREESCALE_LTC_AES_GCM)
  4219. static WC_INLINE void IncrementGcmCounter(byte* inOutCtr)
  4220. {
  4221. int i;
  4222. /* in network byte order so start at end and work back */
  4223. for (i = AES_BLOCK_SIZE - 1; i >= AES_BLOCK_SIZE - CTR_SZ; i--) {
  4224. if (++inOutCtr[i]) /* we're done unless we overflow */
  4225. return;
  4226. }
  4227. }
  4228. #endif /* !FREESCALE_LTC_AES_GCM */
  4229. #if defined(GCM_SMALL) || defined(GCM_TABLE) || defined(GCM_TABLE_4BIT)
  4230. static WC_INLINE void FlattenSzInBits(byte* buf, word32 sz)
  4231. {
  4232. /* Multiply the sz by 8 */
  4233. word32 szHi = (sz >> (8*sizeof(sz) - 3));
  4234. sz <<= 3;
  4235. /* copy over the words of the sz into the destination buffer */
  4236. buf[0] = (byte)(szHi >> 24);
  4237. buf[1] = (byte)(szHi >> 16);
  4238. buf[2] = (byte)(szHi >> 8);
  4239. buf[3] = (byte)szHi;
  4240. buf[4] = (byte)(sz >> 24);
  4241. buf[5] = (byte)(sz >> 16);
  4242. buf[6] = (byte)(sz >> 8);
  4243. buf[7] = (byte)sz;
  4244. }
  4245. static WC_INLINE void RIGHTSHIFTX(byte* x)
  4246. {
  4247. int i;
  4248. int carryIn = 0;
  4249. byte borrow = (0x00 - (x[15] & 0x01)) & 0xE1;
  4250. for (i = 0; i < AES_BLOCK_SIZE; i++) {
  4251. int carryOut = (x[i] & 0x01) << 7;
  4252. x[i] = (byte) ((x[i] >> 1) | carryIn);
  4253. carryIn = carryOut;
  4254. }
  4255. x[0] ^= borrow;
  4256. }
  4257. #endif /* defined(GCM_SMALL) || defined(GCM_TABLE) || defined(GCM_TABLE_4BIT) */
  4258. #ifdef GCM_TABLE
  4259. void GenerateM0(Gcm* gcm)
  4260. {
  4261. int i, j;
  4262. byte (*m)[AES_BLOCK_SIZE] = gcm->M0;
  4263. XMEMCPY(m[128], gcm->H, AES_BLOCK_SIZE);
  4264. for (i = 64; i > 0; i /= 2) {
  4265. XMEMCPY(m[i], m[i*2], AES_BLOCK_SIZE);
  4266. RIGHTSHIFTX(m[i]);
  4267. }
  4268. for (i = 2; i < 256; i *= 2) {
  4269. for (j = 1; j < i; j++) {
  4270. XMEMCPY(m[i+j], m[i], AES_BLOCK_SIZE);
  4271. xorbuf(m[i+j], m[j], AES_BLOCK_SIZE);
  4272. }
  4273. }
  4274. XMEMSET(m[0], 0, AES_BLOCK_SIZE);
  4275. }
  4276. #elif defined(GCM_TABLE_4BIT)
  4277. #if !defined(BIG_ENDIAN_ORDER) && !defined(WC_16BIT_CPU)
  4278. static WC_INLINE void Shift4_M0(byte *r8, byte *z8)
  4279. {
  4280. int i;
  4281. for (i = 15; i > 0; i--)
  4282. r8[i] = (byte)(z8[i-1] << 4) | (byte)(z8[i] >> 4);
  4283. r8[0] = (byte)(z8[0] >> 4);
  4284. }
  4285. #endif
  4286. void GenerateM0(Gcm* gcm)
  4287. {
  4288. #if !defined(BIG_ENDIAN_ORDER) && !defined(WC_16BIT_CPU)
  4289. int i;
  4290. #endif
  4291. byte (*m)[AES_BLOCK_SIZE] = gcm->M0;
  4292. /* 0 times -> 0x0 */
  4293. XMEMSET(m[0x0], 0, AES_BLOCK_SIZE);
  4294. /* 1 times -> 0x8 */
  4295. XMEMCPY(m[0x8], gcm->H, AES_BLOCK_SIZE);
  4296. /* 2 times -> 0x4 */
  4297. XMEMCPY(m[0x4], m[0x8], AES_BLOCK_SIZE);
  4298. RIGHTSHIFTX(m[0x4]);
  4299. /* 4 times -> 0x2 */
  4300. XMEMCPY(m[0x2], m[0x4], AES_BLOCK_SIZE);
  4301. RIGHTSHIFTX(m[0x2]);
  4302. /* 8 times -> 0x1 */
  4303. XMEMCPY(m[0x1], m[0x2], AES_BLOCK_SIZE);
  4304. RIGHTSHIFTX(m[0x1]);
  4305. /* 0x3 */
  4306. XMEMCPY(m[0x3], m[0x2], AES_BLOCK_SIZE);
  4307. xorbuf (m[0x3], m[0x1], AES_BLOCK_SIZE);
  4308. /* 0x5 -> 0x7 */
  4309. XMEMCPY(m[0x5], m[0x4], AES_BLOCK_SIZE);
  4310. xorbuf (m[0x5], m[0x1], AES_BLOCK_SIZE);
  4311. XMEMCPY(m[0x6], m[0x4], AES_BLOCK_SIZE);
  4312. xorbuf (m[0x6], m[0x2], AES_BLOCK_SIZE);
  4313. XMEMCPY(m[0x7], m[0x4], AES_BLOCK_SIZE);
  4314. xorbuf (m[0x7], m[0x3], AES_BLOCK_SIZE);
  4315. /* 0x9 -> 0xf */
  4316. XMEMCPY(m[0x9], m[0x8], AES_BLOCK_SIZE);
  4317. xorbuf (m[0x9], m[0x1], AES_BLOCK_SIZE);
  4318. XMEMCPY(m[0xa], m[0x8], AES_BLOCK_SIZE);
  4319. xorbuf (m[0xa], m[0x2], AES_BLOCK_SIZE);
  4320. XMEMCPY(m[0xb], m[0x8], AES_BLOCK_SIZE);
  4321. xorbuf (m[0xb], m[0x3], AES_BLOCK_SIZE);
  4322. XMEMCPY(m[0xc], m[0x8], AES_BLOCK_SIZE);
  4323. xorbuf (m[0xc], m[0x4], AES_BLOCK_SIZE);
  4324. XMEMCPY(m[0xd], m[0x8], AES_BLOCK_SIZE);
  4325. xorbuf (m[0xd], m[0x5], AES_BLOCK_SIZE);
  4326. XMEMCPY(m[0xe], m[0x8], AES_BLOCK_SIZE);
  4327. xorbuf (m[0xe], m[0x6], AES_BLOCK_SIZE);
  4328. XMEMCPY(m[0xf], m[0x8], AES_BLOCK_SIZE);
  4329. xorbuf (m[0xf], m[0x7], AES_BLOCK_SIZE);
  4330. #if !defined(BIG_ENDIAN_ORDER) && !defined(WC_16BIT_CPU)
  4331. for (i = 0; i < 16; i++) {
  4332. Shift4_M0(m[16+i], m[i]);
  4333. }
  4334. #endif
  4335. }
  4336. #endif /* GCM_TABLE */
  4337. /* Software AES - GCM SetKey */
  4338. int wc_AesGcmSetKey(Aes* aes, const byte* key, word32 len)
  4339. {
  4340. int ret;
  4341. byte iv[AES_BLOCK_SIZE];
  4342. #ifdef WOLFSSL_IMX6_CAAM_BLOB
  4343. byte local[32];
  4344. word32 localSz = 32;
  4345. if (len == (16 + WC_CAAM_BLOB_SZ) ||
  4346. len == (24 + WC_CAAM_BLOB_SZ) ||
  4347. len == (32 + WC_CAAM_BLOB_SZ)) {
  4348. if (wc_caamOpenBlob((byte*)key, len, local, &localSz) != 0) {
  4349. return BAD_FUNC_ARG;
  4350. }
  4351. /* set local values */
  4352. key = local;
  4353. len = localSz;
  4354. }
  4355. #endif
  4356. if (!((len == 16) || (len == 24) || (len == 32)))
  4357. return BAD_FUNC_ARG;
  4358. if (aes == NULL) {
  4359. #ifdef WOLFSSL_IMX6_CAAM_BLOB
  4360. ForceZero(local, sizeof(local));
  4361. #endif
  4362. return BAD_FUNC_ARG;
  4363. }
  4364. #ifdef OPENSSL_EXTRA
  4365. XMEMSET(aes->gcm.aadH, 0, sizeof(aes->gcm.aadH));
  4366. aes->gcm.aadLen = 0;
  4367. #endif
  4368. XMEMSET(iv, 0, AES_BLOCK_SIZE);
  4369. ret = wc_AesSetKey(aes, key, len, iv, AES_ENCRYPTION);
  4370. #ifdef WOLFSSL_AESGCM_STREAM
  4371. aes->gcmKeySet = 1;
  4372. #endif
  4373. #ifdef WOLFSSL_AESNI
  4374. /* AES-NI code generates its own H value. */
  4375. if (haveAESNI)
  4376. return ret;
  4377. #endif /* WOLFSSL_AESNI */
  4378. #if defined(WOLFSSL_SECO_CAAM)
  4379. if (aes->devId == WOLFSSL_SECO_DEVID) {
  4380. return ret;
  4381. }
  4382. #endif /* WOLFSSL_SECO_CAAM */
  4383. #if defined(WOLFSSL_RENESAS_FSPSM_CRYPTONLY) && \
  4384. !defined(NO_WOLFSSL_RENESAS_FSPSM_AES)
  4385. return ret;
  4386. #endif /* WOLFSSL_RENESAS_RSIP && WOLFSSL_RENESAS_FSPSM_CRYPTONLY*/
  4387. #if !defined(FREESCALE_LTC_AES_GCM)
  4388. if (ret == 0)
  4389. ret = wc_AesEncrypt(aes, iv, aes->gcm.H);
  4390. if (ret == 0) {
  4391. #if defined(GCM_TABLE) || defined(GCM_TABLE_4BIT)
  4392. GenerateM0(&aes->gcm);
  4393. #endif /* GCM_TABLE */
  4394. }
  4395. #endif /* FREESCALE_LTC_AES_GCM */
  4396. #if defined(WOLFSSL_XILINX_CRYPT) || defined(WOLFSSL_AFALG_XILINX_AES)
  4397. wc_AesGcmSetKey_ex(aes, key, len, WOLFSSL_XILINX_AES_KEY_SRC);
  4398. #endif
  4399. #ifdef WOLF_CRYPTO_CB
  4400. if (aes->devId != INVALID_DEVID) {
  4401. XMEMCPY(aes->devKey, key, len);
  4402. }
  4403. #endif
  4404. #ifdef WOLFSSL_IMX6_CAAM_BLOB
  4405. ForceZero(local, sizeof(local));
  4406. #endif
  4407. return ret;
  4408. }
#ifdef WOLFSSL_AESNI
/* Prototypes for the x86_64 assembly AES-GCM implementations.
 * USE_INTEL_SPEEDUP also enables the AVX1/AVX2 variants; which one is
 * called is decided at runtime elsewhere in this file. */
#if defined(USE_INTEL_SPEEDUP)
    #define HAVE_INTEL_AVX1
    #define HAVE_INTEL_AVX2
#endif /* USE_INTEL_SPEEDUP */

/* One-shot AES-GCM encrypt: nbytes plaintext, abytes AAD, ibytes IV,
 * tbytes tag written to tag, nr = number of AES rounds. */
void AES_GCM_encrypt(const unsigned char *in, unsigned char *out,
                     const unsigned char* addt, const unsigned char* ivec,
                     unsigned char *tag, word32 nbytes,
                     word32 abytes, word32 ibytes,
                     word32 tbytes, const unsigned char* key, int nr)
                     XASM_LINK("AES_GCM_encrypt");
#ifdef HAVE_INTEL_AVX1
/* AVX1 variant of AES_GCM_encrypt. */
void AES_GCM_encrypt_avx1(const unsigned char *in, unsigned char *out,
                          const unsigned char* addt, const unsigned char* ivec,
                          unsigned char *tag, word32 nbytes,
                          word32 abytes, word32 ibytes,
                          word32 tbytes, const unsigned char* key,
                          int nr)
                          XASM_LINK("AES_GCM_encrypt_avx1");
#ifdef HAVE_INTEL_AVX2
/* AVX2 variant of AES_GCM_encrypt. */
void AES_GCM_encrypt_avx2(const unsigned char *in, unsigned char *out,
                          const unsigned char* addt, const unsigned char* ivec,
                          unsigned char *tag, word32 nbytes,
                          word32 abytes, word32 ibytes,
                          word32 tbytes, const unsigned char* key,
                          int nr)
                          XASM_LINK("AES_GCM_encrypt_avx2");
#endif /* HAVE_INTEL_AVX2 */
#endif /* HAVE_INTEL_AVX1 */

#ifdef HAVE_AES_DECRYPT
/* One-shot AES-GCM decrypt; *res reports tag comparison result. */
void AES_GCM_decrypt(const unsigned char *in, unsigned char *out,
                     const unsigned char* addt, const unsigned char* ivec,
                     const unsigned char *tag, word32 nbytes, word32 abytes,
                     word32 ibytes, word32 tbytes, const unsigned char* key,
                     int nr, int* res)
                     XASM_LINK("AES_GCM_decrypt");
#ifdef HAVE_INTEL_AVX1
/* AVX1 variant of AES_GCM_decrypt. */
void AES_GCM_decrypt_avx1(const unsigned char *in, unsigned char *out,
                          const unsigned char* addt, const unsigned char* ivec,
                          const unsigned char *tag, word32 nbytes,
                          word32 abytes, word32 ibytes, word32 tbytes,
                          const unsigned char* key, int nr, int* res)
                          XASM_LINK("AES_GCM_decrypt_avx1");
#ifdef HAVE_INTEL_AVX2
/* AVX2 variant of AES_GCM_decrypt. */
void AES_GCM_decrypt_avx2(const unsigned char *in, unsigned char *out,
                          const unsigned char* addt, const unsigned char* ivec,
                          const unsigned char *tag, word32 nbytes,
                          word32 abytes, word32 ibytes, word32 tbytes,
                          const unsigned char* key, int nr, int* res)
                          XASM_LINK("AES_GCM_decrypt_avx2");
#endif /* HAVE_INTEL_AVX2 */
#endif /* HAVE_INTEL_AVX1 */
#endif /* HAVE_AES_DECRYPT */
#endif /* WOLFSSL_AESNI */
  4463. #if defined(GCM_SMALL)
  4464. static void GMULT(byte* X, byte* Y)
  4465. {
  4466. byte Z[AES_BLOCK_SIZE];
  4467. byte V[AES_BLOCK_SIZE];
  4468. int i, j;
  4469. XMEMSET(Z, 0, AES_BLOCK_SIZE);
  4470. XMEMCPY(V, X, AES_BLOCK_SIZE);
  4471. for (i = 0; i < AES_BLOCK_SIZE; i++)
  4472. {
  4473. byte y = Y[i];
  4474. for (j = 0; j < 8; j++)
  4475. {
  4476. if (y & 0x80) {
  4477. xorbuf(Z, V, AES_BLOCK_SIZE);
  4478. }
  4479. RIGHTSHIFTX(V);
  4480. y = y << 1;
  4481. }
  4482. }
  4483. XMEMCPY(X, Z, AES_BLOCK_SIZE);
  4484. }
  4485. void GHASH(Gcm* gcm, const byte* a, word32 aSz, const byte* c,
  4486. word32 cSz, byte* s, word32 sSz)
  4487. {
  4488. byte x[AES_BLOCK_SIZE];
  4489. byte scratch[AES_BLOCK_SIZE];
  4490. word32 blocks, partial;
  4491. byte* h;
  4492. if (gcm == NULL) {
  4493. return;
  4494. }
  4495. h = gcm->H;
  4496. XMEMSET(x, 0, AES_BLOCK_SIZE);
  4497. /* Hash in A, the Additional Authentication Data */
  4498. if (aSz != 0 && a != NULL) {
  4499. blocks = aSz / AES_BLOCK_SIZE;
  4500. partial = aSz % AES_BLOCK_SIZE;
  4501. while (blocks--) {
  4502. xorbuf(x, a, AES_BLOCK_SIZE);
  4503. GMULT(x, h);
  4504. a += AES_BLOCK_SIZE;
  4505. }
  4506. if (partial != 0) {
  4507. XMEMSET(scratch, 0, AES_BLOCK_SIZE);
  4508. XMEMCPY(scratch, a, partial);
  4509. xorbuf(x, scratch, AES_BLOCK_SIZE);
  4510. GMULT(x, h);
  4511. }
  4512. }
  4513. /* Hash in C, the Ciphertext */
  4514. if (cSz != 0 && c != NULL) {
  4515. blocks = cSz / AES_BLOCK_SIZE;
  4516. partial = cSz % AES_BLOCK_SIZE;
  4517. while (blocks--) {
  4518. xorbuf(x, c, AES_BLOCK_SIZE);
  4519. GMULT(x, h);
  4520. c += AES_BLOCK_SIZE;
  4521. }
  4522. if (partial != 0) {
  4523. XMEMSET(scratch, 0, AES_BLOCK_SIZE);
  4524. XMEMCPY(scratch, c, partial);
  4525. xorbuf(x, scratch, AES_BLOCK_SIZE);
  4526. GMULT(x, h);
  4527. }
  4528. }
  4529. /* Hash in the lengths of A and C in bits */
  4530. FlattenSzInBits(&scratch[0], aSz);
  4531. FlattenSzInBits(&scratch[8], cSz);
  4532. xorbuf(x, scratch, AES_BLOCK_SIZE);
  4533. GMULT(x, h);
  4534. /* Copy the result into s. */
  4535. XMEMCPY(s, x, sSz);
  4536. }
#ifdef WOLFSSL_AESGCM_STREAM
/* No extra initialization needed for the small implementation.
 *
 * @param [in] aes AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) WC_DO_NOTHING

/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H.
 *
 * @param [in, out] aes   AES GCM object.
 * @param [in]      block Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                  \
    do {                                             \
        xorbuf(AES_TAG(aes), block, AES_BLOCK_SIZE); \
        GMULT(AES_TAG(aes), aes->gcm.H);             \
    }                                                \
    while (0)
#endif /* WOLFSSL_AESGCM_STREAM */
/* end GCM_SMALL */
  4558. #elif defined(GCM_TABLE)
/* Remainder table for byte-wise GHASH reduction: R[b] is the two-byte
 * value b * x^128 mod (x^128 + x^7 + x^2 + x + 1), XORed into the top
 * of the accumulator after an 8-bit shift. */
ALIGN16 static const byte R[256][2] = {
    {0x00, 0x00}, {0x01, 0xc2}, {0x03, 0x84}, {0x02, 0x46},
    {0x07, 0x08}, {0x06, 0xca}, {0x04, 0x8c}, {0x05, 0x4e},
    {0x0e, 0x10}, {0x0f, 0xd2}, {0x0d, 0x94}, {0x0c, 0x56},
    {0x09, 0x18}, {0x08, 0xda}, {0x0a, 0x9c}, {0x0b, 0x5e},
    {0x1c, 0x20}, {0x1d, 0xe2}, {0x1f, 0xa4}, {0x1e, 0x66},
    {0x1b, 0x28}, {0x1a, 0xea}, {0x18, 0xac}, {0x19, 0x6e},
    {0x12, 0x30}, {0x13, 0xf2}, {0x11, 0xb4}, {0x10, 0x76},
    {0x15, 0x38}, {0x14, 0xfa}, {0x16, 0xbc}, {0x17, 0x7e},
    {0x38, 0x40}, {0x39, 0x82}, {0x3b, 0xc4}, {0x3a, 0x06},
    {0x3f, 0x48}, {0x3e, 0x8a}, {0x3c, 0xcc}, {0x3d, 0x0e},
    {0x36, 0x50}, {0x37, 0x92}, {0x35, 0xd4}, {0x34, 0x16},
    {0x31, 0x58}, {0x30, 0x9a}, {0x32, 0xdc}, {0x33, 0x1e},
    {0x24, 0x60}, {0x25, 0xa2}, {0x27, 0xe4}, {0x26, 0x26},
    {0x23, 0x68}, {0x22, 0xaa}, {0x20, 0xec}, {0x21, 0x2e},
    {0x2a, 0x70}, {0x2b, 0xb2}, {0x29, 0xf4}, {0x28, 0x36},
    {0x2d, 0x78}, {0x2c, 0xba}, {0x2e, 0xfc}, {0x2f, 0x3e},
    {0x70, 0x80}, {0x71, 0x42}, {0x73, 0x04}, {0x72, 0xc6},
    {0x77, 0x88}, {0x76, 0x4a}, {0x74, 0x0c}, {0x75, 0xce},
    {0x7e, 0x90}, {0x7f, 0x52}, {0x7d, 0x14}, {0x7c, 0xd6},
    {0x79, 0x98}, {0x78, 0x5a}, {0x7a, 0x1c}, {0x7b, 0xde},
    {0x6c, 0xa0}, {0x6d, 0x62}, {0x6f, 0x24}, {0x6e, 0xe6},
    {0x6b, 0xa8}, {0x6a, 0x6a}, {0x68, 0x2c}, {0x69, 0xee},
    {0x62, 0xb0}, {0x63, 0x72}, {0x61, 0x34}, {0x60, 0xf6},
    {0x65, 0xb8}, {0x64, 0x7a}, {0x66, 0x3c}, {0x67, 0xfe},
    {0x48, 0xc0}, {0x49, 0x02}, {0x4b, 0x44}, {0x4a, 0x86},
    {0x4f, 0xc8}, {0x4e, 0x0a}, {0x4c, 0x4c}, {0x4d, 0x8e},
    {0x46, 0xd0}, {0x47, 0x12}, {0x45, 0x54}, {0x44, 0x96},
    {0x41, 0xd8}, {0x40, 0x1a}, {0x42, 0x5c}, {0x43, 0x9e},
    {0x54, 0xe0}, {0x55, 0x22}, {0x57, 0x64}, {0x56, 0xa6},
    {0x53, 0xe8}, {0x52, 0x2a}, {0x50, 0x6c}, {0x51, 0xae},
    {0x5a, 0xf0}, {0x5b, 0x32}, {0x59, 0x74}, {0x58, 0xb6},
    {0x5d, 0xf8}, {0x5c, 0x3a}, {0x5e, 0x7c}, {0x5f, 0xbe},
    {0xe1, 0x00}, {0xe0, 0xc2}, {0xe2, 0x84}, {0xe3, 0x46},
    {0xe6, 0x08}, {0xe7, 0xca}, {0xe5, 0x8c}, {0xe4, 0x4e},
    {0xef, 0x10}, {0xee, 0xd2}, {0xec, 0x94}, {0xed, 0x56},
    {0xe8, 0x18}, {0xe9, 0xda}, {0xeb, 0x9c}, {0xea, 0x5e},
    {0xfd, 0x20}, {0xfc, 0xe2}, {0xfe, 0xa4}, {0xff, 0x66},
    {0xfa, 0x28}, {0xfb, 0xea}, {0xf9, 0xac}, {0xf8, 0x6e},
    {0xf3, 0x30}, {0xf2, 0xf2}, {0xf0, 0xb4}, {0xf1, 0x76},
    {0xf4, 0x38}, {0xf5, 0xfa}, {0xf7, 0xbc}, {0xf6, 0x7e},
    {0xd9, 0x40}, {0xd8, 0x82}, {0xda, 0xc4}, {0xdb, 0x06},
    {0xde, 0x48}, {0xdf, 0x8a}, {0xdd, 0xcc}, {0xdc, 0x0e},
    {0xd7, 0x50}, {0xd6, 0x92}, {0xd4, 0xd4}, {0xd5, 0x16},
    {0xd0, 0x58}, {0xd1, 0x9a}, {0xd3, 0xdc}, {0xd2, 0x1e},
    {0xc5, 0x60}, {0xc4, 0xa2}, {0xc6, 0xe4}, {0xc7, 0x26},
    {0xc2, 0x68}, {0xc3, 0xaa}, {0xc1, 0xec}, {0xc0, 0x2e},
    {0xcb, 0x70}, {0xca, 0xb2}, {0xc8, 0xf4}, {0xc9, 0x36},
    {0xcc, 0x78}, {0xcd, 0xba}, {0xcf, 0xfc}, {0xce, 0x3e},
    {0x91, 0x80}, {0x90, 0x42}, {0x92, 0x04}, {0x93, 0xc6},
    {0x96, 0x88}, {0x97, 0x4a}, {0x95, 0x0c}, {0x94, 0xce},
    {0x9f, 0x90}, {0x9e, 0x52}, {0x9c, 0x14}, {0x9d, 0xd6},
    {0x98, 0x98}, {0x99, 0x5a}, {0x9b, 0x1c}, {0x9a, 0xde},
    {0x8d, 0xa0}, {0x8c, 0x62}, {0x8e, 0x24}, {0x8f, 0xe6},
    {0x8a, 0xa8}, {0x8b, 0x6a}, {0x89, 0x2c}, {0x88, 0xee},
    {0x83, 0xb0}, {0x82, 0x72}, {0x80, 0x34}, {0x81, 0xf6},
    {0x84, 0xb8}, {0x85, 0x7a}, {0x87, 0x3c}, {0x86, 0xfe},
    {0xa9, 0xc0}, {0xa8, 0x02}, {0xaa, 0x44}, {0xab, 0x86},
    {0xae, 0xc8}, {0xaf, 0x0a}, {0xad, 0x4c}, {0xac, 0x8e},
    {0xa7, 0xd0}, {0xa6, 0x12}, {0xa4, 0x54}, {0xa5, 0x96},
    {0xa0, 0xd8}, {0xa1, 0x1a}, {0xa3, 0x5c}, {0xa2, 0x9e},
    {0xb5, 0xe0}, {0xb4, 0x22}, {0xb6, 0x64}, {0xb7, 0xa6},
    {0xb2, 0xe8}, {0xb3, 0x2a}, {0xb1, 0x6c}, {0xb0, 0xae},
    {0xbb, 0xf0}, {0xba, 0x32}, {0xb8, 0x74}, {0xb9, 0xb6},
    {0xbc, 0xf8}, {0xbd, 0x3a}, {0xbf, 0x7c}, {0xbe, 0xbe} };
/* GHASH multiplication x = x * H in GF(2^128), using the 256-entry
 * table m (m[b] = b*H, built by GenerateM0()).  Walks x from the least
 * significant byte (15) to the most (0); each step XORs in a table row,
 * shifts the accumulator right one byte and folds the shifted-out byte
 * back in through the remainder table R. */
static void GMULT(byte *x, byte m[256][AES_BLOCK_SIZE])
{
#if !defined(WORD64_AVAILABLE) || defined(BIG_ENDIAN_ORDER)
    /* Portable byte-wise variant. */
    int i, j;
    byte Z[AES_BLOCK_SIZE];
    byte a;

    XMEMSET(Z, 0, sizeof(Z));
    for (i = 15; i > 0; i--) {
        xorbuf(Z, m[x[i]], AES_BLOCK_SIZE);
        /* Remember the byte about to be shifted out - it selects the
         * polynomial reduction. */
        a = Z[15];
        /* Shift Z right by one byte. */
        for (j = 15; j > 0; j--) {
            Z[j] = Z[j-1];
        }
        /* Z[0] was vacated by the shift (assignment, not XOR). */
        Z[0] = R[a][0];
        Z[1] ^= R[a][1];
    }
    xorbuf(Z, m[x[0]], AES_BLOCK_SIZE);
    XMEMCPY(x, Z, AES_BLOCK_SIZE);
#elif defined(WC_32BIT_CPU)
    /* 32-bit word variant: Z lives in a double-length buffer so the
     * byte shift is expressed as a moving word pointer instead of
     * copying data.
     * NOTE(review): casts byte* to word32* - assumes x and the table
     * rows are suitably aligned and relies on type punning; confirm for
     * the target platform/compiler. */
    byte Z[AES_BLOCK_SIZE + AES_BLOCK_SIZE];
    byte a;
    word32* pZ;
    word32* pm;
    word32* px = (word32*)(x);
    int i;

    /* First byte: plain copy of the table row (Z starts as zero). */
    pZ = (word32*)(Z + 15 + 1);
    pm = (word32*)(m[x[15]]);
    pZ[0] = pm[0];
    pZ[1] = pm[1];
    pZ[2] = pm[2];
    pZ[3] = pm[3];
    a = Z[16 + 15];
    Z[15] = R[a][0];
    Z[16] ^= R[a][1];
    for (i = 14; i > 0; i--) {
        pZ = (word32*)(Z + i + 1);
        pm = (word32*)(m[x[i]]);
        pZ[0] ^= pm[0];
        pZ[1] ^= pm[1];
        pZ[2] ^= pm[2];
        pZ[3] ^= pm[3];
        /* Reduce the byte that fell off the (logical) end. */
        a = Z[16 + i];
        Z[i] = R[a][0];
        Z[i+1] ^= R[a][1];
    }
    /* Last byte: XOR the final row and write the result to x. */
    pZ = (word32*)(Z + 1);
    pm = (word32*)(m[x[0]]);
    px[0] = pZ[0] ^ pm[0]; px[1] = pZ[1] ^ pm[1];
    px[2] = pZ[2] ^ pm[2]; px[3] = pZ[3] ^ pm[3];
#else
    /* 64-bit word variant - same scheme as the 32-bit path.
     * NOTE(review): same alignment/aliasing assumptions as above. */
    byte Z[AES_BLOCK_SIZE + AES_BLOCK_SIZE];
    byte a;
    word64* pZ;
    word64* pm;
    word64* px = (word64*)(x);
    int i;

    pZ = (word64*)(Z + 15 + 1);
    pm = (word64*)(m[x[15]]);
    pZ[0] = pm[0];
    pZ[1] = pm[1];
    a = Z[16 + 15];
    Z[15] = R[a][0];
    Z[16] ^= R[a][1];
    for (i = 14; i > 0; i--) {
        pZ = (word64*)(Z + i + 1);
        pm = (word64*)(m[x[i]]);
        pZ[0] ^= pm[0];
        pZ[1] ^= pm[1];
        a = Z[16 + i];
        Z[i] = R[a][0];
        Z[i+1] ^= R[a][1];
    }
    pZ = (word64*)(Z + 1);
    pm = (word64*)(m[x[0]]);
    px[0] = pZ[0] ^ pm[0]; px[1] = pZ[1] ^ pm[1];
#endif
}
  4701. void GHASH(Gcm* gcm, const byte* a, word32 aSz, const byte* c,
  4702. word32 cSz, byte* s, word32 sSz)
  4703. {
  4704. byte x[AES_BLOCK_SIZE];
  4705. byte scratch[AES_BLOCK_SIZE];
  4706. word32 blocks, partial;
  4707. if (gcm == NULL) {
  4708. return;
  4709. }
  4710. XMEMSET(x, 0, AES_BLOCK_SIZE);
  4711. /* Hash in A, the Additional Authentication Data */
  4712. if (aSz != 0 && a != NULL) {
  4713. blocks = aSz / AES_BLOCK_SIZE;
  4714. partial = aSz % AES_BLOCK_SIZE;
  4715. while (blocks--) {
  4716. xorbuf(x, a, AES_BLOCK_SIZE);
  4717. GMULT(x, gcm->M0);
  4718. a += AES_BLOCK_SIZE;
  4719. }
  4720. if (partial != 0) {
  4721. XMEMSET(scratch, 0, AES_BLOCK_SIZE);
  4722. XMEMCPY(scratch, a, partial);
  4723. xorbuf(x, scratch, AES_BLOCK_SIZE);
  4724. GMULT(x, gcm->M0);
  4725. }
  4726. }
  4727. /* Hash in C, the Ciphertext */
  4728. if (cSz != 0 && c != NULL) {
  4729. blocks = cSz / AES_BLOCK_SIZE;
  4730. partial = cSz % AES_BLOCK_SIZE;
  4731. while (blocks--) {
  4732. xorbuf(x, c, AES_BLOCK_SIZE);
  4733. GMULT(x, gcm->M0);
  4734. c += AES_BLOCK_SIZE;
  4735. }
  4736. if (partial != 0) {
  4737. XMEMSET(scratch, 0, AES_BLOCK_SIZE);
  4738. XMEMCPY(scratch, c, partial);
  4739. xorbuf(x, scratch, AES_BLOCK_SIZE);
  4740. GMULT(x, gcm->M0);
  4741. }
  4742. }
  4743. /* Hash in the lengths of A and C in bits */
  4744. FlattenSzInBits(&scratch[0], aSz);
  4745. FlattenSzInBits(&scratch[8], cSz);
  4746. xorbuf(x, scratch, AES_BLOCK_SIZE);
  4747. GMULT(x, gcm->M0);
  4748. /* Copy the result into s. */
  4749. XMEMCPY(s, x, sSz);
  4750. }
#ifdef WOLFSSL_AESGCM_STREAM
/* No extra initialization needed for the table implementation.
 *
 * @param [in] aes AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) WC_DO_NOTHING

/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H using the pre-computed table.
 *
 * @param [in, out] aes   AES GCM object.
 * @param [in]      block Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                  \
    do {                                             \
        xorbuf(AES_TAG(aes), block, AES_BLOCK_SIZE); \
        GMULT(AES_TAG(aes), aes->gcm.M0);            \
    }                                                \
    while (0)
#endif /* WOLFSSL_AESGCM_STREAM */
/* end GCM_TABLE */
  4772. #elif defined(GCM_TABLE_4BIT)
  4773. /* remainder = x^7 + x^2 + x^1 + 1 => 0xe1
  4774. * R shifts right a reverse bit pair of bytes such that:
  4775. * R(b0, b1) => b1 = (b1 >> 1) | (b0 << 7); b0 >>= 1
  4776. * 0 => 0, 0, 0, 0 => R(R(R(00,00) ^ 00,00) ^ 00,00) ^ 00,00 = 00,00
  4777. * 8 => 0, 0, 0, 1 => R(R(R(00,00) ^ 00,00) ^ 00,00) ^ e1,00 = e1,00
  4778. * 4 => 0, 0, 1, 0 => R(R(R(00,00) ^ 00,00) ^ e1,00) ^ 00,00 = 70,80
  4779. * 2 => 0, 1, 0, 0 => R(R(R(00,00) ^ e1,00) ^ 00,00) ^ 00,00 = 38,40
  4780. * 1 => 1, 0, 0, 0 => R(R(R(e1,00) ^ 00,00) ^ 00,00) ^ 00,00 = 1c,20
* To calculate the rest, XOR result for each bit.
  4782. * e.g. 6 = 4 ^ 2 => 48,c0
  4783. *
  4784. * Second half is same values rotated by 4-bits.
  4785. */
#if defined(BIG_ENDIAN_ORDER) || defined(WC_16BIT_CPU)
/* Remainder table for 4-bit GHASH reduction: R[n] is the two-byte value
 * n * x^128 mod the GCM polynomial, XORed in after a 4-bit shift. */
static const byte R[16][2] = {
    {0x00, 0x00}, {0x1c, 0x20}, {0x38, 0x40}, {0x24, 0x60},
    {0x70, 0x80}, {0x6c, 0xa0}, {0x48, 0xc0}, {0x54, 0xe0},
    {0xe1, 0x00}, {0xfd, 0x20}, {0xd9, 0x40}, {0xc5, 0x60},
    {0x91, 0x80}, {0x8d, 0xa0}, {0xa9, 0xc0}, {0xb5, 0xe0},
};
#else
/* Same remainders packed into 16-bit words for little-endian loads;
 * entries [16..31] hold the values pre-rotated by 4 bits. */
static const word16 R[32] = {
    0x0000, 0x201c, 0x4038, 0x6024,
    0x8070, 0xa06c, 0xc048, 0xe054,
    0x00e1, 0x20fd, 0x40d9, 0x60c5,
    0x8091, 0xa08d, 0xc0a9, 0xe0b5,
    0x0000, 0xc201, 0x8403, 0x4602,
    0x0807, 0xca06, 0x8c04, 0x4e05,
    0x100e, 0xd20f, 0x940d, 0x560c,
    0x1809, 0xda08, 0x9c0a, 0x5e0b,
};
#endif
  4805. /* Multiply in GF(2^128) defined by polynomial:
  4806. * x^128 + x^7 + x^2 + x^1 + 1.
  4807. *
  4808. * H: hash key = encrypt(key, 0)
  4809. * x = x * H in field
  4810. *
  4811. * x: cumulative result
  4812. * m: 4-bit table
  4813. * [0..15] * H
  4814. */
  4815. #if defined(BIG_ENDIAN_ORDER) || defined(WC_16BIT_CPU)
/* GHASH multiplication x = x * H using the 16-entry nibble table
 * (m[n] = n*H).  Big-endian / 16-bit CPU variant: processes x one
 * nibble at a time, low nibble then high nibble of each byte, from the
 * least significant byte up. */
static void GMULT(byte *x, byte m[16][AES_BLOCK_SIZE])
{
    int i, j, n;
    byte Z[AES_BLOCK_SIZE];
    byte a;

    XMEMSET(Z, 0, sizeof(Z));
    for (i = 15; i >= 0; i--) {
        for (n = 0; n < 2; n++) {
            if (n == 0)
                xorbuf(Z, m[x[i] & 0xf], AES_BLOCK_SIZE);
            else {
                xorbuf(Z, m[x[i] >> 4], AES_BLOCK_SIZE);
                /* The very last nibble needs no trailing shift. */
                if (i == 0)
                    break;
            }
            /* Remember the nibble about to be shifted out - it selects
             * the polynomial reduction. */
            a = Z[15] & 0xf;
            /* Shift Z right by 4 bits. */
            for (j = 15; j > 0; j--)
                Z[j] = (Z[j-1] << 4) | (Z[j] >> 4);
            Z[0] >>= 4;
            /* Fold in a * x^128 mod the GCM polynomial. */
            Z[0] ^= R[a][0];
            Z[1] ^= R[a][1];
        }
    }
    XMEMCPY(x, Z, AES_BLOCK_SIZE);
}
  4841. #elif defined(WC_32BIT_CPU)
/* GHASH multiplication x = x * H using the 32-entry nibble table:
 * m[0..15] = n*H, m[16..31] = n*H pre-shifted by 4 bits (built by
 * GenerateM0()/Shift4_M0()).  32-bit little-endian variant: handles a
 * whole byte (two nibbles) per iteration with a single 8-bit rotate.
 * NOTE(review): casts byte* to word32* - assumes x and the table rows
 * are 32-bit aligned; confirm for the target platform. */
static WC_INLINE void GMULT(byte *x, byte m[32][AES_BLOCK_SIZE])
{
    int i;
    word32 z8[4] = {0, 0, 0, 0};
    byte a;
    word32* x8 = (word32*)x;
    word32* m8;
    byte xi;
    word32 n7, n6, n5, n4, n3, n2, n1, n0;

    for (i = 15; i > 0; i--) {
        xi = x[i];

        /* XOR in (msn * H) */
        m8 = (word32*)m[xi & 0xf];
        z8[0] ^= m8[0]; z8[1] ^= m8[1]; z8[2] ^= m8[2]; z8[3] ^= m8[3];

        /* Cache top byte for remainder calculations - lost in rotate. */
        a = (byte)(z8[3] >> 24);

        /* Rotate Z by 8-bits */
        z8[3] = (z8[2] >> 24) | (z8[3] << 8);
        z8[2] = (z8[1] >> 24) | (z8[2] << 8);
        z8[1] = (z8[0] >> 24) | (z8[1] << 8);
        z8[0] <<= 8;

        /* XOR in (msn * remainder) [pre-rotated by 4 bits] */
        z8[0] ^= (word32)R[16 + (a & 0xf)];

        xi >>= 4;
        /* XOR in next significant nibble (XORed with H) * remainder */
        m8 = (word32*)m[xi];
        a ^= (byte)(m8[3] >> 20);
        z8[0] ^= (word32)R[a >> 4];

        /* XOR in (next significant nibble * H) [pre-rotated by 4 bits] */
        m8 = (word32*)m[16 + xi];
        z8[0] ^= m8[0]; z8[1] ^= m8[1];
        z8[2] ^= m8[2]; z8[3] ^= m8[3];
    }

    /* Final byte: finish with a 4-bit (not 8-bit) shift between nibbles. */
    xi = x[0];

    /* XOR in most significant nibble * H */
    m8 = (word32*)m[xi & 0xf];
    z8[0] ^= m8[0]; z8[1] ^= m8[1]; z8[2] ^= m8[2]; z8[3] ^= m8[3];

    /* Cache top byte for remainder calculations - lost in rotate. */
    a = (z8[3] >> 24) & 0xf;

    /* Rotate z by 4-bits: split into nibble lanes and reassemble. */
    n7 = z8[3] & 0xf0f0f0f0ULL;
    n6 = z8[3] & 0x0f0f0f0fULL;
    n5 = z8[2] & 0xf0f0f0f0ULL;
    n4 = z8[2] & 0x0f0f0f0fULL;
    n3 = z8[1] & 0xf0f0f0f0ULL;
    n2 = z8[1] & 0x0f0f0f0fULL;
    n1 = z8[0] & 0xf0f0f0f0ULL;
    n0 = z8[0] & 0x0f0f0f0fULL;
    z8[3] = (n7 >> 4) | (n6 << 12) | (n4 >> 20);
    z8[2] = (n5 >> 4) | (n4 << 12) | (n2 >> 20);
    z8[1] = (n3 >> 4) | (n2 << 12) | (n0 >> 20);
    z8[0] = (n1 >> 4) | (n0 << 12);

    /* XOR in most significant nibble * remainder */
    z8[0] ^= (word32)R[a];
    /* XOR in next significant nibble * H */
    m8 = (word32*)m[xi >> 4];
    z8[0] ^= m8[0]; z8[1] ^= m8[1]; z8[2] ^= m8[2]; z8[3] ^= m8[3];

    /* Write back result. */
    x8[0] = z8[0]; x8[1] = z8[1]; x8[2] = z8[2]; x8[3] = z8[3];
}
  4902. #else
/* GHASH multiplication x = x * H using the 32-entry nibble table -
 * 64-bit little-endian variant (same scheme as the 32-bit path).
 * NOTE(review): casts byte* to word64* - assumes x and the table rows
 * are 64-bit aligned; confirm for the target platform. */
static WC_INLINE void GMULT(byte *x, byte m[32][AES_BLOCK_SIZE])
{
    int i;
    word64 z8[2] = {0, 0};
    byte a;
    word64* x8 = (word64*)x;
    word64* m8;
    word64 n0, n1, n2, n3;
    byte xi;

    for (i = 15; i > 0; i--) {
        xi = x[i];

        /* XOR in (msn * H) */
        m8 = (word64*)m[xi & 0xf];
        z8[0] ^= m8[0];
        z8[1] ^= m8[1];

        /* Cache top byte for remainder calculations - lost in rotate. */
        a = (byte)(z8[1] >> 56);

        /* Rotate Z by 8-bits */
        z8[1] = (z8[0] >> 56) | (z8[1] << 8);
        z8[0] <<= 8;

        /* XOR in (next significant nibble * H) [pre-rotated by 4 bits] */
        m8 = (word64*)m[16 + (xi >> 4)];
        z8[0] ^= m8[0];
        z8[1] ^= m8[1];

        /* XOR in (msn * remainder) [pre-rotated by 4 bits] */
        z8[0] ^= (word64)R[16 + (a & 0xf)];
        /* XOR in next significant nibble (XORed with H) * remainder */
        m8 = (word64*)m[xi >> 4];
        a ^= (byte)(m8[1] >> 52);
        z8[0] ^= (word64)R[a >> 4];
    }

    /* Final byte: finish with a 4-bit (not 8-bit) shift between nibbles. */
    xi = x[0];

    /* XOR in most significant nibble * H */
    m8 = (word64*)m[xi & 0xf];
    z8[0] ^= m8[0];
    z8[1] ^= m8[1];

    /* Cache top byte for remainder calculations - lost in rotate. */
    a = (z8[1] >> 56) & 0xf;

    /* Rotate z by 4-bits: split into nibble lanes and reassemble. */
    n3 = z8[1] & W64LIT(0xf0f0f0f0f0f0f0f0);
    n2 = z8[1] & W64LIT(0x0f0f0f0f0f0f0f0f);
    n1 = z8[0] & W64LIT(0xf0f0f0f0f0f0f0f0);
    n0 = z8[0] & W64LIT(0x0f0f0f0f0f0f0f0f);
    z8[1] = (n3 >> 4) | (n2 << 12) | (n0 >> 52);
    z8[0] = (n1 >> 4) | (n0 << 12);

    /* XOR in next significant nibble * H */
    m8 = (word64*)m[xi >> 4];
    z8[0] ^= m8[0];
    z8[1] ^= m8[1];
    /* XOR in most significant nibble * remainder */
    z8[0] ^= (word64)R[a];

    /* Write back result. */
    x8[0] = z8[0];
    x8[1] = z8[1];
}
  4958. #endif
  4959. void GHASH(Gcm* gcm, const byte* a, word32 aSz, const byte* c,
  4960. word32 cSz, byte* s, word32 sSz)
  4961. {
  4962. byte x[AES_BLOCK_SIZE];
  4963. byte scratch[AES_BLOCK_SIZE];
  4964. word32 blocks, partial;
  4965. if (gcm == NULL) {
  4966. return;
  4967. }
  4968. XMEMSET(x, 0, AES_BLOCK_SIZE);
  4969. /* Hash in A, the Additional Authentication Data */
  4970. if (aSz != 0 && a != NULL) {
  4971. blocks = aSz / AES_BLOCK_SIZE;
  4972. partial = aSz % AES_BLOCK_SIZE;
  4973. while (blocks--) {
  4974. xorbuf(x, a, AES_BLOCK_SIZE);
  4975. GMULT(x, gcm->M0);
  4976. a += AES_BLOCK_SIZE;
  4977. }
  4978. if (partial != 0) {
  4979. XMEMSET(scratch, 0, AES_BLOCK_SIZE);
  4980. XMEMCPY(scratch, a, partial);
  4981. xorbuf(x, scratch, AES_BLOCK_SIZE);
  4982. GMULT(x, gcm->M0);
  4983. }
  4984. }
  4985. /* Hash in C, the Ciphertext */
  4986. if (cSz != 0 && c != NULL) {
  4987. blocks = cSz / AES_BLOCK_SIZE;
  4988. partial = cSz % AES_BLOCK_SIZE;
  4989. while (blocks--) {
  4990. xorbuf(x, c, AES_BLOCK_SIZE);
  4991. GMULT(x, gcm->M0);
  4992. c += AES_BLOCK_SIZE;
  4993. }
  4994. if (partial != 0) {
  4995. XMEMSET(scratch, 0, AES_BLOCK_SIZE);
  4996. XMEMCPY(scratch, c, partial);
  4997. xorbuf(x, scratch, AES_BLOCK_SIZE);
  4998. GMULT(x, gcm->M0);
  4999. }
  5000. }
  5001. /* Hash in the lengths of A and C in bits */
  5002. FlattenSzInBits(&scratch[0], aSz);
  5003. FlattenSzInBits(&scratch[8], cSz);
  5004. xorbuf(x, scratch, AES_BLOCK_SIZE);
  5005. GMULT(x, gcm->M0);
  5006. /* Copy the result into s. */
  5007. XMEMCPY(s, x, sSz);
  5008. }
#ifdef WOLFSSL_AESGCM_STREAM
/* No extra initialization needed for the 4-bit table implementation.
 *
 * @param [in] aes AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) WC_DO_NOTHING

/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H using the pre-computed table.
 *
 * @param [in, out] aes   AES GCM object.
 * @param [in]      block Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                  \
    do {                                             \
        xorbuf(AES_TAG(aes), block, AES_BLOCK_SIZE); \
        GMULT(AES_TAG(aes), (aes)->gcm.M0);          \
    }                                                \
    while (0)
#endif /* WOLFSSL_AESGCM_STREAM */
  5029. #elif defined(WORD64_AVAILABLE) && !defined(GCM_WORD32)
  5030. #if !defined(FREESCALE_LTC_AES_GCM)
  5031. static void GMULT(word64* X, word64* Y)
  5032. {
  5033. word64 Z[2] = {0,0};
  5034. word64 V[2];
  5035. int i, j;
  5036. word64 v1;
  5037. V[0] = X[0]; V[1] = X[1];
  5038. for (i = 0; i < 2; i++)
  5039. {
  5040. word64 y = Y[i];
  5041. for (j = 0; j < 64; j++)
  5042. {
  5043. #ifndef AES_GCM_GMULT_NCT
  5044. word64 mask = 0 - (y >> 63);
  5045. Z[0] ^= V[0] & mask;
  5046. Z[1] ^= V[1] & mask;
  5047. #else
  5048. if (y & 0x8000000000000000ULL) {
  5049. Z[0] ^= V[0];
  5050. Z[1] ^= V[1];
  5051. }
  5052. #endif
  5053. v1 = (0 - (V[1] & 1)) & 0xE100000000000000ULL;
  5054. V[1] >>= 1;
  5055. V[1] |= V[0] << 63;
  5056. V[0] >>= 1;
  5057. V[0] ^= v1;
  5058. y <<= 1;
  5059. }
  5060. }
  5061. X[0] = Z[0];
  5062. X[1] = Z[1];
  5063. }
/* GHASH(a, c): hash the AAD and ciphertext, then the bit lengths, per
 * NIST SP 800-38D.  64-bit word implementation; data is byte-reversed
 * into big-endian word64 pairs on little-endian CPUs.
 * With OPENSSL_EXTRA, a partial tag is saved after the AAD so AAD and
 * ciphertext can be supplied in separate calls (EVP compatibility). */
void GHASH(Gcm* gcm, const byte* a, word32 aSz, const byte* c,
    word32 cSz, byte* s, word32 sSz)
{
    word64 x[2] = {0,0};
    word32 blocks, partial;
    word64 bigH[2];

    if (gcm == NULL) {
        return;
    }

    /* Load H as two big-endian 64-bit words. */
    XMEMCPY(bigH, gcm->H, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
    ByteReverseWords64(bigH, bigH, AES_BLOCK_SIZE);
#endif

    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        word64 bigA[2];
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        while (blocks--) {
            XMEMCPY(bigA, a, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigA, bigA, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigA[0];
            x[1] ^= bigA[1];
            GMULT(x, bigH);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Zero-pad the trailing partial block. */
            XMEMSET(bigA, 0, AES_BLOCK_SIZE);
            XMEMCPY(bigA, a, partial);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigA, bigA, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigA[0];
            x[1] ^= bigA[1];
            GMULT(x, bigH);
        }
#ifdef OPENSSL_EXTRA
        /* store AAD partial tag for next call */
        gcm->aadH[0] = (word32)((x[0] & 0xFFFFFFFF00000000ULL) >> 32);
        gcm->aadH[1] = (word32)(x[0] & 0xFFFFFFFF);
        gcm->aadH[2] = (word32)((x[1] & 0xFFFFFFFF00000000ULL) >> 32);
        gcm->aadH[3] = (word32)(x[1] & 0xFFFFFFFF);
#endif
    }

    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        word64 bigC[2];
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
#ifdef OPENSSL_EXTRA
        /* Start from last AAD partial tag */
        if(gcm->aadLen) {
            x[0] = ((word64)gcm->aadH[0]) << 32 | gcm->aadH[1];
            x[1] = ((word64)gcm->aadH[2]) << 32 | gcm->aadH[3];
        }
#endif
        while (blocks--) {
            XMEMCPY(bigC, c, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigC, bigC, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigC[0];
            x[1] ^= bigC[1];
            GMULT(x, bigH);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            XMEMSET(bigC, 0, AES_BLOCK_SIZE);
            XMEMCPY(bigC, c, partial);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigC, bigC, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigC[0];
            x[1] ^= bigC[1];
            GMULT(x, bigH);
        }
    }

    /* Hash in the lengths in bits of A and C */
    {
        word64 len[2];
        len[0] = aSz; len[1] = cSz;
#ifdef OPENSSL_EXTRA
        /* aadLen from an earlier separate-AAD call takes precedence.
         * NOTE(review): presumably set by the EVP layer - verify against
         * callers. */
        if (gcm->aadLen)
            len[0] = (word64)gcm->aadLen;
#endif
        /* Lengths are in bytes. Convert to bits. */
        len[0] *= 8;
        len[1] *= 8;

        x[0] ^= len[0];
        x[1] ^= len[1];
        GMULT(x, bigH);
    }
#ifdef LITTLE_ENDIAN_ORDER
    ByteReverseWords64(x, x, AES_BLOCK_SIZE);
#endif
    /* Copy the first sSz bytes of the result into s. */
    XMEMCPY(s, x, sSz);
}
  5163. #endif /* !FREESCALE_LTC_AES_GCM */
#ifdef WOLFSSL_AESGCM_STREAM
#ifdef LITTLE_ENDIAN_ORDER
/* Streaming initialization: byte-reverse H once up front so the
 * per-block word64 GMULT can work on big-endian words directly.
 *
 * @param [in] aes AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) \
    ByteReverseWords64((word64*)aes->gcm.H, (word64*)aes->gcm.H, AES_BLOCK_SIZE)

/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H.
 *
 * @param [in, out] aes   AES GCM object.
 * @param [in]      block Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                         \
    do {                                                    \
        word64* x = (word64*)AES_TAG(aes);                  \
        word64* h = (word64*)aes->gcm.H;                    \
        word64 block64[2];                                  \
        XMEMCPY(block64, block, AES_BLOCK_SIZE);            \
        ByteReverseWords64(block64, block64, AES_BLOCK_SIZE); \
        x[0] ^= block64[0];                                 \
        x[1] ^= block64[1];                                 \
        GMULT(x, h);                                        \
    }                                                       \
    while (0)
#ifdef OPENSSL_EXTRA
/* GHASH in AAD and cipher text lengths in bits.
 *
 * Convert tag back to little-endian.
 *
 * @param [in, out] aes AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                    \
    do {                                        \
        word64* x = (word64*)AES_TAG(aes);      \
        word64* h = (word64*)aes->gcm.H;        \
        word64 len[2];                          \
        len[0] = aes->aSz; len[1] = aes->cSz;   \
        if (aes->gcm.aadLen)                    \
            len[0] = (word64)aes->gcm.aadLen;   \
        /* Lengths are in bytes. Convert to bits. */ \
        len[0] *= 8;                            \
        len[1] *= 8;                            \
                                                \
        x[0] ^= len[0];                         \
        x[1] ^= len[1];                         \
        GMULT(x, h);                            \
        ByteReverseWords64(x, x, AES_BLOCK_SIZE); \
    }                                           \
    while (0)
  5216. #else
  5217. /* GHASH in AAD and cipher text lengths in bits.
  5218. *
  5219. * Convert tag back to little-endian.
  5220. *
  5221. * @param [in, out] aes AES GCM object.
  5222. */
  5223. #define GHASH_LEN_BLOCK(aes) \
  5224. do { \
  5225. word64* x = (word64*)AES_TAG(aes); \
  5226. word64* h = (word64*)aes->gcm.H; \
  5227. word64 len[2]; \
  5228. len[0] = aes->aSz; len[1] = aes->cSz; \
  5229. /* Lengths are in bytes. Convert to bits. */ \
  5230. len[0] *= 8; \
  5231. len[1] *= 8; \
  5232. \
  5233. x[0] ^= len[0]; \
  5234. x[1] ^= len[1]; \
  5235. GMULT(x, h); \
  5236. ByteReverseWords64(x, x, AES_BLOCK_SIZE); \
  5237. } \
  5238. while (0)
  5239. #endif
  5240. #else
  5241. /* No extra initialization for small implementation.
  5242. *
  5243. * @param [in] aes AES GCM object.
  5244. */
  5245. #define GHASH_INIT_EXTRA(aes) WC_DO_NOTHING
  5246. /* GHASH one block of data..
  5247. *
  5248. * XOR block into tag and GMULT with H.
  5249. *
  5250. * @param [in, out] aes AES GCM object.
  5251. * @param [in] block Block of AAD or cipher text.
  5252. */
  5253. #define GHASH_ONE_BLOCK(aes, block) \
  5254. do { \
  5255. word64* x = (word64*)AES_TAG(aes); \
  5256. word64* h = (word64*)aes->gcm.H; \
  5257. word64 block64[2]; \
  5258. XMEMCPY(block64, block, AES_BLOCK_SIZE); \
  5259. x[0] ^= block64[0]; \
  5260. x[1] ^= block64[1]; \
  5261. GMULT(x, h); \
  5262. } \
  5263. while (0)
  5264. #ifdef OPENSSL_EXTRA
  5265. /* GHASH in AAD and cipher text lengths in bits.
  5266. *
  5267. * Convert tag back to little-endian.
  5268. *
  5269. * @param [in, out] aes AES GCM object.
  5270. */
  5271. #define GHASH_LEN_BLOCK(aes) \
  5272. do { \
  5273. word64* x = (word64*)AES_TAG(aes); \
  5274. word64* h = (word64*)aes->gcm.H; \
  5275. word64 len[2]; \
  5276. len[0] = aes->aSz; len[1] = aes->cSz; \
  5277. if (aes->gcm.aadLen) \
  5278. len[0] = (word64)aes->gcm.aadLen; \
  5279. /* Lengths are in bytes. Convert to bits. */ \
  5280. len[0] *= 8; \
  5281. len[1] *= 8; \
  5282. \
  5283. x[0] ^= len[0]; \
  5284. x[1] ^= len[1]; \
  5285. GMULT(x, h); \
  5286. } \
  5287. while (0)
  5288. #else
  5289. /* GHASH in AAD and cipher text lengths in bits.
  5290. *
  5291. * Convert tag back to little-endian.
  5292. *
  5293. * @param [in, out] aes AES GCM object.
  5294. */
  5295. #define GHASH_LEN_BLOCK(aes) \
  5296. do { \
  5297. word64* x = (word64*)AES_TAG(aes); \
  5298. word64* h = (word64*)aes->gcm.H; \
  5299. word64 len[2]; \
  5300. len[0] = aes->aSz; len[1] = aes->cSz; \
  5301. /* Lengths are in bytes. Convert to bits. */ \
  5302. len[0] *= 8; \
  5303. len[1] *= 8; \
  5304. \
  5305. x[0] ^= len[0]; \
  5306. x[1] ^= len[1]; \
  5307. GMULT(x, h); \
  5308. } \
  5309. while (0)
  5310. #endif
  5311. #endif /* !LITTLE_ENDIAN_ORDER */
  5312. #endif /* WOLFSSL_AESGCM_STREAM */
  5313. /* end defined(WORD64_AVAILABLE) && !defined(GCM_WORD32) */
  5314. #else /* GCM_WORD32 */
  5315. static void GMULT(word32* X, word32* Y)
  5316. {
  5317. word32 Z[4] = {0,0,0,0};
  5318. word32 V[4];
  5319. int i, j;
  5320. V[0] = X[0]; V[1] = X[1]; V[2] = X[2]; V[3] = X[3];
  5321. for (i = 0; i < 4; i++)
  5322. {
  5323. word32 y = Y[i];
  5324. for (j = 0; j < 32; j++)
  5325. {
  5326. if (y & 0x80000000) {
  5327. Z[0] ^= V[0];
  5328. Z[1] ^= V[1];
  5329. Z[2] ^= V[2];
  5330. Z[3] ^= V[3];
  5331. }
  5332. if (V[3] & 0x00000001) {
  5333. V[3] >>= 1;
  5334. V[3] |= ((V[2] & 0x00000001) ? 0x80000000 : 0);
  5335. V[2] >>= 1;
  5336. V[2] |= ((V[1] & 0x00000001) ? 0x80000000 : 0);
  5337. V[1] >>= 1;
  5338. V[1] |= ((V[0] & 0x00000001) ? 0x80000000 : 0);
  5339. V[0] >>= 1;
  5340. V[0] ^= 0xE1000000;
  5341. } else {
  5342. V[3] >>= 1;
  5343. V[3] |= ((V[2] & 0x00000001) ? 0x80000000 : 0);
  5344. V[2] >>= 1;
  5345. V[2] |= ((V[1] & 0x00000001) ? 0x80000000 : 0);
  5346. V[1] >>= 1;
  5347. V[1] |= ((V[0] & 0x00000001) ? 0x80000000 : 0);
  5348. V[0] >>= 1;
  5349. }
  5350. y <<= 1;
  5351. }
  5352. }
  5353. X[0] = Z[0];
  5354. X[1] = Z[1];
  5355. X[2] = Z[2];
  5356. X[3] = Z[3];
  5357. }
/* One-shot GHASH over the AAD and cipher text, 32-bit word implementation.
 *
 * Computes S = GHASH_H(A || pad || C || pad || len(A) || len(C)) per
 * NIST SP 800-38D, where lengths are in bits.
 *
 * @param [in]  gcm  GCM object holding the hash key H.
 * @param [in]  a    Additional authentication data (AAD). May be NULL.
 * @param [in]  aSz  Length of AAD in bytes.
 * @param [in]  c    Cipher text. May be NULL.
 * @param [in]  cSz  Length of cipher text in bytes.
 * @param [out] s    Output hash. Must be able to hold sSz bytes.
 * @param [in]  sSz  Number of bytes of the hash to write out.
 */
void GHASH(Gcm* gcm, const byte* a, word32 aSz, const byte* c,
    word32 cSz, byte* s, word32 sSz)
{
    word32 x[4] = {0,0,0,0};
    word32 blocks, partial;
    word32 bigH[4];
    if (gcm == NULL) {
        return;
    }
    /* Load H as big-endian words for GMULT. */
    XMEMCPY(bigH, gcm->H, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
    ByteReverseWords(bigH, bigH, AES_BLOCK_SIZE);
#endif
    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        word32 bigA[4];
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        while (blocks--) {
            XMEMCPY(bigA, a, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords(bigA, bigA, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigA[0];
            x[1] ^= bigA[1];
            x[2] ^= bigA[2];
            x[3] ^= bigA[3];
            GMULT(x, bigH);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Final partial AAD block is zero padded. */
            XMEMSET(bigA, 0, AES_BLOCK_SIZE);
            XMEMCPY(bigA, a, partial);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords(bigA, bigA, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigA[0];
            x[1] ^= bigA[1];
            x[2] ^= bigA[2];
            x[3] ^= bigA[3];
            GMULT(x, bigH);
        }
    }
    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        word32 bigC[4];
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        while (blocks--) {
            XMEMCPY(bigC, c, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords(bigC, bigC, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigC[0];
            x[1] ^= bigC[1];
            x[2] ^= bigC[2];
            x[3] ^= bigC[3];
            GMULT(x, bigH);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Final partial cipher text block is zero padded. */
            XMEMSET(bigC, 0, AES_BLOCK_SIZE);
            XMEMCPY(bigC, c, partial);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords(bigC, bigC, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigC[0];
            x[1] ^= bigC[1];
            x[2] ^= bigC[2];
            x[3] ^= bigC[3];
            GMULT(x, bigH);
        }
    }
    /* Hash in the lengths in bits of A and C */
    {
        word32 len[4];
        /* Lengths are in bytes. Convert to bits: each 64-bit length is
         * split into a high word (top 3 bits of the byte count) and a low
         * word (byte count << 3). */
        len[0] = (aSz >> (8*sizeof(aSz) - 3));
        len[1] = aSz << 3;
        len[2] = (cSz >> (8*sizeof(cSz) - 3));
        len[3] = cSz << 3;
        x[0] ^= len[0];
        x[1] ^= len[1];
        x[2] ^= len[2];
        x[3] ^= len[3];
        GMULT(x, bigH);
    }
#ifdef LITTLE_ENDIAN_ORDER
    ByteReverseWords(x, x, AES_BLOCK_SIZE);
#endif
    XMEMCPY(s, x, sSz);
}
#ifdef WOLFSSL_AESGCM_STREAM
#ifdef LITTLE_ENDIAN_ORDER
/* Little-endian 32-bit word implementation requires byte reversal of H.
 *
 * H is all-zeros block encrypted with key.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) \
    ByteReverseWords((word32*)aes->gcm.H, (word32*)aes->gcm.H, AES_BLOCK_SIZE)
/* GHASH one block of data.
 *
 * XOR block, in big-endian form, into tag and GMULT with H.
 *
 * @param [in, out] aes    AES GCM object.
 * @param [in]      block  Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                       \
    do {                                                  \
        word32* x = (word32*)AES_TAG(aes);                \
        word32* h = (word32*)aes->gcm.H;                  \
        word32 bigEnd[4];                                 \
        XMEMCPY(bigEnd, block, AES_BLOCK_SIZE);           \
        ByteReverseWords(bigEnd, bigEnd, AES_BLOCK_SIZE); \
        x[0] ^= bigEnd[0];                                \
        x[1] ^= bigEnd[1];                                \
        x[2] ^= bigEnd[2];                                \
        x[3] ^= bigEnd[3];                                \
        GMULT(x, h);                                      \
    }                                                     \
    while (0)
/* GHASH in AAD and cipher text lengths in bits.
 *
 * Convert tag back to little-endian.
 * Each 64-bit bit-length is stored as two 32-bit words (high, low).
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                             \
    do {                                                 \
        word32 len[4];                                   \
        word32* x = (word32*)AES_TAG(aes);               \
        word32* h = (word32*)aes->gcm.H;                 \
        len[0] = (aes->aSz >> (8*sizeof(aes->aSz) - 3)); \
        len[1] = aes->aSz << 3;                          \
        len[2] = (aes->cSz >> (8*sizeof(aes->cSz) - 3)); \
        len[3] = aes->cSz << 3;                          \
        x[0] ^= len[0];                                  \
        x[1] ^= len[1];                                  \
        x[2] ^= len[2];                                  \
        x[3] ^= len[3];                                  \
        GMULT(x, h);                                     \
        ByteReverseWords(x, x, AES_BLOCK_SIZE);          \
    }                                                    \
    while (0)
#else
/* No extra initialization for 32-bit word implementation.
 *
 * @param [in] aes  AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) WC_DO_NOTHING
/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H.
 *
 * @param [in, out] aes    AES GCM object.
 * @param [in]      block  Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)              \
    do {                                         \
        word32* x = (word32*)AES_TAG(aes);       \
        word32* h = (word32*)aes->gcm.H;         \
        word32 block32[4];                       \
        XMEMCPY(block32, block, AES_BLOCK_SIZE); \
        x[0] ^= block32[0];                      \
        x[1] ^= block32[1];                      \
        x[2] ^= block32[2];                      \
        x[3] ^= block32[3];                      \
        GMULT(x, h);                             \
    }                                            \
    while (0)
/* GHASH in AAD and cipher text lengths in bits.
 *
 * Each 64-bit bit-length is stored as two 32-bit words (high, low).
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                             \
    do {                                                 \
        word32 len[4];                                   \
        word32* x = (word32*)AES_TAG(aes);               \
        word32* h = (word32*)aes->gcm.H;                 \
        len[0] = (aes->aSz >> (8*sizeof(aes->aSz) - 3)); \
        len[1] = aes->aSz << 3;                          \
        len[2] = (aes->cSz >> (8*sizeof(aes->cSz) - 3)); \
        len[3] = aes->cSz << 3;                          \
        x[0] ^= len[0];                                  \
        x[1] ^= len[1];                                  \
        x[2] ^= len[2];                                  \
        x[3] ^= len[3];                                  \
        GMULT(x, h);                                     \
    }                                                    \
    while (0)
#endif /* LITTLE_ENDIAN_ORDER */
#endif /* WOLFSSL_AESGCM_STREAM */
#endif /* end GCM_WORD32 */
#if !defined(WOLFSSL_XILINX_CRYPT) && !defined(WOLFSSL_AFALG_XILINX_AES)
#ifdef WOLFSSL_AESGCM_STREAM
#ifndef GHASH_LEN_BLOCK
/* Hash in the lengths of the AAD and cipher text in bits.
 *
 * Default implementation used when no implementation-specific version was
 * defined above. Builds the length block with FlattenSzInBits (AAD bit
 * length in the first 8 bytes, cipher text bit length in the last 8) and
 * hashes it like a normal block.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                      \
    do {                                          \
        byte scratch[AES_BLOCK_SIZE];             \
        FlattenSzInBits(&scratch[0], (aes)->aSz); \
        FlattenSzInBits(&scratch[8], (aes)->cSz); \
        GHASH_ONE_BLOCK(aes, scratch);            \
    }                                             \
    while (0)
#endif
  5571. /* Initialize a GHASH for streaming operations.
  5572. *
  5573. * @param [in, out] aes AES GCM object.
  5574. */
  5575. static void GHASH_INIT(Aes* aes) {
  5576. /* Set tag to all zeros as initial value. */
  5577. XMEMSET(AES_TAG(aes), 0, AES_BLOCK_SIZE);
  5578. /* Reset counts of AAD and cipher text. */
  5579. aes->aOver = 0;
  5580. aes->cOver = 0;
  5581. /* Extra initialization based on implementation. */
  5582. GHASH_INIT_EXTRA(aes);
  5583. }
/* Update the GHASH with AAD and/or cipher text.
 *
 * Partial blocks are cached in AES_LASTGBLOCK() so data may arrive in
 * arbitrarily sized pieces. All AAD must be supplied before any cipher
 * text; the first call with cipher text flushes any buffered AAD.
 *
 * @param [in,out] aes  AES GCM object.
 * @param [in]     a    Additional authentication data buffer.
 * @param [in]     aSz  Size of data in AAD buffer.
 * @param [in]     c    Cipher text buffer.
 * @param [in]     cSz  Size of data in cipher text buffer.
 */
static void GHASH_UPDATE(Aes* aes, const byte* a, word32 aSz, const byte* c,
    word32 cSz)
{
    word32 blocks;
    word32 partial;
    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        /* Update count of AAD we have hashed. */
        aes->aSz += aSz;
        /* Check if we have unprocessed data. */
        if (aes->aOver > 0) {
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->aOver;
            if (sz > aSz) {
                sz = (byte)aSz;
            }
            /* Copy extra into last GHASH block array and update count. */
            XMEMCPY(AES_LASTGBLOCK(aes) + aes->aOver, a, sz);
            aes->aOver += sz;
            if (aes->aOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
                GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
                /* Reset count. */
                aes->aOver = 0;
            }
            /* Used up some data. */
            aSz -= sz;
            a += sz;
        }
        /* Calculate number of blocks of AAD and the leftover. */
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        /* GHASH full blocks now. */
        while (blocks--) {
            GHASH_ONE_BLOCK(aes, a);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Cache the partial block. */
            XMEMCPY(AES_LASTGBLOCK(aes), a, partial);
            aes->aOver = (byte)partial;
        }
    }
    if (aes->aOver > 0 && cSz > 0 && c != NULL) {
        /* No more AAD coming and we have a partial block. */
        /* Fill the rest of the block with zeros. */
        byte sz = AES_BLOCK_SIZE - aes->aOver;
        XMEMSET(AES_LASTGBLOCK(aes) + aes->aOver, 0, sz);
        /* GHASH last AAD block. */
        GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
        /* Clear partial count for next time through. */
        aes->aOver = 0;
    }
    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        /* Update count of cipher text we have hashed. */
        aes->cSz += cSz;
        if (aes->cOver > 0) {
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->cOver;
            if (sz > cSz) {
                sz = (byte)cSz;
            }
            XMEMCPY(AES_LASTGBLOCK(aes) + aes->cOver, c, sz);
            /* Update count of unused encrypted counter. */
            aes->cOver += sz;
            if (aes->cOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
                GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
                /* Reset count. */
                aes->cOver = 0;
            }
            /* Used up some data. */
            cSz -= sz;
            c += sz;
        }
        /* Calculate number of blocks of cipher text and the leftover. */
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        /* GHASH full blocks now. */
        while (blocks--) {
            GHASH_ONE_BLOCK(aes, c);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Cache the partial block. */
            XMEMCPY(AES_LASTGBLOCK(aes), c, partial);
            aes->cOver = (byte)partial;
        }
    }
}
  5683. /* Finalize the GHASH calculation.
  5684. *
  5685. * Complete hashing cipher text and hash the AAD and cipher text lengths.
  5686. *
  5687. * @param [in, out] aes AES GCM object.
  5688. * @param [out] s Authentication tag.
  5689. * @param [in] sSz Size of authentication tag required.
  5690. */
  5691. static void GHASH_FINAL(Aes* aes, byte* s, word32 sSz)
  5692. {
  5693. /* AAD block incomplete when > 0 */
  5694. byte over = aes->aOver;
  5695. if (aes->cOver > 0) {
  5696. /* Cipher text block incomplete. */
  5697. over = aes->cOver;
  5698. }
  5699. if (over > 0) {
  5700. /* Zeroize the unused part of the block. */
  5701. XMEMSET(AES_LASTGBLOCK(aes) + over, 0, AES_BLOCK_SIZE - over);
  5702. /* Hash the last block of cipher text. */
  5703. GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
  5704. }
  5705. /* Hash in the lengths of AAD and cipher text in bits */
  5706. GHASH_LEN_BLOCK(aes);
  5707. /* Copy the result into s. */
  5708. XMEMCPY(s, AES_TAG(aes), sSz);
  5709. }
  5710. #endif /* WOLFSSL_AESGCM_STREAM */
  5711. #ifdef FREESCALE_LTC_AES_GCM
  5712. int wc_AesGcmEncrypt(Aes* aes, byte* out, const byte* in, word32 sz,
  5713. const byte* iv, word32 ivSz,
  5714. byte* authTag, word32 authTagSz,
  5715. const byte* authIn, word32 authInSz)
  5716. {
  5717. status_t status;
  5718. word32 keySize;
  5719. /* argument checks */
  5720. if (aes == NULL || authTagSz > AES_BLOCK_SIZE || ivSz == 0) {
  5721. return BAD_FUNC_ARG;
  5722. }
  5723. if (authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ) {
  5724. WOLFSSL_MSG("GcmEncrypt authTagSz too small error");
  5725. return BAD_FUNC_ARG;
  5726. }
  5727. status = wc_AesGetKeySize(aes, &keySize);
  5728. if (status)
  5729. return status;
  5730. status = wolfSSL_CryptHwMutexLock();
  5731. if (status != 0)
  5732. return status;
  5733. status = LTC_AES_EncryptTagGcm(LTC_BASE, in, out, sz, iv, ivSz,
  5734. authIn, authInSz, (byte*)aes->key, keySize, authTag, authTagSz);
  5735. wolfSSL_CryptHwMutexUnLock();
  5736. return (status == kStatus_Success) ? 0 : AES_GCM_AUTH_E;
  5737. }
  5738. #else
  5739. #ifdef STM32_CRYPTO_AES_GCM
/* this function supports inline encrypt */
/* define STM32_AESGCM_PARTIAL for STM HW that does not support authentication
 * on byte multiples (see CRYP_HEADERWIDTHUNIT_BYTE) */
/* AES-GCM encrypt using the STM32 CRYP hardware peripheral.
 *
 * Falls back to a software GHASH for the authentication tag when the
 * hardware cannot produce it: non-12-byte IV, or (depending on HAL
 * capabilities) partial blocks or AAD not a multiple of 4 bytes - see the
 * useSwGhash selection below.
 *
 * @param [in]  aes        AES object holding the key.
 * @param [out] out        Cipher text output (may equal in for inline).
 * @param [in]  in         Plain text input.
 * @param [in]  sz         Length of plain text in bytes.
 * @param [in]  iv         Initialization vector / nonce.
 * @param [in]  ivSz       Length of IV in bytes.
 * @param [out] authTag    Authentication tag output. May be NULL.
 * @param [in]  authTagSz  Length of tag to produce in bytes.
 * @param [in]  authIn     Additional authentication data (AAD).
 * @param [in]  authInSz   Length of AAD in bytes.
 * @return 0 on success, MEMORY_E on allocation failure, AES_GCM_AUTH_E on
 *         hardware error, or another negative wolfCrypt error code.
 */
static WARN_UNUSED_RESULT int wc_AesGcmEncrypt_STM32(
    Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    int ret;
#ifdef WOLFSSL_STM32_CUBEMX
    CRYP_HandleTypeDef hcryp;
#else
    word32 keyCopy[AES_256_KEY_SIZE/sizeof(word32)];
#endif
    word32 keySize;
#ifdef WOLFSSL_STM32_CUBEMX
    int status = HAL_OK;
    word32 blocks = sz / AES_BLOCK_SIZE;
    word32 partialBlock[AES_BLOCK_SIZE/sizeof(word32)];
#else
    int status = SUCCESS;
#endif
    word32 partial = sz % AES_BLOCK_SIZE;
    word32 tag[AES_BLOCK_SIZE/sizeof(word32)];
    word32 ctrInit[AES_BLOCK_SIZE/sizeof(word32)];
    word32 ctr[AES_BLOCK_SIZE/sizeof(word32)];
    word32 authhdr[AES_BLOCK_SIZE/sizeof(word32)];
    byte* authInPadded = NULL;
    int authPadSz, wasAlloc = 0, useSwGhash = 0;

    ret = wc_AesGetKeySize(aes, &keySize);
    if (ret != 0)
        return ret;

#ifdef WOLFSSL_STM32_CUBEMX
    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0)
        return ret;
#endif

    /* Build the initial counter block J0. */
    XMEMSET(ctr, 0, AES_BLOCK_SIZE);
    if (ivSz == GCM_NONCE_MID_SZ) {
        /* 12-byte IV: J0 = IV || 0^31 || 1. */
        byte* pCtr = (byte*)ctr;
        XMEMCPY(ctr, iv, ivSz);
        pCtr[AES_BLOCK_SIZE - 1] = 1;
    }
    else {
        /* Other IV sizes: J0 = GHASH(IV). */
        GHASH(&aes->gcm, NULL, 0, iv, ivSz, (byte*)ctr, AES_BLOCK_SIZE);
    }
    XMEMCPY(ctrInit, ctr, sizeof(ctr)); /* save off initial counter for GMAC */

    /* Authentication buffer - must be 4-byte multiple zero padded */
    authPadSz = authInSz % sizeof(word32);
    if (authPadSz != 0) {
        authPadSz = authInSz + sizeof(word32) - authPadSz;
        if (authPadSz <= sizeof(authhdr)) {
            /* Small AAD fits in the stack buffer. */
            authInPadded = (byte*)authhdr;
        }
        else {
            authInPadded = (byte*)XMALLOC(authPadSz, aes->heap,
                DYNAMIC_TYPE_TMP_BUFFER);
            if (authInPadded == NULL) {
                /* NOTE(review): the hardware mutex has not been locked yet
                 * at this point; this unlock looks unbalanced - confirm. */
                wolfSSL_CryptHwMutexUnLock();
                return MEMORY_E;
            }
            wasAlloc = 1;
        }
        XMEMSET(authInPadded, 0, authPadSz);
        XMEMCPY(authInPadded, authIn, authInSz);
    } else {
        authPadSz = authInSz;
        authInPadded = (byte*)authIn;
    }

    /* for cases where hardware cannot be used for authTag calculate it */
    /* if IV is not 12 calculate GHASH using software */
    if (ivSz != GCM_NONCE_MID_SZ
    #ifndef CRYP_HEADERWIDTHUNIT_BYTE
        /* or hardware that does not support partial block */
        || sz == 0 || partial != 0
    #endif
    #if !defined(CRYP_HEADERWIDTHUNIT_BYTE) && !defined(STM32_AESGCM_PARTIAL)
        /* or authIn is not a multiple of 4 */
        || authPadSz != authInSz
    #endif
    ) {
        useSwGhash = 1;
    }

    /* Hardware requires counter + 1 */
    IncrementGcmCounter((byte*)ctr);

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }
#ifdef WOLFSSL_STM32_CUBEMX
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
    hcryp.Init.Header = (STM_CRYPT_TYPE*)authInPadded;

#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_GCM;
    #ifdef CRYP_HEADERWIDTHUNIT_BYTE
    /* V2 with CRYP_HEADERWIDTHUNIT_BYTE uses byte size for header */
    hcryp.Init.HeaderSize = authInSz;
    #else
    hcryp.Init.HeaderSize = authPadSz/sizeof(word32);
    #endif
    #ifdef CRYP_KEYIVCONFIG_ONCE
    /* allows repeated calls to HAL_CRYP_Encrypt */
    hcryp.Init.KeyIVConfigSkip = CRYP_KEYIVCONFIG_ONCE;
    #endif
    /* HAL V2 expects the counter words byte reversed. */
    ByteReverseWords(ctr, ctr, AES_BLOCK_SIZE);
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
    HAL_CRYP_Init(&hcryp);

    #ifndef CRYP_KEYIVCONFIG_ONCE
    /* GCM payload phase - can handle partial blocks */
    status = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in,
        (blocks * AES_BLOCK_SIZE) + partial, (uint32_t*)out, STM32_HAL_TIMEOUT);
    #else
    /* GCM payload phase - blocks */
    if (blocks) {
        status = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in,
            (blocks * AES_BLOCK_SIZE), (uint32_t*)out, STM32_HAL_TIMEOUT);
    }
    /* GCM payload phase - partial remainder */
    if (status == HAL_OK && (partial != 0 || blocks == 0)) {
        XMEMSET(partialBlock, 0, sizeof(partialBlock));
        XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
        status = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)partialBlock, partial,
            (uint32_t*)partialBlock, STM32_HAL_TIMEOUT);
        XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
    }
    #endif
    if (status == HAL_OK && !useSwGhash) {
        /* Compute the authTag */
        status = HAL_CRYPEx_AESGCM_GenerateAuthTAG(&hcryp, (uint32_t*)tag,
            STM32_HAL_TIMEOUT);
    }
#elif defined(STM32_CRYPTO_AES_ONLY)
    /* Set the CRYP parameters */
    hcryp.Init.HeaderSize = authPadSz;
    if (authPadSz == 0)
        hcryp.Init.Header = NULL; /* cannot pass pointer here when authIn == 0 */
    hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_GCM_GMAC;
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
    hcryp.Init.GCMCMACPhase = CRYP_INIT_PHASE;
    HAL_CRYP_Init(&hcryp);

    /* GCM init phase */
    status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
    if (status == HAL_OK) {
        /* GCM header phase */
        hcryp.Init.GCMCMACPhase = CRYP_HEADER_PHASE;
        status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
    }
    if (status == HAL_OK) {
        /* GCM payload phase - blocks */
        hcryp.Init.GCMCMACPhase = CRYP_PAYLOAD_PHASE;
        if (blocks) {
            status = HAL_CRYPEx_AES_Auth(&hcryp, (byte*)in,
                (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
        }
    }
    if (status == HAL_OK && (partial != 0 || (sz > 0 && blocks == 0))) {
        /* GCM payload phase - partial remainder */
        XMEMSET(partialBlock, 0, sizeof(partialBlock));
        XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
        status = HAL_CRYPEx_AES_Auth(&hcryp, (uint8_t*)partialBlock, partial,
            (uint8_t*)partialBlock, STM32_HAL_TIMEOUT);
        XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
    }
    if (status == HAL_OK && !useSwGhash) {
        /* GCM final phase */
        hcryp.Init.GCMCMACPhase = CRYP_FINAL_PHASE;
        status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, sz, (uint8_t*)tag, STM32_HAL_TIMEOUT);
    }
#else
    hcryp.Init.HeaderSize = authPadSz;
    HAL_CRYP_Init(&hcryp);
    if (blocks) {
        /* GCM payload phase - blocks */
        status = HAL_CRYPEx_AESGCM_Encrypt(&hcryp, (byte*)in,
            (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
    }
    if (status == HAL_OK && (partial != 0 || blocks == 0)) {
        /* GCM payload phase - partial remainder */
        XMEMSET(partialBlock, 0, sizeof(partialBlock));
        XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
        status = HAL_CRYPEx_AESGCM_Encrypt(&hcryp, (uint8_t*)partialBlock, partial,
            (uint8_t*)partialBlock, STM32_HAL_TIMEOUT);
        XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
    }
    if (status == HAL_OK && !useSwGhash) {
        /* Compute the authTag */
        status = HAL_CRYPEx_AESGCM_Finish(&hcryp, sz, (uint8_t*)tag, STM32_HAL_TIMEOUT);
    }
#endif

    if (status != HAL_OK)
        ret = AES_GCM_AUTH_E;
    HAL_CRYP_DeInit(&hcryp);

#else /* Standard Peripheral Library */
    ByteReverseWords(keyCopy, (word32*)aes->key, keySize);
    status = CRYP_AES_GCM(MODE_ENCRYPT, (uint8_t*)ctr,
                          (uint8_t*)keyCopy, keySize * 8,
                          (uint8_t*)in, sz,
                          (uint8_t*)authInPadded, authInSz,
                          (uint8_t*)out, (uint8_t*)tag);
    if (status != SUCCESS)
        ret = AES_GCM_AUTH_E;
#endif /* WOLFSSL_STM32_CUBEMX */
    wolfSSL_CryptHwMutexUnLock();
    wc_Stm32_Aes_Cleanup();

    if (ret == 0) {
        /* return authTag */
        if (authTag) {
            if (useSwGhash) {
                /* Software tag: GHASH then XOR with encrypted J0. */
                GHASH(&aes->gcm, authIn, authInSz, out, sz, authTag, authTagSz);
                ret = wc_AesEncrypt(aes, (byte*)ctrInit, (byte*)tag);
                if (ret == 0) {
                    xorbuf(authTag, tag, authTagSz);
                }
            }
            else {
                /* use hardware calculated tag */
                XMEMCPY(authTag, tag, authTagSz);
            }
        }
    }

    /* Free memory */
    if (wasAlloc) {
        XFREE(authInPadded, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
    }

    return ret;
}
  5967. #endif /* STM32_CRYPTO_AES_GCM */
#ifdef WOLFSSL_AESNI
/* For performance reasons, this code needs to be not inlined. */
WARN_UNUSED_RESULT int AES_GCM_encrypt_C(
    Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz);
#else
static
#endif
/* Software AES-GCM encryption: CTR-mode encrypt plus GHASH authentication.
 *
 * @param [in]  aes        AES object holding the expanded key.
 * @param [out] out        Cipher text output. May equal in (inline).
 * @param [in]  in         Plain text input.
 * @param [in]  sz         Length of plain text in bytes.
 * @param [in]  iv         Initialization vector / nonce.
 * @param [in]  ivSz       Length of IV in bytes.
 * @param [out] authTag    Authentication tag output. May be NULL.
 * @param [in]  authTagSz  Length of tag to produce in bytes.
 * @param [in]  authIn     Additional authentication data (AAD).
 * @param [in]  authInSz   Length of AAD in bytes.
 * @return 0 on success, or a negative wolfCrypt error code.
 */
WARN_UNUSED_RESULT int AES_GCM_encrypt_C(
    Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    int ret = 0;
    word32 blocks = sz / AES_BLOCK_SIZE;
    word32 partial = sz % AES_BLOCK_SIZE;
    const byte* p = in;
    byte* c = out;
    ALIGN16 byte counter[AES_BLOCK_SIZE];
    ALIGN16 byte initialCounter[AES_BLOCK_SIZE];
    ALIGN16 byte scratch[AES_BLOCK_SIZE];

    if (ivSz == GCM_NONCE_MID_SZ) {
        /* Counter is IV with bottom 4 bytes set to: 0x00,0x00,0x00,0x01. */
        XMEMCPY(counter, iv, ivSz);
        XMEMSET(counter + GCM_NONCE_MID_SZ, 0,
            AES_BLOCK_SIZE - GCM_NONCE_MID_SZ - 1);
        counter[AES_BLOCK_SIZE - 1] = 1;
    }
    else {
        /* Counter is GHASH of IV. */
#ifdef OPENSSL_EXTRA
        /* Temporarily clear cached AAD length so it is not hashed in. */
        word32 aadTemp = aes->gcm.aadLen;
        aes->gcm.aadLen = 0;
#endif
        GHASH(&aes->gcm, NULL, 0, iv, ivSz, counter, AES_BLOCK_SIZE);
#ifdef OPENSSL_EXTRA
        aes->gcm.aadLen = aadTemp;
#endif
    }
    /* Save J0 for the final tag computation. */
    XMEMCPY(initialCounter, counter, AES_BLOCK_SIZE);

#ifdef WOLFSSL_PIC32MZ_CRYPT
    if (blocks) {
        /* use initial IV for HW, but don't use it below */
        XMEMCPY(aes->reg, counter, AES_BLOCK_SIZE);
        ret = wc_Pic32AesCrypt(
            aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
            out, in, (blocks * AES_BLOCK_SIZE),
            PIC32_ENCRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_AES_GCM);
        if (ret != 0)
            return ret;
    }
    /* process remainder using partial handling */
#endif

#if defined(HAVE_AES_ECB) && !defined(WOLFSSL_PIC32MZ_CRYPT)
    /* some hardware acceleration can gain performance from doing AES encryption
     * of the whole buffer at once */
    if (c != p && blocks > 0) { /* can not handle inline encryption */
        /* Write the counter stream into out, ECB-encrypt it in one call,
         * then XOR the plain text in. */
        while (blocks--) {
            IncrementGcmCounter(counter);
            XMEMCPY(c, counter, AES_BLOCK_SIZE);
            c += AES_BLOCK_SIZE;
        }
        /* reset number of blocks and then do encryption */
        blocks = sz / AES_BLOCK_SIZE;
        /* NOTE(review): return value of wc_AesEcbEncrypt is ignored here -
         * confirm failures cannot occur or propagate the error. */
        wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
        xorbuf(out, p, AES_BLOCK_SIZE * blocks);
        p += AES_BLOCK_SIZE * blocks;
    }
    else
#endif /* HAVE_AES_ECB && !WOLFSSL_PIC32MZ_CRYPT */
    {
        /* Encrypt counter blocks one at a time and XOR with plain text. */
        while (blocks--) {
            IncrementGcmCounter(counter);
#if !defined(WOLFSSL_PIC32MZ_CRYPT)
            ret = wc_AesEncrypt(aes, counter, scratch);
            if (ret != 0)
                return ret;
            xorbufout(c, scratch, p, AES_BLOCK_SIZE);
#endif
            p += AES_BLOCK_SIZE;
            c += AES_BLOCK_SIZE;
        }
    }
    if (partial != 0) {
        /* Final partial block: use only the needed key stream bytes. */
        IncrementGcmCounter(counter);
        ret = wc_AesEncrypt(aes, counter, scratch);
        if (ret != 0)
            return ret;
        xorbufout(c, scratch, p, partial);
    }
    if (authTag) {
        /* Tag = GHASH(AAD, cipher text) XOR E(K, J0). */
        GHASH(&aes->gcm, authIn, authInSz, out, sz, authTag, authTagSz);
        ret = wc_AesEncrypt(aes, initialCounter, scratch);
        if (ret != 0)
            return ret;
        xorbuf(authTag, scratch, authTagSz);
#ifdef OPENSSL_EXTRA
        if (!in && !sz)
            /* store AAD size for next call */
            aes->gcm.aadLen = authInSz;
#endif
    }
    return ret;
}
/* Software AES - GCM Encrypt */
/* Validates arguments then dispatches AES-GCM encryption to the best
 * available backend for this build: crypto callback, async hardware
 * (Cavium Nitrox / Intel QA / software async), Silicon Labs or STM32
 * accelerators, AES-NI assembler (AVX2/AVX1/base), or the portable C
 * implementation AES_GCM_encrypt_C().
 *
 * @param [in]  aes       AES object with key set. Must not be NULL.
 * @param [out] out       Cipher text output buffer.
 * @param [in]  in        Plain text input buffer.
 * @param [in]  sz        Length of plain text in bytes.
 * @param [in]  iv        IV/nonce buffer.
 * @param [in]  ivSz      IV length in bytes; must be non-zero.
 * @param [out] authTag   Buffer to receive the authentication tag.
 * @param [in]  authTagSz Requested tag length; must be in
 *                        [WOLFSSL_MIN_AUTH_TAG_SZ, AES_BLOCK_SIZE].
 * @param [in]  authIn    Additional authenticated data (AAD).
 * @param [in]  authInSz  AAD length in bytes.
 * @return 0 on success, BAD_FUNC_ARG on invalid arguments, WC_PENDING_E
 *         when queued on an async device, else a backend error code.
 */
int wc_AesGcmEncrypt(Aes* aes, byte* out, const byte* in, word32 sz,
                     const byte* iv, word32 ivSz,
                     byte* authTag, word32 authTagSz,
                     const byte* authIn, word32 authInSz)
{
    /* argument checks */
    if (aes == NULL || authTagSz > AES_BLOCK_SIZE || ivSz == 0) {
        return BAD_FUNC_ARG;
    }

    if (authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ) {
        WOLFSSL_MSG("GcmEncrypt authTagSz too small error");
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret =
            wc_CryptoCb_AesGcmEncrypt(aes, out, in, sz, iv, ivSz, authTag,
                                      authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    /* if async and byte count above threshold */
    /* only 12-byte IV is supported in HW */
    if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
            sz >= WC_ASYNC_THRESH_AES_GCM && ivSz == GCM_NONCE_MID_SZ) {
    #if defined(HAVE_CAVIUM)
        #ifdef HAVE_CAVIUM_V
        if (authInSz == 20) { /* Nitrox V GCM is only working with 20 byte AAD */
            return NitroxAesGcmEncrypt(aes, out, in, sz,
                (const byte*)aes->devKey, aes->keylen, iv, ivSz,
                authTag, authTagSz, authIn, authInSz);
        }
        #endif
    #elif defined(HAVE_INTEL_QA)
        return IntelQaSymAesGcmEncrypt(&aes->asyncDev, out, in, sz,
            (const byte*)aes->devKey, aes->keylen, iv, ivSz,
            authTag, authTagSz, authIn, authInSz);
    #elif defined(WOLFSSL_ASYNC_CRYPT_SW)
        if (wc_AsyncSwInit(&aes->asyncDev, ASYNC_SW_AES_GCM_ENCRYPT)) {
            WC_ASYNC_SW* sw = &aes->asyncDev.sw;
            /* stash all arguments for the software async worker */
            sw->aes.aes = aes;
            sw->aes.out = out;
            sw->aes.in = in;
            sw->aes.sz = sz;
            sw->aes.iv = iv;
            sw->aes.ivSz = ivSz;
            sw->aes.authTag = authTag;
            sw->aes.authTagSz = authTagSz;
            sw->aes.authIn = authIn;
            sw->aes.authInSz = authInSz;
            return WC_PENDING_E;
        }
    #endif
    }
#endif /* WOLFSSL_ASYNC_CRYPT */

#ifdef WOLFSSL_SILABS_SE_ACCEL
    return wc_AesGcmEncrypt_silabs(
        aes, out, in, sz,
        iv, ivSz,
        authTag, authTagSz,
        authIn, authInSz);
#endif

#ifdef STM32_CRYPTO_AES_GCM
    return wc_AesGcmEncrypt_STM32(
        aes, out, in, sz, iv, ivSz,
        authTag, authTagSz, authIn, authInSz);
#endif /* STM32_CRYPTO_AES_GCM */

#ifdef WOLFSSL_AESNI
    #ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_encrypt_avx2(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
                             authTagSz, (const byte*)aes->key, (int)aes->rounds);
        RESTORE_VECTOR_REGISTERS();
        return 0;
    }
    else
    #endif
    #if defined(HAVE_INTEL_AVX1)
    if (IS_INTEL_AVX1(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_encrypt_avx1(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
                             authTagSz, (const byte*)aes->key, (int)aes->rounds);
        RESTORE_VECTOR_REGISTERS();
        return 0;
    }
    else
    #endif
    if (haveAESNI) {
        AES_GCM_encrypt(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
                        authTagSz, (const byte*)aes->key, (int)aes->rounds);
        return 0;
    }
    else
#endif
    {
        /* portable software fallback */
        return AES_GCM_encrypt_C(aes, out, in, sz, iv, ivSz, authTag, authTagSz,
                                 authIn, authInSz);
    }
}
  6182. #endif
  6183. /* AES GCM Decrypt */
  6184. #if defined(HAVE_AES_DECRYPT) || defined(HAVE_AESGCM_DECRYPT)
  6185. #ifdef FREESCALE_LTC_AES_GCM
  6186. int wc_AesGcmDecrypt(Aes* aes, byte* out, const byte* in, word32 sz,
  6187. const byte* iv, word32 ivSz,
  6188. const byte* authTag, word32 authTagSz,
  6189. const byte* authIn, word32 authInSz)
  6190. {
  6191. int ret;
  6192. word32 keySize;
  6193. status_t status;
  6194. /* argument checks */
  6195. /* If the sz is non-zero, both in and out must be set. If sz is 0,
  6196. * in and out are don't cares, as this is is the GMAC case. */
  6197. if (aes == NULL || iv == NULL || (sz != 0 && (in == NULL || out == NULL)) ||
  6198. authTag == NULL || authTagSz > AES_BLOCK_SIZE || authTagSz == 0 ||
  6199. ivSz == 0) {
  6200. return BAD_FUNC_ARG;
  6201. }
  6202. ret = wc_AesGetKeySize(aes, &keySize);
  6203. if (ret != 0) {
  6204. return ret;
  6205. }
  6206. status = wolfSSL_CryptHwMutexLock();
  6207. if (status != 0)
  6208. return status;
  6209. status = LTC_AES_DecryptTagGcm(LTC_BASE, in, out, sz, iv, ivSz,
  6210. authIn, authInSz, (byte*)aes->key, keySize, authTag, authTagSz);
  6211. wolfSSL_CryptHwMutexUnLock();
  6212. return (status == kStatus_Success) ? 0 : AES_GCM_AUTH_E;
  6213. }
  6214. #else
  6215. #ifdef STM32_CRYPTO_AES_GCM
  6216. /* this function supports inline decrypt */
  6217. static WARN_UNUSED_RESULT int wc_AesGcmDecrypt_STM32(
  6218. Aes* aes, byte* out,
  6219. const byte* in, word32 sz,
  6220. const byte* iv, word32 ivSz,
  6221. const byte* authTag, word32 authTagSz,
  6222. const byte* authIn, word32 authInSz)
  6223. {
  6224. int ret;
  6225. #ifdef WOLFSSL_STM32_CUBEMX
  6226. int status = HAL_OK;
  6227. CRYP_HandleTypeDef hcryp;
  6228. word32 blocks = sz / AES_BLOCK_SIZE;
  6229. #else
  6230. int status = SUCCESS;
  6231. word32 keyCopy[AES_256_KEY_SIZE/sizeof(word32)];
  6232. #endif
  6233. word32 keySize;
  6234. word32 partial = sz % AES_BLOCK_SIZE;
  6235. word32 tag[AES_BLOCK_SIZE/sizeof(word32)];
  6236. word32 tagExpected[AES_BLOCK_SIZE/sizeof(word32)];
  6237. word32 partialBlock[AES_BLOCK_SIZE/sizeof(word32)];
  6238. word32 ctr[AES_BLOCK_SIZE/sizeof(word32)];
  6239. word32 authhdr[AES_BLOCK_SIZE/sizeof(word32)];
  6240. byte* authInPadded = NULL;
  6241. int authPadSz, wasAlloc = 0, tagComputed = 0;
  6242. ret = wc_AesGetKeySize(aes, &keySize);
  6243. if (ret != 0)
  6244. return ret;
  6245. #ifdef WOLFSSL_STM32_CUBEMX
  6246. ret = wc_Stm32_Aes_Init(aes, &hcryp);
  6247. if (ret != 0)
  6248. return ret;
  6249. #endif
  6250. XMEMSET(ctr, 0, AES_BLOCK_SIZE);
  6251. if (ivSz == GCM_NONCE_MID_SZ) {
  6252. byte* pCtr = (byte*)ctr;
  6253. XMEMCPY(ctr, iv, ivSz);
  6254. pCtr[AES_BLOCK_SIZE - 1] = 1;
  6255. }
  6256. else {
  6257. GHASH(&aes->gcm, NULL, 0, iv, ivSz, (byte*)ctr, AES_BLOCK_SIZE);
  6258. }
  6259. /* Make copy of expected authTag, which could get corrupted in some
  6260. * Cube HAL versions without proper partial block support.
  6261. * For TLS blocks the authTag is after the output buffer, so save it */
  6262. XMEMCPY(tagExpected, authTag, authTagSz);
  6263. /* Authentication buffer - must be 4-byte multiple zero padded */
  6264. authPadSz = authInSz % sizeof(word32);
  6265. if (authPadSz != 0) {
  6266. authPadSz = authInSz + sizeof(word32) - authPadSz;
  6267. }
  6268. else {
  6269. authPadSz = authInSz;
  6270. }
  6271. /* for cases where hardware cannot be used for authTag calculate it */
  6272. /* if IV is not 12 calculate GHASH using software */
  6273. if (ivSz != GCM_NONCE_MID_SZ
  6274. #ifndef CRYP_HEADERWIDTHUNIT_BYTE
  6275. /* or hardware that does not support partial block */
  6276. || sz == 0 || partial != 0
  6277. #endif
  6278. #if !defined(CRYP_HEADERWIDTHUNIT_BYTE) && !defined(STM32_AESGCM_PARTIAL)
  6279. /* or authIn is not a multiple of 4 */
  6280. || authPadSz != authInSz
  6281. #endif
  6282. ) {
  6283. GHASH(&aes->gcm, authIn, authInSz, in, sz, (byte*)tag, sizeof(tag));
  6284. ret = wc_AesEncrypt(aes, (byte*)ctr, (byte*)partialBlock);
  6285. if (ret != 0)
  6286. return ret;
  6287. xorbuf(tag, partialBlock, sizeof(tag));
  6288. tagComputed = 1;
  6289. }
  6290. /* if using hardware for authentication tag make sure its aligned and zero padded */
  6291. if (authPadSz != authInSz && !tagComputed) {
  6292. if (authPadSz <= sizeof(authhdr)) {
  6293. authInPadded = (byte*)authhdr;
  6294. }
  6295. else {
  6296. authInPadded = (byte*)XMALLOC(authPadSz, aes->heap,
  6297. DYNAMIC_TYPE_TMP_BUFFER);
  6298. if (authInPadded == NULL) {
  6299. wolfSSL_CryptHwMutexUnLock();
  6300. return MEMORY_E;
  6301. }
  6302. wasAlloc = 1;
  6303. }
  6304. XMEMSET(authInPadded, 0, authPadSz);
  6305. XMEMCPY(authInPadded, authIn, authInSz);
  6306. } else {
  6307. authInPadded = (byte*)authIn;
  6308. }
  6309. /* Hardware requires counter + 1 */
  6310. IncrementGcmCounter((byte*)ctr);
  6311. ret = wolfSSL_CryptHwMutexLock();
  6312. if (ret != 0) {
  6313. return ret;
  6314. }
  6315. #ifdef WOLFSSL_STM32_CUBEMX
  6316. hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
  6317. hcryp.Init.Header = (STM_CRYPT_TYPE*)authInPadded;
  6318. #if defined(STM32_HAL_V2)
  6319. hcryp.Init.Algorithm = CRYP_AES_GCM;
  6320. #ifdef CRYP_HEADERWIDTHUNIT_BYTE
  6321. /* V2 with CRYP_HEADERWIDTHUNIT_BYTE uses byte size for header */
  6322. hcryp.Init.HeaderSize = authInSz;
  6323. #else
  6324. hcryp.Init.HeaderSize = authPadSz/sizeof(word32);
  6325. #endif
  6326. #ifdef CRYP_KEYIVCONFIG_ONCE
  6327. /* allows repeated calls to HAL_CRYP_Decrypt */
  6328. hcryp.Init.KeyIVConfigSkip = CRYP_KEYIVCONFIG_ONCE;
  6329. #endif
  6330. ByteReverseWords(ctr, ctr, AES_BLOCK_SIZE);
  6331. hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
  6332. HAL_CRYP_Init(&hcryp);
  6333. #ifndef CRYP_KEYIVCONFIG_ONCE
  6334. status = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)in,
  6335. (blocks * AES_BLOCK_SIZE) + partial, (uint32_t*)out, STM32_HAL_TIMEOUT);
  6336. #else
  6337. /* GCM payload phase - blocks */
  6338. if (blocks) {
  6339. status = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)in,
  6340. (blocks * AES_BLOCK_SIZE), (uint32_t*)out, STM32_HAL_TIMEOUT);
  6341. }
  6342. /* GCM payload phase - partial remainder */
  6343. if (status == HAL_OK && (partial != 0 || blocks == 0)) {
  6344. XMEMSET(partialBlock, 0, sizeof(partialBlock));
  6345. XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
  6346. status = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)partialBlock, partial,
  6347. (uint32_t*)partialBlock, STM32_HAL_TIMEOUT);
  6348. XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
  6349. }
  6350. #endif
  6351. if (status == HAL_OK && !tagComputed) {
  6352. /* Compute the authTag */
  6353. status = HAL_CRYPEx_AESGCM_GenerateAuthTAG(&hcryp, (uint32_t*)tag,
  6354. STM32_HAL_TIMEOUT);
  6355. }
  6356. #elif defined(STM32_CRYPTO_AES_ONLY)
  6357. /* Set the CRYP parameters */
  6358. hcryp.Init.HeaderSize = authPadSz;
  6359. if (authPadSz == 0)
  6360. hcryp.Init.Header = NULL; /* cannot pass pointer when authIn == 0 */
  6361. hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_GCM_GMAC;
  6362. hcryp.Init.OperatingMode = CRYP_ALGOMODE_DECRYPT;
  6363. hcryp.Init.GCMCMACPhase = CRYP_INIT_PHASE;
  6364. HAL_CRYP_Init(&hcryp);
  6365. /* GCM init phase */
  6366. status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
  6367. if (status == HAL_OK) {
  6368. /* GCM header phase */
  6369. hcryp.Init.GCMCMACPhase = CRYP_HEADER_PHASE;
  6370. status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
  6371. }
  6372. if (status == HAL_OK) {
  6373. /* GCM payload phase - blocks */
  6374. hcryp.Init.GCMCMACPhase = CRYP_PAYLOAD_PHASE;
  6375. if (blocks) {
  6376. status = HAL_CRYPEx_AES_Auth(&hcryp, (byte*)in,
  6377. (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
  6378. }
  6379. }
  6380. if (status == HAL_OK && (partial != 0 || (sz > 0 && blocks == 0))) {
  6381. /* GCM payload phase - partial remainder */
  6382. XMEMSET(partialBlock, 0, sizeof(partialBlock));
  6383. XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
  6384. status = HAL_CRYPEx_AES_Auth(&hcryp, (byte*)partialBlock, partial,
  6385. (byte*)partialBlock, STM32_HAL_TIMEOUT);
  6386. XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
  6387. }
  6388. if (status == HAL_OK && tagComputed == 0) {
  6389. /* GCM final phase */
  6390. hcryp.Init.GCMCMACPhase = CRYP_FINAL_PHASE;
  6391. status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, sz, (byte*)tag, STM32_HAL_TIMEOUT);
  6392. }
  6393. #else
  6394. hcryp.Init.HeaderSize = authPadSz;
  6395. HAL_CRYP_Init(&hcryp);
  6396. if (blocks) {
  6397. /* GCM payload phase - blocks */
  6398. status = HAL_CRYPEx_AESGCM_Decrypt(&hcryp, (byte*)in,
  6399. (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
  6400. }
  6401. if (status == HAL_OK && (partial != 0 || blocks == 0)) {
  6402. /* GCM payload phase - partial remainder */
  6403. XMEMSET(partialBlock, 0, sizeof(partialBlock));
  6404. XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
  6405. status = HAL_CRYPEx_AESGCM_Decrypt(&hcryp, (byte*)partialBlock, partial,
  6406. (byte*)partialBlock, STM32_HAL_TIMEOUT);
  6407. XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
  6408. }
  6409. if (status == HAL_OK && tagComputed == 0) {
  6410. /* Compute the authTag */
  6411. status = HAL_CRYPEx_AESGCM_Finish(&hcryp, sz, (byte*)tag, STM32_HAL_TIMEOUT);
  6412. }
  6413. #endif
  6414. if (status != HAL_OK)
  6415. ret = AES_GCM_AUTH_E;
  6416. HAL_CRYP_DeInit(&hcryp);
  6417. #else /* Standard Peripheral Library */
  6418. ByteReverseWords(keyCopy, (word32*)aes->key, aes->keylen);
  6419. /* Input size and auth size need to be the actual sizes, even though
  6420. * they are not block aligned, because this length (in bits) is used
  6421. * in the final GHASH. */
  6422. XMEMSET(partialBlock, 0, sizeof(partialBlock)); /* use this to get tag */
  6423. status = CRYP_AES_GCM(MODE_DECRYPT, (uint8_t*)ctr,
  6424. (uint8_t*)keyCopy, keySize * 8,
  6425. (uint8_t*)in, sz,
  6426. (uint8_t*)authInPadded, authInSz,
  6427. (uint8_t*)out, (uint8_t*)partialBlock);
  6428. if (status != SUCCESS)
  6429. ret = AES_GCM_AUTH_E;
  6430. if (tagComputed == 0)
  6431. XMEMCPY(tag, partialBlock, authTagSz);
  6432. #endif /* WOLFSSL_STM32_CUBEMX */
  6433. wolfSSL_CryptHwMutexUnLock();
  6434. wc_Stm32_Aes_Cleanup();
  6435. /* Check authentication tag */
  6436. if (ConstantCompare((const byte*)tagExpected, (byte*)tag, authTagSz) != 0) {
  6437. ret = AES_GCM_AUTH_E;
  6438. }
  6439. /* Free memory */
  6440. if (wasAlloc) {
  6441. XFREE(authInPadded, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
  6442. }
  6443. return ret;
  6444. }
  6445. #endif /* STM32_CRYPTO_AES_GCM */
#ifdef WOLFSSL_AESNI
/* For performance reasons, this code needs to be not inlined. */
int WARN_UNUSED_RESULT AES_GCM_decrypt_C(
    Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    const byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz);
#else
static
#endif
/* Software AES-GCM decrypt: CTR-mode keystream decryption plus GHASH
 * tag recomputation and constant-time tag comparison.
 *
 * With WC_AES_GCM_DEC_AUTH_EARLY the tag is checked before any plain
 * text is produced; otherwise the check happens after decryption with
 * the result folded in via a constant-time mask.
 *
 * @param [in]  aes       AES object with key set.
 * @param [out] out       Plain text output; NULL for GMAC-only use.
 * @param [in]  in        Cipher text input.
 * @param [in]  sz        Length of cipher text in bytes.
 * @param [in]  iv        IV/nonce buffer.
 * @param [in]  ivSz      IV length in bytes.
 * @param [in]  authTag   Expected authentication tag.
 * @param [in]  authTagSz Tag length in bytes.
 * @param [in]  authIn    Additional authenticated data (AAD).
 * @param [in]  authInSz  AAD length in bytes.
 * @return 0 on success, AES_GCM_AUTH_E on tag mismatch, else an error
 *         from wc_AesEncrypt or the PIC32 hardware path.
 */
int WARN_UNUSED_RESULT AES_GCM_decrypt_C(
    Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    const byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    int ret;
    word32 blocks = sz / AES_BLOCK_SIZE;
    word32 partial = sz % AES_BLOCK_SIZE;
    const byte* c = in;  /* cipher text read cursor */
    byte* p = out;       /* plain text write cursor */
    ALIGN16 byte counter[AES_BLOCK_SIZE];
    ALIGN16 byte scratch[AES_BLOCK_SIZE];
    ALIGN16 byte Tprime[AES_BLOCK_SIZE]; /* recomputed authentication tag */
    ALIGN16 byte EKY0[AES_BLOCK_SIZE];   /* E(K, Y0) - tag masking block */
    sword32 res;

    if (ivSz == GCM_NONCE_MID_SZ) {
        /* Counter is IV with bottom 4 bytes set to: 0x00,0x00,0x00,0x01. */
        XMEMCPY(counter, iv, ivSz);
        XMEMSET(counter + GCM_NONCE_MID_SZ, 0,
                AES_BLOCK_SIZE - GCM_NONCE_MID_SZ - 1);
        counter[AES_BLOCK_SIZE - 1] = 1;
    }
    else {
        /* Counter is GHASH of IV. */
#ifdef OPENSSL_EXTRA
        /* GHASH of the IV must not disturb the running AAD length. */
        word32 aadTemp = aes->gcm.aadLen;
        aes->gcm.aadLen = 0;
#endif
        GHASH(&aes->gcm, NULL, 0, iv, ivSz, counter, AES_BLOCK_SIZE);
#ifdef OPENSSL_EXTRA
        aes->gcm.aadLen = aadTemp;
#endif
    }

    /* Calc the authTag again using received auth data and the cipher text */
    GHASH(&aes->gcm, authIn, authInSz, in, sz, Tprime, sizeof(Tprime));
    ret = wc_AesEncrypt(aes, counter, EKY0);
    if (ret != 0)
        return ret;
    xorbuf(Tprime, EKY0, sizeof(Tprime));

#ifdef WC_AES_GCM_DEC_AUTH_EARLY
    /* ConstantCompare returns the cumulative bitwise or of the bitwise xor of
     * the pairwise bytes in the strings.
     */
    res = ConstantCompare(authTag, Tprime, authTagSz);
    /* convert positive retval from ConstantCompare() to all-1s word, in
     * constant time.
     */
    res = 0 - (sword32)(((word32)(0 - res)) >> 31U);
    ret = res & AES_GCM_AUTH_E;
    if (ret != 0)
        return ret;
#endif

#ifdef OPENSSL_EXTRA
    if (!out) {
        /* authenticated, non-confidential data */
        /* store AAD size for next call */
        aes->gcm.aadLen = authInSz;
    }
#endif

#if defined(WOLFSSL_PIC32MZ_CRYPT)
    if (blocks) {
        /* use initial IV for HW, but don't use it below */
        XMEMCPY(aes->reg, counter, AES_BLOCK_SIZE);
        ret = wc_Pic32AesCrypt(
            aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
            out, in, (blocks * AES_BLOCK_SIZE),
            PIC32_DECRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_AES_GCM);
        if (ret != 0)
            return ret;
    }
    /* process remainder using partial handling */
#endif

#if defined(HAVE_AES_ECB) && !defined(WOLFSSL_PIC32MZ_CRYPT)
    /* some hardware acceleration can gain performance from doing AES encryption
     * of the whole buffer at once */
    if (c != p && blocks > 0) { /* can not handle inline decryption */
        /* Stage counter blocks in the output, bulk-encrypt them in ECB
         * mode, then XOR in the cipher text. */
        while (blocks--) {
            IncrementGcmCounter(counter);
            XMEMCPY(p, counter, AES_BLOCK_SIZE);
            p += AES_BLOCK_SIZE;
        }

        /* reset number of blocks and then do encryption */
        blocks = sz / AES_BLOCK_SIZE;
        wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
        xorbuf(out, c, AES_BLOCK_SIZE * blocks);
        c += AES_BLOCK_SIZE * blocks;
    }
    else
#endif /* HAVE_AES_ECB && !PIC32MZ */
    {
        /* Block-at-a-time path (also covers in-place decryption). */
        while (blocks--) {
            IncrementGcmCounter(counter);
#if !defined(WOLFSSL_PIC32MZ_CRYPT)
            ret = wc_AesEncrypt(aes, counter, scratch);
            if (ret != 0)
                return ret;
            xorbufout(p, scratch, c, AES_BLOCK_SIZE);
#endif
            p += AES_BLOCK_SIZE;
            c += AES_BLOCK_SIZE;
        }
    }

    if (partial != 0) {
        /* Final short block: use only the needed keystream bytes. */
        IncrementGcmCounter(counter);
        ret = wc_AesEncrypt(aes, counter, scratch);
        if (ret != 0)
            return ret;
        xorbuf(scratch, c, partial);
        XMEMCPY(p, scratch, partial);
    }

#ifndef WC_AES_GCM_DEC_AUTH_EARLY
    /* ConstantCompare returns the cumulative bitwise or of the bitwise xor of
     * the pairwise bytes in the strings.
     */
    res = ConstantCompare(authTag, Tprime, (int)authTagSz);
    /* convert positive retval from ConstantCompare() to all-1s word, in
     * constant time.
     */
    res = 0 - (sword32)(((word32)(0 - res)) >> 31U);
    /* now use res as a mask for constant time return of ret, unless tag
     * mismatch, whereupon AES_GCM_AUTH_E is returned.
     */
    ret = (ret & ~res) | (res & AES_GCM_AUTH_E);
#endif
    return ret;
}
/* Software AES - GCM Decrypt */
/* Validates arguments then dispatches AES-GCM decryption to the best
 * available backend: crypto callback, async hardware (Cavium Nitrox /
 * Intel QA / software async), Silicon Labs or STM32 accelerators,
 * AES-NI assembler (AVX2/AVX1/base), or the portable C implementation
 * AES_GCM_decrypt_C().
 *
 * @param [in]  aes       AES object with key set. Must not be NULL.
 * @param [out] out       Plain text output; may be NULL when sz == 0
 *                        (GMAC-only verification).
 * @param [in]  in        Cipher text input; may be NULL when sz == 0.
 * @param [in]  sz        Length of cipher text in bytes.
 * @param [in]  iv        IV/nonce buffer; must not be NULL.
 * @param [in]  ivSz      IV length in bytes; must be non-zero.
 * @param [in]  authTag   Expected authentication tag; must not be NULL.
 * @param [in]  authTagSz Tag length; 1..AES_BLOCK_SIZE.
 * @param [in]  authIn    Additional authenticated data (AAD).
 * @param [in]  authInSz  AAD length in bytes.
 * @return 0 on success, BAD_FUNC_ARG on invalid arguments,
 *         AES_GCM_AUTH_E on tag mismatch, WC_PENDING_E when queued on
 *         an async device, else a backend error code.
 */
int wc_AesGcmDecrypt(Aes* aes, byte* out, const byte* in, word32 sz,
                     const byte* iv, word32 ivSz,
                     const byte* authTag, word32 authTagSz,
                     const byte* authIn, word32 authInSz)
{
#ifdef WOLFSSL_AESNI
    /* AES-NI paths report tag validity through this flag (0 = bad). */
    int res = AES_GCM_AUTH_E;
#endif

    /* argument checks */
    /* If the sz is non-zero, both in and out must be set. If sz is 0,
     * in and out are don't cares, as this is is the GMAC case. */
    if (aes == NULL || iv == NULL || (sz != 0 && (in == NULL || out == NULL)) ||
        authTag == NULL || authTagSz > AES_BLOCK_SIZE || authTagSz == 0 ||
        ivSz == 0) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret =
            wc_CryptoCb_AesGcmDecrypt(aes, out, in, sz, iv, ivSz,
                                      authTag, authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    /* if async and byte count above threshold */
    /* only 12-byte IV is supported in HW */
    if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
            sz >= WC_ASYNC_THRESH_AES_GCM && ivSz == GCM_NONCE_MID_SZ) {
    #if defined(HAVE_CAVIUM)
        #ifdef HAVE_CAVIUM_V
        if (authInSz == 20) { /* Nitrox V GCM is only working with 20 byte AAD */
            return NitroxAesGcmDecrypt(aes, out, in, sz,
                (const byte*)aes->devKey, aes->keylen, iv, ivSz,
                authTag, authTagSz, authIn, authInSz);
        }
        #endif
    #elif defined(HAVE_INTEL_QA)
        return IntelQaSymAesGcmDecrypt(&aes->asyncDev, out, in, sz,
            (const byte*)aes->devKey, aes->keylen, iv, ivSz,
            authTag, authTagSz, authIn, authInSz);
    #elif defined(WOLFSSL_ASYNC_CRYPT_SW)
        if (wc_AsyncSwInit(&aes->asyncDev, ASYNC_SW_AES_GCM_DECRYPT)) {
            WC_ASYNC_SW* sw = &aes->asyncDev.sw;
            /* stash all arguments for the software async worker */
            sw->aes.aes = aes;
            sw->aes.out = out;
            sw->aes.in = in;
            sw->aes.sz = sz;
            sw->aes.iv = iv;
            sw->aes.ivSz = ivSz;
            sw->aes.authTag = (byte*)authTag;
            sw->aes.authTagSz = authTagSz;
            sw->aes.authIn = authIn;
            sw->aes.authInSz = authInSz;
            return WC_PENDING_E;
        }
    #endif
    }
#endif /* WOLFSSL_ASYNC_CRYPT */

#ifdef WOLFSSL_SILABS_SE_ACCEL
    return wc_AesGcmDecrypt_silabs(
        aes, out, in, sz, iv, ivSz,
        authTag, authTagSz, authIn, authInSz);
#endif

#ifdef STM32_CRYPTO_AES_GCM
    /* The STM standard peripheral library API's doesn't support partial blocks */
    return wc_AesGcmDecrypt_STM32(
        aes, out, in, sz, iv, ivSz,
        authTag, authTagSz, authIn, authInSz);
#endif /* STM32_CRYPTO_AES_GCM */

#ifdef WOLFSSL_AESNI
    #ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_decrypt_avx2(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
                             authTagSz, (byte*)aes->key, (int)aes->rounds, &res);
        RESTORE_VECTOR_REGISTERS();
        if (res == 0)
            return AES_GCM_AUTH_E;
        return 0;
    }
    else
    #endif
    #if defined(HAVE_INTEL_AVX1)
    if (IS_INTEL_AVX1(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_decrypt_avx1(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
                             authTagSz, (byte*)aes->key, (int)aes->rounds, &res);
        RESTORE_VECTOR_REGISTERS();
        if (res == 0)
            return AES_GCM_AUTH_E;
        return 0;
    }
    else
    #endif
    if (haveAESNI) {
        AES_GCM_decrypt(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
                        authTagSz, (byte*)aes->key, (int)aes->rounds, &res);
        if (res == 0)
            return AES_GCM_AUTH_E;
        return 0;
    }
    else
#endif
    {
        /* portable software fallback */
        return AES_GCM_decrypt_C(aes, out, in, sz, iv, ivSz, authTag, authTagSz,
                                 authIn, authInSz);
    }
}
  6698. #endif
  6699. #endif /* HAVE_AES_DECRYPT || HAVE_AESGCM_DECRYPT */
  6700. #ifdef WOLFSSL_AESGCM_STREAM
  6701. /* Initialize the AES GCM cipher with an IV. C implementation.
  6702. *
  6703. * @param [in, out] aes AES object.
  6704. * @param [in] iv IV/nonce buffer.
  6705. * @param [in] ivSz Length of IV/nonce data.
  6706. */
  6707. static WARN_UNUSED_RESULT int AesGcmInit_C(Aes* aes, const byte* iv, word32 ivSz)
  6708. {
  6709. ALIGN32 byte counter[AES_BLOCK_SIZE];
  6710. int ret;
  6711. if (ivSz == GCM_NONCE_MID_SZ) {
  6712. /* Counter is IV with bottom 4 bytes set to: 0x00,0x00,0x00,0x01. */
  6713. XMEMCPY(counter, iv, ivSz);
  6714. XMEMSET(counter + GCM_NONCE_MID_SZ, 0,
  6715. AES_BLOCK_SIZE - GCM_NONCE_MID_SZ - 1);
  6716. counter[AES_BLOCK_SIZE - 1] = 1;
  6717. }
  6718. else {
  6719. /* Counter is GHASH of IV. */
  6720. #ifdef OPENSSL_EXTRA
  6721. word32 aadTemp = aes->gcm.aadLen;
  6722. aes->gcm.aadLen = 0;
  6723. #endif
  6724. GHASH(&aes->gcm, NULL, 0, iv, ivSz, counter, AES_BLOCK_SIZE);
  6725. #ifdef OPENSSL_EXTRA
  6726. aes->gcm.aadLen = aadTemp;
  6727. #endif
  6728. }
  6729. /* Copy in the counter for use with cipher. */
  6730. XMEMCPY(AES_COUNTER(aes), counter, AES_BLOCK_SIZE);
  6731. /* Encrypt initial counter into a buffer for GCM. */
  6732. ret = wc_AesEncrypt(aes, counter, AES_INITCTR(aes));
  6733. if (ret != 0)
  6734. return ret;
  6735. /* Reset state fields. */
  6736. aes->over = 0;
  6737. aes->aSz = 0;
  6738. aes->cSz = 0;
  6739. /* Initialization for GHASH. */
  6740. GHASH_INIT(aes);
  6741. return 0;
  6742. }
  6743. /* Update the AES GCM cipher with data. C implementation.
  6744. *
  6745. * Only enciphers data.
  6746. *
  6747. * @param [in, out] aes AES object.
  6748. * @param [in] out Cipher text or plaintext buffer.
  6749. * @param [in] in Plaintext or cipher text buffer.
  6750. * @param [in] sz Length of data.
  6751. */
  6752. static WARN_UNUSED_RESULT int AesGcmCryptUpdate_C(
  6753. Aes* aes, byte* out, const byte* in, word32 sz)
  6754. {
  6755. word32 blocks;
  6756. word32 partial;
  6757. int ret;
  6758. /* Check if previous encrypted block was not used up. */
  6759. if (aes->over > 0) {
  6760. byte pSz = AES_BLOCK_SIZE - aes->over;
  6761. if (pSz > sz) pSz = (byte)sz;
  6762. /* Use some/all of last encrypted block. */
  6763. xorbufout(out, AES_LASTBLOCK(aes) + aes->over, in, pSz);
  6764. aes->over = (aes->over + pSz) & (AES_BLOCK_SIZE - 1);
  6765. /* Some data used. */
  6766. sz -= pSz;
  6767. in += pSz;
  6768. out += pSz;
  6769. }
  6770. /* Calculate the number of blocks needing to be encrypted and any leftover.
  6771. */
  6772. blocks = sz / AES_BLOCK_SIZE;
  6773. partial = sz & (AES_BLOCK_SIZE - 1);
  6774. #if defined(HAVE_AES_ECB)
  6775. /* Some hardware acceleration can gain performance from doing AES encryption
  6776. * of the whole buffer at once.
  6777. * Overwrites the cipher text before using plaintext - no inline encryption.
  6778. */
  6779. if ((out != in) && blocks > 0) {
  6780. word32 b;
  6781. /* Place incrementing counter blocks into cipher text. */
  6782. for (b = 0; b < blocks; b++) {
  6783. IncrementGcmCounter(AES_COUNTER(aes));
  6784. XMEMCPY(out + b * AES_BLOCK_SIZE, AES_COUNTER(aes), AES_BLOCK_SIZE);
  6785. }
  6786. /* Encrypt counter blocks. */
  6787. wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
  6788. /* XOR in plaintext. */
  6789. xorbuf(out, in, AES_BLOCK_SIZE * blocks);
  6790. /* Skip over processed data. */
  6791. in += AES_BLOCK_SIZE * blocks;
  6792. out += AES_BLOCK_SIZE * blocks;
  6793. }
  6794. else
  6795. #endif /* HAVE_AES_ECB */
  6796. {
  6797. /* Encrypt block by block. */
  6798. while (blocks--) {
  6799. ALIGN32 byte scratch[AES_BLOCK_SIZE];
  6800. IncrementGcmCounter(AES_COUNTER(aes));
  6801. /* Encrypt counter into a buffer. */
  6802. ret = wc_AesEncrypt(aes, AES_COUNTER(aes), scratch);
  6803. if (ret != 0)
  6804. return ret;
  6805. /* XOR plain text into encrypted counter into cipher text buffer. */
  6806. xorbufout(out, scratch, in, AES_BLOCK_SIZE);
  6807. /* Data complete. */
  6808. in += AES_BLOCK_SIZE;
  6809. out += AES_BLOCK_SIZE;
  6810. }
  6811. }
  6812. if (partial != 0) {
  6813. /* Generate an extra block and use up as much as needed. */
  6814. IncrementGcmCounter(AES_COUNTER(aes));
  6815. /* Encrypt counter into cache. */
  6816. ret = wc_AesEncrypt(aes, AES_COUNTER(aes), AES_LASTBLOCK(aes));
  6817. if (ret != 0)
  6818. return ret;
  6819. /* XOR plain text into encrypted counter into cipher text buffer. */
  6820. xorbufout(out, AES_LASTBLOCK(aes), in, partial);
  6821. /* Keep amount of encrypted block used. */
  6822. aes->over = (byte)partial;
  6823. }
  6824. return 0;
  6825. }
  6826. /* Calculates authentication tag for AES GCM. C implementation.
  6827. *
  6828. * @param [in, out] aes AES object.
  6829. * @param [out] authTag Buffer to store authentication tag in.
  6830. * @param [in] authTagSz Length of tag to create.
  6831. */
  6832. static WARN_UNUSED_RESULT int AesGcmFinal_C(
  6833. Aes* aes, byte* authTag, word32 authTagSz)
  6834. {
  6835. /* Calculate authentication tag. */
  6836. GHASH_FINAL(aes, authTag, authTagSz);
  6837. /* XOR in as much of encrypted counter as is required. */
  6838. xorbuf(authTag, AES_INITCTR(aes), authTagSz);
  6839. #ifdef OPENSSL_EXTRA
  6840. /* store AAD size for next call */
  6841. aes->gcm.aadLen = aes->aSz;
  6842. #endif
  6843. /* Zeroize last block to protect sensitive data. */
  6844. ForceZero(AES_LASTBLOCK(aes), AES_BLOCK_SIZE);
  6845. return 0;
  6846. }
  6847. #ifdef WOLFSSL_AESNI
  6848. #ifdef __cplusplus
  6849. extern "C" {
  6850. #endif
  6851. /* Assembly code implementations in: aes_gcm_asm.S */
  6852. #ifdef HAVE_INTEL_AVX2
  6853. extern void AES_GCM_init_avx2(const unsigned char* key, int nr,
  6854. const unsigned char* ivec, unsigned int ibytes, unsigned char* h,
  6855. unsigned char* counter, unsigned char* initCtr);
  6856. extern void AES_GCM_aad_update_avx2(const unsigned char* addt,
  6857. unsigned int abytes, unsigned char* tag, unsigned char* h);
  6858. extern void AES_GCM_encrypt_block_avx2(const unsigned char* key, int nr,
  6859. unsigned char* out, const unsigned char* in, unsigned char* counter);
  6860. extern void AES_GCM_ghash_block_avx2(const unsigned char* data,
  6861. unsigned char* tag, unsigned char* h);
  6862. extern void AES_GCM_encrypt_update_avx2(const unsigned char* key, int nr,
  6863. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  6864. unsigned char* tag, unsigned char* h, unsigned char* counter);
  6865. extern void AES_GCM_encrypt_final_avx2(unsigned char* tag,
  6866. unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  6867. unsigned int abytes, unsigned char* h, unsigned char* initCtr);
  6868. #endif
  6869. #ifdef HAVE_INTEL_AVX1
  6870. extern void AES_GCM_init_avx1(const unsigned char* key, int nr,
  6871. const unsigned char* ivec, unsigned int ibytes, unsigned char* h,
  6872. unsigned char* counter, unsigned char* initCtr);
  6873. extern void AES_GCM_aad_update_avx1(const unsigned char* addt,
  6874. unsigned int abytes, unsigned char* tag, unsigned char* h);
  6875. extern void AES_GCM_encrypt_block_avx1(const unsigned char* key, int nr,
  6876. unsigned char* out, const unsigned char* in, unsigned char* counter);
  6877. extern void AES_GCM_ghash_block_avx1(const unsigned char* data,
  6878. unsigned char* tag, unsigned char* h);
  6879. extern void AES_GCM_encrypt_update_avx1(const unsigned char* key, int nr,
  6880. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  6881. unsigned char* tag, unsigned char* h, unsigned char* counter);
  6882. extern void AES_GCM_encrypt_final_avx1(unsigned char* tag,
  6883. unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  6884. unsigned int abytes, unsigned char* h, unsigned char* initCtr);
  6885. #endif
  6886. extern void AES_GCM_init_aesni(const unsigned char* key, int nr,
  6887. const unsigned char* ivec, unsigned int ibytes, unsigned char* h,
  6888. unsigned char* counter, unsigned char* initCtr);
  6889. extern void AES_GCM_aad_update_aesni(const unsigned char* addt,
  6890. unsigned int abytes, unsigned char* tag, unsigned char* h);
  6891. extern void AES_GCM_encrypt_block_aesni(const unsigned char* key, int nr,
  6892. unsigned char* out, const unsigned char* in, unsigned char* counter);
  6893. extern void AES_GCM_ghash_block_aesni(const unsigned char* data,
  6894. unsigned char* tag, unsigned char* h);
  6895. extern void AES_GCM_encrypt_update_aesni(const unsigned char* key, int nr,
  6896. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  6897. unsigned char* tag, unsigned char* h, unsigned char* counter);
  6898. extern void AES_GCM_encrypt_final_aesni(unsigned char* tag,
  6899. unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  6900. unsigned int abytes, unsigned char* h, unsigned char* initCtr);
  6901. #ifdef __cplusplus
  6902. } /* extern "C" */
  6903. #endif
/* Initialize the AES GCM cipher with an IV. AES-NI implementations.
 *
 * Resets the streaming state (AAD/cipher text byte counts and partial-block
 * counts), zeroizes the running GHASH tag, then dispatches to the AVX2, AVX1
 * or plain AES-NI assembly routine (based on intel_flags) to derive the
 * GHASH key H, the working counter block and the initial counter block
 * from the IV.
 *
 * @param [in, out] aes   AES object.
 * @param [in]      iv    IV/nonce buffer.
 * @param [in]      ivSz  Length of IV/nonce data.
 * @return  0 on success.
 * @return  _svr_ret when saving the vector registers fails
 *          (SAVE_VECTOR_REGISTERS may return early).
 */
static WARN_UNUSED_RESULT int AesGcmInit_aesni(
    Aes* aes, const byte* iv, word32 ivSz)
{
    /* Reset state fields. */
    aes->aSz = 0;
    aes->cSz = 0;
    /* Set tag to all zeros as initial value. */
    XMEMSET(AES_TAG(aes), 0, AES_BLOCK_SIZE);
    /* Reset counts of AAD and cipher text. */
    aes->aOver = 0;
    aes->cOver = 0;

#ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        /* Derive H, counter and initial counter from the IV. */
        AES_GCM_init_avx2((byte*)aes->key, (int)aes->rounds, iv, ivSz,
            aes->gcm.H, AES_COUNTER(aes), AES_INITCTR(aes));
        RESTORE_VECTOR_REGISTERS();
    }
    else
#endif
#ifdef HAVE_INTEL_AVX1
    if (IS_INTEL_AVX1(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_init_avx1((byte*)aes->key, (int)aes->rounds, iv, ivSz,
            aes->gcm.H, AES_COUNTER(aes), AES_INITCTR(aes));
        RESTORE_VECTOR_REGISTERS();
    }
    else
#endif
    {
        /* Plain AES-NI fallback. */
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_init_aesni((byte*)aes->key, (int)aes->rounds, iv, ivSz,
            aes->gcm.H, AES_COUNTER(aes), AES_INITCTR(aes));
        RESTORE_VECTOR_REGISTERS();
    }
    return 0;
}
/* Update the AES GCM for encryption with authentication data.
 *
 * Implementation uses AVX2, AVX1 or straight AES-NI optimized assembly code.
 *
 * AAD is GHASHed in full blocks; a trailing partial block is buffered in
 * AES_LASTGBLOCK and its length tracked in aes->aOver until more AAD arrives
 * or endA is set (then the partial block is zero-padded and hashed).
 *
 * NOTE: caller must have saved the vector registers (SAVE_VECTOR_REGISTERS)
 * before calling - enforced by ASSERT_SAVED_VECTOR_REGISTERS below.
 *
 * @param [in, out] aes   AES object.
 * @param [in]      a     Buffer holding authentication data.
 * @param [in]      aSz   Length of authentication data in bytes.
 * @param [in]      endA  Whether no more authentication data is expected.
 * @return  0 on success (this implementation has no failure paths).
 */
static WARN_UNUSED_RESULT int AesGcmAadUpdate_aesni(
    Aes* aes, const byte* a, word32 aSz, int endA)
{
    word32 blocks;
    int partial;

    ASSERT_SAVED_VECTOR_REGISTERS();

    if (aSz != 0 && a != NULL) {
        /* Total count of AAD updated. */
        aes->aSz += aSz;
        /* Check if we have unprocessed data. */
        if (aes->aOver > 0) {
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->aOver;
            if (sz > aSz) {
                sz = (byte)aSz;
            }
            /* Copy extra into last GHASH block array and update count. */
            XMEMCPY(AES_LASTGBLOCK(aes) + aes->aOver, a, sz);
            aes->aOver += sz;
            if (aes->aOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
            #ifdef HAVE_INTEL_AVX2
                if (IS_INTEL_AVX2(intel_flags)) {
                    AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes),
                        aes->gcm.H);
                }
                else
            #endif
            #ifdef HAVE_INTEL_AVX1
                if (IS_INTEL_AVX1(intel_flags)) {
                    AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes),
                        aes->gcm.H);
                }
                else
            #endif
                {
                    AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
                        aes->gcm.H);
                }
                /* Reset count. */
                aes->aOver = 0;
            }
            /* Used up some data. */
            aSz -= sz;
            a += sz;
        }
        /* Calculate number of blocks of AAD and the leftover. */
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        if (blocks > 0) {
            /* GHASH full blocks now. */
        #ifdef HAVE_INTEL_AVX2
            if (IS_INTEL_AVX2(intel_flags)) {
                AES_GCM_aad_update_avx2(a, blocks * AES_BLOCK_SIZE,
                    AES_TAG(aes), aes->gcm.H);
            }
            else
        #endif
        #ifdef HAVE_INTEL_AVX1
            if (IS_INTEL_AVX1(intel_flags)) {
                AES_GCM_aad_update_avx1(a, blocks * AES_BLOCK_SIZE,
                    AES_TAG(aes), aes->gcm.H);
            }
            else
        #endif
            {
                AES_GCM_aad_update_aesni(a, blocks * AES_BLOCK_SIZE,
                    AES_TAG(aes), aes->gcm.H);
            }
            /* Skip over to end of AAD blocks. */
            a += blocks * AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Cache the partial block for the next call to complete. */
            XMEMCPY(AES_LASTGBLOCK(aes), a, (size_t)partial);
            aes->aOver = (byte)partial;
        }
    }
    if (endA && (aes->aOver > 0)) {
        /* No more AAD coming and we have a partial block. */
        /* Fill the rest of the block with zeros. */
        XMEMSET(AES_LASTGBLOCK(aes) + aes->aOver, 0,
            AES_BLOCK_SIZE - aes->aOver);
        /* GHASH last AAD block. */
    #ifdef HAVE_INTEL_AVX2
        if (IS_INTEL_AVX2(intel_flags)) {
            AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes),
                aes->gcm.H);
        }
        else
    #endif
    #ifdef HAVE_INTEL_AVX1
        if (IS_INTEL_AVX1(intel_flags)) {
            AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes),
                aes->gcm.H);
        }
        else
    #endif
        {
            AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
                aes->gcm.H);
        }
        /* Clear partial count for next time through. */
        aes->aOver = 0;
    }
    return 0;
}
  7063. /* Update the AES GCM for encryption with data and/or authentication data.
  7064. *
  7065. * Implementation uses AVX2, AVX1 or straight AES-NI optimized assembly code.
  7066. *
  7067. * @param [in, out] aes AES object.
  7068. * @param [out] c Buffer to hold cipher text.
  7069. * @param [in] p Buffer holding plaintext.
  7070. * @param [in] cSz Length of cipher text/plaintext in bytes.
  7071. * @param [in] a Buffer holding authentication data.
  7072. * @param [in] aSz Length of authentication data in bytes.
  7073. */
  7074. static WARN_UNUSED_RESULT int AesGcmEncryptUpdate_aesni(
  7075. Aes* aes, byte* c, const byte* p, word32 cSz, const byte* a, word32 aSz)
  7076. {
  7077. word32 blocks;
  7078. int partial;
  7079. int ret;
  7080. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  7081. /* Hash in A, the Authentication Data */
  7082. ret = AesGcmAadUpdate_aesni(aes, a, aSz, (cSz > 0) && (c != NULL));
  7083. if (ret != 0)
  7084. return ret;
  7085. /* Encrypt plaintext and Hash in C, the Cipher text */
  7086. if (cSz != 0 && c != NULL) {
  7087. /* Update count of cipher text we have hashed. */
  7088. aes->cSz += cSz;
  7089. if (aes->cOver > 0) {
  7090. /* Calculate amount we can use - fill up the block. */
  7091. byte sz = AES_BLOCK_SIZE - aes->cOver;
  7092. if (sz > cSz) {
  7093. sz = (byte)cSz;
  7094. }
  7095. /* Encrypt some of the plaintext. */
  7096. xorbuf(AES_LASTGBLOCK(aes) + aes->cOver, p, sz);
  7097. XMEMCPY(c, AES_LASTGBLOCK(aes) + aes->cOver, sz);
  7098. /* Update count of unused encrypted counter. */
  7099. aes->cOver += sz;
  7100. if (aes->cOver == AES_BLOCK_SIZE) {
  7101. /* We have filled up the block and can process. */
  7102. #ifdef HAVE_INTEL_AVX2
  7103. if (IS_INTEL_AVX2(intel_flags)) {
  7104. AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes),
  7105. aes->gcm.H);
  7106. }
  7107. else
  7108. #endif
  7109. #ifdef HAVE_INTEL_AVX1
  7110. if (IS_INTEL_AVX1(intel_flags)) {
  7111. AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes),
  7112. aes->gcm.H);
  7113. }
  7114. else
  7115. #endif
  7116. {
  7117. AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
  7118. aes->gcm.H);
  7119. }
  7120. /* Reset count. */
  7121. aes->cOver = 0;
  7122. }
  7123. /* Used up some data. */
  7124. cSz -= sz;
  7125. p += sz;
  7126. c += sz;
  7127. }
  7128. /* Calculate number of blocks of plaintext and the leftover. */
  7129. blocks = cSz / AES_BLOCK_SIZE;
  7130. partial = cSz % AES_BLOCK_SIZE;
  7131. if (blocks > 0) {
  7132. /* Encrypt and GHASH full blocks now. */
  7133. #ifdef HAVE_INTEL_AVX2
  7134. if (IS_INTEL_AVX2(intel_flags)) {
  7135. AES_GCM_encrypt_update_avx2((byte*)aes->key, (int)aes->rounds,
  7136. c, p, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->gcm.H,
  7137. AES_COUNTER(aes));
  7138. }
  7139. else
  7140. #endif
  7141. #ifdef HAVE_INTEL_AVX1
  7142. if (IS_INTEL_AVX1(intel_flags)) {
  7143. AES_GCM_encrypt_update_avx1((byte*)aes->key, (int)aes->rounds,
  7144. c, p, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->gcm.H,
  7145. AES_COUNTER(aes));
  7146. }
  7147. else
  7148. #endif
  7149. {
  7150. AES_GCM_encrypt_update_aesni((byte*)aes->key, (int)aes->rounds,
  7151. c, p, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->gcm.H,
  7152. AES_COUNTER(aes));
  7153. }
  7154. /* Skip over to end of blocks. */
  7155. p += blocks * AES_BLOCK_SIZE;
  7156. c += blocks * AES_BLOCK_SIZE;
  7157. }
  7158. if (partial != 0) {
  7159. /* Encrypt the counter - XOR in zeros as proxy for plaintext. */
  7160. XMEMSET(AES_LASTGBLOCK(aes), 0, AES_BLOCK_SIZE);
  7161. #ifdef HAVE_INTEL_AVX2
  7162. if (IS_INTEL_AVX2(intel_flags)) {
  7163. AES_GCM_encrypt_block_avx2((byte*)aes->key, (int)aes->rounds,
  7164. AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
  7165. }
  7166. else
  7167. #endif
  7168. #ifdef HAVE_INTEL_AVX1
  7169. if (IS_INTEL_AVX1(intel_flags)) {
  7170. AES_GCM_encrypt_block_avx1((byte*)aes->key, (int)aes->rounds,
  7171. AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
  7172. }
  7173. else
  7174. #endif
  7175. {
  7176. AES_GCM_encrypt_block_aesni((byte*)aes->key, (int)aes->rounds,
  7177. AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
  7178. }
  7179. /* XOR the remaining plaintext to calculate cipher text.
  7180. * Keep cipher text for GHASH of last partial block.
  7181. */
  7182. xorbuf(AES_LASTGBLOCK(aes), p, (word32)partial);
  7183. XMEMCPY(c, AES_LASTGBLOCK(aes), (size_t)partial);
  7184. /* Update count of the block used. */
  7185. aes->cOver = (byte)partial;
  7186. }
  7187. }
  7188. RESTORE_VECTOR_REGISTERS();
  7189. return 0;
  7190. }
/* Finalize the AES GCM for encryption and calculate the authentication tag.
 *
 * Calls AVX2, AVX1 or straight AES-NI optimized assembly code.
 *
 * Any buffered partial block (cipher text takes precedence over AAD since
 * AAD must precede cipher text) is zero-padded and GHASHed, then the final
 * tag is computed from the total AAD/cipher text lengths and the initial
 * counter block.
 *
 * @param [in, out] aes        AES object.
 * @param [in]      authTag    Buffer to hold authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 * @return  0 on success.
 * @return  _svr_ret when saving the vector registers fails.
 */
static WARN_UNUSED_RESULT int AesGcmEncryptFinal_aesni(
    Aes* aes, byte* authTag, word32 authTagSz)
{
    /* AAD block incomplete when > 0 */
    byte over = aes->aOver;

    SAVE_VECTOR_REGISTERS(return _svr_ret;);
    if (aes->cOver > 0) {
        /* Cipher text block incomplete. */
        over = aes->cOver;
    }
    if (over > 0) {
        /* Fill the rest of the block with zeros. */
        XMEMSET(AES_LASTGBLOCK(aes) + over, 0, AES_BLOCK_SIZE - over);
        /* GHASH last cipher block. */
    #ifdef HAVE_INTEL_AVX2
        if (IS_INTEL_AVX2(intel_flags)) {
            AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes),
                aes->gcm.H);
        }
        else
    #endif
    #ifdef HAVE_INTEL_AVX1
        if (IS_INTEL_AVX1(intel_flags)) {
            AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes),
                aes->gcm.H);
        }
        else
    #endif
        {
            AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
                aes->gcm.H);
        }
    }
    /* Calculate the authentication tag. */
#ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        AES_GCM_encrypt_final_avx2(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->gcm.H, AES_INITCTR(aes));
    }
    else
#endif
#ifdef HAVE_INTEL_AVX1
    if (IS_INTEL_AVX1(intel_flags)) {
        AES_GCM_encrypt_final_avx1(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->gcm.H, AES_INITCTR(aes));
    }
    else
#endif
    {
        AES_GCM_encrypt_final_aesni(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->gcm.H, AES_INITCTR(aes));
    }
    RESTORE_VECTOR_REGISTERS();
    return 0;
}
  7255. #if defined(HAVE_AES_DECRYPT) || defined(HAVE_AESGCM_DECRYPT)
  7256. #ifdef __cplusplus
  7257. extern "C" {
  7258. #endif
  7259. /* Assembly code implementations in: aes_gcm_asm.S and aes_gcm_x86_asm.S */
  7260. #ifdef HAVE_INTEL_AVX2
  7261. extern void AES_GCM_decrypt_update_avx2(const unsigned char* key, int nr,
  7262. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  7263. unsigned char* tag, unsigned char* h, unsigned char* counter);
  7264. extern void AES_GCM_decrypt_final_avx2(unsigned char* tag,
  7265. const unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  7266. unsigned int abytes, unsigned char* h, unsigned char* initCtr, int* res);
  7267. #endif
  7268. #ifdef HAVE_INTEL_AVX1
  7269. extern void AES_GCM_decrypt_update_avx1(const unsigned char* key, int nr,
  7270. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  7271. unsigned char* tag, unsigned char* h, unsigned char* counter);
  7272. extern void AES_GCM_decrypt_final_avx1(unsigned char* tag,
  7273. const unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  7274. unsigned int abytes, unsigned char* h, unsigned char* initCtr, int* res);
  7275. #endif
  7276. extern void AES_GCM_decrypt_update_aesni(const unsigned char* key, int nr,
  7277. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  7278. unsigned char* tag, unsigned char* h, unsigned char* counter);
  7279. extern void AES_GCM_decrypt_final_aesni(unsigned char* tag,
  7280. const unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  7281. unsigned int abytes, unsigned char* h, unsigned char* initCtr, int* res);
  7282. #ifdef __cplusplus
  7283. } /* extern "C" */
  7284. #endif
  7285. /* Update the AES GCM for decryption with data and/or authentication data.
  7286. *
  7287. * @param [in, out] aes AES object.
  7288. * @param [out] p Buffer to hold plaintext.
  7289. * @param [in] c Buffer holding cipher text.
  7290. * @param [in] cSz Length of cipher text/plaintext in bytes.
  7291. * @param [in] a Buffer holding authentication data.
  7292. * @param [in] aSz Length of authentication data in bytes.
  7293. */
  7294. static WARN_UNUSED_RESULT int AesGcmDecryptUpdate_aesni(
  7295. Aes* aes, byte* p, const byte* c, word32 cSz, const byte* a, word32 aSz)
  7296. {
  7297. word32 blocks;
  7298. int partial;
  7299. int ret;
  7300. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  7301. /* Hash in A, the Authentication Data */
  7302. ret = AesGcmAadUpdate_aesni(aes, a, aSz, (cSz > 0) && (c != NULL));
  7303. if (ret != 0)
  7304. return ret;
  7305. /* Hash in C, the Cipher text, and decrypt. */
  7306. if (cSz != 0 && p != NULL) {
  7307. /* Update count of cipher text we have hashed. */
  7308. aes->cSz += cSz;
  7309. if (aes->cOver > 0) {
  7310. /* Calculate amount we can use - fill up the block. */
  7311. byte sz = AES_BLOCK_SIZE - aes->cOver;
  7312. if (sz > cSz) {
  7313. sz = (byte)cSz;
  7314. }
  7315. /* Keep a copy of the cipher text for GHASH. */
  7316. XMEMCPY(AES_LASTBLOCK(aes) + aes->cOver, c, sz);
  7317. /* Decrypt some of the cipher text. */
  7318. xorbuf(AES_LASTGBLOCK(aes) + aes->cOver, c, sz);
  7319. XMEMCPY(p, AES_LASTGBLOCK(aes) + aes->cOver, sz);
  7320. /* Update count of unused encrypted counter. */
  7321. aes->cOver += sz;
  7322. if (aes->cOver == AES_BLOCK_SIZE) {
  7323. /* We have filled up the block and can process. */
  7324. #ifdef HAVE_INTEL_AVX2
  7325. if (IS_INTEL_AVX2(intel_flags)) {
  7326. AES_GCM_ghash_block_avx2(AES_LASTBLOCK(aes), AES_TAG(aes),
  7327. aes->gcm.H);
  7328. }
  7329. else
  7330. #endif
  7331. #ifdef HAVE_INTEL_AVX1
  7332. if (IS_INTEL_AVX1(intel_flags)) {
  7333. AES_GCM_ghash_block_avx1(AES_LASTBLOCK(aes), AES_TAG(aes),
  7334. aes->gcm.H);
  7335. }
  7336. else
  7337. #endif
  7338. {
  7339. AES_GCM_ghash_block_aesni(AES_LASTBLOCK(aes), AES_TAG(aes),
  7340. aes->gcm.H);
  7341. }
  7342. /* Reset count. */
  7343. aes->cOver = 0;
  7344. }
  7345. /* Used up some data. */
  7346. cSz -= sz;
  7347. c += sz;
  7348. p += sz;
  7349. }
  7350. /* Calculate number of blocks of plaintext and the leftover. */
  7351. blocks = cSz / AES_BLOCK_SIZE;
  7352. partial = cSz % AES_BLOCK_SIZE;
  7353. if (blocks > 0) {
  7354. /* Decrypt and GHASH full blocks now. */
  7355. #ifdef HAVE_INTEL_AVX2
  7356. if (IS_INTEL_AVX2(intel_flags)) {
  7357. AES_GCM_decrypt_update_avx2((byte*)aes->key, (int)aes->rounds,
  7358. p, c, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->gcm.H,
  7359. AES_COUNTER(aes));
  7360. }
  7361. else
  7362. #endif
  7363. #ifdef HAVE_INTEL_AVX1
  7364. if (IS_INTEL_AVX1(intel_flags)) {
  7365. AES_GCM_decrypt_update_avx1((byte*)aes->key, (int)aes->rounds,
  7366. p, c, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->gcm.H,
  7367. AES_COUNTER(aes));
  7368. }
  7369. else
  7370. #endif
  7371. {
  7372. AES_GCM_decrypt_update_aesni((byte*)aes->key, (int)aes->rounds,
  7373. p, c, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->gcm.H,
  7374. AES_COUNTER(aes));
  7375. }
  7376. /* Skip over to end of blocks. */
  7377. c += blocks * AES_BLOCK_SIZE;
  7378. p += blocks * AES_BLOCK_SIZE;
  7379. }
  7380. if (partial != 0) {
  7381. /* Encrypt the counter - XOR in zeros as proxy for cipher text. */
  7382. XMEMSET(AES_LASTGBLOCK(aes), 0, AES_BLOCK_SIZE);
  7383. #ifdef HAVE_INTEL_AVX2
  7384. if (IS_INTEL_AVX2(intel_flags)) {
  7385. AES_GCM_encrypt_block_avx2((byte*)aes->key, (int)aes->rounds,
  7386. AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
  7387. }
  7388. else
  7389. #endif
  7390. #ifdef HAVE_INTEL_AVX1
  7391. if (IS_INTEL_AVX1(intel_flags)) {
  7392. AES_GCM_encrypt_block_avx1((byte*)aes->key, (int)aes->rounds,
  7393. AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
  7394. }
  7395. else
  7396. #endif
  7397. {
  7398. AES_GCM_encrypt_block_aesni((byte*)aes->key, (int)aes->rounds,
  7399. AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
  7400. }
  7401. /* Keep cipher text for GHASH of last partial block. */
  7402. XMEMCPY(AES_LASTBLOCK(aes), c, (size_t)partial);
  7403. /* XOR the remaining cipher text to calculate plaintext. */
  7404. xorbuf(AES_LASTGBLOCK(aes), c, (word32)partial);
  7405. XMEMCPY(p, AES_LASTGBLOCK(aes), (size_t)partial);
  7406. /* Update count of the block used. */
  7407. aes->cOver = (byte)partial;
  7408. }
  7409. }
  7410. RESTORE_VECTOR_REGISTERS();
  7411. return 0;
  7412. }
/* Finalize the AES GCM for decryption and check the authentication tag.
 *
 * Calls AVX2, AVX1 or straight AES-NI optimized assembly code.
 *
 * Any buffered partial block is zero-padded and GHASHed (the cached cipher
 * text in AES_LASTBLOCK when a cipher text block is incomplete, otherwise
 * the AAD bytes in AES_LASTGBLOCK), then the assembly final routine computes
 * the tag and compares it against the supplied one, reporting via res.
 *
 * @param [in, out] aes        AES object.
 * @param [in]      authTag    Buffer holding authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 * @return  0 on success.
 * @return  _svr_ret when saving the vector registers fails.
 * @return  AES_GCM_AUTH_E when authentication tag doesn't match calculated
 *          value.
 */
static WARN_UNUSED_RESULT int AesGcmDecryptFinal_aesni(
    Aes* aes, const byte* authTag, word32 authTagSz)
{
    int ret = 0;
    /* Comparison result: set by the assembly final routine below
     * (non-zero on match, 0 on mismatch - see check at end). */
    int res;
    /* AAD block incomplete when > 0 */
    byte over = aes->aOver;
    byte *lastBlock = AES_LASTGBLOCK(aes);

    SAVE_VECTOR_REGISTERS(return _svr_ret;);
    if (aes->cOver > 0) {
        /* Cipher text block incomplete. */
        over = aes->cOver;
        /* GHASH the cached cipher text, not the decrypted data. */
        lastBlock = AES_LASTBLOCK(aes);
    }
    if (over > 0) {
        /* Zeroize the unused part of the block. */
        XMEMSET(lastBlock + over, 0, AES_BLOCK_SIZE - over);
        /* Hash the last block of cipher text. */
    #ifdef HAVE_INTEL_AVX2
        if (IS_INTEL_AVX2(intel_flags)) {
            AES_GCM_ghash_block_avx2(lastBlock, AES_TAG(aes), aes->gcm.H);
        }
        else
    #endif
    #ifdef HAVE_INTEL_AVX1
        if (IS_INTEL_AVX1(intel_flags)) {
            AES_GCM_ghash_block_avx1(lastBlock, AES_TAG(aes), aes->gcm.H);
        }
        else
    #endif
        {
            AES_GCM_ghash_block_aesni(lastBlock, AES_TAG(aes), aes->gcm.H);
        }
    }
    /* Calculate and compare the authentication tag. */
#ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        AES_GCM_decrypt_final_avx2(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->gcm.H, AES_INITCTR(aes), &res);
    }
    else
#endif
#ifdef HAVE_INTEL_AVX1
    if (IS_INTEL_AVX1(intel_flags)) {
        AES_GCM_decrypt_final_avx1(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->gcm.H, AES_INITCTR(aes), &res);
    }
    else
#endif
    {
        AES_GCM_decrypt_final_aesni(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->gcm.H, AES_INITCTR(aes), &res);
    }
    RESTORE_VECTOR_REGISTERS();
    /* Return error code when calculated doesn't match input. */
    if (res == 0) {
        ret = AES_GCM_AUTH_E;
    }
    return ret;
}
  7484. #endif /* HAVE_AES_DECRYPT || HAVE_AESGCM_DECRYPT */
  7485. #endif /* WOLFSSL_AESNI */
/* Initialize an AES GCM cipher for encryption or decryption.
 *
 * Must call wc_AesInit() before calling this function.
 * Call wc_AesGcmSetIV() before calling this function to generate part of IV.
 * Call wc_AesGcmSetExtIV() before calling this function to cache IV.
 *
 * When iv is NULL, a previously cached nonce (aes->nonceSz != 0) is used
 * instead. An IV no larger than a block is cached in aes->reg for later use.
 *
 * @param [in, out] aes   AES object.
 * @param [in]      key   Buffer holding key.
 * @param [in]      len   Length of key in bytes.
 * @param [in]      iv    Buffer holding IV/nonce.
 * @param [in]      ivSz  Length of IV/nonce in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL, or the IV is NULL and no previous IV has been set.
 * @return  MEMORY_E when dynamic memory allocation fails. (WOLFSSL_SMALL_STACK)
 */
int wc_AesGcmInit(Aes* aes, const byte* key, word32 len, const byte* iv,
    word32 ivSz)
{
    int ret = 0;

    /* Check validity of parameters. */
    if ((aes == NULL) || ((len > 0) && (key == NULL)) ||
            ((ivSz == 0) && (iv != NULL)) ||
            ((ivSz > 0) && (iv == NULL))) {
        ret = BAD_FUNC_ARG;
    }

#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_AESNI)
    if ((ret == 0) && (aes->streamData == NULL)) {
        /* Allocate buffers for streaming. */
        aes->streamData = (byte*)XMALLOC(5 * AES_BLOCK_SIZE, aes->heap,
            DYNAMIC_TYPE_AES);
        if (aes->streamData == NULL) {
            ret = MEMORY_E;
        }
    }
#endif

    /* Set the key if passed in. */
    if ((ret == 0) && (key != NULL)) {
        ret = wc_AesGcmSetKey(aes, key, len);
    }

    if (ret == 0) {
        /* Set the IV passed in if it is smaller than a block. */
        if ((iv != NULL) && (ivSz <= AES_BLOCK_SIZE)) {
            /* XMEMMOVE in case iv already points into aes->reg. */
            XMEMMOVE((byte*)aes->reg, iv, ivSz);
            aes->nonceSz = ivSz;
        }
        /* No IV passed in, check for cached IV. */
        if ((iv == NULL) && (aes->nonceSz != 0)) {
            /* Use the cached copy. */
            iv = (byte*)aes->reg;
            ivSz = aes->nonceSz;
        }
        if (iv != NULL) {
            /* Initialize with the IV. */
        #ifdef WOLFSSL_AESNI
            if (haveAESNI
            #ifdef HAVE_INTEL_AVX2
                || IS_INTEL_AVX2(intel_flags)
            #endif
            #ifdef HAVE_INTEL_AVX1
                || IS_INTEL_AVX1(intel_flags)
            #endif
                ) {
                ret = AesGcmInit_aesni(aes, iv, ivSz);
            }
            else
        #endif
            {
                ret = AesGcmInit_C(aes, iv, ivSz);
            }

            aes->nonceSet = 1;
        }
    }

    return ret;
}
  7561. /* Initialize an AES GCM cipher for encryption.
  7562. *
  7563. * Must call wc_AesInit() before calling this function.
  7564. *
  7565. * @param [in, out] aes AES object.
  7566. * @param [in] key Buffer holding key.
  7567. * @param [in] len Length of key in bytes.
  7568. * @param [in] iv Buffer holding IV/nonce.
  7569. * @param [in] ivSz Length of IV/nonce in bytes.
  7570. * @return 0 on success.
  7571. * @return BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
  7572. * is NULL, or the IV is NULL and no previous IV has been set.
  7573. */
  7574. int wc_AesGcmEncryptInit(Aes* aes, const byte* key, word32 len, const byte* iv,
  7575. word32 ivSz)
  7576. {
  7577. return wc_AesGcmInit(aes, key, len, iv, ivSz);
  7578. }
  7579. /* Initialize an AES GCM cipher for encryption. Get IV.
  7580. *
  7581. * Must call wc_AesGcmSetIV() to generate part of IV before calling this
  7582. * function.
  7583. * Must call wc_AesInit() before calling this function.
  7584. *
  7585. * See wc_AesGcmEncrypt_ex() for non-streaming version of getting IV out.
  7586. *
  7587. * @param [in, out] aes AES object.
  7588. * @param [in] key Buffer holding key.
  7589. * @param [in] len Length of key in bytes.
  7590. * @param [in] iv Buffer holding IV/nonce.
  7591. * @param [in] ivSz Length of IV/nonce in bytes.
  7592. * @return 0 on success.
  7593. * @return BAD_FUNC_ARG when aes is NULL, key length is non-zero but key
  7594. * is NULL, or the IV is NULL or ivOutSz is not the same as cached
  7595. * nonce size.
  7596. */
  7597. int wc_AesGcmEncryptInit_ex(Aes* aes, const byte* key, word32 len, byte* ivOut,
  7598. word32 ivOutSz)
  7599. {
  7600. int ret;
  7601. /* Check validity of parameters. */
  7602. if ((aes == NULL) || (ivOut == NULL) || (ivOutSz != aes->nonceSz)) {
  7603. ret = BAD_FUNC_ARG;
  7604. }
  7605. else {
  7606. /* Copy out the IV including generated part for decryption. */
  7607. XMEMCPY(ivOut, aes->reg, ivOutSz);
  7608. /* Initialize AES GCM cipher with key and cached Iv. */
  7609. ret = wc_AesGcmInit(aes, key, len, NULL, 0);
  7610. }
  7611. return ret;
  7612. }
/* Update the AES GCM for encryption with data and/or authentication data.
 *
 * All the AAD must be passed to update before the plaintext.
 * Last part of AAD can be passed with first part of plaintext.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes       AES object.
 * @param [out]     out       Buffer to hold cipher text.
 * @param [in]      in        Buffer holding plaintext.
 * @param [in]      sz        Length of plaintext in bytes.
 * @param [in]      authIn    Buffer holding authentication data.
 * @param [in]      authInSz  Length of authentication data in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL.
 * @return  MISSING_KEY when the GCM key has not been set.
 * @return  MISSING_IV when the nonce has not been set.
 * @return  AES_GCM_OVERFLOW_E when the 2x32-bit invocation counter wraps.
 */
int wc_AesGcmEncryptUpdate(Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* authIn, word32 authInSz)
{
    int ret = 0;

    /* Check validity of parameters. */
    if ((aes == NULL) || ((authInSz > 0) && (authIn == NULL)) || ((sz > 0) &&
            ((out == NULL) || (in == NULL)))) {
        ret = BAD_FUNC_ARG;
    }

    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }

    /* First data of this invocation (no AAD or cipher text counted yet):
     * bump the 64-bit invocation counter kept as two 32-bit words and fail
     * on wrap-around. Only applies when the counter mode was set up. */
    if ((ret == 0) && aes->ctrSet && (aes->aSz == 0) && (aes->cSz == 0)) {
        aes->invokeCtr[0]++;
        if (aes->invokeCtr[0] == 0) {
            aes->invokeCtr[1]++;
            if (aes->invokeCtr[1] == 0)
                ret = AES_GCM_OVERFLOW_E;
        }
    }

    if (ret == 0) {
        /* Encrypt with AAD and/or plaintext. */
    #if defined(WOLFSSL_AESNI)
        if (haveAESNI
        #ifdef HAVE_INTEL_AVX2
            || IS_INTEL_AVX2(intel_flags)
        #endif
        #ifdef HAVE_INTEL_AVX1
            || IS_INTEL_AVX1(intel_flags)
        #endif
            ) {
            ret = AesGcmEncryptUpdate_aesni(aes, out, in, sz, authIn, authInSz);
        }
        else
    #endif
        {
            /* Software (C) fallback. */
            /* Encrypt the plaintext. */
            ret = AesGcmCryptUpdate_C(aes, out, in, sz);
            if (ret != 0)
                return ret;
            /* Update the authentication tag with any authentication data and the
             * new cipher text. */
            GHASH_UPDATE(aes, authIn, authInSz, out, sz);
        }
    }

    return ret;
}
/* Finalize the AES GCM for encryption and return the authentication tag.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes        AES object.
 * @param [out]     authTag    Buffer to hold authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes -
 *                             must be 1..AES_BLOCK_SIZE.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes or authTag is NULL, or authTagSz is 0 or
 *          larger than a block.
 * @return  MISSING_KEY when the GCM key has not been set.
 * @return  MISSING_IV when the nonce has not been set.
 */
int wc_AesGcmEncryptFinal(Aes* aes, byte* authTag, word32 authTagSz)
{
    int ret = 0;

    /* Check validity of parameters. */
    if ((aes == NULL) || (authTag == NULL) || (authTagSz > AES_BLOCK_SIZE) ||
            (authTagSz == 0)) {
        ret = BAD_FUNC_ARG;
    }

    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }

    if (ret == 0) {
        /* Calculate authentication tag. */
    #ifdef WOLFSSL_AESNI
        if (haveAESNI
        #ifdef HAVE_INTEL_AVX2
            || IS_INTEL_AVX2(intel_flags)
        #endif
        #ifdef HAVE_INTEL_AVX1
            || IS_INTEL_AVX1(intel_flags)
        #endif
            ) {
            ret = AesGcmEncryptFinal_aesni(aes, authTag, authTagSz);
        }
        else
    #endif
        {
            ret = AesGcmFinal_C(aes, authTag, authTagSz);
        }
    }

    /* Advance the cached counter/nonce for the next invocation when the
     * counter mode was set up. */
    if ((ret == 0) && aes->ctrSet) {
        IncCtr((byte*)aes->reg, aes->nonceSz);
    }

    return ret;
}
  7733. #if defined(HAVE_AES_DECRYPT) || defined(HAVE_AESGCM_DECRYPT)
  7734. /* Initialize an AES GCM cipher for decryption.
  7735. *
  7736. * Must call wc_AesInit() before calling this function.
  7737. *
  7738. * Call wc_AesGcmSetExtIV() before calling this function to use FIPS external IV
  7739. * instead.
  7740. *
  7741. * @param [in, out] aes AES object.
  7742. * @param [in] key Buffer holding key.
  7743. * @param [in] len Length of key in bytes.
  7744. * @param [in] iv Buffer holding IV/nonce.
  7745. * @param [in] ivSz Length of IV/nonce in bytes.
  7746. * @return 0 on success.
  7747. * @return BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
  7748. * is NULL, or the IV is NULL and no previous IV has been set.
  7749. */
  7750. int wc_AesGcmDecryptInit(Aes* aes, const byte* key, word32 len, const byte* iv,
  7751. word32 ivSz)
  7752. {
  7753. return wc_AesGcmInit(aes, key, len, iv, ivSz);
  7754. }
/* Update the AES GCM for decryption with data and/or authentication data.
 *
 * All the AAD must be passed to update before the cipher text.
 * Last part of AAD can be passed with first part of cipher text.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes       AES object.
 * @param [out]     out       Buffer to hold plaintext.
 * @param [in]      in        Buffer holding cipher text.
 * @param [in]      sz        Length of cipher text in bytes.
 * @param [in]      authIn    Buffer holding authentication data.
 * @param [in]      authInSz  Length of authentication data in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL.
 * @return  MISSING_KEY when no key has been set.
 * @return  MISSING_IV when no IV/nonce has been set.
 */
int wc_AesGcmDecryptUpdate(Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* authIn, word32 authInSz)
{
    int ret = 0;

    /* Check validity of parameters. */
    if ((aes == NULL) || ((authInSz > 0) && (authIn == NULL)) || ((sz > 0) &&
            ((out == NULL) || (in == NULL)))) {
        ret = BAD_FUNC_ARG;
    }
    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }
    if (ret == 0) {
        /* Decrypt with AAD and/or cipher text. */
#if defined(WOLFSSL_AESNI)
        /* Use the AES-NI assembly path when the CPU supports it. */
        if (haveAESNI
        #ifdef HAVE_INTEL_AVX2
            || IS_INTEL_AVX2(intel_flags)
        #endif
        #ifdef HAVE_INTEL_AVX1
            || IS_INTEL_AVX1(intel_flags)
        #endif
            ) {
            ret = AesGcmDecryptUpdate_aesni(aes, out, in, sz, authIn, authInSz);
        }
        else
#endif
        {
            /* Update the authentication tag with any authentication data and
             * cipher text. */
            GHASH_UPDATE(aes, authIn, authInSz, in, sz);
            /* Decrypt the cipher text. */
            ret = AesGcmCryptUpdate_C(aes, out, in, sz);
        }
    }
    return ret;
}
/* Finalize the AES GCM for decryption and check the authentication tag.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes        AES object.
 * @param [in]      authTag    Buffer holding authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes or authTag is NULL, or authTagSz is 0 or
 *          greater than AES_BLOCK_SIZE.
 * @return  MISSING_KEY when no key has been set.
 * @return  MISSING_IV when no IV/nonce has been set.
 * @return  AES_GCM_AUTH_E when the calculated tag does not match.
 */
int wc_AesGcmDecryptFinal(Aes* aes, const byte* authTag, word32 authTagSz)
{
    int ret = 0;

    /* Check validity of parameters. */
    if ((aes == NULL) || (authTag == NULL) || (authTagSz > AES_BLOCK_SIZE) ||
            (authTagSz == 0)) {
        ret = BAD_FUNC_ARG;
    }
    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }
    if (ret == 0) {
        /* Calculate authentication tag and compare with the one passed in. */
#ifdef WOLFSSL_AESNI
        if (haveAESNI
        #ifdef HAVE_INTEL_AVX2
            || IS_INTEL_AVX2(intel_flags)
        #endif
        #ifdef HAVE_INTEL_AVX1
            || IS_INTEL_AVX1(intel_flags)
        #endif
            ) {
            ret = AesGcmDecryptFinal_aesni(aes, authTag, authTagSz);
        }
        else
#endif
        {
            ALIGN32 byte calcTag[AES_BLOCK_SIZE];
            /* Calculate authentication tag. */
            ret = AesGcmFinal_C(aes, calcTag, authTagSz);
            if (ret == 0) {
                /* Constant-time compare so the check leaks no timing info. */
                if (ConstantCompare(authTag, calcTag, (int)authTagSz) != 0) {
                    ret = AES_GCM_AUTH_E;
                }
            }
        }
    }
    /* On success, reset the object state for reuse. */
    if (ret == 0)
        wc_AesFree(aes);
    return ret;
}
  7873. #endif /* HAVE_AES_DECRYPT || HAVE_AESGCM_DECRYPT */
  7874. #endif /* WOLFSSL_AESGCM_STREAM */
  7875. #endif /* WOLFSSL_XILINX_CRYPT */
  7876. #endif /* end of block for AESGCM implementation selection */
  7877. /* Common to all, abstract functions that build off of lower level AESGCM
  7878. * functions */
  7879. #ifndef WC_NO_RNG
  7880. static WARN_UNUSED_RESULT WC_INLINE int CheckAesGcmIvSize(int ivSz) {
  7881. return (ivSz == GCM_NONCE_MIN_SZ ||
  7882. ivSz == GCM_NONCE_MID_SZ ||
  7883. ivSz == GCM_NONCE_MAX_SZ);
  7884. }
  7885. int wc_AesGcmSetExtIV(Aes* aes, const byte* iv, word32 ivSz)
  7886. {
  7887. int ret = 0;
  7888. if (aes == NULL || iv == NULL || !CheckAesGcmIvSize((int)ivSz)) {
  7889. ret = BAD_FUNC_ARG;
  7890. }
  7891. if (ret == 0) {
  7892. XMEMCPY((byte*)aes->reg, iv, ivSz);
  7893. /* If the IV is 96, allow for a 2^64 invocation counter.
  7894. * For any other size for the nonce, limit the invocation
  7895. * counter to 32-bits. (SP 800-38D 8.3) */
  7896. aes->invokeCtr[0] = 0;
  7897. aes->invokeCtr[1] = (ivSz == GCM_NONCE_MID_SZ) ? 0 : 0xFFFFFFFF;
  7898. #ifdef WOLFSSL_AESGCM_STREAM
  7899. aes->ctrSet = 1;
  7900. #endif
  7901. aes->nonceSz = ivSz;
  7902. }
  7903. return ret;
  7904. }
  7905. int wc_AesGcmSetIV(Aes* aes, word32 ivSz,
  7906. const byte* ivFixed, word32 ivFixedSz,
  7907. WC_RNG* rng)
  7908. {
  7909. int ret = 0;
  7910. if (aes == NULL || rng == NULL || !CheckAesGcmIvSize((int)ivSz) ||
  7911. (ivFixed == NULL && ivFixedSz != 0) ||
  7912. (ivFixed != NULL && ivFixedSz != AES_IV_FIXED_SZ)) {
  7913. ret = BAD_FUNC_ARG;
  7914. }
  7915. if (ret == 0) {
  7916. byte* iv = (byte*)aes->reg;
  7917. if (ivFixedSz)
  7918. XMEMCPY(iv, ivFixed, ivFixedSz);
  7919. ret = wc_RNG_GenerateBlock(rng, iv + ivFixedSz, ivSz - ivFixedSz);
  7920. }
  7921. if (ret == 0) {
  7922. /* If the IV is 96, allow for a 2^64 invocation counter.
  7923. * For any other size for the nonce, limit the invocation
  7924. * counter to 32-bits. (SP 800-38D 8.3) */
  7925. aes->invokeCtr[0] = 0;
  7926. aes->invokeCtr[1] = (ivSz == GCM_NONCE_MID_SZ) ? 0 : 0xFFFFFFFF;
  7927. #ifdef WOLFSSL_AESGCM_STREAM
  7928. aes->ctrSet = 1;
  7929. #endif
  7930. aes->nonceSz = ivSz;
  7931. }
  7932. return ret;
  7933. }
  7934. int wc_AesGcmEncrypt_ex(Aes* aes, byte* out, const byte* in, word32 sz,
  7935. byte* ivOut, word32 ivOutSz,
  7936. byte* authTag, word32 authTagSz,
  7937. const byte* authIn, word32 authInSz)
  7938. {
  7939. int ret = 0;
  7940. if (aes == NULL || (sz != 0 && (in == NULL || out == NULL)) ||
  7941. ivOut == NULL || ivOutSz != aes->nonceSz ||
  7942. (authIn == NULL && authInSz != 0)) {
  7943. ret = BAD_FUNC_ARG;
  7944. }
  7945. if (ret == 0) {
  7946. aes->invokeCtr[0]++;
  7947. if (aes->invokeCtr[0] == 0) {
  7948. aes->invokeCtr[1]++;
  7949. if (aes->invokeCtr[1] == 0)
  7950. ret = AES_GCM_OVERFLOW_E;
  7951. }
  7952. }
  7953. if (ret == 0) {
  7954. XMEMCPY(ivOut, aes->reg, ivOutSz);
  7955. ret = wc_AesGcmEncrypt(aes, out, in, sz,
  7956. (byte*)aes->reg, ivOutSz,
  7957. authTag, authTagSz,
  7958. authIn, authInSz);
  7959. if (ret == 0)
  7960. IncCtr((byte*)aes->reg, ivOutSz);
  7961. }
  7962. return ret;
  7963. }
/* One-shot GMAC: authenticate authIn with a freshly generated random IV.
 *
 * The IV is produced via wc_AesGcmSetIV() using rng and is written back to
 * the caller through iv so it can be sent alongside the tag.
 *
 * @param [in]  key        AES key.
 * @param [in]  keySz      Length of key in bytes.
 * @param [out] iv         Buffer that receives the generated IV.
 * @param [in]  ivSz       Length of IV to generate, in bytes.
 * @param [in]  authIn     Data to authenticate.
 * @param [in]  authInSz   Length of data to authenticate, in bytes.
 * @param [out] authTag    Buffer that receives the authentication tag.
 * @param [in]  authTagSz  Length of tag to produce, in bytes.
 * @param [in]  rng        Random number generator for IV generation.
 * @return  0 on success, BAD_FUNC_ARG on invalid parameters, MEMORY_E on
 *          allocation failure, or an underlying AES-GCM/RNG error code.
 */
int wc_Gmac(const byte* key, word32 keySz, byte* iv, word32 ivSz,
    const byte* authIn, word32 authInSz,
    byte* authTag, word32 authTagSz, WC_RNG* rng)
{
#ifdef WOLFSSL_SMALL_STACK
    Aes *aes = NULL;
#else
    Aes aes[1];
#endif
    int ret;

    if (key == NULL || iv == NULL || (authIn == NULL && authInSz != 0) ||
            authTag == NULL || authTagSz == 0 || rng == NULL) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_SMALL_STACK
    if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
                              DYNAMIC_TYPE_AES)) == NULL)
        return MEMORY_E;
#endif

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret == 0) {
        ret = wc_AesGcmSetKey(aes, key, keySz);
        if (ret == 0)
            ret = wc_AesGcmSetIV(aes, ivSz, NULL, 0, rng);
        if (ret == 0)
            /* No plaintext: encrypt only computes the tag over authIn. */
            ret = wc_AesGcmEncrypt_ex(aes, NULL, NULL, 0, iv, ivSz,
                                  authTag, authTagSz, authIn, authInSz);
        wc_AesFree(aes);
    }
    /* Scrub key material from the (possibly stack-allocated) AES object. */
    ForceZero(aes, sizeof *aes);
#ifdef WOLFSSL_SMALL_STACK
    XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif

    return ret;
}
/* One-shot GMAC verification: recompute the tag over authIn with the given
 * key and IV, and compare it against authTag (tag comparison is performed
 * inside wc_AesGcmDecrypt).
 *
 * @param [in] key        AES key.
 * @param [in] keySz      Length of key in bytes.
 * @param [in] iv         IV/nonce the tag was generated with.
 * @param [in] ivSz       Length of IV in bytes.
 * @param [in] authIn     Data that was authenticated.
 * @param [in] authInSz   Length of authenticated data in bytes.
 * @param [in] authTag    Tag to verify.
 * @param [in] authTagSz  Length of tag in bytes (1..AES_BLOCK_SIZE).
 * @return  0 when the tag verifies.
 * @return  BAD_FUNC_ARG on invalid parameters, MEMORY_E on allocation
 *          failure, or an AES-GCM error (e.g. tag mismatch).
 * @return  NOT_COMPILED_IN when built without HAVE_AES_DECRYPT.
 */
int wc_GmacVerify(const byte* key, word32 keySz,
    const byte* iv, word32 ivSz,
    const byte* authIn, word32 authInSz,
    const byte* authTag, word32 authTagSz)
{
    int ret;
#ifdef HAVE_AES_DECRYPT
#ifdef WOLFSSL_SMALL_STACK
    Aes *aes = NULL;
#else
    Aes aes[1];
#endif

    if (key == NULL || iv == NULL || (authIn == NULL && authInSz != 0) ||
            authTag == NULL || authTagSz == 0 || authTagSz > AES_BLOCK_SIZE) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_SMALL_STACK
    if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
                              DYNAMIC_TYPE_AES)) == NULL)
        return MEMORY_E;
#endif

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret == 0) {
        ret = wc_AesGcmSetKey(aes, key, keySz);
        if (ret == 0)
            /* No cipher text: decrypt only checks the tag over authIn. */
            ret = wc_AesGcmDecrypt(aes, NULL, NULL, 0, iv, ivSz,
                                  authTag, authTagSz, authIn, authInSz);
        wc_AesFree(aes);
    }
    /* Scrub key material from the (possibly stack-allocated) AES object. */
    ForceZero(aes, sizeof *aes);
#ifdef WOLFSSL_SMALL_STACK
    XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif
#else
    (void)key;
    (void)keySz;
    (void)iv;
    (void)ivSz;
    (void)authIn;
    (void)authInSz;
    (void)authTag;
    (void)authTagSz;
    ret = NOT_COMPILED_IN;
#endif
    return ret;
}
  8045. #endif /* WC_NO_RNG */
  8046. WOLFSSL_API int wc_GmacSetKey(Gmac* gmac, const byte* key, word32 len)
  8047. {
  8048. if (gmac == NULL || key == NULL) {
  8049. return BAD_FUNC_ARG;
  8050. }
  8051. return wc_AesGcmSetKey(&gmac->aes, key, len);
  8052. }
  8053. WOLFSSL_API int wc_GmacUpdate(Gmac* gmac, const byte* iv, word32 ivSz,
  8054. const byte* authIn, word32 authInSz,
  8055. byte* authTag, word32 authTagSz)
  8056. {
  8057. if (gmac == NULL) {
  8058. return BAD_FUNC_ARG;
  8059. }
  8060. return wc_AesGcmEncrypt(&gmac->aes, NULL, NULL, 0, iv, ivSz,
  8061. authTag, authTagSz, authIn, authInSz);
  8062. }
  8063. #endif /* HAVE_AESGCM */
  8064. #ifdef HAVE_AESCCM
  8065. int wc_AesCcmSetKey(Aes* aes, const byte* key, word32 keySz)
  8066. {
  8067. if (!((keySz == 16) || (keySz == 24) || (keySz == 32)))
  8068. return BAD_FUNC_ARG;
  8069. return wc_AesSetKey(aes, key, keySz, NULL, AES_ENCRYPTION);
  8070. }
  8071. /* Checks if the tag size is an accepted value based on RFC 3610 section 2
  8072. * returns 0 if tag size is ok
  8073. */
  8074. int wc_AesCcmCheckTagSize(int sz)
  8075. {
  8076. /* values here are from RFC 3610 section 2 */
  8077. if (sz != 4 && sz != 6 && sz != 8 && sz != 10 && sz != 12 && sz != 14
  8078. && sz != 16) {
  8079. WOLFSSL_MSG("Bad auth tag size AES-CCM");
  8080. return BAD_FUNC_ARG;
  8081. }
  8082. return 0;
  8083. }
  8084. #ifdef WOLFSSL_ARMASM
  8085. /* implementation located in wolfcrypt/src/port/arm/armv8-aes.c */
  8086. #elif defined(HAVE_COLDFIRE_SEC)
  8087. #error "Coldfire SEC doesn't currently support AES-CCM mode"
  8088. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  8089. !defined(WOLFSSL_QNX_CAAM)
  8090. /* implemented in wolfcrypt/src/port/caam_aes.c */
  8091. #elif defined(WOLFSSL_SILABS_SE_ACCEL)
  8092. /* implemented in wolfcrypt/src/port/silabs/silabs_aes.c */
  8093. int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
  8094. const byte* nonce, word32 nonceSz,
  8095. byte* authTag, word32 authTagSz,
  8096. const byte* authIn, word32 authInSz)
  8097. {
  8098. return wc_AesCcmEncrypt_silabs(
  8099. aes, out, in, inSz,
  8100. nonce, nonceSz,
  8101. authTag, authTagSz,
  8102. authIn, authInSz);
  8103. }
  8104. #ifdef HAVE_AES_DECRYPT
  8105. int wc_AesCcmDecrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
  8106. const byte* nonce, word32 nonceSz,
  8107. const byte* authTag, word32 authTagSz,
  8108. const byte* authIn, word32 authInSz)
  8109. {
  8110. return wc_AesCcmDecrypt_silabs(
  8111. aes, out, in, inSz,
  8112. nonce, nonceSz,
  8113. authTag, authTagSz,
  8114. authIn, authInSz);
  8115. }
  8116. #endif
  8117. #elif defined(FREESCALE_LTC)
/* AES-CCM encrypt using the Freescale/NXP LTC hardware block.
 * return 0 on success */
int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
    const byte* nonce, word32 nonceSz,
    byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    byte *key;
    word32 keySize;
    status_t status;

    /* sanity check on arguments */
    /* note, LTC_AES_EncryptTagCcm() doesn't allow null src or dst
     * ptrs even if inSz is zero (ltc_aes_ccm_check_input_args()), so
     * don't allow it here either.
     */
    if (aes == NULL || out == NULL || in == NULL || nonce == NULL
            || authTag == NULL || nonceSz < 7 || nonceSz > 13) {
        return BAD_FUNC_ARG;
    }

    /* RFC 3610: tag must be an even length in [4, 16]. */
    if (wc_AesCcmCheckTagSize(authTagSz) != 0) {
        return BAD_FUNC_ARG;
    }

    key = (byte*)aes->key;

    status = wc_AesGetKeySize(aes, &keySize);
    if (status != 0) {
        return status;
    }

    /* Serialize access to the LTC hardware block. */
    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    status = LTC_AES_EncryptTagCcm(LTC_BASE, in, out, inSz,
        nonce, nonceSz, authIn, authInSz, key, keySize, authTag, authTagSz);
    wolfSSL_CryptHwMutexUnLock();

    return (kStatus_Success == status) ? 0 : BAD_FUNC_ARG;
}
  8152. #ifdef HAVE_AES_DECRYPT
/* AES-CCM decrypt using the Freescale/NXP LTC hardware block.
 * Returns 0 on success, AES_CCM_AUTH_E on authentication failure. */
int wc_AesCcmDecrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
    const byte* nonce, word32 nonceSz,
    const byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    byte *key;
    word32 keySize;
    status_t status;

    /* sanity check on arguments */
    if (aes == NULL || out == NULL || in == NULL || nonce == NULL
            || authTag == NULL || nonceSz < 7 || nonceSz > 13) {
        return BAD_FUNC_ARG;
    }

    key = (byte*)aes->key;

    status = wc_AesGetKeySize(aes, &keySize);
    if (status != 0) {
        return status;
    }

    /* Serialize access to the LTC hardware block. */
    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    status = LTC_AES_DecryptTagCcm(LTC_BASE, in, out, inSz,
        nonce, nonceSz, authIn, authInSz, key, keySize, authTag, authTagSz);
    wolfSSL_CryptHwMutexUnLock();

    if (status != kStatus_Success) {
        /* Do not release plaintext when the tag check fails. */
        XMEMSET(out, 0, inSz);
        return AES_CCM_AUTH_E;
    }
    return 0;
}
  8183. #endif /* HAVE_AES_DECRYPT */
  8184. #else
  8185. /* Software CCM */
  8186. static WARN_UNUSED_RESULT int roll_x(
  8187. Aes* aes, const byte* in, word32 inSz, byte* out)
  8188. {
  8189. int ret;
  8190. /* process the bulk of the data */
  8191. while (inSz >= AES_BLOCK_SIZE) {
  8192. xorbuf(out, in, AES_BLOCK_SIZE);
  8193. in += AES_BLOCK_SIZE;
  8194. inSz -= AES_BLOCK_SIZE;
  8195. ret = wc_AesEncrypt(aes, out, out);
  8196. if (ret != 0)
  8197. return ret;
  8198. }
  8199. /* process remainder of the data */
  8200. if (inSz > 0) {
  8201. xorbuf(out, in, inSz);
  8202. ret = wc_AesEncrypt(aes, out, out);
  8203. if (ret != 0)
  8204. return ret;
  8205. }
  8206. return 0;
  8207. }
  8208. static WARN_UNUSED_RESULT int roll_auth(
  8209. Aes* aes, const byte* in, word32 inSz, byte* out)
  8210. {
  8211. word32 authLenSz;
  8212. word32 remainder;
  8213. int ret;
  8214. /* encode the length in */
  8215. if (inSz <= 0xFEFF) {
  8216. authLenSz = 2;
  8217. out[0] ^= (byte)(inSz >> 8);
  8218. out[1] ^= (byte)inSz;
  8219. }
  8220. else {
  8221. authLenSz = 6;
  8222. out[0] ^= 0xFF;
  8223. out[1] ^= 0xFE;
  8224. out[2] ^= (byte)(inSz >> 24);
  8225. out[3] ^= (byte)(inSz >> 16);
  8226. out[4] ^= (byte)(inSz >> 8);
  8227. out[5] ^= (byte)inSz;
  8228. }
  8229. /* Note, the protocol handles auth data up to 2^64, but we are
  8230. * using 32-bit sizes right now, so the bigger data isn't handled
  8231. * else {}
  8232. */
  8233. /* start fill out the rest of the first block */
  8234. remainder = AES_BLOCK_SIZE - authLenSz;
  8235. if (inSz >= remainder) {
  8236. /* plenty of bulk data to fill the remainder of this block */
  8237. xorbuf(out + authLenSz, in, remainder);
  8238. inSz -= remainder;
  8239. in += remainder;
  8240. }
  8241. else {
  8242. /* not enough bulk data, copy what is available, and pad zero */
  8243. xorbuf(out + authLenSz, in, inSz);
  8244. inSz = 0;
  8245. }
  8246. ret = wc_AesEncrypt(aes, out, out);
  8247. if ((ret == 0) && (inSz > 0)) {
  8248. ret = roll_x(aes, in, inSz, out);
  8249. }
  8250. return ret;
  8251. }
  8252. static WC_INLINE void AesCcmCtrInc(byte* B, word32 lenSz)
  8253. {
  8254. word32 i;
  8255. for (i = 0; i < lenSz; i++) {
  8256. if (++B[AES_BLOCK_SIZE - 1 - i] != 0) return;
  8257. }
  8258. }
  8259. #ifdef WOLFSSL_AESNI
/* From counter block B, build four consecutive counter blocks (B, B+1, B+2,
 * B+3) in the 4*AES_BLOCK_SIZE buffer for the AES-NI four-block ECB path.
 * The counter is big-endian in the last lenSz bytes of each block. */
static WC_INLINE void AesCcmCtrIncSet4(byte* B, word32 lenSz)
{
    word32 i;

    /* B+1 = B */
    XMEMCPY(B + AES_BLOCK_SIZE * 1, B, AES_BLOCK_SIZE);
    /* B+2,B+3 = B,B+1 */
    XMEMCPY(B + AES_BLOCK_SIZE * 2, B, AES_BLOCK_SIZE * 2);

    /* Add one to the copy in slot 1 (ripple carry, stop when no carry). */
    for (i = 0; i < lenSz; i++) {
        if (++B[AES_BLOCK_SIZE * 2 - 1 - i] != 0) break;
    }
    /* Add two to the copy in slot 2; propagate carry only on wrap. */
    B[AES_BLOCK_SIZE * 3 - 1] += 2;
    if (B[AES_BLOCK_SIZE * 3 - 1] < 2) {
        for (i = 1; i < lenSz; i++) {
            if (++B[AES_BLOCK_SIZE * 3 - 1 - i] != 0) break;
        }
    }
    /* Add three to the copy in slot 3; propagate carry only on wrap. */
    B[AES_BLOCK_SIZE * 4 - 1] += 3;
    if (B[AES_BLOCK_SIZE * 4 - 1] < 3) {
        for (i = 1; i < lenSz; i++) {
            if (++B[AES_BLOCK_SIZE * 4 - 1 - i] != 0) break;
        }
    }
}
  8283. static WC_INLINE void AesCcmCtrInc4(byte* B, word32 lenSz)
  8284. {
  8285. word32 i;
  8286. B[AES_BLOCK_SIZE - 1] += 4;
  8287. if (B[AES_BLOCK_SIZE - 1] < 4) {
  8288. for (i = 1; i < lenSz; i++) {
  8289. if (++B[AES_BLOCK_SIZE - 1 - i] != 0) break;
  8290. }
  8291. }
  8292. }
  8293. #endif
/* Software AES - CCM Encrypt */
/* Encrypt in/inSz into out and compute the CCM tag over the AAD and
 * plaintext (RFC 3610).  B holds the working input block (B0, then counter
 * blocks); A holds the CBC-MAC accumulator and then the CTR keystream.
 * return 0 on success */
int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
    const byte* nonce, word32 nonceSz,
    byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
#ifndef WOLFSSL_AESNI
    byte A[AES_BLOCK_SIZE];
    byte B[AES_BLOCK_SIZE];
#else
    /* Four blocks at a time for the AES-NI ECB path. */
    ALIGN128 byte A[AES_BLOCK_SIZE * 4];
    ALIGN128 byte B[AES_BLOCK_SIZE * 4];
#endif
    byte lenSz;                  /* size of the length field in B0/counters */
    word32 i;
    byte mask = 0xFF;
    const word32 wordSz = (word32)sizeof(word32);
    int ret;

    /* sanity check on arguments */
    if (aes == NULL || (inSz != 0 && (in == NULL || out == NULL)) ||
            nonce == NULL || authTag == NULL || nonceSz < 7 || nonceSz > 13 ||
            authTagSz > AES_BLOCK_SIZE)
        return BAD_FUNC_ARG;

    /* sanity check on tag size */
    if (wc_AesCcmCheckTagSize((int)authTagSz) != 0) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret =
            wc_CryptoCb_AesCcmEncrypt(aes, out, in, inSz, nonce, nonceSz,
                authTag, authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

    XMEMSET(A, 0, sizeof(A));
    XMEMCPY(B+1, nonce, nonceSz);
    lenSz = AES_BLOCK_SIZE - 1 - (byte)nonceSz;
    /* B0 flags byte: bit 6 = AAD present, bits 5..3 = (tagSz-2)/2,
     * bits 2..0 = length-field size minus one (RFC 3610 section 2.2). */
    B[0] = (byte)((authInSz > 0 ? 64 : 0)
        + (8 * (((byte)authTagSz - 2) / 2))
        + (lenSz - 1));
    /* Encode inSz big-endian in the last lenSz bytes; once i reaches the
     * word size the mask goes to zero so extra length bytes are written
     * as zero (inSz is only 32 bits). */
    for (i = 0; i < lenSz; i++) {
        if (mask && i >= wordSz)
            mask = 0x00;
        B[AES_BLOCK_SIZE - 1 - i] = (byte)((inSz >> ((8 * i) & mask)) & mask);
    }

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Add("wc_AesCcmEncrypt B", B, sizeof(B));
#endif

    /* Start the CBC-MAC: A = E(B0). */
    ret = wc_AesEncrypt(aes, B, A);
    if (ret != 0) {
        ForceZero(B, sizeof(B));
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Check(B, sizeof(B));
    #endif
        return ret;
    }
#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Add("wc_AesCcmEncrypt A", A, sizeof(A));
#endif

    /* Fold AAD (with its length encoding) into the CBC-MAC. */
    if (authInSz > 0) {
        ret = roll_auth(aes, authIn, authInSz, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
        #endif
            return ret;
        }
    }
    /* Fold the plaintext into the CBC-MAC. */
    if (inSz > 0) {
        ret = roll_x(aes, in, inSz, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
        #endif
            return ret;
        }
    }
    XMEMCPY(authTag, A, authTagSz);

    /* Build counter block A0 (flags + nonce + zero counter) and encrypt
     * the raw MAC into the final tag: tag = MAC XOR E(A0). */
    B[0] = lenSz - 1;
    for (i = 0; i < lenSz; i++)
        B[AES_BLOCK_SIZE - 1 - i] = 0;
    ret = wc_AesEncrypt(aes, B, A);
    if (ret != 0) {
        ForceZero(A, sizeof(A));
        ForceZero(B, sizeof(B));
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Check(A, sizeof(A));
        wc_MemZero_Check(B, sizeof(B));
    #endif
        return ret;
    }
    xorbuf(authTag, A, authTagSz);

    /* Counter value 1 starts the payload keystream blocks. */
    B[15] = 1;
#ifdef WOLFSSL_AESNI
    if (haveAESNI && aes->use_aesni) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        /* Process four counter blocks per AES-NI ECB call. */
        while (inSz >= AES_BLOCK_SIZE * 4) {
            AesCcmCtrIncSet4(B, lenSz);

            AES_ECB_encrypt(B, A, AES_BLOCK_SIZE * 4, (byte*)aes->key,
                            (int)aes->rounds);

            xorbuf(A, in, AES_BLOCK_SIZE * 4);
            XMEMCPY(out, A, AES_BLOCK_SIZE * 4);

            inSz -= AES_BLOCK_SIZE * 4;
            in += AES_BLOCK_SIZE * 4;
            out += AES_BLOCK_SIZE * 4;

            /* Advance slot 0 of B past the four counters just used. */
            AesCcmCtrInc4(B, lenSz);
        }
        RESTORE_VECTOR_REGISTERS();
    }
#endif
    /* CTR-mode encrypt remaining full blocks one at a time. */
    while (inSz >= AES_BLOCK_SIZE) {
        ret = wc_AesEncrypt(aes, B, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
        #endif
            return ret;
        }
        xorbuf(A, in, AES_BLOCK_SIZE);
        XMEMCPY(out, A, AES_BLOCK_SIZE);

        AesCcmCtrInc(B, lenSz);
        inSz -= AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        out += AES_BLOCK_SIZE;
    }
    /* Encrypt any trailing partial block with one more keystream block. */
    if (inSz > 0) {
        ret = wc_AesEncrypt(aes, B, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
        #endif
            return ret;
        }
        xorbuf(A, in, inSz);
        XMEMCPY(out, A, inSz);
    }

    /* Scrub keystream and counter state. */
    ForceZero(A, sizeof(A));
    ForceZero(B, sizeof(B));

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(A, sizeof(A));
    wc_MemZero_Check(B, sizeof(B));
#endif

    return 0;
}
  8457. #ifdef HAVE_AES_DECRYPT
/* Software AES - CCM Decrypt */
/* Decrypt in/inSz into out, recompute the CCM tag over the AAD and the
 * recovered plaintext, and compare it with authTag in constant time.
 * On mismatch the plaintext is wiped and AES_CCM_AUTH_E returned. */
int wc_AesCcmDecrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
    const byte* nonce, word32 nonceSz,
    const byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
#ifndef WOLFSSL_AESNI
    byte A[AES_BLOCK_SIZE];
    byte B[AES_BLOCK_SIZE];
#else
    /* Four blocks at a time for the AES-NI ECB path. */
    ALIGN128 byte B[AES_BLOCK_SIZE * 4];
    ALIGN128 byte A[AES_BLOCK_SIZE * 4];
#endif
    byte* o;                     /* running output pointer */
    byte lenSz;                  /* size of the length field in counters */
    word32 i, oSz;
    byte mask = 0xFF;
    const word32 wordSz = (word32)sizeof(word32);
    int ret;

    /* sanity check on arguments */
    if (aes == NULL || (inSz != 0 && (in == NULL || out == NULL)) ||
            nonce == NULL || authTag == NULL || nonceSz < 7 || nonceSz > 13 ||
            authTagSz > AES_BLOCK_SIZE)
        return BAD_FUNC_ARG;

    /* sanity check on tag size */
    if (wc_AesCcmCheckTagSize((int)authTagSz) != 0) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret =
            wc_CryptoCb_AesCcmDecrypt(aes, out, in, inSz, nonce, nonceSz,
            authTag, authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

    o = out;
    oSz = inSz;
    XMEMSET(A, 0, sizeof A);
    XMEMCPY(B+1, nonce, nonceSz);
    lenSz = AES_BLOCK_SIZE - 1 - (byte)nonceSz;

    /* Counter block: flags byte (length-field size minus one), nonce,
     * counter starting at 1 for the first payload block. */
    B[0] = lenSz - 1;
    for (i = 0; i < lenSz; i++)
        B[AES_BLOCK_SIZE - 1 - i] = 0;
    B[15] = 1;

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Add("wc_AesCcmEncrypt A", A, sizeof(A));
    wc_MemZero_Add("wc_AesCcmEncrypt B", B, sizeof(B));
#endif

#ifdef WOLFSSL_AESNI
    if (haveAESNI && aes->use_aesni) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        /* CTR-decrypt four blocks per AES-NI ECB call. */
        while (oSz >= AES_BLOCK_SIZE * 4) {
            AesCcmCtrIncSet4(B, lenSz);

            AES_ECB_encrypt(B, A, AES_BLOCK_SIZE * 4, (byte*)aes->key,
                            (int)aes->rounds);

            xorbuf(A, in, AES_BLOCK_SIZE * 4);
            XMEMCPY(o, A, AES_BLOCK_SIZE * 4);

            oSz -= AES_BLOCK_SIZE * 4;
            in += AES_BLOCK_SIZE * 4;
            o += AES_BLOCK_SIZE * 4;

            AesCcmCtrInc4(B, lenSz);
        }
        RESTORE_VECTOR_REGISTERS();
    }
#endif
    /* CTR-decrypt remaining full blocks one at a time. */
    while (oSz >= AES_BLOCK_SIZE) {
        ret = wc_AesEncrypt(aes, B, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
        #endif
            return ret;
        }
        xorbuf(A, in, AES_BLOCK_SIZE);
        XMEMCPY(o, A, AES_BLOCK_SIZE);

        AesCcmCtrInc(B, lenSz);
        oSz -= AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        o += AES_BLOCK_SIZE;
    }
    /* Decrypt any trailing partial block (oSz bytes remain). */
    if (inSz > 0) {
        ret = wc_AesEncrypt(aes, B, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
        #endif
            return ret;
        }
        xorbuf(A, in, oSz);
        XMEMCPY(o, A, oSz);
    }

    /* Reset the counter field to zero (A0). */
    for (i = 0; i < lenSz; i++)
        B[AES_BLOCK_SIZE - 1 - i] = 0;
    /* NOTE(review): this A value appears to be overwritten below before it
     * is read — looks redundant; kept as-is to preserve behavior. */
    ret = wc_AesEncrypt(aes, B, A);
    if (ret != 0) {
        ForceZero(A, sizeof(A));
        ForceZero(B, sizeof(B));
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Check(A, sizeof(A));
        wc_MemZero_Check(B, sizeof(B));
    #endif
        return ret;
    }

    /* Recompute the CBC-MAC over AAD and the recovered plaintext. */
    o = out;
    oSz = inSz;

    /* B0 flags byte: bit 6 = AAD present, bits 5..3 = (tagSz-2)/2,
     * bits 2..0 = length-field size minus one (RFC 3610 section 2.2). */
    B[0] = (byte)((authInSz > 0 ? 64 : 0)
        + (8 * (((byte)authTagSz - 2) / 2))
        + (lenSz - 1));
    /* Encode inSz big-endian; mask zeroes bytes past the 32-bit length. */
    for (i = 0; i < lenSz; i++) {
        if (mask && i >= wordSz)
            mask = 0x00;
        B[AES_BLOCK_SIZE - 1 - i] = (byte)((inSz >> ((8 * i) & mask)) & mask);
    }

    /* Start the CBC-MAC: A = E(B0). */
    ret = wc_AesEncrypt(aes, B, A);
    if (ret != 0) {
        ForceZero(A, sizeof(A));
        ForceZero(B, sizeof(B));
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Check(A, sizeof(A));
        wc_MemZero_Check(B, sizeof(B));
    #endif
        return ret;
    }

    if (authInSz > 0) {
        ret = roll_auth(aes, authIn, authInSz, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
        #endif
            return ret;
        }
    }
    if (inSz > 0) {
        ret = roll_x(aes, o, oSz, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
        #endif
            return ret;
        }
    }

    /* Keystream for the tag: B = E(A0) (flags + nonce + zero counter). */
    B[0] = lenSz - 1;
    for (i = 0; i < lenSz; i++)
        B[AES_BLOCK_SIZE - 1 - i] = 0;
    ret = wc_AesEncrypt(aes, B, B);
    if (ret != 0) {
        ForceZero(A, sizeof(A));
        ForceZero(B, sizeof(B));
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Check(A, sizeof(A));
        wc_MemZero_Check(B, sizeof(B));
    #endif
        return ret;
    }
    /* Final tag = MAC XOR E(A0); compare in constant time. */
    xorbuf(A, B, authTagSz);

    if (ConstantCompare(A, authTag, (int)authTagSz) != 0) {
        /* If the authTag check fails, don't keep the decrypted data.
         * Unfortunately, you need the decrypted data to calculate the
         * check value. */
        #if defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2) && \
            defined(ACVP_VECTOR_TESTING)
            WOLFSSL_MSG("Preserve output for vector responses");
        #else
            if (inSz > 0)
                XMEMSET(out, 0, inSz);
        #endif
        ret = AES_CCM_AUTH_E;
    }

    /* Scrub MAC, keystream and counter state. */
    ForceZero(A, sizeof(A));
    ForceZero(B, sizeof(B));
    o = NULL;

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(A, sizeof(A));
    wc_MemZero_Check(B, sizeof(B));
#endif

    return ret;
}
  8653. #endif /* HAVE_AES_DECRYPT */
  8654. #endif /* software CCM */
  8655. /* abstract functions that call lower level AESCCM functions */
  8656. #ifndef WC_NO_RNG
  8657. int wc_AesCcmSetNonce(Aes* aes, const byte* nonce, word32 nonceSz)
  8658. {
  8659. int ret = 0;
  8660. if (aes == NULL || nonce == NULL ||
  8661. nonceSz < CCM_NONCE_MIN_SZ || nonceSz > CCM_NONCE_MAX_SZ) {
  8662. ret = BAD_FUNC_ARG;
  8663. }
  8664. if (ret == 0) {
  8665. XMEMCPY(aes->reg, nonce, nonceSz);
  8666. aes->nonceSz = nonceSz;
  8667. /* Invocation counter should be 2^61 */
  8668. aes->invokeCtr[0] = 0;
  8669. aes->invokeCtr[1] = 0xE0000000;
  8670. }
  8671. return ret;
  8672. }
  8673. int wc_AesCcmEncrypt_ex(Aes* aes, byte* out, const byte* in, word32 sz,
  8674. byte* ivOut, word32 ivOutSz,
  8675. byte* authTag, word32 authTagSz,
  8676. const byte* authIn, word32 authInSz)
  8677. {
  8678. int ret = 0;
  8679. if (aes == NULL || out == NULL ||
  8680. (in == NULL && sz != 0) ||
  8681. ivOut == NULL ||
  8682. (authIn == NULL && authInSz != 0) ||
  8683. (ivOutSz != aes->nonceSz)) {
  8684. ret = BAD_FUNC_ARG;
  8685. }
  8686. if (ret == 0) {
  8687. aes->invokeCtr[0]++;
  8688. if (aes->invokeCtr[0] == 0) {
  8689. aes->invokeCtr[1]++;
  8690. if (aes->invokeCtr[1] == 0)
  8691. ret = AES_CCM_OVERFLOW_E;
  8692. }
  8693. }
  8694. if (ret == 0) {
  8695. ret = wc_AesCcmEncrypt(aes, out, in, sz,
  8696. (byte*)aes->reg, aes->nonceSz,
  8697. authTag, authTagSz,
  8698. authIn, authInSz);
  8699. if (ret == 0) {
  8700. XMEMCPY(ivOut, aes->reg, aes->nonceSz);
  8701. IncCtr((byte*)aes->reg, aes->nonceSz);
  8702. }
  8703. }
  8704. return ret;
  8705. }
  8706. #endif /* WC_NO_RNG */
  8707. #endif /* HAVE_AESCCM */
/* Initialize Aes for use with async hardware.
 *
 * Clears or primes the per-port state fields selected by the build
 * configuration.  Must be called before any other wc_Aes* function.
 *
 * @param [in, out] aes    AES object to initialize.
 * @param [in]      heap   Heap hint for later allocations (may be NULL).
 * @param [in]      devId  Crypto callback/async device id; ignored when
 *                         WOLF_CRYPTO_CB and async are not compiled in.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL.
 */
int wc_AesInit(Aes* aes, void* heap, int devId)
{
    int ret = 0;

    if (aes == NULL)
        return BAD_FUNC_ARG;

    aes->heap = heap;

#ifdef WOLF_CRYPTO_CB
    aes->devId = devId;
    aes->devCtx = NULL;
#else
    (void)devId;
#endif
#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    ret = wolfAsync_DevCtxInit(&aes->asyncDev, WOLFSSL_ASYNC_MARKER_AES,
                                                        aes->heap, devId);
#endif /* WOLFSSL_ASYNC_CRYPT */

#if defined(WOLFSSL_AFALG) || defined(WOLFSSL_AFALG_XILINX_AES)
    /* AF_ALG socket descriptors start unset. */
    aes->alFd = WC_SOCK_NOTSET;
    aes->rdFd = WC_SOCK_NOTSET;
#endif
#ifdef WOLFSSL_KCAPI_AES
    aes->handle = NULL;
    aes->init = 0;
#endif
#if defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))
    /* /dev/crypto context file descriptor starts closed. */
    aes->ctx.cfd = -1;
#endif
#if defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
    XMEMSET(&aes->ctx, 0, sizeof(aes->ctx));
#endif
#if defined(WOLFSSL_IMXRT_DCP)
    DCPAesInit(aes);
#endif

#ifdef WOLFSSL_MAXQ10XX_CRYPTO
    XMEMSET(&aes->maxq_ctx, 0, sizeof(aes->maxq_ctx));
#endif

#ifdef HAVE_AESGCM
#ifdef OPENSSL_EXTRA
    XMEMSET(aes->gcm.aadH, 0, sizeof(aes->gcm.aadH));
    aes->gcm.aadLen = 0;
#endif
#endif

#ifdef WOLFSSL_AESGCM_STREAM
#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_AESNI)
    aes->streamData = NULL;
#endif
    /* Streaming-GCM state starts unset; wc_AesGcmInit() fills these in. */
    aes->keylen = 0;
    aes->nonceSz = 0;
    aes->gcmKeySet = 0;
    aes->nonceSet = 0;
    aes->ctrSet = 0;
#endif

#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    ret = wc_psa_aes_init(aes);
#endif

#if defined(WOLFSSL_RENESAS_FSPSM)
    XMEMSET(&aes->ctx, 0, sizeof(aes->ctx));
#endif

    return ret;
}
  8770. #ifdef WOLF_PRIVATE_KEY_ID
  8771. int wc_AesInit_Id(Aes* aes, unsigned char* id, int len, void* heap, int devId)
  8772. {
  8773. int ret = 0;
  8774. if (aes == NULL)
  8775. ret = BAD_FUNC_ARG;
  8776. if (ret == 0 && (len < 0 || len > AES_MAX_ID_LEN))
  8777. ret = BUFFER_E;
  8778. if (ret == 0)
  8779. ret = wc_AesInit(aes, heap, devId);
  8780. if (ret == 0) {
  8781. XMEMCPY(aes->id, id, (size_t)len);
  8782. aes->idLen = len;
  8783. aes->labelLen = 0;
  8784. }
  8785. return ret;
  8786. }
  8787. int wc_AesInit_Label(Aes* aes, const char* label, void* heap, int devId)
  8788. {
  8789. int ret = 0;
  8790. size_t labelLen = 0;
  8791. if (aes == NULL || label == NULL)
  8792. ret = BAD_FUNC_ARG;
  8793. if (ret == 0) {
  8794. labelLen = XSTRLEN(label);
  8795. if (labelLen == 0 || labelLen > AES_MAX_LABEL_LEN)
  8796. ret = BUFFER_E;
  8797. }
  8798. if (ret == 0)
  8799. ret = wc_AesInit(aes, heap, devId);
  8800. if (ret == 0) {
  8801. XMEMCPY(aes->label, label, labelLen);
  8802. aes->labelLen = (int)labelLen;
  8803. aes->idLen = 0;
  8804. }
  8805. return ret;
  8806. }
  8807. #endif
  8808. /* Free Aes from use with async hardware */
/* Release all resources held by an Aes structure.
 *
 * Tears down any port/hardware specific state set up by wc_AesInit()
 * (async contexts, AF_ALG sockets, KCAPI handles, /dev/crypto sessions,
 * DCP, SE050, PSA, MAXQ, Renesas FSP SM) and zeroizes device key copies.
 * Safe to call with aes == NULL (no-op).  Does not free the Aes structure
 * itself.
 */
void wc_AesFree(Aes* aes)
{
if (aes == NULL)
return;
#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
wolfAsync_DevCtxFree(&aes->asyncDev, WOLFSSL_ASYNC_MARKER_AES);
#endif /* WOLFSSL_ASYNC_CRYPT */
#if defined(WOLFSSL_AFALG) || defined(WOLFSSL_AFALG_XILINX_AES)
if (aes->rdFd > 0) { /* negative is error case */
close(aes->rdFd);
aes->rdFd = WC_SOCK_NOTSET;
}
if (aes->alFd > 0) {
close(aes->alFd);
aes->alFd = WC_SOCK_NOTSET;
}
#endif /* WOLFSSL_AFALG */
#ifdef WOLFSSL_KCAPI_AES
/* scrub the device key copy before tearing down the cipher handle */
ForceZero((byte*)aes->devKey, AES_MAX_KEY_SIZE/WOLFSSL_BIT_SIZE);
if (aes->init == 1) {
kcapi_cipher_destroy(aes->handle);
}
aes->init = 0;
aes->handle = NULL;
#endif
#if defined(WOLFSSL_DEVCRYPTO) && \
(defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))
wc_DevCryptoFree(&aes->ctx);
#endif
#if defined(WOLF_CRYPTO_CB) || (defined(WOLFSSL_DEVCRYPTO) && \
(defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))) || \
(defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES))
/* zeroize the device key copy kept for callback/devcrypto/async paths */
ForceZero((byte*)aes->devKey, AES_MAX_KEY_SIZE/WOLFSSL_BIT_SIZE);
#endif
#if defined(WOLFSSL_IMXRT_DCP)
DCPAesFree(aes);
#endif
#if defined(WOLFSSL_AESGCM_STREAM) && defined(WOLFSSL_SMALL_STACK) && \
!defined(WOLFSSL_AESNI)
if (aes->streamData != NULL) {
XFREE(aes->streamData, aes->heap, DYNAMIC_TYPE_AES);
aes->streamData = NULL;
}
#endif
#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
if (aes->useSWCrypt == 0) {
se050_aes_free(aes);
}
#endif
#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
wc_psa_aes_free(aes);
#endif
#ifdef WOLFSSL_MAXQ10XX_CRYPTO
wc_MAXQ10XX_AesFree(aes);
#endif
#ifdef WOLFSSL_CHECK_MEM_ZERO
wc_MemZero_Check(aes, sizeof(Aes));
#endif
#if ((defined(WOLFSSL_RENESAS_FSPSM_TLS) || \
defined(WOLFSSL_RENESAS_FSPSM_CRYPTONLY)) && \
!defined(NO_WOLFSSL_RENESAS_FSPSM_AES))
wc_fspsm_Aesfree(aes);
#endif
}
/* Report the AES key size, in bytes, currently configured on the context.
 *
 * The software path derives the size from the round count set at key time
 * (10/12/14 rounds -> 16/24/32 bytes); PSA and CryptoCell builds query
 * their own context instead.
 *
 * aes      AES context to query
 * keySize  receives the key size in bytes (set to 0 when the round count
 *          is unrecognized)
 *
 * returns 0 on success and BAD_FUNC_ARG on NULL arguments or an
 * unrecognized round count
 */
int wc_AesGetKeySize(Aes* aes, word32* keySize)
{
int ret = 0;
if (aes == NULL || keySize == NULL) {
return BAD_FUNC_ARG;
}
#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
return wc_psa_aes_get_key_size(aes, keySize);
#endif
#if defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
*keySize = aes->ctx.key.keySize;
return ret;
#endif
switch (aes->rounds) {
#ifdef WOLFSSL_AES_128
case 10:
*keySize = 16;
break;
#endif
#ifdef WOLFSSL_AES_192
case 12:
*keySize = 24;
break;
#endif
#ifdef WOLFSSL_AES_256
case 14:
*keySize = 32;
break;
#endif
default:
*keySize = 0;
ret = BAD_FUNC_ARG;
}
return ret;
}
  8908. #endif /* !WOLFSSL_TI_CRYPT */
  8909. #ifdef HAVE_AES_ECB
  8910. #if defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  8911. !defined(WOLFSSL_QNX_CAAM)
  8912. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  8913. #elif defined(WOLFSSL_AFALG)
  8914. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  8915. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  8916. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  8917. #elif defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
  8918. /* Software AES - ECB */
  8919. int wc_AesEcbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  8920. {
  8921. if ((in == NULL) || (out == NULL) || (aes == NULL))
  8922. return BAD_FUNC_ARG;
  8923. return AES_ECB_encrypt(aes, in, out, sz);
  8924. }
  8925. int wc_AesEcbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  8926. {
  8927. if ((in == NULL) || (out == NULL) || (aes == NULL))
  8928. return BAD_FUNC_ARG;
  8929. return AES_ECB_decrypt(aes, in, out, sz);
  8930. }
  8931. #else
  8932. /* Software AES - ECB */
/* Software AES-ECB encrypt core.
 *
 * No argument checking here; the public wrapper validates pointers.
 * Tries, in order: crypto callback, DCP hardware (128-bit keys only),
 * AES-NI, then the portable block-at-a-time loop.  Any trailing partial
 * block is silently ignored (sz / AES_BLOCK_SIZE truncates).
 *
 * returns 0 on success or a negative error from the selected back-end
 */
static WARN_UNUSED_RESULT int _AesEcbEncrypt(
Aes* aes, byte* out, const byte* in, word32 sz)
{
word32 blocks = sz / AES_BLOCK_SIZE;
#ifdef WOLF_CRYPTO_CB
#ifndef WOLF_CRYPTO_CB_FIND
if (aes->devId != INVALID_DEVID)
#endif
{
int ret = wc_CryptoCb_AesEcbEncrypt(aes, out, in, sz);
if (ret != CRYPTOCB_UNAVAILABLE)
return ret;
/* fall-through when unavailable */
}
#endif
#ifdef WOLFSSL_IMXRT_DCP
if (aes->keylen == 16)
return DCPAesEcbEncrypt(aes, out, in, sz);
#endif
#ifdef WOLFSSL_AESNI
if (haveAESNI && aes->use_aesni) {
SAVE_VECTOR_REGISTERS(return _svr_ret;);
AES_ECB_encrypt(in, out, sz, (byte*)aes->key, (int)aes->rounds);
RESTORE_VECTOR_REGISTERS();
/* AES-NI processed everything; skip the software loop */
blocks = 0;
}
#endif
while (blocks > 0) {
int ret = wc_AesEncryptDirect(aes, out, in);
if (ret != 0)
return ret;
out += AES_BLOCK_SIZE;
in += AES_BLOCK_SIZE;
blocks--;
}
return 0;
}
/* Software AES-ECB decrypt core.
 *
 * Mirror of _AesEcbEncrypt(): crypto callback, then DCP (128-bit keys),
 * then AES-NI, then the portable block loop.  Trailing partial blocks
 * are ignored.
 *
 * returns 0 on success or a negative error from the selected back-end
 */
static WARN_UNUSED_RESULT int _AesEcbDecrypt(
Aes* aes, byte* out, const byte* in, word32 sz)
{
word32 blocks = sz / AES_BLOCK_SIZE;
#ifdef WOLF_CRYPTO_CB
#ifndef WOLF_CRYPTO_CB_FIND
if (aes->devId != INVALID_DEVID)
#endif
{
int ret = wc_CryptoCb_AesEcbDecrypt(aes, out, in, sz);
if (ret != CRYPTOCB_UNAVAILABLE)
return ret;
/* fall-through when unavailable */
}
#endif
#ifdef WOLFSSL_IMXRT_DCP
if (aes->keylen == 16)
return DCPAesEcbDecrypt(aes, out, in, sz);
#endif
#ifdef WOLFSSL_AESNI
if (haveAESNI && aes->use_aesni) {
SAVE_VECTOR_REGISTERS(return _svr_ret;);
AES_ECB_decrypt(in, out, sz, (byte*)aes->key, (int)aes->rounds);
RESTORE_VECTOR_REGISTERS();
/* AES-NI processed everything; skip the software loop */
blocks = 0;
}
#endif
while (blocks > 0) {
int ret = wc_AesDecryptDirect(aes, out, in);
if (ret != 0)
return ret;
out += AES_BLOCK_SIZE;
in += AES_BLOCK_SIZE;
blocks--;
}
return 0;
}
  9007. int wc_AesEcbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9008. {
  9009. int ret;
  9010. if ((in == NULL) || (out == NULL) || (aes == NULL))
  9011. return BAD_FUNC_ARG;
  9012. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  9013. ret = _AesEcbEncrypt(aes, out, in, sz);
  9014. RESTORE_VECTOR_REGISTERS();
  9015. return ret;
  9016. }
  9017. int wc_AesEcbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9018. {
  9019. int ret;
  9020. if ((in == NULL) || (out == NULL) || (aes == NULL))
  9021. return BAD_FUNC_ARG;
  9022. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  9023. ret = _AesEcbDecrypt(aes, out, in, sz);
  9024. RESTORE_VECTOR_REGISTERS();
  9025. return ret;
  9026. }
  9027. #endif
  9028. #endif /* HAVE_AES_ECB */
  9029. #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_OFB)
  9030. /* Feedback AES mode
  9031. *
  9032. * aes structure holding key to use for encryption
  9033. * out buffer to hold result of encryption (must be at least as large as input
  9034. * buffer)
  9035. * in buffer to encrypt
  9036. * sz size of input buffer
  9037. * mode flag to specify AES mode
  9038. *
  9039. * returns 0 on success and negative error values on failure
  9040. */
/* Software AES - CFB/OFB Encrypt */
/* Shared CFB/OFB encrypt engine (streaming: partial-block calls allowed).
 *
 * Key stream bytes are produced into aes->tmp; aes->left counts how many
 * of them are still unused and carry over to the next call.  For OFB the
 * raw key stream is fed back into aes->reg; for CFB the cipher text is.
 *
 * returns 0 on success and negative error values on failure
 */
static WARN_UNUSED_RESULT int wc_AesFeedbackEncrypt(
Aes* aes, byte* out, const byte* in, word32 sz, byte mode)
{
byte* tmp = NULL;
int ret = 0;
word32 processed;
if (aes == NULL || out == NULL || in == NULL) {
return BAD_FUNC_ARG;
}
/* consume any unused bytes left in aes->tmp */
processed = min(aes->left, sz);
xorbufout(out, in, (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left, processed);
#ifdef WOLFSSL_AES_CFB
if (mode == AES_CFB_MODE) {
/* CFB feedback: the cipher text just produced goes into aes->reg */
XMEMCPY((byte*)aes->reg + AES_BLOCK_SIZE - aes->left, out, processed);
}
#endif
aes->left -= processed;
out += processed;
in += processed;
sz -= processed;
SAVE_VECTOR_REGISTERS(return _svr_ret;);
while (sz >= AES_BLOCK_SIZE) {
/* Using aes->tmp here for inline case i.e. in=out */
ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
if (ret != 0)
break;
#ifdef WOLFSSL_AES_OFB
if (mode == AES_OFB_MODE) {
/* OFB feedback: key stream (before XOR) becomes the next input */
XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
}
#endif
xorbuf((byte*)aes->tmp, in, AES_BLOCK_SIZE);
#ifdef WOLFSSL_AES_CFB
if (mode == AES_CFB_MODE) {
/* CFB feedback: cipher text (after XOR) becomes the next input */
XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
}
#endif
XMEMCPY(out, aes->tmp, AES_BLOCK_SIZE);
out += AES_BLOCK_SIZE;
in += AES_BLOCK_SIZE;
sz -= AES_BLOCK_SIZE;
aes->left = 0;
}
/* encrypt left over data */
if ((ret == 0) && sz) {
ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
}
if ((ret == 0) && sz) {
/* partial final block: remember how much key stream remains unused */
aes->left = AES_BLOCK_SIZE;
tmp = (byte*)aes->tmp;
#ifdef WOLFSSL_AES_OFB
if (mode == AES_OFB_MODE) {
XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
}
#endif
xorbufout(out, in, tmp, sz);
#ifdef WOLFSSL_AES_CFB
if (mode == AES_CFB_MODE) {
/* partial CFB feedback; the rest of aes->reg is filled on the
 * next call's catch-up copy above */
XMEMCPY(aes->reg, out, sz);
}
#endif
aes->left -= sz;
}
RESTORE_VECTOR_REGISTERS();
return ret;
}
  9109. #ifdef HAVE_AES_DECRYPT
  9110. /* CFB 128
  9111. *
  9112. * aes structure holding key to use for decryption
  9113. * out buffer to hold result of decryption (must be at least as large as input
  9114. * buffer)
  9115. * in buffer to decrypt
  9116. * sz size of input buffer
  9117. *
  9118. * returns 0 on success and negative error values on failure
  9119. */
/* Software AES - CFB/OFB Decrypt */
/* Shared CFB/OFB decrypt engine (streaming: partial-block calls allowed).
 *
 * As in the encrypt engine, aes->tmp holds key stream and aes->left the
 * count of unused key stream bytes.  For CFB decryption the INPUT
 * (cipher text) is fed back into aes->reg; for OFB the key stream is.
 *
 * returns 0 on success and negative error values on failure
 */
static WARN_UNUSED_RESULT int wc_AesFeedbackDecrypt(
Aes* aes, byte* out, const byte* in, word32 sz, byte mode)
{
int ret = 0;
word32 processed;
if (aes == NULL || out == NULL || in == NULL) {
return BAD_FUNC_ARG;
}
#ifdef WOLFSSL_AES_CFB
/* check if more input needs copied over to aes->reg */
if (aes->left && sz && mode == AES_CFB_MODE) {
word32 size = min(aes->left, sz);
/* finish filling the feedback register started by a partial block */
XMEMCPY((byte*)aes->reg + AES_BLOCK_SIZE - aes->left, in, size);
}
#endif
/* consume any unused bytes left in aes->tmp */
processed = min(aes->left, sz);
xorbufout(out, in, (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left, processed);
aes->left -= processed;
out += processed;
in += processed;
sz -= processed;
SAVE_VECTOR_REGISTERS(return _svr_ret;);
/* NOTE: strictly '>' so the final full block is handled by the
 * tail path below */
while (sz > AES_BLOCK_SIZE) {
/* Using aes->tmp here for inline case i.e. in=out */
ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
if (ret != 0)
break;
#ifdef WOLFSSL_AES_OFB
if (mode == AES_OFB_MODE) {
XMEMCPY((byte*)aes->reg, (byte*)aes->tmp, AES_BLOCK_SIZE);
}
#endif
xorbuf((byte*)aes->tmp, in, AES_BLOCK_SIZE);
#ifdef WOLFSSL_AES_CFB
if (mode == AES_CFB_MODE) {
/* CFB decrypt feedback: the cipher text INPUT is the next
 * block-cipher input */
XMEMCPY(aes->reg, in, AES_BLOCK_SIZE);
}
#endif
XMEMCPY(out, (byte*)aes->tmp, AES_BLOCK_SIZE);
out += AES_BLOCK_SIZE;
in += AES_BLOCK_SIZE;
sz -= AES_BLOCK_SIZE;
aes->left = 0;
}
/* decrypt left over data */
if ((ret == 0) && sz) {
ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
}
if ((ret == 0) && sz) {
#ifdef WOLFSSL_AES_CFB
if (mode == AES_CFB_MODE) {
XMEMCPY(aes->reg, in, sz);
}
#endif
#ifdef WOLFSSL_AES_OFB
if (mode == AES_OFB_MODE) {
XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
}
#endif
/* record unused key stream for the next streaming call */
aes->left = AES_BLOCK_SIZE - sz;
xorbufout(out, in, aes->tmp, sz);
}
RESTORE_VECTOR_REGISTERS();
return ret;
}
  9187. #endif /* HAVE_AES_DECRYPT */
#endif /* WOLFSSL_AES_CFB || WOLFSSL_AES_OFB */
  9189. #ifdef WOLFSSL_AES_CFB
  9190. /* CFB 128
  9191. *
  9192. * aes structure holding key to use for encryption
  9193. * out buffer to hold result of encryption (must be at least as large as input
  9194. * buffer)
  9195. * in buffer to encrypt
  9196. * sz size of input buffer
  9197. *
  9198. * returns 0 on success and negative error values on failure
  9199. */
  9200. /* Software AES - CFB Encrypt */
int wc_AesCfbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
/* thin wrapper: CFB-128 encrypt via the shared feedback engine */
return wc_AesFeedbackEncrypt(aes, out, in, sz, AES_CFB_MODE);
}
  9205. #ifdef HAVE_AES_DECRYPT
  9206. /* CFB 128
  9207. *
  9208. * aes structure holding key to use for decryption
  9209. * out buffer to hold result of decryption (must be at least as large as input
  9210. * buffer)
  9211. * in buffer to decrypt
  9212. * sz size of input buffer
  9213. *
  9214. * returns 0 on success and negative error values on failure
  9215. */
  9216. /* Software AES - CFB Decrypt */
int wc_AesCfbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
/* thin wrapper: CFB-128 decrypt via the shared feedback engine */
return wc_AesFeedbackDecrypt(aes, out, in, sz, AES_CFB_MODE);
}
  9221. #endif /* HAVE_AES_DECRYPT */
  9222. /* shift the whole AES_BLOCK_SIZE array left by 8 or 1 bits */
  9223. static void shiftLeftArray(byte* ary, byte shift)
  9224. {
  9225. int i;
  9226. if (shift == WOLFSSL_BIT_SIZE) {
  9227. /* shifting over by 8 bits */
  9228. for (i = 0; i < AES_BLOCK_SIZE - 1; i++) {
  9229. ary[i] = ary[i+1];
  9230. }
  9231. ary[i] = 0;
  9232. }
  9233. else {
  9234. /* shifting over by 7 or less bits */
  9235. for (i = 0; i < AES_BLOCK_SIZE - 1; i++) {
  9236. byte carry = ary[i+1] & (0XFF << (WOLFSSL_BIT_SIZE - shift));
  9237. carry >>= (WOLFSSL_BIT_SIZE - shift);
  9238. ary[i] = (byte)((ary[i] << shift) + carry);
  9239. }
  9240. ary[i] = ary[i] << shift;
  9241. }
  9242. }
  9243. /* returns 0 on success and negative values on failure */
/* AES-CFB8: one byte of output per AES invocation, shifting the feedback
 * register (aes->reg) left by 8 bits each step.
 *
 * dir selects which byte is fed back into the register: for decryption
 * the input (cipher text) byte, for encryption the output byte.
 *
 * returns 0 on success and negative values on failure
 */
static WARN_UNUSED_RESULT int wc_AesFeedbackCFB8(
Aes* aes, byte* out, const byte* in, word32 sz, byte dir)
{
byte *pt;
int ret = 0;
if (aes == NULL || out == NULL || in == NULL) {
return BAD_FUNC_ARG;
}
if (sz == 0) {
return 0;
}
SAVE_VECTOR_REGISTERS(return _svr_ret;);
while (sz > 0) {
/* generate a fresh key stream block from the feedback register */
ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
if (ret != 0)
break;
if (dir == AES_DECRYPTION) {
pt = (byte*)aes->reg;
/* LSB + CAT */
shiftLeftArray(pt, WOLFSSL_BIT_SIZE);
pt[AES_BLOCK_SIZE - 1] = in[0];
}
/* MSB + XOR */
#ifdef BIG_ENDIAN_ORDER
/* make the first key stream byte land in the low byte of tmp[0] */
ByteReverseWords(aes->tmp, aes->tmp, AES_BLOCK_SIZE);
#endif
out[0] = (byte)(aes->tmp[0] ^ in[0]);
if (dir == AES_ENCRYPTION) {
pt = (byte*)aes->reg;
/* LSB + CAT */
shiftLeftArray(pt, WOLFSSL_BIT_SIZE);
pt[AES_BLOCK_SIZE - 1] = out[0];
}
out += 1;
in += 1;
sz -= 1;
}
RESTORE_VECTOR_REGISTERS();
return ret;
}
  9284. /* returns 0 on success and negative values on failure */
  9285. static WARN_UNUSED_RESULT int wc_AesFeedbackCFB1(
  9286. Aes* aes, byte* out, const byte* in, word32 sz, byte dir)
  9287. {
  9288. byte tmp;
  9289. byte cur = 0; /* hold current work in order to handle inline in=out */
  9290. byte* pt;
  9291. int bit = 7;
  9292. int ret = 0;
  9293. if (aes == NULL || out == NULL || in == NULL) {
  9294. return BAD_FUNC_ARG;
  9295. }
  9296. if (sz == 0) {
  9297. return 0;
  9298. }
  9299. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  9300. while (sz > 0) {
  9301. ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
  9302. if (ret != 0)
  9303. break;
  9304. if (dir == AES_DECRYPTION) {
  9305. pt = (byte*)aes->reg;
  9306. /* LSB + CAT */
  9307. tmp = (0X01 << bit) & in[0];
  9308. tmp = tmp >> bit;
  9309. tmp &= 0x01;
  9310. shiftLeftArray((byte*)aes->reg, 1);
  9311. pt[AES_BLOCK_SIZE - 1] |= tmp;
  9312. }
  9313. /* MSB + XOR */
  9314. tmp = (0X01 << bit) & in[0];
  9315. pt = (byte*)aes->tmp;
  9316. tmp = (pt[0] >> 7) ^ (tmp >> bit);
  9317. tmp &= 0x01;
  9318. cur |= (tmp << bit);
  9319. if (dir == AES_ENCRYPTION) {
  9320. pt = (byte*)aes->reg;
  9321. /* LSB + CAT */
  9322. shiftLeftArray((byte*)aes->reg, 1);
  9323. pt[AES_BLOCK_SIZE - 1] |= tmp;
  9324. }
  9325. bit--;
  9326. if (bit < 0) {
  9327. out[0] = cur;
  9328. out += 1;
  9329. in += 1;
  9330. sz -= 1;
  9331. bit = 7;
  9332. cur = 0;
  9333. }
  9334. else {
  9335. sz -= 1;
  9336. }
  9337. }
  9338. if (ret == 0) {
  9339. if (bit > 0 && bit < 7) {
  9340. out[0] = cur;
  9341. }
  9342. }
  9343. RESTORE_VECTOR_REGISTERS();
  9344. return ret;
  9345. }
  9346. /* CFB 1
  9347. *
  9348. * aes structure holding key to use for encryption
  9349. * out buffer to hold result of encryption (must be at least as large as input
  9350. * buffer)
  9351. * in buffer to encrypt (packed to left, i.e. 101 is 0x90)
  9352. * sz size of input buffer in bits (0x1 would be size of 1 and 0xFF size of 8)
  9353. *
  9354. * returns 0 on success and negative values on failure
  9355. */
int wc_AesCfb1Encrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
/* thin wrapper: bit-granular CFB encrypt via the shared CFB1 engine */
return wc_AesFeedbackCFB1(aes, out, in, sz, AES_ENCRYPTION);
}
  9360. /* CFB 8
  9361. *
  9362. * aes structure holding key to use for encryption
  9363. * out buffer to hold result of encryption (must be at least as large as input
  9364. * buffer)
  9365. * in buffer to encrypt
  9366. * sz size of input buffer
  9367. *
  9368. * returns 0 on success and negative values on failure
  9369. */
int wc_AesCfb8Encrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
/* thin wrapper: byte-granular CFB encrypt via the shared CFB8 engine */
return wc_AesFeedbackCFB8(aes, out, in, sz, AES_ENCRYPTION);
}
  9374. #ifdef HAVE_AES_DECRYPT
  9375. /* CFB 1
  9376. *
  9377. * aes structure holding key to use for encryption
  9378. * out buffer to hold result of encryption (must be at least as large as input
  9379. * buffer)
  9380. * in buffer to encrypt
  9381. * sz size of input buffer in bits (0x1 would be size of 1 and 0xFF size of 8)
  9382. *
  9383. * returns 0 on success and negative values on failure
  9384. */
int wc_AesCfb1Decrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
/* thin wrapper: bit-granular CFB decrypt via the shared CFB1 engine */
return wc_AesFeedbackCFB1(aes, out, in, sz, AES_DECRYPTION);
}
  9389. /* CFB 8
  9390. *
  9391. * aes structure holding key to use for encryption
  9392. * out buffer to hold result of encryption (must be at least as large as input
  9393. * buffer)
  9394. * in buffer to encrypt
  9395. * sz size of input buffer
  9396. *
  9397. * returns 0 on success and negative values on failure
  9398. */
int wc_AesCfb8Decrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
/* thin wrapper: byte-granular CFB decrypt via the shared CFB8 engine */
return wc_AesFeedbackCFB8(aes, out, in, sz, AES_DECRYPTION);
}
  9403. #endif /* HAVE_AES_DECRYPT */
  9404. #endif /* WOLFSSL_AES_CFB */
  9405. #ifdef WOLFSSL_AES_OFB
  9406. /* OFB
  9407. *
  9408. * aes structure holding key to use for encryption
  9409. * out buffer to hold result of encryption (must be at least as large as input
  9410. * buffer)
  9411. * in buffer to encrypt
  9412. * sz size of input buffer
  9413. *
  9414. * returns 0 on success and negative error values on failure
  9415. */
/* Software AES - OFB Encrypt */
int wc_AesOfbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
/* thin wrapper: OFB encrypt via the shared feedback engine */
return wc_AesFeedbackEncrypt(aes, out, in, sz, AES_OFB_MODE);
}
  9421. #ifdef HAVE_AES_DECRYPT
  9422. /* OFB
  9423. *
  9424. * aes structure holding key to use for decryption
  9425. * out buffer to hold result of decryption (must be at least as large as input
  9426. * buffer)
  9427. * in buffer to decrypt
  9428. * sz size of input buffer
  9429. *
  9430. * returns 0 on success and negative error values on failure
  9431. */
  9432. /* Software AES - OFB Decrypt */
int wc_AesOfbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
/* thin wrapper: OFB decrypt via the shared feedback engine */
return wc_AesFeedbackDecrypt(aes, out, in, sz, AES_OFB_MODE);
}
  9437. #endif /* HAVE_AES_DECRYPT */
  9438. #endif /* WOLFSSL_AES_OFB */
  9439. #ifdef HAVE_AES_KEYWRAP
  9440. /* Initialize key wrap counter with value */
  9441. static WC_INLINE void InitKeyWrapCounter(byte* inOutCtr, word32 value)
  9442. {
  9443. word32 i;
  9444. word32 bytes;
  9445. bytes = sizeof(word32);
  9446. for (i = 0; i < sizeof(word32); i++) {
  9447. inOutCtr[i+sizeof(word32)] = (byte)(value >> ((bytes - 1) * 8));
  9448. bytes--;
  9449. }
  9450. }
  9451. /* Increment key wrap counter */
  9452. static WC_INLINE void IncrementKeyWrapCounter(byte* inOutCtr)
  9453. {
  9454. int i;
  9455. /* in network byte order so start at end and work back */
  9456. for (i = KEYWRAP_BLOCK_SIZE - 1; i >= 0; i--) {
  9457. if (++inOutCtr[i]) /* we're done unless we overflow */
  9458. return;
  9459. }
  9460. }
  9461. /* Decrement key wrap counter */
  9462. static WC_INLINE void DecrementKeyWrapCounter(byte* inOutCtr)
  9463. {
  9464. int i;
  9465. for (i = KEYWRAP_BLOCK_SIZE - 1; i >= 0; i--) {
  9466. if (--inOutCtr[i] != 0xFF) /* we're done unless we underflow */
  9467. return;
  9468. }
  9469. }
/* AES key wrap (RFC 3394) using an already-initialized AES context.
 *
 * Wraps inSz bytes of 'in' into 'out', producing inSz + 8 bytes.  The
 * 16-byte scratch 'tmp' holds A (first 8 bytes) concatenated with the
 * current R block (second 8 bytes) so a single AES block encryption
 * processes both; 't' is the 64-bit big-endian step counter.
 *
 * aes    AES context with an ENCRYPT key set
 * in     key data to wrap; must be a multiple of 8 bytes, at least 16
 * inSz   length of in
 * out    receives the wrapped data (inSz + 8 bytes)
 * outSz  size of out; must be at least inSz + KEYWRAP_BLOCK_SIZE
 * iv     optional 8-byte IV; NULL selects the RFC 3394 default 0xA6...A6
 *
 * returns the wrapped size (inSz + KEYWRAP_BLOCK_SIZE) on success,
 * negative on error
 */
int wc_AesKeyWrap_ex(Aes *aes, const byte* in, word32 inSz, byte* out,
word32 outSz, const byte* iv)
{
word32 i;
byte* r;
int j;
int ret = 0;
byte t[KEYWRAP_BLOCK_SIZE];
byte tmp[AES_BLOCK_SIZE];
/* n must be at least 2 64-bit blocks, output size is (n + 1) 8 bytes (64-bit) */
if (aes == NULL || in == NULL || inSz < 2*KEYWRAP_BLOCK_SIZE ||
out == NULL || outSz < (inSz + KEYWRAP_BLOCK_SIZE))
return BAD_FUNC_ARG;
/* input must be multiple of 64-bits */
if (inSz % KEYWRAP_BLOCK_SIZE != 0)
return BAD_FUNC_ARG;
/* stage the R blocks in the output buffer, after the 8-byte A slot */
r = out + 8;
XMEMCPY(r, in, inSz);
XMEMSET(t, 0, sizeof(t));
/* user IV is optional */
if (iv == NULL) {
XMEMSET(tmp, 0xA6, KEYWRAP_BLOCK_SIZE);
} else {
XMEMCPY(tmp, iv, KEYWRAP_BLOCK_SIZE);
}
SAVE_VECTOR_REGISTERS(return _svr_ret;);
/* six passes over all R blocks, per RFC 3394 */
for (j = 0; j <= 5; j++) {
for (i = 1; i <= inSz / KEYWRAP_BLOCK_SIZE; i++) {
/* load R[i] */
XMEMCPY(tmp + KEYWRAP_BLOCK_SIZE, r, KEYWRAP_BLOCK_SIZE);
ret = wc_AesEncryptDirect(aes, tmp, tmp);
if (ret != 0)
break;
/* calculate new A */
IncrementKeyWrapCounter(t);
xorbuf(tmp, t, KEYWRAP_BLOCK_SIZE);
/* save R[i] */
XMEMCPY(r, tmp + KEYWRAP_BLOCK_SIZE, KEYWRAP_BLOCK_SIZE);
r += KEYWRAP_BLOCK_SIZE;
}
if (ret != 0)
break;
/* rewind to the first R block for the next pass */
r = out + KEYWRAP_BLOCK_SIZE;
}
RESTORE_VECTOR_REGISTERS();
if (ret != 0)
return ret;
/* C[0] = A */
XMEMCPY(out, tmp, KEYWRAP_BLOCK_SIZE);
return (int)(inSz + KEYWRAP_BLOCK_SIZE);
}
  9521. /* perform AES key wrap (RFC3394), return out sz on success, negative on err */
/* perform AES key wrap (RFC3394), return out sz on success, negative on err */
/* Convenience wrapper: allocates/initializes a temporary Aes context,
 * sets the ENCRYPT key, delegates to wc_AesKeyWrap_ex() and cleans up.
 *
 * key/keySz  raw AES key to wrap with
 * in/inSz    key data to wrap
 * out/outSz  destination (needs inSz + 8 bytes)
 * iv         optional 8-byte IV; NULL selects the RFC 3394 default
 */
int wc_AesKeyWrap(const byte* key, word32 keySz, const byte* in, word32 inSz,
byte* out, word32 outSz, const byte* iv)
{
#ifdef WOLFSSL_SMALL_STACK
Aes *aes = NULL;
#else
Aes aes[1];
#endif
int ret;
if (key == NULL)
return BAD_FUNC_ARG;
#ifdef WOLFSSL_SMALL_STACK
if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
DYNAMIC_TYPE_AES)) == NULL)
return MEMORY_E;
#endif
ret = wc_AesInit(aes, NULL, INVALID_DEVID);
if (ret != 0)
goto out;
ret = wc_AesSetKey(aes, key, keySz, NULL, AES_ENCRYPTION);
if (ret != 0) {
wc_AesFree(aes);
goto out;
}
ret = wc_AesKeyWrap_ex(aes, in, inSz, out, outSz, iv);
wc_AesFree(aes);
out:
#ifdef WOLFSSL_SMALL_STACK
if (aes != NULL)
XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif
return ret;
}
/* AES key unwrap (RFC 3394) using an already-initialized AES context.
 *
 * Unwraps inSz bytes of 'in' into 'out', producing inSz - 8 bytes, and
 * verifies the recovered integrity value A against the expected IV.
 * 'tmp' holds A | R[i] for one-shot AES block decryption; 't' is the
 * step counter, run backwards from 6n.
 *
 * aes    AES context with a DECRYPT key set
 * in     wrapped data; multiple of 8 bytes, at least 24
 * inSz   length of in
 * out    receives the unwrapped key data (inSz - 8 bytes)
 * outSz  size of out; must be at least inSz - KEYWRAP_BLOCK_SIZE
 * iv     optional expected 8-byte IV; NULL selects the RFC 3394
 *        default 0xA6...A6
 *
 * returns the unwrapped size (inSz - KEYWRAP_BLOCK_SIZE) on success,
 * BAD_KEYWRAP_IV_E when the integrity check fails, negative on other
 * errors
 */
int wc_AesKeyUnWrap_ex(Aes *aes, const byte* in, word32 inSz, byte* out,
word32 outSz, const byte* iv)
{
byte* r;
word32 i, n;
int j;
int ret = 0;
byte t[KEYWRAP_BLOCK_SIZE];
byte tmp[AES_BLOCK_SIZE];
const byte* expIv;
const byte defaultIV[] = {
0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6
};
if (aes == NULL || in == NULL || inSz < 3 * KEYWRAP_BLOCK_SIZE ||
out == NULL || outSz < (inSz - KEYWRAP_BLOCK_SIZE))
return BAD_FUNC_ARG;
/* input must be multiple of 64-bits */
if (inSz % KEYWRAP_BLOCK_SIZE != 0)
return BAD_FUNC_ARG;
/* user IV optional */
if (iv != NULL)
expIv = iv;
else
expIv = defaultIV;
/* A = C[0], R[i] = C[i] */
XMEMCPY(tmp, in, KEYWRAP_BLOCK_SIZE);
XMEMCPY(out, in + KEYWRAP_BLOCK_SIZE, inSz - KEYWRAP_BLOCK_SIZE);
XMEMSET(t, 0, sizeof(t));
SAVE_VECTOR_REGISTERS(return _svr_ret;);
/* initialize counter to 6n */
n = (inSz - 1) / KEYWRAP_BLOCK_SIZE;
InitKeyWrapCounter(t, 6 * n);
/* six passes in reverse, undoing the wrap rounds */
for (j = 5; j >= 0; j--) {
for (i = n; i >= 1; i--) {
/* calculate A */
xorbuf(tmp, t, KEYWRAP_BLOCK_SIZE);
DecrementKeyWrapCounter(t);
/* load R[i], starting at end of R */
r = out + ((i - 1) * KEYWRAP_BLOCK_SIZE);
XMEMCPY(tmp + KEYWRAP_BLOCK_SIZE, r, KEYWRAP_BLOCK_SIZE);
ret = wc_AesDecryptDirect(aes, tmp, tmp);
if (ret != 0)
break;
/* save R[i] */
XMEMCPY(r, tmp + KEYWRAP_BLOCK_SIZE, KEYWRAP_BLOCK_SIZE);
}
if (ret != 0)
break;
}
RESTORE_VECTOR_REGISTERS();
if (ret != 0)
return ret;
/* verify IV */
if (XMEMCMP(tmp, expIv, KEYWRAP_BLOCK_SIZE) != 0)
return BAD_KEYWRAP_IV_E;
return (int)(inSz - KEYWRAP_BLOCK_SIZE);
}
  9612. int wc_AesKeyUnWrap(const byte* key, word32 keySz, const byte* in, word32 inSz,
  9613. byte* out, word32 outSz, const byte* iv)
  9614. {
  9615. #ifdef WOLFSSL_SMALL_STACK
  9616. Aes *aes = NULL;
  9617. #else
  9618. Aes aes[1];
  9619. #endif
  9620. int ret;
  9621. (void)iv;
  9622. if (key == NULL)
  9623. return BAD_FUNC_ARG;
  9624. #ifdef WOLFSSL_SMALL_STACK
  9625. if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
  9626. DYNAMIC_TYPE_AES)) == NULL)
  9627. return MEMORY_E;
  9628. #endif
  9629. ret = wc_AesInit(aes, NULL, INVALID_DEVID);
  9630. if (ret != 0)
  9631. goto out;
  9632. ret = wc_AesSetKey(aes, key, keySz, NULL, AES_DECRYPTION);
  9633. if (ret != 0) {
  9634. wc_AesFree(aes);
  9635. goto out;
  9636. }
  9637. ret = wc_AesKeyUnWrap_ex(aes, in, inSz, out, outSz, iv);
  9638. wc_AesFree(aes);
  9639. out:
  9640. #ifdef WOLFSSL_SMALL_STACK
  9641. if (aes)
  9642. XFREE(aes, NULL, DYNAMIC_TYPE_AES);
  9643. #endif
  9644. return ret;
  9645. }
  9646. #endif /* HAVE_AES_KEYWRAP */
  9647. #ifdef WOLFSSL_AES_XTS
/* Galois field constant to use */
  9649. #define GF_XTS 0x87
  9650. /* This is to help with setting keys to correct encrypt or decrypt type.
  9651. *
  9652. * tweak AES key for tweak in XTS
  9653. * aes AES key for encrypt/decrypt process
  9654. * key buffer holding aes key | tweak key
  9655. * len length of key buffer in bytes. Should be twice that of key size. i.e.
  9656. * 32 for a 16 byte key.
  9657. * dir direction, either AES_ENCRYPTION or AES_DECRYPTION
  9658. * heap heap hint to use for memory. Can be NULL
  9659. * devId id to use with async crypto. Can be 0
  9660. *
  9661. * Note: is up to user to call wc_AesFree on tweak and aes key when done.
  9662. *
  9663. * return 0 on success
  9664. */
  9665. int wc_AesXtsSetKey(XtsAes* aes, const byte* key, word32 len, int dir,
  9666. void* heap, int devId)
  9667. {
  9668. word32 keySz;
  9669. int ret = 0;
  9670. if (aes == NULL || key == NULL) {
  9671. return BAD_FUNC_ARG;
  9672. }
  9673. if ((ret = wc_AesInit(&aes->tweak, heap, devId)) != 0) {
  9674. return ret;
  9675. }
  9676. if ((ret = wc_AesInit(&aes->aes, heap, devId)) != 0) {
  9677. return ret;
  9678. }
  9679. keySz = len/2;
  9680. if (keySz != 16 && keySz != 32) {
  9681. WOLFSSL_MSG("Unsupported key size");
  9682. return WC_KEY_SIZE_E;
  9683. }
  9684. if ((ret = wc_AesSetKey(&aes->aes, key, keySz, NULL, dir)) == 0) {
  9685. ret = wc_AesSetKey(&aes->tweak, key + keySz, keySz, NULL,
  9686. AES_ENCRYPTION);
  9687. if (ret != 0) {
  9688. wc_AesFree(&aes->aes);
  9689. }
  9690. }
  9691. return ret;
  9692. }
  9693. /* This is used to free up resources used by Aes structs
  9694. *
  9695. * aes AES keys to free
  9696. *
  9697. * return 0 on success
  9698. */
  9699. int wc_AesXtsFree(XtsAes* aes)
  9700. {
  9701. if (aes != NULL) {
  9702. wc_AesFree(&aes->aes);
  9703. wc_AesFree(&aes->tweak);
  9704. }
  9705. return 0;
  9706. }
  9707. /* Same process as wc_AesXtsEncrypt but uses a word64 type as the tweak value
  9708. * instead of a byte array. This just converts the word64 to a byte array and
  9709. * calls wc_AesXtsEncrypt.
  9710. *
  9711. * aes AES keys to use for block encrypt/decrypt
  9712. * out output buffer to hold cipher text
  9713. * in input plain text buffer to encrypt
  9714. * sz size of both out and in buffers
  9715. * sector value to use for tweak
  9716. *
  9717. * returns 0 on success
  9718. */
  9719. int wc_AesXtsEncryptSector(XtsAes* aes, byte* out, const byte* in,
  9720. word32 sz, word64 sector)
  9721. {
  9722. byte* pt;
  9723. byte i[AES_BLOCK_SIZE];
  9724. XMEMSET(i, 0, AES_BLOCK_SIZE);
  9725. #ifdef BIG_ENDIAN_ORDER
  9726. sector = ByteReverseWord64(sector);
  9727. #endif
  9728. pt = (byte*)&sector;
  9729. XMEMCPY(i, pt, sizeof(word64));
  9730. return wc_AesXtsEncrypt(aes, out, in, sz, (const byte*)i, AES_BLOCK_SIZE);
  9731. }
  9732. /* Same process as wc_AesXtsDecrypt but uses a word64 type as the tweak value
  9733. * instead of a byte array. This just converts the word64 to a byte array.
  9734. *
  9735. * aes AES keys to use for block encrypt/decrypt
  9736. * out output buffer to hold plain text
  9737. * in input cipher text buffer to encrypt
  9738. * sz size of both out and in buffers
  9739. * sector value to use for tweak
  9740. *
  9741. * returns 0 on success
  9742. */
  9743. int wc_AesXtsDecryptSector(XtsAes* aes, byte* out, const byte* in, word32 sz,
  9744. word64 sector)
  9745. {
  9746. byte* pt;
  9747. byte i[AES_BLOCK_SIZE];
  9748. XMEMSET(i, 0, AES_BLOCK_SIZE);
  9749. #ifdef BIG_ENDIAN_ORDER
  9750. sector = ByteReverseWord64(sector);
  9751. #endif
  9752. pt = (byte*)&sector;
  9753. XMEMCPY(i, pt, sizeof(word64));
  9754. return wc_AesXtsDecrypt(aes, out, in, sz, (const byte*)i, AES_BLOCK_SIZE);
  9755. }
#ifdef WOLFSSL_AESNI

#if defined(USE_INTEL_SPEEDUP)
    #define HAVE_INTEL_AVX1
    #define HAVE_INTEL_AVX2
#endif /* USE_INTEL_SPEEDUP */

/* Prototypes for the AES-NI assembly XTS implementations.
 * NOTE(review): parameter semantics below are inferred from the C callers
 * (wc_AesXtsEncrypt/wc_AesXtsDecrypt) — confirm against the assembly
 * sources:
 *   in/out  data buffers of sz bytes
 *   i       16-byte tweak value
 *   key     expanded cipher key schedule
 *   key2    expanded tweak key schedule
 *   nr      number of AES rounds
 */
void AES_XTS_encrypt(const unsigned char *in, unsigned char *out, word32 sz,
                     const unsigned char* i, const unsigned char* key,
                     const unsigned char* key2, int nr)
                     XASM_LINK("AES_XTS_encrypt");
#ifdef HAVE_INTEL_AVX1
/* AVX1 variant, selected at runtime via IS_INTEL_AVX1(intel_flags) */
void AES_XTS_encrypt_avx1(const unsigned char *in, unsigned char *out,
                          word32 sz, const unsigned char* i,
                          const unsigned char* key, const unsigned char* key2,
                          int nr)
                          XASM_LINK("AES_XTS_encrypt_avx1");
#endif /* HAVE_INTEL_AVX1 */

#ifdef HAVE_AES_DECRYPT
void AES_XTS_decrypt(const unsigned char *in, unsigned char *out, word32 sz,
                     const unsigned char* i, const unsigned char* key,
                     const unsigned char* key2, int nr)
                     XASM_LINK("AES_XTS_decrypt");
#ifdef HAVE_INTEL_AVX1
void AES_XTS_decrypt_avx1(const unsigned char *in, unsigned char *out,
                          word32 sz, const unsigned char* i,
                          const unsigned char* key, const unsigned char* key2,
                          int nr)
                          XASM_LINK("AES_XTS_decrypt_avx1");
#endif /* HAVE_INTEL_AVX1 */
#endif /* HAVE_AES_DECRYPT */

#endif /* WOLFSSL_AESNI */
  9786. #if !defined(WOLFSSL_ARMASM) || defined(WOLFSSL_ARMASM_NO_HW_CRYPTO)
  9787. #ifdef HAVE_AES_ECB
/* helper function for encrypting / decrypting full buffer at once
 *
 * Expands the tweak stream directly into 'out': on entry the caller has
 * placed the first encrypted tweak in out[0..15]; each subsequent block's
 * tweak is the previous one multiplied by x in GF(2^128) (shift-left with
 * carry, reduced with GF_XTS).  The input is then XORed over the tweak
 * stream and a single ECB pass is run over all full blocks.  Because 'out'
 * is used as scratch, callers only invoke this when in != out.
 *
 * aes  AES key schedule for the ECB pass
 * out  output buffer; on entry holds the first tweak value
 * in   input buffer; only full AES blocks are processed
 * sz   size in bytes of both buffers
 * dir  AES_ENCRYPTION or AES_DECRYPTION
 *
 * returns 0 on success, else error code from the ECB routine
 */
static WARN_UNUSED_RESULT int _AesXtsHelper(
    Aes* aes, byte* out, const byte* in, word32 sz, int dir)
{
    word32 outSz = sz;
    word32 totalSz = (sz / AES_BLOCK_SIZE) * AES_BLOCK_SIZE; /* total bytes */
    byte* pt = out;

    /* first block's tweak is already in place; generate the rest */
    outSz -= AES_BLOCK_SIZE;

    while (outSz > 0) {
        word32 j;
        byte carry = 0;

        /* multiply by shift left and propagate carry */
        for (j = 0; j < AES_BLOCK_SIZE && outSz > 0; j++, outSz--) {
            byte tmpC;

            tmpC = (pt[j] >> 7) & 0x01;
            /* write the doubled value into the NEXT block's slot */
            pt[j+AES_BLOCK_SIZE] = (byte)((pt[j] << 1) + carry);
            carry = tmpC;
        }
        if (carry) {
            /* reduce modulo the XTS polynomial */
            pt[AES_BLOCK_SIZE] ^= GF_XTS;
        }

        pt += AES_BLOCK_SIZE;
    }

    /* whiten the data with the tweak stream, then one bulk ECB pass */
    xorbuf(out, in, totalSz);
    if (dir == AES_ENCRYPTION) {
        return _AesEcbEncrypt(aes, out, out, totalSz);
    }
    else {
        return _AesEcbDecrypt(aes, out, out, totalSz);
    }
}
  9819. #endif /* HAVE_AES_ECB */
  9820. /* AES with XTS mode. (XTS) XEX encryption with Tweak and cipher text Stealing.
  9821. *
  9822. * xaes AES keys to use for block encrypt/decrypt
  9823. * out output buffer to hold cipher text
  9824. * in input plain text buffer to encrypt
  9825. * sz size of both out and in buffers
  9826. * i value to use for tweak
  9827. *
  9828. * returns 0 on success
  9829. */
/* Software AES - XTS Encrypt
 *
 * XEX encryption with tweak and ciphertext stealing.  The tweak key encrypts
 * the caller-supplied tweak 'i' once; each data block is XOR-whitened with
 * the running tweak before and after the block cipher, and the tweak is
 * multiplied by x in GF(2^128) between blocks.  A trailing partial block is
 * handled by ciphertext stealing against the previous output block.
 *
 * xaes  AES keys (cipher + tweak) to use
 * out   output buffer to hold cipher text
 * in    input plain text buffer to encrypt (may equal out for in-place)
 * sz    size of both buffers in bytes; caller guarantees sz >= AES_BLOCK_SIZE
 * i     16-byte tweak value
 *
 * returns 0 on success
 */
static int AesXtsEncrypt_sw(XtsAes* xaes, byte* out, const byte* in, word32 sz,
        const byte* i)
{
    int ret = 0;
    word32 blocks = (sz / AES_BLOCK_SIZE);
    Aes *aes = &xaes->aes;
    Aes *tweak = &xaes->tweak;
    byte tmp[AES_BLOCK_SIZE];

    XMEMSET(tmp, 0, AES_BLOCK_SIZE); /* set to 0's in case of improper AES
                                      * key setup passed to encrypt direct*/

    SAVE_VECTOR_REGISTERS(return _svr_ret;);

    /* tmp = E_K2(i): the initial tweak */
    ret = wc_AesEncryptDirect(tweak, tmp, i);
    if (ret != 0) {
        RESTORE_VECTOR_REGISTERS();
        return ret;
    }

#ifdef HAVE_AES_ECB
    /* encrypt all of buffer at once when possible */
    if (in != out) { /* can not handle inline */
        XMEMCPY(out, tmp, AES_BLOCK_SIZE);
        if ((ret = _AesXtsHelper(aes, out, in, sz, AES_ENCRYPTION)) != 0) {
            RESTORE_VECTOR_REGISTERS();
            return ret;
        }
    }
#endif

    while (blocks > 0) {
        word32 j;
        byte carry = 0;

#ifdef HAVE_AES_ECB
        /* with ECB the bulk pass above already did the block cipher for the
         * out-of-place case; only the inline case still needs it here */
        if (in == out)
#endif
        { /* check for if inline */
            byte buf[AES_BLOCK_SIZE];

            XMEMCPY(buf, in, AES_BLOCK_SIZE);
            xorbuf(buf, tmp, AES_BLOCK_SIZE);
            ret = wc_AesEncryptDirect(aes, out, buf);
            if (ret != 0) {
                RESTORE_VECTOR_REGISTERS();
                return ret;
            }
        }
        /* post-whitening with the same tweak */
        xorbuf(out, tmp, AES_BLOCK_SIZE);

        /* multiply by shift left and propagate carry */
        for (j = 0; j < AES_BLOCK_SIZE; j++) {
            byte tmpC;

            tmpC = (tmp[j] >> 7) & 0x01;
            tmp[j] = (byte)((tmp[j] << 1) + carry);
            carry = tmpC;
        }
        if (carry) {
            tmp[0] ^= GF_XTS;
        }

        in += AES_BLOCK_SIZE;
        out += AES_BLOCK_SIZE;
        sz -= AES_BLOCK_SIZE;
        blocks--;
    }

    /* stealing operation of XTS to handle left overs */
    if (sz > 0) {
        byte buf[AES_BLOCK_SIZE];

        /* buf = last full ciphertext block produced above */
        XMEMCPY(buf, out - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
        if (sz >= AES_BLOCK_SIZE) { /* extra sanity check before copy */
            RESTORE_VECTOR_REGISTERS();
            return BUFFER_E;
        }
        if (in != out) {
            /* emit the stolen ciphertext head, then build the final block
             * from the remaining plaintext */
            XMEMCPY(out, buf, sz);
            XMEMCPY(buf, in, sz);
        }
        else {
            /* inline: same swap but through a second scratch buffer */
            byte buf2[AES_BLOCK_SIZE];

            XMEMCPY(buf2, buf, sz);
            XMEMCPY(buf, in, sz);
            XMEMCPY(out, buf2, sz);
        }

        /* encrypt (partial plaintext | tail of previous block) with the
         * final tweak and store it one block back */
        xorbuf(buf, tmp, AES_BLOCK_SIZE);
        ret = wc_AesEncryptDirect(aes, out - AES_BLOCK_SIZE, buf);
        if (ret == 0)
            xorbuf(out - AES_BLOCK_SIZE, tmp, AES_BLOCK_SIZE);
    }
    RESTORE_VECTOR_REGISTERS();

    return ret;
}
  9915. /* AES with XTS mode. (XTS) XEX encryption with Tweak and cipher text Stealing.
  9916. *
  9917. * xaes AES keys to use for block encrypt/decrypt
  9918. * out output buffer to hold cipher text
  9919. * in input plain text buffer to encrypt
  9920. * sz size of both out and in buffers
  9921. * i value to use for tweak
  9922. * iSz size of i buffer, should always be AES_BLOCK_SIZE but having this input
  9923. * adds a sanity check on how the user calls the function.
  9924. *
  9925. * returns 0 on success
  9926. */
int wc_AesXtsEncrypt(XtsAes* xaes, byte* out, const byte* in, word32 sz,
        const byte* i, word32 iSz)
{
    if (xaes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    /* tweak buffer must hold a full AES block */
    if (iSz < AES_BLOCK_SIZE) {
        return BAD_FUNC_ARG;
    }

    /* XTS needs at least one full block (stealing borrows from it) */
    if (sz < AES_BLOCK_SIZE) {
        WOLFSSL_MSG("Plain text input too small for encryption");
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_AESNI
#if defined(HAVE_INTEL_AVX1)
    /* prefer the AVX1 assembly path when the CPU supports it */
    if (IS_INTEL_AVX1(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_XTS_encrypt_avx1(in, out, sz, i, (const byte*)xaes->aes.key,
                (const byte*)xaes->tweak.key, (int)xaes->aes.rounds);
        RESTORE_VECTOR_REGISTERS();
        return 0;
    }
    else
#endif
    /* plain AES-NI assembly path */
    if (haveAESNI) {
        AES_XTS_encrypt(in, out, sz, i, (const byte*)xaes->aes.key,
                (const byte*)xaes->tweak.key, (int)xaes->aes.rounds);
        return 0;
    }
    else
#endif
    {
        /* portable C fallback */
        return AesXtsEncrypt_sw(xaes, out, in, sz, i);
    }
}
  9962. /* Same process as encryption but Aes key is AES_DECRYPTION type.
  9963. *
  9964. * xaes AES keys to use for block encrypt/decrypt
  9965. * out output buffer to hold plain text
  9966. * in input cipher text buffer to decrypt
  9967. * sz size of both out and in buffers
  9968. * i value to use for tweak
  9969. *
  9970. * returns 0 on success
  9971. */
/* Software AES - XTS Decrypt
 *
 * Mirror of AesXtsEncrypt_sw for decryption.  When the input length is not a
 * block multiple, the loop stops one full block early and the final two
 * blocks are processed with ciphertext stealing: the second-to-last block is
 * decrypted with the tweak of the LAST position (tmp2, one extra doubling),
 * and the reassembled final block with the earlier tweak (tmp).
 *
 * xaes  AES keys (cipher + tweak) to use
 * out   output buffer to hold plain text
 * in    input cipher text buffer to decrypt (may equal out for in-place)
 * sz    size of both buffers in bytes; caller guarantees sz >= AES_BLOCK_SIZE
 * i     16-byte tweak value
 *
 * returns 0 on success
 */
static int AesXtsDecrypt_sw(XtsAes* xaes, byte* out, const byte* in, word32 sz,
        const byte* i)
{
    int ret = 0;
    word32 blocks = (sz / AES_BLOCK_SIZE);
    Aes *aes = &xaes->aes;
    Aes *tweak = &xaes->tweak;
    word32 j;
    byte carry = 0;
    byte tmp[AES_BLOCK_SIZE];
    byte stl = (sz % AES_BLOCK_SIZE); /* non-zero when stealing is needed */

    XMEMSET(tmp, 0, AES_BLOCK_SIZE); /* set to 0's in case of improper AES
                                      * key setup passed to decrypt direct*/

    SAVE_VECTOR_REGISTERS(return _svr_ret;);

    /* tmp = E_K2(i): the initial tweak */
    ret = wc_AesEncryptDirect(tweak, tmp, i);
    if (ret != 0) {
        RESTORE_VECTOR_REGISTERS();
        return ret;
    }

    /* if Stealing then break out of loop one block early to handle special
     * case */
    if (stl > 0) {
        blocks--;
    }

#ifdef HAVE_AES_ECB
    /* decrypt all of buffer at once when possible */
    if (in != out) { /* can not handle inline */
        XMEMCPY(out, tmp, AES_BLOCK_SIZE);
        if ((ret = _AesXtsHelper(aes, out, in, sz, AES_DECRYPTION)) != 0) {
            RESTORE_VECTOR_REGISTERS();
            return ret;
        }
    }
#endif

    while (blocks > 0) {
#ifdef HAVE_AES_ECB
        /* bulk ECB pass above already covered the out-of-place case */
        if (in == out)
#endif
        { /* check for if inline */
            byte buf[AES_BLOCK_SIZE];

            XMEMCPY(buf, in, AES_BLOCK_SIZE);
            xorbuf(buf, tmp, AES_BLOCK_SIZE);
            ret = wc_AesDecryptDirect(aes, out, buf);
            if (ret != 0) {
                RESTORE_VECTOR_REGISTERS();
                return ret;
            }
        }
        xorbuf(out, tmp, AES_BLOCK_SIZE);

        /* multiply by shift left and propagate carry */
        for (j = 0; j < AES_BLOCK_SIZE; j++) {
            byte tmpC;

            tmpC = (tmp[j] >> 7) & 0x01;
            tmp[j] = (byte)((tmp[j] << 1) + carry);
            carry = tmpC;
        }
        if (carry) {
            tmp[0] ^= GF_XTS;
        }
        carry = 0;

        in += AES_BLOCK_SIZE;
        out += AES_BLOCK_SIZE;
        sz -= AES_BLOCK_SIZE;
        blocks--;
    }

    /* stealing operation of XTS to handle left overs */
    if (sz >= AES_BLOCK_SIZE) {
        byte buf[AES_BLOCK_SIZE];
        byte tmp2[AES_BLOCK_SIZE];

        /* tmp2 = tweak of the LAST block position (one more doubling) */
        /* multiply by shift left and propagate carry */
        for (j = 0; j < AES_BLOCK_SIZE; j++) {
            byte tmpC;

            tmpC = (tmp[j] >> 7) & 0x01;
            tmp2[j] = (byte)((tmp[j] << 1) + carry);
            carry = tmpC;
        }
        if (carry) {
            tmp2[0] ^= GF_XTS;
        }

        /* decrypt the second-to-last ciphertext block under tmp2 */
        XMEMCPY(buf, in, AES_BLOCK_SIZE);
        xorbuf(buf, tmp2, AES_BLOCK_SIZE);
        ret = wc_AesDecryptDirect(aes, out, buf);
        if (ret != 0) {
            RESTORE_VECTOR_REGISTERS();
            return ret;
        }
        xorbuf(out, tmp2, AES_BLOCK_SIZE);

        /* tmp2 holds partial | last */
        XMEMCPY(tmp2, out, AES_BLOCK_SIZE);

        in += AES_BLOCK_SIZE;
        out += AES_BLOCK_SIZE;
        sz -= AES_BLOCK_SIZE;

        /* Make buffer with end of cipher text | last */
        XMEMCPY(buf, tmp2, AES_BLOCK_SIZE);
        if (sz >= AES_BLOCK_SIZE) { /* extra sanity check before copy */
            RESTORE_VECTOR_REGISTERS();
            return BUFFER_E;
        }
        XMEMCPY(buf, in, sz);
        XMEMCPY(out, tmp2, sz);

        /* decrypt the reassembled block under the earlier tweak tmp and
         * store it one block back */
        xorbuf(buf, tmp, AES_BLOCK_SIZE);
        ret = wc_AesDecryptDirect(aes, tmp2, buf);
        if (ret != 0) {
            RESTORE_VECTOR_REGISTERS();
            return ret;
        }
        xorbuf(tmp2, tmp, AES_BLOCK_SIZE);
        XMEMCPY(out - AES_BLOCK_SIZE, tmp2, AES_BLOCK_SIZE);
    }
    RESTORE_VECTOR_REGISTERS();

    return ret;
}
  10085. /* Same process as encryption but Aes key is AES_DECRYPTION type.
  10086. *
  10087. * xaes AES keys to use for block encrypt/decrypt
  10088. * out output buffer to hold plain text
  10089. * in input cipher text buffer to decrypt
  10090. * sz size of both out and in buffers
  10091. * i value to use for tweak
  10092. * iSz size of i buffer, should always be AES_BLOCK_SIZE but having this input
  10093. * adds a sanity check on how the user calls the function.
  10094. *
  10095. * returns 0 on success
  10096. */
int wc_AesXtsDecrypt(XtsAes* xaes, byte* out, const byte* in, word32 sz,
        const byte* i, word32 iSz)
{
    if (xaes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    /* tweak buffer must hold a full AES block */
    if (iSz < AES_BLOCK_SIZE) {
        return BAD_FUNC_ARG;
    }

    /* XTS needs at least one full block (stealing borrows from it) */
    if (sz < AES_BLOCK_SIZE) {
        WOLFSSL_MSG("Cipher text input too small for decryption");
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_AESNI
#if defined(HAVE_INTEL_AVX1)
    /* prefer the AVX1 assembly path when the CPU supports it */
    if (IS_INTEL_AVX1(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_XTS_decrypt_avx1(in, out, sz, i, (const byte*)xaes->aes.key,
                (const byte*)xaes->tweak.key, (int)xaes->aes.rounds);
        RESTORE_VECTOR_REGISTERS();
        return 0;
    }
    else
#endif
    /* plain AES-NI assembly path */
    if (haveAESNI) {
        AES_XTS_decrypt(in, out, sz, i, (const byte*)xaes->aes.key,
                (const byte*)xaes->tweak.key, (int)xaes->aes.rounds);
        return 0;
    }
    else
#endif
    {
        /* portable C fallback */
        return AesXtsDecrypt_sw(xaes, out, in, sz, i);
    }
}
  10132. #endif /* !WOLFSSL_ARMASM || WOLFSSL_ARMASM_NO_HW_CRYPTO */
  10133. /* Same as wc_AesXtsEncryptSector but the sector gets incremented by one every
  10134. * sectorSz bytes
  10135. *
  10136. * xaes AES keys to use for block encrypt
  10137. * out output buffer to hold cipher text
  10138. * in input plain text buffer to encrypt
  10139. * sz size of both out and in buffers
  10140. * sector value to use for tweak
  10141. * sectorSz size of the sector
  10142. *
  10143. * returns 0 on success
  10144. */
  10145. int wc_AesXtsEncryptConsecutiveSectors(XtsAes* aes, byte* out, const byte* in,
  10146. word32 sz, word64 sector, word32 sectorSz)
  10147. {
  10148. int ret = 0;
  10149. word32 iter = 0;
  10150. word32 sectorCount;
  10151. word32 remainder;
  10152. if (aes == NULL || out == NULL || in == NULL || sectorSz == 0) {
  10153. return BAD_FUNC_ARG;
  10154. }
  10155. if (sz < AES_BLOCK_SIZE) {
  10156. WOLFSSL_MSG("Cipher text input too small for encryption");
  10157. return BAD_FUNC_ARG;
  10158. }
  10159. sectorCount = sz / sectorSz;
  10160. remainder = sz % sectorSz;
  10161. while (sectorCount) {
  10162. ret = wc_AesXtsEncryptSector(aes, out + (iter * sectorSz),
  10163. in + (iter * sectorSz), sectorSz, sector);
  10164. if (ret != 0)
  10165. break;
  10166. sectorCount--;
  10167. iter++;
  10168. sector++;
  10169. }
  10170. if (remainder && ret == 0)
  10171. ret = wc_AesXtsEncryptSector(aes, out + (iter * sectorSz),
  10172. in + (iter * sectorSz), remainder, sector);
  10173. return ret;
  10174. }
  10175. /* Same as wc_AesXtsEncryptConsecutiveSectors but Aes key is AES_DECRYPTION type
  10176. *
  10177. * xaes AES keys to use for block decrypt
  10178. * out output buffer to hold cipher text
  10179. * in input plain text buffer to encrypt
  10180. * sz size of both out and in buffers
  10181. * sector value to use for tweak
  10182. * sectorSz size of the sector
  10183. *
  10184. * returns 0 on success
  10185. */
  10186. int wc_AesXtsDecryptConsecutiveSectors(XtsAes* aes, byte* out, const byte* in,
  10187. word32 sz, word64 sector, word32 sectorSz)
  10188. {
  10189. int ret = 0;
  10190. word32 iter = 0;
  10191. word32 sectorCount;
  10192. word32 remainder;
  10193. if (aes == NULL || out == NULL || in == NULL || sectorSz == 0) {
  10194. return BAD_FUNC_ARG;
  10195. }
  10196. if (sz < AES_BLOCK_SIZE) {
  10197. WOLFSSL_MSG("Cipher text input too small for decryption");
  10198. return BAD_FUNC_ARG;
  10199. }
  10200. sectorCount = sz / sectorSz;
  10201. remainder = sz % sectorSz;
  10202. while (sectorCount) {
  10203. ret = wc_AesXtsDecryptSector(aes, out + (iter * sectorSz),
  10204. in + (iter * sectorSz), sectorSz, sector);
  10205. if (ret != 0)
  10206. break;
  10207. sectorCount--;
  10208. iter++;
  10209. sector++;
  10210. }
  10211. if (remainder && ret == 0)
  10212. ret = wc_AesXtsDecryptSector(aes, out + (iter * sectorSz),
  10213. in + (iter * sectorSz), remainder, sector);
  10214. return ret;
  10215. }
  10216. #endif /* WOLFSSL_AES_XTS */
  10217. #ifdef WOLFSSL_AES_SIV
  10218. /*
  10219. * See RFC 5297 Section 2.4.
  10220. */
/* S2V "string to vector" PRF construction of AES-SIV (RFC 5297 Section 2.4).
 *
 * Chains CMACs of the zero block, the associated data, and the nonce via the
 * "doubling" operation (ShiftAndXorRb) and XOR, then folds the plaintext in:
 * XOR-ended onto the last block when data is at least one block, otherwise
 * padded (0x80 then zeros) and doubled.  The final CMAC is the SIV.
 *
 * key      S2V key (first half of the combined SIV key)
 * keySz    size of key in bytes
 * assoc    associated data (may describe a single AD string)
 * assocSz  size of assoc in bytes
 * nonce    optional nonce; skipped when nonceSz == 0
 * nonceSz  size of nonce in bytes
 * data     plaintext input
 * dataSz   size of data in bytes
 * out      receives the AES_BLOCK_SIZE-byte V value
 *
 * returns 0 on success, else a CMAC/memory error code
 */
static WARN_UNUSED_RESULT int S2V(
    const byte* key, word32 keySz, const byte* assoc, word32 assocSz,
    const byte* nonce, word32 nonceSz, const byte* data,
    word32 dataSz, byte* out)
{
#ifdef WOLFSSL_SMALL_STACK
    byte* tmp[3] = {NULL, NULL, NULL};
    int i;
    Cmac* cmac;
#else
    byte tmp[3][AES_BLOCK_SIZE];
    Cmac cmac[1];
#endif
    word32 macSz = AES_BLOCK_SIZE;
    int ret = 0;
    word32 zeroBytes;

#ifdef WOLFSSL_SMALL_STACK
    /* heap-allocate the three scratch blocks when minimizing stack use */
    for (i = 0; i < 3; ++i) {
        tmp[i] = (byte*)XMALLOC(AES_BLOCK_SIZE, NULL, DYNAMIC_TYPE_TMP_BUFFER);
        if (tmp[i] == NULL) {
            ret = MEMORY_E;
            break;
        }
    }
    if (ret == 0)
#endif
    {
        XMEMSET(tmp[1], 0, AES_BLOCK_SIZE);
        XMEMSET(tmp[2], 0, AES_BLOCK_SIZE);

        /* D = CMAC(zero block) */
        ret = wc_AesCmacGenerate(tmp[0], &macSz, tmp[1], AES_BLOCK_SIZE,
                key, keySz);
        if (ret == 0) {
            /* D = dbl(D) XOR CMAC(assoc) */
            ShiftAndXorRb(tmp[1], tmp[0]);
            ret = wc_AesCmacGenerate(tmp[0], &macSz, assoc, assocSz, key,
                    keySz);
            if (ret == 0) {
                xorbuf(tmp[1], tmp[0], AES_BLOCK_SIZE);
            }
        }
    }

    if (ret == 0) {
        if (nonceSz > 0) {
            /* D = dbl(D) XOR CMAC(nonce) */
            ShiftAndXorRb(tmp[0], tmp[1]);
            ret = wc_AesCmacGenerate(tmp[1], &macSz, nonce, nonceSz, key,
                    keySz);
            if (ret == 0) {
                xorbuf(tmp[0], tmp[1], AES_BLOCK_SIZE);
            }
        }
        else {
            XMEMCPY(tmp[0], tmp[1], AES_BLOCK_SIZE);
        }
    }

    if (ret == 0) {
        if (dataSz >= AES_BLOCK_SIZE) {
            /* data >= one block: CMAC(data with D XOR-ended onto the last
             * block) */
#ifdef WOLFSSL_SMALL_STACK
            cmac = (Cmac*)XMALLOC(sizeof(Cmac), NULL, DYNAMIC_TYPE_CMAC);
            if (cmac == NULL) {
                ret = MEMORY_E;
            }
            if (ret == 0)
#endif
            {
#ifdef WOLFSSL_CHECK_MEM_ZERO
                /* Aes part is checked by wc_AesFree. */
                wc_MemZero_Add("wc_AesCmacGenerate cmac",
                    ((unsigned char *)cmac) + sizeof(Aes),
                    sizeof(Cmac) - sizeof(Aes));
#endif
                xorbuf(tmp[0], data + (dataSz - AES_BLOCK_SIZE),
                        AES_BLOCK_SIZE);
                ret = wc_InitCmac(cmac, key, keySz, WC_CMAC_AES, NULL);
                if (ret == 0) {
                    ret = wc_CmacUpdate(cmac, data, dataSz - AES_BLOCK_SIZE);
                }
                if (ret == 0) {
                    /* final block is data_last XOR D */
                    ret = wc_CmacUpdate(cmac, tmp[0], AES_BLOCK_SIZE);
                }
                if (ret == 0) {
                    ret = wc_CmacFinal(cmac, out, &macSz);
                }
            }
#ifdef WOLFSSL_SMALL_STACK
            if (cmac != NULL) {
                XFREE(cmac, NULL, DYNAMIC_TYPE_CMAC);
            }
#elif defined(WOLFSSL_CHECK_MEM_ZERO)
            wc_MemZero_Check(cmac, sizeof(Cmac));
#endif
        }
        else {
            /* short data: pad with 0x80 then zeros, CMAC(dbl(D) XOR pad) */
            XMEMCPY(tmp[2], data, dataSz);
            tmp[2][dataSz] |= 0x80;
            zeroBytes = AES_BLOCK_SIZE - (dataSz + 1);
            if (zeroBytes != 0) {
                XMEMSET(tmp[2] + dataSz + 1, 0, zeroBytes);
            }
            ShiftAndXorRb(tmp[1], tmp[0]);
            xorbuf(tmp[1], tmp[2], AES_BLOCK_SIZE);
            ret = wc_AesCmacGenerate(out, &macSz, tmp[1], AES_BLOCK_SIZE, key,
                    keySz);
        }
    }

#ifdef WOLFSSL_SMALL_STACK
    for (i = 0; i < 3; ++i) {
        if (tmp[i] != NULL) {
            XFREE(tmp[i], NULL, DYNAMIC_TYPE_TMP_BUFFER);
        }
    }
#endif
    return ret;
}
  10333. static WARN_UNUSED_RESULT int AesSivCipher(
  10334. const byte* key, word32 keySz, const byte* assoc,
  10335. word32 assocSz, const byte* nonce, word32 nonceSz,
  10336. const byte* data, word32 dataSz, byte* siv, byte* out,
  10337. int enc)
  10338. {
  10339. int ret = 0;
  10340. #ifdef WOLFSSL_SMALL_STACK
  10341. Aes* aes = NULL;
  10342. #else
  10343. Aes aes[1];
  10344. #endif
  10345. byte sivTmp[AES_BLOCK_SIZE];
  10346. if (key == NULL || siv == NULL || out == NULL) {
  10347. WOLFSSL_MSG("Bad parameter");
  10348. ret = BAD_FUNC_ARG;
  10349. }
  10350. if (ret == 0 && keySz != 32 && keySz != 48 && keySz != 64) {
  10351. WOLFSSL_MSG("Bad key size. Must be 256, 384, or 512 bits.");
  10352. ret = BAD_FUNC_ARG;
  10353. }
  10354. if (ret == 0) {
  10355. if (enc == 1) {
  10356. ret = S2V(key, keySz / 2, assoc, assocSz, nonce, nonceSz, data,
  10357. dataSz, sivTmp);
  10358. if (ret != 0) {
  10359. WOLFSSL_MSG("S2V failed.");
  10360. }
  10361. else {
  10362. XMEMCPY(siv, sivTmp, AES_BLOCK_SIZE);
  10363. }
  10364. }
  10365. else {
  10366. XMEMCPY(sivTmp, siv, AES_BLOCK_SIZE);
  10367. }
  10368. }
  10369. #ifdef WOLFSSL_SMALL_STACK
  10370. if (ret == 0) {
  10371. aes = (Aes*)XMALLOC(sizeof(Aes), NULL, DYNAMIC_TYPE_AES);
  10372. if (aes == NULL) {
  10373. ret = MEMORY_E;
  10374. }
  10375. }
  10376. #endif
  10377. if (ret == 0) {
  10378. ret = wc_AesInit(aes, NULL, INVALID_DEVID);
  10379. if (ret != 0) {
  10380. WOLFSSL_MSG("Failed to initialized AES object.");
  10381. }
  10382. }
  10383. if (ret == 0 && dataSz > 0) {
  10384. sivTmp[12] &= 0x7f;
  10385. sivTmp[8] &= 0x7f;
  10386. ret = wc_AesSetKey(aes, key + keySz / 2, keySz / 2, sivTmp,
  10387. AES_ENCRYPTION);
  10388. if (ret != 0) {
  10389. WOLFSSL_MSG("Failed to set key for AES-CTR.");
  10390. }
  10391. else {
  10392. ret = wc_AesCtrEncrypt(aes, out, data, dataSz);
  10393. if (ret != 0) {
  10394. WOLFSSL_MSG("AES-CTR encryption failed.");
  10395. }
  10396. }
  10397. }
  10398. if (ret == 0 && enc == 0) {
  10399. ret = S2V(key, keySz / 2, assoc, assocSz, nonce, nonceSz, out, dataSz,
  10400. sivTmp);
  10401. if (ret != 0) {
  10402. WOLFSSL_MSG("S2V failed.");
  10403. }
  10404. if (XMEMCMP(siv, sivTmp, AES_BLOCK_SIZE) != 0) {
  10405. WOLFSSL_MSG("Computed SIV doesn't match received SIV.");
  10406. ret = AES_SIV_AUTH_E;
  10407. }
  10408. }
  10409. wc_AesFree(aes);
  10410. #ifdef WOLFSSL_SMALL_STACK
  10411. XFREE(aes, NULL, DYNAMIC_TYPE_AES);
  10412. #endif
  10413. return ret;
  10414. }
  10415. /*
  10416. * See RFC 5297 Section 2.6.
  10417. */
  10418. int wc_AesSivEncrypt(const byte* key, word32 keySz, const byte* assoc,
  10419. word32 assocSz, const byte* nonce, word32 nonceSz,
  10420. const byte* in, word32 inSz, byte* siv, byte* out)
  10421. {
  10422. return AesSivCipher(key, keySz, assoc, assocSz, nonce, nonceSz, in, inSz,
  10423. siv, out, 1);
  10424. }
  10425. /*
  10426. * See RFC 5297 Section 2.7.
  10427. */
  10428. int wc_AesSivDecrypt(const byte* key, word32 keySz, const byte* assoc,
  10429. word32 assocSz, const byte* nonce, word32 nonceSz,
  10430. const byte* in, word32 inSz, byte* siv, byte* out)
  10431. {
  10432. return AesSivCipher(key, keySz, assoc, assocSz, nonce, nonceSz, in, inSz,
  10433. siv, out, 0);
  10434. }
  10435. #endif /* WOLFSSL_AES_SIV */
  10436. #if defined(WOLFSSL_AES_EAX)
  10437. /*
  10438. * AES EAX one-shot API
  10439. * Encrypts input data and computes an auth tag over the input
  10440. * auth data and ciphertext
  10441. *
  10442. * Returns 0 on success
  10443. * Returns error code on failure
  10444. */
  10445. int wc_AesEaxEncryptAuth(const byte* key, word32 keySz, byte* out,
  10446. const byte* in, word32 inSz,
  10447. const byte* nonce, word32 nonceSz,
  10448. /* output computed auth tag */
  10449. byte* authTag, word32 authTagSz,
  10450. /* input data to authenticate */
  10451. const byte* authIn, word32 authInSz)
  10452. {
  10453. #if defined(WOLFSSL_SMALL_STACK)
  10454. AesEax *eax;
  10455. #else
  10456. AesEax eax_mem;
  10457. AesEax *eax = &eax_mem;
  10458. #endif
  10459. int ret;
  10460. if (key == NULL || out == NULL || in == NULL || nonce == NULL
  10461. || authTag == NULL || authIn == NULL) {
  10462. return BAD_FUNC_ARG;
  10463. }
  10464. #if defined(WOLFSSL_SMALL_STACK)
  10465. if ((eax = (AesEax *)XMALLOC(sizeof(AesEax),
  10466. NULL,
  10467. DYNAMIC_TYPE_AES_EAX)) == NULL) {
  10468. return MEMORY_E;
  10469. }
  10470. #endif
  10471. if ((ret = wc_AesEaxInit(eax,
  10472. key, keySz,
  10473. nonce, nonceSz,
  10474. authIn, authInSz)) != 0) {
  10475. goto cleanup;
  10476. }
  10477. if ((ret = wc_AesEaxEncryptUpdate(eax, out, in, inSz, NULL, 0)) != 0) {
  10478. goto cleanup;
  10479. }
  10480. if ((ret = wc_AesEaxEncryptFinal(eax, authTag, authTagSz)) != 0) {
  10481. goto cleanup;
  10482. }
  10483. cleanup:
  10484. wc_AesEaxFree(eax);
  10485. #if defined(WOLFSSL_SMALL_STACK)
  10486. XFREE(eax, NULL, DYNAMIC_TYPE_AES_EAX);
  10487. #endif
  10488. return ret;
  10489. }
  10490. /*
  10491. * AES EAX one-shot API
  10492. * Decrypts and authenticates data against a supplied auth tag
  10493. *
  10494. * Returns 0 on success
  10495. * Returns error code on failure
  10496. */
  10497. int wc_AesEaxDecryptAuth(const byte* key, word32 keySz, byte* out,
  10498. const byte* in, word32 inSz,
  10499. const byte* nonce, word32 nonceSz,
  10500. /* auth tag to verify against */
  10501. const byte* authTag, word32 authTagSz,
  10502. /* input data to authenticate */
  10503. const byte* authIn, word32 authInSz)
  10504. {
  10505. #if defined(WOLFSSL_SMALL_STACK)
  10506. AesEax *eax;
  10507. #else
  10508. AesEax eax_mem;
  10509. AesEax *eax = &eax_mem;
  10510. #endif
  10511. int ret;
  10512. if (key == NULL || out == NULL || in == NULL || nonce == NULL
  10513. || authTag == NULL || authIn == NULL) {
  10514. return BAD_FUNC_ARG;
  10515. }
  10516. #if defined(WOLFSSL_SMALL_STACK)
  10517. if ((eax = (AesEax *)XMALLOC(sizeof(AesEax),
  10518. NULL,
  10519. DYNAMIC_TYPE_AES_EAX)) == NULL) {
  10520. return MEMORY_E;
  10521. }
  10522. #endif
  10523. if ((ret = wc_AesEaxInit(eax,
  10524. key, keySz,
  10525. nonce, nonceSz,
  10526. authIn, authInSz)) != 0) {
  10527. goto cleanup;
  10528. }
  10529. if ((ret = wc_AesEaxDecryptUpdate(eax, out, in, inSz, NULL, 0)) != 0) {
  10530. goto cleanup;
  10531. }
  10532. if ((ret = wc_AesEaxDecryptFinal(eax, authTag, authTagSz)) != 0) {
  10533. goto cleanup;
  10534. }
  10535. cleanup:
  10536. wc_AesEaxFree(eax);
  10537. #if defined(WOLFSSL_SMALL_STACK)
  10538. XFREE(eax, NULL, DYNAMIC_TYPE_AES_EAX);
  10539. #endif
  10540. return ret;
  10541. }
  10542. /*
  10543. * AES EAX Incremental API:
  10544. * Initializes an AES EAX encryption or decryption operation. This must be
  10545. * called before any other EAX APIs are used on the AesEax struct
  10546. *
  10547. * Returns 0 on success
  10548. * Returns error code on failure
  10549. */
int wc_AesEaxInit(AesEax* eax,
                  const byte* key, word32 keySz,
                  const byte* nonce, word32 nonceSz,
                  const byte* authIn, word32 authInSz)
{
    int ret = 0;
    word32 cmacSize;

    if (eax == NULL || key == NULL || nonce == NULL) {
        return BAD_FUNC_ARG;
    }

    /* prefixBuf is the AES_BLOCK_SIZE "tweak" block: all zeros with the
     * last byte set to 0, 1, or 2 to domain-separate the three OMACs */
    XMEMSET(eax->prefixBuf, 0, sizeof(eax->prefixBuf));

    /* NOTE(review): error paths below return without freeing eax->aes.
     * The visible one-shot callers (wc_AesEaxEncryptAuth/DecryptAuth) call
     * wc_AesEaxFree even when this init fails, so the cleanup contract is
     * "caller frees"; adding internal frees here would risk double-free. */
    if ((ret = wc_AesInit(&eax->aes, NULL, INVALID_DEVID)) != 0) {
        return ret;
    }
    if ((ret = wc_AesSetKey(&eax->aes,
                            key,
                            keySz,
                            NULL,
                            AES_ENCRYPTION)) != 0) {
        return ret;
    }

    /*
     * OMAC the nonce to use as the IV for CTR encryption and auth tag chunk
     * N' = OMAC^0_K(N)
     */
    if ((ret = wc_InitCmac(&eax->nonceCmac,
                           key,
                           keySz,
                           WC_CMAC_AES,
                           NULL)) != 0) {
        return ret;
    }
    /* prefix block with tag byte 0 */
    if ((ret = wc_CmacUpdate(&eax->nonceCmac,
                             eax->prefixBuf,
                             sizeof(eax->prefixBuf))) != 0) {
        return ret;
    }
    if ((ret = wc_CmacUpdate(&eax->nonceCmac, nonce, nonceSz)) != 0) {
        return ret;
    }
    cmacSize = AES_BLOCK_SIZE;
    if ((ret = wc_CmacFinal(&eax->nonceCmac,
                            eax->nonceCmacFinal,
                            &cmacSize)) != 0) {
        return ret;
    }
    /* N' doubles as the CTR IV */
    if ((ret = wc_AesSetIV(&eax->aes, eax->nonceCmacFinal)) != 0) {
        return ret;
    }

    /*
     * start the OMAC used to build the auth tag chunk for the AD .
     * This CMAC is continued in subsequent update calls when more auth data is
     * provided
     * H' = OMAC^1_K(H)
     */
    eax->prefixBuf[AES_BLOCK_SIZE-1] = 1;
    if ((ret = wc_InitCmac(&eax->aadCmac,
                           key,
                           keySz,
                           WC_CMAC_AES,
                           NULL)) != 0) {
        return ret;
    }
    if ((ret = wc_CmacUpdate(&eax->aadCmac,
                             eax->prefixBuf,
                             sizeof(eax->prefixBuf))) != 0) {
        return ret;
    }
    if (authIn != NULL) {
        if ((ret = wc_CmacUpdate(&eax->aadCmac, authIn, authInSz)) != 0) {
            return ret;
        }
    }

    /*
     * start the OMAC to create auth tag chunk for ciphertext. This MAC will be
     * updated in subsequent calls to encrypt/decrypt
     * C' = OMAC^2_K(C)
     */
    eax->prefixBuf[AES_BLOCK_SIZE-1] = 2;
    if ((ret = wc_InitCmac(&eax->ciphertextCmac,
                           key,
                           keySz,
                           WC_CMAC_AES,
                           NULL)) != 0) {
        return ret;
    }
    if ((ret = wc_CmacUpdate(&eax->ciphertextCmac,
                             eax->prefixBuf,
                             sizeof(eax->prefixBuf))) != 0) {
        return ret;
    }

    return ret;
}
  10643. /*
  10644. * AES EAX Incremental API:
  10645. * Encrypts input plaintext using AES EAX mode, adding optional auth data to
  10646. * the authentication stream
  10647. *
  10648. * Returns 0 on success
  10649. * Returns error code on failure
  10650. */
  10651. int wc_AesEaxEncryptUpdate(AesEax* eax, byte* out,
  10652. const byte* in, word32 inSz,
  10653. const byte* authIn, word32 authInSz)
  10654. {
  10655. int ret;
  10656. if (eax == NULL || out == NULL || in == NULL) {
  10657. return BAD_FUNC_ARG;
  10658. }
  10659. /*
  10660. * Encrypt the plaintext using AES CTR
  10661. * C = CTR(M)
  10662. */
  10663. if ((ret = wc_AesCtrEncrypt(&eax->aes, out, in, inSz)) != 0) {
  10664. return ret;
  10665. }
  10666. /*
  10667. * update OMAC with new ciphertext
  10668. * C' = OMAC^2_K(C)
  10669. */
  10670. if ((ret = wc_CmacUpdate(&eax->ciphertextCmac, out, inSz)) != 0) {
  10671. return ret;
  10672. }
  10673. /* If there exists new auth data, update the OMAC for that as well */
  10674. if (authIn != NULL) {
  10675. if ((ret = wc_CmacUpdate(&eax->aadCmac, authIn, authInSz)) != 0) {
  10676. return ret;
  10677. }
  10678. }
  10679. return 0;
  10680. }
  10681. /*
  10682. * AES EAX Incremental API:
  10683. * Decrypts input ciphertext using AES EAX mode, adding optional auth data to
  10684. * the authentication stream
  10685. *
  10686. * Returns 0 on sucess
  10687. * Returns error code on failure
  10688. */
  10689. int wc_AesEaxDecryptUpdate(AesEax* eax, byte* out,
  10690. const byte* in, word32 inSz,
  10691. const byte* authIn, word32 authInSz)
  10692. {
  10693. int ret;
  10694. if (eax == NULL || out == NULL || in == NULL) {
  10695. return BAD_FUNC_ARG;
  10696. }
  10697. /*
  10698. * Decrypt the plaintext using AES CTR
  10699. * C = CTR(M)
  10700. */
  10701. if ((ret = wc_AesCtrEncrypt(&eax->aes, out, in, inSz)) != 0) {
  10702. return ret;
  10703. }
  10704. /*
  10705. * update OMAC with new ciphertext
  10706. * C' = OMAC^2_K(C)
  10707. */
  10708. if ((ret = wc_CmacUpdate(&eax->ciphertextCmac, in, inSz)) != 0) {
  10709. return ret;
  10710. }
  10711. /* If there exists new auth data, update the OMAC for that as well */
  10712. if (authIn != NULL) {
  10713. if ((ret = wc_CmacUpdate(&eax->aadCmac, authIn, authInSz)) != 0) {
  10714. return ret;
  10715. }
  10716. }
  10717. return 0;
  10718. }
  10719. /*
  10720. * AES EAX Incremental API:
  10721. * Provides additional auth data information to the authentication
  10722. * stream for an authenticated encryption or decryption operation
  10723. *
  10724. * Returns 0 on success
  10725. * Returns error code on failure
  10726. */
  10727. int wc_AesEaxAuthDataUpdate(AesEax* eax, const byte* authIn, word32 authInSz)
  10728. {
  10729. return wc_CmacUpdate(&eax->aadCmac, authIn, authInSz);
  10730. }
  10731. /*
  10732. * AES EAX Incremental API:
  10733. * Finalizes the authenticated encryption operation, computing the auth tag
  10734. * over previously supplied auth data and computed ciphertext
  10735. *
  10736. * Returns 0 on success
  10737. * Returns error code on failure
  10738. */
  10739. int wc_AesEaxEncryptFinal(AesEax* eax, byte* authTag, word32 authTagSz)
  10740. {
  10741. word32 cmacSize;
  10742. int ret;
  10743. word32 i;
  10744. if (eax == NULL || authTag == NULL || authTagSz > AES_BLOCK_SIZE) {
  10745. return BAD_FUNC_ARG;
  10746. }
  10747. /* Complete the OMAC for the ciphertext */
  10748. cmacSize = AES_BLOCK_SIZE;
  10749. if ((ret = wc_CmacFinal(&eax->ciphertextCmac,
  10750. eax->ciphertextCmacFinal,
  10751. &cmacSize)) != 0) {
  10752. return ret;
  10753. }
  10754. /* Complete the OMAC for auth data */
  10755. cmacSize = AES_BLOCK_SIZE;
  10756. if ((ret = wc_CmacFinal(&eax->aadCmac,
  10757. eax->aadCmacFinal,
  10758. &cmacSize)) != 0) {
  10759. return ret;
  10760. }
  10761. /*
  10762. * Concatenate all three auth tag chunks into the final tag, truncating
  10763. * at the specified tag length
  10764. * T = Tag [first authTagSz bytes]
  10765. */
  10766. for (i = 0; i < authTagSz; i++) {
  10767. authTag[i] = eax->nonceCmacFinal[i]
  10768. ^ eax->aadCmacFinal[i]
  10769. ^ eax->ciphertextCmacFinal[i];
  10770. }
  10771. return 0;
  10772. }
  10773. /*
  10774. * AES EAX Incremental API:
  10775. * Finalizes the authenticated decryption operation, computing the auth tag
  10776. * for the previously supplied auth data and cipher text and validating it
  10777. * against a provided auth tag
  10778. *
  10779. * Returns 0 on success
  10780. * Return error code for failure
  10781. */
  10782. int wc_AesEaxDecryptFinal(AesEax* eax,
  10783. const byte* authIn, word32 authInSz)
  10784. {
  10785. int ret;
  10786. word32 i;
  10787. word32 cmacSize;
  10788. #if defined(WOLFSSL_SMALL_STACK)
  10789. byte *authTag;
  10790. #else
  10791. byte authTag[AES_BLOCK_SIZE];
  10792. #endif
  10793. if (eax == NULL || authIn == NULL || authInSz > AES_BLOCK_SIZE) {
  10794. return BAD_FUNC_ARG;
  10795. }
  10796. /* Complete the OMAC for the ciphertext */
  10797. cmacSize = AES_BLOCK_SIZE;
  10798. if ((ret = wc_CmacFinal(&eax->ciphertextCmac,
  10799. eax->ciphertextCmacFinal,
  10800. &cmacSize)) != 0) {
  10801. return ret;
  10802. }
  10803. /* Complete the OMAC for auth data */
  10804. cmacSize = AES_BLOCK_SIZE;
  10805. if ((ret = wc_CmacFinal(&eax->aadCmac,
  10806. eax->aadCmacFinal,
  10807. &cmacSize)) != 0) {
  10808. return ret;
  10809. }
  10810. #if defined(WOLFSSL_SMALL_STACK)
  10811. authTag = (byte*)XMALLOC(AES_BLOCK_SIZE, NULL, DYNAMIC_TYPE_TMP_BUFFER);
  10812. if (authTag == NULL) {
  10813. return MEMORY_E;
  10814. }
  10815. #endif
  10816. /*
  10817. * Concatenate all three auth tag chunks into the final tag, truncating
  10818. * at the specified tag length
  10819. * T = Tag [first authInSz bytes]
  10820. */
  10821. for (i = 0; i < authInSz; i++) {
  10822. authTag[i] = eax->nonceCmacFinal[i]
  10823. ^ eax->aadCmacFinal[i]
  10824. ^ eax->ciphertextCmacFinal[i];
  10825. }
  10826. if (ConstantCompare((const byte*)authTag, authIn, (int)authInSz) != 0) {
  10827. ret = AES_EAX_AUTH_E;
  10828. }
  10829. else {
  10830. ret = 0;
  10831. }
  10832. #if defined(WOLFSSL_SMALL_STACK)
  10833. XFREE(authTag, NULL, DYNAMIC_TYPE_TMP_BUFFER);
  10834. #endif
  10835. return ret;
  10836. }
  10837. /*
  10838. * Frees the underlying AES context. Must be called when done using the AES EAX
  10839. * context structure
  10840. *
  10841. * Returns 0 on success
  10842. * Returns error code on failure
  10843. */
  10844. int wc_AesEaxFree(AesEax* eax)
  10845. {
  10846. if (eax == NULL) {
  10847. return BAD_FUNC_ARG;
  10848. }
  10849. wc_AesFree(&eax->aes);
  10850. return 0;
  10851. }
  10852. #endif /* WOLFSSL_AES_EAX */
  10853. #endif /* !NO_AES */