aes.c 374 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
142211423114241142511426114271142811429114301143111432114331143411435114361143711438114391144011441114421144311444114451144611447114481144911450114511145211453114541145511456114571145811459114601146111462114631146411465114661146711468114691147011471114721147311474114751147611477114781147911480114811148211483114841148511486114871148811489114901149111492114931149411495114961149711498114991150011501115021150311504115051150611507115081150911510115111151211513115141151511516115171151811519115201152111522115231152411525115261152711528115291153011531115321153311534115351153611537115381153911540115411154211543115441154511546115471154811549115501155111552115531155411555115561155711558115591156011561115621156311564115651156611567115681156911570115711157211573115741157511576
  1. /* aes.c
  2. *
  3. * Copyright (C) 2006-2023 wolfSSL Inc.
  4. *
  5. * This file is part of wolfSSL.
  6. *
  7. * wolfSSL is free software; you can redistribute it and/or modify
  8. * it under the terms of the GNU General Public License as published by
  9. * the Free Software Foundation; either version 2 of the License, or
  10. * (at your option) any later version.
  11. *
  12. * wolfSSL is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  15. * GNU General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU General Public License
  18. * along with this program; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA
  20. */
  21. /*
  22. DESCRIPTION
  23. This library provides the interfaces to the Advanced Encryption Standard (AES)
  24. for encrypting and decrypting data. AES is the standard known for a symmetric
  25. block cipher mechanism that uses n-bit binary string parameter key with 128-bits,
  26. 192-bits, and 256-bits of key sizes.
  27. */
  28. #ifdef HAVE_CONFIG_H
  29. #include <config.h>
  30. #endif
  31. #include <wolfssl/wolfcrypt/settings.h>
  32. #include <wolfssl/wolfcrypt/error-crypt.h>
  33. #if !defined(NO_AES)
  34. /* Tip: Locate the software cipher modes by searching for "Software AES" */
  35. #if defined(HAVE_FIPS) && \
  36. defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2)
  37. /* set NO_WRAPPERS before headers, use direct internal f()s not wrappers */
  38. #define FIPS_NO_WRAPPERS
  39. #ifdef USE_WINDOWS_API
  40. #pragma code_seg(".fipsA$g")
  41. #pragma const_seg(".fipsB$g")
  42. #endif
  43. #endif
  44. #include <wolfssl/wolfcrypt/aes.h>
  45. #ifdef WOLFSSL_AESNI
  46. #include <wmmintrin.h>
  47. #include <emmintrin.h>
  48. #include <smmintrin.h>
  49. #endif /* WOLFSSL_AESNI */
  50. #include <wolfssl/wolfcrypt/cpuid.h>
  51. #ifdef WOLF_CRYPTO_CB
  52. #include <wolfssl/wolfcrypt/cryptocb.h>
  53. #endif
  54. #ifdef WOLFSSL_SECO_CAAM
  55. #include <wolfssl/wolfcrypt/port/caam/wolfcaam.h>
  56. #endif
  57. #ifdef WOLFSSL_IMXRT_DCP
  58. #include <wolfssl/wolfcrypt/port/nxp/dcp_port.h>
  59. #endif
  60. #if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
  61. #include <wolfssl/wolfcrypt/port/nxp/se050_port.h>
  62. #endif
  63. #ifdef WOLFSSL_AES_SIV
  64. #include <wolfssl/wolfcrypt/cmac.h>
  65. #endif
  66. #if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
  67. #include <wolfssl/wolfcrypt/port/psa/psa.h>
  68. #endif
  69. /* fips wrapper calls, user can call direct */
  70. #if defined(HAVE_FIPS) && \
  71. (!defined(HAVE_FIPS_VERSION) || (HAVE_FIPS_VERSION < 2))
  72. int wc_AesSetKey(Aes* aes, const byte* key, word32 len, const byte* iv,
  73. int dir)
  74. {
  75. if (aes == NULL || !( (len == 16) || (len == 24) || (len == 32)) ) {
  76. return BAD_FUNC_ARG;
  77. }
  78. return AesSetKey_fips(aes, key, len, iv, dir);
  79. }
  80. int wc_AesSetIV(Aes* aes, const byte* iv)
  81. {
  82. if (aes == NULL) {
  83. return BAD_FUNC_ARG;
  84. }
  85. return AesSetIV_fips(aes, iv);
  86. }
  87. #ifdef HAVE_AES_CBC
  88. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  89. {
  90. if (aes == NULL || out == NULL || in == NULL) {
  91. return BAD_FUNC_ARG;
  92. }
  93. return AesCbcEncrypt_fips(aes, out, in, sz);
  94. }
  95. #ifdef HAVE_AES_DECRYPT
  96. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  97. {
  98. if (aes == NULL || out == NULL || in == NULL
  99. || sz % AES_BLOCK_SIZE != 0) {
  100. return BAD_FUNC_ARG;
  101. }
  102. return AesCbcDecrypt_fips(aes, out, in, sz);
  103. }
  104. #endif /* HAVE_AES_DECRYPT */
  105. #endif /* HAVE_AES_CBC */
  106. /* AES-CTR */
  107. #ifdef WOLFSSL_AES_COUNTER
  108. int wc_AesCtrEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  109. {
  110. if (aes == NULL || out == NULL || in == NULL) {
  111. return BAD_FUNC_ARG;
  112. }
  113. return AesCtrEncrypt(aes, out, in, sz);
  114. }
  115. #endif
  116. /* AES-DIRECT */
  117. #if defined(WOLFSSL_AES_DIRECT)
  118. void wc_AesEncryptDirect(Aes* aes, byte* out, const byte* in)
  119. {
  120. AesEncryptDirect(aes, out, in);
  121. }
  122. #ifdef HAVE_AES_DECRYPT
  123. void wc_AesDecryptDirect(Aes* aes, byte* out, const byte* in)
  124. {
  125. AesDecryptDirect(aes, out, in);
  126. }
  127. #endif /* HAVE_AES_DECRYPT */
  128. int wc_AesSetKeyDirect(Aes* aes, const byte* key, word32 len,
  129. const byte* iv, int dir)
  130. {
  131. return AesSetKeyDirect(aes, key, len, iv, dir);
  132. }
  133. #endif /* WOLFSSL_AES_DIRECT */
  134. /* AES-GCM */
  135. #ifdef HAVE_AESGCM
  136. int wc_AesGcmSetKey(Aes* aes, const byte* key, word32 len)
  137. {
  138. if (aes == NULL || !( (len == 16) || (len == 24) || (len == 32)) ) {
  139. return BAD_FUNC_ARG;
  140. }
  141. return AesGcmSetKey_fips(aes, key, len);
  142. }
  143. int wc_AesGcmEncrypt(Aes* aes, byte* out, const byte* in, word32 sz,
  144. const byte* iv, word32 ivSz,
  145. byte* authTag, word32 authTagSz,
  146. const byte* authIn, word32 authInSz)
  147. {
  148. if (aes == NULL || authTagSz > AES_BLOCK_SIZE ||
  149. authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ ||
  150. ivSz == 0 || ivSz > AES_BLOCK_SIZE) {
  151. return BAD_FUNC_ARG;
  152. }
  153. return AesGcmEncrypt_fips(aes, out, in, sz, iv, ivSz, authTag,
  154. authTagSz, authIn, authInSz);
  155. }
  156. #ifdef HAVE_AES_DECRYPT
  157. int wc_AesGcmDecrypt(Aes* aes, byte* out, const byte* in, word32 sz,
  158. const byte* iv, word32 ivSz,
  159. const byte* authTag, word32 authTagSz,
  160. const byte* authIn, word32 authInSz)
  161. {
  162. if (aes == NULL || out == NULL || in == NULL || iv == NULL
  163. || authTag == NULL || authTagSz > AES_BLOCK_SIZE ||
  164. ivSz == 0 || ivSz > AES_BLOCK_SIZE) {
  165. return BAD_FUNC_ARG;
  166. }
  167. return AesGcmDecrypt_fips(aes, out, in, sz, iv, ivSz, authTag,
  168. authTagSz, authIn, authInSz);
  169. }
  170. #endif /* HAVE_AES_DECRYPT */
  171. int wc_GmacSetKey(Gmac* gmac, const byte* key, word32 len)
  172. {
  173. if (gmac == NULL || key == NULL || !((len == 16) ||
  174. (len == 24) || (len == 32)) ) {
  175. return BAD_FUNC_ARG;
  176. }
  177. return GmacSetKey(gmac, key, len);
  178. }
  179. int wc_GmacUpdate(Gmac* gmac, const byte* iv, word32 ivSz,
  180. const byte* authIn, word32 authInSz,
  181. byte* authTag, word32 authTagSz)
  182. {
  183. if (gmac == NULL || authTagSz > AES_BLOCK_SIZE ||
  184. authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ) {
  185. return BAD_FUNC_ARG;
  186. }
  187. return GmacUpdate(gmac, iv, ivSz, authIn, authInSz,
  188. authTag, authTagSz);
  189. }
  190. #endif /* HAVE_AESGCM */
  191. /* AES-CCM */
  192. #if defined(HAVE_AESCCM) && \
  193. defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2)
  194. int wc_AesCcmSetKey(Aes* aes, const byte* key, word32 keySz)
  195. {
  196. return AesCcmSetKey(aes, key, keySz);
  197. }
  198. int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
  199. const byte* nonce, word32 nonceSz,
  200. byte* authTag, word32 authTagSz,
  201. const byte* authIn, word32 authInSz)
  202. {
  203. /* sanity check on arguments */
  204. if (aes == NULL || out == NULL || in == NULL || nonce == NULL
  205. || authTag == NULL || nonceSz < 7 || nonceSz > 13)
  206. return BAD_FUNC_ARG;
  207. AesCcmEncrypt(aes, out, in, inSz, nonce, nonceSz, authTag,
  208. authTagSz, authIn, authInSz);
  209. return 0;
  210. }
  211. #ifdef HAVE_AES_DECRYPT
  212. int wc_AesCcmDecrypt(Aes* aes, byte* out,
  213. const byte* in, word32 inSz,
  214. const byte* nonce, word32 nonceSz,
  215. const byte* authTag, word32 authTagSz,
  216. const byte* authIn, word32 authInSz)
  217. {
  218. if (aes == NULL || out == NULL || in == NULL || nonce == NULL
  219. || authTag == NULL || nonceSz < 7 || nonceSz > 13) {
  220. return BAD_FUNC_ARG;
  221. }
  222. return AesCcmDecrypt(aes, out, in, inSz, nonce, nonceSz,
  223. authTag, authTagSz, authIn, authInSz);
  224. }
  225. #endif /* HAVE_AES_DECRYPT */
  226. #endif /* HAVE_AESCCM && HAVE_FIPS_VERSION 2 */
  227. int wc_AesInit(Aes* aes, void* h, int i)
  228. {
  229. if (aes == NULL)
  230. return BAD_FUNC_ARG;
  231. (void)h;
  232. (void)i;
  233. /* FIPS doesn't support */
  234. #ifdef WOLFSSL_KCAPI_AES
  235. return AesInit(aes, h, i);
  236. #else
  237. return 0;
  238. #endif
  239. }
  240. void wc_AesFree(Aes* aes)
  241. {
  242. (void)aes;
  243. /* FIPS doesn't support */
  244. #ifdef WOLFSSL_KCAPI_AES
  245. AesFree(aes);
  246. #endif
  247. }
  248. #else /* else build without fips, or for FIPS v2+ */
  249. #if defined(WOLFSSL_TI_CRYPT)
  250. #include <wolfcrypt/src/port/ti/ti-aes.c>
  251. #else
  252. #include <wolfssl/wolfcrypt/logging.h>
  253. #ifdef NO_INLINE
  254. #include <wolfssl/wolfcrypt/misc.h>
  255. #else
  256. #define WOLFSSL_MISC_INCLUDED
  257. #include <wolfcrypt/src/misc.c>
  258. #endif
  259. #ifndef WOLFSSL_ARMASM
  260. #ifdef WOLFSSL_IMX6_CAAM_BLOB
  261. /* case of possibly not using hardware acceleration for AES but using key
  262. blobs */
  263. #include <wolfssl/wolfcrypt/port/caam/wolfcaam.h>
  264. #endif
  265. #ifdef DEBUG_AESNI
  266. #include <stdio.h>
  267. #endif
  268. #ifdef _MSC_VER
  269. /* 4127 warning constant while(1) */
  270. #pragma warning(disable: 4127)
  271. #endif
  272. /* Define AES implementation includes and functions */
  273. #if defined(STM32_CRYPTO)
  274. /* STM32F2/F4/F7/L4/L5/H7/WB55 hardware AES support for ECB, CBC, CTR and GCM modes */
  275. #if defined(WOLFSSL_AES_DIRECT) || defined(HAVE_AESGCM) || defined(HAVE_AESCCM)
/* Encrypt a single 16-byte block in AES-ECB mode on the STM32 CRYP
 * peripheral. Two driver paths: the CubeMX HAL path and the Standard
 * Peripheral Library path. The hardware mutex is taken after the per-call
 * init succeeds and released at the bottom of the function on all
 * successful-lock paths.
 * Returns 0 on success, an init/lock error code, or WC_TIMEOUT_E if the
 * HAL operation did not report HAL_OK. */
static WARN_UNUSED_RESULT int wc_AesEncrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    int ret = 0;
#ifdef WOLFSSL_STM32_CUBEMX
    CRYP_HandleTypeDef hcryp;
#else
    CRYP_InitTypeDef cryptInit;
    CRYP_KeyInitTypeDef keyInit;
#endif
#ifdef WOLFSSL_STM32_CUBEMX
    /* load key/handle state from the Aes context into the HAL handle */
    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0)
        return ret;
    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0)
        return ret;
#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_ECB;
#elif defined(STM32_CRYPTO_AES_ONLY)
    /* AES-only peripheral: select encrypt + ECB, enable key write */
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
    hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_ECB;
    hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
#endif
    HAL_CRYP_Init(&hcryp);
#if defined(STM32_HAL_V2)
    ret = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)inBlock, AES_BLOCK_SIZE,
        (uint32_t*)outBlock, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
    ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
        outBlock, STM32_HAL_TIMEOUT);
#else
    ret = HAL_CRYP_AESECB_Encrypt(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
        outBlock, STM32_HAL_TIMEOUT);
#endif
    /* map any HAL failure (busy/timeout/error) onto WC_TIMEOUT_E */
    if (ret != HAL_OK) {
        ret = WC_TIMEOUT_E;
    }
    HAL_CRYP_DeInit(&hcryp);
#else /* Standard Peripheral Library */
    ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
    if (ret != 0)
        return ret;
    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0)
        return ret;
    /* reset registers to their default values */
    CRYP_DeInit();
    /* setup key */
    CRYP_KeyInit(&keyInit);
    /* set direction and mode */
    cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Encrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_ECB;
    CRYP_Init(&cryptInit);
    /* enable crypto processor */
    CRYP_Cmd(ENABLE);
    /* flush IN/OUT FIFOs */
    CRYP_FIFOFlush();
    /* feed the block into the IN FIFO one 32-bit word at a time */
    CRYP_DataIn(*(uint32_t*)&inBlock[0]);
    CRYP_DataIn(*(uint32_t*)&inBlock[4]);
    CRYP_DataIn(*(uint32_t*)&inBlock[8]);
    CRYP_DataIn(*(uint32_t*)&inBlock[12]);
    /* wait until the complete message has been processed */
    while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}
    /* NOTE(review): ret stays 0 on this path regardless of hardware
     * outcome; the busy-wait has no timeout -- confirm acceptable */
    *(uint32_t*)&outBlock[0] = CRYP_DataOut();
    *(uint32_t*)&outBlock[4] = CRYP_DataOut();
    *(uint32_t*)&outBlock[8] = CRYP_DataOut();
    *(uint32_t*)&outBlock[12] = CRYP_DataOut();
    /* disable crypto processor */
    CRYP_Cmd(DISABLE);
#endif /* WOLFSSL_STM32_CUBEMX */
    wolfSSL_CryptHwMutexUnLock();
    return ret;
}
  350. #endif /* WOLFSSL_AES_DIRECT || HAVE_AESGCM || HAVE_AESCCM */
  351. #ifdef HAVE_AES_DECRYPT
  352. #if defined(WOLFSSL_AES_DIRECT) || defined(HAVE_AESCCM)
/* Decrypt a single 16-byte block in AES-ECB mode on the STM32 CRYP
 * peripheral. Unlike encryption, AES decryption requires the expanded
 * decryption key: the SPL path runs a key-derivation phase
 * (CRYP_AlgoMode_AES_Key) before switching to ECB decrypt; the AES-only
 * HAL path uses CRYP_ALGOMODE_KEYDERIVATION_DECRYPT for the same reason.
 * Returns 0 on success, an init/lock error code, or WC_TIMEOUT_E if the
 * HAL operation did not report HAL_OK. */
static WARN_UNUSED_RESULT int wc_AesDecrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    int ret = 0;
#ifdef WOLFSSL_STM32_CUBEMX
    CRYP_HandleTypeDef hcryp;
#else
    CRYP_InitTypeDef cryptInit;
    CRYP_KeyInitTypeDef keyInit;
#endif
#ifdef WOLFSSL_STM32_CUBEMX
    /* load key/handle state from the Aes context into the HAL handle */
    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0)
        return ret;
    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0)
        return ret;
#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_ECB;
#elif defined(STM32_CRYPTO_AES_ONLY)
    /* derive the decrypt key schedule in hardware, then decrypt */
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_KEYDERIVATION_DECRYPT;
    hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_ECB;
    hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
#endif
    HAL_CRYP_Init(&hcryp);
#if defined(STM32_HAL_V2)
    ret = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)inBlock, AES_BLOCK_SIZE,
        (uint32_t*)outBlock, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
    ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
        outBlock, STM32_HAL_TIMEOUT);
#else
    ret = HAL_CRYP_AESECB_Decrypt(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE,
        outBlock, STM32_HAL_TIMEOUT);
#endif
    /* map any HAL failure (busy/timeout/error) onto WC_TIMEOUT_E */
    if (ret != HAL_OK) {
        ret = WC_TIMEOUT_E;
    }
    HAL_CRYP_DeInit(&hcryp);
#else /* Standard Peripheral Library */
    ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
    if (ret != 0)
        return ret;
    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0)
        return ret;
    /* reset registers to their default values */
    CRYP_DeInit();
    /* set direction and key */
    CRYP_KeyInit(&keyInit);
    /* phase 1: derive the decryption key schedule in hardware */
    cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_Key;
    CRYP_Init(&cryptInit);
    /* enable crypto processor */
    CRYP_Cmd(ENABLE);
    /* wait until decrypt key has been initialized */
    while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}
    /* phase 2: switch to ECB decrypt with the derived key */
    cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_ECB;
    CRYP_Init(&cryptInit);
    /* enable crypto processor */
    CRYP_Cmd(ENABLE);
    /* flush IN/OUT FIFOs */
    CRYP_FIFOFlush();
    /* feed the block into the IN FIFO one 32-bit word at a time */
    CRYP_DataIn(*(uint32_t*)&inBlock[0]);
    CRYP_DataIn(*(uint32_t*)&inBlock[4]);
    CRYP_DataIn(*(uint32_t*)&inBlock[8]);
    CRYP_DataIn(*(uint32_t*)&inBlock[12]);
    /* wait until the complete message has been processed */
    while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}
    /* NOTE(review): ret stays 0 on this path regardless of hardware
     * outcome; the busy-waits have no timeout -- confirm acceptable */
    *(uint32_t*)&outBlock[0] = CRYP_DataOut();
    *(uint32_t*)&outBlock[4] = CRYP_DataOut();
    *(uint32_t*)&outBlock[8] = CRYP_DataOut();
    *(uint32_t*)&outBlock[12] = CRYP_DataOut();
    /* disable crypto processor */
    CRYP_Cmd(DISABLE);
#endif /* WOLFSSL_STM32_CUBEMX */
    wolfSSL_CryptHwMutexUnLock();
    return ret;
}
  434. #endif /* WOLFSSL_AES_DIRECT || HAVE_AESCCM */
  435. #endif /* HAVE_AES_DECRYPT */
  436. #elif defined(HAVE_COLDFIRE_SEC)
  437. /* Freescale Coldfire SEC support for CBC mode.
  438. * NOTE: no support for AES-CTR/GCM/CCM/Direct */
  439. #include <wolfssl/wolfcrypt/types.h>
  440. #include "sec.h"
  441. #include "mcf5475_sec.h"
  442. #include "mcf5475_siu.h"
  443. #elif defined(FREESCALE_LTC)
  444. #include "fsl_ltc.h"
  445. #if defined(FREESCALE_LTC_AES_GCM)
  446. #undef NEED_AES_TABLES
  447. #undef GCM_TABLE
  448. #endif
  449. /* if LTC doesn't have GCM, use software with LTC AES ECB mode */
  450. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  451. Aes* aes, const byte* inBlock, byte* outBlock)
  452. {
  453. word32 keySize = 0;
  454. byte* key = (byte*)aes->key;
  455. int ret = wc_AesGetKeySize(aes, &keySize);
  456. if (ret != 0)
  457. return ret;
  458. if (wolfSSL_CryptHwMutexLock() == 0) {
  459. LTC_AES_EncryptEcb(LTC_BASE, inBlock, outBlock, AES_BLOCK_SIZE,
  460. key, keySize);
  461. wolfSSL_CryptHwMutexUnLock();
  462. }
  463. return 0;
  464. }
  465. #ifdef HAVE_AES_DECRYPT
  466. static WARN_UNUSED_RESULT int wc_AesDecrypt(
  467. Aes* aes, const byte* inBlock, byte* outBlock)
  468. {
  469. word32 keySize = 0;
  470. byte* key = (byte*)aes->key;
  471. int ret = wc_AesGetKeySize(aes, &keySize);
  472. if (ret != 0)
  473. return ret;
  474. if (wolfSSL_CryptHwMutexLock() == 0) {
  475. LTC_AES_DecryptEcb(LTC_BASE, inBlock, outBlock, AES_BLOCK_SIZE,
  476. key, keySize, kLTC_EncryptKey);
  477. wolfSSL_CryptHwMutexUnLock();
  478. }
  479. return 0;
  480. }
  481. #endif
  482. #elif defined(FREESCALE_MMCAU)
  483. /* Freescale mmCAU hardware AES support for Direct, CBC, CCM, GCM modes
  484. * through the CAU/mmCAU library. Documentation located in
  485. * ColdFire/ColdFire+ CAU and Kinetis mmCAU Software Library User
  486. * Guide (See note in README). */
  487. #ifdef FREESCALE_MMCAU_CLASSIC
  488. /* MMCAU 1.4 library used with non-KSDK / classic MQX builds */
  489. #include "cau_api.h"
  490. #else
  491. #include "fsl_mmcau.h"
  492. #endif
  493. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  494. Aes* aes, const byte* inBlock, byte* outBlock)
  495. {
  496. if (wolfSSL_CryptHwMutexLock() == 0) {
  497. #ifdef FREESCALE_MMCAU_CLASSIC
  498. if ((wc_ptr_t)outBlock % WOLFSSL_MMCAU_ALIGNMENT) {
  499. WOLFSSL_MSG("Bad cau_aes_encrypt alignment");
  500. return BAD_ALIGN_E;
  501. }
  502. cau_aes_encrypt(inBlock, (byte*)aes->key, aes->rounds, outBlock);
  503. #else
  504. MMCAU_AES_EncryptEcb(inBlock, (byte*)aes->key, aes->rounds,
  505. outBlock);
  506. #endif
  507. wolfSSL_CryptHwMutexUnLock();
  508. }
  509. return 0;
  510. }
  511. #ifdef HAVE_AES_DECRYPT
  512. static WARN_UNUSED_RESULT int wc_AesDecrypt(
  513. Aes* aes, const byte* inBlock, byte* outBlock)
  514. {
  515. if (wolfSSL_CryptHwMutexLock() == 0) {
  516. #ifdef FREESCALE_MMCAU_CLASSIC
  517. if ((wc_ptr_t)outBlock % WOLFSSL_MMCAU_ALIGNMENT) {
  518. WOLFSSL_MSG("Bad cau_aes_decrypt alignment");
  519. return BAD_ALIGN_E;
  520. }
  521. cau_aes_decrypt(inBlock, (byte*)aes->key, aes->rounds, outBlock);
  522. #else
  523. MMCAU_AES_DecryptEcb(inBlock, (byte*)aes->key, aes->rounds,
  524. outBlock);
  525. #endif
  526. wolfSSL_CryptHwMutexUnLock();
  527. }
  528. return 0;
  529. }
  530. #endif /* HAVE_AES_DECRYPT */
  531. #elif defined(WOLFSSL_PIC32MZ_CRYPT)
  532. #include <wolfssl/wolfcrypt/port/pic32/pic32mz-crypt.h>
  533. #if defined(HAVE_AESGCM) || defined(WOLFSSL_AES_DIRECT)
  534. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  535. Aes* aes, const byte* inBlock, byte* outBlock)
  536. {
  537. /* Thread mutex protection handled in Pic32Crypto */
  538. return wc_Pic32AesCrypt(aes->key, aes->keylen, NULL, 0,
  539. outBlock, inBlock, AES_BLOCK_SIZE,
  540. PIC32_ENCRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_RECB);
  541. }
  542. #endif
  543. #if defined(HAVE_AES_DECRYPT) && defined(WOLFSSL_AES_DIRECT)
  544. static WARN_UNUSED_RESULT int wc_AesDecrypt(
  545. Aes* aes, const byte* inBlock, byte* outBlock)
  546. {
  547. /* Thread mutex protection handled in Pic32Crypto */
  548. return wc_Pic32AesCrypt(aes->key, aes->keylen, NULL, 0,
  549. outBlock, inBlock, AES_BLOCK_SIZE,
  550. PIC32_DECRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_RECB);
  551. }
  552. #endif
  553. #elif defined(WOLFSSL_NRF51_AES)
  554. /* Use built-in AES hardware - AES 128 ECB Encrypt Only */
  555. #include "wolfssl/wolfcrypt/port/nrf51.h"
  556. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  557. Aes* aes, const byte* inBlock, byte* outBlock)
  558. {
  559. int ret;
  560. ret = wolfSSL_CryptHwMutexLock();
  561. if (ret == 0) {
  562. ret = nrf51_aes_encrypt(inBlock, (byte*)aes->key, aes->rounds,
  563. outBlock);
  564. wolfSSL_CryptHwMutexUnLock();
  565. }
  566. return ret;
  567. }
  568. #ifdef HAVE_AES_DECRYPT
  569. #error nRF51 AES Hardware does not support decrypt
  570. #endif /* HAVE_AES_DECRYPT */
  571. #elif defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
  572. !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_AES)
  573. #include "wolfssl/wolfcrypt/port/Espressif/esp32-crypt.h"
  574. #if defined(HAVE_AESGCM) || defined(WOLFSSL_AES_DIRECT)
  575. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  576. Aes* aes, const byte* inBlock, byte* outBlock)
  577. {
  578. /* Thread mutex protection handled in esp_aes_hw_InUse */
  579. return wc_esp32AesEncrypt(aes, inBlock, outBlock);
  580. }
  581. #endif
  582. #if defined(HAVE_AES_DECRYPT) && defined(WOLFSSL_AES_DIRECT)
  583. static WARN_UNUSED_RESULT int wc_AesDecrypt(
  584. Aes* aes, const byte* inBlock, byte* outBlock)
  585. {
  586. /* Thread mutex protection handled in esp_aes_hw_InUse */
  587. return wc_esp32AesDecrypt(aes, inBlock, outBlock);
  588. }
  589. #endif
  590. #elif defined(WOLFSSL_AESNI)
  591. #define NEED_AES_TABLES
  592. /* Each platform needs to query info type 1 from cpuid to see if aesni is
  593. * supported. Also, let's setup a macro for proper linkage w/o ABI conflicts
  594. */
  595. #ifndef AESNI_ALIGN
  596. #define AESNI_ALIGN 16
  597. #endif
  598. static int checkAESNI = 0;
  599. static int haveAESNI = 0;
  600. static word32 intel_flags = 0;
  601. static WARN_UNUSED_RESULT int Check_CPU_support_AES(void)
  602. {
  603. intel_flags = cpuid_get_flags();
  604. return IS_INTEL_AESNI(intel_flags) != 0;
  605. }
  606. /* tell C compiler these are asm functions in case any mix up of ABI underscore
  607. prefix between clang/gcc/llvm etc */
  608. #ifdef HAVE_AES_CBC
  609. void AES_CBC_encrypt(const unsigned char* in, unsigned char* out,
  610. unsigned char* ivec, unsigned long length,
  611. const unsigned char* KS, int nr)
  612. XASM_LINK("AES_CBC_encrypt");
  613. #ifdef HAVE_AES_DECRYPT
  614. #if defined(WOLFSSL_AESNI_BY4) || defined(WOLFSSL_X86_BUILD)
  615. void AES_CBC_decrypt_by4(const unsigned char* in, unsigned char* out,
  616. unsigned char* ivec, unsigned long length,
  617. const unsigned char* KS, int nr)
  618. XASM_LINK("AES_CBC_decrypt_by4");
  619. #elif defined(WOLFSSL_AESNI_BY6)
  620. void AES_CBC_decrypt_by6(const unsigned char* in, unsigned char* out,
  621. unsigned char* ivec, unsigned long length,
  622. const unsigned char* KS, int nr)
  623. XASM_LINK("AES_CBC_decrypt_by6");
  624. #else /* WOLFSSL_AESNI_BYx */
  625. void AES_CBC_decrypt_by8(const unsigned char* in, unsigned char* out,
  626. unsigned char* ivec, unsigned long length,
  627. const unsigned char* KS, int nr)
  628. XASM_LINK("AES_CBC_decrypt_by8");
  629. #endif /* WOLFSSL_AESNI_BYx */
  630. #endif /* HAVE_AES_DECRYPT */
  631. #endif /* HAVE_AES_CBC */
  632. void AES_ECB_encrypt(const unsigned char* in, unsigned char* out,
  633. unsigned long length, const unsigned char* KS, int nr)
  634. XASM_LINK("AES_ECB_encrypt");
  635. #ifdef HAVE_AES_DECRYPT
  636. void AES_ECB_decrypt(const unsigned char* in, unsigned char* out,
  637. unsigned long length, const unsigned char* KS, int nr)
  638. XASM_LINK("AES_ECB_decrypt");
  639. #endif
  640. void AES_128_Key_Expansion(const unsigned char* userkey,
  641. unsigned char* key_schedule)
  642. XASM_LINK("AES_128_Key_Expansion");
  643. void AES_192_Key_Expansion(const unsigned char* userkey,
  644. unsigned char* key_schedule)
  645. XASM_LINK("AES_192_Key_Expansion");
  646. void AES_256_Key_Expansion(const unsigned char* userkey,
  647. unsigned char* key_schedule)
  648. XASM_LINK("AES_256_Key_Expansion");
  649. static WARN_UNUSED_RESULT int AES_set_encrypt_key(
  650. const unsigned char *userKey, const int bits, Aes* aes)
  651. {
  652. int ret;
  653. if (!userKey || !aes)
  654. return BAD_FUNC_ARG;
  655. switch (bits) {
  656. case 128:
  657. AES_128_Key_Expansion (userKey,(byte*)aes->key); aes->rounds = 10;
  658. return 0;
  659. case 192:
  660. AES_192_Key_Expansion (userKey,(byte*)aes->key); aes->rounds = 12;
  661. return 0;
  662. case 256:
  663. AES_256_Key_Expansion (userKey,(byte*)aes->key); aes->rounds = 14;
  664. return 0;
  665. default:
  666. ret = BAD_FUNC_ARG;
  667. }
  668. return ret;
  669. }
  670. #ifdef HAVE_AES_DECRYPT
/* Build the AES-NI decryption key schedule.
 *
 * First expands the encryption schedule into a temporary Aes, then
 * writes the decrypt schedule into aes->key in reversed order, applying
 * AESIMC (_mm_aesimc_si128, the hardware InvMixColumns transform) to
 * every round key except the first and last.
 *
 * userKey - raw key bytes
 * bits    - key size in bits (128/192/256)
 * aes     - destination context; aes->rounds is set from the temp key
 *
 * Returns 0 on success, BAD_FUNC_ARG on bad arguments/key size,
 * MEMORY_E if the small-stack temporary cannot be allocated. */
static WARN_UNUSED_RESULT int AES_set_decrypt_key(
    const unsigned char* userKey, const int bits, Aes* aes)
{
    word32 nr;
#ifdef WOLFSSL_SMALL_STACK
    Aes *temp_key;
#else
    Aes temp_key[1];
#endif
    __m128i *Key_Schedule;
    __m128i *Temp_Key_Schedule;

    if (!userKey || !aes)
        return BAD_FUNC_ARG;

#ifdef WOLFSSL_SMALL_STACK
    if ((temp_key = (Aes *)XMALLOC(sizeof *aes, aes->heap,
                                   DYNAMIC_TYPE_AES)) == NULL)
        return MEMORY_E;
#endif

    /* expand the ENCRYPT schedule into the temporary first */
    if (AES_set_encrypt_key(userKey,bits,temp_key) == BAD_FUNC_ARG) {
#ifdef WOLFSSL_SMALL_STACK
        XFREE(temp_key, aes->heap, DYNAMIC_TYPE_AES);
#endif
        return BAD_FUNC_ARG;
    }

    Key_Schedule = (__m128i*)aes->key;
    Temp_Key_Schedule = (__m128i*)temp_key->key;

    nr = temp_key->rounds;
    aes->rounds = nr;

    /* SAVE_VECTOR_REGISTERS may itself fail and return; the small-stack
     * variant frees the temporary in that early-return path */
#ifdef WOLFSSL_SMALL_STACK
    SAVE_VECTOR_REGISTERS(XFREE(temp_key, aes->heap, DYNAMIC_TYPE_AES); return _svr_ret;);
#else
    SAVE_VECTOR_REGISTERS(return _svr_ret;);
#endif

    /* reverse the schedule: last encrypt round key becomes the first
     * decrypt round key (no AESIMC on the two endpoints) */
    Key_Schedule[nr] = Temp_Key_Schedule[0];
    Key_Schedule[nr-1] = _mm_aesimc_si128(Temp_Key_Schedule[1]);
    Key_Schedule[nr-2] = _mm_aesimc_si128(Temp_Key_Schedule[2]);
    Key_Schedule[nr-3] = _mm_aesimc_si128(Temp_Key_Schedule[3]);
    Key_Schedule[nr-4] = _mm_aesimc_si128(Temp_Key_Schedule[4]);
    Key_Schedule[nr-5] = _mm_aesimc_si128(Temp_Key_Schedule[5]);
    Key_Schedule[nr-6] = _mm_aesimc_si128(Temp_Key_Schedule[6]);
    Key_Schedule[nr-7] = _mm_aesimc_si128(Temp_Key_Schedule[7]);
    Key_Schedule[nr-8] = _mm_aesimc_si128(Temp_Key_Schedule[8]);
    Key_Schedule[nr-9] = _mm_aesimc_si128(Temp_Key_Schedule[9]);

    /* extra round keys only exist for 192-bit (nr=12) ... */
    if (nr>10) {
        Key_Schedule[nr-10] = _mm_aesimc_si128(Temp_Key_Schedule[10]);
        Key_Schedule[nr-11] = _mm_aesimc_si128(Temp_Key_Schedule[11]);
    }

    /* ... and 256-bit (nr=14) keys */
    if (nr>12) {
        Key_Schedule[nr-12] = _mm_aesimc_si128(Temp_Key_Schedule[12]);
        Key_Schedule[nr-13] = _mm_aesimc_si128(Temp_Key_Schedule[13]);
    }

    Key_Schedule[0] = Temp_Key_Schedule[nr];

    RESTORE_VECTOR_REGISTERS();

#ifdef WOLFSSL_SMALL_STACK
    XFREE(temp_key, aes->heap, DYNAMIC_TYPE_AES);
#endif
    return 0;
}
  729. #endif /* HAVE_AES_DECRYPT */
  730. #elif (defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) \
  731. && !defined(WOLFSSL_QNX_CAAM)) || \
  732. ((defined(WOLFSSL_AFALG) || defined(WOLFSSL_DEVCRYPTO_AES)) && \
  733. defined(HAVE_AESCCM))
  734. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  735. Aes* aes, const byte* inBlock, byte* outBlock)
  736. {
  737. return wc_AesEncryptDirect(aes, outBlock, inBlock);
  738. }
  739. #elif defined(WOLFSSL_AFALG)
  740. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  741. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  742. /* implemented in wolfcrypt/src/port/devcrypto/devcrypto_aes.c */
  743. #elif defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
  744. #include "hal_data.h"
  745. #ifndef WOLFSSL_SCE_AES256_HANDLE
  746. #define WOLFSSL_SCE_AES256_HANDLE g_sce_aes_256
  747. #endif
  748. #ifndef WOLFSSL_SCE_AES192_HANDLE
  749. #define WOLFSSL_SCE_AES192_HANDLE g_sce_aes_192
  750. #endif
  751. #ifndef WOLFSSL_SCE_AES128_HANDLE
  752. #define WOLFSSL_SCE_AES128_HANDLE g_sce_aes_128
  753. #endif
  754. static WARN_UNUSED_RESULT int AES_ECB_encrypt(
  755. Aes* aes, const byte* inBlock, byte* outBlock, int sz)
  756. {
  757. word32 ret;
  758. if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
  759. CRYPTO_WORD_ENDIAN_BIG) {
  760. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  761. }
  762. switch (aes->keylen) {
  763. #ifdef WOLFSSL_AES_128
  764. case AES_128_KEY_SIZE:
  765. ret = WOLFSSL_SCE_AES128_HANDLE.p_api->encrypt(
  766. WOLFSSL_SCE_AES128_HANDLE.p_ctrl, aes->key,
  767. NULL, (sz / sizeof(word32)), (word32*)inBlock,
  768. (word32*)outBlock);
  769. break;
  770. #endif
  771. #ifdef WOLFSSL_AES_192
  772. case AES_192_KEY_SIZE:
  773. ret = WOLFSSL_SCE_AES192_HANDLE.p_api->encrypt(
  774. WOLFSSL_SCE_AES192_HANDLE.p_ctrl, aes->key,
  775. NULL, (sz / sizeof(word32)), (word32*)inBlock,
  776. (word32*)outBlock);
  777. break;
  778. #endif
  779. #ifdef WOLFSSL_AES_256
  780. case AES_256_KEY_SIZE:
  781. ret = WOLFSSL_SCE_AES256_HANDLE.p_api->encrypt(
  782. WOLFSSL_SCE_AES256_HANDLE.p_ctrl, aes->key,
  783. NULL, (sz / sizeof(word32)), (word32*)inBlock,
  784. (word32*)outBlock);
  785. break;
  786. #endif
  787. default:
  788. WOLFSSL_MSG("Unknown key size");
  789. return BAD_FUNC_ARG;
  790. }
  791. if (ret != SSP_SUCCESS) {
  792. /* revert input */
  793. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  794. return WC_HW_E;
  795. }
  796. if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
  797. CRYPTO_WORD_ENDIAN_BIG) {
  798. ByteReverseWords((word32*)outBlock, (word32*)outBlock, sz);
  799. if (inBlock != outBlock) {
  800. /* revert input */
  801. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  802. }
  803. }
  804. return 0;
  805. }
  806. #if defined(HAVE_AES_DECRYPT)
  807. static WARN_UNUSED_RESULT int AES_ECB_decrypt(
  808. Aes* aes, const byte* inBlock, byte* outBlock, int sz)
  809. {
  810. word32 ret;
  811. if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
  812. CRYPTO_WORD_ENDIAN_BIG) {
  813. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  814. }
  815. switch (aes->keylen) {
  816. #ifdef WOLFSSL_AES_128
  817. case AES_128_KEY_SIZE:
  818. ret = WOLFSSL_SCE_AES128_HANDLE.p_api->decrypt(
  819. WOLFSSL_SCE_AES128_HANDLE.p_ctrl, aes->key, aes->reg,
  820. (sz / sizeof(word32)), (word32*)inBlock,
  821. (word32*)outBlock);
  822. break;
  823. #endif
  824. #ifdef WOLFSSL_AES_192
  825. case AES_192_KEY_SIZE:
  826. ret = WOLFSSL_SCE_AES192_HANDLE.p_api->decrypt(
  827. WOLFSSL_SCE_AES192_HANDLE.p_ctrl, aes->key, aes->reg,
  828. (sz / sizeof(word32)), (word32*)inBlock,
  829. (word32*)outBlock);
  830. break;
  831. #endif
  832. #ifdef WOLFSSL_AES_256
  833. case AES_256_KEY_SIZE:
  834. ret = WOLFSSL_SCE_AES256_HANDLE.p_api->decrypt(
  835. WOLFSSL_SCE_AES256_HANDLE.p_ctrl, aes->key, aes->reg,
  836. (sz / sizeof(word32)), (word32*)inBlock,
  837. (word32*)outBlock);
  838. break;
  839. #endif
  840. default:
  841. WOLFSSL_MSG("Unknown key size");
  842. return BAD_FUNC_ARG;
  843. }
  844. if (ret != SSP_SUCCESS) {
  845. return WC_HW_E;
  846. }
  847. if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
  848. CRYPTO_WORD_ENDIAN_BIG) {
  849. ByteReverseWords((word32*)outBlock, (word32*)outBlock, sz);
  850. if (inBlock != outBlock) {
  851. /* revert input */
  852. ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz);
  853. }
  854. }
  855. return 0;
  856. }
  857. #endif /* HAVE_AES_DECRYPT */
  858. #if defined(HAVE_AESGCM) || defined(WOLFSSL_AES_DIRECT)
  859. static WARN_UNUSED_RESULT int wc_AesEncrypt(
  860. Aes* aes, const byte* inBlock, byte* outBlock)
  861. {
  862. return AES_ECB_encrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
  863. }
  864. #endif
  865. #if defined(HAVE_AES_DECRYPT) && defined(WOLFSSL_AES_DIRECT)
  866. static WARN_UNUSED_RESULT int wc_AesDecrypt(
  867. Aes* aes, const byte* inBlock, byte* outBlock)
  868. {
  869. return AES_ECB_decrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
  870. }
  871. #endif
  872. #elif defined(WOLFSSL_KCAPI_AES)
  873. /* Only CBC and GCM that are in wolfcrypt/src/port/kcapi/kcapi_aes.c */
  874. #if defined(WOLFSSL_AES_COUNTER) || defined(HAVE_AESCCM) || \
  875. defined(WOLFSSL_CMAC) || defined(WOLFSSL_AES_OFB) || \
  876. defined(WOLFSSL_AES_CFB) || defined(HAVE_AES_ECB) || \
  877. defined(WOLFSSL_AES_DIRECT) || \
  878. (defined(HAVE_AES_CBC) && defined(WOLFSSL_NO_KCAPI_AES_CBC))
  879. #define NEED_AES_TABLES
  880. #endif
  881. #elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
  882. /* implemented in wolfcrypt/src/port/psa/psa_aes.c */
  883. #else
  884. /* using wolfCrypt software implementation */
  885. #define NEED_AES_TABLES
  886. #endif
  887. #ifdef NEED_AES_TABLES
/* AES key-schedule round constants (Rcon), stored in the top byte of
 * each word; one constant is consumed per key-expansion round. */
static const FLASH_QUALIFIER word32 rcon[] = {
    0x01000000, 0x02000000, 0x04000000, 0x08000000,
    0x10000000, 0x20000000, 0x40000000, 0x80000000,
    0x1B000000, 0x36000000,
    /* for 128-bit blocks, Rijndael never uses more than 10 rcon values */
};
  894. #ifndef WOLFSSL_AES_SMALL_TABLES
/* AES encryption T-tables (standard Rijndael Te0..Te3: S-box lookup
 * fused with the MixColumns multiplication).  Each of the four
 * sub-tables holds the same 256 word values byte-rotated by one more
 * position (compare entry 0: 0xc66363a5 -> 0xa5c66363 -> 0x63a5c663 ->
 * 0x6363a5c6), so one round is four table lookups + XORs per output
 * word.  NOTE(review): data-dependent table indexing is not
 * cache-timing resistant; WOLFSSL_AES_SMALL_TABLES disables these. */
static const FLASH_QUALIFIER word32 Te[4][256] = {
{
    0xc66363a5U, 0xf87c7c84U, 0xee777799U, 0xf67b7b8dU,
    0xfff2f20dU, 0xd66b6bbdU, 0xde6f6fb1U, 0x91c5c554U,
    0x60303050U, 0x02010103U, 0xce6767a9U, 0x562b2b7dU,
    0xe7fefe19U, 0xb5d7d762U, 0x4dababe6U, 0xec76769aU,
    0x8fcaca45U, 0x1f82829dU, 0x89c9c940U, 0xfa7d7d87U,
    0xeffafa15U, 0xb25959ebU, 0x8e4747c9U, 0xfbf0f00bU,
    0x41adadecU, 0xb3d4d467U, 0x5fa2a2fdU, 0x45afafeaU,
    0x239c9cbfU, 0x53a4a4f7U, 0xe4727296U, 0x9bc0c05bU,
    0x75b7b7c2U, 0xe1fdfd1cU, 0x3d9393aeU, 0x4c26266aU,
    0x6c36365aU, 0x7e3f3f41U, 0xf5f7f702U, 0x83cccc4fU,
    0x6834345cU, 0x51a5a5f4U, 0xd1e5e534U, 0xf9f1f108U,
    0xe2717193U, 0xabd8d873U, 0x62313153U, 0x2a15153fU,
    0x0804040cU, 0x95c7c752U, 0x46232365U, 0x9dc3c35eU,
    0x30181828U, 0x379696a1U, 0x0a05050fU, 0x2f9a9ab5U,
    0x0e070709U, 0x24121236U, 0x1b80809bU, 0xdfe2e23dU,
    0xcdebeb26U, 0x4e272769U, 0x7fb2b2cdU, 0xea75759fU,
    0x1209091bU, 0x1d83839eU, 0x582c2c74U, 0x341a1a2eU,
    0x361b1b2dU, 0xdc6e6eb2U, 0xb45a5aeeU, 0x5ba0a0fbU,
    0xa45252f6U, 0x763b3b4dU, 0xb7d6d661U, 0x7db3b3ceU,
    0x5229297bU, 0xdde3e33eU, 0x5e2f2f71U, 0x13848497U,
    0xa65353f5U, 0xb9d1d168U, 0x00000000U, 0xc1eded2cU,
    0x40202060U, 0xe3fcfc1fU, 0x79b1b1c8U, 0xb65b5bedU,
    0xd46a6abeU, 0x8dcbcb46U, 0x67bebed9U, 0x7239394bU,
    0x944a4adeU, 0x984c4cd4U, 0xb05858e8U, 0x85cfcf4aU,
    0xbbd0d06bU, 0xc5efef2aU, 0x4faaaae5U, 0xedfbfb16U,
    0x864343c5U, 0x9a4d4dd7U, 0x66333355U, 0x11858594U,
    0x8a4545cfU, 0xe9f9f910U, 0x04020206U, 0xfe7f7f81U,
    0xa05050f0U, 0x783c3c44U, 0x259f9fbaU, 0x4ba8a8e3U,
    0xa25151f3U, 0x5da3a3feU, 0x804040c0U, 0x058f8f8aU,
    0x3f9292adU, 0x219d9dbcU, 0x70383848U, 0xf1f5f504U,
    0x63bcbcdfU, 0x77b6b6c1U, 0xafdada75U, 0x42212163U,
    0x20101030U, 0xe5ffff1aU, 0xfdf3f30eU, 0xbfd2d26dU,
    0x81cdcd4cU, 0x180c0c14U, 0x26131335U, 0xc3ecec2fU,
    0xbe5f5fe1U, 0x359797a2U, 0x884444ccU, 0x2e171739U,
    0x93c4c457U, 0x55a7a7f2U, 0xfc7e7e82U, 0x7a3d3d47U,
    0xc86464acU, 0xba5d5de7U, 0x3219192bU, 0xe6737395U,
    0xc06060a0U, 0x19818198U, 0x9e4f4fd1U, 0xa3dcdc7fU,
    0x44222266U, 0x542a2a7eU, 0x3b9090abU, 0x0b888883U,
    0x8c4646caU, 0xc7eeee29U, 0x6bb8b8d3U, 0x2814143cU,
    0xa7dede79U, 0xbc5e5ee2U, 0x160b0b1dU, 0xaddbdb76U,
    0xdbe0e03bU, 0x64323256U, 0x743a3a4eU, 0x140a0a1eU,
    0x924949dbU, 0x0c06060aU, 0x4824246cU, 0xb85c5ce4U,
    0x9fc2c25dU, 0xbdd3d36eU, 0x43acacefU, 0xc46262a6U,
    0x399191a8U, 0x319595a4U, 0xd3e4e437U, 0xf279798bU,
    0xd5e7e732U, 0x8bc8c843U, 0x6e373759U, 0xda6d6db7U,
    0x018d8d8cU, 0xb1d5d564U, 0x9c4e4ed2U, 0x49a9a9e0U,
    0xd86c6cb4U, 0xac5656faU, 0xf3f4f407U, 0xcfeaea25U,
    0xca6565afU, 0xf47a7a8eU, 0x47aeaee9U, 0x10080818U,
    0x6fbabad5U, 0xf0787888U, 0x4a25256fU, 0x5c2e2e72U,
    0x381c1c24U, 0x57a6a6f1U, 0x73b4b4c7U, 0x97c6c651U,
    0xcbe8e823U, 0xa1dddd7cU, 0xe874749cU, 0x3e1f1f21U,
    0x964b4bddU, 0x61bdbddcU, 0x0d8b8b86U, 0x0f8a8a85U,
    0xe0707090U, 0x7c3e3e42U, 0x71b5b5c4U, 0xcc6666aaU,
    0x904848d8U, 0x06030305U, 0xf7f6f601U, 0x1c0e0e12U,
    0xc26161a3U, 0x6a35355fU, 0xae5757f9U, 0x69b9b9d0U,
    0x17868691U, 0x99c1c158U, 0x3a1d1d27U, 0x279e9eb9U,
    0xd9e1e138U, 0xebf8f813U, 0x2b9898b3U, 0x22111133U,
    0xd26969bbU, 0xa9d9d970U, 0x078e8e89U, 0x339494a7U,
    0x2d9b9bb6U, 0x3c1e1e22U, 0x15878792U, 0xc9e9e920U,
    0x87cece49U, 0xaa5555ffU, 0x50282878U, 0xa5dfdf7aU,
    0x038c8c8fU, 0x59a1a1f8U, 0x09898980U, 0x1a0d0d17U,
    0x65bfbfdaU, 0xd7e6e631U, 0x844242c6U, 0xd06868b8U,
    0x824141c3U, 0x299999b0U, 0x5a2d2d77U, 0x1e0f0f11U,
    0x7bb0b0cbU, 0xa85454fcU, 0x6dbbbbd6U, 0x2c16163aU,
},
{
    0xa5c66363U, 0x84f87c7cU, 0x99ee7777U, 0x8df67b7bU,
    0x0dfff2f2U, 0xbdd66b6bU, 0xb1de6f6fU, 0x5491c5c5U,
    0x50603030U, 0x03020101U, 0xa9ce6767U, 0x7d562b2bU,
    0x19e7fefeU, 0x62b5d7d7U, 0xe64dababU, 0x9aec7676U,
    0x458fcacaU, 0x9d1f8282U, 0x4089c9c9U, 0x87fa7d7dU,
    0x15effafaU, 0xebb25959U, 0xc98e4747U, 0x0bfbf0f0U,
    0xec41adadU, 0x67b3d4d4U, 0xfd5fa2a2U, 0xea45afafU,
    0xbf239c9cU, 0xf753a4a4U, 0x96e47272U, 0x5b9bc0c0U,
    0xc275b7b7U, 0x1ce1fdfdU, 0xae3d9393U, 0x6a4c2626U,
    0x5a6c3636U, 0x417e3f3fU, 0x02f5f7f7U, 0x4f83ccccU,
    0x5c683434U, 0xf451a5a5U, 0x34d1e5e5U, 0x08f9f1f1U,
    0x93e27171U, 0x73abd8d8U, 0x53623131U, 0x3f2a1515U,
    0x0c080404U, 0x5295c7c7U, 0x65462323U, 0x5e9dc3c3U,
    0x28301818U, 0xa1379696U, 0x0f0a0505U, 0xb52f9a9aU,
    0x090e0707U, 0x36241212U, 0x9b1b8080U, 0x3ddfe2e2U,
    0x26cdebebU, 0x694e2727U, 0xcd7fb2b2U, 0x9fea7575U,
    0x1b120909U, 0x9e1d8383U, 0x74582c2cU, 0x2e341a1aU,
    0x2d361b1bU, 0xb2dc6e6eU, 0xeeb45a5aU, 0xfb5ba0a0U,
    0xf6a45252U, 0x4d763b3bU, 0x61b7d6d6U, 0xce7db3b3U,
    0x7b522929U, 0x3edde3e3U, 0x715e2f2fU, 0x97138484U,
    0xf5a65353U, 0x68b9d1d1U, 0x00000000U, 0x2cc1ededU,
    0x60402020U, 0x1fe3fcfcU, 0xc879b1b1U, 0xedb65b5bU,
    0xbed46a6aU, 0x468dcbcbU, 0xd967bebeU, 0x4b723939U,
    0xde944a4aU, 0xd4984c4cU, 0xe8b05858U, 0x4a85cfcfU,
    0x6bbbd0d0U, 0x2ac5efefU, 0xe54faaaaU, 0x16edfbfbU,
    0xc5864343U, 0xd79a4d4dU, 0x55663333U, 0x94118585U,
    0xcf8a4545U, 0x10e9f9f9U, 0x06040202U, 0x81fe7f7fU,
    0xf0a05050U, 0x44783c3cU, 0xba259f9fU, 0xe34ba8a8U,
    0xf3a25151U, 0xfe5da3a3U, 0xc0804040U, 0x8a058f8fU,
    0xad3f9292U, 0xbc219d9dU, 0x48703838U, 0x04f1f5f5U,
    0xdf63bcbcU, 0xc177b6b6U, 0x75afdadaU, 0x63422121U,
    0x30201010U, 0x1ae5ffffU, 0x0efdf3f3U, 0x6dbfd2d2U,
    0x4c81cdcdU, 0x14180c0cU, 0x35261313U, 0x2fc3ececU,
    0xe1be5f5fU, 0xa2359797U, 0xcc884444U, 0x392e1717U,
    0x5793c4c4U, 0xf255a7a7U, 0x82fc7e7eU, 0x477a3d3dU,
    0xacc86464U, 0xe7ba5d5dU, 0x2b321919U, 0x95e67373U,
    0xa0c06060U, 0x98198181U, 0xd19e4f4fU, 0x7fa3dcdcU,
    0x66442222U, 0x7e542a2aU, 0xab3b9090U, 0x830b8888U,
    0xca8c4646U, 0x29c7eeeeU, 0xd36bb8b8U, 0x3c281414U,
    0x79a7dedeU, 0xe2bc5e5eU, 0x1d160b0bU, 0x76addbdbU,
    0x3bdbe0e0U, 0x56643232U, 0x4e743a3aU, 0x1e140a0aU,
    0xdb924949U, 0x0a0c0606U, 0x6c482424U, 0xe4b85c5cU,
    0x5d9fc2c2U, 0x6ebdd3d3U, 0xef43acacU, 0xa6c46262U,
    0xa8399191U, 0xa4319595U, 0x37d3e4e4U, 0x8bf27979U,
    0x32d5e7e7U, 0x438bc8c8U, 0x596e3737U, 0xb7da6d6dU,
    0x8c018d8dU, 0x64b1d5d5U, 0xd29c4e4eU, 0xe049a9a9U,
    0xb4d86c6cU, 0xfaac5656U, 0x07f3f4f4U, 0x25cfeaeaU,
    0xafca6565U, 0x8ef47a7aU, 0xe947aeaeU, 0x18100808U,
    0xd56fbabaU, 0x88f07878U, 0x6f4a2525U, 0x725c2e2eU,
    0x24381c1cU, 0xf157a6a6U, 0xc773b4b4U, 0x5197c6c6U,
    0x23cbe8e8U, 0x7ca1ddddU, 0x9ce87474U, 0x213e1f1fU,
    0xdd964b4bU, 0xdc61bdbdU, 0x860d8b8bU, 0x850f8a8aU,
    0x90e07070U, 0x427c3e3eU, 0xc471b5b5U, 0xaacc6666U,
    0xd8904848U, 0x05060303U, 0x01f7f6f6U, 0x121c0e0eU,
    0xa3c26161U, 0x5f6a3535U, 0xf9ae5757U, 0xd069b9b9U,
    0x91178686U, 0x5899c1c1U, 0x273a1d1dU, 0xb9279e9eU,
    0x38d9e1e1U, 0x13ebf8f8U, 0xb32b9898U, 0x33221111U,
    0xbbd26969U, 0x70a9d9d9U, 0x89078e8eU, 0xa7339494U,
    0xb62d9b9bU, 0x223c1e1eU, 0x92158787U, 0x20c9e9e9U,
    0x4987ceceU, 0xffaa5555U, 0x78502828U, 0x7aa5dfdfU,
    0x8f038c8cU, 0xf859a1a1U, 0x80098989U, 0x171a0d0dU,
    0xda65bfbfU, 0x31d7e6e6U, 0xc6844242U, 0xb8d06868U,
    0xc3824141U, 0xb0299999U, 0x775a2d2dU, 0x111e0f0fU,
    0xcb7bb0b0U, 0xfca85454U, 0xd66dbbbbU, 0x3a2c1616U,
},
{
    0x63a5c663U, 0x7c84f87cU, 0x7799ee77U, 0x7b8df67bU,
    0xf20dfff2U, 0x6bbdd66bU, 0x6fb1de6fU, 0xc55491c5U,
    0x30506030U, 0x01030201U, 0x67a9ce67U, 0x2b7d562bU,
    0xfe19e7feU, 0xd762b5d7U, 0xabe64dabU, 0x769aec76U,
    0xca458fcaU, 0x829d1f82U, 0xc94089c9U, 0x7d87fa7dU,
    0xfa15effaU, 0x59ebb259U, 0x47c98e47U, 0xf00bfbf0U,
    0xadec41adU, 0xd467b3d4U, 0xa2fd5fa2U, 0xafea45afU,
    0x9cbf239cU, 0xa4f753a4U, 0x7296e472U, 0xc05b9bc0U,
    0xb7c275b7U, 0xfd1ce1fdU, 0x93ae3d93U, 0x266a4c26U,
    0x365a6c36U, 0x3f417e3fU, 0xf702f5f7U, 0xcc4f83ccU,
    0x345c6834U, 0xa5f451a5U, 0xe534d1e5U, 0xf108f9f1U,
    0x7193e271U, 0xd873abd8U, 0x31536231U, 0x153f2a15U,
    0x040c0804U, 0xc75295c7U, 0x23654623U, 0xc35e9dc3U,
    0x18283018U, 0x96a13796U, 0x050f0a05U, 0x9ab52f9aU,
    0x07090e07U, 0x12362412U, 0x809b1b80U, 0xe23ddfe2U,
    0xeb26cdebU, 0x27694e27U, 0xb2cd7fb2U, 0x759fea75U,
    0x091b1209U, 0x839e1d83U, 0x2c74582cU, 0x1a2e341aU,
    0x1b2d361bU, 0x6eb2dc6eU, 0x5aeeb45aU, 0xa0fb5ba0U,
    0x52f6a452U, 0x3b4d763bU, 0xd661b7d6U, 0xb3ce7db3U,
    0x297b5229U, 0xe33edde3U, 0x2f715e2fU, 0x84971384U,
    0x53f5a653U, 0xd168b9d1U, 0x00000000U, 0xed2cc1edU,
    0x20604020U, 0xfc1fe3fcU, 0xb1c879b1U, 0x5bedb65bU,
    0x6abed46aU, 0xcb468dcbU, 0xbed967beU, 0x394b7239U,
    0x4ade944aU, 0x4cd4984cU, 0x58e8b058U, 0xcf4a85cfU,
    0xd06bbbd0U, 0xef2ac5efU, 0xaae54faaU, 0xfb16edfbU,
    0x43c58643U, 0x4dd79a4dU, 0x33556633U, 0x85941185U,
    0x45cf8a45U, 0xf910e9f9U, 0x02060402U, 0x7f81fe7fU,
    0x50f0a050U, 0x3c44783cU, 0x9fba259fU, 0xa8e34ba8U,
    0x51f3a251U, 0xa3fe5da3U, 0x40c08040U, 0x8f8a058fU,
    0x92ad3f92U, 0x9dbc219dU, 0x38487038U, 0xf504f1f5U,
    0xbcdf63bcU, 0xb6c177b6U, 0xda75afdaU, 0x21634221U,
    0x10302010U, 0xff1ae5ffU, 0xf30efdf3U, 0xd26dbfd2U,
    0xcd4c81cdU, 0x0c14180cU, 0x13352613U, 0xec2fc3ecU,
    0x5fe1be5fU, 0x97a23597U, 0x44cc8844U, 0x17392e17U,
    0xc45793c4U, 0xa7f255a7U, 0x7e82fc7eU, 0x3d477a3dU,
    0x64acc864U, 0x5de7ba5dU, 0x192b3219U, 0x7395e673U,
    0x60a0c060U, 0x81981981U, 0x4fd19e4fU, 0xdc7fa3dcU,
    0x22664422U, 0x2a7e542aU, 0x90ab3b90U, 0x88830b88U,
    0x46ca8c46U, 0xee29c7eeU, 0xb8d36bb8U, 0x143c2814U,
    0xde79a7deU, 0x5ee2bc5eU, 0x0b1d160bU, 0xdb76addbU,
    0xe03bdbe0U, 0x32566432U, 0x3a4e743aU, 0x0a1e140aU,
    0x49db9249U, 0x060a0c06U, 0x246c4824U, 0x5ce4b85cU,
    0xc25d9fc2U, 0xd36ebdd3U, 0xacef43acU, 0x62a6c462U,
    0x91a83991U, 0x95a43195U, 0xe437d3e4U, 0x798bf279U,
    0xe732d5e7U, 0xc8438bc8U, 0x37596e37U, 0x6db7da6dU,
    0x8d8c018dU, 0xd564b1d5U, 0x4ed29c4eU, 0xa9e049a9U,
    0x6cb4d86cU, 0x56faac56U, 0xf407f3f4U, 0xea25cfeaU,
    0x65afca65U, 0x7a8ef47aU, 0xaee947aeU, 0x08181008U,
    0xbad56fbaU, 0x7888f078U, 0x256f4a25U, 0x2e725c2eU,
    0x1c24381cU, 0xa6f157a6U, 0xb4c773b4U, 0xc65197c6U,
    0xe823cbe8U, 0xdd7ca1ddU, 0x749ce874U, 0x1f213e1fU,
    0x4bdd964bU, 0xbddc61bdU, 0x8b860d8bU, 0x8a850f8aU,
    0x7090e070U, 0x3e427c3eU, 0xb5c471b5U, 0x66aacc66U,
    0x48d89048U, 0x03050603U, 0xf601f7f6U, 0x0e121c0eU,
    0x61a3c261U, 0x355f6a35U, 0x57f9ae57U, 0xb9d069b9U,
    0x86911786U, 0xc15899c1U, 0x1d273a1dU, 0x9eb9279eU,
    0xe138d9e1U, 0xf813ebf8U, 0x98b32b98U, 0x11332211U,
    0x69bbd269U, 0xd970a9d9U, 0x8e89078eU, 0x94a73394U,
    0x9bb62d9bU, 0x1e223c1eU, 0x87921587U, 0xe920c9e9U,
    0xce4987ceU, 0x55ffaa55U, 0x28785028U, 0xdf7aa5dfU,
    0x8c8f038cU, 0xa1f859a1U, 0x89800989U, 0x0d171a0dU,
    0xbfda65bfU, 0xe631d7e6U, 0x42c68442U, 0x68b8d068U,
    0x41c38241U, 0x99b02999U, 0x2d775a2dU, 0x0f111e0fU,
    0xb0cb7bb0U, 0x54fca854U, 0xbbd66dbbU, 0x163a2c16U,
},
{
    0x6363a5c6U, 0x7c7c84f8U, 0x777799eeU, 0x7b7b8df6U,
    0xf2f20dffU, 0x6b6bbdd6U, 0x6f6fb1deU, 0xc5c55491U,
    0x30305060U, 0x01010302U, 0x6767a9ceU, 0x2b2b7d56U,
    0xfefe19e7U, 0xd7d762b5U, 0xababe64dU, 0x76769aecU,
    0xcaca458fU, 0x82829d1fU, 0xc9c94089U, 0x7d7d87faU,
    0xfafa15efU, 0x5959ebb2U, 0x4747c98eU, 0xf0f00bfbU,
    0xadadec41U, 0xd4d467b3U, 0xa2a2fd5fU, 0xafafea45U,
    0x9c9cbf23U, 0xa4a4f753U, 0x727296e4U, 0xc0c05b9bU,
    0xb7b7c275U, 0xfdfd1ce1U, 0x9393ae3dU, 0x26266a4cU,
    0x36365a6cU, 0x3f3f417eU, 0xf7f702f5U, 0xcccc4f83U,
    0x34345c68U, 0xa5a5f451U, 0xe5e534d1U, 0xf1f108f9U,
    0x717193e2U, 0xd8d873abU, 0x31315362U, 0x15153f2aU,
    0x04040c08U, 0xc7c75295U, 0x23236546U, 0xc3c35e9dU,
    0x18182830U, 0x9696a137U, 0x05050f0aU, 0x9a9ab52fU,
    0x0707090eU, 0x12123624U, 0x80809b1bU, 0xe2e23ddfU,
    0xebeb26cdU, 0x2727694eU, 0xb2b2cd7fU, 0x75759feaU,
    0x09091b12U, 0x83839e1dU, 0x2c2c7458U, 0x1a1a2e34U,
    0x1b1b2d36U, 0x6e6eb2dcU, 0x5a5aeeb4U, 0xa0a0fb5bU,
    0x5252f6a4U, 0x3b3b4d76U, 0xd6d661b7U, 0xb3b3ce7dU,
    0x29297b52U, 0xe3e33eddU, 0x2f2f715eU, 0x84849713U,
    0x5353f5a6U, 0xd1d168b9U, 0x00000000U, 0xeded2cc1U,
    0x20206040U, 0xfcfc1fe3U, 0xb1b1c879U, 0x5b5bedb6U,
    0x6a6abed4U, 0xcbcb468dU, 0xbebed967U, 0x39394b72U,
    0x4a4ade94U, 0x4c4cd498U, 0x5858e8b0U, 0xcfcf4a85U,
    0xd0d06bbbU, 0xefef2ac5U, 0xaaaae54fU, 0xfbfb16edU,
    0x4343c586U, 0x4d4dd79aU, 0x33335566U, 0x85859411U,
    0x4545cf8aU, 0xf9f910e9U, 0x02020604U, 0x7f7f81feU,
    0x5050f0a0U, 0x3c3c4478U, 0x9f9fba25U, 0xa8a8e34bU,
    0x5151f3a2U, 0xa3a3fe5dU, 0x4040c080U, 0x8f8f8a05U,
    0x9292ad3fU, 0x9d9dbc21U, 0x38384870U, 0xf5f504f1U,
    0xbcbcdf63U, 0xb6b6c177U, 0xdada75afU, 0x21216342U,
    0x10103020U, 0xffff1ae5U, 0xf3f30efdU, 0xd2d26dbfU,
    0xcdcd4c81U, 0x0c0c1418U, 0x13133526U, 0xecec2fc3U,
    0x5f5fe1beU, 0x9797a235U, 0x4444cc88U, 0x1717392eU,
    0xc4c45793U, 0xa7a7f255U, 0x7e7e82fcU, 0x3d3d477aU,
    0x6464acc8U, 0x5d5de7baU, 0x19192b32U, 0x737395e6U,
    0x6060a0c0U, 0x81819819U, 0x4f4fd19eU, 0xdcdc7fa3U,
    0x22226644U, 0x2a2a7e54U, 0x9090ab3bU, 0x8888830bU,
    0x4646ca8cU, 0xeeee29c7U, 0xb8b8d36bU, 0x14143c28U,
    0xdede79a7U, 0x5e5ee2bcU, 0x0b0b1d16U, 0xdbdb76adU,
    0xe0e03bdbU, 0x32325664U, 0x3a3a4e74U, 0x0a0a1e14U,
    0x4949db92U, 0x06060a0cU, 0x24246c48U, 0x5c5ce4b8U,
    0xc2c25d9fU, 0xd3d36ebdU, 0xacacef43U, 0x6262a6c4U,
    0x9191a839U, 0x9595a431U, 0xe4e437d3U, 0x79798bf2U,
    0xe7e732d5U, 0xc8c8438bU, 0x3737596eU, 0x6d6db7daU,
    0x8d8d8c01U, 0xd5d564b1U, 0x4e4ed29cU, 0xa9a9e049U,
    0x6c6cb4d8U, 0x5656faacU, 0xf4f407f3U, 0xeaea25cfU,
    0x6565afcaU, 0x7a7a8ef4U, 0xaeaee947U, 0x08081810U,
    0xbabad56fU, 0x787888f0U, 0x25256f4aU, 0x2e2e725cU,
    0x1c1c2438U, 0xa6a6f157U, 0xb4b4c773U, 0xc6c65197U,
    0xe8e823cbU, 0xdddd7ca1U, 0x74749ce8U, 0x1f1f213eU,
    0x4b4bdd96U, 0xbdbddc61U, 0x8b8b860dU, 0x8a8a850fU,
    0x707090e0U, 0x3e3e427cU, 0xb5b5c471U, 0x6666aaccU,
    0x4848d890U, 0x03030506U, 0xf6f601f7U, 0x0e0e121cU,
    0x6161a3c2U, 0x35355f6aU, 0x5757f9aeU, 0xb9b9d069U,
    0x86869117U, 0xc1c15899U, 0x1d1d273aU, 0x9e9eb927U,
    0xe1e138d9U, 0xf8f813ebU, 0x9898b32bU, 0x11113322U,
    0x6969bbd2U, 0xd9d970a9U, 0x8e8e8907U, 0x9494a733U,
    0x9b9bb62dU, 0x1e1e223cU, 0x87879215U, 0xe9e920c9U,
    0xcece4987U, 0x5555ffaaU, 0x28287850U, 0xdfdf7aa5U,
    0x8c8c8f03U, 0xa1a1f859U, 0x89898009U, 0x0d0d171aU,
    0xbfbfda65U, 0xe6e631d7U, 0x4242c684U, 0x6868b8d0U,
    0x4141c382U, 0x9999b029U, 0x2d2d775aU, 0x0f0f111eU,
    0xb0b0cb7bU, 0x5454fca8U, 0xbbbbd66dU, 0x16163a2cU,
}
};
  1161. #ifdef HAVE_AES_DECRYPT
/* Inverse cipher lookup tables for the table-driven AES decrypt
 * (equivalent-inverse-cipher form).  Td[i][b] combines InvSubBytes and
 * InvMixColumns for byte b; the four sub-tables are byte rotations of
 * one another so each state byte position has its own table.
 * Values must not be edited by hand -- they are fixed by the AES
 * specification (FIPS 197). */
static const FLASH_QUALIFIER word32 Td[4][256] = {
{
    0x51f4a750U, 0x7e416553U, 0x1a17a4c3U, 0x3a275e96U,
    0x3bab6bcbU, 0x1f9d45f1U, 0xacfa58abU, 0x4be30393U,
    0x2030fa55U, 0xad766df6U, 0x88cc7691U, 0xf5024c25U,
    0x4fe5d7fcU, 0xc52acbd7U, 0x26354480U, 0xb562a38fU,
    0xdeb15a49U, 0x25ba1b67U, 0x45ea0e98U, 0x5dfec0e1U,
    0xc32f7502U, 0x814cf012U, 0x8d4697a3U, 0x6bd3f9c6U,
    0x038f5fe7U, 0x15929c95U, 0xbf6d7aebU, 0x955259daU,
    0xd4be832dU, 0x587421d3U, 0x49e06929U, 0x8ec9c844U,
    0x75c2896aU, 0xf48e7978U, 0x99583e6bU, 0x27b971ddU,
    0xbee14fb6U, 0xf088ad17U, 0xc920ac66U, 0x7dce3ab4U,
    0x63df4a18U, 0xe51a3182U, 0x97513360U, 0x62537f45U,
    0xb16477e0U, 0xbb6bae84U, 0xfe81a01cU, 0xf9082b94U,
    0x70486858U, 0x8f45fd19U, 0x94de6c87U, 0x527bf8b7U,
    0xab73d323U, 0x724b02e2U, 0xe31f8f57U, 0x6655ab2aU,
    0xb2eb2807U, 0x2fb5c203U, 0x86c57b9aU, 0xd33708a5U,
    0x302887f2U, 0x23bfa5b2U, 0x02036abaU, 0xed16825cU,
    0x8acf1c2bU, 0xa779b492U, 0xf307f2f0U, 0x4e69e2a1U,
    0x65daf4cdU, 0x0605bed5U, 0xd134621fU, 0xc4a6fe8aU,
    0x342e539dU, 0xa2f355a0U, 0x058ae132U, 0xa4f6eb75U,
    0x0b83ec39U, 0x4060efaaU, 0x5e719f06U, 0xbd6e1051U,
    0x3e218af9U, 0x96dd063dU, 0xdd3e05aeU, 0x4de6bd46U,
    0x91548db5U, 0x71c45d05U, 0x0406d46fU, 0x605015ffU,
    0x1998fb24U, 0xd6bde997U, 0x894043ccU, 0x67d99e77U,
    0xb0e842bdU, 0x07898b88U, 0xe7195b38U, 0x79c8eedbU,
    0xa17c0a47U, 0x7c420fe9U, 0xf8841ec9U, 0x00000000U,
    0x09808683U, 0x322bed48U, 0x1e1170acU, 0x6c5a724eU,
    0xfd0efffbU, 0x0f853856U, 0x3daed51eU, 0x362d3927U,
    0x0a0fd964U, 0x685ca621U, 0x9b5b54d1U, 0x24362e3aU,
    0x0c0a67b1U, 0x9357e70fU, 0xb4ee96d2U, 0x1b9b919eU,
    0x80c0c54fU, 0x61dc20a2U, 0x5a774b69U, 0x1c121a16U,
    0xe293ba0aU, 0xc0a02ae5U, 0x3c22e043U, 0x121b171dU,
    0x0e090d0bU, 0xf28bc7adU, 0x2db6a8b9U, 0x141ea9c8U,
    0x57f11985U, 0xaf75074cU, 0xee99ddbbU, 0xa37f60fdU,
    0xf701269fU, 0x5c72f5bcU, 0x44663bc5U, 0x5bfb7e34U,
    0x8b432976U, 0xcb23c6dcU, 0xb6edfc68U, 0xb8e4f163U,
    0xd731dccaU, 0x42638510U, 0x13972240U, 0x84c61120U,
    0x854a247dU, 0xd2bb3df8U, 0xaef93211U, 0xc729a16dU,
    0x1d9e2f4bU, 0xdcb230f3U, 0x0d8652ecU, 0x77c1e3d0U,
    0x2bb3166cU, 0xa970b999U, 0x119448faU, 0x47e96422U,
    0xa8fc8cc4U, 0xa0f03f1aU, 0x567d2cd8U, 0x223390efU,
    0x87494ec7U, 0xd938d1c1U, 0x8ccaa2feU, 0x98d40b36U,
    0xa6f581cfU, 0xa57ade28U, 0xdab78e26U, 0x3fadbfa4U,
    0x2c3a9de4U, 0x5078920dU, 0x6a5fcc9bU, 0x547e4662U,
    0xf68d13c2U, 0x90d8b8e8U, 0x2e39f75eU, 0x82c3aff5U,
    0x9f5d80beU, 0x69d0937cU, 0x6fd52da9U, 0xcf2512b3U,
    0xc8ac993bU, 0x10187da7U, 0xe89c636eU, 0xdb3bbb7bU,
    0xcd267809U, 0x6e5918f4U, 0xec9ab701U, 0x834f9aa8U,
    0xe6956e65U, 0xaaffe67eU, 0x21bccf08U, 0xef15e8e6U,
    0xbae79bd9U, 0x4a6f36ceU, 0xea9f09d4U, 0x29b07cd6U,
    0x31a4b2afU, 0x2a3f2331U, 0xc6a59430U, 0x35a266c0U,
    0x744ebc37U, 0xfc82caa6U, 0xe090d0b0U, 0x33a7d815U,
    0xf104984aU, 0x41ecdaf7U, 0x7fcd500eU, 0x1791f62fU,
    0x764dd68dU, 0x43efb04dU, 0xccaa4d54U, 0xe49604dfU,
    0x9ed1b5e3U, 0x4c6a881bU, 0xc12c1fb8U, 0x4665517fU,
    0x9d5eea04U, 0x018c355dU, 0xfa877473U, 0xfb0b412eU,
    0xb3671d5aU, 0x92dbd252U, 0xe9105633U, 0x6dd64713U,
    0x9ad7618cU, 0x37a10c7aU, 0x59f8148eU, 0xeb133c89U,
    0xcea927eeU, 0xb761c935U, 0xe11ce5edU, 0x7a47b13cU,
    0x9cd2df59U, 0x55f2733fU, 0x1814ce79U, 0x73c737bfU,
    0x53f7cdeaU, 0x5ffdaa5bU, 0xdf3d6f14U, 0x7844db86U,
    0xcaaff381U, 0xb968c43eU, 0x3824342cU, 0xc2a3405fU,
    0x161dc372U, 0xbce2250cU, 0x283c498bU, 0xff0d9541U,
    0x39a80171U, 0x080cb3deU, 0xd8b4e49cU, 0x6456c190U,
    0x7bcb8461U, 0xd532b670U, 0x486c5c74U, 0xd0b85742U,
},
/* Td[1]: Td[0] rotated right by one byte */
{
    0x5051f4a7U, 0x537e4165U, 0xc31a17a4U, 0x963a275eU,
    0xcb3bab6bU, 0xf11f9d45U, 0xabacfa58U, 0x934be303U,
    0x552030faU, 0xf6ad766dU, 0x9188cc76U, 0x25f5024cU,
    0xfc4fe5d7U, 0xd7c52acbU, 0x80263544U, 0x8fb562a3U,
    0x49deb15aU, 0x6725ba1bU, 0x9845ea0eU, 0xe15dfec0U,
    0x02c32f75U, 0x12814cf0U, 0xa38d4697U, 0xc66bd3f9U,
    0xe7038f5fU, 0x9515929cU, 0xebbf6d7aU, 0xda955259U,
    0x2dd4be83U, 0xd3587421U, 0x2949e069U, 0x448ec9c8U,
    0x6a75c289U, 0x78f48e79U, 0x6b99583eU, 0xdd27b971U,
    0xb6bee14fU, 0x17f088adU, 0x66c920acU, 0xb47dce3aU,
    0x1863df4aU, 0x82e51a31U, 0x60975133U, 0x4562537fU,
    0xe0b16477U, 0x84bb6baeU, 0x1cfe81a0U, 0x94f9082bU,
    0x58704868U, 0x198f45fdU, 0x8794de6cU, 0xb7527bf8U,
    0x23ab73d3U, 0xe2724b02U, 0x57e31f8fU, 0x2a6655abU,
    0x07b2eb28U, 0x032fb5c2U, 0x9a86c57bU, 0xa5d33708U,
    0xf2302887U, 0xb223bfa5U, 0xba02036aU, 0x5ced1682U,
    0x2b8acf1cU, 0x92a779b4U, 0xf0f307f2U, 0xa14e69e2U,
    0xcd65daf4U, 0xd50605beU, 0x1fd13462U, 0x8ac4a6feU,
    0x9d342e53U, 0xa0a2f355U, 0x32058ae1U, 0x75a4f6ebU,
    0x390b83ecU, 0xaa4060efU, 0x065e719fU, 0x51bd6e10U,
    0xf93e218aU, 0x3d96dd06U, 0xaedd3e05U, 0x464de6bdU,
    0xb591548dU, 0x0571c45dU, 0x6f0406d4U, 0xff605015U,
    0x241998fbU, 0x97d6bde9U, 0xcc894043U, 0x7767d99eU,
    0xbdb0e842U, 0x8807898bU, 0x38e7195bU, 0xdb79c8eeU,
    0x47a17c0aU, 0xe97c420fU, 0xc9f8841eU, 0x00000000U,
    0x83098086U, 0x48322bedU, 0xac1e1170U, 0x4e6c5a72U,
    0xfbfd0effU, 0x560f8538U, 0x1e3daed5U, 0x27362d39U,
    0x640a0fd9U, 0x21685ca6U, 0xd19b5b54U, 0x3a24362eU,
    0xb10c0a67U, 0x0f9357e7U, 0xd2b4ee96U, 0x9e1b9b91U,
    0x4f80c0c5U, 0xa261dc20U, 0x695a774bU, 0x161c121aU,
    0x0ae293baU, 0xe5c0a02aU, 0x433c22e0U, 0x1d121b17U,
    0x0b0e090dU, 0xadf28bc7U, 0xb92db6a8U, 0xc8141ea9U,
    0x8557f119U, 0x4caf7507U, 0xbbee99ddU, 0xfda37f60U,
    0x9ff70126U, 0xbc5c72f5U, 0xc544663bU, 0x345bfb7eU,
    0x768b4329U, 0xdccb23c6U, 0x68b6edfcU, 0x63b8e4f1U,
    0xcad731dcU, 0x10426385U, 0x40139722U, 0x2084c611U,
    0x7d854a24U, 0xf8d2bb3dU, 0x11aef932U, 0x6dc729a1U,
    0x4b1d9e2fU, 0xf3dcb230U, 0xec0d8652U, 0xd077c1e3U,
    0x6c2bb316U, 0x99a970b9U, 0xfa119448U, 0x2247e964U,
    0xc4a8fc8cU, 0x1aa0f03fU, 0xd8567d2cU, 0xef223390U,
    0xc787494eU, 0xc1d938d1U, 0xfe8ccaa2U, 0x3698d40bU,
    0xcfa6f581U, 0x28a57adeU, 0x26dab78eU, 0xa43fadbfU,
    0xe42c3a9dU, 0x0d507892U, 0x9b6a5fccU, 0x62547e46U,
    0xc2f68d13U, 0xe890d8b8U, 0x5e2e39f7U, 0xf582c3afU,
    0xbe9f5d80U, 0x7c69d093U, 0xa96fd52dU, 0xb3cf2512U,
    0x3bc8ac99U, 0xa710187dU, 0x6ee89c63U, 0x7bdb3bbbU,
    0x09cd2678U, 0xf46e5918U, 0x01ec9ab7U, 0xa8834f9aU,
    0x65e6956eU, 0x7eaaffe6U, 0x0821bccfU, 0xe6ef15e8U,
    0xd9bae79bU, 0xce4a6f36U, 0xd4ea9f09U, 0xd629b07cU,
    0xaf31a4b2U, 0x312a3f23U, 0x30c6a594U, 0xc035a266U,
    0x37744ebcU, 0xa6fc82caU, 0xb0e090d0U, 0x1533a7d8U,
    0x4af10498U, 0xf741ecdaU, 0x0e7fcd50U, 0x2f1791f6U,
    0x8d764dd6U, 0x4d43efb0U, 0x54ccaa4dU, 0xdfe49604U,
    0xe39ed1b5U, 0x1b4c6a88U, 0xb8c12c1fU, 0x7f466551U,
    0x049d5eeaU, 0x5d018c35U, 0x73fa8774U, 0x2efb0b41U,
    0x5ab3671dU, 0x5292dbd2U, 0x33e91056U, 0x136dd647U,
    0x8c9ad761U, 0x7a37a10cU, 0x8e59f814U, 0x89eb133cU,
    0xeecea927U, 0x35b761c9U, 0xede11ce5U, 0x3c7a47b1U,
    0x599cd2dfU, 0x3f55f273U, 0x791814ceU, 0xbf73c737U,
    0xea53f7cdU, 0x5b5ffdaaU, 0x14df3d6fU, 0x867844dbU,
    0x81caaff3U, 0x3eb968c4U, 0x2c382434U, 0x5fc2a340U,
    0x72161dc3U, 0x0cbce225U, 0x8b283c49U, 0x41ff0d95U,
    0x7139a801U, 0xde080cb3U, 0x9cd8b4e4U, 0x906456c1U,
    0x617bcb84U, 0x70d532b6U, 0x74486c5cU, 0x42d0b857U,
},
/* Td[2]: Td[0] rotated right by two bytes */
{
    0xa75051f4U, 0x65537e41U, 0xa4c31a17U, 0x5e963a27U,
    0x6bcb3babU, 0x45f11f9dU, 0x58abacfaU, 0x03934be3U,
    0xfa552030U, 0x6df6ad76U, 0x769188ccU, 0x4c25f502U,
    0xd7fc4fe5U, 0xcbd7c52aU, 0x44802635U, 0xa38fb562U,
    0x5a49deb1U, 0x1b6725baU, 0x0e9845eaU, 0xc0e15dfeU,
    0x7502c32fU, 0xf012814cU, 0x97a38d46U, 0xf9c66bd3U,
    0x5fe7038fU, 0x9c951592U, 0x7aebbf6dU, 0x59da9552U,
    0x832dd4beU, 0x21d35874U, 0x692949e0U, 0xc8448ec9U,
    0x896a75c2U, 0x7978f48eU, 0x3e6b9958U, 0x71dd27b9U,
    0x4fb6bee1U, 0xad17f088U, 0xac66c920U, 0x3ab47dceU,
    0x4a1863dfU, 0x3182e51aU, 0x33609751U, 0x7f456253U,
    0x77e0b164U, 0xae84bb6bU, 0xa01cfe81U, 0x2b94f908U,
    0x68587048U, 0xfd198f45U, 0x6c8794deU, 0xf8b7527bU,
    0xd323ab73U, 0x02e2724bU, 0x8f57e31fU, 0xab2a6655U,
    0x2807b2ebU, 0xc2032fb5U, 0x7b9a86c5U, 0x08a5d337U,
    0x87f23028U, 0xa5b223bfU, 0x6aba0203U, 0x825ced16U,
    0x1c2b8acfU, 0xb492a779U, 0xf2f0f307U, 0xe2a14e69U,
    0xf4cd65daU, 0xbed50605U, 0x621fd134U, 0xfe8ac4a6U,
    0x539d342eU, 0x55a0a2f3U, 0xe132058aU, 0xeb75a4f6U,
    0xec390b83U, 0xefaa4060U, 0x9f065e71U, 0x1051bd6eU,
    0x8af93e21U, 0x063d96ddU, 0x05aedd3eU, 0xbd464de6U,
    0x8db59154U, 0x5d0571c4U, 0xd46f0406U, 0x15ff6050U,
    0xfb241998U, 0xe997d6bdU, 0x43cc8940U, 0x9e7767d9U,
    0x42bdb0e8U, 0x8b880789U, 0x5b38e719U, 0xeedb79c8U,
    0x0a47a17cU, 0x0fe97c42U, 0x1ec9f884U, 0x00000000U,
    0x86830980U, 0xed48322bU, 0x70ac1e11U, 0x724e6c5aU,
    0xfffbfd0eU, 0x38560f85U, 0xd51e3daeU, 0x3927362dU,
    0xd9640a0fU, 0xa621685cU, 0x54d19b5bU, 0x2e3a2436U,
    0x67b10c0aU, 0xe70f9357U, 0x96d2b4eeU, 0x919e1b9bU,
    0xc54f80c0U, 0x20a261dcU, 0x4b695a77U, 0x1a161c12U,
    0xba0ae293U, 0x2ae5c0a0U, 0xe0433c22U, 0x171d121bU,
    0x0d0b0e09U, 0xc7adf28bU, 0xa8b92db6U, 0xa9c8141eU,
    0x198557f1U, 0x074caf75U, 0xddbbee99U, 0x60fda37fU,
    0x269ff701U, 0xf5bc5c72U, 0x3bc54466U, 0x7e345bfbU,
    0x29768b43U, 0xc6dccb23U, 0xfc68b6edU, 0xf163b8e4U,
    0xdccad731U, 0x85104263U, 0x22401397U, 0x112084c6U,
    0x247d854aU, 0x3df8d2bbU, 0x3211aef9U, 0xa16dc729U,
    0x2f4b1d9eU, 0x30f3dcb2U, 0x52ec0d86U, 0xe3d077c1U,
    0x166c2bb3U, 0xb999a970U, 0x48fa1194U, 0x642247e9U,
    0x8cc4a8fcU, 0x3f1aa0f0U, 0x2cd8567dU, 0x90ef2233U,
    0x4ec78749U, 0xd1c1d938U, 0xa2fe8ccaU, 0x0b3698d4U,
    0x81cfa6f5U, 0xde28a57aU, 0x8e26dab7U, 0xbfa43fadU,
    0x9de42c3aU, 0x920d5078U, 0xcc9b6a5fU, 0x4662547eU,
    0x13c2f68dU, 0xb8e890d8U, 0xf75e2e39U, 0xaff582c3U,
    0x80be9f5dU, 0x937c69d0U, 0x2da96fd5U, 0x12b3cf25U,
    0x993bc8acU, 0x7da71018U, 0x636ee89cU, 0xbb7bdb3bU,
    0x7809cd26U, 0x18f46e59U, 0xb701ec9aU, 0x9aa8834fU,
    0x6e65e695U, 0xe67eaaffU, 0xcf0821bcU, 0xe8e6ef15U,
    0x9bd9bae7U, 0x36ce4a6fU, 0x09d4ea9fU, 0x7cd629b0U,
    0xb2af31a4U, 0x23312a3fU, 0x9430c6a5U, 0x66c035a2U,
    0xbc37744eU, 0xcaa6fc82U, 0xd0b0e090U, 0xd81533a7U,
    0x984af104U, 0xdaf741ecU, 0x500e7fcdU, 0xf62f1791U,
    0xd68d764dU, 0xb04d43efU, 0x4d54ccaaU, 0x04dfe496U,
    0xb5e39ed1U, 0x881b4c6aU, 0x1fb8c12cU, 0x517f4665U,
    0xea049d5eU, 0x355d018cU, 0x7473fa87U, 0x412efb0bU,
    0x1d5ab367U, 0xd25292dbU, 0x5633e910U, 0x47136dd6U,
    0x618c9ad7U, 0x0c7a37a1U, 0x148e59f8U, 0x3c89eb13U,
    0x27eecea9U, 0xc935b761U, 0xe5ede11cU, 0xb13c7a47U,
    0xdf599cd2U, 0x733f55f2U, 0xce791814U, 0x37bf73c7U,
    0xcdea53f7U, 0xaa5b5ffdU, 0x6f14df3dU, 0xdb867844U,
    0xf381caafU, 0xc43eb968U, 0x342c3824U, 0x405fc2a3U,
    0xc372161dU, 0x250cbce2U, 0x498b283cU, 0x9541ff0dU,
    0x017139a8U, 0xb3de080cU, 0xe49cd8b4U, 0xc1906456U,
    0x84617bcbU, 0xb670d532U, 0x5c74486cU, 0x5742d0b8U,
},
/* Td[3]: Td[0] rotated right by three bytes */
{
    0xf4a75051U, 0x4165537eU, 0x17a4c31aU, 0x275e963aU,
    0xab6bcb3bU, 0x9d45f11fU, 0xfa58abacU, 0xe303934bU,
    0x30fa5520U, 0x766df6adU, 0xcc769188U, 0x024c25f5U,
    0xe5d7fc4fU, 0x2acbd7c5U, 0x35448026U, 0x62a38fb5U,
    0xb15a49deU, 0xba1b6725U, 0xea0e9845U, 0xfec0e15dU,
    0x2f7502c3U, 0x4cf01281U, 0x4697a38dU, 0xd3f9c66bU,
    0x8f5fe703U, 0x929c9515U, 0x6d7aebbfU, 0x5259da95U,
    0xbe832dd4U, 0x7421d358U, 0xe0692949U, 0xc9c8448eU,
    0xc2896a75U, 0x8e7978f4U, 0x583e6b99U, 0xb971dd27U,
    0xe14fb6beU, 0x88ad17f0U, 0x20ac66c9U, 0xce3ab47dU,
    0xdf4a1863U, 0x1a3182e5U, 0x51336097U, 0x537f4562U,
    0x6477e0b1U, 0x6bae84bbU, 0x81a01cfeU, 0x082b94f9U,
    0x48685870U, 0x45fd198fU, 0xde6c8794U, 0x7bf8b752U,
    0x73d323abU, 0x4b02e272U, 0x1f8f57e3U, 0x55ab2a66U,
    0xeb2807b2U, 0xb5c2032fU, 0xc57b9a86U, 0x3708a5d3U,
    0x2887f230U, 0xbfa5b223U, 0x036aba02U, 0x16825cedU,
    0xcf1c2b8aU, 0x79b492a7U, 0x07f2f0f3U, 0x69e2a14eU,
    0xdaf4cd65U, 0x05bed506U, 0x34621fd1U, 0xa6fe8ac4U,
    0x2e539d34U, 0xf355a0a2U, 0x8ae13205U, 0xf6eb75a4U,
    0x83ec390bU, 0x60efaa40U, 0x719f065eU, 0x6e1051bdU,
    0x218af93eU, 0xdd063d96U, 0x3e05aeddU, 0xe6bd464dU,
    0x548db591U, 0xc45d0571U, 0x06d46f04U, 0x5015ff60U,
    0x98fb2419U, 0xbde997d6U, 0x4043cc89U, 0xd99e7767U,
    0xe842bdb0U, 0x898b8807U, 0x195b38e7U, 0xc8eedb79U,
    0x7c0a47a1U, 0x420fe97cU, 0x841ec9f8U, 0x00000000U,
    0x80868309U, 0x2bed4832U, 0x1170ac1eU, 0x5a724e6cU,
    0x0efffbfdU, 0x8538560fU, 0xaed51e3dU, 0x2d392736U,
    0x0fd9640aU, 0x5ca62168U, 0x5b54d19bU, 0x362e3a24U,
    0x0a67b10cU, 0x57e70f93U, 0xee96d2b4U, 0x9b919e1bU,
    0xc0c54f80U, 0xdc20a261U, 0x774b695aU, 0x121a161cU,
    0x93ba0ae2U, 0xa02ae5c0U, 0x22e0433cU, 0x1b171d12U,
    0x090d0b0eU, 0x8bc7adf2U, 0xb6a8b92dU, 0x1ea9c814U,
    0xf1198557U, 0x75074cafU, 0x99ddbbeeU, 0x7f60fda3U,
    0x01269ff7U, 0x72f5bc5cU, 0x663bc544U, 0xfb7e345bU,
    0x4329768bU, 0x23c6dccbU, 0xedfc68b6U, 0xe4f163b8U,
    0x31dccad7U, 0x63851042U, 0x97224013U, 0xc6112084U,
    0x4a247d85U, 0xbb3df8d2U, 0xf93211aeU, 0x29a16dc7U,
    0x9e2f4b1dU, 0xb230f3dcU, 0x8652ec0dU, 0xc1e3d077U,
    0xb3166c2bU, 0x70b999a9U, 0x9448fa11U, 0xe9642247U,
    0xfc8cc4a8U, 0xf03f1aa0U, 0x7d2cd856U, 0x3390ef22U,
    0x494ec787U, 0x38d1c1d9U, 0xcaa2fe8cU, 0xd40b3698U,
    0xf581cfa6U, 0x7ade28a5U, 0xb78e26daU, 0xadbfa43fU,
    0x3a9de42cU, 0x78920d50U, 0x5fcc9b6aU, 0x7e466254U,
    0x8d13c2f6U, 0xd8b8e890U, 0x39f75e2eU, 0xc3aff582U,
    0x5d80be9fU, 0xd0937c69U, 0xd52da96fU, 0x2512b3cfU,
    0xac993bc8U, 0x187da710U, 0x9c636ee8U, 0x3bbb7bdbU,
    0x267809cdU, 0x5918f46eU, 0x9ab701ecU, 0x4f9aa883U,
    0x956e65e6U, 0xffe67eaaU, 0xbccf0821U, 0x15e8e6efU,
    0xe79bd9baU, 0x6f36ce4aU, 0x9f09d4eaU, 0xb07cd629U,
    0xa4b2af31U, 0x3f23312aU, 0xa59430c6U, 0xa266c035U,
    0x4ebc3774U, 0x82caa6fcU, 0x90d0b0e0U, 0xa7d81533U,
    0x04984af1U, 0xecdaf741U, 0xcd500e7fU, 0x91f62f17U,
    0x4dd68d76U, 0xefb04d43U, 0xaa4d54ccU, 0x9604dfe4U,
    0xd1b5e39eU, 0x6a881b4cU, 0x2c1fb8c1U, 0x65517f46U,
    0x5eea049dU, 0x8c355d01U, 0x877473faU, 0x0b412efbU,
    0x671d5ab3U, 0xdbd25292U, 0x105633e9U, 0xd647136dU,
    0xd7618c9aU, 0xa10c7a37U, 0xf8148e59U, 0x133c89ebU,
    0xa927eeceU, 0x61c935b7U, 0x1ce5ede1U, 0x47b13c7aU,
    0xd2df599cU, 0xf2733f55U, 0x14ce7918U, 0xc737bf73U,
    0xf7cdea53U, 0xfdaa5b5fU, 0x3d6f14dfU, 0x44db8678U,
    0xaff381caU, 0x68c43eb9U, 0x24342c38U, 0xa3405fc2U,
    0x1dc37216U, 0xe2250cbcU, 0x3c498b28U, 0x0d9541ffU,
    0xa8017139U, 0x0cb3de08U, 0xb4e49cd8U, 0x56c19064U,
    0xcb84617bU, 0x32b670d5U, 0x6c5c7448U, 0xb85742d0U,
}
};
  1428. #endif /* HAVE_AES_DECRYPT */
  1429. #endif /* WOLFSSL_AES_SMALL_TABLES */
  1430. #ifdef HAVE_AES_DECRYPT
  1431. #if (defined(HAVE_AES_CBC) && !defined(WOLFSSL_DEVCRYPTO_CBC)) \
  1432. || defined(WOLFSSL_AES_DIRECT)
/* AES inverse S-box (InvSubBytes), indexed by byte value.  Used by the
 * last decryption round, which applies InvSubBytes without
 * InvMixColumns.  Fixed by FIPS 197 -- do not edit. */
static const FLASH_QUALIFIER byte Td4[256] =
{
    0x52U, 0x09U, 0x6aU, 0xd5U, 0x30U, 0x36U, 0xa5U, 0x38U,
    0xbfU, 0x40U, 0xa3U, 0x9eU, 0x81U, 0xf3U, 0xd7U, 0xfbU,
    0x7cU, 0xe3U, 0x39U, 0x82U, 0x9bU, 0x2fU, 0xffU, 0x87U,
    0x34U, 0x8eU, 0x43U, 0x44U, 0xc4U, 0xdeU, 0xe9U, 0xcbU,
    0x54U, 0x7bU, 0x94U, 0x32U, 0xa6U, 0xc2U, 0x23U, 0x3dU,
    0xeeU, 0x4cU, 0x95U, 0x0bU, 0x42U, 0xfaU, 0xc3U, 0x4eU,
    0x08U, 0x2eU, 0xa1U, 0x66U, 0x28U, 0xd9U, 0x24U, 0xb2U,
    0x76U, 0x5bU, 0xa2U, 0x49U, 0x6dU, 0x8bU, 0xd1U, 0x25U,
    0x72U, 0xf8U, 0xf6U, 0x64U, 0x86U, 0x68U, 0x98U, 0x16U,
    0xd4U, 0xa4U, 0x5cU, 0xccU, 0x5dU, 0x65U, 0xb6U, 0x92U,
    0x6cU, 0x70U, 0x48U, 0x50U, 0xfdU, 0xedU, 0xb9U, 0xdaU,
    0x5eU, 0x15U, 0x46U, 0x57U, 0xa7U, 0x8dU, 0x9dU, 0x84U,
    0x90U, 0xd8U, 0xabU, 0x00U, 0x8cU, 0xbcU, 0xd3U, 0x0aU,
    0xf7U, 0xe4U, 0x58U, 0x05U, 0xb8U, 0xb3U, 0x45U, 0x06U,
    0xd0U, 0x2cU, 0x1eU, 0x8fU, 0xcaU, 0x3fU, 0x0fU, 0x02U,
    0xc1U, 0xafU, 0xbdU, 0x03U, 0x01U, 0x13U, 0x8aU, 0x6bU,
    0x3aU, 0x91U, 0x11U, 0x41U, 0x4fU, 0x67U, 0xdcU, 0xeaU,
    0x97U, 0xf2U, 0xcfU, 0xceU, 0xf0U, 0xb4U, 0xe6U, 0x73U,
    0x96U, 0xacU, 0x74U, 0x22U, 0xe7U, 0xadU, 0x35U, 0x85U,
    0xe2U, 0xf9U, 0x37U, 0xe8U, 0x1cU, 0x75U, 0xdfU, 0x6eU,
    0x47U, 0xf1U, 0x1aU, 0x71U, 0x1dU, 0x29U, 0xc5U, 0x89U,
    0x6fU, 0xb7U, 0x62U, 0x0eU, 0xaaU, 0x18U, 0xbeU, 0x1bU,
    0xfcU, 0x56U, 0x3eU, 0x4bU, 0xc6U, 0xd2U, 0x79U, 0x20U,
    0x9aU, 0xdbU, 0xc0U, 0xfeU, 0x78U, 0xcdU, 0x5aU, 0xf4U,
    0x1fU, 0xddU, 0xa8U, 0x33U, 0x88U, 0x07U, 0xc7U, 0x31U,
    0xb1U, 0x12U, 0x10U, 0x59U, 0x27U, 0x80U, 0xecU, 0x5fU,
    0x60U, 0x51U, 0x7fU, 0xa9U, 0x19U, 0xb5U, 0x4aU, 0x0dU,
    0x2dU, 0xe5U, 0x7aU, 0x9fU, 0x93U, 0xc9U, 0x9cU, 0xefU,
    0xa0U, 0xe0U, 0x3bU, 0x4dU, 0xaeU, 0x2aU, 0xf5U, 0xb0U,
    0xc8U, 0xebU, 0xbbU, 0x3cU, 0x83U, 0x53U, 0x99U, 0x61U,
    0x17U, 0x2bU, 0x04U, 0x7eU, 0xbaU, 0x77U, 0xd6U, 0x26U,
    0xe1U, 0x69U, 0x14U, 0x63U, 0x55U, 0x21U, 0x0cU, 0x7dU,
};
  1468. #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT */
  1469. #endif /* HAVE_AES_DECRYPT */
  1470. #define GETBYTE(x, y) (word32)((byte)((x) >> (8 * (y))))
  1471. #ifdef WOLFSSL_AES_SMALL_TABLES
/* AES forward S-box (SubBytes), indexed by byte value.  Used by the
 * small-tables build instead of the combined Te tables; MixColumns is
 * then computed arithmetically (see col_mul).  Fixed by FIPS 197. */
static const byte Tsbox[256] = {
    0x63U, 0x7cU, 0x77U, 0x7bU, 0xf2U, 0x6bU, 0x6fU, 0xc5U,
    0x30U, 0x01U, 0x67U, 0x2bU, 0xfeU, 0xd7U, 0xabU, 0x76U,
    0xcaU, 0x82U, 0xc9U, 0x7dU, 0xfaU, 0x59U, 0x47U, 0xf0U,
    0xadU, 0xd4U, 0xa2U, 0xafU, 0x9cU, 0xa4U, 0x72U, 0xc0U,
    0xb7U, 0xfdU, 0x93U, 0x26U, 0x36U, 0x3fU, 0xf7U, 0xccU,
    0x34U, 0xa5U, 0xe5U, 0xf1U, 0x71U, 0xd8U, 0x31U, 0x15U,
    0x04U, 0xc7U, 0x23U, 0xc3U, 0x18U, 0x96U, 0x05U, 0x9aU,
    0x07U, 0x12U, 0x80U, 0xe2U, 0xebU, 0x27U, 0xb2U, 0x75U,
    0x09U, 0x83U, 0x2cU, 0x1aU, 0x1bU, 0x6eU, 0x5aU, 0xa0U,
    0x52U, 0x3bU, 0xd6U, 0xb3U, 0x29U, 0xe3U, 0x2fU, 0x84U,
    0x53U, 0xd1U, 0x00U, 0xedU, 0x20U, 0xfcU, 0xb1U, 0x5bU,
    0x6aU, 0xcbU, 0xbeU, 0x39U, 0x4aU, 0x4cU, 0x58U, 0xcfU,
    0xd0U, 0xefU, 0xaaU, 0xfbU, 0x43U, 0x4dU, 0x33U, 0x85U,
    0x45U, 0xf9U, 0x02U, 0x7fU, 0x50U, 0x3cU, 0x9fU, 0xa8U,
    0x51U, 0xa3U, 0x40U, 0x8fU, 0x92U, 0x9dU, 0x38U, 0xf5U,
    0xbcU, 0xb6U, 0xdaU, 0x21U, 0x10U, 0xffU, 0xf3U, 0xd2U,
    0xcdU, 0x0cU, 0x13U, 0xecU, 0x5fU, 0x97U, 0x44U, 0x17U,
    0xc4U, 0xa7U, 0x7eU, 0x3dU, 0x64U, 0x5dU, 0x19U, 0x73U,
    0x60U, 0x81U, 0x4fU, 0xdcU, 0x22U, 0x2aU, 0x90U, 0x88U,
    0x46U, 0xeeU, 0xb8U, 0x14U, 0xdeU, 0x5eU, 0x0bU, 0xdbU,
    0xe0U, 0x32U, 0x3aU, 0x0aU, 0x49U, 0x06U, 0x24U, 0x5cU,
    0xc2U, 0xd3U, 0xacU, 0x62U, 0x91U, 0x95U, 0xe4U, 0x79U,
    0xe7U, 0xc8U, 0x37U, 0x6dU, 0x8dU, 0xd5U, 0x4eU, 0xa9U,
    0x6cU, 0x56U, 0xf4U, 0xeaU, 0x65U, 0x7aU, 0xaeU, 0x08U,
    0xbaU, 0x78U, 0x25U, 0x2eU, 0x1cU, 0xa6U, 0xb4U, 0xc6U,
    0xe8U, 0xddU, 0x74U, 0x1fU, 0x4bU, 0xbdU, 0x8bU, 0x8aU,
    0x70U, 0x3eU, 0xb5U, 0x66U, 0x48U, 0x03U, 0xf6U, 0x0eU,
    0x61U, 0x35U, 0x57U, 0xb9U, 0x86U, 0xc1U, 0x1dU, 0x9eU,
    0xe1U, 0xf8U, 0x98U, 0x11U, 0x69U, 0xd9U, 0x8eU, 0x94U,
    0x9bU, 0x1eU, 0x87U, 0xe9U, 0xceU, 0x55U, 0x28U, 0xdfU,
    0x8cU, 0xa1U, 0x89U, 0x0dU, 0xbfU, 0xe6U, 0x42U, 0x68U,
    0x41U, 0x99U, 0x2dU, 0x0fU, 0xb0U, 0x54U, 0xbbU, 0x16U
};
  1506. #define AES_XTIME(x) ((byte)((byte)((x) << 1) ^ ((0 - ((x) >> 7)) & 0x1b)))
  1507. static WARN_UNUSED_RESULT word32 col_mul(
  1508. word32 t, int i2, int i3, int ia, int ib)
  1509. {
  1510. byte t3 = GETBYTE(t, i3);
  1511. byte tm = AES_XTIME(GETBYTE(t, i2) ^ t3);
  1512. return GETBYTE(t, ia) ^ GETBYTE(t, ib) ^ t3 ^ tm;
  1513. }
  1514. #if defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT)
  1515. static WARN_UNUSED_RESULT word32 inv_col_mul(
  1516. word32 t, int i9, int ib, int id, int ie)
  1517. {
  1518. byte t9 = GETBYTE(t, i9);
  1519. byte tb = GETBYTE(t, ib);
  1520. byte td = GETBYTE(t, id);
  1521. byte te = GETBYTE(t, ie);
  1522. byte t0 = t9 ^ tb ^ td;
  1523. return t0 ^ AES_XTIME(AES_XTIME(AES_XTIME(t0 ^ te) ^ td ^ te) ^ tb ^ te);
  1524. }
  1525. #endif
  1526. #endif
  1527. #if defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT) || \
  1528. defined(HAVE_AESCCM) || defined(HAVE_AESGCM)
  1529. #ifndef WC_CACHE_LINE_SZ
  1530. #if defined(__x86_64__) || defined(_M_X64) || \
  1531. (defined(__ILP32__) && (__ILP32__ >= 1))
  1532. #define WC_CACHE_LINE_SZ 64
  1533. #else
  1534. /* default cache line size */
  1535. #define WC_CACHE_LINE_SZ 32
  1536. #endif
  1537. #endif
  1538. #ifndef WC_NO_CACHE_RESISTANT
  1539. #if defined(__riscv) && !defined(WOLFSSL_AES_TOUCH_LINES)
  1540. #define WOLFSSL_AES_TOUCH_LINES
  1541. #endif
  1542. #ifndef WOLFSSL_AES_SMALL_TABLES
/* Load all 4 Te tables into the data cache by reading one word per
 * cache line.  Warming every line before the table-driven rounds makes
 * subsequent key/data-dependent lookups less observable via cache
 * timing.  Returns a value (always 0, since x starts at 0 and is only
 * ANDed) intended to be folded into live data by the caller so the
 * loads are not optimized away. */
static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchTe(void)
{
#ifndef WOLFSSL_AES_TOUCH_LINES
    word32 x = 0;
    int i,j;
    for (i = 0; i < 4; i++) {
        /* 256 elements, each one is 4 bytes */
        for (j = 0; j < 256; j += WC_CACHE_LINE_SZ/4) {
            x &= Te[i][j]; /* x stays 0 -- the load itself is the point */
        }
    }
    return x;
#else
    /* WOLFSSL_AES_TOUCH_LINES builds read every cache line on each
     * GetTable() lookup, so no separate prefetch pass is needed. */
    return 0;
#endif
}
  1560. #else
/* Load the Tsbox table into the data cache by reading one byte per
 * cache line (small-tables build counterpart of PreFetchTe).  Returns
 * a value (always 0; x starts at 0 and is only ANDed) for the caller
 * to consume so the loads are not optimized away.
 * NOTE(review): the stride is WC_CACHE_LINE_SZ/4 as for the word32
 * tables even though Tsbox entries are 1 byte, so 4 bytes per line are
 * touched -- harmless, just redundant reads. */
static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchSBox(void)
{
#ifndef WOLFSSL_AES_TOUCH_LINES
    word32 x = 0;
    int i;
    for (i = 0; i < 256; i += WC_CACHE_LINE_SZ/4) {
        x &= Tsbox[i];
    }
    return x;
#else
    return 0;
#endif
}
  1575. #endif
  1576. #endif
  1577. #ifdef WOLFSSL_AES_TOUCH_LINES
  1578. #if WC_CACHE_LINE_SZ == 128
  1579. #define WC_CACHE_LINE_BITS 5
  1580. #define WC_CACHE_LINE_MASK_HI 0xe0
  1581. #define WC_CACHE_LINE_MASK_LO 0x1f
  1582. #define WC_CACHE_LINE_ADD 0x20
  1583. #elif WC_CACHE_LINE_SZ == 64
  1584. #define WC_CACHE_LINE_BITS 4
  1585. #define WC_CACHE_LINE_MASK_HI 0xf0
  1586. #define WC_CACHE_LINE_MASK_LO 0x0f
  1587. #define WC_CACHE_LINE_ADD 0x10
  1588. #elif WC_CACHE_LINE_SZ == 32
  1589. #define WC_CACHE_LINE_BITS 3
  1590. #define WC_CACHE_LINE_MASK_HI 0xf8
  1591. #define WC_CACHE_LINE_MASK_LO 0x07
  1592. #define WC_CACHE_LINE_ADD 0x08
  1593. #elif WC_CACHE_LINE_SZ = 16
  1594. #define WC_CACHE_LINE_BITS 2
  1595. #define WC_CACHE_LINE_MASK_HI 0xfc
  1596. #define WC_CACHE_LINE_MASK_LO 0x03
  1597. #define WC_CACHE_LINE_ADD 0x04
  1598. #else
  1599. #error Cache line size not supported
  1600. #endif
  1601. #ifndef WOLFSSL_AES_SMALL_TABLES
/* Cache-attack-resistant lookup of t[o] in a 256-entry word32 table.
 * One entry from EVERY cache line is read so the access pattern is
 * independent of o; the wanted entry is selected with a branch-free
 * mask: hi counts down by one line per step, and
 * ((word32)hi - 0x01) >> 31 is 1 exactly when hi == 0 (the unsigned
 * subtraction wraps), giving an all-ones mask only on the target line. */
static word32 GetTable(const word32* t, byte o)
{
#if WC_CACHE_LINE_SZ == 64
    /* Fully unrolled for the common 64-byte line (16 word32s/line). */
    word32 e;
    byte hi = o & 0xf0;  /* cache-line part of the index */
    byte lo = o & 0x0f;  /* offset within the line */
    e = t[lo + 0x00] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x10] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x20] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x30] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x40] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x50] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x60] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x70] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x80] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x90] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xa0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xb0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xc0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xd0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xe0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xf0] & ((word32)0 - (((word32)hi - 0x01) >> 31));
    return e;
#else
    /* Generic loop for other line sizes; same masking scheme. */
    word32 e = 0;
    int i;
    byte hi = o & WC_CACHE_LINE_MASK_HI;
    byte lo = o & WC_CACHE_LINE_MASK_LO;
    for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
        e |= t[lo + i] & ((word32)0 - (((word32)hi - 0x01) >> 31));
        hi -= WC_CACHE_LINE_ADD;
    }
    return e;
#endif
}
  1637. #endif
  1638. #ifdef WOLFSSL_AES_SMALL_TABLES
/* Cache-attack-resistant lookup of t[o] in a 256-entry byte table
 * (small-tables build).  Same branch-free select as GetTable(): every
 * cache line is read and ((word32)hi - 0x01) >> 31 is an all-ones mask
 * only when hi has counted down to 0, i.e. on the wanted line. */
static byte GetTable8(const byte* t, byte o)
{
#if WC_CACHE_LINE_SZ == 64
    /* Unrolled for 64-byte cache lines. */
    byte e;
    byte hi = o & 0xf0;  /* cache-line part of the index */
    byte lo = o & 0x0f;  /* offset within the line */
    e = t[lo + 0x00] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x10] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x20] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x30] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x40] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x50] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x60] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x70] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x80] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0x90] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xa0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xb0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xc0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xd0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xe0] & ((word32)0 - (((word32)hi - 0x01) >> 31)); hi -= 0x10;
    e |= t[lo + 0xf0] & ((word32)0 - (((word32)hi - 0x01) >> 31));
    return e;
#else
    /* Generic loop for other line sizes; same masking scheme. */
    byte e = 0;
    int i;
    byte hi = o & WC_CACHE_LINE_MASK_HI;
    byte lo = o & WC_CACHE_LINE_MASK_LO;
    for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
        e |= t[lo + i] & ((word32)0 - (((word32)hi - 0x01) >> 31));
        hi -= WC_CACHE_LINE_ADD;
    }
    return e;
#endif
}
  1674. #endif
  1675. #ifndef WOLFSSL_AES_SMALL_TABLES
/* Cache-attack-resistant lookup of four entries t[o0..o3] from the same
 * 256-entry word32 table in one pass over all cache lines; results are
 * stored through t0..t3.  Each hiN counts down one cache line per
 * iteration and ((word32)hiN - 0x01) >> 31 is an all-ones mask only
 * when hiN == 0, i.e. on that lookup's target line. */
static void GetTable_Multi(const word32* t, word32* t0, byte o0,
    word32* t1, byte o1, word32* t2, byte o2, word32* t3, byte o3)
{
    word32 e0 = 0;
    word32 e1 = 0;
    word32 e2 = 0;
    word32 e3 = 0;
    byte hi0 = o0 & WC_CACHE_LINE_MASK_HI;
    byte lo0 = o0 & WC_CACHE_LINE_MASK_LO;
    byte hi1 = o1 & WC_CACHE_LINE_MASK_HI;
    byte lo1 = o1 & WC_CACHE_LINE_MASK_LO;
    byte hi2 = o2 & WC_CACHE_LINE_MASK_HI;
    byte lo2 = o2 & WC_CACHE_LINE_MASK_LO;
    byte hi3 = o3 & WC_CACHE_LINE_MASK_HI;
    byte lo3 = o3 & WC_CACHE_LINE_MASK_LO;
    int i;
    for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
        e0 |= t[lo0 + i] & ((word32)0 - (((word32)hi0 - 0x01) >> 31));
        hi0 -= WC_CACHE_LINE_ADD;
        e1 |= t[lo1 + i] & ((word32)0 - (((word32)hi1 - 0x01) >> 31));
        hi1 -= WC_CACHE_LINE_ADD;
        e2 |= t[lo2 + i] & ((word32)0 - (((word32)hi2 - 0x01) >> 31));
        hi2 -= WC_CACHE_LINE_ADD;
        e3 |= t[lo3 + i] & ((word32)0 - (((word32)hi3 - 0x01) >> 31));
        hi3 -= WC_CACHE_LINE_ADD;
    }
    *t0 = e0;
    *t1 = e1;
    *t2 = e2;
    *t3 = e3;
}
  1707. static void XorTable_Multi(const word32* t, word32* t0, byte o0,
  1708. word32* t1, byte o1, word32* t2, byte o2, word32* t3, byte o3)
  1709. {
  1710. word32 e0 = 0;
  1711. word32 e1 = 0;
  1712. word32 e2 = 0;
  1713. word32 e3 = 0;
  1714. byte hi0 = o0 & 0xf0;
  1715. byte lo0 = o0 & 0x0f;
  1716. byte hi1 = o1 & 0xf0;
  1717. byte lo1 = o1 & 0x0f;
  1718. byte hi2 = o2 & 0xf0;
  1719. byte lo2 = o2 & 0x0f;
  1720. byte hi3 = o3 & 0xf0;
  1721. byte lo3 = o3 & 0x0f;
  1722. int i;
  1723. for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
  1724. e0 |= t[lo0 + i] & ((word32)0 - (((word32)hi0 - 0x01) >> 31));
  1725. hi0 -= WC_CACHE_LINE_ADD;
  1726. e1 |= t[lo1 + i] & ((word32)0 - (((word32)hi1 - 0x01) >> 31));
  1727. hi1 -= WC_CACHE_LINE_ADD;
  1728. e2 |= t[lo2 + i] & ((word32)0 - (((word32)hi2 - 0x01) >> 31));
  1729. hi2 -= WC_CACHE_LINE_ADD;
  1730. e3 |= t[lo3 + i] & ((word32)0 - (((word32)hi3 - 0x01) >> 31));
  1731. hi3 -= WC_CACHE_LINE_ADD;
  1732. }
  1733. *t0 ^= e0;
  1734. *t1 ^= e1;
  1735. *t2 ^= e2;
  1736. *t3 ^= e3;
  1737. }
/* Cache-attack-resistant lookup of four bytes t[o0..o3] from a
 * 256-entry byte table (small-tables build), packed big-endian into
 * one word32 (o0 -> bits 31..24, ..., o3 -> bits 7..0).  Same masked
 * full-table scan as GetTable_Multi: each hiN counts down one cache
 * line per iteration and the mask is all-ones only when hiN == 0. */
static word32 GetTable8_4(const byte* t, byte o0, byte o1, byte o2, byte o3)
{
    word32 e = 0;
    int i;
    byte hi0 = o0 & WC_CACHE_LINE_MASK_HI;
    byte lo0 = o0 & WC_CACHE_LINE_MASK_LO;
    byte hi1 = o1 & WC_CACHE_LINE_MASK_HI;
    byte lo1 = o1 & WC_CACHE_LINE_MASK_LO;
    byte hi2 = o2 & WC_CACHE_LINE_MASK_HI;
    byte lo2 = o2 & WC_CACHE_LINE_MASK_LO;
    byte hi3 = o3 & WC_CACHE_LINE_MASK_HI;
    byte lo3 = o3 & WC_CACHE_LINE_MASK_LO;
    for (i = 0; i < 256; i += (1 << WC_CACHE_LINE_BITS)) {
        e |= (word32)(t[lo0 + i] & ((word32)0 - (((word32)hi0 - 0x01) >> 31)))
            << 24;
        hi0 -= WC_CACHE_LINE_ADD;
        e |= (word32)(t[lo1 + i] & ((word32)0 - (((word32)hi1 - 0x01) >> 31)))
            << 16;
        hi1 -= WC_CACHE_LINE_ADD;
        e |= (word32)(t[lo2 + i] & ((word32)0 - (((word32)hi2 - 0x01) >> 31)))
            << 8;
        hi2 -= WC_CACHE_LINE_ADD;
        e |= (word32)(t[lo3 + i] & ((word32)0 - (((word32)hi3 - 0x01) >> 31)))
            << 0;
        hi3 -= WC_CACHE_LINE_ADD;
    }
    return e;
}
  1766. #endif
  1767. #else
  1768. #define GetTable(t, o) t[o]
  1769. #define GetTable8(t, o) t[o]
  1770. #define GetTable_Multi(t, t0, o0, t1, o1, t2, o2, t3, o3) \
  1771. *(t0) = (t)[o0]; *(t1) = (t)[o1]; *(t2) = (t)[o2]; *(t3) = (t)[o3]
  1772. #define XorTable_Multi(t, t0, o0, t1, o1, t2, o2, t3, o3) \
  1773. *(t0) ^= (t)[o0]; *(t1) ^= (t)[o1]; *(t2) ^= (t)[o2]; *(t3) ^= (t)[o3]
  1774. #define GetTable8_4(t, o0, o1, o2, o3) \
  1775. (((word32)(t)[o0] << 24) | ((word32)(t)[o1] << 16) | \
  1776. ((word32)(t)[o2] << 8) | ((word32)(t)[o3] << 0))
  1777. #endif
/* Software AES - ECB Encrypt */
/* Encrypt a single 16-byte block.
 *
 * Dispatches to a hardware path (AES-NI, SCE, IMXRT DCP, SE050) when one
 * is compiled in and usable; otherwise runs the table-based software
 * implementation below.
 *
 * aes      - AES context with an expanded key schedule in aes->key
 * inBlock  - 16 bytes of plaintext
 * outBlock - receives 16 bytes of ciphertext
 *
 * Returns 0 on success, KEYUSAGE_E when the round count is invalid,
 * or MEMORY_E / BAD_ALIGN_E from the AES-NI alignment workaround.
 */
static WARN_UNUSED_RESULT int wc_AesEncrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    word32 s0, s1, s2, s3;
    word32 t0, t1, t2, t3;
    /* Rounds are processed two at a time, so keep half the round count:
     * 10/12/14 rounds -> r = 5/6/7. */
    word32 r = aes->rounds >> 1;
    const word32* rk = aes->key;

    /* Reject a context whose key schedule was never set up. */
    if (r > 7 || r == 0) {
        WOLFSSL_ERROR_VERBOSE(KEYUSAGE_E);
        return KEYUSAGE_E;
    }

#ifdef WOLFSSL_AESNI
    if (haveAESNI && aes->use_aesni) {
#ifdef DEBUG_AESNI
        printf("about to aes encrypt\n");
        printf("in = %p\n", inBlock);
        printf("out = %p\n", outBlock);
        printf("aes->key = %p\n", aes->key);
        printf("aes->rounds = %d\n", aes->rounds);
        printf("sz = %d\n", AES_BLOCK_SIZE);
#endif

        /* check alignment, decrypt doesn't need alignment */
        if ((wc_ptr_t)inBlock % AESNI_ALIGN) {
#ifndef NO_WOLFSSL_ALLOC_ALIGN
            /* Stage the block through an AESNI_ALIGN-aligned scratch
             * buffer: copy in, encrypt in place, copy out. */
            byte* tmp = (byte*)XMALLOC(AES_BLOCK_SIZE + AESNI_ALIGN, aes->heap,
                                       DYNAMIC_TYPE_TMP_BUFFER);
            byte* tmp_align;
            if (tmp == NULL)
                return MEMORY_E;

            tmp_align = tmp + (AESNI_ALIGN - ((wc_ptr_t)tmp % AESNI_ALIGN));

            XMEMCPY(tmp_align, inBlock, AES_BLOCK_SIZE);
            AES_ECB_encrypt(tmp_align, tmp_align, AES_BLOCK_SIZE,
                            (byte*)aes->key, (int)aes->rounds);
            XMEMCPY(outBlock, tmp_align, AES_BLOCK_SIZE);
            XFREE(tmp, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
            return 0;
#else
            /* No aligned-allocation fallback available in this build. */
            WOLFSSL_MSG("AES-ECB encrypt with bad alignment");
            WOLFSSL_ERROR_VERBOSE(BAD_ALIGN_E);
            return BAD_ALIGN_E;
#endif
        }

        AES_ECB_encrypt(inBlock, outBlock, AES_BLOCK_SIZE, (byte*)aes->key,
                        (int)aes->rounds);
        return 0;
    }
    else {
#ifdef DEBUG_AESNI
        printf("Skipping AES-NI\n");
#endif
    }
#endif

#if defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
    AES_ECB_encrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
    return 0;
#endif

#if defined(WOLFSSL_IMXRT_DCP)
    /* DCP hardware path handles AES-128 only; other sizes fall through
     * to the software implementation. */
    if (aes->keylen == 16) {
        DCPAesEcbEncrypt(aes, outBlock, inBlock, AES_BLOCK_SIZE);
        return 0;
    }
#endif

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    if (aes->useSWCrypt == 0) {
        return se050_aes_crypt(aes, inBlock, outBlock, AES_BLOCK_SIZE,
                               AES_ENCRYPTION, kAlgorithm_SSS_AES_ECB);
    }
#endif

    /*
     * map byte array block to cipher state
     * and add initial round key:
     */
    XMEMCPY(&s0, inBlock,                  sizeof(s0));
    XMEMCPY(&s1, inBlock +     sizeof(s0), sizeof(s1));
    XMEMCPY(&s2, inBlock + 2 * sizeof(s0), sizeof(s2));
    XMEMCPY(&s3, inBlock + 3 * sizeof(s0), sizeof(s3));

#ifdef LITTLE_ENDIAN_ORDER
    /* The tables assume big-endian word layout. */
    s0 = ByteReverseWord32(s0);
    s1 = ByteReverseWord32(s1);
    s2 = ByteReverseWord32(s2);
    s3 = ByteReverseWord32(s3);
#endif

    /* AddRoundKey */
    s0 ^= rk[0];
    s1 ^= rk[1];
    s2 ^= rk[2];
    s3 ^= rk[3];

#ifndef WOLFSSL_AES_SMALL_TABLES
#ifndef WC_NO_CACHE_RESISTANT
    /* Touch the Te tables before any secret-dependent lookup; the OR
     * into s0 only prevents the prefetch loads from being elided. */
    s0 |= PreFetchTe();
#endif

#ifndef WOLFSSL_AES_TOUCH_LINES
/* One full round: reads state s0..s3, writes t0..t3, keys rk[o+4..o+7]. */
#define ENC_ROUND_T_S(o) \
    t0 = GetTable(Te[0], GETBYTE(s0, 3)) ^ GetTable(Te[1], GETBYTE(s1, 2)) ^ \
         GetTable(Te[2], GETBYTE(s2, 1)) ^ GetTable(Te[3], GETBYTE(s3, 0)) ^ \
         rk[(o)+4]; \
    t1 = GetTable(Te[0], GETBYTE(s1, 3)) ^ GetTable(Te[1], GETBYTE(s2, 2)) ^ \
         GetTable(Te[2], GETBYTE(s3, 1)) ^ GetTable(Te[3], GETBYTE(s0, 0)) ^ \
         rk[(o)+5]; \
    t2 = GetTable(Te[0], GETBYTE(s2, 3)) ^ GetTable(Te[1], GETBYTE(s3, 2)) ^ \
         GetTable(Te[2], GETBYTE(s0, 1)) ^ GetTable(Te[3], GETBYTE(s1, 0)) ^ \
         rk[(o)+6]; \
    t3 = GetTable(Te[0], GETBYTE(s3, 3)) ^ GetTable(Te[1], GETBYTE(s0, 2)) ^ \
         GetTable(Te[2], GETBYTE(s1, 1)) ^ GetTable(Te[3], GETBYTE(s2, 0)) ^ \
         rk[(o)+7]
/* One full round in the other direction: t0..t3 -> s0..s3, keys rk[o+0..o+3]. */
#define ENC_ROUND_S_T(o) \
    s0 = GetTable(Te[0], GETBYTE(t0, 3)) ^ GetTable(Te[1], GETBYTE(t1, 2)) ^ \
         GetTable(Te[2], GETBYTE(t2, 1)) ^ GetTable(Te[3], GETBYTE(t3, 0)) ^ \
         rk[(o)+0]; \
    s1 = GetTable(Te[0], GETBYTE(t1, 3)) ^ GetTable(Te[1], GETBYTE(t2, 2)) ^ \
         GetTable(Te[2], GETBYTE(t3, 1)) ^ GetTable(Te[3], GETBYTE(t0, 0)) ^ \
         rk[(o)+1]; \
    s2 = GetTable(Te[0], GETBYTE(t2, 3)) ^ GetTable(Te[1], GETBYTE(t3, 2)) ^ \
         GetTable(Te[2], GETBYTE(t0, 1)) ^ GetTable(Te[3], GETBYTE(t1, 0)) ^ \
         rk[(o)+2]; \
    s3 = GetTable(Te[0], GETBYTE(t3, 3)) ^ GetTable(Te[1], GETBYTE(t0, 2)) ^ \
         GetTable(Te[2], GETBYTE(t1, 1)) ^ GetTable(Te[3], GETBYTE(t2, 0)) ^ \
         rk[(o)+3]
#else
/* WOLFSSL_AES_TOUCH_LINES variant: same rounds, but each table is read
 * for all four state words in one pass via the *_Multi accessors. */
#define ENC_ROUND_T_S(o) \
    GetTable_Multi(Te[0], &t0, GETBYTE(s0, 3), &t1, GETBYTE(s1, 3), \
                          &t2, GETBYTE(s2, 3), &t3, GETBYTE(s3, 3)); \
    XorTable_Multi(Te[1], &t0, GETBYTE(s1, 2), &t1, GETBYTE(s2, 2), \
                          &t2, GETBYTE(s3, 2), &t3, GETBYTE(s0, 2)); \
    XorTable_Multi(Te[2], &t0, GETBYTE(s2, 1), &t1, GETBYTE(s3, 1), \
                          &t2, GETBYTE(s0, 1), &t3, GETBYTE(s1, 1)); \
    XorTable_Multi(Te[3], &t0, GETBYTE(s3, 0), &t1, GETBYTE(s0, 0), \
                          &t2, GETBYTE(s1, 0), &t3, GETBYTE(s2, 0)); \
    t0 ^= rk[(o)+4]; t1 ^= rk[(o)+5]; t2 ^= rk[(o)+6]; t3 ^= rk[(o)+7];
#define ENC_ROUND_S_T(o) \
    GetTable_Multi(Te[0], &s0, GETBYTE(t0, 3), &s1, GETBYTE(t1, 3), \
                          &s2, GETBYTE(t2, 3), &s3, GETBYTE(t3, 3)); \
    XorTable_Multi(Te[1], &s0, GETBYTE(t1, 2), &s1, GETBYTE(t2, 2), \
                          &s2, GETBYTE(t3, 2), &s3, GETBYTE(t0, 2)); \
    XorTable_Multi(Te[2], &s0, GETBYTE(t2, 1), &s1, GETBYTE(t3, 1), \
                          &s2, GETBYTE(t0, 1), &s3, GETBYTE(t1, 1)); \
    XorTable_Multi(Te[3], &s0, GETBYTE(t3, 0), &s1, GETBYTE(t0, 0), \
                          &s2, GETBYTE(t1, 0), &s3, GETBYTE(t2, 0)); \
    s0 ^= rk[(o)+0]; s1 ^= rk[(o)+1]; s2 ^= rk[(o)+2]; s3 ^= rk[(o)+3];
#endif

#ifndef WOLFSSL_AES_NO_UNROLL
    /* Unroll the loop. */
    /* r is 5/6/7: run 9/11/13 full rounds here; the final round (no
     * MixColumns) is applied below. */
                       ENC_ROUND_T_S( 0);
    ENC_ROUND_S_T( 8); ENC_ROUND_T_S( 8);
    ENC_ROUND_S_T(16); ENC_ROUND_T_S(16);
    ENC_ROUND_S_T(24); ENC_ROUND_T_S(24);
    ENC_ROUND_S_T(32); ENC_ROUND_T_S(32);
    if (r > 5) {
        ENC_ROUND_S_T(40); ENC_ROUND_T_S(40);
        if (r > 6) {
            ENC_ROUND_S_T(48); ENC_ROUND_T_S(48);
        }
    }
    rk += r * 8;
#else
    /*
     * Nr - 1 full rounds:
     */
    for (;;) {
        ENC_ROUND_T_S(0);
        rk += 8;
        if (--r == 0) {
            break;
        }
        ENC_ROUND_S_T(0);
    }
#endif

    /*
     * apply last round and
     * map cipher state to byte array block:
     */
#ifndef WOLFSSL_AES_TOUCH_LINES
    /* Final round: masked Te lookups select one byte lane per entry
     * (no MixColumns), then AddRoundKey. */
    s0 =
        (GetTable(Te[2], GETBYTE(t0, 3)) & 0xff000000) ^
        (GetTable(Te[3], GETBYTE(t1, 2)) & 0x00ff0000) ^
        (GetTable(Te[0], GETBYTE(t2, 1)) & 0x0000ff00) ^
        (GetTable(Te[1], GETBYTE(t3, 0)) & 0x000000ff) ^
        rk[0];
    s1 =
        (GetTable(Te[2], GETBYTE(t1, 3)) & 0xff000000) ^
        (GetTable(Te[3], GETBYTE(t2, 2)) & 0x00ff0000) ^
        (GetTable(Te[0], GETBYTE(t3, 1)) & 0x0000ff00) ^
        (GetTable(Te[1], GETBYTE(t0, 0)) & 0x000000ff) ^
        rk[1];
    s2 =
        (GetTable(Te[2], GETBYTE(t2, 3)) & 0xff000000) ^
        (GetTable(Te[3], GETBYTE(t3, 2)) & 0x00ff0000) ^
        (GetTable(Te[0], GETBYTE(t0, 1)) & 0x0000ff00) ^
        (GetTable(Te[1], GETBYTE(t1, 0)) & 0x000000ff) ^
        rk[2];
    s3 =
        (GetTable(Te[2], GETBYTE(t3, 3)) & 0xff000000) ^
        (GetTable(Te[3], GETBYTE(t0, 2)) & 0x00ff0000) ^
        (GetTable(Te[0], GETBYTE(t1, 1)) & 0x0000ff00) ^
        (GetTable(Te[1], GETBYTE(t2, 0)) & 0x000000ff) ^
        rk[3];
#else
    {
        /* Touch-lines final round: one pass per table, accumulating one
         * byte lane of each state word per pass. */
        word32 u0;
        word32 u1;
        word32 u2;
        word32 u3;

        s0 = rk[0]; s1 = rk[1]; s2 = rk[2]; s3 = rk[3];
        GetTable_Multi(Te[2], &u0, GETBYTE(t0, 3), &u1, GETBYTE(t1, 3),
                              &u2, GETBYTE(t2, 3), &u3, GETBYTE(t3, 3));
        s0 ^= u0 & 0xff000000; s1 ^= u1 & 0xff000000;
        s2 ^= u2 & 0xff000000; s3 ^= u3 & 0xff000000;
        GetTable_Multi(Te[3], &u0, GETBYTE(t1, 2), &u1, GETBYTE(t2, 2),
                              &u2, GETBYTE(t3, 2), &u3, GETBYTE(t0, 2));
        s0 ^= u0 & 0x00ff0000; s1 ^= u1 & 0x00ff0000;
        s2 ^= u2 & 0x00ff0000; s3 ^= u3 & 0x00ff0000;
        GetTable_Multi(Te[0], &u0, GETBYTE(t2, 1), &u1, GETBYTE(t3, 1),
                              &u2, GETBYTE(t0, 1), &u3, GETBYTE(t1, 1));
        s0 ^= u0 & 0x0000ff00; s1 ^= u1 & 0x0000ff00;
        s2 ^= u2 & 0x0000ff00; s3 ^= u3 & 0x0000ff00;
        GetTable_Multi(Te[1], &u0, GETBYTE(t3, 0), &u1, GETBYTE(t0, 0),
                              &u2, GETBYTE(t1, 0), &u3, GETBYTE(t2, 0));
        s0 ^= u0 & 0x000000ff; s1 ^= u1 & 0x000000ff;
        s2 ^= u2 & 0x000000ff; s3 ^= u3 & 0x000000ff;
    }
#endif
#else
    /* Small-tables build: single S-box table (Tsbox) plus arithmetic
     * column mixing via col_mul() instead of four 1KB T-tables. */
#ifndef WC_NO_CACHE_RESISTANT
    s0 |= PreFetchSBox();
#endif

    r *= 2;
    /* Two rounds at a time */
    for (rk += 4; r > 1; r--, rk += 4) {
        /* SubBytes + ShiftRows */
        t0 =
            ((word32)GetTable8(Tsbox, GETBYTE(s0, 3)) << 24) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s1, 2)) << 16) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s2, 1)) <<  8) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s3, 0)));
        t1 =
            ((word32)GetTable8(Tsbox, GETBYTE(s1, 3)) << 24) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s2, 2)) << 16) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s3, 1)) <<  8) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s0, 0)));
        t2 =
            ((word32)GetTable8(Tsbox, GETBYTE(s2, 3)) << 24) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s3, 2)) << 16) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s0, 1)) <<  8) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s1, 0)));
        t3 =
            ((word32)GetTable8(Tsbox, GETBYTE(s3, 3)) << 24) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s0, 2)) << 16) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s1, 1)) <<  8) ^
            ((word32)GetTable8(Tsbox, GETBYTE(s2, 0)));
        /* Column mixing (col_mul) + AddRoundKey */
        s0 =
            (col_mul(t0, 3, 2, 0, 1) << 24) ^
            (col_mul(t0, 2, 1, 0, 3) << 16) ^
            (col_mul(t0, 1, 0, 2, 3) <<  8) ^
            (col_mul(t0, 0, 3, 2, 1)      ) ^
            rk[0];
        s1 =
            (col_mul(t1, 3, 2, 0, 1) << 24) ^
            (col_mul(t1, 2, 1, 0, 3) << 16) ^
            (col_mul(t1, 1, 0, 2, 3) <<  8) ^
            (col_mul(t1, 0, 3, 2, 1)      ) ^
            rk[1];
        s2 =
            (col_mul(t2, 3, 2, 0, 1) << 24) ^
            (col_mul(t2, 2, 1, 0, 3) << 16) ^
            (col_mul(t2, 1, 0, 2, 3) <<  8) ^
            (col_mul(t2, 0, 3, 2, 1)      ) ^
            rk[2];
        s3 =
            (col_mul(t3, 3, 2, 0, 1) << 24) ^
            (col_mul(t3, 2, 1, 0, 3) << 16) ^
            (col_mul(t3, 1, 0, 2, 3) <<  8) ^
            (col_mul(t3, 0, 3, 2, 1)      ) ^
            rk[3];
    }

    /* Final round: SubBytes + ShiftRows + AddRoundKey (no mixing). */
    t0 =
        ((word32)GetTable8(Tsbox, GETBYTE(s0, 3)) << 24) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s1, 2)) << 16) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s2, 1)) <<  8) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s3, 0)));
    t1 =
        ((word32)GetTable8(Tsbox, GETBYTE(s1, 3)) << 24) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s2, 2)) << 16) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s3, 1)) <<  8) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s0, 0)));
    t2 =
        ((word32)GetTable8(Tsbox, GETBYTE(s2, 3)) << 24) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s3, 2)) << 16) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s0, 1)) <<  8) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s1, 0)));
    t3 =
        ((word32)GetTable8(Tsbox, GETBYTE(s3, 3)) << 24) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s0, 2)) << 16) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s1, 1)) <<  8) ^
        ((word32)GetTable8(Tsbox, GETBYTE(s2, 0)));
    s0 = t0 ^ rk[0];
    s1 = t1 ^ rk[1];
    s2 = t2 ^ rk[2];
    s3 = t3 ^ rk[3];
#endif

    /* write out */
#ifdef LITTLE_ENDIAN_ORDER
    s0 = ByteReverseWord32(s0);
    s1 = ByteReverseWord32(s1);
    s2 = ByteReverseWord32(s2);
    s3 = ByteReverseWord32(s3);
#endif

    XMEMCPY(outBlock,                  &s0, sizeof(s0));
    XMEMCPY(outBlock +     sizeof(s0), &s1, sizeof(s1));
    XMEMCPY(outBlock + 2 * sizeof(s0), &s2, sizeof(s2));
    XMEMCPY(outBlock + 3 * sizeof(s0), &s3, sizeof(s3));

    return 0;
}
  2090. #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT || HAVE_AESGCM */
  2091. #if defined(HAVE_AES_DECRYPT)
  2092. #if (defined(HAVE_AES_CBC) && !defined(WOLFSSL_DEVCRYPTO_CBC)) || \
  2093. defined(WOLFSSL_AES_DIRECT)
  2094. #ifndef WC_NO_CACHE_RESISTANT
  2095. #ifndef WOLFSSL_AES_SMALL_TABLES
  2096. /* load 4 Td Tables into cache by cache line stride */
  2097. static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchTd(void)
  2098. {
  2099. word32 x = 0;
  2100. int i,j;
  2101. for (i = 0; i < 4; i++) {
  2102. /* 256 elements, each one is 4 bytes */
  2103. for (j = 0; j < 256; j += WC_CACHE_LINE_SZ/4) {
  2104. x &= Td[i][j];
  2105. }
  2106. }
  2107. return x;
  2108. }
  2109. #endif
  2110. /* load Td Table4 into cache by cache line stride */
  2111. static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchTd4(void)
  2112. {
  2113. #ifndef WOLFSSL_AES_TOUCH_LINES
  2114. word32 x = 0;
  2115. int i;
  2116. for (i = 0; i < 256; i += WC_CACHE_LINE_SZ) {
  2117. x &= (word32)Td4[i];
  2118. }
  2119. return x;
  2120. #else
  2121. return 0;
  2122. #endif
  2123. }
  2124. #endif
/* Software AES - ECB Decrypt */
/* Decrypt a single 16-byte block.
 *
 * Dispatches to a hardware path (AES-NI, SCE, IMXRT DCP, SE050) when one
 * is compiled in and usable; otherwise runs the table-based software
 * implementation below.
 *
 * aes      - AES context with a decryption key schedule in aes->key
 * inBlock  - 16 bytes of ciphertext
 * outBlock - receives 16 bytes of plaintext
 *
 * Returns 0 on success or KEYUSAGE_E when the round count is invalid.
 */
static WARN_UNUSED_RESULT int wc_AesDecrypt(
    Aes* aes, const byte* inBlock, byte* outBlock)
{
    word32 s0, s1, s2, s3;
    word32 t0, t1, t2, t3;
    /* Rounds are processed in pairs: 10/12/14 rounds -> r = 5/6/7. */
    word32 r = aes->rounds >> 1;
    const word32* rk = aes->key;

    /* Reject a context whose key schedule was never set up. */
    if (r > 7 || r == 0) {
        WOLFSSL_ERROR_VERBOSE(KEYUSAGE_E);
        return KEYUSAGE_E;
    }

#ifdef WOLFSSL_AESNI
    if (haveAESNI && aes->use_aesni) {
#ifdef DEBUG_AESNI
        printf("about to aes decrypt\n");
        printf("in = %p\n", inBlock);
        printf("out = %p\n", outBlock);
        printf("aes->key = %p\n", aes->key);
        printf("aes->rounds = %d\n", aes->rounds);
        printf("sz = %d\n", AES_BLOCK_SIZE);
#endif

        /* if input and output same will overwrite input iv */
        /* NOTE(review): aes->tmp appears to preserve the ciphertext block
         * for CBC callers -- confirm against the CBC decrypt path. */
        if ((const byte*)aes->tmp != inBlock)
            XMEMCPY(aes->tmp, inBlock, AES_BLOCK_SIZE);
        AES_ECB_decrypt(inBlock, outBlock, AES_BLOCK_SIZE, (byte*)aes->key,
                        (int)aes->rounds);
        return 0;
    }
    else {
#ifdef DEBUG_AESNI
        printf("Skipping AES-NI\n");
#endif
    }
#endif /* WOLFSSL_AESNI */

#if defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
    return AES_ECB_decrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE);
#endif

#if defined(WOLFSSL_IMXRT_DCP)
    /* DCP hardware path handles AES-128 only; other sizes fall through
     * to the software implementation. */
    if (aes->keylen == 16) {
        DCPAesEcbDecrypt(aes, outBlock, inBlock, AES_BLOCK_SIZE);
        return 0;
    }
#endif

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    if (aes->useSWCrypt == 0) {
        return se050_aes_crypt(aes, inBlock, outBlock, AES_BLOCK_SIZE,
                               AES_DECRYPTION, kAlgorithm_SSS_AES_ECB);
    }
#endif

    /*
     * map byte array block to cipher state
     * and add initial round key:
     */
    XMEMCPY(&s0, inBlock,                  sizeof(s0));
    XMEMCPY(&s1, inBlock +     sizeof(s0), sizeof(s1));
    XMEMCPY(&s2, inBlock + 2 * sizeof(s0), sizeof(s2));
    XMEMCPY(&s3, inBlock + 3 * sizeof(s0), sizeof(s3));

#ifdef LITTLE_ENDIAN_ORDER
    /* The tables assume big-endian word layout. */
    s0 = ByteReverseWord32(s0);
    s1 = ByteReverseWord32(s1);
    s2 = ByteReverseWord32(s2);
    s3 = ByteReverseWord32(s3);
#endif

    /* AddRoundKey */
    s0 ^= rk[0];
    s1 ^= rk[1];
    s2 ^= rk[2];
    s3 ^= rk[3];

#ifndef WOLFSSL_AES_SMALL_TABLES
#ifndef WC_NO_CACHE_RESISTANT
    /* Touch the Td tables before secret-dependent lookups; the OR into
     * s0 only keeps the prefetch from being elided (value is 0). */
    s0 |= PreFetchTd();
#endif

#ifndef WOLFSSL_AES_TOUCH_LINES
    /* Unroll the loop. */
/* One full inverse round: state s0..s3 -> t0..t3, keys rk[o+4..o+7]. */
#define DEC_ROUND_T_S(o) \
    t0 = GetTable(Td[0], GETBYTE(s0, 3)) ^ GetTable(Td[1], GETBYTE(s3, 2)) ^ \
         GetTable(Td[2], GETBYTE(s2, 1)) ^ GetTable(Td[3], GETBYTE(s1, 0)) ^ rk[(o)+4]; \
    t1 = GetTable(Td[0], GETBYTE(s1, 3)) ^ GetTable(Td[1], GETBYTE(s0, 2)) ^ \
         GetTable(Td[2], GETBYTE(s3, 1)) ^ GetTable(Td[3], GETBYTE(s2, 0)) ^ rk[(o)+5]; \
    t2 = GetTable(Td[0], GETBYTE(s2, 3)) ^ GetTable(Td[1], GETBYTE(s1, 2)) ^ \
         GetTable(Td[2], GETBYTE(s0, 1)) ^ GetTable(Td[3], GETBYTE(s3, 0)) ^ rk[(o)+6]; \
    t3 = GetTable(Td[0], GETBYTE(s3, 3)) ^ GetTable(Td[1], GETBYTE(s2, 2)) ^ \
         GetTable(Td[2], GETBYTE(s1, 1)) ^ GetTable(Td[3], GETBYTE(s0, 0)) ^ rk[(o)+7]
/* One full inverse round in the other direction: t0..t3 -> s0..s3. */
#define DEC_ROUND_S_T(o) \
    s0 = GetTable(Td[0], GETBYTE(t0, 3)) ^ GetTable(Td[1], GETBYTE(t3, 2)) ^ \
         GetTable(Td[2], GETBYTE(t2, 1)) ^ GetTable(Td[3], GETBYTE(t1, 0)) ^ rk[(o)+0]; \
    s1 = GetTable(Td[0], GETBYTE(t1, 3)) ^ GetTable(Td[1], GETBYTE(t0, 2)) ^ \
         GetTable(Td[2], GETBYTE(t3, 1)) ^ GetTable(Td[3], GETBYTE(t2, 0)) ^ rk[(o)+1]; \
    s2 = GetTable(Td[0], GETBYTE(t2, 3)) ^ GetTable(Td[1], GETBYTE(t1, 2)) ^ \
         GetTable(Td[2], GETBYTE(t0, 1)) ^ GetTable(Td[3], GETBYTE(t3, 0)) ^ rk[(o)+2]; \
    s3 = GetTable(Td[0], GETBYTE(t3, 3)) ^ GetTable(Td[1], GETBYTE(t2, 2)) ^ \
         GetTable(Td[2], GETBYTE(t1, 1)) ^ GetTable(Td[3], GETBYTE(t0, 0)) ^ rk[(o)+3]
#else
/* WOLFSSL_AES_TOUCH_LINES variant: each Td table is read for all four
 * state words in a single pass via the *_Multi accessors. */
#define DEC_ROUND_T_S(o) \
    GetTable_Multi(Td[0], &t0, GETBYTE(s0, 3), &t1, GETBYTE(s1, 3), \
                          &t2, GETBYTE(s2, 3), &t3, GETBYTE(s3, 3)); \
    XorTable_Multi(Td[1], &t0, GETBYTE(s3, 2), &t1, GETBYTE(s0, 2), \
                          &t2, GETBYTE(s1, 2), &t3, GETBYTE(s2, 2)); \
    XorTable_Multi(Td[2], &t0, GETBYTE(s2, 1), &t1, GETBYTE(s3, 1), \
                          &t2, GETBYTE(s0, 1), &t3, GETBYTE(s1, 1)); \
    XorTable_Multi(Td[3], &t0, GETBYTE(s1, 0), &t1, GETBYTE(s2, 0), \
                          &t2, GETBYTE(s3, 0), &t3, GETBYTE(s0, 0)); \
    t0 ^= rk[(o)+4]; t1 ^= rk[(o)+5]; t2 ^= rk[(o)+6]; t3 ^= rk[(o)+7];
#define DEC_ROUND_S_T(o) \
    GetTable_Multi(Td[0], &s0, GETBYTE(t0, 3), &s1, GETBYTE(t1, 3), \
                          &s2, GETBYTE(t2, 3), &s3, GETBYTE(t3, 3)); \
    XorTable_Multi(Td[1], &s0, GETBYTE(t3, 2), &s1, GETBYTE(t0, 2), \
                          &s2, GETBYTE(t1, 2), &s3, GETBYTE(t2, 2)); \
    XorTable_Multi(Td[2], &s0, GETBYTE(t2, 1), &s1, GETBYTE(t3, 1), \
                          &s2, GETBYTE(t0, 1), &s3, GETBYTE(t1, 1)); \
    XorTable_Multi(Td[3], &s0, GETBYTE(t1, 0), &s1, GETBYTE(t2, 0), \
                          &s2, GETBYTE(t3, 0), &s3, GETBYTE(t0, 0)); \
    s0 ^= rk[(o)+0]; s1 ^= rk[(o)+1]; s2 ^= rk[(o)+2]; s3 ^= rk[(o)+3];
#endif

#ifndef WOLFSSL_AES_NO_UNROLL
    /* r is 5/6/7: run 9/11/13 full inverse rounds here; the final round
     * (Td4 S-box only) is applied below. */
                       DEC_ROUND_T_S( 0);
    DEC_ROUND_S_T( 8); DEC_ROUND_T_S( 8);
    DEC_ROUND_S_T(16); DEC_ROUND_T_S(16);
    DEC_ROUND_S_T(24); DEC_ROUND_T_S(24);
    DEC_ROUND_S_T(32); DEC_ROUND_T_S(32);
    if (r > 5) {
        DEC_ROUND_S_T(40); DEC_ROUND_T_S(40);
        if (r > 6) {
            DEC_ROUND_S_T(48); DEC_ROUND_T_S(48);
        }
    }
    rk += r * 8;
#else
    /*
     * Nr - 1 full rounds:
     */
    for (;;) {
        DEC_ROUND_T_S(0);
        rk += 8;
        if (--r == 0) {
            break;
        }
        DEC_ROUND_S_T(0);
    }
#endif

    /*
     * apply last round and
     * map cipher state to byte array block:
     */
#ifndef WC_NO_CACHE_RESISTANT
    /* Prefetch the final-round byte table; OR keeps the loads alive. */
    t0 |= PreFetchTd4();
#endif

    /* Final round: inverse S-box (Td4) byte lookups + AddRoundKey. */
    s0 = GetTable8_4(Td4, GETBYTE(t0, 3), GETBYTE(t3, 2),
                          GETBYTE(t2, 1), GETBYTE(t1, 0)) ^ rk[0];
    s1 = GetTable8_4(Td4, GETBYTE(t1, 3), GETBYTE(t0, 2),
                          GETBYTE(t3, 1), GETBYTE(t2, 0)) ^ rk[1];
    s2 = GetTable8_4(Td4, GETBYTE(t2, 3), GETBYTE(t1, 2),
                          GETBYTE(t0, 1), GETBYTE(t3, 0)) ^ rk[2];
    s3 = GetTable8_4(Td4, GETBYTE(t3, 3), GETBYTE(t2, 2),
                          GETBYTE(t1, 1), GETBYTE(t0, 0)) ^ rk[3];
#else
    /* Small-tables build: byte-wide inverse S-box (Td4) plus arithmetic
     * inverse column mixing via inv_col_mul(). */
#ifndef WC_NO_CACHE_RESISTANT
    s0 |= PreFetchTd4();
#endif

    r *= 2;
    for (rk += 4; r > 1; r--, rk += 4) {
        /* InvSubBytes + InvShiftRows + AddRoundKey */
        t0 =
            ((word32)GetTable8(Td4, GETBYTE(s0, 3)) << 24) ^
            ((word32)GetTable8(Td4, GETBYTE(s3, 2)) << 16) ^
            ((word32)GetTable8(Td4, GETBYTE(s2, 1)) <<  8) ^
            ((word32)GetTable8(Td4, GETBYTE(s1, 0))) ^
            rk[0];
        t1 =
            ((word32)GetTable8(Td4, GETBYTE(s1, 3)) << 24) ^
            ((word32)GetTable8(Td4, GETBYTE(s0, 2)) << 16) ^
            ((word32)GetTable8(Td4, GETBYTE(s3, 1)) <<  8) ^
            ((word32)GetTable8(Td4, GETBYTE(s2, 0))) ^
            rk[1];
        t2 =
            ((word32)GetTable8(Td4, GETBYTE(s2, 3)) << 24) ^
            ((word32)GetTable8(Td4, GETBYTE(s1, 2)) << 16) ^
            ((word32)GetTable8(Td4, GETBYTE(s0, 1)) <<  8) ^
            ((word32)GetTable8(Td4, GETBYTE(s3, 0))) ^
            rk[2];
        t3 =
            ((word32)GetTable8(Td4, GETBYTE(s3, 3)) << 24) ^
            ((word32)GetTable8(Td4, GETBYTE(s2, 2)) << 16) ^
            ((word32)GetTable8(Td4, GETBYTE(s1, 1)) <<  8) ^
            ((word32)GetTable8(Td4, GETBYTE(s0, 0))) ^
            rk[3];
        /* Inverse column mixing (inv_col_mul) */
        s0 =
            (inv_col_mul(t0, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t0, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t0, 2, 0, 3, 1) <<  8) ^
            (inv_col_mul(t0, 1, 3, 2, 0)      );
        s1 =
            (inv_col_mul(t1, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t1, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t1, 2, 0, 3, 1) <<  8) ^
            (inv_col_mul(t1, 1, 3, 2, 0)      );
        s2 =
            (inv_col_mul(t2, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t2, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t2, 2, 0, 3, 1) <<  8) ^
            (inv_col_mul(t2, 1, 3, 2, 0)      );
        s3 =
            (inv_col_mul(t3, 0, 2, 1, 3) << 24) ^
            (inv_col_mul(t3, 3, 1, 0, 2) << 16) ^
            (inv_col_mul(t3, 2, 0, 3, 1) <<  8) ^
            (inv_col_mul(t3, 1, 3, 2, 0)      );
    }

    /* Final round: inverse S-box + AddRoundKey (no mixing). */
    t0 =
        ((word32)GetTable8(Td4, GETBYTE(s0, 3)) << 24) ^
        ((word32)GetTable8(Td4, GETBYTE(s3, 2)) << 16) ^
        ((word32)GetTable8(Td4, GETBYTE(s2, 1)) <<  8) ^
        ((word32)GetTable8(Td4, GETBYTE(s1, 0)));
    t1 =
        ((word32)GetTable8(Td4, GETBYTE(s1, 3)) << 24) ^
        ((word32)GetTable8(Td4, GETBYTE(s0, 2)) << 16) ^
        ((word32)GetTable8(Td4, GETBYTE(s3, 1)) <<  8) ^
        ((word32)GetTable8(Td4, GETBYTE(s2, 0)));
    t2 =
        ((word32)GetTable8(Td4, GETBYTE(s2, 3)) << 24) ^
        ((word32)GetTable8(Td4, GETBYTE(s1, 2)) << 16) ^
        ((word32)GetTable8(Td4, GETBYTE(s0, 1)) <<  8) ^
        ((word32)GetTable8(Td4, GETBYTE(s3, 0)));
    t3 =
        ((word32)GetTable8(Td4, GETBYTE(s3, 3)) << 24) ^
        ((word32)GetTable8(Td4, GETBYTE(s2, 2)) << 16) ^
        ((word32)GetTable8(Td4, GETBYTE(s1, 1)) <<  8) ^
        ((word32)GetTable8(Td4, GETBYTE(s0, 0)));
    s0 = t0 ^ rk[0];
    s1 = t1 ^ rk[1];
    s2 = t2 ^ rk[2];
    s3 = t3 ^ rk[3];
#endif

    /* write out */
#ifdef LITTLE_ENDIAN_ORDER
    s0 = ByteReverseWord32(s0);
    s1 = ByteReverseWord32(s1);
    s2 = ByteReverseWord32(s2);
    s3 = ByteReverseWord32(s3);
#endif

    XMEMCPY(outBlock,                  &s0, sizeof(s0));
    XMEMCPY(outBlock +     sizeof(s0), &s1, sizeof(s1));
    XMEMCPY(outBlock + 2 * sizeof(s0), &s2, sizeof(s2));
    XMEMCPY(outBlock + 3 * sizeof(s0), &s3, sizeof(s3));

    return 0;
}
  2369. #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT */
  2370. #endif /* HAVE_AES_DECRYPT */
  2371. #endif /* NEED_AES_TABLES */
  2372. /* wc_AesSetKey */
  2373. #if defined(STM32_CRYPTO)
  2374. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
  2375. const byte* iv, int dir)
  2376. {
  2377. word32 *rk;
  2378. (void)dir;
  2379. if (aes == NULL || (keylen != 16 &&
  2380. #ifdef WOLFSSL_AES_192
  2381. keylen != 24 &&
  2382. #endif
  2383. keylen != 32)) {
  2384. return BAD_FUNC_ARG;
  2385. }
  2386. rk = aes->key;
  2387. aes->keylen = keylen;
  2388. aes->rounds = keylen/4 + 6;
  2389. XMEMCPY(rk, userKey, keylen);
  2390. #if !defined(WOLFSSL_STM32_CUBEMX) || defined(STM32_HAL_V2)
  2391. ByteReverseWords(rk, rk, keylen);
  2392. #endif
  2393. #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
  2394. defined(WOLFSSL_AES_OFB)
  2395. aes->left = 0;
  2396. #endif
  2397. return wc_AesSetIV(aes, iv);
  2398. }
  2399. #if defined(WOLFSSL_AES_DIRECT)
/* Direct (single-block) key setup is identical to the regular key
 * setup on this port; simply delegate. */
int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
    const byte* iv, int dir)
{
    return wc_AesSetKey(aes, userKey, keylen, iv, dir);
}
  2405. #endif
  2406. #elif defined(HAVE_COLDFIRE_SEC)
  2407. #if defined (HAVE_THREADX)
  2408. #include "memory_pools.h"
  2409. extern TX_BYTE_POOL mp_ncached; /* Non Cached memory pool */
  2410. #endif
/* Size of the shared staging buffers: 64 AES blocks per transfer. */
#define AES_BUFFER_SIZE (AES_BLOCK_SIZE * 64)

/* Non-cached staging buffers and SEC state shared by all AES contexts;
 * allocated once on the first wc_AesSetKey() call (ThreadX build). */
static unsigned char *AESBuffIn = NULL;
static unsigned char *AESBuffOut = NULL;
static byte *secReg;   /* register/IV staging area for the SEC engine */
static byte *secKey;   /* key staging area for the SEC engine */
static volatile SECdescriptorType *secDesc; /* hardware job descriptor */

/* Serializes access to the single SEC engine instance. */
static wolfSSL_Mutex Mutex_AesSEC;

/* SEC descriptor header words selecting AES-CBC encrypt/decrypt. */
#define SEC_DESC_AES_CBC_ENCRYPT 0x60300010
#define SEC_DESC_AES_CBC_DECRYPT 0x60200010

/* Memory-mapped peripheral base provided by the BSP/linker. */
extern volatile unsigned char __MBAR[];
/* Key setup for the ColdFire SEC hardware port.
 * On first use, allocates the shared non-cached buffers the SEC engine
 * DMAs through (ThreadX byte pool), then stores key, round count and
 * optional IV in the context. Returns 0 on success or BAD_FUNC_ARG on
 * invalid arguments / allocation failure. */
int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
    const byte* iv, int dir)
{
    /* One-time lazy allocation of the shared SEC buffers. */
    if (AESBuffIn == NULL) {
#if defined (HAVE_THREADX)
        int s1, s2, s3, s4, s5;

        s5 = tx_byte_allocate(&mp_ncached,(void *)&secDesc,
                              sizeof(SECdescriptorType), TX_NO_WAIT);
        s1 = tx_byte_allocate(&mp_ncached, (void *)&AESBuffIn,
                              AES_BUFFER_SIZE, TX_NO_WAIT);
        s2 = tx_byte_allocate(&mp_ncached, (void *)&AESBuffOut,
                              AES_BUFFER_SIZE, TX_NO_WAIT);
        s3 = tx_byte_allocate(&mp_ncached, (void *)&secKey,
                              AES_BLOCK_SIZE*2, TX_NO_WAIT);
        s4 = tx_byte_allocate(&mp_ncached, (void *)&secReg,
                              AES_BLOCK_SIZE, TX_NO_WAIT);

        if (s1 || s2 || s3 || s4 || s5)
            return BAD_FUNC_ARG;
#else
#warning "Allocate non-Cache buffers"
#endif

        /* NOTE(review): wc_InitMutex() result is ignored; a failure would
         * leave Mutex_AesSEC unusable -- confirm this is acceptable. */
        wc_InitMutex(&Mutex_AesSEC);
    }

    if (!((keylen == 16) || (keylen == 24) || (keylen == 32)))
        return BAD_FUNC_ARG;
    if (aes == NULL)
        return BAD_FUNC_ARG;

    aes->keylen = keylen;
    aes->rounds = keylen/4 + 6;

    XMEMCPY(aes->key, userKey, keylen);
    /* Seed the working IV register only when an IV is supplied. */
    if (iv)
        XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE);

#if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
    defined(WOLFSSL_AES_OFB)
    aes->left = 0;
#endif

    return 0;
}
  2459. #elif defined(FREESCALE_LTC)
  2460. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, const byte* iv,
  2461. int dir)
  2462. {
  2463. if (aes == NULL || !((keylen == 16) || (keylen == 24) || (keylen == 32)))
  2464. return BAD_FUNC_ARG;
  2465. aes->rounds = keylen/4 + 6;
  2466. XMEMCPY(aes->key, userKey, keylen);
  2467. #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
  2468. defined(WOLFSSL_AES_OFB)
  2469. aes->left = 0;
  2470. #endif
  2471. return wc_AesSetIV(aes, iv);
  2472. }
/* Direct (single-block) key setup is identical to the regular key
 * setup on this port; simply delegate. */
int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
    const byte* iv, int dir)
{
    return wc_AesSetKey(aes, userKey, keylen, iv, dir);
}
  2478. #elif defined(FREESCALE_MMCAU)
  2479. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
  2480. const byte* iv, int dir)
  2481. {
  2482. int ret;
  2483. byte* rk;
  2484. byte* tmpKey = (byte*)userKey;
  2485. int tmpKeyDynamic = 0;
  2486. word32 alignOffset = 0;
  2487. (void)dir;
  2488. if (!((keylen == 16) || (keylen == 24) || (keylen == 32)))
  2489. return BAD_FUNC_ARG;
  2490. if (aes == NULL)
  2491. return BAD_FUNC_ARG;
  2492. rk = (byte*)aes->key;
  2493. if (rk == NULL)
  2494. return BAD_FUNC_ARG;
  2495. #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
  2496. defined(WOLFSSL_AES_OFB)
  2497. aes->left = 0;
  2498. #endif
  2499. aes->rounds = keylen/4 + 6;
  2500. #ifdef FREESCALE_MMCAU_CLASSIC
  2501. if ((wc_ptr_t)userKey % WOLFSSL_MMCAU_ALIGNMENT) {
  2502. #ifndef NO_WOLFSSL_ALLOC_ALIGN
  2503. byte* tmp = (byte*)XMALLOC(keylen + WOLFSSL_MMCAU_ALIGNMENT,
  2504. aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
  2505. if (tmp == NULL) {
  2506. return MEMORY_E;
  2507. }
  2508. alignOffset = WOLFSSL_MMCAU_ALIGNMENT -
  2509. ((wc_ptr_t)tmp % WOLFSSL_MMCAU_ALIGNMENT);
  2510. tmpKey = tmp + alignOffset;
  2511. XMEMCPY(tmpKey, userKey, keylen);
  2512. tmpKeyDynamic = 1;
  2513. #else
  2514. WOLFSSL_MSG("Bad cau_aes_set_key alignment");
  2515. return BAD_ALIGN_E;
  2516. #endif
  2517. }
  2518. #endif
  2519. ret = wolfSSL_CryptHwMutexLock();
  2520. if(ret == 0) {
  2521. #ifdef FREESCALE_MMCAU_CLASSIC
  2522. cau_aes_set_key(tmpKey, keylen*8, rk);
  2523. #else
  2524. MMCAU_AES_SetKey(tmpKey, keylen, rk);
  2525. #endif
  2526. wolfSSL_CryptHwMutexUnLock();
  2527. ret = wc_AesSetIV(aes, iv);
  2528. }
  2529. if (tmpKeyDynamic == 1) {
  2530. XFREE(tmpKey - alignOffset, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
  2531. }
  2532. return ret;
  2533. }
/* Direct (single-block) key setup is identical to the regular key
 * setup on this port; simply delegate. */
int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
    const byte* iv, int dir)
{
    return wc_AesSetKey(aes, userKey, keylen, iv, dir);
}
  2539. #elif defined(WOLFSSL_NRF51_AES)
  2540. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
  2541. const byte* iv, int dir)
  2542. {
  2543. int ret;
  2544. (void)dir;
  2545. (void)iv;
  2546. if (aes == NULL || keylen != 16)
  2547. return BAD_FUNC_ARG;
  2548. aes->keylen = keylen;
  2549. aes->rounds = keylen/4 + 6;
  2550. XMEMCPY(aes->key, userKey, keylen);
  2551. ret = nrf51_aes_set_key(userKey);
  2552. #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
  2553. defined(WOLFSSL_AES_OFB)
  2554. aes->left = 0;
  2555. #endif
  2556. return ret;
  2557. }
/* Direct (single-block) key setup is identical to the regular key
 * setup on this port; simply delegate. */
int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
    const byte* iv, int dir)
{
    return wc_AesSetKey(aes, userKey, keylen, iv, dir);
}
  2563. #elif defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
  2564. !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_AES)
  2565. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
  2566. const byte* iv, int dir)
  2567. {
  2568. (void)dir;
  2569. (void)iv;
  2570. if (aes == NULL || (keylen != 16 && keylen != 24 && keylen != 32)) {
  2571. return BAD_FUNC_ARG;
  2572. }
  2573. #if !defined(WOLFSSL_AES_128)
  2574. if (keylen == 16) {
  2575. return BAD_FUNC_ARG;
  2576. }
  2577. #endif
  2578. #if !defined(WOLFSSL_AES_192)
  2579. if (keylen == 24) {
  2580. return BAD_FUNC_ARG;
  2581. }
  2582. #endif
  2583. #if !defined(WOLFSSL_AES_256)
  2584. if (keylen == 32) {
  2585. return BAD_FUNC_ARG;
  2586. }
  2587. #endif
  2588. aes->keylen = keylen;
  2589. aes->rounds = keylen/4 + 6;
  2590. XMEMCPY(aes->key, userKey, keylen);
  2591. #if defined(WOLFSSL_AES_COUNTER)
  2592. aes->left = 0;
  2593. #endif
  2594. return wc_AesSetIV(aes, iv);
  2595. }
/* Direct (single-block) key setup is identical to the regular key
 * setup on this port; simply delegate. */
int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
    const byte* iv, int dir)
{
    return wc_AesSetKey(aes, userKey, keylen, iv, dir);
}
  2601. #elif defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
  2602. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, const byte* iv,
  2603. int dir)
  2604. {
  2605. SaSiError_t ret = SASI_OK;
  2606. SaSiAesIv_t iv_aes;
  2607. if (aes == NULL ||
  2608. (keylen != AES_128_KEY_SIZE &&
  2609. keylen != AES_192_KEY_SIZE &&
  2610. keylen != AES_256_KEY_SIZE)) {
  2611. return BAD_FUNC_ARG;
  2612. }
  2613. #if defined(AES_MAX_KEY_SIZE)
  2614. if (keylen > (AES_MAX_KEY_SIZE/8)) {
  2615. return BAD_FUNC_ARG;
  2616. }
  2617. #endif
  2618. if (dir != AES_ENCRYPTION &&
  2619. dir != AES_DECRYPTION) {
  2620. return BAD_FUNC_ARG;
  2621. }
  2622. if (dir == AES_ENCRYPTION) {
  2623. aes->ctx.mode = SASI_AES_ENCRYPT;
  2624. SaSi_AesInit(&aes->ctx.user_ctx,
  2625. SASI_AES_ENCRYPT,
  2626. SASI_AES_MODE_CBC,
  2627. SASI_AES_PADDING_NONE);
  2628. }
  2629. else {
  2630. aes->ctx.mode = SASI_AES_DECRYPT;
  2631. SaSi_AesInit(&aes->ctx.user_ctx,
  2632. SASI_AES_DECRYPT,
  2633. SASI_AES_MODE_CBC,
  2634. SASI_AES_PADDING_NONE);
  2635. }
  2636. aes->keylen = keylen;
  2637. aes->rounds = keylen/4 + 6;
  2638. XMEMCPY(aes->key, userKey, keylen);
  2639. aes->ctx.key.pKey = (byte*)aes->key;
  2640. aes->ctx.key.keySize= keylen;
  2641. ret = SaSi_AesSetKey(&aes->ctx.user_ctx,
  2642. SASI_AES_USER_KEY,
  2643. &aes->ctx.key,
  2644. sizeof(aes->ctx.key));
  2645. if (ret != SASI_OK) {
  2646. return BAD_FUNC_ARG;
  2647. }
  2648. ret = wc_AesSetIV(aes, iv);
  2649. if (iv)
  2650. XMEMCPY(iv_aes, iv, AES_BLOCK_SIZE);
  2651. else
  2652. XMEMSET(iv_aes, 0, AES_BLOCK_SIZE);
  2653. ret = SaSi_AesSetIv(&aes->ctx.user_ctx, iv_aes);
  2654. if (ret != SASI_OK) {
  2655. return ret;
  2656. }
  2657. return ret;
  2658. }
  2659. #if defined(WOLFSSL_AES_DIRECT)
  2660. int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
  2661. const byte* iv, int dir)
  2662. {
  2663. return wc_AesSetKey(aes, userKey, keylen, iv, dir);
  2664. }
  2665. #endif
  2666. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) \
  2667. && !defined(WOLFSSL_QNX_CAAM)
  2668. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  2669. #elif defined(WOLFSSL_AFALG)
  2670. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  2671. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  2672. /* implemented in wolfcrypt/src/port/devcrypto/devcrypto_aes.c */
  2673. #elif defined(WOLFSSL_SILABS_SE_ACCEL)
/* implemented in wolfcrypt/src/port/silabs/silabs_aes.c */
  2675. #else
  2676. /* Software AES - SetKey */
/* Core software AES key setup shared by wc_AesSetKey() and
 * wc_AesSetKeyDirect().
 *
 * Copies the user key into the context, dispatches to hardware/offload
 * back ends where enabled, otherwise expands the round keys per the
 * FIPS-197 key schedule, and finally stores the IV via wc_AesSetIV().
 *
 * aes         AES context; assumed non-NULL (callers validate)
 * userKey     raw key bytes
 * keylen      key length in bytes
 * iv          optional IV; NULL means all-zero IV
 * dir         AES_ENCRYPTION or AES_DECRYPTION
 * checkKeyLen when non-zero, only 16/24/32-byte keys are accepted
 *
 * returns 0 on success or a negative wolfCrypt error code.
 */
static WARN_UNUSED_RESULT int wc_AesSetKeyLocal(
    Aes* aes, const byte* userKey, word32 keylen, const byte* iv, int dir,
    int checkKeyLen)
{
    int ret;
    word32 *rk;
#ifdef NEED_AES_TABLES
    word32 temp;
    unsigned int i = 0;
#endif
#ifdef WOLFSSL_IMX6_CAAM_BLOB
    byte   local[32];
    word32 localSz = 32;
#endif

#ifdef WOLFSSL_MAXQ10XX_CRYPTO
    if (wc_MAXQ10XX_AesSetKey(aes, userKey, keylen) != 0) {
        return WC_HW_E;
    }
#endif

#ifdef WOLFSSL_IMX6_CAAM_BLOB
    /* The key may arrive wrapped as a CAAM blob (raw size + blob
     * overhead); unwrap it into 'local' and use the plain key. */
    if (keylen == (16 + WC_CAAM_BLOB_SZ) ||
        keylen == (24 + WC_CAAM_BLOB_SZ) ||
        keylen == (32 + WC_CAAM_BLOB_SZ)) {
        if (wc_caamOpenBlob((byte*)userKey, keylen, local, &localSz) != 0) {
            return BAD_FUNC_ARG;
        }

        /* set local values */
        userKey = local;
        keylen = localSz;
    }
#endif

#ifdef WOLFSSL_SECO_CAAM
    /* if set to use hardware then import the key */
    if (aes->devId == WOLFSSL_SECO_DEVID) {
        int keyGroup = 1; /* group one was chosen arbitrarily */
        unsigned int keyIdOut;
        byte importiv[GCM_NONCE_MID_SZ];
        int importivSz = GCM_NONCE_MID_SZ;
        int keyType = 0;
        WC_RNG rng;

        /* a fresh random IV is needed for the key-import (wrap) step */
        if (wc_InitRng(&rng) != 0) {
            WOLFSSL_MSG("RNG init for IV failed");
            return WC_HW_E;
        }

        if (wc_RNG_GenerateBlock(&rng, importiv, importivSz) != 0) {
            WOLFSSL_MSG("Generate IV failed");
            wc_FreeRng(&rng);
            return WC_HW_E;
        }
        wc_FreeRng(&rng);

        if (iv)
            XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE);
        else
            XMEMSET(aes->reg, 0, AES_BLOCK_SIZE);

        switch (keylen) {
            case AES_128_KEY_SIZE: keyType = CAAM_KEYTYPE_AES128; break;
            case AES_192_KEY_SIZE: keyType = CAAM_KEYTYPE_AES192; break;
            case AES_256_KEY_SIZE: keyType = CAAM_KEYTYPE_AES256; break;
        }

        keyIdOut = wc_SECO_WrapKey(0, (byte*)userKey, keylen, importiv,
            importivSz, keyType, CAAM_KEY_TRANSIENT, keyGroup);
        if (keyIdOut == 0) {
            return WC_HW_E;
        }
        aes->blackKey = keyIdOut;
        return 0;
    }
#endif

#if defined(WOLF_CRYPTO_CB) || (defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))) || \
    (defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES))
    #ifdef WOLF_CRYPTO_CB
    /* keep a raw copy of the key for crypto-callback / offload devices */
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        if (keylen > sizeof(aes->devKey)) {
            return BAD_FUNC_ARG;
        }
        XMEMCPY(aes->devKey, userKey, keylen);
    }
#endif

    if (checkKeyLen) {
        if (keylen != 16 && keylen != 24 && keylen != 32) {
            return BAD_FUNC_ARG;
        }
#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE < 256
        /* Check key length only when AES_MAX_KEY_SIZE doesn't allow
         * all key sizes. Otherwise this condition is never true. */
        if (keylen > (AES_MAX_KEY_SIZE / 8)) {
            return BAD_FUNC_ARG;
        }
#endif
    }

#if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \
    defined(WOLFSSL_AES_OFB)
    /* discard any partial-block state from a previous stream operation */
    aes->left = 0;
#endif

    aes->keylen = (int)keylen;
    /* rounds: 10/12/14 for 128/192/256-bit keys (keylen in bytes / 4 + 6) */
    aes->rounds = (keylen/4) + 6;

#ifdef WOLFSSL_AESNI
    aes->use_aesni = 0;
    /* probe CPU for AES-NI support once, then cache the result */
    if (checkAESNI == 0) {
        haveAESNI = Check_CPU_support_AES();
        checkAESNI = 1;
    }
    if (haveAESNI) {
        #ifdef WOLFSSL_LINUXKM
        /* runtime alignment check */
        if ((wc_ptr_t)&aes->key & (wc_ptr_t)0xf) {
            return BAD_ALIGN_E;
        }
        #endif
        aes->use_aesni = 1;
        if (iv)
            XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE);
        else
            XMEMSET(aes->reg, 0, AES_BLOCK_SIZE);
        /* hardware key schedule; returns directly, skipping the software
         * table-based expansion below */
        if (dir == AES_ENCRYPTION)
            return AES_set_encrypt_key(userKey, (int)keylen * 8, aes);
    #ifdef HAVE_AES_DECRYPT
        else
            return AES_set_decrypt_key(userKey, (int)keylen * 8, aes);
    #endif
    }
#endif /* WOLFSSL_AESNI */

#ifdef WOLFSSL_KCAPI_AES
    /* kernel crypto API keeps its own key copy; tear down any previously
     * initialized cipher handle so it is re-created with the new key */
    XMEMCPY(aes->devKey, userKey, keylen);
    if (aes->init != 0) {
        kcapi_cipher_destroy(aes->handle);
        aes->handle = NULL;
        aes->init = 0;
    }
    (void)dir;
#endif

    if (keylen > sizeof(aes->key)) {
        return BAD_FUNC_ARG;
    }

#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    /* PSA owns the key from here on */
    return wc_psa_aes_set_key(aes, userKey, keylen, (uint8_t*)iv,
                              ((psa_algorithm_t)0), dir);
#endif

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    /* wolfSSL HostCrypto in SE05x SDK can request to use SW crypto
     * instead of SE05x crypto by setting useSWCrypt */
    if (aes->useSWCrypt == 0) {
        ret = se050_aes_set_key(aes, userKey, keylen, iv, dir);
        if (ret == 0) {
            ret = wc_AesSetIV(aes, iv);
        }
        return ret;
    }
#endif

    /* software path: copy raw key into the round-key buffer */
    rk = aes->key;
    XMEMCPY(rk, userKey, keylen);

#if defined(LITTLE_ENDIAN_ORDER) && !defined(WOLFSSL_PIC32MZ_CRYPT) && \
    (!defined(WOLFSSL_ESP32WROOM32_CRYPT) || \
      defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_AES))
    /* key schedule operates on big-endian words */
    ByteReverseWords(rk, rk, keylen);
#endif

#ifdef WOLFSSL_IMXRT_DCP
    /* Implemented in wolfcrypt/src/port/nxp/dcp_port.c */
    temp = 0;
    if (keylen == 16)
        temp = DCPAesSetKey(aes, userKey, keylen, iv, dir);
    if (temp != 0)
        return WC_HW_E;
#endif

#ifdef NEED_AES_TABLES
    /* FIPS-197 key expansion; one case per supported key size.
     * 'temp' holds the previous round-key word being transformed and
     * 'rcon[i]' supplies the round constant. */
    switch (keylen) {
#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 128 && \
        defined(WOLFSSL_AES_128)
    case 16:
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        temp = (word32)-1;
        wc_MemZero_Add("wc_AesSetKeyLocal temp", &temp, sizeof(temp));
    #endif
        while (1)
        {
            temp  = rk[3];
            /* SubWord(RotWord(temp)) via either the large or small tables */
            rk[4] = rk[0] ^
        #ifndef WOLFSSL_AES_SMALL_TABLES
                (GetTable(Te[2], GETBYTE(temp, 2)) & 0xff000000) ^
                (GetTable(Te[3], GETBYTE(temp, 1)) & 0x00ff0000) ^
                (GetTable(Te[0], GETBYTE(temp, 0)) & 0x0000ff00) ^
                (GetTable(Te[1], GETBYTE(temp, 3)) & 0x000000ff) ^
        #else
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 2)) << 24) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 1)) << 16) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 0)) <<  8) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 3))) ^
        #endif
                rcon[i];
            rk[5] = rk[1] ^ rk[4];
            rk[6] = rk[2] ^ rk[5];
            rk[7] = rk[3] ^ rk[6];
            if (++i == 10)
                break;
            rk += 4;
        }
        break;
#endif /* 128 */

#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 192 && \
        defined(WOLFSSL_AES_192)
    case 24:
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        temp = (word32)-1;
        wc_MemZero_Add("wc_AesSetKeyLocal temp", &temp, sizeof(temp));
    #endif
        /* for (;;) here triggers a bug in VC60 SP4 w/ Pro Pack */
        while (1)
        {
            temp = rk[ 5];
            rk[ 6] = rk[ 0] ^
        #ifndef WOLFSSL_AES_SMALL_TABLES
                (GetTable(Te[2], GETBYTE(temp, 2)) & 0xff000000) ^
                (GetTable(Te[3], GETBYTE(temp, 1)) & 0x00ff0000) ^
                (GetTable(Te[0], GETBYTE(temp, 0)) & 0x0000ff00) ^
                (GetTable(Te[1], GETBYTE(temp, 3)) & 0x000000ff) ^
        #else
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 2)) << 24) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 1)) << 16) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 0)) <<  8) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 3))) ^
        #endif
                rcon[i];
            rk[ 7] = rk[ 1] ^ rk[ 6];
            rk[ 8] = rk[ 2] ^ rk[ 7];
            rk[ 9] = rk[ 3] ^ rk[ 8];
            if (++i == 8)
                break;
            rk[10] = rk[ 4] ^ rk[ 9];
            rk[11] = rk[ 5] ^ rk[10];
            rk += 6;
        }
        break;
#endif /* 192 */

#if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 256 && \
        defined(WOLFSSL_AES_256)
    case 32:
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        temp = (word32)-1;
        wc_MemZero_Add("wc_AesSetKeyLocal temp", &temp, sizeof(temp));
    #endif
        while (1)
        {
            temp = rk[ 7];
            rk[ 8] = rk[ 0] ^
        #ifndef WOLFSSL_AES_SMALL_TABLES
                (GetTable(Te[2], GETBYTE(temp, 2)) & 0xff000000) ^
                (GetTable(Te[3], GETBYTE(temp, 1)) & 0x00ff0000) ^
                (GetTable(Te[0], GETBYTE(temp, 0)) & 0x0000ff00) ^
                (GetTable(Te[1], GETBYTE(temp, 3)) & 0x000000ff) ^
        #else
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 2)) << 24) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 1)) << 16) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 0)) <<  8) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 3))) ^
        #endif
                rcon[i];
            rk[ 9] = rk[ 1] ^ rk[ 8];
            rk[10] = rk[ 2] ^ rk[ 9];
            rk[11] = rk[ 3] ^ rk[10];
            if (++i == 7)
                break;
            /* AES-256 has an extra SubWord step (no rotate) mid-round */
            temp = rk[11];
            rk[12] = rk[ 4] ^
        #ifndef WOLFSSL_AES_SMALL_TABLES
                (GetTable(Te[2], GETBYTE(temp, 3)) & 0xff000000) ^
                (GetTable(Te[3], GETBYTE(temp, 2)) & 0x00ff0000) ^
                (GetTable(Te[0], GETBYTE(temp, 1)) & 0x0000ff00) ^
                (GetTable(Te[1], GETBYTE(temp, 0)) & 0x000000ff);
        #else
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 3)) << 24) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 2)) << 16) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 1)) <<  8) ^
                ((word32)GetTable8(Tsbox, GETBYTE(temp, 0)));
        #endif
            rk[13] = rk[ 5] ^ rk[12];
            rk[14] = rk[ 6] ^ rk[13];
            rk[15] = rk[ 7] ^ rk[14];
            rk += 8;
        }
        break;
#endif /* 256 */

    default:
        return BAD_FUNC_ARG;
    } /* switch */
    ForceZero(&temp, sizeof(temp));

#if defined(HAVE_AES_DECRYPT)
    if (dir == AES_DECRYPTION) {
        unsigned int j;
        rk = aes->key;

        /* invert the order of the round keys: */
        for (i = 0, j = 4* aes->rounds; i < j; i += 4, j -= 4) {
            temp = rk[i    ]; rk[i    ] = rk[j    ]; rk[j    ] = temp;
            temp = rk[i + 1]; rk[i + 1] = rk[j + 1]; rk[j + 1] = temp;
            temp = rk[i + 2]; rk[i + 2] = rk[j + 2]; rk[j + 2] = temp;
            temp = rk[i + 3]; rk[i + 3] = rk[j + 3]; rk[j + 3] = temp;
        }
        ForceZero(&temp, sizeof(temp));
#if !defined(WOLFSSL_AES_SMALL_TABLES)
        /* apply the inverse MixColumn transform to all round keys but the
           first and the last: */
        for (i = 1; i < aes->rounds; i++) {
            rk += 4;
            rk[0] =
                GetTable(Td[0], GetTable(Te[1], GETBYTE(rk[0], 3)) & 0xff) ^
                GetTable(Td[1], GetTable(Te[1], GETBYTE(rk[0], 2)) & 0xff) ^
                GetTable(Td[2], GetTable(Te[1], GETBYTE(rk[0], 1)) & 0xff) ^
                GetTable(Td[3], GetTable(Te[1], GETBYTE(rk[0], 0)) & 0xff);
            rk[1] =
                GetTable(Td[0], GetTable(Te[1], GETBYTE(rk[1], 3)) & 0xff) ^
                GetTable(Td[1], GetTable(Te[1], GETBYTE(rk[1], 2)) & 0xff) ^
                GetTable(Td[2], GetTable(Te[1], GETBYTE(rk[1], 1)) & 0xff) ^
                GetTable(Td[3], GetTable(Te[1], GETBYTE(rk[1], 0)) & 0xff);
            rk[2] =
                GetTable(Td[0], GetTable(Te[1], GETBYTE(rk[2], 3)) & 0xff) ^
                GetTable(Td[1], GetTable(Te[1], GETBYTE(rk[2], 2)) & 0xff) ^
                GetTable(Td[2], GetTable(Te[1], GETBYTE(rk[2], 1)) & 0xff) ^
                GetTable(Td[3], GetTable(Te[1], GETBYTE(rk[2], 0)) & 0xff);
            rk[3] =
                GetTable(Td[0], GetTable(Te[1], GETBYTE(rk[3], 3)) & 0xff) ^
                GetTable(Td[1], GetTable(Te[1], GETBYTE(rk[3], 2)) & 0xff) ^
                GetTable(Td[2], GetTable(Te[1], GETBYTE(rk[3], 1)) & 0xff) ^
                GetTable(Td[3], GetTable(Te[1], GETBYTE(rk[3], 0)) & 0xff);
        }
#endif
    }
#else
    (void)dir;
#endif /* HAVE_AES_DECRYPT */
    (void)temp;
#endif /* NEED_AES_TABLES */

#if defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
    /* SCE hardware wants the raw (unexpanded) key in aes->key */
    XMEMCPY((byte*)aes->key, userKey, keylen);
    if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag == CRYPTO_WORD_ENDIAN_BIG) {
        ByteReverseWords(aes->key, aes->key, 32);
    }
#endif

    ret = wc_AesSetIV(aes, iv);

#if defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))
    /* mark /dev/crypto session as not yet opened */
    aes->ctx.cfd = -1;
#endif
#ifdef WOLFSSL_IMX6_CAAM_BLOB
    /* scrub the unwrapped key material from the stack */
    ForceZero(local, sizeof(local));
#endif
#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(&temp, sizeof(temp));
#endif
    return ret;
}
  3029. int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen,
  3030. const byte* iv, int dir)
  3031. {
  3032. if (aes == NULL) {
  3033. return BAD_FUNC_ARG;
  3034. }
  3035. if (keylen > sizeof(aes->key)) {
  3036. return BAD_FUNC_ARG;
  3037. }
  3038. return wc_AesSetKeyLocal(aes, userKey, keylen, iv, dir, 1);
  3039. }
  3040. #if defined(WOLFSSL_AES_DIRECT) || defined(WOLFSSL_AES_COUNTER)
  3041. /* AES-CTR and AES-DIRECT need to use this for key setup */
  3042. /* This function allows key sizes that are not 128/192/256 bits */
  3043. int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen,
  3044. const byte* iv, int dir)
  3045. {
  3046. if (aes == NULL) {
  3047. return BAD_FUNC_ARG;
  3048. }
  3049. if (keylen > sizeof(aes->key)) {
  3050. return BAD_FUNC_ARG;
  3051. }
  3052. return wc_AesSetKeyLocal(aes, userKey, keylen, iv, dir, 0);
  3053. }
  3054. #endif /* WOLFSSL_AES_DIRECT || WOLFSSL_AES_COUNTER */
  3055. #endif /* wc_AesSetKey block */
  3056. /* wc_AesSetIV is shared between software and hardware */
  3057. int wc_AesSetIV(Aes* aes, const byte* iv)
  3058. {
  3059. if (aes == NULL)
  3060. return BAD_FUNC_ARG;
  3061. if (iv)
  3062. XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE);
  3063. else
  3064. XMEMSET(aes->reg, 0, AES_BLOCK_SIZE);
  3065. #if defined(WOLFSSL_AES_COUNTER) || defined(WOLFSSL_AES_CFB) || \
  3066. defined(WOLFSSL_AES_OFB) || defined(WOLFSSL_AES_XTS)
  3067. /* Clear any unused bytes from last cipher op. */
  3068. aes->left = 0;
  3069. #endif
  3070. return 0;
  3071. }
  3072. /* AES-DIRECT */
  3073. #if defined(WOLFSSL_AES_DIRECT)
  3074. #if defined(HAVE_COLDFIRE_SEC)
  3075. #error "Coldfire SEC doesn't yet support AES direct"
  3076. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  3077. !defined(WOLFSSL_QNX_CAAM)
  3078. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  3079. #elif defined(WOLFSSL_AFALG)
  3080. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  3081. #elif defined(WOLFSSL_DEVCRYPTO_AES)
/* implemented in wolfcrypt/src/port/devcrypto/devcrypto_aes.c */
  3083. #elif defined(WOLFSSL_LINUXKM) && defined(WOLFSSL_AESNI)
/* Linux-kernel-module AESNI wrapper: single-block encrypt.
 * In-kernel use of SIMD requires explicitly saving/restoring the vector
 * registers around the AESNI code path; SAVE_VECTOR_REGISTERS may itself
 * bail out (it embeds a 'return _svr_ret;' on failure). */
WARN_UNUSED_RESULT int wc_AesEncryptDirect(
    Aes* aes, byte* out, const byte* in)
{
    int ret;
    if (haveAESNI && aes->use_aesni)
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
    ret = wc_AesEncrypt(aes, in, out);
    if (haveAESNI && aes->use_aesni)
        RESTORE_VECTOR_REGISTERS();
    return ret;
}
  3095. /* vector reg save/restore is explicit in all below calls to
  3096. * wc_Aes{En,De}cryptDirect(), so bypass the public version with a
  3097. * macro.
  3098. */
  3099. #define wc_AesEncryptDirect(aes, out, in) wc_AesEncrypt(aes, in, out)
  3100. #ifdef HAVE_AES_DECRYPT
  3101. /* Allow direct access to one block decrypt */
/* Allow direct access to one block decrypt */
/* Linux-kernel-module AESNI wrapper: single-block decrypt with explicit
 * vector-register save/restore (see wc_AesEncryptDirect above). */
WARN_UNUSED_RESULT int wc_AesDecryptDirect(
    Aes* aes, byte* out, const byte* in)
{
    int ret;
    if (haveAESNI && aes->use_aesni)
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
    ret = wc_AesDecrypt(aes, in, out);
    if (haveAESNI && aes->use_aesni)
        RESTORE_VECTOR_REGISTERS();
    return ret;
}
  3113. #define wc_AesDecryptDirect(aes, out, in) wc_AesDecrypt(aes, in, out)
  3114. #endif /* HAVE_AES_DECRYPT */
  3115. #else
  3116. /* Allow direct access to one block encrypt */
  3117. int wc_AesEncryptDirect(Aes* aes, byte* out, const byte* in)
  3118. {
  3119. return wc_AesEncrypt(aes, in, out);
  3120. }
  3121. #ifdef HAVE_AES_DECRYPT
  3122. /* Allow direct access to one block decrypt */
  3123. int wc_AesDecryptDirect(Aes* aes, byte* out, const byte* in)
  3124. {
  3125. return wc_AesDecrypt(aes, in, out);
  3126. }
  3127. #endif /* HAVE_AES_DECRYPT */
  3128. #endif /* AES direct block */
  3129. #endif /* WOLFSSL_AES_DIRECT */
  3130. /* AES-CBC */
  3131. #ifdef HAVE_AES_CBC
  3132. #if defined(STM32_CRYPTO)
  3133. #ifdef WOLFSSL_STM32_CUBEMX
/* AES-CBC encrypt using the STM32 CubeMX HAL CRYP peripheral.
 * Processes whole AES blocks only; any trailing partial block is ignored
 * unless WOLFSSL_AES_CBC_LENGTH_CHECKS is set, in which case a
 * non-block-multiple length is rejected with BAD_LENGTH_E.
 * Returns 0 on success, WC_TIMEOUT_E on HAL failure, or a negative
 * init/mutex error code. */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret = 0;
    CRYP_HandleTypeDef hcryp;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0)
        return ret;

    /* hardware CRYP unit is a shared resource */
    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_CBC;
    /* HAL v2 consumes the IV as words; reverse byte order in place */
    ByteReverseWords(aes->reg, aes->reg, AES_BLOCK_SIZE);
#elif defined(STM32_CRYPTO_AES_ONLY)
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
    hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_CBC;
    hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
#endif
    /* chain from the IV stored in the context register */
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
    HAL_CRYP_Init(&hcryp);

#if defined(STM32_HAL_V2)
    ret = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in, blocks * AES_BLOCK_SIZE,
        (uint32_t*)out, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
    ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)in, blocks * AES_BLOCK_SIZE,
        out, STM32_HAL_TIMEOUT);
#else
    ret = HAL_CRYP_AESCBC_Encrypt(&hcryp, (uint8_t*)in,
                                  blocks * AES_BLOCK_SIZE,
                                  out, STM32_HAL_TIMEOUT);
#endif
    if (ret != HAL_OK) {
        ret = WC_TIMEOUT_E;
    }

    /* store iv for next call */
    /* NOTE(review): uses 'sz' (may include a partial block) rather than
     * blocks*AES_BLOCK_SIZE to locate the last ciphertext block — assumes
     * callers pass block-multiple sizes; verify. */
    XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

    HAL_CRYP_DeInit(&hcryp);
    wolfSSL_CryptHwMutexUnLock();

    return ret;
}
  3183. #ifdef HAVE_AES_DECRYPT
/* AES-CBC decrypt using the STM32 CubeMX HAL CRYP peripheral.
 * Mirrors wc_AesCbcEncrypt above; the next-call IV (last ciphertext
 * block) is saved up front in aes->tmp because in-place operation would
 * otherwise overwrite it. */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret = 0;
    CRYP_HandleTypeDef hcryp;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0)
        return ret;

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

    /* if input and output same will overwrite input iv */
    XMEMCPY(aes->tmp, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

#if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm = CRYP_AES_CBC;
    /* HAL v2 consumes the IV as words; reverse byte order in place */
    ByteReverseWords(aes->reg, aes->reg, AES_BLOCK_SIZE);
#elif defined(STM32_CRYPTO_AES_ONLY)
    /* decrypt requires an on-the-fly key-derivation pass first */
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_KEYDERIVATION_DECRYPT;
    hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_CBC;
    hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE;
#endif

    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
    HAL_CRYP_Init(&hcryp);

#if defined(STM32_HAL_V2)
    ret = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)in, blocks * AES_BLOCK_SIZE,
        (uint32_t*)out, STM32_HAL_TIMEOUT);
#elif defined(STM32_CRYPTO_AES_ONLY)
    ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)in, blocks * AES_BLOCK_SIZE,
        out, STM32_HAL_TIMEOUT);
#else
    ret = HAL_CRYP_AESCBC_Decrypt(&hcryp, (uint8_t*)in,
                                  blocks * AES_BLOCK_SIZE,
                                  out, STM32_HAL_TIMEOUT);
#endif
    if (ret != HAL_OK) {
        ret = WC_TIMEOUT_E;
    }

    /* store iv for next call */
    XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);

    HAL_CRYP_DeInit(&hcryp);
    wolfSSL_CryptHwMutexUnLock();

    return ret;
}
  3235. #endif /* HAVE_AES_DECRYPT */
  3236. #else /* Standard Peripheral Library */
/* AES-CBC encrypt using the STM32 Standard Peripheral Library CRYP unit.
 * Feeds/drains the CRYP FIFO one 16-byte block at a time. */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret;
    word32 *iv;
    CRYP_InitTypeDef cryptInit;
    CRYP_KeyInitTypeDef keyInit;
    CRYP_IVInitTypeDef ivInit;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
    if (ret != 0)
        return ret;

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

    /* reset registers to their default values */
    CRYP_DeInit();

    /* set key */
    CRYP_KeyInit(&keyInit);

    /* set iv */
    iv = aes->reg;
    CRYP_IVStructInit(&ivInit);
    /* peripheral expects the IV word-reversed; reversed in place */
    ByteReverseWords(iv, iv, AES_BLOCK_SIZE);
    ivInit.CRYP_IV0Left  = iv[0];
    ivInit.CRYP_IV0Right = iv[1];
    ivInit.CRYP_IV1Left  = iv[2];
    ivInit.CRYP_IV1Right = iv[3];
    CRYP_IVInit(&ivInit);

    /* set direction and mode */
    cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Encrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_CBC;
    CRYP_Init(&cryptInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    while (blocks--) {
        /* flush IN/OUT FIFOs */
        CRYP_FIFOFlush();

        CRYP_DataIn(*(uint32_t*)&in[0]);
        CRYP_DataIn(*(uint32_t*)&in[4]);
        CRYP_DataIn(*(uint32_t*)&in[8]);
        CRYP_DataIn(*(uint32_t*)&in[12]);

        /* wait until the complete message has been processed */
        while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

        *(uint32_t*)&out[0]  = CRYP_DataOut();
        *(uint32_t*)&out[4]  = CRYP_DataOut();
        *(uint32_t*)&out[8]  = CRYP_DataOut();
        *(uint32_t*)&out[12] = CRYP_DataOut();

        /* store iv for next call */
        /* NOTE(review): 'out + sz - AES_BLOCK_SIZE' points at the final
         * block's slot, which is only written on the last iteration; the
         * intermediate copies read not-yet-written output and only the
         * last copy matters — confirm intent. */
        XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

        sz  -= AES_BLOCK_SIZE;
        in  += AES_BLOCK_SIZE;
        out += AES_BLOCK_SIZE;
    }

    /* disable crypto processor */
    CRYP_Cmd(DISABLE);
    wolfSSL_CryptHwMutexUnLock();

    return ret;
}
  3302. #ifdef HAVE_AES_DECRYPT
/* AES-CBC decrypt using the STM32 Standard Peripheral Library CRYP unit.
 * A decrypt-direction key must first be derived in AES_Key mode before
 * switching the peripheral into AES_CBC decrypt mode. */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret;
    word32 *iv;
    CRYP_InitTypeDef cryptInit;
    CRYP_KeyInitTypeDef keyInit;
    CRYP_IVInitTypeDef ivInit;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
    if (ret != 0)
        return ret;

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

    /* if input and output same will overwrite input iv */
    XMEMCPY(aes->tmp, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

    /* reset registers to their default values */
    CRYP_DeInit();

    /* set direction and key */
    CRYP_KeyInit(&keyInit);
    cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt;
    /* key-derivation pass: prepares the decryption round keys */
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_Key;
    CRYP_Init(&cryptInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    /* wait until key has been prepared */
    while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

    /* set direction and mode */
    cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_CBC;
    CRYP_Init(&cryptInit);

    /* set iv */
    iv = aes->reg;
    CRYP_IVStructInit(&ivInit);
    /* peripheral expects the IV word-reversed; reversed in place */
    ByteReverseWords(iv, iv, AES_BLOCK_SIZE);
    ivInit.CRYP_IV0Left  = iv[0];
    ivInit.CRYP_IV0Right = iv[1];
    ivInit.CRYP_IV1Left  = iv[2];
    ivInit.CRYP_IV1Right = iv[3];
    CRYP_IVInit(&ivInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    while (blocks--) {
        /* flush IN/OUT FIFOs */
        CRYP_FIFOFlush();

        CRYP_DataIn(*(uint32_t*)&in[0]);
        CRYP_DataIn(*(uint32_t*)&in[4]);
        CRYP_DataIn(*(uint32_t*)&in[8]);
        CRYP_DataIn(*(uint32_t*)&in[12]);

        /* wait until the complete message has been processed */
        while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

        *(uint32_t*)&out[0]  = CRYP_DataOut();
        *(uint32_t*)&out[4]  = CRYP_DataOut();
        *(uint32_t*)&out[8]  = CRYP_DataOut();
        *(uint32_t*)&out[12] = CRYP_DataOut();

        /* store iv for next call (saved last-ciphertext block) */
        XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);

        in  += AES_BLOCK_SIZE;
        out += AES_BLOCK_SIZE;
    }

    /* disable crypto processor */
    CRYP_Cmd(DISABLE);
    wolfSSL_CryptHwMutexUnLock();

    return ret;
}
  3376. #endif /* HAVE_AES_DECRYPT */
  3377. #endif /* WOLFSSL_STM32_CUBEMX */
  3378. #elif defined(HAVE_COLDFIRE_SEC)
/* Shared AES-CBC worker for the Coldfire SEC engine.
 * Builds a SEC descriptor (key, IV, in/out buffers), kicks the engine,
 * and polls for completion; processes the data in AES_BUFFER_SIZE chunks.
 * 'descHeader' selects encrypt vs decrypt (SEC_DESC_AES_CBC_{EN,DE}CRYPT).
 * Serialized globally via Mutex_AesSEC since the descriptor/buffers are
 * shared statics. Returns 0 on success or BAD_FUNC_ARG/BAD_LENGTH_E. */
static WARN_UNUSED_RESULT int wc_AesCbcCrypt(
    Aes* aes, byte* po, const byte* pi, word32 sz, word32 descHeader)
{
#ifdef DEBUG_WOLFSSL
    /* NOTE(review): 'i' is printed below but never assigned — its value
     * is indeterminate in the debug output; confirm/fix upstream. */
    int i; int stat1, stat2; int ret;
#endif

    int size;
    volatile int v;

    if ((pi == NULL) || (po == NULL))
        return BAD_FUNC_ARG;    /*wrong pointer*/

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif

    wc_LockMutex(&Mutex_AesSEC);

    /* Set descriptor for SEC */
    secDesc->length1 = 0x0;
    secDesc->pointer1 = NULL;

    secDesc->length2 = AES_BLOCK_SIZE;
    secDesc->pointer2 = (byte *)secReg; /* Initial Vector */

    /* key length (bytes) recovered from the round count */
    switch(aes->rounds) {
        case 10: secDesc->length3 = 16; break;
        case 12: secDesc->length3 = 24; break;
        case 14: secDesc->length3 = 32; break;
    }
    XMEMCPY(secKey, aes->key, secDesc->length3);

    secDesc->pointer3 = (byte *)secKey;
    secDesc->pointer4 = AESBuffIn;
    secDesc->pointer5 = AESBuffOut;
    secDesc->length6 = 0x0;
    secDesc->pointer6 = NULL;
    secDesc->length7 = 0x0;
    secDesc->pointer7 = NULL;
    secDesc->nextDescriptorPtr = NULL;

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    /* length pre-checked above, so every chunk is a full buffer */
    size = AES_BUFFER_SIZE;
#endif
    while (sz) {
        secDesc->header = descHeader;
        XMEMCPY(secReg, aes->reg, AES_BLOCK_SIZE);
#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
        sz -= AES_BUFFER_SIZE;
#else
        if (sz < AES_BUFFER_SIZE) {
            size = sz;
            sz = 0;
        } else {
            size = AES_BUFFER_SIZE;
            sz -= AES_BUFFER_SIZE;
        }
#endif

        secDesc->length4 = size;
        secDesc->length5 = size;

        XMEMCPY(AESBuffIn, pi, size);
        if(descHeader == SEC_DESC_AES_CBC_DECRYPT) {
            /* save last ciphertext block as next chaining IV before the
             * engine may overwrite it (in-place operation) */
            XMEMCPY((void*)aes->tmp, (void*)&(pi[size-AES_BLOCK_SIZE]),
                    AES_BLOCK_SIZE);
        }

        /* Point SEC to the location of the descriptor */
        MCF_SEC_FR0 = (uint32)secDesc;
        /* Initialize SEC and wait for encryption to complete */
        MCF_SEC_CCCR0 = 0x0000001a;
        /* poll SISR to determine when channel is complete */
        v=0;
        while ((secDesc->header>> 24) != 0xff) v++;

#ifdef DEBUG_WOLFSSL
        ret = MCF_SEC_SISRH;
        stat1 = MCF_SEC_AESSR;
        stat2 = MCF_SEC_AESISR;
        if (ret & 0xe0000000) {
            db_printf("Aes_Cbc(i=%d):ISRH=%08x, AESSR=%08x, "
                      "AESISR=%08x\n", i, ret, stat1, stat2);
        }
#endif

        XMEMCPY(po, AESBuffOut, size);

        if (descHeader == SEC_DESC_AES_CBC_ENCRYPT) {
            /* next IV = last ciphertext block just produced */
            XMEMCPY((void*)aes->reg, (void*)&(po[size-AES_BLOCK_SIZE]),
                    AES_BLOCK_SIZE);
        } else {
            /* next IV = last ciphertext block saved in aes->tmp */
            XMEMCPY((void*)aes->reg, (void*)aes->tmp, AES_BLOCK_SIZE);
        }

        pi += size;
        po += size;
    }

    wc_UnLockMutex(&Mutex_AesSEC);
    return 0;
}
  3467. int wc_AesCbcEncrypt(Aes* aes, byte* po, const byte* pi, word32 sz)
  3468. {
  3469. return (wc_AesCbcCrypt(aes, po, pi, sz, SEC_DESC_AES_CBC_ENCRYPT));
  3470. }
  3471. #ifdef HAVE_AES_DECRYPT
  3472. int wc_AesCbcDecrypt(Aes* aes, byte* po, const byte* pi, word32 sz)
  3473. {
  3474. return (wc_AesCbcCrypt(aes, po, pi, sz, SEC_DESC_AES_CBC_DECRYPT));
  3475. }
  3476. #endif /* HAVE_AES_DECRYPT */
  3477. #elif defined(FREESCALE_LTC)
/* AES-CBC encrypt via the NXP LTC hardware accelerator.
 * Whole blocks only; on success the last ciphertext block is saved as the
 * chaining IV for the next call. Returns 0, -1 on LTC failure, or a
 * negative error from key-size lookup / mutex lock. */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 keySize;
    status_t status;
    byte *iv, *enc_key;
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    iv      = (byte*)aes->reg;
    enc_key = (byte*)aes->key;

    status = wc_AesGetKeySize(aes, &keySize);
    if (status != 0) {
        return status;
    }

    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    status = LTC_AES_EncryptCbc(LTC_BASE, in, out, blocks * AES_BLOCK_SIZE,
        iv, enc_key, keySize);
    wolfSSL_CryptHwMutexUnLock();

    /* store iv for next call */
    if (status == kStatus_Success) {
        XMEMCPY(iv, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
    }

    return (status == kStatus_Success) ? 0 : -1;
}
  3509. #ifdef HAVE_AES_DECRYPT
/* AES-CBC decrypt via the NXP LTC hardware accelerator.
 * The last ciphertext block is saved into a temporary BEFORE decrypting,
 * because in-place operation (in == out) would overwrite it; on success it
 * becomes the chaining IV for the next call. */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 keySize;
    status_t status;
    byte* iv, *dec_key;
    byte temp_block[AES_BLOCK_SIZE];
    word32 blocks = (sz / AES_BLOCK_SIZE);

#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    if (sz % AES_BLOCK_SIZE) {
        return BAD_LENGTH_E;
    }
#endif
    if (blocks == 0)
        return 0;

    iv      = (byte*)aes->reg;
    dec_key = (byte*)aes->key;

    status = wc_AesGetKeySize(aes, &keySize);
    if (status != 0) {
        return status;
    }

    /* get IV for next call */
    XMEMCPY(temp_block, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    /* LTC derives the decrypt schedule from the encrypt key
     * (kLTC_EncryptKey) */
    status = LTC_AES_DecryptCbc(LTC_BASE, in, out, blocks * AES_BLOCK_SIZE,
        iv, dec_key, keySize, kLTC_EncryptKey);
    wolfSSL_CryptHwMutexUnLock();

    /* store IV for next call */
    if (status == kStatus_Success) {
        XMEMCPY(iv, temp_block, AES_BLOCK_SIZE);
    }

    return (status == kStatus_Success) ? 0 : -1;
}
  3544. #endif /* HAVE_AES_DECRYPT */
  3545. #elif defined(FREESCALE_MMCAU)
  3546. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3547. {
  3548. int offset = 0;
  3549. byte *iv;
  3550. byte temp_block[AES_BLOCK_SIZE];
  3551. word32 blocks = (sz / AES_BLOCK_SIZE);
  3552. int ret;
  3553. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3554. if (sz % AES_BLOCK_SIZE) {
  3555. return BAD_LENGTH_E;
  3556. }
  3557. #endif
  3558. if (blocks == 0)
  3559. return 0;
  3560. iv = (byte*)aes->reg;
  3561. while (blocks--) {
  3562. XMEMCPY(temp_block, in + offset, AES_BLOCK_SIZE);
  3563. /* XOR block with IV for CBC */
  3564. xorbuf(temp_block, iv, AES_BLOCK_SIZE);
  3565. ret = wc_AesEncrypt(aes, temp_block, out + offset);
  3566. if (ret != 0)
  3567. return ret;
  3568. offset += AES_BLOCK_SIZE;
  3569. /* store IV for next block */
  3570. XMEMCPY(iv, out + offset - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3571. }
  3572. return 0;
  3573. }
  3574. #ifdef HAVE_AES_DECRYPT
  3575. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3576. {
  3577. int offset = 0;
  3578. byte* iv;
  3579. byte temp_block[AES_BLOCK_SIZE];
  3580. word32 blocks = (sz / AES_BLOCK_SIZE);
  3581. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3582. if (sz % AES_BLOCK_SIZE) {
  3583. return BAD_LENGTH_E;
  3584. }
  3585. #endif
  3586. if (blocks == 0)
  3587. return 0;
  3588. iv = (byte*)aes->reg;
  3589. while (blocks--) {
  3590. XMEMCPY(temp_block, in + offset, AES_BLOCK_SIZE);
  3591. wc_AesDecrypt(aes, in + offset, out + offset);
  3592. /* XOR block with IV for CBC */
  3593. xorbuf(out + offset, iv, AES_BLOCK_SIZE);
  3594. /* store IV for next block */
  3595. XMEMCPY(iv, temp_block, AES_BLOCK_SIZE);
  3596. offset += AES_BLOCK_SIZE;
  3597. }
  3598. return 0;
  3599. }
  3600. #endif /* HAVE_AES_DECRYPT */
  3601. #elif defined(WOLFSSL_PIC32MZ_CRYPT)
  3602. int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3603. {
  3604. int ret;
  3605. if (sz == 0)
  3606. return 0;
  3607. /* hardware fails on input that is not a multiple of AES block size */
  3608. if (sz % AES_BLOCK_SIZE != 0) {
  3609. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3610. return BAD_LENGTH_E;
  3611. #else
  3612. return BAD_FUNC_ARG;
  3613. #endif
  3614. }
  3615. ret = wc_Pic32AesCrypt(
  3616. aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
  3617. out, in, sz, PIC32_ENCRYPTION,
  3618. PIC32_ALGO_AES, PIC32_CRYPTOALGO_RCBC);
  3619. /* store iv for next call */
  3620. if (ret == 0) {
  3621. XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3622. }
  3623. return ret;
  3624. }
  3625. #ifdef HAVE_AES_DECRYPT
  3626. int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  3627. {
  3628. int ret;
  3629. byte scratch[AES_BLOCK_SIZE];
  3630. if (sz == 0)
  3631. return 0;
  3632. /* hardware fails on input that is not a multiple of AES block size */
  3633. if (sz % AES_BLOCK_SIZE != 0) {
  3634. #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
  3635. return BAD_LENGTH_E;
  3636. #else
  3637. return BAD_FUNC_ARG;
  3638. #endif
  3639. }
  3640. XMEMCPY(scratch, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
  3641. ret = wc_Pic32AesCrypt(
  3642. aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
  3643. out, in, sz, PIC32_DECRYPTION,
  3644. PIC32_ALGO_AES, PIC32_CRYPTOALGO_RCBC);
  3645. /* store iv for next call */
  3646. if (ret == 0) {
  3647. XMEMCPY((byte*)aes->reg, scratch, AES_BLOCK_SIZE);
  3648. }
  3649. return ret;
  3650. }
  3651. #endif /* HAVE_AES_DECRYPT */
  3652. #elif defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
  3653. !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_AES)
/* AES-CBC encrypt: thin wrapper over the ESP32 hardware AES port
 * (wc_esp32AesCbcEncrypt); all length/IV handling is done there. */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    return wc_esp32AesCbcEncrypt(aes, out, in, sz);
}
/* AES-CBC decrypt: thin wrapper over the ESP32 hardware AES port
 * (wc_esp32AesCbcDecrypt); all length/IV handling is done there. */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    return wc_esp32AesCbcDecrypt(aes, out, in, sz);
}
  3662. #elif defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
/* AES-CBC encrypt on CryptoCell via SaSi_AesBlock.
 * NOTE(review): encrypt and decrypt wrappers issue the identical call;
 * presumably the direction and mode were fixed in aes->ctx.user_ctx at
 * key-init time — confirm against the SaSi init code. */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    return SaSi_AesBlock(&aes->ctx.user_ctx, (uint8_t*)in, sz, out);
}
/* AES-CBC decrypt on CryptoCell via SaSi_AesBlock.
 * NOTE(review): identical call to the encrypt wrapper; the direction is
 * presumably carried by aes->ctx.user_ctx — verify at the init site. */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    return SaSi_AesBlock(&aes->ctx.user_ctx, (uint8_t*)in, sz, out);
}
  3671. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  3672. !defined(WOLFSSL_QNX_CAAM)
  3673. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  3674. #elif defined(WOLFSSL_AFALG)
  3675. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  3676. #elif defined(WOLFSSL_KCAPI_AES) && !defined(WOLFSSL_NO_KCAPI_AES_CBC)
  3677. /* implemented in wolfcrypt/src/port/kcapi/kcapi_aes.c */
  3678. #elif defined(WOLFSSL_DEVCRYPTO_CBC)
  3679. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  3680. #elif defined(WOLFSSL_SILABS_SE_ACCEL)
  3681. /* implemented in wolfcrypt/src/port/silabs/silabs_hash.c */
  3682. #elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
  3683. /* implemented in wolfcrypt/src/port/psa/psa_aes.c */
  3684. #else
/* Software AES - CBC Encrypt
 *
 * Encrypts sz bytes (whole AES blocks only) from in to out, chaining with
 * the IV held in aes->reg, which is updated to the last ciphertext block.
 * Before the software loop, the call may be routed to a crypto callback,
 * async hardware, SE050, or AES-NI, depending on build options.
 *
 * returns 0 on success, BAD_FUNC_ARG on NULL args, BAD_LENGTH_E for a
 * partial block (strict mode), or a device/encrypt error code.
 */
int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 blocks;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    if (sz == 0) {
        return 0;
    }

    blocks = sz / AES_BLOCK_SIZE;
#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
    /* strict mode: reject a trailing partial block; otherwise it is
     * silently ignored (only whole blocks are processed) */
    if (sz % AES_BLOCK_SIZE) {
        WOLFSSL_ERROR_VERBOSE(BAD_LENGTH_E);
        return BAD_LENGTH_E;
    }
#endif

#ifdef WOLFSSL_IMXRT_DCP
    /* Implemented in wolfcrypt/src/port/nxp/dcp_port.c */
    if (aes->keylen == 16)
        return DCPAesCbcEncrypt(aes, out, in, sz);
#endif
#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret = wc_CryptoCb_AesCbcEncrypt(aes, out, in, sz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif
#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    /* if async and byte count above threshold */
    if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
                                            sz >= WC_ASYNC_THRESH_AES_CBC) {
    #if defined(HAVE_CAVIUM)
        return NitroxAesCbcEncrypt(aes, out, in, sz);
    #elif defined(HAVE_INTEL_QA)
        return IntelQaSymAesCbcEncrypt(&aes->asyncDev, out, in, sz,
            (const byte*)aes->devKey, aes->keylen,
            (byte*)aes->reg, AES_BLOCK_SIZE);
    #else /* WOLFSSL_ASYNC_CRYPT_SW */
        if (wc_AsyncSwInit(&aes->asyncDev, ASYNC_SW_AES_CBC_ENCRYPT)) {
            WC_ASYNC_SW* sw = &aes->asyncDev.sw;
            sw->aes.aes = aes;
            sw->aes.out = out;
            sw->aes.in = in;
            sw->aes.sz = sz;
            return WC_PENDING_E;
        }
    #endif
    }
#endif /* WOLFSSL_ASYNC_CRYPT */
#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    /* Implemented in wolfcrypt/src/port/nxp/se050_port.c */
    if (aes->useSWCrypt == 0) {
        return se050_aes_crypt(aes, in, out, sz, AES_ENCRYPTION,
                               kAlgorithm_SSS_AES_CBC);
    }
#endif
#ifdef WOLFSSL_AESNI
    if (haveAESNI) {
    #ifdef DEBUG_AESNI
        printf("about to aes cbc encrypt\n");
        printf("in  = %p\n", in);
        printf("out = %p\n", out);
        printf("aes->key = %p\n", aes->key);
        printf("aes->reg = %p\n", aes->reg);
        printf("aes->rounds = %d\n", aes->rounds);
        printf("sz = %d\n", sz);
    #endif

        /* check alignment, decrypt doesn't need alignment */
        if ((wc_ptr_t)in % AESNI_ALIGN) {
        #ifndef NO_WOLFSSL_ALLOC_ALIGN
            /* copy input into an AESNI_ALIGN-aligned scratch buffer,
             * encrypt in place there, then copy out */
            byte* tmp = (byte*)XMALLOC(sz + AES_BLOCK_SIZE + AESNI_ALIGN,
                                        aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
            byte* tmp_align;
            if (tmp == NULL) return MEMORY_E;

            tmp_align = tmp + (AESNI_ALIGN - ((wc_ptr_t)tmp % AESNI_ALIGN));
            XMEMCPY(tmp_align, in, sz);
            /* on SAVE failure, free the scratch before bailing out */
            SAVE_VECTOR_REGISTERS(XFREE(tmp, aes->heap, DYNAMIC_TYPE_TMP_BUFFER); return _svr_ret;);
            AES_CBC_encrypt(tmp_align, tmp_align, (byte*)aes->reg, sz,
                            (byte*)aes->key, (int)aes->rounds);
            RESTORE_VECTOR_REGISTERS();
            /* store iv for next call */
            XMEMCPY(aes->reg, tmp_align + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);

            XMEMCPY(out, tmp_align, sz);
            XFREE(tmp, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
            return 0;
        #else
            WOLFSSL_MSG("AES-CBC encrypt with bad alignment");
            WOLFSSL_ERROR_VERBOSE(BAD_ALIGN_E);
            return BAD_ALIGN_E;
        #endif
        }

        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_CBC_encrypt(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
                        (int)aes->rounds);
        RESTORE_VECTOR_REGISTERS();
        /* store iv for next call */
        XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
        return 0;
    }
#endif

    /* portable software fallback: encrypt the chaining register in place
     * (IV XOR plaintext), then copy the result out; aes->reg ends up
     * holding the last ciphertext block, i.e. the next IV */
    while (blocks--) {
        int ret;
        xorbuf((byte*)aes->reg, in, AES_BLOCK_SIZE);
        ret = wc_AesEncrypt(aes, (byte*)aes->reg, (byte*)aes->reg);
        if (ret != 0)
            return ret;
        XMEMCPY(out, aes->reg, AES_BLOCK_SIZE);

        out += AES_BLOCK_SIZE;
        in  += AES_BLOCK_SIZE;
    }

    return 0;
}
  3803. #ifdef HAVE_AES_DECRYPT
/* Software AES - CBC Decrypt
 *
 * Decrypts sz bytes (must be a multiple of AES_BLOCK_SIZE) from in to out,
 * chaining with the IV in aes->reg, which is updated to the last ciphertext
 * block. May route to a crypto callback, async hardware, SE050, or AES-NI
 * depending on build options.
 *
 * returns 0 on success, BAD_FUNC_ARG/BAD_LENGTH_E on bad arguments or
 * length, or a device/decrypt error code.
 */
int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 blocks;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    if (sz == 0) {
        return 0;
    }

    blocks = sz / AES_BLOCK_SIZE;
    /* unlike encrypt, a partial block is always rejected here; only the
     * error code differs by build option */
    if (sz % AES_BLOCK_SIZE) {
#ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS
        return BAD_LENGTH_E;
#else
        return BAD_FUNC_ARG;
#endif
    }

#ifdef WOLFSSL_IMXRT_DCP
    /* Implemented in wolfcrypt/src/port/nxp/dcp_port.c */
    if (aes->keylen == 16)
        return DCPAesCbcDecrypt(aes, out, in, sz);
#endif
#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret = wc_CryptoCb_AesCbcDecrypt(aes, out, in, sz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif
#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    /* if async and byte count above threshold */
    if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
                                            sz >= WC_ASYNC_THRESH_AES_CBC) {
    #if defined(HAVE_CAVIUM)
        return NitroxAesCbcDecrypt(aes, out, in, sz);
    #elif defined(HAVE_INTEL_QA)
        return IntelQaSymAesCbcDecrypt(&aes->asyncDev, out, in, sz,
            (const byte*)aes->devKey, aes->keylen,
            (byte*)aes->reg, AES_BLOCK_SIZE);
    #else /* WOLFSSL_ASYNC_CRYPT_SW */
        if (wc_AsyncSwInit(&aes->asyncDev, ASYNC_SW_AES_CBC_DECRYPT)) {
            WC_ASYNC_SW* sw = &aes->asyncDev.sw;
            sw->aes.aes = aes;
            sw->aes.out = out;
            sw->aes.in = in;
            sw->aes.sz = sz;
            return WC_PENDING_E;
        }
    #endif
    }
#endif
#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    /* Implemented in wolfcrypt/src/port/nxp/se050_port.c */
    if (aes->useSWCrypt == 0) {
        return se050_aes_crypt(aes, in, out, sz, AES_DECRYPTION,
                               kAlgorithm_SSS_AES_CBC);
    }
#endif
#ifdef WOLFSSL_AESNI
    if (haveAESNI) {
    #ifdef DEBUG_AESNI
        printf("about to aes cbc decrypt\n");
        printf("in  = %p\n", in);
        printf("out = %p\n", out);
        printf("aes->key = %p\n", aes->key);
        printf("aes->reg = %p\n", aes->reg);
        printf("aes->rounds = %d\n", aes->rounds);
        printf("sz = %d\n", sz);
    #endif

        /* if input and output same will overwrite input iv */
        XMEMCPY(aes->tmp, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
    #if defined(WOLFSSL_AESNI_BY4) || defined(WOLFSSL_X86_BUILD)
        AES_CBC_decrypt_by4(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
                            aes->rounds);
    #elif defined(WOLFSSL_AESNI_BY6)
        AES_CBC_decrypt_by6(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
                            aes->rounds);
    #else /* WOLFSSL_AESNI_BYx */
        AES_CBC_decrypt_by8(in, out, (byte*)aes->reg, sz, (byte*)aes->key,
                            (int)aes->rounds);
    #endif /* WOLFSSL_AESNI_BYx */
        /* store iv for next call */
        RESTORE_VECTOR_REGISTERS();
        XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
        return 0;
    }
#endif

    /* portable software fallback: ciphertext is saved in aes->tmp first so
     * in-place (in == out) decryption keeps the next chaining value */
    while (blocks--) {
        int ret;
        XMEMCPY(aes->tmp, in, AES_BLOCK_SIZE);
        ret = wc_AesDecrypt(aes, (byte*)aes->tmp, out);
        if (ret != 0)
            return ret;
        xorbuf(out, (byte*)aes->reg, AES_BLOCK_SIZE);
        /* store iv for next call */
        XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);

        out += AES_BLOCK_SIZE;
        in  += AES_BLOCK_SIZE;
    }

    return 0;
}
  3911. #endif /* HAVE_AES_DECRYPT */
  3912. #endif /* AES-CBC block */
  3913. #endif /* HAVE_AES_CBC */
  3914. /* AES-CTR */
  3915. #if defined(WOLFSSL_AES_COUNTER)
  3916. #ifdef STM32_CRYPTO
  3917. #define NEED_AES_CTR_SOFT
  3918. #define XTRANSFORM_AESCTRBLOCK wc_AesCtrEncryptBlock
/* Encrypt exactly one AES block in CTR mode on STM32 hardware.
 *
 * Used as XTRANSFORM_AESCTRBLOCK by the software CTR driver, which manages
 * the counter in aes->reg and partial-block state itself. Three hardware
 * variants are supported: CubeMX HAL v2, CubeMX AES-only HAL, and the
 * Standard Peripheral Library.
 *
 * returns 0 on success, a mutex/init error code, or WC_TIMEOUT_E when the
 * HAL reports failure.
 */
int wc_AesCtrEncryptBlock(Aes* aes, byte* out, const byte* in)
{
    int ret = 0;
#ifdef WOLFSSL_STM32_CUBEMX
    CRYP_HandleTypeDef hcryp;
    #ifdef STM32_HAL_V2
    /* HAL v2 wants the IV as big-endian words, so keep a local copy */
    word32 iv[AES_BLOCK_SIZE/sizeof(word32)];
    #endif
#else
    word32 *iv;
    CRYP_InitTypeDef cryptInit;
    CRYP_KeyInitTypeDef keyInit;
    CRYP_IVInitTypeDef ivInit;
#endif

#ifdef WOLFSSL_STM32_CUBEMX
    ret = wc_Stm32_Aes_Init(aes, &hcryp);
    if (ret != 0) {
        return ret;
    }

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

    #if defined(STM32_HAL_V2)
    hcryp.Init.Algorithm  = CRYP_AES_CTR;
    ByteReverseWords(iv, aes->reg, AES_BLOCK_SIZE);
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)iv;
    #elif defined(STM32_CRYPTO_AES_ONLY)
    hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
    hcryp.Init.ChainingMode  = CRYP_CHAINMODE_AES_CTR;
    hcryp.Init.KeyWriteFlag  = CRYP_KEY_WRITE_ENABLE;
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
    #else
    hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg;
    #endif
    HAL_CRYP_Init(&hcryp);

    #if defined(STM32_HAL_V2)
    ret = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in, AES_BLOCK_SIZE,
        (uint32_t*)out, STM32_HAL_TIMEOUT);
    #elif defined(STM32_CRYPTO_AES_ONLY)
    ret = HAL_CRYPEx_AES(&hcryp, (byte*)in, AES_BLOCK_SIZE,
        out, STM32_HAL_TIMEOUT);
    #else
    ret = HAL_CRYP_AESCTR_Encrypt(&hcryp, (byte*)in, AES_BLOCK_SIZE,
        out, STM32_HAL_TIMEOUT);
    #endif
    if (ret != HAL_OK) {
        ret = WC_TIMEOUT_E;
    }
    HAL_CRYP_DeInit(&hcryp);

#else /* Standard Peripheral Library */
    ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit);
    if (ret != 0) {
        return ret;
    }

    ret = wolfSSL_CryptHwMutexLock();
    if (ret != 0) {
        return ret;
    }

    /* reset registers to their default values */
    CRYP_DeInit();

    /* set key */
    CRYP_KeyInit(&keyInit);

    /* set iv */
    iv = aes->reg;
    CRYP_IVStructInit(&ivInit);
    ivInit.CRYP_IV0Left  = ByteReverseWord32(iv[0]);
    ivInit.CRYP_IV0Right = ByteReverseWord32(iv[1]);
    ivInit.CRYP_IV1Left  = ByteReverseWord32(iv[2]);
    ivInit.CRYP_IV1Right = ByteReverseWord32(iv[3]);
    CRYP_IVInit(&ivInit);

    /* set direction and mode */
    cryptInit.CRYP_AlgoDir  = CRYP_AlgoDir_Encrypt;
    cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_CTR;
    CRYP_Init(&cryptInit);

    /* enable crypto processor */
    CRYP_Cmd(ENABLE);

    /* flush IN/OUT FIFOs */
    CRYP_FIFOFlush();

    /* feed one block, a word at a time */
    CRYP_DataIn(*(uint32_t*)&in[0]);
    CRYP_DataIn(*(uint32_t*)&in[4]);
    CRYP_DataIn(*(uint32_t*)&in[8]);
    CRYP_DataIn(*(uint32_t*)&in[12]);

    /* wait until the complete message has been processed */
    while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {}

    *(uint32_t*)&out[0]  = CRYP_DataOut();
    *(uint32_t*)&out[4]  = CRYP_DataOut();
    *(uint32_t*)&out[8]  = CRYP_DataOut();
    *(uint32_t*)&out[12] = CRYP_DataOut();

    /* disable crypto processor */
    CRYP_Cmd(DISABLE);
#endif /* WOLFSSL_STM32_CUBEMX */

    wolfSSL_CryptHwMutexUnLock();
    return ret;
}
  4014. #elif defined(WOLFSSL_PIC32MZ_CRYPT)
  4015. #define NEED_AES_CTR_SOFT
  4016. #define XTRANSFORM_AESCTRBLOCK wc_AesCtrEncryptBlock
  4017. int wc_AesCtrEncryptBlock(Aes* aes, byte* out, const byte* in)
  4018. {
  4019. word32 tmpIv[AES_BLOCK_SIZE / sizeof(word32)];
  4020. XMEMCPY(tmpIv, aes->reg, AES_BLOCK_SIZE);
  4021. return wc_Pic32AesCrypt(
  4022. aes->key, aes->keylen, tmpIv, AES_BLOCK_SIZE,
  4023. out, in, AES_BLOCK_SIZE,
  4024. PIC32_ENCRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_RCTR);
  4025. }
  4026. #elif defined(HAVE_COLDFIRE_SEC)
  4027. #error "Coldfire SEC doesn't currently support AES-CTR mode"
  4028. #elif defined(FREESCALE_LTC)
/* AES-CTR encrypt/decrypt using the NXP LTC hardware accelerator.
 *
 * Leftover keystream bytes from a previous partial block live in aes->tmp
 * (count in aes->left) and are consumed first; the LTC driver then handles
 * the rest and refills aes->tmp/aes->left for the next call.
 *
 * returns 0 on success or a key-size/mutex error code.
 */
int wc_AesCtrEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    int ret = 0;
    word32 keySize;
    byte *iv, *enc_key;
    byte* tmp;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    /* consume any unused bytes left in aes->tmp */
    tmp = (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left;
    while (aes->left && sz) {
        *(out++) = *(in++) ^ *(tmp++);
        aes->left--;
        sz--;
    }

    if (sz) {
        iv      = (byte*)aes->reg;
        enc_key = (byte*)aes->key;

        ret = wc_AesGetKeySize(aes, &keySize);
        if (ret != 0)
            return ret;

        ret = wolfSSL_CryptHwMutexLock();
        if (ret != 0)
            return ret;
        /* NOTE(review): the LTC_AES_CryptCtr status is not captured, so a
         * hardware failure here is reported as success — confirm whether
         * the driver can fail and whether that should map to an error */
        LTC_AES_CryptCtr(LTC_BASE, in, out, sz,
            iv, enc_key, keySize, (byte*)aes->tmp,
            (uint32_t*)&aes->left);
        wolfSSL_CryptHwMutexUnLock();
    }

    return ret;
}
  4061. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  4062. !defined(WOLFSSL_QNX_CAAM)
  4063. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  4064. #elif defined(WOLFSSL_AFALG)
  4065. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  4066. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  4067. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  4068. #elif defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
  4069. !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_AES)
  4070. /* esp32 doesn't support CRT mode by hw. */
  4071. /* use aes ecnryption plus sw implementation */
  4072. #define NEED_AES_CTR_SOFT
  4073. #elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
  4074. /* implemented in wolfcrypt/src/port/psa/psa_aes.c */
  4075. #else
  4076. /* Use software based AES counter */
  4077. #define NEED_AES_CTR_SOFT
  4078. #endif
  4079. #ifdef NEED_AES_CTR_SOFT
  4080. /* Increment AES counter */
  4081. static WC_INLINE void IncrementAesCounter(byte* inOutCtr)
  4082. {
  4083. /* in network byte order so start at end and work back */
  4084. int i;
  4085. for (i = AES_BLOCK_SIZE - 1; i >= 0; i--) {
  4086. if (++inOutCtr[i]) /* we're done unless we overflow */
  4087. return;
  4088. }
  4089. }
/* Software AES - CTR Encrypt
 *
 * CTR is symmetric, so this also decrypts. The counter lives in aes->reg;
 * keystream bytes left over from a partial block are kept in aes->tmp with
 * the unused count in aes->left and consumed before generating new blocks.
 *
 * returns 0 on success, BAD_FUNC_ARG on NULL args, or an encrypt/device
 * error code.
 */
int wc_AesCtrEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
{
    byte scratch[AES_BLOCK_SIZE];
    int ret;
    word32 processed;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret = wc_CryptoCb_AesCtrEncrypt(aes, out, in, sz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

    /* consume any unused bytes left in aes->tmp */
    processed = min(aes->left, sz);
    xorbufout(out, in, (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left,
              processed);
    out += processed;
    in += processed;
    aes->left -= processed;
    sz -= processed;

#if defined(HAVE_AES_ECB) && !defined(WOLFSSL_PIC32MZ_CRYPT) && \
    !defined(XTRANSFORM_AESCTRBLOCK)
    /* fast path (needs out != in): write the counter sequence into out,
     * ECB-encrypt it in place to produce the keystream, then XOR in the
     * plaintext */
    if (in != out && sz >= AES_BLOCK_SIZE) {
        word32 blocks = sz / AES_BLOCK_SIZE;
        byte* counter = (byte*)aes->reg;
        byte* c = out;
        while (blocks--) {
            XMEMCPY(c, counter, AES_BLOCK_SIZE);
            c += AES_BLOCK_SIZE;
            IncrementAesCounter(counter);
        }

        /* reset number of blocks and then do encryption */
        blocks = sz / AES_BLOCK_SIZE;
        /* NOTE(review): wc_AesEcbEncrypt's return value is discarded here,
         * unlike the wc_AesEncrypt calls below — confirm intentional */
        wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
        xorbuf(out, in, AES_BLOCK_SIZE * blocks);
        in += AES_BLOCK_SIZE * blocks;
        out += AES_BLOCK_SIZE * blocks;
        sz -= blocks * AES_BLOCK_SIZE;
    }
    else
#endif
    {
    #ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Add("wc_AesCtrEncrypt scratch", scratch,
            AES_BLOCK_SIZE);
    #endif
        /* do as many block size ops as possible */
        while (sz >= AES_BLOCK_SIZE) {
        #ifdef XTRANSFORM_AESCTRBLOCK
            XTRANSFORM_AESCTRBLOCK(aes, out, in);
        #else
            ret = wc_AesEncrypt(aes, (byte*)aes->reg, scratch);
            if (ret != 0) {
                ForceZero(scratch, AES_BLOCK_SIZE);
            #ifdef WOLFSSL_CHECK_MEM_ZERO
                wc_MemZero_Check(scratch, AES_BLOCK_SIZE);
            #endif
                return ret;
            }
            xorbuf(scratch, in, AES_BLOCK_SIZE);
            XMEMCPY(out, scratch, AES_BLOCK_SIZE);
        #endif
            IncrementAesCounter((byte*)aes->reg);

            out += AES_BLOCK_SIZE;
            in  += AES_BLOCK_SIZE;
            sz  -= AES_BLOCK_SIZE;
            aes->left = 0;
        }
        ForceZero(scratch, AES_BLOCK_SIZE);
    }

    /* handle non block size remaining and store unused byte count in left */
    if (sz) {
        ret = wc_AesEncrypt(aes, (byte*)aes->reg, (byte*)aes->tmp);
        if (ret != 0) {
            ForceZero(scratch, AES_BLOCK_SIZE);
        #ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(scratch, AES_BLOCK_SIZE);
        #endif
            return ret;
        }
        IncrementAesCounter((byte*)aes->reg);

        aes->left = AES_BLOCK_SIZE - sz;
        xorbufout(out, in, aes->tmp, sz);
    }

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(scratch, AES_BLOCK_SIZE);
#endif
    return 0;
}
  4187. #endif /* NEED_AES_CTR_SOFT */
  4188. #endif /* WOLFSSL_AES_COUNTER */
  4189. #endif /* !WOLFSSL_ARMASM */
  4190. /*
  4191. * The IV for AES GCM and CCM, stored in struct Aes's member reg, is comprised
  4192. * of two parts in order:
  4193. * 1. The fixed field which may be 0 or 4 bytes long. In TLS, this is set
  4194. * to the implicit IV.
  4195. * 2. The explicit IV is generated by wolfCrypt. It needs to be managed
  4196. * by wolfCrypt to ensure the IV is unique for each call to encrypt.
  4197. * The IV may be a 96-bit random value, or the 32-bit fixed value and a
  4198. * 64-bit set of 0 or random data. The final 32-bits of reg is used as a
  4199. * block counter during the encryption.
  4200. */
  4201. #if (defined(HAVE_AESGCM) && !defined(WC_NO_RNG)) || defined(HAVE_AESCCM)
  4202. static WC_INLINE void IncCtr(byte* ctr, word32 ctrSz)
  4203. {
  4204. int i;
  4205. for (i = (int)ctrSz - 1; i >= 0; i--) {
  4206. if (++ctr[i])
  4207. break;
  4208. }
  4209. }
  4210. #endif /* HAVE_AESGCM || HAVE_AESCCM */
  4211. #ifdef HAVE_AESGCM
  4212. #ifdef WOLFSSL_AESGCM_STREAM
  4213. /* Access initialization counter data. */
  4214. #define AES_INITCTR(aes) ((aes)->streamData + 0 * AES_BLOCK_SIZE)
  4215. /* Access counter data. */
  4216. #define AES_COUNTER(aes) ((aes)->streamData + 1 * AES_BLOCK_SIZE)
  4217. /* Access tag data. */
  4218. #define AES_TAG(aes) ((aes)->streamData + 2 * AES_BLOCK_SIZE)
  4219. /* Access last GHASH block. */
  4220. #define AES_LASTGBLOCK(aes) ((aes)->streamData + 3 * AES_BLOCK_SIZE)
  4221. /* Access last encrypted block. */
  4222. #define AES_LASTBLOCK(aes) ((aes)->streamData + 4 * AES_BLOCK_SIZE)
  4223. #endif
  4224. #if defined(HAVE_COLDFIRE_SEC)
  4225. #error "Coldfire SEC doesn't currently support AES-GCM mode"
  4226. #endif
  4227. #ifdef WOLFSSL_ARMASM
  4228. /* implementation is located in wolfcrypt/src/port/arm/armv8-aes.c */
  4229. #elif defined(WOLFSSL_AFALG)
  4230. /* implemented in wolfcrypt/src/port/afalg/afalg_aes.c */
  4231. #elif defined(WOLFSSL_KCAPI_AES)
  4232. /* implemented in wolfcrypt/src/port/kcapi/kcapi_aes.c */
  4233. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  4234. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  4235. #else /* software + AESNI implementation */
  4236. #if !defined(FREESCALE_LTC_AES_GCM)
  4237. static WC_INLINE void IncrementGcmCounter(byte* inOutCtr)
  4238. {
  4239. int i;
  4240. /* in network byte order so start at end and work back */
  4241. for (i = AES_BLOCK_SIZE - 1; i >= AES_BLOCK_SIZE - CTR_SZ; i--) {
  4242. if (++inOutCtr[i]) /* we're done unless we overflow */
  4243. return;
  4244. }
  4245. }
  4246. #endif /* !FREESCALE_LTC_AES_GCM */
  4247. #if defined(GCM_SMALL) || defined(GCM_TABLE) || defined(GCM_TABLE_4BIT)
  4248. static WC_INLINE void FlattenSzInBits(byte* buf, word32 sz)
  4249. {
  4250. /* Multiply the sz by 8 */
  4251. word32 szHi = (sz >> (8*sizeof(sz) - 3));
  4252. sz <<= 3;
  4253. /* copy over the words of the sz into the destination buffer */
  4254. buf[0] = (byte)(szHi >> 24);
  4255. buf[1] = (byte)(szHi >> 16);
  4256. buf[2] = (byte)(szHi >> 8);
  4257. buf[3] = (byte)szHi;
  4258. buf[4] = (byte)(sz >> 24);
  4259. buf[5] = (byte)(sz >> 16);
  4260. buf[6] = (byte)(sz >> 8);
  4261. buf[7] = (byte)sz;
  4262. }
  4263. static WC_INLINE void RIGHTSHIFTX(byte* x)
  4264. {
  4265. int i;
  4266. int carryIn = 0;
  4267. byte borrow = (0x00 - (x[15] & 0x01)) & 0xE1;
  4268. for (i = 0; i < AES_BLOCK_SIZE; i++) {
  4269. int carryOut = (x[i] & 0x01) << 7;
  4270. x[i] = (byte) ((x[i] >> 1) | carryIn);
  4271. carryIn = carryOut;
  4272. }
  4273. x[0] ^= borrow;
  4274. }
  4275. #endif /* defined(GCM_SMALL) || defined(GCM_TABLE) || defined(GCM_TABLE_4BIT) */
  4276. #ifdef GCM_TABLE
  4277. static void GenerateM0(Aes* aes)
  4278. {
  4279. int i, j;
  4280. byte (*m)[AES_BLOCK_SIZE] = aes->M0;
  4281. XMEMCPY(m[128], aes->H, AES_BLOCK_SIZE);
  4282. for (i = 64; i > 0; i /= 2) {
  4283. XMEMCPY(m[i], m[i*2], AES_BLOCK_SIZE);
  4284. RIGHTSHIFTX(m[i]);
  4285. }
  4286. for (i = 2; i < 256; i *= 2) {
  4287. for (j = 1; j < i; j++) {
  4288. XMEMCPY(m[i+j], m[i], AES_BLOCK_SIZE);
  4289. xorbuf(m[i+j], m[j], AES_BLOCK_SIZE);
  4290. }
  4291. }
  4292. XMEMSET(m[0], 0, AES_BLOCK_SIZE);
  4293. }
  4294. #elif defined(GCM_TABLE_4BIT)
  4295. #if !defined(BIG_ENDIAN_ORDER) && !defined(WC_16BIT_CPU)
  4296. static WC_INLINE void Shift4_M0(byte *r8, byte *z8)
  4297. {
  4298. int i;
  4299. for (i = 15; i > 0; i--)
  4300. r8[i] = (byte)(z8[i-1] << 4) | (byte)(z8[i] >> 4);
  4301. r8[0] = (byte)(z8[0] >> 4);
  4302. }
  4303. #endif
  4304. static void GenerateM0(Aes* aes)
  4305. {
  4306. #if !defined(BIG_ENDIAN_ORDER) && !defined(WC_16BIT_CPU)
  4307. int i;
  4308. #endif
  4309. byte (*m)[AES_BLOCK_SIZE] = aes->M0;
  4310. /* 0 times -> 0x0 */
  4311. XMEMSET(m[0x0], 0, AES_BLOCK_SIZE);
  4312. /* 1 times -> 0x8 */
  4313. XMEMCPY(m[0x8], aes->H, AES_BLOCK_SIZE);
  4314. /* 2 times -> 0x4 */
  4315. XMEMCPY(m[0x4], m[0x8], AES_BLOCK_SIZE);
  4316. RIGHTSHIFTX(m[0x4]);
  4317. /* 4 times -> 0x2 */
  4318. XMEMCPY(m[0x2], m[0x4], AES_BLOCK_SIZE);
  4319. RIGHTSHIFTX(m[0x2]);
  4320. /* 8 times -> 0x1 */
  4321. XMEMCPY(m[0x1], m[0x2], AES_BLOCK_SIZE);
  4322. RIGHTSHIFTX(m[0x1]);
  4323. /* 0x3 */
  4324. XMEMCPY(m[0x3], m[0x2], AES_BLOCK_SIZE);
  4325. xorbuf (m[0x3], m[0x1], AES_BLOCK_SIZE);
  4326. /* 0x5 -> 0x7 */
  4327. XMEMCPY(m[0x5], m[0x4], AES_BLOCK_SIZE);
  4328. xorbuf (m[0x5], m[0x1], AES_BLOCK_SIZE);
  4329. XMEMCPY(m[0x6], m[0x4], AES_BLOCK_SIZE);
  4330. xorbuf (m[0x6], m[0x2], AES_BLOCK_SIZE);
  4331. XMEMCPY(m[0x7], m[0x4], AES_BLOCK_SIZE);
  4332. xorbuf (m[0x7], m[0x3], AES_BLOCK_SIZE);
  4333. /* 0x9 -> 0xf */
  4334. XMEMCPY(m[0x9], m[0x8], AES_BLOCK_SIZE);
  4335. xorbuf (m[0x9], m[0x1], AES_BLOCK_SIZE);
  4336. XMEMCPY(m[0xa], m[0x8], AES_BLOCK_SIZE);
  4337. xorbuf (m[0xa], m[0x2], AES_BLOCK_SIZE);
  4338. XMEMCPY(m[0xb], m[0x8], AES_BLOCK_SIZE);
  4339. xorbuf (m[0xb], m[0x3], AES_BLOCK_SIZE);
  4340. XMEMCPY(m[0xc], m[0x8], AES_BLOCK_SIZE);
  4341. xorbuf (m[0xc], m[0x4], AES_BLOCK_SIZE);
  4342. XMEMCPY(m[0xd], m[0x8], AES_BLOCK_SIZE);
  4343. xorbuf (m[0xd], m[0x5], AES_BLOCK_SIZE);
  4344. XMEMCPY(m[0xe], m[0x8], AES_BLOCK_SIZE);
  4345. xorbuf (m[0xe], m[0x6], AES_BLOCK_SIZE);
  4346. XMEMCPY(m[0xf], m[0x8], AES_BLOCK_SIZE);
  4347. xorbuf (m[0xf], m[0x7], AES_BLOCK_SIZE);
  4348. #if !defined(BIG_ENDIAN_ORDER) && !defined(WC_16BIT_CPU)
  4349. for (i = 0; i < 16; i++) {
  4350. Shift4_M0(m[16+i], m[i]);
  4351. }
  4352. #endif
  4353. }
  4354. #endif /* GCM_TABLE */
/* Software AES - GCM SetKey
 *
 * Sets the AES key (16/24/32 bytes) for GCM with a zero IV, derives the
 * GHASH key H = E_K(0^128), and precomputes the M0 multiplication table
 * when a table-based GHASH is built in. CAAM-blob keys are unwrapped
 * first; AES-NI and SECO builds skip the software H/M0 derivation.
 *
 * returns 0 on success, BAD_FUNC_ARG on bad arguments or blob failure,
 * or an error from wc_AesSetKey/wc_AesEncrypt.
 */
int wc_AesGcmSetKey(Aes* aes, const byte* key, word32 len)
{
    int  ret;
    byte iv[AES_BLOCK_SIZE];

#ifdef WOLFSSL_IMX6_CAAM_BLOB
    byte   local[32];
    word32 localSz = 32;

    /* a key of base-length + blob overhead is an encrypted CAAM blob:
     * unwrap it into local[] and use the plaintext key instead */
    if (len == (16 + WC_CAAM_BLOB_SZ) ||
        len == (24 + WC_CAAM_BLOB_SZ) ||
        len == (32 + WC_CAAM_BLOB_SZ)) {
        if (wc_caamOpenBlob((byte*)key, len, local, &localSz) != 0) {
            return BAD_FUNC_ARG;
        }

        /* set local values */
        key = local;
        len = localSz;
    }
#endif

    if (!((len == 16) || (len == 24) || (len == 32)))
        return BAD_FUNC_ARG;
    if (aes == NULL) {
#ifdef WOLFSSL_IMX6_CAAM_BLOB
        ForceZero(local, sizeof(local));
#endif
        return BAD_FUNC_ARG;
    }

#ifdef OPENSSL_EXTRA
    XMEMSET(aes->aadH, 0, sizeof(aes->aadH));
    aes->aadLen = 0;
#endif

    /* GCM derives H with an all-zero block, so key setup uses a zero IV */
    XMEMSET(iv, 0, AES_BLOCK_SIZE);
    ret = wc_AesSetKey(aes, key, len, iv, AES_ENCRYPTION);

#ifdef WOLFSSL_AESGCM_STREAM
    aes->gcmKeySet = 1;
#endif
#ifdef WOLFSSL_AESNI
    /* AES-NI code generates its own H value. */
    if (haveAESNI)
        return ret;
#endif /* WOLFSSL_AESNI */
#if defined(WOLFSSL_SECO_CAAM)
    if (aes->devId == WOLFSSL_SECO_DEVID) {
        return ret;
    }
#endif /* WOLFSSL_SECO_CAAM */

#if !defined(FREESCALE_LTC_AES_GCM)
    /* H = E_K(0^128); the table-based GHASH builds M0 from it */
    if (ret == 0)
        ret = wc_AesEncrypt(aes, iv, aes->H);
    if (ret == 0) {
    #if defined(GCM_TABLE) || defined(GCM_TABLE_4BIT)
        GenerateM0(aes);
    #endif /* GCM_TABLE */
    }
#endif /* FREESCALE_LTC_AES_GCM */

#if defined(WOLFSSL_XILINX_CRYPT) || defined(WOLFSSL_AFALG_XILINX_AES)
    wc_AesGcmSetKey_ex(aes, key, len, WOLFSSL_XILINX_AES_KEY_SRC);
#endif

#ifdef WOLF_CRYPTO_CB
    /* keep a raw copy for crypto-callback devices */
    if (aes->devId != INVALID_DEVID) {
        XMEMCPY(aes->devKey, key, len);
    }
#endif

#ifdef WOLFSSL_IMX6_CAAM_BLOB
    ForceZero(local, sizeof(local));
#endif

    return ret;
}
#ifdef WOLFSSL_AESNI

#if defined(USE_INTEL_SPEEDUP)
    #define HAVE_INTEL_AVX1
    #define HAVE_INTEL_AVX2
#endif /* USE_INTEL_SPEEDUP */

/* Prototypes for the one-shot AES-GCM assembly routines (see the matching
 * .S files). Each encrypts/decrypts 'nbytes' of 'in' into 'out' while
 * authenticating 'abytes' of AAD and producing/checking a 'tbytes' tag.
 * 'nr' is the number of AES rounds for the expanded 'key'. */
void AES_GCM_encrypt(const unsigned char *in, unsigned char *out,
                     const unsigned char* addt, const unsigned char* ivec,
                     unsigned char *tag, word32 nbytes,
                     word32 abytes, word32 ibytes,
                     word32 tbytes, const unsigned char* key, int nr)
                     XASM_LINK("AES_GCM_encrypt");
#ifdef HAVE_INTEL_AVX1
/* AVX1 variant of AES_GCM_encrypt. */
void AES_GCM_encrypt_avx1(const unsigned char *in, unsigned char *out,
                          const unsigned char* addt, const unsigned char* ivec,
                          unsigned char *tag, word32 nbytes,
                          word32 abytes, word32 ibytes,
                          word32 tbytes, const unsigned char* key,
                          int nr)
                          XASM_LINK("AES_GCM_encrypt_avx1");
#ifdef HAVE_INTEL_AVX2
/* AVX2 variant of AES_GCM_encrypt. */
void AES_GCM_encrypt_avx2(const unsigned char *in, unsigned char *out,
                          const unsigned char* addt, const unsigned char* ivec,
                          unsigned char *tag, word32 nbytes,
                          word32 abytes, word32 ibytes,
                          word32 tbytes, const unsigned char* key,
                          int nr)
                          XASM_LINK("AES_GCM_encrypt_avx2");
#endif /* HAVE_INTEL_AVX2 */
#endif /* HAVE_INTEL_AVX1 */

#ifdef HAVE_AES_DECRYPT
/* Decrypt variants additionally report tag-compare success in '*res'. */
void AES_GCM_decrypt(const unsigned char *in, unsigned char *out,
                     const unsigned char* addt, const unsigned char* ivec,
                     const unsigned char *tag, word32 nbytes, word32 abytes,
                     word32 ibytes, word32 tbytes, const unsigned char* key,
                     int nr, int* res)
                     XASM_LINK("AES_GCM_decrypt");
#ifdef HAVE_INTEL_AVX1
/* AVX1 variant of AES_GCM_decrypt. */
void AES_GCM_decrypt_avx1(const unsigned char *in, unsigned char *out,
                          const unsigned char* addt, const unsigned char* ivec,
                          const unsigned char *tag, word32 nbytes,
                          word32 abytes, word32 ibytes, word32 tbytes,
                          const unsigned char* key, int nr, int* res)
                          XASM_LINK("AES_GCM_decrypt_avx1");
#ifdef HAVE_INTEL_AVX2
/* AVX2 variant of AES_GCM_decrypt. */
void AES_GCM_decrypt_avx2(const unsigned char *in, unsigned char *out,
                          const unsigned char* addt, const unsigned char* ivec,
                          const unsigned char *tag, word32 nbytes,
                          word32 abytes, word32 ibytes, word32 tbytes,
                          const unsigned char* key, int nr, int* res)
                          XASM_LINK("AES_GCM_decrypt_avx2");
#endif /* HAVE_INTEL_AVX2 */
#endif /* HAVE_INTEL_AVX1 */
#endif /* HAVE_AES_DECRYPT */
#endif /* WOLFSSL_AESNI */
  4477. #if defined(GCM_SMALL)
  4478. static void GMULT(byte* X, byte* Y)
  4479. {
  4480. byte Z[AES_BLOCK_SIZE];
  4481. byte V[AES_BLOCK_SIZE];
  4482. int i, j;
  4483. XMEMSET(Z, 0, AES_BLOCK_SIZE);
  4484. XMEMCPY(V, X, AES_BLOCK_SIZE);
  4485. for (i = 0; i < AES_BLOCK_SIZE; i++)
  4486. {
  4487. byte y = Y[i];
  4488. for (j = 0; j < 8; j++)
  4489. {
  4490. if (y & 0x80) {
  4491. xorbuf(Z, V, AES_BLOCK_SIZE);
  4492. }
  4493. RIGHTSHIFTX(V);
  4494. y = y << 1;
  4495. }
  4496. }
  4497. XMEMCPY(X, Z, AES_BLOCK_SIZE);
  4498. }
/* GHASH: compute the GCM authentication value over AAD and ciphertext.
 *
 * Computes GHASH_H(A || pad || C || pad || len64(A) || len64(C)) where H is
 * the hash subkey stored in aes->H.
 *
 * aes  AES object holding H; must be non-NULL (function returns silently
 *      otherwise).
 * a    Additional authentication data (may be NULL when aSz == 0).
 * aSz  Length of AAD in bytes.
 * c    Ciphertext (may be NULL when cSz == 0).
 * cSz  Length of ciphertext in bytes.
 * s    Output buffer receiving the authentication value.
 * sSz  Bytes of the 16-byte result to copy out.
 *      NOTE(review): assumed <= AES_BLOCK_SIZE; not validated here.
 */
void GHASH(Aes* aes, const byte* a, word32 aSz, const byte* c,
    word32 cSz, byte* s, word32 sSz)
{
    byte x[AES_BLOCK_SIZE];
    byte scratch[AES_BLOCK_SIZE];
    word32 blocks, partial;
    byte* h;

    if (aes == NULL) {
        return;
    }

    h = aes->H;
    XMEMSET(x, 0, AES_BLOCK_SIZE);

    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        while (blocks--) {
            xorbuf(x, a, AES_BLOCK_SIZE);
            GMULT(x, h);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Final short block is zero-padded to a full block. */
            XMEMSET(scratch, 0, AES_BLOCK_SIZE);
            XMEMCPY(scratch, a, partial);
            xorbuf(x, scratch, AES_BLOCK_SIZE);
            GMULT(x, h);
        }
    }

    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        while (blocks--) {
            xorbuf(x, c, AES_BLOCK_SIZE);
            GMULT(x, h);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            XMEMSET(scratch, 0, AES_BLOCK_SIZE);
            XMEMCPY(scratch, c, partial);
            xorbuf(x, scratch, AES_BLOCK_SIZE);
            GMULT(x, h);
        }
    }

    /* Hash in the lengths of A and C in bits */
    FlattenSzInBits(&scratch[0], aSz);
    FlattenSzInBits(&scratch[8], cSz);
    xorbuf(x, scratch, AES_BLOCK_SIZE);
    GMULT(x, h);

    /* Copy the result into s. */
    XMEMCPY(s, x, sSz);
}
#ifdef WOLFSSL_AESGCM_STREAM
/* No extra initialization for small implementation.
 *
 * @param [in] aes AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes)

/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H.
 *
 * @param [in, out] aes AES GCM object.
 * @param [in] block Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                     \
    do {                                                \
        xorbuf(AES_TAG(aes), block, AES_BLOCK_SIZE);    \
        GMULT(AES_TAG(aes), aes->H);                    \
    }                                                   \
    while (0)
#endif /* WOLFSSL_AESGCM_STREAM */
/* end GCM_SMALL */
#elif defined(GCM_TABLE)

/* Remainder table for byte-wise table GHASH.
 * R[b] is the 16-bit value XORed into the top of Z when the byte 'b' is
 * shifted out during reduction by the GCM polynomial
 * x^128 + x^7 + x^2 + x + 1 (reflected remainder 0xe1). */
static const byte R[256][2] = {
    {0x00, 0x00}, {0x01, 0xc2}, {0x03, 0x84}, {0x02, 0x46},
    {0x07, 0x08}, {0x06, 0xca}, {0x04, 0x8c}, {0x05, 0x4e},
    {0x0e, 0x10}, {0x0f, 0xd2}, {0x0d, 0x94}, {0x0c, 0x56},
    {0x09, 0x18}, {0x08, 0xda}, {0x0a, 0x9c}, {0x0b, 0x5e},
    {0x1c, 0x20}, {0x1d, 0xe2}, {0x1f, 0xa4}, {0x1e, 0x66},
    {0x1b, 0x28}, {0x1a, 0xea}, {0x18, 0xac}, {0x19, 0x6e},
    {0x12, 0x30}, {0x13, 0xf2}, {0x11, 0xb4}, {0x10, 0x76},
    {0x15, 0x38}, {0x14, 0xfa}, {0x16, 0xbc}, {0x17, 0x7e},
    {0x38, 0x40}, {0x39, 0x82}, {0x3b, 0xc4}, {0x3a, 0x06},
    {0x3f, 0x48}, {0x3e, 0x8a}, {0x3c, 0xcc}, {0x3d, 0x0e},
    {0x36, 0x50}, {0x37, 0x92}, {0x35, 0xd4}, {0x34, 0x16},
    {0x31, 0x58}, {0x30, 0x9a}, {0x32, 0xdc}, {0x33, 0x1e},
    {0x24, 0x60}, {0x25, 0xa2}, {0x27, 0xe4}, {0x26, 0x26},
    {0x23, 0x68}, {0x22, 0xaa}, {0x20, 0xec}, {0x21, 0x2e},
    {0x2a, 0x70}, {0x2b, 0xb2}, {0x29, 0xf4}, {0x28, 0x36},
    {0x2d, 0x78}, {0x2c, 0xba}, {0x2e, 0xfc}, {0x2f, 0x3e},
    {0x70, 0x80}, {0x71, 0x42}, {0x73, 0x04}, {0x72, 0xc6},
    {0x77, 0x88}, {0x76, 0x4a}, {0x74, 0x0c}, {0x75, 0xce},
    {0x7e, 0x90}, {0x7f, 0x52}, {0x7d, 0x14}, {0x7c, 0xd6},
    {0x79, 0x98}, {0x78, 0x5a}, {0x7a, 0x1c}, {0x7b, 0xde},
    {0x6c, 0xa0}, {0x6d, 0x62}, {0x6f, 0x24}, {0x6e, 0xe6},
    {0x6b, 0xa8}, {0x6a, 0x6a}, {0x68, 0x2c}, {0x69, 0xee},
    {0x62, 0xb0}, {0x63, 0x72}, {0x61, 0x34}, {0x60, 0xf6},
    {0x65, 0xb8}, {0x64, 0x7a}, {0x66, 0x3c}, {0x67, 0xfe},
    {0x48, 0xc0}, {0x49, 0x02}, {0x4b, 0x44}, {0x4a, 0x86},
    {0x4f, 0xc8}, {0x4e, 0x0a}, {0x4c, 0x4c}, {0x4d, 0x8e},
    {0x46, 0xd0}, {0x47, 0x12}, {0x45, 0x54}, {0x44, 0x96},
    {0x41, 0xd8}, {0x40, 0x1a}, {0x42, 0x5c}, {0x43, 0x9e},
    {0x54, 0xe0}, {0x55, 0x22}, {0x57, 0x64}, {0x56, 0xa6},
    {0x53, 0xe8}, {0x52, 0x2a}, {0x50, 0x6c}, {0x51, 0xae},
    {0x5a, 0xf0}, {0x5b, 0x32}, {0x59, 0x74}, {0x58, 0xb6},
    {0x5d, 0xf8}, {0x5c, 0x3a}, {0x5e, 0x7c}, {0x5f, 0xbe},
    {0xe1, 0x00}, {0xe0, 0xc2}, {0xe2, 0x84}, {0xe3, 0x46},
    {0xe6, 0x08}, {0xe7, 0xca}, {0xe5, 0x8c}, {0xe4, 0x4e},
    {0xef, 0x10}, {0xee, 0xd2}, {0xec, 0x94}, {0xed, 0x56},
    {0xe8, 0x18}, {0xe9, 0xda}, {0xeb, 0x9c}, {0xea, 0x5e},
    {0xfd, 0x20}, {0xfc, 0xe2}, {0xfe, 0xa4}, {0xff, 0x66},
    {0xfa, 0x28}, {0xfb, 0xea}, {0xf9, 0xac}, {0xf8, 0x6e},
    {0xf3, 0x30}, {0xf2, 0xf2}, {0xf0, 0xb4}, {0xf1, 0x76},
    {0xf4, 0x38}, {0xf5, 0xfa}, {0xf7, 0xbc}, {0xf6, 0x7e},
    {0xd9, 0x40}, {0xd8, 0x82}, {0xda, 0xc4}, {0xdb, 0x06},
    {0xde, 0x48}, {0xdf, 0x8a}, {0xdd, 0xcc}, {0xdc, 0x0e},
    {0xd7, 0x50}, {0xd6, 0x92}, {0xd4, 0xd4}, {0xd5, 0x16},
    {0xd0, 0x58}, {0xd1, 0x9a}, {0xd3, 0xdc}, {0xd2, 0x1e},
    {0xc5, 0x60}, {0xc4, 0xa2}, {0xc6, 0xe4}, {0xc7, 0x26},
    {0xc2, 0x68}, {0xc3, 0xaa}, {0xc1, 0xec}, {0xc0, 0x2e},
    {0xcb, 0x70}, {0xca, 0xb2}, {0xc8, 0xf4}, {0xc9, 0x36},
    {0xcc, 0x78}, {0xcd, 0xba}, {0xcf, 0xfc}, {0xce, 0x3e},
    {0x91, 0x80}, {0x90, 0x42}, {0x92, 0x04}, {0x93, 0xc6},
    {0x96, 0x88}, {0x97, 0x4a}, {0x95, 0x0c}, {0x94, 0xce},
    {0x9f, 0x90}, {0x9e, 0x52}, {0x9c, 0x14}, {0x9d, 0xd6},
    {0x98, 0x98}, {0x99, 0x5a}, {0x9b, 0x1c}, {0x9a, 0xde},
    {0x8d, 0xa0}, {0x8c, 0x62}, {0x8e, 0x24}, {0x8f, 0xe6},
    {0x8a, 0xa8}, {0x8b, 0x6a}, {0x89, 0x2c}, {0x88, 0xee},
    {0x83, 0xb0}, {0x82, 0x72}, {0x80, 0x34}, {0x81, 0xf6},
    {0x84, 0xb8}, {0x85, 0x7a}, {0x87, 0x3c}, {0x86, 0xfe},
    {0xa9, 0xc0}, {0xa8, 0x02}, {0xaa, 0x44}, {0xab, 0x86},
    {0xae, 0xc8}, {0xaf, 0x0a}, {0xad, 0x4c}, {0xac, 0x8e},
    {0xa7, 0xd0}, {0xa6, 0x12}, {0xa4, 0x54}, {0xa5, 0x96},
    {0xa0, 0xd8}, {0xa1, 0x1a}, {0xa3, 0x5c}, {0xa2, 0x9e},
    {0xb5, 0xe0}, {0xb4, 0x22}, {0xb6, 0x64}, {0xb7, 0xa6},
    {0xb2, 0xe8}, {0xb3, 0x2a}, {0xb1, 0x6c}, {0xb0, 0xae},
    {0xbb, 0xf0}, {0xba, 0x32}, {0xb8, 0x74}, {0xb9, 0xb6},
    {0xbc, 0xf8}, {0xbd, 0x3a}, {0xbf, 0x7c}, {0xbe, 0xbe} };
/* Multiply x by H in GF(2^128) using the 256-entry byte table m = aes->M0
 * (m[b] = b * H). Processes x one byte at a time from the least significant
 * end, shifting Z by 8 bits per step and folding the shifted-out byte back
 * in via the remainder table R.
 *
 * x  [in,out] 16-byte value; replaced with x * H.
 * m  [in]     pre-computed table from GenerateM0().
 */
static void GMULT(byte *x, byte m[256][AES_BLOCK_SIZE])
{
#if !defined(WORD64_AVAILABLE) || defined(BIG_ENDIAN_ORDER)
    /* Portable byte-wise version. */
    int i, j;
    byte Z[AES_BLOCK_SIZE];
    byte a;

    XMEMSET(Z, 0, sizeof(Z));

    for (i = 15; i > 0; i--) {
        xorbuf(Z, m[x[i]], AES_BLOCK_SIZE);
        /* Byte shifted out; fold its remainder contribution back in. */
        a = Z[15];

        for (j = 15; j > 0; j--) {
            Z[j] = Z[j-1];
        }

        Z[0] = R[a][0];
        Z[1] ^= R[a][1];
    }
    xorbuf(Z, m[x[0]], AES_BLOCK_SIZE);

    XMEMCPY(x, Z, AES_BLOCK_SIZE);
#else
    /* 64-bit version: the byte shift is done by writing 16-byte values at
     * sliding offsets into a 32-byte buffer.
     * NOTE(review): relies on unaligned word64 loads/stores through casted
     * pointers — assumes the platform tolerates this; verify for targets
     * with strict alignment. */
    byte Z[AES_BLOCK_SIZE + AES_BLOCK_SIZE];
    byte a;
    word64* pZ;
    word64* pm;
    word64* px = (word64*)(x);
    int i;

    pZ = (word64*)(Z + 15 + 1);
    pm = (word64*)(m[x[15]]);
    pZ[0] = pm[0];
    pZ[1] = pm[1];
    a = Z[16 + 15];
    Z[15] = R[a][0];
    Z[16] ^= R[a][1];

    for (i = 14; i > 0; i--) {
        pZ = (word64*)(Z + i + 1);
        pm = (word64*)(m[x[i]]);
        pZ[0] ^= pm[0];
        pZ[1] ^= pm[1];
        a = Z[16 + i];
        Z[i] = R[a][0];
        Z[i+1] ^= R[a][1];
    }

    pZ = (word64*)(Z + 1);
    pm = (word64*)(m[x[0]]);
    px[0] = pZ[0] ^ pm[0]; px[1] = pZ[1] ^ pm[1];
#endif
}
/* GHASH: compute the GCM authentication value over AAD and ciphertext,
 * using the pre-computed multiplication table aes->M0 (see GenerateM0()).
 *
 * aes  AES object holding the table; must be non-NULL.
 * a    Additional authentication data (may be NULL when aSz == 0).
 * aSz  Length of AAD in bytes.
 * c    Ciphertext (may be NULL when cSz == 0).
 * cSz  Length of ciphertext in bytes.
 * s    Output buffer receiving the authentication value.
 * sSz  Bytes of the 16-byte result to copy out.
 *      NOTE(review): assumed <= AES_BLOCK_SIZE; not validated here.
 */
void GHASH(Aes* aes, const byte* a, word32 aSz, const byte* c,
    word32 cSz, byte* s, word32 sSz)
{
    byte x[AES_BLOCK_SIZE];
    byte scratch[AES_BLOCK_SIZE];
    word32 blocks, partial;

    if (aes == NULL) {
        return;
    }

    XMEMSET(x, 0, AES_BLOCK_SIZE);

    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        while (blocks--) {
            xorbuf(x, a, AES_BLOCK_SIZE);
            GMULT(x, aes->M0);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Final short block is zero-padded to a full block. */
            XMEMSET(scratch, 0, AES_BLOCK_SIZE);
            XMEMCPY(scratch, a, partial);
            xorbuf(x, scratch, AES_BLOCK_SIZE);
            GMULT(x, aes->M0);
        }
    }

    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        while (blocks--) {
            xorbuf(x, c, AES_BLOCK_SIZE);
            GMULT(x, aes->M0);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            XMEMSET(scratch, 0, AES_BLOCK_SIZE);
            XMEMCPY(scratch, c, partial);
            xorbuf(x, scratch, AES_BLOCK_SIZE);
            GMULT(x, aes->M0);
        }
    }

    /* Hash in the lengths of A and C in bits */
    FlattenSzInBits(&scratch[0], aSz);
    FlattenSzInBits(&scratch[8], cSz);
    xorbuf(x, scratch, AES_BLOCK_SIZE);
    GMULT(x, aes->M0);

    /* Copy the result into s. */
    XMEMCPY(s, x, sSz);
}
#ifdef WOLFSSL_AESGCM_STREAM
/* No extra initialization for table implementation.
 *
 * @param [in] aes AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes)

/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H using pre-computed table.
 *
 * @param [in, out] aes AES GCM object.
 * @param [in] block Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                     \
    do {                                                \
        xorbuf(AES_TAG(aes), block, AES_BLOCK_SIZE);    \
        GMULT(AES_TAG(aes), aes->M0);                   \
    }                                                   \
    while (0)
#endif /* WOLFSSL_AESGCM_STREAM */
/* end GCM_TABLE */
  4755. #elif defined(GCM_TABLE_4BIT)
  4756. /* remainder = x^7 + x^2 + x^1 + 1 => 0xe1
  4757. * R shifts right a reverse bit pair of bytes such that:
  4758. * R(b0, b1) => b1 = (b1 >> 1) | (b0 << 7); b0 >>= 1
  4759. * 0 => 0, 0, 0, 0 => R(R(R(00,00) ^ 00,00) ^ 00,00) ^ 00,00 = 00,00
  4760. * 8 => 0, 0, 0, 1 => R(R(R(00,00) ^ 00,00) ^ 00,00) ^ e1,00 = e1,00
  4761. * 4 => 0, 0, 1, 0 => R(R(R(00,00) ^ 00,00) ^ e1,00) ^ 00,00 = 70,80
  4762. * 2 => 0, 1, 0, 0 => R(R(R(00,00) ^ e1,00) ^ 00,00) ^ 00,00 = 38,40
  4763. * 1 => 1, 0, 0, 0 => R(R(R(e1,00) ^ 00,00) ^ 00,00) ^ 00,00 = 1c,20
 * To calculate the rest, XOR result for each bit.
  4765. * e.g. 6 = 4 ^ 2 => 48,c0
  4766. *
  4767. * Second half is same values rotated by 4-bits.
  4768. */
#if defined(BIG_ENDIAN_ORDER) || defined(WC_16BIT_CPU)
/* Remainder table for the 4-bit GHASH: R[n] is the two-byte value XORed in
 * when nibble 'n' is shifted out during reduction. */
static const byte R[16][2] = {
    {0x00, 0x00}, {0x1c, 0x20}, {0x38, 0x40}, {0x24, 0x60},
    {0x70, 0x80}, {0x6c, 0xa0}, {0x48, 0xc0}, {0x54, 0xe0},
    {0xe1, 0x00}, {0xfd, 0x20}, {0xd9, 0x40}, {0xc5, 0x60},
    {0x91, 0x80}, {0x8d, 0xa0}, {0xa9, 0xc0}, {0xb5, 0xe0},
};
#else
/* Same remainders packed as little-endian 16-bit words; entries 16..31 are
 * the values pre-rotated by 4 bits (see the comment above). */
static const word16 R[32] = {
    0x0000, 0x201c, 0x4038, 0x6024,
    0x8070, 0xa06c, 0xc048, 0xe054,
    0x00e1, 0x20fd, 0x40d9, 0x60c5,
    0x8091, 0xa08d, 0xc0a9, 0xe0b5,
    0x0000, 0xc201, 0x8403, 0x4602,
    0x0807, 0xca06, 0x8c04, 0x4e05,
    0x100e, 0xd20f, 0x940d, 0x560c,
    0x1809, 0xda08, 0x9c0a, 0x5e0b,
};
#endif
  4788. /* Multiply in GF(2^128) defined by polynomial:
  4789. * x^128 + x^7 + x^2 + x^1 + 1.
  4790. *
  4791. * H: hash key = encrypt(key, 0)
  4792. * x = x * H in field
  4793. *
 * x: cumulative result
  4795. * m: 4-bit table
  4796. * [0..15] * H
  4797. */
#if defined(BIG_ENDIAN_ORDER) || defined(WC_16BIT_CPU)
/* Multiply x by H using the 16-entry nibble table m (m[n] = n * H).
 * Portable byte-wise variant: two nibbles per byte, shifting Z right by
 * 4 bits each step and folding the shifted-out nibble in via R.
 *
 * x  [in,out] 16-byte value; replaced with x * H.
 * m  [in]     4-bit table from GenerateM0().
 */
static void GMULT(byte *x, byte m[16][AES_BLOCK_SIZE])
{
    int i, j, n;
    byte Z[AES_BLOCK_SIZE];
    byte a;

    XMEMSET(Z, 0, sizeof(Z));

    for (i = 15; i >= 0; i--) {
        /* n == 0: low nibble; n == 1: high nibble of x[i]. */
        for (n = 0; n < 2; n++) {
            if (n == 0)
                xorbuf(Z, m[x[i] & 0xf], AES_BLOCK_SIZE);
            else {
                xorbuf(Z, m[x[i] >> 4], AES_BLOCK_SIZE);
                /* Last nibble processed: no final shift. */
                if (i == 0)
                    break;
            }
            a = Z[15] & 0xf;

            /* Shift Z right by 4 bits. */
            for (j = 15; j > 0; j--)
                Z[j] = (Z[j-1] << 4) | (Z[j] >> 4);
            Z[0] >>= 4;

            /* Fold the shifted-out nibble's remainder back in. */
            Z[0] ^= R[a][0];
            Z[1] ^= R[a][1];
        }
    }
    XMEMCPY(x, Z, AES_BLOCK_SIZE);
}
#elif defined(WC_32BIT_CPU)
/* Multiply x by H on a 32-bit CPU using the 32-entry nibble table
 * (m[0..15] = n * H, m[16..31] = the same values pre-rotated by 4 bits).
 * Processes one byte (two nibbles) per iteration with an 8-bit shift,
 * avoiding a second 4-bit shift per byte.
 *
 * NOTE(review): reads/writes x and the table through word32* casts —
 * assumes suitable alignment of the table and the 16-byte state.
 *
 * x  [in,out] 16-byte value; replaced with x * H.
 * m  [in]     4-bit table from GenerateM0().
 */
static WC_INLINE void GMULT(byte *x, byte m[32][AES_BLOCK_SIZE])
{
    int i;
    word32 z8[4] = {0, 0, 0, 0};
    byte a;
    word32* x8 = (word32*)x;
    word32* m8;
    byte xi;
    word32 n7, n6, n5, n4, n3, n2, n1, n0;

    for (i = 15; i > 0; i--) {
        xi = x[i];

        /* XOR in (msn * H) */
        m8 = (word32*)m[xi & 0xf];
        z8[0] ^= m8[0]; z8[1] ^= m8[1]; z8[2] ^= m8[2]; z8[3] ^= m8[3];

        /* Cache top byte for remainder calculations - lost in rotate. */
        a = (byte)(z8[3] >> 24);

        /* Rotate Z by 8-bits */
        z8[3] = (z8[2] >> 24) | (z8[3] << 8);
        z8[2] = (z8[1] >> 24) | (z8[2] << 8);
        z8[1] = (z8[0] >> 24) | (z8[1] << 8);
        z8[0] <<= 8;

        /* XOR in (msn * remainder) [pre-rotated by 4 bits] */
        z8[0] ^= (word32)R[16 + (a & 0xf)];

        xi >>= 4;
        /* XOR in next significant nibble (XORed with H) * remainder */
        m8 = (word32*)m[xi];
        a ^= (byte)(m8[3] >> 20);
        z8[0] ^= (word32)R[a >> 4];

        /* XOR in (next significant nibble * H) [pre-rotated by 4 bits] */
        m8 = (word32*)m[16 + xi];
        z8[0] ^= m8[0]; z8[1] ^= m8[1];
        z8[2] ^= m8[2]; z8[3] ^= m8[3];
    }

    xi = x[0];

    /* XOR in most significant nibble * H */
    m8 = (word32*)m[xi & 0xf];
    z8[0] ^= m8[0]; z8[1] ^= m8[1]; z8[2] ^= m8[2]; z8[3] ^= m8[3];

    /* Cache top byte for remainder calculations - lost in rotate. */
    a = (z8[3] >> 24) & 0xf;

    /* Rotate z by 4-bits. (ULL suffixes are harmless on word32 masks.) */
    n7 = z8[3] & 0xf0f0f0f0ULL;
    n6 = z8[3] & 0x0f0f0f0fULL;
    n5 = z8[2] & 0xf0f0f0f0ULL;
    n4 = z8[2] & 0x0f0f0f0fULL;
    n3 = z8[1] & 0xf0f0f0f0ULL;
    n2 = z8[1] & 0x0f0f0f0fULL;
    n1 = z8[0] & 0xf0f0f0f0ULL;
    n0 = z8[0] & 0x0f0f0f0fULL;
    z8[3] = (n7 >> 4) | (n6 << 12) | (n4 >> 20);
    z8[2] = (n5 >> 4) | (n4 << 12) | (n2 >> 20);
    z8[1] = (n3 >> 4) | (n2 << 12) | (n0 >> 20);
    z8[0] = (n1 >> 4) | (n0 << 12);

    /* XOR in most significant nibble * remainder */
    z8[0] ^= (word32)R[a];
    /* XOR in next significant nibble * H */
    m8 = (word32*)m[xi >> 4];
    z8[0] ^= m8[0]; z8[1] ^= m8[1]; z8[2] ^= m8[2]; z8[3] ^= m8[3];

    /* Write back result. */
    x8[0] = z8[0]; x8[1] = z8[1]; x8[2] = z8[2]; x8[3] = z8[3];
}
#else
/* Multiply x by H on a 64-bit CPU using the 32-entry nibble table
 * (m[0..15] = n * H, m[16..31] = same values pre-rotated by 4 bits).
 * One byte (two nibbles) per iteration with a single 8-bit shift.
 *
 * NOTE(review): accesses x and the table through word64* casts — assumes
 * suitable alignment of the 16-byte state and table rows.
 *
 * x  [in,out] 16-byte value; replaced with x * H.
 * m  [in]     4-bit table from GenerateM0().
 */
static WC_INLINE void GMULT(byte *x, byte m[32][AES_BLOCK_SIZE])
{
    int i;
    word64 z8[2] = {0, 0};
    byte a;
    word64* x8 = (word64*)x;
    word64* m8;
    word64 n0, n1, n2, n3;
    byte xi;

    for (i = 15; i > 0; i--) {
        xi = x[i];

        /* XOR in (msn * H) */
        m8 = (word64*)m[xi & 0xf];
        z8[0] ^= m8[0];
        z8[1] ^= m8[1];

        /* Cache top byte for remainder calculations - lost in rotate. */
        a = (byte)(z8[1] >> 56);

        /* Rotate Z by 8-bits */
        z8[1] = (z8[0] >> 56) | (z8[1] << 8);
        z8[0] <<= 8;

        /* XOR in (next significant nibble * H) [pre-rotated by 4 bits] */
        m8 = (word64*)m[16 + (xi >> 4)];
        z8[0] ^= m8[0];
        z8[1] ^= m8[1];

        /* XOR in (msn * remainder) [pre-rotated by 4 bits] */
        z8[0] ^= (word64)R[16 + (a & 0xf)];
        /* XOR in next significant nibble (XORed with H) * remainder */
        m8 = (word64*)m[xi >> 4];
        a ^= (byte)(m8[1] >> 52);
        z8[0] ^= (word64)R[a >> 4];
    }

    xi = x[0];

    /* XOR in most significant nibble * H */
    m8 = (word64*)m[xi & 0xf];
    z8[0] ^= m8[0];
    z8[1] ^= m8[1];

    /* Cache top byte for remainder calculations - lost in rotate. */
    a = (z8[1] >> 56) & 0xf;

    /* Rotate z by 4-bits */
    n3 = z8[1] & W64LIT(0xf0f0f0f0f0f0f0f0);
    n2 = z8[1] & W64LIT(0x0f0f0f0f0f0f0f0f);
    n1 = z8[0] & W64LIT(0xf0f0f0f0f0f0f0f0);
    n0 = z8[0] & W64LIT(0x0f0f0f0f0f0f0f0f);
    z8[1] = (n3 >> 4) | (n2 << 12) | (n0 >> 52);
    z8[0] = (n1 >> 4) | (n0 << 12);

    /* XOR in next significant nibble * H */
    m8 = (word64*)m[xi >> 4];
    z8[0] ^= m8[0];
    z8[1] ^= m8[1];
    /* XOR in most significant nibble * remainder */
    z8[0] ^= (word64)R[a];

    /* Write back result. */
    x8[0] = z8[0];
    x8[1] = z8[1];
}
#endif
/* GHASH: compute the GCM authentication value over AAD and ciphertext,
 * using the pre-computed 4-bit multiplication table aes->M0.
 *
 * aes  AES object holding the table; must be non-NULL.
 * a    Additional authentication data (may be NULL when aSz == 0).
 * aSz  Length of AAD in bytes.
 * c    Ciphertext (may be NULL when cSz == 0).
 * cSz  Length of ciphertext in bytes.
 * s    Output buffer receiving the authentication value.
 * sSz  Bytes of the 16-byte result to copy out.
 *      NOTE(review): assumed <= AES_BLOCK_SIZE; not validated here.
 */
void GHASH(Aes* aes, const byte* a, word32 aSz, const byte* c,
    word32 cSz, byte* s, word32 sSz)
{
    byte x[AES_BLOCK_SIZE];
    byte scratch[AES_BLOCK_SIZE];
    word32 blocks, partial;

    if (aes == NULL) {
        return;
    }

    XMEMSET(x, 0, AES_BLOCK_SIZE);

    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        while (blocks--) {
            xorbuf(x, a, AES_BLOCK_SIZE);
            GMULT(x, aes->M0);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Final short block is zero-padded to a full block. */
            XMEMSET(scratch, 0, AES_BLOCK_SIZE);
            XMEMCPY(scratch, a, partial);
            xorbuf(x, scratch, AES_BLOCK_SIZE);
            GMULT(x, aes->M0);
        }
    }

    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        while (blocks--) {
            xorbuf(x, c, AES_BLOCK_SIZE);
            GMULT(x, aes->M0);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            XMEMSET(scratch, 0, AES_BLOCK_SIZE);
            XMEMCPY(scratch, c, partial);
            xorbuf(x, scratch, AES_BLOCK_SIZE);
            GMULT(x, aes->M0);
        }
    }

    /* Hash in the lengths of A and C in bits */
    FlattenSzInBits(&scratch[0], aSz);
    FlattenSzInBits(&scratch[8], cSz);
    xorbuf(x, scratch, AES_BLOCK_SIZE);
    GMULT(x, aes->M0);

    /* Copy the result into s. */
    XMEMCPY(s, x, sSz);
}
#ifdef WOLFSSL_AESGCM_STREAM
/* No extra initialization for 4-bit table implementation.
 *
 * @param [in] aes AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes)

/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H using pre-computed table.
 *
 * @param [in, out] aes AES GCM object.
 * @param [in] block Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                     \
    do {                                                \
        xorbuf(AES_TAG(aes), block, AES_BLOCK_SIZE);    \
        GMULT(AES_TAG(aes), (aes)->M0);                 \
    }                                                   \
    while (0)
#endif /* WOLFSSL_AESGCM_STREAM */
#elif defined(WORD64_AVAILABLE) && !defined(GCM_WORD32)

#if !defined(FREESCALE_LTC_AES_GCM)
/* Multiply X by Y in GF(2^128) using 64-bit words (bit-serial).
 * Operands are big-endian values held as two word64s (most significant
 * first); caller has already byte-reversed on little-endian machines.
 *
 * X  [in,out] 2-word operand; replaced with the product.
 * Y  [in]     2-word operand (the hash key H).
 */
static void GMULT(word64* X, word64* Y)
{
    word64 Z[2] = {0,0};
    word64 V[2];
    int i, j;
    word64 v1;
    V[0] = X[0]; V[1] = X[1];

    for (i = 0; i < 2; i++)
    {
        word64 y = Y[i];
        for (j = 0; j < 64; j++)
        {
#ifndef AES_GCM_GMULT_NCT
            /* Default: branch-free conditional XOR (mask is all-ones when
             * the top bit of y is set) to avoid a secret-dependent branch. */
            word64 mask = 0 - (y >> 63);
            Z[0] ^= V[0] & mask;
            Z[1] ^= V[1] & mask;
#else
            /* Non-constant-time variant (explicitly opted into). */
            if (y & 0x8000000000000000ULL) {
                Z[0] ^= V[0];
                Z[1] ^= V[1];
            }
#endif

            /* V >>= 1, folding in the reduction polynomial (0xE1...) when
             * a 1 bit is shifted out; mask form keeps this branch-free. */
            v1 = (0 - (V[1] & 1)) & 0xE100000000000000ULL;
            V[1] >>= 1;
            V[1] |= V[0] << 63;
            V[0] >>= 1;
            V[0] ^= v1;
            y <<= 1;
        }
    }

    X[0] = Z[0];
    X[1] = Z[1];
}
/* GHASH: compute the GCM authentication value over AAD and ciphertext
 * using 64-bit arithmetic. aes->H is converted to big-endian words here.
 *
 * aes  AES object holding H; must be non-NULL.
 * a    Additional authentication data (may be NULL when aSz == 0).
 * aSz  Length of AAD in bytes.
 * c    Ciphertext (may be NULL when cSz == 0).
 * cSz  Length of ciphertext in bytes.
 * s    Output buffer receiving the authentication value.
 * sSz  Bytes of the 16-byte result to copy out.
 *      NOTE(review): assumed <= AES_BLOCK_SIZE; not validated here.
 */
void GHASH(Aes* aes, const byte* a, word32 aSz, const byte* c,
    word32 cSz, byte* s, word32 sSz)
{
    word64 x[2] = {0,0};
    word32 blocks, partial;
    word64 bigH[2];

    if (aes == NULL) {
        return;
    }

    XMEMCPY(bigH, aes->H, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
    ByteReverseWords64(bigH, bigH, AES_BLOCK_SIZE);
#endif

    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        word64 bigA[2];
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        while (blocks--) {
            XMEMCPY(bigA, a, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigA, bigA, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigA[0];
            x[1] ^= bigA[1];
            GMULT(x, bigH);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Final short block is zero-padded to a full block. */
            XMEMSET(bigA, 0, AES_BLOCK_SIZE);
            XMEMCPY(bigA, a, partial);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigA, bigA, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigA[0];
            x[1] ^= bigA[1];
            GMULT(x, bigH);
        }
#ifdef OPENSSL_EXTRA
        /* store AAD partial tag for next call */
        aes->aadH[0] = (word32)((x[0] & 0xFFFFFFFF00000000ULL) >> 32);
        aes->aadH[1] = (word32)(x[0] & 0xFFFFFFFF);
        aes->aadH[2] = (word32)((x[1] & 0xFFFFFFFF00000000ULL) >> 32);
        aes->aadH[3] = (word32)(x[1] & 0xFFFFFFFF);
#endif
    }

    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        word64 bigC[2];
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
#ifdef OPENSSL_EXTRA
        /* Start from last AAD partial tag */
        if(aes->aadLen) {
            x[0] = ((word64)aes->aadH[0]) << 32 | aes->aadH[1];
            x[1] = ((word64)aes->aadH[2]) << 32 | aes->aadH[3];
        }
#endif
        while (blocks--) {
            XMEMCPY(bigC, c, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigC, bigC, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigC[0];
            x[1] ^= bigC[1];
            GMULT(x, bigH);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            XMEMSET(bigC, 0, AES_BLOCK_SIZE);
            XMEMCPY(bigC, c, partial);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords64(bigC, bigC, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigC[0];
            x[1] ^= bigC[1];
            GMULT(x, bigH);
        }
    }

    /* Hash in the lengths in bits of A and C */
    {
        word64 len[2];
        len[0] = aSz; len[1] = cSz;
#ifdef OPENSSL_EXTRA
        /* Use the accumulated AAD length when streaming via EVP. */
        if (aes->aadLen)
            len[0] = (word64)aes->aadLen;
#endif
        /* Lengths are in bytes. Convert to bits. */
        len[0] *= 8;
        len[1] *= 8;

        x[0] ^= len[0];
        x[1] ^= len[1];
        GMULT(x, bigH);
    }
#ifdef LITTLE_ENDIAN_ORDER
    ByteReverseWords64(x, x, AES_BLOCK_SIZE);
#endif
    XMEMCPY(s, x, sSz);
}
#endif /* !FREESCALE_LTC_AES_GCM */
#ifdef WOLFSSL_AESGCM_STREAM
#ifdef LITTLE_ENDIAN_ORDER
/* Initialization for streaming GHASH on little-endian machines:
 * convert H into big-endian 64-bit words once up front, so per-block
 * processing does not have to reverse it each time.
 *
 * @param [in] aes AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) \
    ByteReverseWords64((word64*)aes->H, (word64*)aes->H, AES_BLOCK_SIZE)

/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H.
 *
 * @param [in, out] aes AES GCM object.
 * @param [in] block Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                             \
    do {                                                        \
        word64* x = (word64*)AES_TAG(aes);                      \
        word64* h = (word64*)aes->H;                            \
        word64 block64[2];                                      \
        XMEMCPY(block64, block, AES_BLOCK_SIZE);                \
        ByteReverseWords64(block64, block64, AES_BLOCK_SIZE);   \
        x[0] ^= block64[0];                                     \
        x[1] ^= block64[1];                                     \
        GMULT(x, h);                                            \
    }                                                           \
    while (0)
#ifdef OPENSSL_EXTRA
/* GHASH in AAD and cipher text lengths in bits.
 *
 * Convert tag back to little-endian.
 * Uses the EVP-accumulated AAD length (aes->aadLen) when set.
 *
 * @param [in, out] aes AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                        \
    do {                                            \
        word64* x = (word64*)AES_TAG(aes);          \
        word64* h = (word64*)aes->H;                \
        word64 len[2];                              \
        len[0] = aes->aSz; len[1] = aes->cSz;       \
        if (aes->aadLen)                            \
            len[0] = (word64)aes->aadLen;           \
        /* Lengths are in bytes. Convert to bits. */\
        len[0] *= 8;                                \
        len[1] *= 8;                                \
                                                    \
        x[0] ^= len[0];                             \
        x[1] ^= len[1];                             \
        GMULT(x, h);                                \
        ByteReverseWords64(x, x, AES_BLOCK_SIZE);   \
    }                                               \
    while (0)
#else
/* GHASH in AAD and cipher text lengths in bits.
 *
 * Convert tag back to little-endian.
 *
 * @param [in, out] aes AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                        \
    do {                                            \
        word64* x = (word64*)AES_TAG(aes);          \
        word64* h = (word64*)aes->H;                \
        word64 len[2];                              \
        len[0] = aes->aSz; len[1] = aes->cSz;       \
        /* Lengths are in bytes. Convert to bits. */\
        len[0] *= 8;                                \
        len[1] *= 8;                                \
                                                    \
        x[0] ^= len[0];                             \
        x[1] ^= len[1];                             \
        GMULT(x, h);                                \
        ByteReverseWords64(x, x, AES_BLOCK_SIZE);   \
    }                                               \
    while (0)
#endif
#else
/* No extra initialization for the 64-bit implementation on big-endian
 * machines: H is already in the required byte order.
 *
 * @param [in] aes AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes)

/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H. No byte reversal needed on
 * big-endian machines.
 *
 * @param [in, out] aes AES GCM object.
 * @param [in] block Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                 \
    do {                                            \
        word64* x = (word64*)AES_TAG(aes);          \
        word64* h = (word64*)aes->H;                \
        word64 block64[2];                          \
        XMEMCPY(block64, block, AES_BLOCK_SIZE);    \
        x[0] ^= block64[0];                         \
        x[1] ^= block64[1];                         \
        GMULT(x, h);                                \
    }                                               \
    while (0)
#ifdef OPENSSL_EXTRA
/* GHASH in AAD and cipher text lengths in bits.
 *
 * Big-endian build: the tag is already in wire order, so no byte reversal
 * is performed here. Uses the EVP-accumulated AAD length when set.
 *
 * @param [in, out] aes AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                        \
    do {                                            \
        word64* x = (word64*)AES_TAG(aes);          \
        word64* h = (word64*)aes->H;                \
        word64 len[2];                              \
        len[0] = aes->aSz; len[1] = aes->cSz;       \
        if (aes->aadLen)                            \
            len[0] = (word64)aes->aadLen;           \
        /* Lengths are in bytes. Convert to bits. */\
        len[0] *= 8;                                \
        len[1] *= 8;                                \
                                                    \
        x[0] ^= len[0];                             \
        x[1] ^= len[1];                             \
        GMULT(x, h);                                \
    }                                               \
    while (0)
#else
/* GHASH in AAD and cipher text lengths in bits.
 *
 * Big-endian build: the tag is already in wire order, so no byte reversal
 * is performed here.
 *
 * @param [in, out] aes AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                        \
    do {                                            \
        word64* x = (word64*)AES_TAG(aes);          \
        word64* h = (word64*)aes->H;                \
        word64 len[2];                              \
        len[0] = aes->aSz; len[1] = aes->cSz;       \
        /* Lengths are in bytes. Convert to bits. */\
        len[0] *= 8;                                \
        len[1] *= 8;                                \
                                                    \
        x[0] ^= len[0];                             \
        x[1] ^= len[1];                             \
        GMULT(x, h);                                \
    }                                               \
    while (0)
#endif
#endif /* !LITTLE_ENDIAN_ORDER */
#endif /* WOLFSSL_AESGCM_STREAM */
  5296. /* end defined(WORD64_AVAILABLE) && !defined(GCM_WORD32) */
  5297. #else /* GCM_WORD32 */
  5298. static void GMULT(word32* X, word32* Y)
  5299. {
  5300. word32 Z[4] = {0,0,0,0};
  5301. word32 V[4];
  5302. int i, j;
  5303. V[0] = X[0]; V[1] = X[1]; V[2] = X[2]; V[3] = X[3];
  5304. for (i = 0; i < 4; i++)
  5305. {
  5306. word32 y = Y[i];
  5307. for (j = 0; j < 32; j++)
  5308. {
  5309. if (y & 0x80000000) {
  5310. Z[0] ^= V[0];
  5311. Z[1] ^= V[1];
  5312. Z[2] ^= V[2];
  5313. Z[3] ^= V[3];
  5314. }
  5315. if (V[3] & 0x00000001) {
  5316. V[3] >>= 1;
  5317. V[3] |= ((V[2] & 0x00000001) ? 0x80000000 : 0);
  5318. V[2] >>= 1;
  5319. V[2] |= ((V[1] & 0x00000001) ? 0x80000000 : 0);
  5320. V[1] >>= 1;
  5321. V[1] |= ((V[0] & 0x00000001) ? 0x80000000 : 0);
  5322. V[0] >>= 1;
  5323. V[0] ^= 0xE1000000;
  5324. } else {
  5325. V[3] >>= 1;
  5326. V[3] |= ((V[2] & 0x00000001) ? 0x80000000 : 0);
  5327. V[2] >>= 1;
  5328. V[2] |= ((V[1] & 0x00000001) ? 0x80000000 : 0);
  5329. V[1] >>= 1;
  5330. V[1] |= ((V[0] & 0x00000001) ? 0x80000000 : 0);
  5331. V[0] >>= 1;
  5332. }
  5333. y <<= 1;
  5334. }
  5335. }
  5336. X[0] = Z[0];
  5337. X[1] = Z[1];
  5338. X[2] = Z[2];
  5339. X[3] = Z[3];
  5340. }
/* Compute GHASH over the AAD and cipher text (32-bit word implementation).
 *
 * S = GHASH(A || pad || C || pad || len(A)*8 || len(C)*8) with the hash key
 * H taken from aes->H, per the GCM specification.
 *
 * @param [in]  aes  AES GCM object holding the hash key H. May be NULL.
 * @param [in]  a    Additional authentication data. May be NULL.
 * @param [in]  aSz  Length of AAD in bytes.
 * @param [in]  c    Cipher text. May be NULL.
 * @param [in]  cSz  Length of cipher text in bytes.
 * @param [out] s    Buffer to hold hash output.
 * @param [in]  sSz  Number of bytes of the hash to copy out (<= block size).
 */
void GHASH(Aes* aes, const byte* a, word32 aSz, const byte* c,
    word32 cSz, byte* s, word32 sSz)
{
    word32 x[4] = {0,0,0,0};
    word32 blocks, partial;
    word32 bigH[4];

    if (aes == NULL) {
        return;
    }

    XMEMCPY(bigH, aes->H, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
    /* GMULT works on big-endian words - reverse H once up front. */
    ByteReverseWords(bigH, bigH, AES_BLOCK_SIZE);
#endif

    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        word32 bigA[4];
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        while (blocks--) {
            /* Copy to an aligned local buffer before word access. */
            XMEMCPY(bigA, a, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords(bigA, bigA, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigA[0];
            x[1] ^= bigA[1];
            x[2] ^= bigA[2];
            x[3] ^= bigA[3];
            GMULT(x, bigH);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Final partial AAD block is zero padded. */
            XMEMSET(bigA, 0, AES_BLOCK_SIZE);
            XMEMCPY(bigA, a, partial);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords(bigA, bigA, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigA[0];
            x[1] ^= bigA[1];
            x[2] ^= bigA[2];
            x[3] ^= bigA[3];
            GMULT(x, bigH);
        }
    }

    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        word32 bigC[4];
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        while (blocks--) {
            XMEMCPY(bigC, c, AES_BLOCK_SIZE);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords(bigC, bigC, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigC[0];
            x[1] ^= bigC[1];
            x[2] ^= bigC[2];
            x[3] ^= bigC[3];
            GMULT(x, bigH);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Final partial cipher text block is zero padded. */
            XMEMSET(bigC, 0, AES_BLOCK_SIZE);
            XMEMCPY(bigC, c, partial);
#ifdef LITTLE_ENDIAN_ORDER
            ByteReverseWords(bigC, bigC, AES_BLOCK_SIZE);
#endif
            x[0] ^= bigC[0];
            x[1] ^= bigC[1];
            x[2] ^= bigC[2];
            x[3] ^= bigC[3];
            GMULT(x, bigH);
        }
    }

    /* Hash in the lengths in bits of A and C */
    {
        word32 len[4];
        /* Lengths are in bytes. Convert to bits.
         * Each 64-bit bit count is split into high and low 32-bit words:
         * high word is the byte count shifted down by 29 (32 - 3). */
        len[0] = (aSz >> (8*sizeof(aSz) - 3));
        len[1] = aSz << 3;
        len[2] = (cSz >> (8*sizeof(cSz) - 3));
        len[3] = cSz << 3;

        x[0] ^= len[0];
        x[1] ^= len[1];
        x[2] ^= len[2];
        x[3] ^= len[3];
        GMULT(x, bigH);
    }
#ifdef LITTLE_ENDIAN_ORDER
    /* Convert the result back to byte order for output. */
    ByteReverseWords(x, x, AES_BLOCK_SIZE);
#endif
    XMEMCPY(s, x, sSz);
}
#ifdef WOLFSSL_AESGCM_STREAM
#ifdef LITTLE_ENDIAN_ORDER
/* Little-endian 32-bit word implementation requires byte reversal of H.
 *
 * H is the all-zeros block encrypted with the key.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes) \
    ByteReverseWords((word32*)aes->H, (word32*)aes->H, AES_BLOCK_SIZE)

/* GHASH one block of data.
 *
 * XOR block, in big-endian form, into tag and GMULT with H.
 *
 * @param [in, out] aes    AES GCM object.
 * @param [in]      block  Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                         \
    do {                                                    \
        word32* x = (word32*)AES_TAG(aes);                  \
        word32* h = (word32*)aes->H;                        \
        word32 bigEnd[4];                                   \
        /* Copy to aligned buffer, convert to big-endian words. */ \
        XMEMCPY(bigEnd, block, AES_BLOCK_SIZE);             \
        ByteReverseWords(bigEnd, bigEnd, AES_BLOCK_SIZE);   \
        x[0] ^= bigEnd[0];                                  \
        x[1] ^= bigEnd[1];                                  \
        x[2] ^= bigEnd[2];                                  \
        x[3] ^= bigEnd[3];                                  \
        GMULT(x, h);                                        \
    }                                                       \
    while (0)

/* GHASH in AAD and cipher text lengths in bits.
 *
 * Convert tag back to little-endian.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                                \
    do {                                                    \
        word32 len[4];                                      \
        word32* x = (word32*)AES_TAG(aes);                  \
        word32* h = (word32*)aes->H;                        \
        /* Byte counts to 64-bit bit counts, as two words each. */ \
        len[0] = (aes->aSz >> (8*sizeof(aes->aSz) - 3));    \
        len[1] = aes->aSz << 3;                             \
        len[2] = (aes->cSz >> (8*sizeof(aes->cSz) - 3));    \
        len[3] = aes->cSz << 3;                             \
        x[0] ^= len[0];                                     \
        x[1] ^= len[1];                                     \
        x[2] ^= len[2];                                     \
        x[3] ^= len[3];                                     \
        GMULT(x, h);                                        \
        ByteReverseWords(x, x, AES_BLOCK_SIZE);             \
    }                                                       \
    while (0)
#else
/* No extra initialization for 32-bit word implementation.
 *
 * @param [in] aes  AES GCM object.
 */
#define GHASH_INIT_EXTRA(aes)

/* GHASH one block of data.
 *
 * XOR block into tag and GMULT with H.
 *
 * @param [in, out] aes    AES GCM object.
 * @param [in]      block  Block of AAD or cipher text.
 */
#define GHASH_ONE_BLOCK(aes, block)                         \
    do {                                                    \
        word32* x = (word32*)AES_TAG(aes);                  \
        word32* h = (word32*)aes->H;                        \
        word32 block32[4];                                  \
        /* Copy to aligned buffer for word access. */       \
        XMEMCPY(block32, block, AES_BLOCK_SIZE);            \
        x[0] ^= block32[0];                                 \
        x[1] ^= block32[1];                                 \
        x[2] ^= block32[2];                                 \
        x[3] ^= block32[3];                                 \
        GMULT(x, h);                                        \
    }                                                       \
    while (0)

/* GHASH in AAD and cipher text lengths in bits.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                                \
    do {                                                    \
        word32 len[4];                                      \
        word32* x = (word32*)AES_TAG(aes);                  \
        word32* h = (word32*)aes->H;                        \
        /* Byte counts to 64-bit bit counts, as two words each. */ \
        len[0] = (aes->aSz >> (8*sizeof(aes->aSz) - 3));    \
        len[1] = aes->aSz << 3;                             \
        len[2] = (aes->cSz >> (8*sizeof(aes->cSz) - 3));    \
        len[3] = aes->cSz << 3;                             \
        x[0] ^= len[0];                                     \
        x[1] ^= len[1];                                     \
        x[2] ^= len[2];                                     \
        x[3] ^= len[3];                                     \
        GMULT(x, h);                                        \
    }                                                       \
    while (0)
#endif /* LITTLE_ENDIAN_ORDER */
#endif /* WOLFSSL_AESGCM_STREAM */
  5535. #endif /* end GCM_WORD32 */
  5536. #if !defined(WOLFSSL_XILINX_CRYPT) && !defined(WOLFSSL_AFALG_XILINX_AES)
  5537. #ifdef WOLFSSL_AESGCM_STREAM
#ifndef GHASH_LEN_BLOCK
/* Hash in the lengths of the AAD and cipher text in bits.
 *
 * Default implementation: flatten the two 64-bit bit counts into one
 * big-endian block and GHASH it.
 *
 * @param [in, out] aes  AES GCM object.
 */
#define GHASH_LEN_BLOCK(aes)                        \
    do {                                            \
        byte scratch[AES_BLOCK_SIZE];               \
        FlattenSzInBits(&scratch[0], (aes)->aSz);   \
        FlattenSzInBits(&scratch[8], (aes)->cSz);   \
        GHASH_ONE_BLOCK(aes, scratch);              \
    }                                               \
    while (0)
#endif
/* Initialize a GHASH for streaming operations.
 *
 * Zeros the running tag and the cached partial-block byte counts.
 *
 * @param [in, out] aes  AES GCM object.
 */
static void GHASH_INIT(Aes* aes) {
    /* Set tag to all zeros as initial value. */
    XMEMSET(AES_TAG(aes), 0, AES_BLOCK_SIZE);
    /* Reset counts of unprocessed AAD and cipher text bytes. */
    aes->aOver = 0;
    aes->cOver = 0;
    /* Extra initialization based on implementation. */
    GHASH_INIT_EXTRA(aes);
}
/* Update the GHASH with AAD and/or cipher text.
 *
 * Partial blocks are cached in AES_LASTGBLOCK(aes) between calls; aOver and
 * cOver hold the number of cached AAD / cipher text bytes. Any cached AAD
 * is zero-padded and flushed as soon as cipher text arrives.
 *
 * @param [in,out] aes  AES GCM object.
 * @param [in]     a    Additional authentication data buffer.
 * @param [in]     aSz  Size of data in AAD buffer.
 * @param [in]     c    Cipher text buffer.
 * @param [in]     cSz  Size of data in cipher text buffer.
 */
static void GHASH_UPDATE(Aes* aes, const byte* a, word32 aSz, const byte* c,
    word32 cSz)
{
    word32 blocks;
    word32 partial;

    /* Hash in A, the Additional Authentication Data */
    if (aSz != 0 && a != NULL) {
        /* Update count of AAD we have hashed. */
        aes->aSz += aSz;
        /* Check if we have unprocessed data. */
        if (aes->aOver > 0) {
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->aOver;
            if (sz > aSz) {
                sz = (byte)aSz;
            }
            /* Copy extra into last GHASH block array and update count. */
            XMEMCPY(AES_LASTGBLOCK(aes) + aes->aOver, a, sz);
            aes->aOver += sz;
            if (aes->aOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
                GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
                /* Reset count. */
                aes->aOver = 0;
            }
            /* Used up some data. */
            aSz -= sz;
            a += sz;
        }
        /* Calculate number of blocks of AAD and the leftover. */
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        /* GHASH full blocks now. */
        while (blocks--) {
            GHASH_ONE_BLOCK(aes, a);
            a += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Cache the partial block. */
            XMEMCPY(AES_LASTGBLOCK(aes), a, partial);
            aes->aOver = (byte)partial;
        }
    }
    if (aes->aOver > 0 && cSz > 0 && c != NULL) {
        /* No more AAD coming and we have a partial block. */
        /* Fill the rest of the block with zeros. */
        byte sz = AES_BLOCK_SIZE - aes->aOver;
        XMEMSET(AES_LASTGBLOCK(aes) + aes->aOver, 0, sz);
        /* GHASH last AAD block. */
        GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
        /* Clear partial count for next time through. */
        aes->aOver = 0;
    }
    /* Hash in C, the Ciphertext */
    if (cSz != 0 && c != NULL) {
        /* Update count of cipher text we have hashed. */
        aes->cSz += cSz;
        if (aes->cOver > 0) {
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->cOver;
            if (sz > cSz) {
                sz = (byte)cSz;
            }
            XMEMCPY(AES_LASTGBLOCK(aes) + aes->cOver, c, sz);
            /* Update count of cached cipher text bytes. */
            aes->cOver += sz;
            if (aes->cOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
                GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
                /* Reset count. */
                aes->cOver = 0;
            }
            /* Used up some data. */
            cSz -= sz;
            c += sz;
        }
        /* Calculate number of blocks of cipher text and the leftover. */
        blocks = cSz / AES_BLOCK_SIZE;
        partial = cSz % AES_BLOCK_SIZE;
        /* GHASH full blocks now. */
        while (blocks--) {
            GHASH_ONE_BLOCK(aes, c);
            c += AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Cache the partial block. */
            XMEMCPY(AES_LASTGBLOCK(aes), c, partial);
            aes->cOver = (byte)partial;
        }
    }
}
  5666. /* Finalize the GHASH calculation.
  5667. *
  5668. * Complete hashing cipher text and hash the AAD and cipher text lengths.
  5669. *
  5670. * @param [in, out] aes AES GCM object.
  5671. * @param [out] s Authentication tag.
  5672. * @param [in] sSz Size of authentication tag required.
  5673. */
  5674. static void GHASH_FINAL(Aes* aes, byte* s, word32 sSz)
  5675. {
  5676. /* AAD block incomplete when > 0 */
  5677. byte over = aes->aOver;
  5678. if (aes->cOver > 0) {
  5679. /* Cipher text block incomplete. */
  5680. over = aes->cOver;
  5681. }
  5682. if (over > 0) {
  5683. /* Zeroize the unused part of the block. */
  5684. XMEMSET(AES_LASTGBLOCK(aes) + over, 0, AES_BLOCK_SIZE - over);
  5685. /* Hash the last block of cipher text. */
  5686. GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes));
  5687. }
  5688. /* Hash in the lengths of AAD and cipher text in bits */
  5689. GHASH_LEN_BLOCK(aes);
  5690. /* Copy the result into s. */
  5691. XMEMCPY(s, AES_TAG(aes), sSz);
  5692. }
  5693. #endif /* WOLFSSL_AESGCM_STREAM */
  5694. #ifdef FREESCALE_LTC_AES_GCM
/* AES-GCM encrypt using the NXP/Freescale LTC hardware.
 *
 * @param [in]  aes        AES object with key set.
 * @param [out] out        Encrypted output; same length as input.
 * @param [in]  in         Plain text to encrypt.
 * @param [in]  sz         Length of plain text in bytes.
 * @param [in]  iv         Initialization vector (nonce); must be non-empty.
 * @param [in]  ivSz       Length of IV in bytes.
 * @param [out] authTag    Authentication tag output.
 * @param [in]  authTagSz  Tag size; WOLFSSL_MIN_AUTH_TAG_SZ..AES_BLOCK_SIZE.
 * @param [in]  authIn     Additional authentication data.
 * @param [in]  authInSz   Length of AAD in bytes.
 * @return 0 on success, BAD_FUNC_ARG on invalid arguments, AES_GCM_AUTH_E
 *         on hardware failure.
 */
int wc_AesGcmEncrypt(Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    status_t status;
    word32 keySize;

    /* argument checks */
    if (aes == NULL || authTagSz > AES_BLOCK_SIZE || ivSz == 0) {
        return BAD_FUNC_ARG;
    }
    if (authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ) {
        WOLFSSL_MSG("GcmEncrypt authTagSz too small error");
        return BAD_FUNC_ARG;
    }

    status = wc_AesGetKeySize(aes, &keySize);
    if (status)
        return status;

    /* Serialize access to the hardware. */
    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    status = LTC_AES_EncryptTagGcm(LTC_BASE, in, out, sz, iv, ivSz,
        authIn, authInSz, (byte*)aes->key, keySize, authTag, authTagSz);
    wolfSSL_CryptHwMutexUnLock();

    return (status == kStatus_Success) ? 0 : AES_GCM_AUTH_E;
}
  5721. #else
  5722. #ifdef STM32_CRYPTO_AES_GCM
  5723. /* this function supports inline encrypt */
  5724. /* define STM32_AESGCM_PARTIAL for STM HW that does not support authentication
  5725. * on byte multiples (see CRYP_HEADERWIDTHUNIT_BYTE) */
  5726. static WARN_UNUSED_RESULT int wc_AesGcmEncrypt_STM32(
  5727. Aes* aes, byte* out, const byte* in, word32 sz,
  5728. const byte* iv, word32 ivSz,
  5729. byte* authTag, word32 authTagSz,
  5730. const byte* authIn, word32 authInSz)
  5731. {
  5732. int ret;
  5733. #ifdef WOLFSSL_STM32_CUBEMX
  5734. CRYP_HandleTypeDef hcryp;
  5735. #else
  5736. word32 keyCopy[AES_256_KEY_SIZE/sizeof(word32)];
  5737. #endif
  5738. word32 keySize;
  5739. #ifdef WOLFSSL_STM32_CUBEMX
  5740. int status = HAL_OK;
  5741. word32 blocks = sz / AES_BLOCK_SIZE;
  5742. word32 partialBlock[AES_BLOCK_SIZE/sizeof(word32)];
  5743. #else
  5744. int status = SUCCESS;
  5745. #endif
  5746. word32 partial = sz % AES_BLOCK_SIZE;
  5747. word32 tag[AES_BLOCK_SIZE/sizeof(word32)];
  5748. word32 ctrInit[AES_BLOCK_SIZE/sizeof(word32)];
  5749. word32 ctr[AES_BLOCK_SIZE/sizeof(word32)];
  5750. word32 authhdr[AES_BLOCK_SIZE/sizeof(word32)];
  5751. byte* authInPadded = NULL;
  5752. int authPadSz, wasAlloc = 0, useSwGhash = 0;
  5753. ret = wc_AesGetKeySize(aes, &keySize);
  5754. if (ret != 0)
  5755. return ret;
  5756. #ifdef WOLFSSL_STM32_CUBEMX
  5757. ret = wc_Stm32_Aes_Init(aes, &hcryp);
  5758. if (ret != 0)
  5759. return ret;
  5760. #endif
  5761. XMEMSET(ctr, 0, AES_BLOCK_SIZE);
  5762. if (ivSz == GCM_NONCE_MID_SZ) {
  5763. byte* pCtr = (byte*)ctr;
  5764. XMEMCPY(ctr, iv, ivSz);
  5765. pCtr[AES_BLOCK_SIZE - 1] = 1;
  5766. }
  5767. else {
  5768. GHASH(aes, NULL, 0, iv, ivSz, (byte*)ctr, AES_BLOCK_SIZE);
  5769. }
  5770. XMEMCPY(ctrInit, ctr, sizeof(ctr)); /* save off initial counter for GMAC */
  5771. /* Authentication buffer - must be 4-byte multiple zero padded */
  5772. authPadSz = authInSz % sizeof(word32);
  5773. if (authPadSz != 0) {
  5774. authPadSz = authInSz + sizeof(word32) - authPadSz;
  5775. if (authPadSz <= sizeof(authhdr)) {
  5776. authInPadded = (byte*)authhdr;
  5777. }
  5778. else {
  5779. authInPadded = (byte*)XMALLOC(authPadSz, aes->heap,
  5780. DYNAMIC_TYPE_TMP_BUFFER);
  5781. if (authInPadded == NULL) {
  5782. wolfSSL_CryptHwMutexUnLock();
  5783. return MEMORY_E;
  5784. }
  5785. wasAlloc = 1;
  5786. }
  5787. XMEMSET(authInPadded, 0, authPadSz);
  5788. XMEMCPY(authInPadded, authIn, authInSz);
  5789. } else {
  5790. authPadSz = authInSz;
  5791. authInPadded = (byte*)authIn;
  5792. }
  5793. /* for cases where hardware cannot be used for authTag calculate it */
  5794. /* if IV is not 12 calculate GHASH using software */
  5795. if (ivSz != GCM_NONCE_MID_SZ
  5796. #ifndef CRYP_HEADERWIDTHUNIT_BYTE
  5797. /* or harware that does not support partial block */
  5798. || sz == 0 || partial != 0
  5799. #endif
  5800. #if !defined(CRYP_HEADERWIDTHUNIT_BYTE) && !defined(STM32_AESGCM_PARTIAL)
  5801. /* or authIn is not a multiple of 4 */
  5802. || authPadSz != authInSz
  5803. #endif
  5804. ) {
  5805. useSwGhash = 1;
  5806. }
  5807. /* Hardware requires counter + 1 */
  5808. IncrementGcmCounter((byte*)ctr);
  5809. ret = wolfSSL_CryptHwMutexLock();
  5810. if (ret != 0) {
  5811. return ret;
  5812. }
  5813. #ifdef WOLFSSL_STM32_CUBEMX
  5814. hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
  5815. hcryp.Init.Header = (STM_CRYPT_TYPE*)authInPadded;
  5816. #if defined(STM32_HAL_V2)
  5817. hcryp.Init.Algorithm = CRYP_AES_GCM;
  5818. #ifdef CRYP_HEADERWIDTHUNIT_BYTE
  5819. /* V2 with CRYP_HEADERWIDTHUNIT_BYTE uses byte size for header */
  5820. hcryp.Init.HeaderSize = authInSz;
  5821. #else
  5822. hcryp.Init.HeaderSize = authPadSz/sizeof(word32);
  5823. #endif
  5824. #ifdef CRYP_KEYIVCONFIG_ONCE
  5825. /* allows repeated calls to HAL_CRYP_Encrypt */
  5826. hcryp.Init.KeyIVConfigSkip = CRYP_KEYIVCONFIG_ONCE;
  5827. #endif
  5828. ByteReverseWords(ctr, ctr, AES_BLOCK_SIZE);
  5829. hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
  5830. HAL_CRYP_Init(&hcryp);
  5831. #ifndef CRYP_KEYIVCONFIG_ONCE
  5832. /* GCM payload phase - can handle partial blocks */
  5833. status = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in,
  5834. (blocks * AES_BLOCK_SIZE) + partial, (uint32_t*)out, STM32_HAL_TIMEOUT);
  5835. #else
  5836. /* GCM payload phase - blocks */
  5837. if (blocks) {
  5838. status = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in,
  5839. (blocks * AES_BLOCK_SIZE), (uint32_t*)out, STM32_HAL_TIMEOUT);
  5840. }
  5841. /* GCM payload phase - partial remainder */
  5842. if (status == HAL_OK && (partial != 0 || blocks == 0)) {
  5843. XMEMSET(partialBlock, 0, sizeof(partialBlock));
  5844. XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
  5845. status = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)partialBlock, partial,
  5846. (uint32_t*)partialBlock, STM32_HAL_TIMEOUT);
  5847. XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
  5848. }
  5849. #endif
  5850. if (status == HAL_OK && !useSwGhash) {
  5851. /* Compute the authTag */
  5852. status = HAL_CRYPEx_AESGCM_GenerateAuthTAG(&hcryp, (uint32_t*)tag,
  5853. STM32_HAL_TIMEOUT);
  5854. }
  5855. #elif defined(STM32_CRYPTO_AES_ONLY)
  5856. /* Set the CRYP parameters */
  5857. hcryp.Init.HeaderSize = authPadSz;
  5858. if (authPadSz == 0)
  5859. hcryp.Init.Header = NULL; /* cannot pass pointer here when authIn == 0 */
  5860. hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_GCM_GMAC;
  5861. hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT;
  5862. hcryp.Init.GCMCMACPhase = CRYP_INIT_PHASE;
  5863. HAL_CRYP_Init(&hcryp);
  5864. /* GCM init phase */
  5865. status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
  5866. if (status == HAL_OK) {
  5867. /* GCM header phase */
  5868. hcryp.Init.GCMCMACPhase = CRYP_HEADER_PHASE;
  5869. status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
  5870. }
  5871. if (status == HAL_OK) {
  5872. /* GCM payload phase - blocks */
  5873. hcryp.Init.GCMCMACPhase = CRYP_PAYLOAD_PHASE;
  5874. if (blocks) {
  5875. status = HAL_CRYPEx_AES_Auth(&hcryp, (byte*)in,
  5876. (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
  5877. }
  5878. }
  5879. if (status == HAL_OK && (partial != 0 || (sz > 0 && blocks == 0))) {
  5880. /* GCM payload phase - partial remainder */
  5881. XMEMSET(partialBlock, 0, sizeof(partialBlock));
  5882. XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
  5883. status = HAL_CRYPEx_AES_Auth(&hcryp, (uint8_t*)partialBlock, partial,
  5884. (uint8_t*)partialBlock, STM32_HAL_TIMEOUT);
  5885. XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
  5886. }
  5887. if (status == HAL_OK && !useSwGhash) {
  5888. /* GCM final phase */
  5889. hcryp.Init.GCMCMACPhase = CRYP_FINAL_PHASE;
  5890. status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, sz, (uint8_t*)tag, STM32_HAL_TIMEOUT);
  5891. }
  5892. #else
  5893. hcryp.Init.HeaderSize = authPadSz;
  5894. HAL_CRYP_Init(&hcryp);
  5895. if (blocks) {
  5896. /* GCM payload phase - blocks */
  5897. status = HAL_CRYPEx_AESGCM_Encrypt(&hcryp, (byte*)in,
  5898. (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
  5899. }
  5900. if (status == HAL_OK && (partial != 0 || blocks == 0)) {
  5901. /* GCM payload phase - partial remainder */
  5902. XMEMSET(partialBlock, 0, sizeof(partialBlock));
  5903. XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
  5904. status = HAL_CRYPEx_AESGCM_Encrypt(&hcryp, (uint8_t*)partialBlock, partial,
  5905. (uint8_t*)partialBlock, STM32_HAL_TIMEOUT);
  5906. XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
  5907. }
  5908. if (status == HAL_OK && !useSwGhash) {
  5909. /* Compute the authTag */
  5910. status = HAL_CRYPEx_AESGCM_Finish(&hcryp, sz, (uint8_t*)tag, STM32_HAL_TIMEOUT);
  5911. }
  5912. #endif
  5913. if (status != HAL_OK)
  5914. ret = AES_GCM_AUTH_E;
  5915. HAL_CRYP_DeInit(&hcryp);
  5916. #else /* Standard Peripheral Library */
  5917. ByteReverseWords(keyCopy, (word32*)aes->key, keySize);
  5918. status = CRYP_AES_GCM(MODE_ENCRYPT, (uint8_t*)ctr,
  5919. (uint8_t*)keyCopy, keySize * 8,
  5920. (uint8_t*)in, sz,
  5921. (uint8_t*)authInPadded, authInSz,
  5922. (uint8_t*)out, (uint8_t*)tag);
  5923. if (status != SUCCESS)
  5924. ret = AES_GCM_AUTH_E;
  5925. #endif /* WOLFSSL_STM32_CUBEMX */
  5926. wolfSSL_CryptHwMutexUnLock();
  5927. if (ret == 0) {
  5928. /* return authTag */
  5929. if (authTag) {
  5930. if (useSwGhash) {
  5931. GHASH(aes, authIn, authInSz, out, sz, authTag, authTagSz);
  5932. ret = wc_AesEncrypt(aes, (byte*)ctrInit, (byte*)tag);
  5933. if (ret == 0) {
  5934. xorbuf(authTag, tag, authTagSz);
  5935. }
  5936. }
  5937. else {
  5938. /* use hardware calculated tag */
  5939. XMEMCPY(authTag, tag, authTagSz);
  5940. }
  5941. }
  5942. }
  5943. /* Free memory */
  5944. if (wasAlloc) {
  5945. XFREE(authInPadded, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
  5946. }
  5947. return ret;
  5948. }
  5949. #endif /* STM32_CRYPTO_AES_GCM */
  5950. #ifdef WOLFSSL_AESNI
  5951. /* For performance reasons, this code needs to be not inlined. */
  5952. WARN_UNUSED_RESULT int AES_GCM_encrypt_C(
  5953. Aes* aes, byte* out, const byte* in, word32 sz,
  5954. const byte* iv, word32 ivSz,
  5955. byte* authTag, word32 authTagSz,
  5956. const byte* authIn, word32 authInSz);
  5957. #else
  5958. static
  5959. #endif
  5960. WARN_UNUSED_RESULT int AES_GCM_encrypt_C(
  5961. Aes* aes, byte* out, const byte* in, word32 sz,
  5962. const byte* iv, word32 ivSz,
  5963. byte* authTag, word32 authTagSz,
  5964. const byte* authIn, word32 authInSz)
  5965. {
  5966. int ret = 0;
  5967. word32 blocks = sz / AES_BLOCK_SIZE;
  5968. word32 partial = sz % AES_BLOCK_SIZE;
  5969. const byte* p = in;
  5970. byte* c = out;
  5971. ALIGN16 byte counter[AES_BLOCK_SIZE];
  5972. ALIGN16 byte initialCounter[AES_BLOCK_SIZE];
  5973. ALIGN16 byte scratch[AES_BLOCK_SIZE];
  5974. if (ivSz == GCM_NONCE_MID_SZ) {
  5975. /* Counter is IV with bottom 4 bytes set to: 0x00,0x00,0x00,0x01. */
  5976. XMEMCPY(counter, iv, ivSz);
  5977. XMEMSET(counter + GCM_NONCE_MID_SZ, 0,
  5978. AES_BLOCK_SIZE - GCM_NONCE_MID_SZ - 1);
  5979. counter[AES_BLOCK_SIZE - 1] = 1;
  5980. }
  5981. else {
  5982. /* Counter is GHASH of IV. */
  5983. #ifdef OPENSSL_EXTRA
  5984. word32 aadTemp = aes->aadLen;
  5985. aes->aadLen = 0;
  5986. #endif
  5987. GHASH(aes, NULL, 0, iv, ivSz, counter, AES_BLOCK_SIZE);
  5988. #ifdef OPENSSL_EXTRA
  5989. aes->aadLen = aadTemp;
  5990. #endif
  5991. }
  5992. XMEMCPY(initialCounter, counter, AES_BLOCK_SIZE);
  5993. #ifdef WOLFSSL_PIC32MZ_CRYPT
  5994. if (blocks) {
  5995. /* use initial IV for HW, but don't use it below */
  5996. XMEMCPY(aes->reg, counter, AES_BLOCK_SIZE);
  5997. ret = wc_Pic32AesCrypt(
  5998. aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
  5999. out, in, (blocks * AES_BLOCK_SIZE),
  6000. PIC32_ENCRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_AES_GCM);
  6001. if (ret != 0)
  6002. return ret;
  6003. }
  6004. /* process remainder using partial handling */
  6005. #endif
  6006. #if defined(HAVE_AES_ECB) && !defined(WOLFSSL_PIC32MZ_CRYPT)
  6007. /* some hardware acceleration can gain performance from doing AES encryption
  6008. * of the whole buffer at once */
  6009. if (c != p && blocks > 0) { /* can not handle inline encryption */
  6010. while (blocks--) {
  6011. IncrementGcmCounter(counter);
  6012. XMEMCPY(c, counter, AES_BLOCK_SIZE);
  6013. c += AES_BLOCK_SIZE;
  6014. }
  6015. /* reset number of blocks and then do encryption */
  6016. blocks = sz / AES_BLOCK_SIZE;
  6017. wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
  6018. xorbuf(out, p, AES_BLOCK_SIZE * blocks);
  6019. p += AES_BLOCK_SIZE * blocks;
  6020. }
  6021. else
  6022. #endif /* HAVE_AES_ECB && !WOLFSSL_PIC32MZ_CRYPT */
  6023. {
  6024. while (blocks--) {
  6025. IncrementGcmCounter(counter);
  6026. #if !defined(WOLFSSL_PIC32MZ_CRYPT)
  6027. ret = wc_AesEncrypt(aes, counter, scratch);
  6028. if (ret != 0)
  6029. return ret;
  6030. xorbufout(c, scratch, p, AES_BLOCK_SIZE);
  6031. #endif
  6032. p += AES_BLOCK_SIZE;
  6033. c += AES_BLOCK_SIZE;
  6034. }
  6035. }
  6036. if (partial != 0) {
  6037. IncrementGcmCounter(counter);
  6038. ret = wc_AesEncrypt(aes, counter, scratch);
  6039. if (ret != 0)
  6040. return ret;
  6041. xorbufout(c, scratch, p, partial);
  6042. }
  6043. if (authTag) {
  6044. GHASH(aes, authIn, authInSz, out, sz, authTag, authTagSz);
  6045. ret = wc_AesEncrypt(aes, initialCounter, scratch);
  6046. if (ret != 0)
  6047. return ret;
  6048. xorbuf(authTag, scratch, authTagSz);
  6049. #ifdef OPENSSL_EXTRA
  6050. if (!in && !sz)
  6051. /* store AAD size for next call */
  6052. aes->aadLen = authInSz;
  6053. #endif
  6054. }
  6055. return ret;
  6056. }
/* Software AES - GCM Encrypt.
 *
 * Top-level dispatcher: tries crypto callbacks, async hardware, and
 * accelerated implementations before falling back to AES_GCM_encrypt_C().
 *
 * @param [in]  aes        AES object with key set.
 * @param [out] out        Encrypted output; same length as input (sz).
 * @param [in]  in         Plain text to encrypt.
 * @param [in]  sz         Length of plain text in bytes.
 * @param [in]  iv         Initialization vector (nonce); must be non-empty.
 * @param [in]  ivSz       Length of IV in bytes.
 * @param [out] authTag    Authentication tag output.
 * @param [in]  authTagSz  Tag size; WOLFSSL_MIN_AUTH_TAG_SZ..AES_BLOCK_SIZE.
 * @param [in]  authIn     Additional authentication data.
 * @param [in]  authInSz   Length of AAD in bytes.
 * @return 0 on success, BAD_FUNC_ARG on invalid arguments, or an error from
 *         the selected implementation (e.g. WC_PENDING_E for async).
 */
int wc_AesGcmEncrypt(Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    /* argument checks */
    if (aes == NULL || authTagSz > AES_BLOCK_SIZE || ivSz == 0) {
        return BAD_FUNC_ARG;
    }
    if (authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ) {
        WOLFSSL_MSG("GcmEncrypt authTagSz too small error");
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret =
            wc_CryptoCb_AesGcmEncrypt(aes, out, in, sz, iv, ivSz, authTag,
                authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    /* if async and byte count above threshold */
    /* only 12-byte IV is supported in HW */
    if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
        sz >= WC_ASYNC_THRESH_AES_GCM && ivSz == GCM_NONCE_MID_SZ) {
    #if defined(HAVE_CAVIUM)
        #ifdef HAVE_CAVIUM_V
        if (authInSz == 20) { /* Nitrox V GCM is only working with 20 byte AAD */
            return NitroxAesGcmEncrypt(aes, out, in, sz,
                (const byte*)aes->devKey, aes->keylen, iv, ivSz,
                authTag, authTagSz, authIn, authInSz);
        }
        #endif
    #elif defined(HAVE_INTEL_QA)
        return IntelQaSymAesGcmEncrypt(&aes->asyncDev, out, in, sz,
            (const byte*)aes->devKey, aes->keylen, iv, ivSz,
            authTag, authTagSz, authIn, authInSz);
    #else /* WOLFSSL_ASYNC_CRYPT_SW */
        if (wc_AsyncSwInit(&aes->asyncDev, ASYNC_SW_AES_GCM_ENCRYPT)) {
            /* Stash all arguments for the software-async worker. */
            WC_ASYNC_SW* sw = &aes->asyncDev.sw;
            sw->aes.aes = aes;
            sw->aes.out = out;
            sw->aes.in = in;
            sw->aes.sz = sz;
            sw->aes.iv = iv;
            sw->aes.ivSz = ivSz;
            sw->aes.authTag = authTag;
            sw->aes.authTagSz = authTagSz;
            sw->aes.authIn = authIn;
            sw->aes.authInSz = authInSz;
            return WC_PENDING_E;
        }
    #endif
    }
#endif /* WOLFSSL_ASYNC_CRYPT */

#ifdef WOLFSSL_SILABS_SE_ACCEL
    return wc_AesGcmEncrypt_silabs(
        aes, out, in, sz,
        iv, ivSz,
        authTag, authTagSz,
        authIn, authInSz);
#endif

#ifdef STM32_CRYPTO_AES_GCM
    return wc_AesGcmEncrypt_STM32(
        aes, out, in, sz, iv, ivSz,
        authTag, authTagSz, authIn, authInSz);
#endif /* STM32_CRYPTO_AES_GCM */

#ifdef WOLFSSL_AESNI
    #ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_encrypt_avx2(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
            authTagSz, (const byte*)aes->key, (int)aes->rounds);
        RESTORE_VECTOR_REGISTERS();
        return 0;
    }
    else
    #endif
    #if defined(HAVE_INTEL_AVX1)
    if (IS_INTEL_AVX1(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_encrypt_avx1(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
            authTagSz, (const byte*)aes->key, (int)aes->rounds);
        RESTORE_VECTOR_REGISTERS();
        return 0;
    }
    else
    #endif
    if (haveAESNI) {
        AES_GCM_encrypt(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
            authTagSz, (const byte*)aes->key, (int)aes->rounds);
        return 0;
    }
    else
#endif
    {
        /* Portable software fallback. */
        return AES_GCM_encrypt_C(aes, out, in, sz, iv, ivSz, authTag, authTagSz,
            authIn, authInSz);
    }
}
  6164. #endif
  6165. /* AES GCM Decrypt */
  6166. #if defined(HAVE_AES_DECRYPT) || defined(HAVE_AESGCM_DECRYPT)
  6167. #ifdef FREESCALE_LTC_AES_GCM
/* AES-GCM decrypt with tag verification using the NXP/Freescale LTC
 * hardware.
 *
 * @param [in]  aes        AES object with key set.
 * @param [out] out        Decrypted output; same length as input. Don't-care
 *                         when sz is 0 (GMAC).
 * @param [in]  in         Cipher text to decrypt.
 * @param [in]  sz         Length of cipher text in bytes.
 * @param [in]  iv         Initialization vector (nonce); must be non-empty.
 * @param [in]  ivSz       Length of IV in bytes.
 * @param [in]  authTag    Expected authentication tag.
 * @param [in]  authTagSz  Tag size; 1..AES_BLOCK_SIZE.
 * @param [in]  authIn     Additional authentication data.
 * @param [in]  authInSz   Length of AAD in bytes.
 * @return 0 on success, BAD_FUNC_ARG on invalid arguments, AES_GCM_AUTH_E
 *         when the tag does not verify or the hardware fails.
 */
int wc_AesGcmDecrypt(Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    const byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    int ret;
    word32 keySize;
    status_t status;

    /* argument checks */
    /* If the sz is non-zero, both in and out must be set. If sz is 0,
     * in and out are don't cares, as this is the GMAC case. */
    if (aes == NULL || iv == NULL || (sz != 0 && (in == NULL || out == NULL)) ||
        authTag == NULL || authTagSz > AES_BLOCK_SIZE || authTagSz == 0 ||
        ivSz == 0) {
        return BAD_FUNC_ARG;
    }

    ret = wc_AesGetKeySize(aes, &keySize);
    if (ret != 0) {
        return ret;
    }

    /* Serialize access to the hardware. */
    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    status = LTC_AES_DecryptTagGcm(LTC_BASE, in, out, sz, iv, ivSz,
        authIn, authInSz, (byte*)aes->key, keySize, authTag, authTagSz);
    wolfSSL_CryptHwMutexUnLock();

    return (status == kStatus_Success) ? 0 : AES_GCM_AUTH_E;
}
  6196. #else
  6197. #ifdef STM32_CRYPTO_AES_GCM
  6198. /* this function supports inline decrypt */
  6199. static WARN_UNUSED_RESULT int wc_AesGcmDecrypt_STM32(
  6200. Aes* aes, byte* out,
  6201. const byte* in, word32 sz,
  6202. const byte* iv, word32 ivSz,
  6203. const byte* authTag, word32 authTagSz,
  6204. const byte* authIn, word32 authInSz)
  6205. {
  6206. int ret;
  6207. #ifdef WOLFSSL_STM32_CUBEMX
  6208. int status = HAL_OK;
  6209. CRYP_HandleTypeDef hcryp;
  6210. word32 blocks = sz / AES_BLOCK_SIZE;
  6211. #else
  6212. int status = SUCCESS;
  6213. word32 keyCopy[AES_256_KEY_SIZE/sizeof(word32)];
  6214. #endif
  6215. word32 keySize;
  6216. word32 partial = sz % AES_BLOCK_SIZE;
  6217. word32 tag[AES_BLOCK_SIZE/sizeof(word32)];
  6218. word32 tagExpected[AES_BLOCK_SIZE/sizeof(word32)];
  6219. word32 partialBlock[AES_BLOCK_SIZE/sizeof(word32)];
  6220. word32 ctr[AES_BLOCK_SIZE/sizeof(word32)];
  6221. word32 authhdr[AES_BLOCK_SIZE/sizeof(word32)];
  6222. byte* authInPadded = NULL;
  6223. int authPadSz, wasAlloc = 0, tagComputed = 0;
  6224. ret = wc_AesGetKeySize(aes, &keySize);
  6225. if (ret != 0)
  6226. return ret;
  6227. #ifdef WOLFSSL_STM32_CUBEMX
  6228. ret = wc_Stm32_Aes_Init(aes, &hcryp);
  6229. if (ret != 0)
  6230. return ret;
  6231. #endif
  6232. XMEMSET(ctr, 0, AES_BLOCK_SIZE);
  6233. if (ivSz == GCM_NONCE_MID_SZ) {
  6234. byte* pCtr = (byte*)ctr;
  6235. XMEMCPY(ctr, iv, ivSz);
  6236. pCtr[AES_BLOCK_SIZE - 1] = 1;
  6237. }
  6238. else {
  6239. GHASH(aes, NULL, 0, iv, ivSz, (byte*)ctr, AES_BLOCK_SIZE);
  6240. }
  6241. /* Make copy of expected authTag, which could get corrupted in some
  6242. * Cube HAL versions without proper partial block support.
  6243. * For TLS blocks the authTag is after the output buffer, so save it */
  6244. XMEMCPY(tagExpected, authTag, authTagSz);
  6245. /* Authentication buffer - must be 4-byte multiple zero padded */
  6246. authPadSz = authInSz % sizeof(word32);
  6247. if (authPadSz != 0) {
  6248. authPadSz = authInSz + sizeof(word32) - authPadSz;
  6249. }
  6250. else {
  6251. authPadSz = authInSz;
  6252. }
  6253. /* for cases where hardware cannot be used for authTag calculate it */
  6254. /* if IV is not 12 calculate GHASH using software */
  6255. if (ivSz != GCM_NONCE_MID_SZ
  6256. #ifndef CRYP_HEADERWIDTHUNIT_BYTE
  6257. /* or harware that does not support partial block */
  6258. || sz == 0 || partial != 0
  6259. #endif
  6260. #if !defined(CRYP_HEADERWIDTHUNIT_BYTE) && !defined(STM32_AESGCM_PARTIAL)
  6261. /* or authIn is not a multiple of 4 */
  6262. || authPadSz != authInSz
  6263. #endif
  6264. ) {
  6265. GHASH(aes, authIn, authInSz, in, sz, (byte*)tag, sizeof(tag));
  6266. ret = wc_AesEncrypt(aes, (byte*)ctr, (byte*)partialBlock);
  6267. if (ret != 0)
  6268. return ret;
  6269. xorbuf(tag, partialBlock, sizeof(tag));
  6270. tagComputed = 1;
  6271. }
  6272. /* if using hardware for authentication tag make sure its aligned and zero padded */
  6273. if (authPadSz != authInSz && !tagComputed) {
  6274. if (authPadSz <= sizeof(authhdr)) {
  6275. authInPadded = (byte*)authhdr;
  6276. }
  6277. else {
  6278. authInPadded = (byte*)XMALLOC(authPadSz, aes->heap,
  6279. DYNAMIC_TYPE_TMP_BUFFER);
  6280. if (authInPadded == NULL) {
  6281. wolfSSL_CryptHwMutexUnLock();
  6282. return MEMORY_E;
  6283. }
  6284. wasAlloc = 1;
  6285. }
  6286. XMEMSET(authInPadded, 0, authPadSz);
  6287. XMEMCPY(authInPadded, authIn, authInSz);
  6288. } else {
  6289. authInPadded = (byte*)authIn;
  6290. }
  6291. /* Hardware requires counter + 1 */
  6292. IncrementGcmCounter((byte*)ctr);
  6293. ret = wolfSSL_CryptHwMutexLock();
  6294. if (ret != 0) {
  6295. return ret;
  6296. }
  6297. #ifdef WOLFSSL_STM32_CUBEMX
  6298. hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
  6299. hcryp.Init.Header = (STM_CRYPT_TYPE*)authInPadded;
  6300. #if defined(STM32_HAL_V2)
  6301. hcryp.Init.Algorithm = CRYP_AES_GCM;
  6302. #ifdef CRYP_HEADERWIDTHUNIT_BYTE
  6303. /* V2 with CRYP_HEADERWIDTHUNIT_BYTE uses byte size for header */
  6304. hcryp.Init.HeaderSize = authInSz;
  6305. #else
  6306. hcryp.Init.HeaderSize = authPadSz/sizeof(word32);
  6307. #endif
  6308. #ifdef CRYP_KEYIVCONFIG_ONCE
  6309. /* allows repeated calls to HAL_CRYP_Decrypt */
  6310. hcryp.Init.KeyIVConfigSkip = CRYP_KEYIVCONFIG_ONCE;
  6311. #endif
  6312. ByteReverseWords(ctr, ctr, AES_BLOCK_SIZE);
  6313. hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr;
  6314. HAL_CRYP_Init(&hcryp);
  6315. #ifndef CRYP_KEYIVCONFIG_ONCE
  6316. status = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)in,
  6317. (blocks * AES_BLOCK_SIZE) + partial, (uint32_t*)out, STM32_HAL_TIMEOUT);
  6318. #else
  6319. /* GCM payload phase - blocks */
  6320. if (blocks) {
  6321. status = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)in,
  6322. (blocks * AES_BLOCK_SIZE), (uint32_t*)out, STM32_HAL_TIMEOUT);
  6323. }
  6324. /* GCM payload phase - partial remainder */
  6325. if (status == HAL_OK && (partial != 0 || blocks == 0)) {
  6326. XMEMSET(partialBlock, 0, sizeof(partialBlock));
  6327. XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
  6328. status = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)partialBlock, partial,
  6329. (uint32_t*)partialBlock, STM32_HAL_TIMEOUT);
  6330. XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
  6331. }
  6332. #endif
  6333. if (status == HAL_OK && !tagComputed) {
  6334. /* Compute the authTag */
  6335. status = HAL_CRYPEx_AESGCM_GenerateAuthTAG(&hcryp, (uint32_t*)tag,
  6336. STM32_HAL_TIMEOUT);
  6337. }
  6338. #elif defined(STM32_CRYPTO_AES_ONLY)
  6339. /* Set the CRYP parameters */
  6340. hcryp.Init.HeaderSize = authPadSz;
  6341. if (authPadSz == 0)
  6342. hcryp.Init.Header = NULL; /* cannot pass pointer when authIn == 0 */
  6343. hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_GCM_GMAC;
  6344. hcryp.Init.OperatingMode = CRYP_ALGOMODE_DECRYPT;
  6345. hcryp.Init.GCMCMACPhase = CRYP_INIT_PHASE;
  6346. HAL_CRYP_Init(&hcryp);
  6347. /* GCM init phase */
  6348. status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
  6349. if (status == HAL_OK) {
  6350. /* GCM header phase */
  6351. hcryp.Init.GCMCMACPhase = CRYP_HEADER_PHASE;
  6352. status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT);
  6353. }
  6354. if (status == HAL_OK) {
  6355. /* GCM payload phase - blocks */
  6356. hcryp.Init.GCMCMACPhase = CRYP_PAYLOAD_PHASE;
  6357. if (blocks) {
  6358. status = HAL_CRYPEx_AES_Auth(&hcryp, (byte*)in,
  6359. (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
  6360. }
  6361. }
  6362. if (status == HAL_OK && (partial != 0 || (sz > 0 && blocks == 0))) {
  6363. /* GCM payload phase - partial remainder */
  6364. XMEMSET(partialBlock, 0, sizeof(partialBlock));
  6365. XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
  6366. status = HAL_CRYPEx_AES_Auth(&hcryp, (byte*)partialBlock, partial,
  6367. (byte*)partialBlock, STM32_HAL_TIMEOUT);
  6368. XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
  6369. }
  6370. if (status == HAL_OK && tagComputed == 0) {
  6371. /* GCM final phase */
  6372. hcryp.Init.GCMCMACPhase = CRYP_FINAL_PHASE;
  6373. status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, sz, (byte*)tag, STM32_HAL_TIMEOUT);
  6374. }
  6375. #else
  6376. hcryp.Init.HeaderSize = authPadSz;
  6377. HAL_CRYP_Init(&hcryp);
  6378. if (blocks) {
  6379. /* GCM payload phase - blocks */
  6380. status = HAL_CRYPEx_AESGCM_Decrypt(&hcryp, (byte*)in,
  6381. (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT);
  6382. }
  6383. if (status == HAL_OK && (partial != 0 || blocks == 0)) {
  6384. /* GCM payload phase - partial remainder */
  6385. XMEMSET(partialBlock, 0, sizeof(partialBlock));
  6386. XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial);
  6387. status = HAL_CRYPEx_AESGCM_Decrypt(&hcryp, (byte*)partialBlock, partial,
  6388. (byte*)partialBlock, STM32_HAL_TIMEOUT);
  6389. XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial);
  6390. }
  6391. if (status == HAL_OK && tagComputed == 0) {
  6392. /* Compute the authTag */
  6393. status = HAL_CRYPEx_AESGCM_Finish(&hcryp, sz, (byte*)tag, STM32_HAL_TIMEOUT);
  6394. }
  6395. #endif
  6396. if (status != HAL_OK)
  6397. ret = AES_GCM_AUTH_E;
  6398. HAL_CRYP_DeInit(&hcryp);
  6399. #else /* Standard Peripheral Library */
  6400. ByteReverseWords(keyCopy, (word32*)aes->key, aes->keylen);
  6401. /* Input size and auth size need to be the actual sizes, even though
  6402. * they are not block aligned, because this length (in bits) is used
  6403. * in the final GHASH. */
  6404. XMEMSET(partialBlock, 0, sizeof(partialBlock)); /* use this to get tag */
  6405. status = CRYP_AES_GCM(MODE_DECRYPT, (uint8_t*)ctr,
  6406. (uint8_t*)keyCopy, keySize * 8,
  6407. (uint8_t*)in, sz,
  6408. (uint8_t*)authInPadded, authInSz,
  6409. (uint8_t*)out, (uint8_t*)partialBlock);
  6410. if (status != SUCCESS)
  6411. ret = AES_GCM_AUTH_E;
  6412. if (tagComputed == 0)
  6413. XMEMCPY(tag, partialBlock, authTagSz);
  6414. #endif /* WOLFSSL_STM32_CUBEMX */
  6415. wolfSSL_CryptHwMutexUnLock();
  6416. /* Check authentication tag */
  6417. if (ConstantCompare((const byte*)tagExpected, (byte*)tag, authTagSz) != 0) {
  6418. ret = AES_GCM_AUTH_E;
  6419. }
  6420. /* Free memory */
  6421. if (wasAlloc) {
  6422. XFREE(authInPadded, aes->heap, DYNAMIC_TYPE_TMP_BUFFER);
  6423. }
  6424. return ret;
  6425. }
  6426. #endif /* STM32_CRYPTO_AES_GCM */
  6427. #ifdef WOLFSSL_AESNI
  6428. /* For performance reasons, this code needs to be not inlined. */
  6429. int WARN_UNUSED_RESULT AES_GCM_decrypt_C(
  6430. Aes* aes, byte* out, const byte* in, word32 sz,
  6431. const byte* iv, word32 ivSz,
  6432. const byte* authTag, word32 authTagSz,
  6433. const byte* authIn, word32 authInSz);
  6434. #else
  6435. static
  6436. #endif
/* Software AES-GCM decrypt core.
 *
 * Recomputes the tag (GHASH over AAD + ciphertext, XOR E(K, Y0)) and
 * compares it to the received tag in constant time. Depending on
 * WC_AES_GCM_DEC_AUTH_EARLY the compare happens before or after the
 * payload is decrypted. The mask arithmetic below is timing-sensitive;
 * do not restructure it.
 *
 * Returns 0 on success, AES_GCM_AUTH_E on tag mismatch, or the error
 * from wc_AesEncrypt / wc_Pic32AesCrypt.
 */
int WARN_UNUSED_RESULT AES_GCM_decrypt_C(
    Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* iv, word32 ivSz,
    const byte* authTag, word32 authTagSz,
    const byte* authIn, word32 authInSz)
{
    int ret;
    word32 blocks = sz / AES_BLOCK_SIZE;
    word32 partial = sz % AES_BLOCK_SIZE;
    const byte* c = in;   /* cursor over ciphertext */
    byte* p = out;        /* cursor over plaintext */
    ALIGN16 byte counter[AES_BLOCK_SIZE];
    ALIGN16 byte scratch[AES_BLOCK_SIZE];
    ALIGN16 byte Tprime[AES_BLOCK_SIZE];  /* recomputed tag */
    ALIGN16 byte EKY0[AES_BLOCK_SIZE];    /* E(K, Y0) */
    sword32 res;

    if (ivSz == GCM_NONCE_MID_SZ) {
        /* Counter is IV with bottom 4 bytes set to: 0x00,0x00,0x00,0x01. */
        XMEMCPY(counter, iv, ivSz);
        XMEMSET(counter + GCM_NONCE_MID_SZ, 0,
            AES_BLOCK_SIZE - GCM_NONCE_MID_SZ - 1);
        counter[AES_BLOCK_SIZE - 1] = 1;
    }
    else {
        /* Counter is GHASH of IV. */
#ifdef OPENSSL_EXTRA
        /* GHASH of the IV must not disturb the cached AAD length. */
        word32 aadTemp = aes->aadLen;
        aes->aadLen = 0;
#endif
        GHASH(aes, NULL, 0, iv, ivSz, counter, AES_BLOCK_SIZE);
#ifdef OPENSSL_EXTRA
        aes->aadLen = aadTemp;
#endif
    }

    /* Calc the authTag again using received auth data and the cipher text */
    GHASH(aes, authIn, authInSz, in, sz, Tprime, sizeof(Tprime));
    ret = wc_AesEncrypt(aes, counter, EKY0);
    if (ret != 0)
        return ret;
    xorbuf(Tprime, EKY0, sizeof(Tprime));

#ifdef WC_AES_GCM_DEC_AUTH_EARLY
    /* ConstantCompare returns the cumulative bitwise or of the bitwise xor of
     * the pairwise bytes in the strings.
     */
    res = ConstantCompare(authTag, Tprime, authTagSz);
    /* convert positive retval from ConstantCompare() to all-1s word, in
     * constant time.
     */
    res = 0 - (sword32)(((word32)(0 - res)) >> 31U);
    /* Reject before doing any decryption when the tag does not match. */
    ret = res & AES_GCM_AUTH_E;
    if (ret != 0)
        return ret;
#endif

#ifdef OPENSSL_EXTRA
    if (!out) {
        /* authenticated, non-confidential data */
        /* store AAD size for next call */
        aes->aadLen = authInSz;
    }
#endif

#if defined(WOLFSSL_PIC32MZ_CRYPT)
    if (blocks) {
        /* use initial IV for HW, but don't use it below */
        XMEMCPY(aes->reg, counter, AES_BLOCK_SIZE);
        ret = wc_Pic32AesCrypt(
            aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE,
            out, in, (blocks * AES_BLOCK_SIZE),
            PIC32_DECRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_AES_GCM);
        if (ret != 0)
            return ret;
    }
    /* process remainder using partial handling */
#endif

#if defined(HAVE_AES_ECB) && !defined(WOLFSSL_PIC32MZ_CRYPT)
    /* some hardware acceleration can gain performance from doing AES encryption
     * of the whole buffer at once */
    if (c != p && blocks > 0) { /* can not handle inline decryption */
        /* Stage counter blocks into the output, ECB-encrypt them in one
         * call, then XOR with ciphertext to recover the plaintext. */
        while (blocks--) {
            IncrementGcmCounter(counter);
            XMEMCPY(p, counter, AES_BLOCK_SIZE);
            p += AES_BLOCK_SIZE;
        }

        /* reset number of blocks and then do encryption */
        blocks = sz / AES_BLOCK_SIZE;
        wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
        xorbuf(out, c, AES_BLOCK_SIZE * blocks);
        c += AES_BLOCK_SIZE * blocks;
    }
    else
#endif /* HAVE_AES_ECB && !PIC32MZ */
    {
        /* Block-at-a-time CTR decryption (PIC32 already decrypted the
         * full blocks above; only the counter advance is needed there). */
        while (blocks--) {
            IncrementGcmCounter(counter);
#if !defined(WOLFSSL_PIC32MZ_CRYPT)
            ret = wc_AesEncrypt(aes, counter, scratch);
            if (ret != 0)
                return ret;
            xorbufout(p, scratch, c, AES_BLOCK_SIZE);
#endif
            p += AES_BLOCK_SIZE;
            c += AES_BLOCK_SIZE;
        }
    }

    if (partial != 0) {
        /* Final short block: use only 'partial' bytes of the keystream. */
        IncrementGcmCounter(counter);
        ret = wc_AesEncrypt(aes, counter, scratch);
        if (ret != 0)
            return ret;
        xorbuf(scratch, c, partial);
        XMEMCPY(p, scratch, partial);
    }

#ifndef WC_AES_GCM_DEC_AUTH_EARLY
    /* ConstantCompare returns the cumulative bitwise or of the bitwise xor of
     * the pairwise bytes in the strings.
     */
    res = ConstantCompare(authTag, Tprime, (int)authTagSz);
    /* convert positive retval from ConstantCompare() to all-1s word, in
     * constant time.
     */
    res = 0 - (sword32)(((word32)(0 - res)) >> 31U);
    /* now use res as a mask for constant time return of ret, unless tag
     * mismatch, whereupon AES_GCM_AUTH_E is returned.
     */
    ret = (ret & ~res) | (res & AES_GCM_AUTH_E);
#endif
    return ret;
}
  6564. /* Software AES - GCM Decrypt */
/* AES-GCM decrypt entry point: validates arguments, then dispatches to a
 * crypto callback, async hardware, platform accelerators (Silicon Labs,
 * STM32), AES-NI assembly, or the C fallback AES_GCM_decrypt_C().
 *
 * NOTE: the #ifdef'd if/else chains below form a single dispatch ladder
 * across preprocessor configurations - keep the dangling 'else' structure
 * intact when editing.
 *
 * Returns 0 on success, BAD_FUNC_ARG on bad arguments, AES_GCM_AUTH_E on
 * tag mismatch, WC_PENDING_E for queued async work, or a backend error.
 */
int wc_AesGcmDecrypt(Aes* aes, byte* out, const byte* in, word32 sz,
                     const byte* iv, word32 ivSz,
                     const byte* authTag, word32 authTagSz,
                     const byte* authIn, word32 authInSz)
{
#ifdef WOLFSSL_AESNI
    /* Written by the AES-NI assembly: 0 indicates tag mismatch (see the
     * 'res == 0' checks below). */
    int res = AES_GCM_AUTH_E;
#endif

    /* argument checks */
    /* If the sz is non-zero, both in and out must be set. If sz is 0,
     * in and out are don't cares, as this is is the GMAC case. */
    if (aes == NULL || iv == NULL || (sz != 0 && (in == NULL || out == NULL)) ||
        authTag == NULL || authTagSz > AES_BLOCK_SIZE || authTagSz == 0 ||
        ivSz == 0) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
#ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
#endif
    {
        int crypto_cb_ret =
            wc_CryptoCb_AesGcmDecrypt(aes, out, in, sz, iv, ivSz,
                authTag, authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    /* if async and byte count above threshold */
    /* only 12-byte IV is supported in HW */
    if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES &&
        sz >= WC_ASYNC_THRESH_AES_GCM && ivSz == GCM_NONCE_MID_SZ) {
#if defined(HAVE_CAVIUM)
#ifdef HAVE_CAVIUM_V
        if (authInSz == 20) { /* Nitrox V GCM is only working with 20 byte AAD */
            return NitroxAesGcmDecrypt(aes, out, in, sz,
                (const byte*)aes->devKey, aes->keylen, iv, ivSz,
                authTag, authTagSz, authIn, authInSz);
        }
#endif
#elif defined(HAVE_INTEL_QA)
        return IntelQaSymAesGcmDecrypt(&aes->asyncDev, out, in, sz,
            (const byte*)aes->devKey, aes->keylen, iv, ivSz,
            authTag, authTagSz, authIn, authInSz);
#else /* WOLFSSL_ASYNC_CRYPT_SW */
        if (wc_AsyncSwInit(&aes->asyncDev, ASYNC_SW_AES_GCM_DECRYPT)) {
            /* Queue the operation parameters for the software async worker. */
            WC_ASYNC_SW* sw = &aes->asyncDev.sw;
            sw->aes.aes = aes;
            sw->aes.out = out;
            sw->aes.in = in;
            sw->aes.sz = sz;
            sw->aes.iv = iv;
            sw->aes.ivSz = ivSz;
            sw->aes.authTag = (byte*)authTag;
            sw->aes.authTagSz = authTagSz;
            sw->aes.authIn = authIn;
            sw->aes.authInSz = authInSz;
            return WC_PENDING_E;
        }
#endif
    }
#endif /* WOLFSSL_ASYNC_CRYPT */

#ifdef WOLFSSL_SILABS_SE_ACCEL
    return wc_AesGcmDecrypt_silabs(
        aes, out, in, sz, iv, ivSz,
        authTag, authTagSz, authIn, authInSz);
#endif

#ifdef STM32_CRYPTO_AES_GCM
    /* The STM standard peripheral library API's doesn't support partial blocks */
    return wc_AesGcmDecrypt_STM32(
        aes, out, in, sz, iv, ivSz,
        authTag, authTagSz, authIn, authInSz);
#endif /* STM32_CRYPTO_AES_GCM */

#ifdef WOLFSSL_AESNI
#ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_decrypt_avx2(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
            authTagSz, (byte*)aes->key, (int)aes->rounds, &res);
        RESTORE_VECTOR_REGISTERS();
        if (res == 0)
            return AES_GCM_AUTH_E;
        return 0;
    }
    else
#endif
#if defined(HAVE_INTEL_AVX1)
    if (IS_INTEL_AVX1(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_decrypt_avx1(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
            authTagSz, (byte*)aes->key, (int)aes->rounds, &res);
        RESTORE_VECTOR_REGISTERS();
        if (res == 0)
            return AES_GCM_AUTH_E;
        return 0;
    }
    else
#endif
    if (haveAESNI) {
        AES_GCM_decrypt(in, out, authIn, iv, authTag, sz, authInSz, ivSz,
            authTagSz, (byte*)aes->key, (int)aes->rounds, &res);
        if (res == 0)
            return AES_GCM_AUTH_E;
        return 0;
    }
    else
#endif
    {
        /* Portable C fallback. */
        return AES_GCM_decrypt_C(aes, out, in, sz, iv, ivSz, authTag, authTagSz,
            authIn, authInSz);
    }
}
  6679. #endif
  6680. #endif /* HAVE_AES_DECRYPT || HAVE_AESGCM_DECRYPT */
  6681. #ifdef WOLFSSL_AESGCM_STREAM
  6682. /* Initialize the AES GCM cipher with an IV. C implementation.
  6683. *
  6684. * @param [in, out] aes AES object.
  6685. * @param [in] iv IV/nonce buffer.
  6686. * @param [in] ivSz Length of IV/nonce data.
  6687. */
  6688. static WARN_UNUSED_RESULT int AesGcmInit_C(Aes* aes, const byte* iv, word32 ivSz)
  6689. {
  6690. ALIGN32 byte counter[AES_BLOCK_SIZE];
  6691. int ret;
  6692. if (ivSz == GCM_NONCE_MID_SZ) {
  6693. /* Counter is IV with bottom 4 bytes set to: 0x00,0x00,0x00,0x01. */
  6694. XMEMCPY(counter, iv, ivSz);
  6695. XMEMSET(counter + GCM_NONCE_MID_SZ, 0,
  6696. AES_BLOCK_SIZE - GCM_NONCE_MID_SZ - 1);
  6697. counter[AES_BLOCK_SIZE - 1] = 1;
  6698. }
  6699. else {
  6700. /* Counter is GHASH of IV. */
  6701. #ifdef OPENSSL_EXTRA
  6702. word32 aadTemp = aes->aadLen;
  6703. aes->aadLen = 0;
  6704. #endif
  6705. GHASH(aes, NULL, 0, iv, ivSz, counter, AES_BLOCK_SIZE);
  6706. #ifdef OPENSSL_EXTRA
  6707. aes->aadLen = aadTemp;
  6708. #endif
  6709. }
  6710. /* Copy in the counter for use with cipher. */
  6711. XMEMCPY(AES_COUNTER(aes), counter, AES_BLOCK_SIZE);
  6712. /* Encrypt initial counter into a buffer for GCM. */
  6713. ret = wc_AesEncrypt(aes, counter, AES_INITCTR(aes));
  6714. if (ret != 0)
  6715. return ret;
  6716. /* Reset state fields. */
  6717. aes->over = 0;
  6718. aes->aSz = 0;
  6719. aes->cSz = 0;
  6720. /* Initialization for GHASH. */
  6721. GHASH_INIT(aes);
  6722. return 0;
  6723. }
  6724. /* Update the AES GCM cipher with data. C implementation.
  6725. *
  6726. * Only enciphers data.
  6727. *
  6728. * @param [in, out] aes AES object.
 * @param [out] out Cipher text or plaintext buffer.
  6730. * @param [in] in Plaintext or cipher text buffer.
  6731. * @param [in] sz Length of data.
  6732. */
/* Streaming GCM-CTR pass: consumes any cached keystream bytes first, then
 * whole blocks, then caches the keystream of a trailing partial block in
 * AES_LASTBLOCK(aes) with aes->over tracking how much of it was used.
 * Returns 0 on success or the error from wc_AesEncrypt. */
static WARN_UNUSED_RESULT int AesGcmCryptUpdate_C(
    Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 blocks;
    word32 partial;
    int ret;

    /* Check if previous encrypted block was not used up. */
    if (aes->over > 0) {
        /* Unused keystream bytes remaining in the cached block. */
        byte pSz = AES_BLOCK_SIZE - aes->over;
        if (pSz > sz) pSz = (byte)sz;

        /* Use some/all of last encrypted block. */
        xorbufout(out, AES_LASTBLOCK(aes) + aes->over, in, pSz);
        /* Mask wraps 'over' to 0 exactly when the cached block is fully
         * consumed (AES_BLOCK_SIZE is a power of two). */
        aes->over = (aes->over + pSz) & (AES_BLOCK_SIZE - 1);

        /* Some data used. */
        sz -= pSz;
        in += pSz;
        out += pSz;
    }

    /* Calculate the number of blocks needing to be encrypted and any leftover.
     */
    blocks = sz / AES_BLOCK_SIZE;
    partial = sz & (AES_BLOCK_SIZE - 1);

#if defined(HAVE_AES_ECB)
    /* Some hardware acceleration can gain performance from doing AES encryption
     * of the whole buffer at once.
     * Overwrites the cipher text before using plaintext - no inline encryption.
     */
    if ((out != in) && blocks > 0) {
        word32 b;
        /* Place incrementing counter blocks into cipher text. */
        for (b = 0; b < blocks; b++) {
            IncrementGcmCounter(AES_COUNTER(aes));
            XMEMCPY(out + b * AES_BLOCK_SIZE, AES_COUNTER(aes), AES_BLOCK_SIZE);
        }

        /* Encrypt counter blocks. */
        wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks);
        /* XOR in plaintext. */
        xorbuf(out, in, AES_BLOCK_SIZE * blocks);
        /* Skip over processed data. */
        in += AES_BLOCK_SIZE * blocks;
        out += AES_BLOCK_SIZE * blocks;
    }
    else
#endif /* HAVE_AES_ECB */
    {
        /* Encrypt block by block. */
        while (blocks--) {
            ALIGN32 byte scratch[AES_BLOCK_SIZE];
            IncrementGcmCounter(AES_COUNTER(aes));
            /* Encrypt counter into a buffer. */
            ret = wc_AesEncrypt(aes, AES_COUNTER(aes), scratch);
            if (ret != 0)
                return ret;
            /* XOR plain text into encrypted counter into cipher text buffer. */
            xorbufout(out, scratch, in, AES_BLOCK_SIZE);
            /* Data complete. */
            in += AES_BLOCK_SIZE;
            out += AES_BLOCK_SIZE;
        }
    }

    if (partial != 0) {
        /* Generate an extra block and use up as much as needed. */
        IncrementGcmCounter(AES_COUNTER(aes));
        /* Encrypt counter into cache. */
        ret = wc_AesEncrypt(aes, AES_COUNTER(aes), AES_LASTBLOCK(aes));
        if (ret != 0)
            return ret;
        /* XOR plain text into encrypted counter into cipher text buffer. */
        xorbufout(out, AES_LASTBLOCK(aes), in, partial);
        /* Keep amount of encrypted block used. */
        aes->over = (byte)partial;
    }

    return 0;
}
  6807. /* Calculates authentication tag for AES GCM. C implementation.
  6808. *
  6809. * @param [in, out] aes AES object.
  6810. * @param [out] authTag Buffer to store authentication tag in.
  6811. * @param [in] authTagSz Length of tag to create.
  6812. */
  6813. static WARN_UNUSED_RESULT int AesGcmFinal_C(
  6814. Aes* aes, byte* authTag, word32 authTagSz)
  6815. {
  6816. /* Calculate authentication tag. */
  6817. GHASH_FINAL(aes, authTag, authTagSz);
  6818. /* XOR in as much of encrypted counter as is required. */
  6819. xorbuf(authTag, AES_INITCTR(aes), authTagSz);
  6820. #ifdef OPENSSL_EXTRA
  6821. /* store AAD size for next call */
  6822. aes->aadLen = aes->aSz;
  6823. #endif
  6824. /* Zeroize last block to protect sensitive data. */
  6825. ForceZero(AES_LASTBLOCK(aes), AES_BLOCK_SIZE);
  6826. return 0;
  6827. }
  6828. #ifdef WOLFSSL_AESNI
  6829. #ifdef __cplusplus
  6830. extern "C" {
  6831. #endif
  6832. /* Assembly code implementations in: aes_gcm_asm.S */
  6833. #ifdef HAVE_INTEL_AVX2
  6834. extern void AES_GCM_init_avx2(const unsigned char* key, int nr,
  6835. const unsigned char* ivec, unsigned int ibytes, unsigned char* h,
  6836. unsigned char* counter, unsigned char* initCtr);
  6837. extern void AES_GCM_aad_update_avx2(const unsigned char* addt,
  6838. unsigned int abytes, unsigned char* tag, unsigned char* h);
  6839. extern void AES_GCM_encrypt_block_avx2(const unsigned char* key, int nr,
  6840. unsigned char* out, const unsigned char* in, unsigned char* counter);
  6841. extern void AES_GCM_ghash_block_avx2(const unsigned char* data,
  6842. unsigned char* tag, unsigned char* h);
  6843. extern void AES_GCM_encrypt_update_avx2(const unsigned char* key, int nr,
  6844. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  6845. unsigned char* tag, unsigned char* h, unsigned char* counter);
  6846. extern void AES_GCM_encrypt_final_avx2(unsigned char* tag,
  6847. unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  6848. unsigned int abytes, unsigned char* h, unsigned char* initCtr);
  6849. #endif
  6850. #ifdef HAVE_INTEL_AVX1
  6851. extern void AES_GCM_init_avx1(const unsigned char* key, int nr,
  6852. const unsigned char* ivec, unsigned int ibytes, unsigned char* h,
  6853. unsigned char* counter, unsigned char* initCtr);
  6854. extern void AES_GCM_aad_update_avx1(const unsigned char* addt,
  6855. unsigned int abytes, unsigned char* tag, unsigned char* h);
  6856. extern void AES_GCM_encrypt_block_avx1(const unsigned char* key, int nr,
  6857. unsigned char* out, const unsigned char* in, unsigned char* counter);
  6858. extern void AES_GCM_ghash_block_avx1(const unsigned char* data,
  6859. unsigned char* tag, unsigned char* h);
  6860. extern void AES_GCM_encrypt_update_avx1(const unsigned char* key, int nr,
  6861. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  6862. unsigned char* tag, unsigned char* h, unsigned char* counter);
  6863. extern void AES_GCM_encrypt_final_avx1(unsigned char* tag,
  6864. unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  6865. unsigned int abytes, unsigned char* h, unsigned char* initCtr);
  6866. #endif
  6867. extern void AES_GCM_init_aesni(const unsigned char* key, int nr,
  6868. const unsigned char* ivec, unsigned int ibytes, unsigned char* h,
  6869. unsigned char* counter, unsigned char* initCtr);
  6870. extern void AES_GCM_aad_update_aesni(const unsigned char* addt,
  6871. unsigned int abytes, unsigned char* tag, unsigned char* h);
  6872. extern void AES_GCM_encrypt_block_aesni(const unsigned char* key, int nr,
  6873. unsigned char* out, const unsigned char* in, unsigned char* counter);
  6874. extern void AES_GCM_ghash_block_aesni(const unsigned char* data,
  6875. unsigned char* tag, unsigned char* h);
  6876. extern void AES_GCM_encrypt_update_aesni(const unsigned char* key, int nr,
  6877. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  6878. unsigned char* tag, unsigned char* h, unsigned char* counter);
  6879. extern void AES_GCM_encrypt_final_aesni(unsigned char* tag,
  6880. unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  6881. unsigned int abytes, unsigned char* h, unsigned char* initCtr);
  6882. #ifdef __cplusplus
  6883. } /* extern "C" */
  6884. #endif
  6885. /* Initialize the AES GCM cipher with an IV. AES-NI implementations.
  6886. *
  6887. * @param [in, out] aes AES object.
  6888. * @param [in] iv IV/nonce buffer.
  6889. * @param [in] ivSz Length of IV/nonce data.
  6890. */
/* AES-NI streaming GCM init: clears the streaming state then lets the
 * assembly derive the counter and E(K, Y0) from the IV. Dispatches to the
 * AVX2/AVX1/plain AES-NI variant based on detected CPU flags; each assembly
 * call is bracketed by SAVE/RESTORE_VECTOR_REGISTERS. Returns 0 (or the
 * SAVE_VECTOR_REGISTERS failure value via its return statement). */
static WARN_UNUSED_RESULT int AesGcmInit_aesni(
    Aes* aes, const byte* iv, word32 ivSz)
{
    /* Reset state fields. */
    aes->aSz = 0;
    aes->cSz = 0;
    /* Set tag to all zeros as initial value. */
    XMEMSET(AES_TAG(aes), 0, AES_BLOCK_SIZE);
    /* Reset counts of AAD and cipher text. */
    aes->aOver = 0;
    aes->cOver = 0;
#ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        /* Fills aes->H, AES_COUNTER(aes) and AES_INITCTR(aes) from the IV. */
        AES_GCM_init_avx2((byte*)aes->key, (int)aes->rounds, iv, ivSz, aes->H,
            AES_COUNTER(aes), AES_INITCTR(aes));
        RESTORE_VECTOR_REGISTERS();
    }
    else
#endif
#ifdef HAVE_INTEL_AVX1
    if (IS_INTEL_AVX1(intel_flags)) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_init_avx1((byte*)aes->key, (int)aes->rounds, iv, ivSz, aes->H,
            AES_COUNTER(aes), AES_INITCTR(aes));
        RESTORE_VECTOR_REGISTERS();
    }
    else
#endif
    {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_GCM_init_aesni((byte*)aes->key, (int)aes->rounds, iv, ivSz, aes->H,
            AES_COUNTER(aes), AES_INITCTR(aes));
        RESTORE_VECTOR_REGISTERS();
    }
    return 0;
}
  6928. /* Update the AES GCM for encryption with authentication data.
  6929. *
  6930. * Implementation uses AVX2, AVX1 or straight AES-NI optimized assembly code.
  6931. *
  6932. * @param [in, out] aes AES object.
  6933. * @param [in] a Buffer holding authentication data.
  6934. * @param [in] aSz Length of authentication data in bytes.
  6935. * @param [in] endA Whether no more authentication data is expected.
  6936. */
/* GHASH the AAD into AES_TAG(aes) using the AES-NI/AVX assembly, buffering a
 * trailing partial block in AES_LASTGBLOCK(aes) (count in aes->aOver) until
 * either more AAD fills it or endA forces zero-padded processing.
 * Caller must already hold saved vector registers. Returns 0. */
static WARN_UNUSED_RESULT int AesGcmAadUpdate_aesni(
    Aes* aes, const byte* a, word32 aSz, int endA)
{
    word32 blocks;
    int partial;

    ASSERT_SAVED_VECTOR_REGISTERS();

    if (aSz != 0 && a != NULL) {
        /* Total count of AAD updated. */
        aes->aSz += aSz;
        /* Check if we have unprocessed data. */
        if (aes->aOver > 0) {
            /* Calculate amount we can use - fill up the block. */
            byte sz = AES_BLOCK_SIZE - aes->aOver;
            if (sz > aSz) {
                sz = (byte)aSz;
            }
            /* Copy extra into last GHASH block array and update count. */
            XMEMCPY(AES_LASTGBLOCK(aes) + aes->aOver, a, sz);
            aes->aOver += sz;
            if (aes->aOver == AES_BLOCK_SIZE) {
                /* We have filled up the block and can process. */
#ifdef HAVE_INTEL_AVX2
                if (IS_INTEL_AVX2(intel_flags)) {
                    AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes),
                        aes->H);
                }
                else
#endif
#ifdef HAVE_INTEL_AVX1
                if (IS_INTEL_AVX1(intel_flags)) {
                    AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes),
                        aes->H);
                }
                else
#endif
                {
                    AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
                        aes->H);
                }
                /* Reset count. */
                aes->aOver = 0;
            }
            /* Used up some data. */
            aSz -= sz;
            a += sz;
        }

        /* Calculate number of blocks of AAD and the leftover. */
        blocks = aSz / AES_BLOCK_SIZE;
        partial = aSz % AES_BLOCK_SIZE;
        if (blocks > 0) {
            /* GHASH full blocks now. */
#ifdef HAVE_INTEL_AVX2
            if (IS_INTEL_AVX2(intel_flags)) {
                AES_GCM_aad_update_avx2(a, blocks * AES_BLOCK_SIZE,
                    AES_TAG(aes), aes->H);
            }
            else
#endif
#ifdef HAVE_INTEL_AVX1
            if (IS_INTEL_AVX1(intel_flags)) {
                AES_GCM_aad_update_avx1(a, blocks * AES_BLOCK_SIZE,
                    AES_TAG(aes), aes->H);
            }
            else
#endif
            {
                AES_GCM_aad_update_aesni(a, blocks * AES_BLOCK_SIZE,
                    AES_TAG(aes), aes->H);
            }
            /* Skip over to end of AAD blocks. */
            a += blocks * AES_BLOCK_SIZE;
        }
        if (partial != 0) {
            /* Cache the partial block. */
            XMEMCPY(AES_LASTGBLOCK(aes), a, (size_t)partial);
            aes->aOver = (byte)partial;
        }
    }
    if (endA && (aes->aOver > 0)) {
        /* No more AAD coming and we have a partial block. */
        /* Fill the rest of the block with zeros. */
        XMEMSET(AES_LASTGBLOCK(aes) + aes->aOver, 0,
            AES_BLOCK_SIZE - aes->aOver);
        /* GHASH last AAD block. */
#ifdef HAVE_INTEL_AVX2
        if (IS_INTEL_AVX2(intel_flags)) {
            AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes), aes->H);
        }
        else
#endif
#ifdef HAVE_INTEL_AVX1
        if (IS_INTEL_AVX1(intel_flags)) {
            AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes), aes->H);
        }
        else
#endif
        {
            AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
                aes->H);
        }
        /* Clear partial count for next time through. */
        aes->aOver = 0;
    }
    return 0;
}
  7042. /* Update the AES GCM for encryption with data and/or authentication data.
  7043. *
  7044. * Implementation uses AVX2, AVX1 or straight AES-NI optimized assembly code.
  7045. *
  7046. * @param [in, out] aes AES object.
  7047. * @param [out] c Buffer to hold cipher text.
  7048. * @param [in] p Buffer holding plaintext.
  7049. * @param [in] cSz Length of cipher text/plaintext in bytes.
  7050. * @param [in] a Buffer holding authentication data.
  7051. * @param [in] aSz Length of authentication data in bytes.
  7052. */
  7053. static WARN_UNUSED_RESULT int AesGcmEncryptUpdate_aesni(
  7054. Aes* aes, byte* c, const byte* p, word32 cSz, const byte* a, word32 aSz)
  7055. {
  7056. word32 blocks;
  7057. int partial;
  7058. int ret;
  7059. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  7060. /* Hash in A, the Authentication Data */
  7061. ret = AesGcmAadUpdate_aesni(aes, a, aSz, (cSz > 0) && (c != NULL));
  7062. if (ret != 0)
  7063. return ret;
  7064. /* Encrypt plaintext and Hash in C, the Cipher text */
  7065. if (cSz != 0 && c != NULL) {
  7066. /* Update count of cipher text we have hashed. */
  7067. aes->cSz += cSz;
  7068. if (aes->cOver > 0) {
  7069. /* Calculate amount we can use - fill up the block. */
  7070. byte sz = AES_BLOCK_SIZE - aes->cOver;
  7071. if (sz > cSz) {
  7072. sz = (byte)cSz;
  7073. }
  7074. /* Encrypt some of the plaintext. */
  7075. xorbuf(AES_LASTGBLOCK(aes) + aes->cOver, p, sz);
  7076. XMEMCPY(c, AES_LASTGBLOCK(aes) + aes->cOver, sz);
  7077. /* Update count of unsed encrypted counter. */
  7078. aes->cOver += sz;
  7079. if (aes->cOver == AES_BLOCK_SIZE) {
  7080. /* We have filled up the block and can process. */
  7081. #ifdef HAVE_INTEL_AVX2
  7082. if (IS_INTEL_AVX2(intel_flags)) {
  7083. AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes),
  7084. aes->H);
  7085. }
  7086. else
  7087. #endif
  7088. #ifdef HAVE_INTEL_AVX1
  7089. if (IS_INTEL_AVX1(intel_flags)) {
  7090. AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes),
  7091. aes->H);
  7092. }
  7093. else
  7094. #endif
  7095. {
  7096. AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
  7097. aes->H);
  7098. }
  7099. /* Reset count. */
  7100. aes->cOver = 0;
  7101. }
  7102. /* Used up some data. */
  7103. cSz -= sz;
  7104. p += sz;
  7105. c += sz;
  7106. }
  7107. /* Calculate number of blocks of plaintext and the leftover. */
  7108. blocks = cSz / AES_BLOCK_SIZE;
  7109. partial = cSz % AES_BLOCK_SIZE;
  7110. if (blocks > 0) {
  7111. /* Encrypt and GHASH full blocks now. */
  7112. #ifdef HAVE_INTEL_AVX2
  7113. if (IS_INTEL_AVX2(intel_flags)) {
  7114. AES_GCM_encrypt_update_avx2((byte*)aes->key, (int)aes->rounds,
  7115. c, p, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->H,
  7116. AES_COUNTER(aes));
  7117. }
  7118. else
  7119. #endif
  7120. #ifdef HAVE_INTEL_AVX1
  7121. if (IS_INTEL_AVX1(intel_flags)) {
  7122. AES_GCM_encrypt_update_avx1((byte*)aes->key, (int)aes->rounds,
  7123. c, p, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->H,
  7124. AES_COUNTER(aes));
  7125. }
  7126. else
  7127. #endif
  7128. {
  7129. AES_GCM_encrypt_update_aesni((byte*)aes->key, (int)aes->rounds,
  7130. c, p, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->H,
  7131. AES_COUNTER(aes));
  7132. }
  7133. /* Skip over to end of blocks. */
  7134. p += blocks * AES_BLOCK_SIZE;
  7135. c += blocks * AES_BLOCK_SIZE;
  7136. }
  7137. if (partial != 0) {
  7138. /* Encrypt the counter - XOR in zeros as proxy for plaintext. */
  7139. XMEMSET(AES_LASTGBLOCK(aes), 0, AES_BLOCK_SIZE);
  7140. #ifdef HAVE_INTEL_AVX2
  7141. if (IS_INTEL_AVX2(intel_flags)) {
  7142. AES_GCM_encrypt_block_avx2((byte*)aes->key, (int)aes->rounds,
  7143. AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
  7144. }
  7145. else
  7146. #endif
  7147. #ifdef HAVE_INTEL_AVX1
  7148. if (IS_INTEL_AVX1(intel_flags)) {
  7149. AES_GCM_encrypt_block_avx1((byte*)aes->key, (int)aes->rounds,
  7150. AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
  7151. }
  7152. else
  7153. #endif
  7154. {
  7155. AES_GCM_encrypt_block_aesni((byte*)aes->key, (int)aes->rounds,
  7156. AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
  7157. }
  7158. /* XOR the remaining plaintext to calculate cipher text.
  7159. * Keep cipher text for GHASH of last partial block.
  7160. */
  7161. xorbuf(AES_LASTGBLOCK(aes), p, (word32)partial);
  7162. XMEMCPY(c, AES_LASTGBLOCK(aes), (size_t)partial);
  7163. /* Update count of the block used. */
  7164. aes->cOver = (byte)partial;
  7165. }
  7166. }
  7167. RESTORE_VECTOR_REGISTERS();
  7168. return 0;
  7169. }
/* Finalize the AES GCM for encryption and calculate the authentication tag.
 *
 * Calls AVX2, AVX1 or straight AES-NI optimized assembly code.
 *
 * @param [in, out] aes        AES object.
 * @param [in]      authTag    Buffer to hold authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 * @return  0 on success.
 */
static WARN_UNUSED_RESULT int AesGcmEncryptFinal_aesni(
    Aes* aes, byte* authTag, word32 authTagSz)
{
    /* AAD block incomplete when > 0 */
    byte over = aes->aOver;

    SAVE_VECTOR_REGISTERS(return _svr_ret;);
    if (aes->cOver > 0) {
        /* Cipher text block incomplete. Takes precedence over a partial AAD
         * block - both live in the same scratch block AES_LASTGBLOCK. */
        over = aes->cOver;
    }
    if (over > 0) {
        /* A partial block is still pending: pad it with zeros and fold it
         * into the GHASH state before computing the tag. */
        XMEMSET(AES_LASTGBLOCK(aes) + over, 0, AES_BLOCK_SIZE - over);
        /* GHASH last cipher block. */
    #ifdef HAVE_INTEL_AVX2
        if (IS_INTEL_AVX2(intel_flags)) {
            AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes), aes->H);
        }
        else
    #endif
    #ifdef HAVE_INTEL_AVX1
        if (IS_INTEL_AVX1(intel_flags)) {
            AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes), aes->H);
        }
        else
    #endif
        {
            AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes),
                aes->H);
        }
    }
    /* Calculate the authentication tag. The assembly final routine is passed
     * the AAD and cipher text byte counts plus the initial counter block. */
#ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        AES_GCM_encrypt_final_avx2(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->H, AES_INITCTR(aes));
    }
    else
#endif
#ifdef HAVE_INTEL_AVX1
    if (IS_INTEL_AVX1(intel_flags)) {
        AES_GCM_encrypt_final_avx1(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->H, AES_INITCTR(aes));
    }
    else
#endif
    {
        AES_GCM_encrypt_final_aesni(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->H, AES_INITCTR(aes));
    }
    RESTORE_VECTOR_REGISTERS();
    return 0;
}
  7232. #if defined(HAVE_AES_DECRYPT) || defined(HAVE_AESGCM_DECRYPT)
  7233. #ifdef __cplusplus
  7234. extern "C" {
  7235. #endif
  7236. /* Assembly code implementations in: aes_gcm_asm.S and aes_gcm_x86_asm.S */
  7237. #ifdef HAVE_INTEL_AVX2
  7238. extern void AES_GCM_decrypt_update_avx2(const unsigned char* key, int nr,
  7239. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  7240. unsigned char* tag, unsigned char* h, unsigned char* counter);
  7241. extern void AES_GCM_decrypt_final_avx2(unsigned char* tag,
  7242. const unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  7243. unsigned int abytes, unsigned char* h, unsigned char* initCtr, int* res);
  7244. #endif
  7245. #ifdef HAVE_INTEL_AVX1
  7246. extern void AES_GCM_decrypt_update_avx1(const unsigned char* key, int nr,
  7247. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  7248. unsigned char* tag, unsigned char* h, unsigned char* counter);
  7249. extern void AES_GCM_decrypt_final_avx1(unsigned char* tag,
  7250. const unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  7251. unsigned int abytes, unsigned char* h, unsigned char* initCtr, int* res);
  7252. #endif
  7253. extern void AES_GCM_decrypt_update_aesni(const unsigned char* key, int nr,
  7254. unsigned char* out, const unsigned char* in, unsigned int nbytes,
  7255. unsigned char* tag, unsigned char* h, unsigned char* counter);
  7256. extern void AES_GCM_decrypt_final_aesni(unsigned char* tag,
  7257. const unsigned char* authTag, unsigned int tbytes, unsigned int nbytes,
  7258. unsigned int abytes, unsigned char* h, unsigned char* initCtr, int* res);
  7259. #ifdef __cplusplus
  7260. } /* extern "C" */
  7261. #endif
  7262. /* Update the AES GCM for decryption with data and/or authentication data.
  7263. *
  7264. * @param [in, out] aes AES object.
  7265. * @param [out] p Buffer to hold plaintext.
  7266. * @param [in] c Buffer holding ciper text.
  7267. * @param [in] cSz Length of cipher text/plaintext in bytes.
  7268. * @param [in] a Buffer holding authentication data.
  7269. * @param [in] aSz Length of authentication data in bytes.
  7270. */
  7271. static WARN_UNUSED_RESULT int AesGcmDecryptUpdate_aesni(
  7272. Aes* aes, byte* p, const byte* c, word32 cSz, const byte* a, word32 aSz)
  7273. {
  7274. word32 blocks;
  7275. int partial;
  7276. int ret;
  7277. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  7278. /* Hash in A, the Authentication Data */
  7279. ret = AesGcmAadUpdate_aesni(aes, a, aSz, (cSz > 0) && (c != NULL));
  7280. if (ret != 0)
  7281. return ret;
  7282. /* Hash in C, the Cipher text, and decrypt. */
  7283. if (cSz != 0 && p != NULL) {
  7284. /* Update count of cipher text we have hashed. */
  7285. aes->cSz += cSz;
  7286. if (aes->cOver > 0) {
  7287. /* Calculate amount we can use - fill up the block. */
  7288. byte sz = AES_BLOCK_SIZE - aes->cOver;
  7289. if (sz > cSz) {
  7290. sz = (byte)cSz;
  7291. }
  7292. /* Keep a copy of the cipher text for GHASH. */
  7293. XMEMCPY(AES_LASTBLOCK(aes) + aes->cOver, c, sz);
  7294. /* Decrypt some of the cipher text. */
  7295. xorbuf(AES_LASTGBLOCK(aes) + aes->cOver, c, sz);
  7296. XMEMCPY(p, AES_LASTGBLOCK(aes) + aes->cOver, sz);
  7297. /* Update count of unsed encrypted counter. */
  7298. aes->cOver += sz;
  7299. if (aes->cOver == AES_BLOCK_SIZE) {
  7300. /* We have filled up the block and can process. */
  7301. #ifdef HAVE_INTEL_AVX2
  7302. if (IS_INTEL_AVX2(intel_flags)) {
  7303. AES_GCM_ghash_block_avx2(AES_LASTBLOCK(aes), AES_TAG(aes),
  7304. aes->H);
  7305. }
  7306. else
  7307. #endif
  7308. #ifdef HAVE_INTEL_AVX1
  7309. if (IS_INTEL_AVX1(intel_flags)) {
  7310. AES_GCM_ghash_block_avx1(AES_LASTBLOCK(aes), AES_TAG(aes),
  7311. aes->H);
  7312. }
  7313. else
  7314. #endif
  7315. {
  7316. AES_GCM_ghash_block_aesni(AES_LASTBLOCK(aes), AES_TAG(aes),
  7317. aes->H);
  7318. }
  7319. /* Reset count. */
  7320. aes->cOver = 0;
  7321. }
  7322. /* Used up some data. */
  7323. cSz -= sz;
  7324. c += sz;
  7325. p += sz;
  7326. }
  7327. /* Calculate number of blocks of plaintext and the leftover. */
  7328. blocks = cSz / AES_BLOCK_SIZE;
  7329. partial = cSz % AES_BLOCK_SIZE;
  7330. if (blocks > 0) {
  7331. /* Decrypt and GHASH full blocks now. */
  7332. #ifdef HAVE_INTEL_AVX2
  7333. if (IS_INTEL_AVX2(intel_flags)) {
  7334. AES_GCM_decrypt_update_avx2((byte*)aes->key, (int)aes->rounds,
  7335. p, c, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->H,
  7336. AES_COUNTER(aes));
  7337. }
  7338. else
  7339. #endif
  7340. #ifdef HAVE_INTEL_AVX1
  7341. if (IS_INTEL_AVX1(intel_flags)) {
  7342. AES_GCM_decrypt_update_avx1((byte*)aes->key, (int)aes->rounds,
  7343. p, c, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->H,
  7344. AES_COUNTER(aes));
  7345. }
  7346. else
  7347. #endif
  7348. {
  7349. AES_GCM_decrypt_update_aesni((byte*)aes->key, (int)aes->rounds,
  7350. p, c, blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->H,
  7351. AES_COUNTER(aes));
  7352. }
  7353. /* Skip over to end of blocks. */
  7354. c += blocks * AES_BLOCK_SIZE;
  7355. p += blocks * AES_BLOCK_SIZE;
  7356. }
  7357. if (partial != 0) {
  7358. /* Encrypt the counter - XOR in zeros as proxy for cipher text. */
  7359. XMEMSET(AES_LASTGBLOCK(aes), 0, AES_BLOCK_SIZE);
  7360. #ifdef HAVE_INTEL_AVX2
  7361. if (IS_INTEL_AVX2(intel_flags)) {
  7362. AES_GCM_encrypt_block_avx2((byte*)aes->key, (int)aes->rounds,
  7363. AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
  7364. }
  7365. else
  7366. #endif
  7367. #ifdef HAVE_INTEL_AVX1
  7368. if (IS_INTEL_AVX1(intel_flags)) {
  7369. AES_GCM_encrypt_block_avx1((byte*)aes->key, (int)aes->rounds,
  7370. AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
  7371. }
  7372. else
  7373. #endif
  7374. {
  7375. AES_GCM_encrypt_block_aesni((byte*)aes->key, (int)aes->rounds,
  7376. AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes));
  7377. }
  7378. /* Keep cipher text for GHASH of last partial block. */
  7379. XMEMCPY(AES_LASTBLOCK(aes), c, (size_t)partial);
  7380. /* XOR the remaining cipher text to calculate plaintext. */
  7381. xorbuf(AES_LASTGBLOCK(aes), c, (word32)partial);
  7382. XMEMCPY(p, AES_LASTGBLOCK(aes), (size_t)partial);
  7383. /* Update count of the block used. */
  7384. aes->cOver = (byte)partial;
  7385. }
  7386. }
  7387. RESTORE_VECTOR_REGISTERS();
  7388. return 0;
  7389. }
/* Finalize the AES GCM for decryption and check the authentication tag.
 *
 * Calls AVX2, AVX1 or straight AES-NI optimized assembly code.
 *
 * @param [in, out] aes        AES object.
 * @param [in]      authTag    Buffer holding authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 * @return  0 on success.
 * @return  AES_GCM_AUTH_E when authentication tag doesn't match calculated
 *          value.
 */
static WARN_UNUSED_RESULT int AesGcmDecryptFinal_aesni(
    Aes* aes, const byte* authTag, word32 authTagSz)
{
    int ret = 0;
    /* Tag comparison result - written by the assembly final routine. */
    int res;
    /* AAD block incomplete when > 0 */
    byte over = aes->aOver;
    byte *lastBlock = AES_LASTGBLOCK(aes);

    SAVE_VECTOR_REGISTERS(return _svr_ret;);
    if (aes->cOver > 0) {
        /* Cipher text block incomplete. Use the saved cipher text copy for
         * GHASH, not the encrypted-counter scratch block. */
        over = aes->cOver;
        lastBlock = AES_LASTBLOCK(aes);
    }
    if (over > 0) {
        /* Zeroize the unused part of the block. */
        XMEMSET(lastBlock + over, 0, AES_BLOCK_SIZE - over);
        /* Hash the last block of cipher text. */
    #ifdef HAVE_INTEL_AVX2
        if (IS_INTEL_AVX2(intel_flags)) {
            AES_GCM_ghash_block_avx2(lastBlock, AES_TAG(aes), aes->H);
        }
        else
    #endif
    #ifdef HAVE_INTEL_AVX1
        if (IS_INTEL_AVX1(intel_flags)) {
            AES_GCM_ghash_block_avx1(lastBlock, AES_TAG(aes), aes->H);
        }
        else
    #endif
        {
            AES_GCM_ghash_block_aesni(lastBlock, AES_TAG(aes), aes->H);
        }
    }
    /* Calculate and compare the authentication tag. */
#ifdef HAVE_INTEL_AVX2
    if (IS_INTEL_AVX2(intel_flags)) {
        AES_GCM_decrypt_final_avx2(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->H, AES_INITCTR(aes), &res);
    }
    else
#endif
#ifdef HAVE_INTEL_AVX1
    if (IS_INTEL_AVX1(intel_flags)) {
        AES_GCM_decrypt_final_avx1(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->H, AES_INITCTR(aes), &res);
    }
    else
#endif
    {
        AES_GCM_decrypt_final_aesni(AES_TAG(aes), authTag, authTagSz, aes->cSz,
            aes->aSz, aes->H, AES_INITCTR(aes), &res);
    }
    RESTORE_VECTOR_REGISTERS();
    /* Return error code when calculated doesn't match input.
     * res == 0 indicates mismatch. */
    if (res == 0) {
        ret = AES_GCM_AUTH_E;
    }
    return ret;
}
  7461. #endif /* HAVE_AES_DECRYPT || HAVE_AESGCM_DECRYPT */
  7462. #endif /* WOLFSSL_AESNI */
/* Initialize an AES GCM cipher for encryption or decryption.
 *
 * Must call wc_AesInit() before calling this function.
 * Call wc_AesGcmSetIV() before calling this function to generate part of IV.
 * Call wc_AesGcmSetExtIV() before calling this function to cache IV.
 *
 * @param [in, out] aes   AES object.
 * @param [in]      key   Buffer holding key.
 * @param [in]      len   Length of key in bytes.
 * @param [in]      iv    Buffer holding IV/nonce.
 * @param [in]      ivSz  Length of IV/nonce in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL, or the IV is NULL and no previous IV has been set.
 * @return  MEMORY_E when dynamic memory allocation fails. (WOLFSSL_SMALL_STACK)
 */
int wc_AesGcmInit(Aes* aes, const byte* key, word32 len, const byte* iv,
    word32 ivSz)
{
    int ret = 0;

    /* Check validity of parameters. Both key and iv are optional, but a
     * non-NULL buffer must come with a non-zero length and vice versa. */
    if ((aes == NULL) || ((len > 0) && (key == NULL)) ||
            ((ivSz == 0) && (iv != NULL)) ||
            ((ivSz > 0) && (iv == NULL))) {
        ret = BAD_FUNC_ARG;
    }

#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_AESNI)
    if ((ret == 0) && (aes->streamData == NULL)) {
        /* Allocate buffers for streaming (5 scratch blocks). */
        aes->streamData = (byte*)XMALLOC(5 * AES_BLOCK_SIZE, aes->heap,
            DYNAMIC_TYPE_AES);
        if (aes->streamData == NULL) {
            ret = MEMORY_E;
        }
    }
#endif

    /* Set the key if passed in. */
    if ((ret == 0) && (key != NULL)) {
        ret = wc_AesGcmSetKey(aes, key, len);
    }

    if (ret == 0) {
        /* Set the IV passed in if it is smaller than a block.
         * XMEMMOVE tolerates iv aliasing aes->reg. */
        if ((iv != NULL) && (ivSz <= AES_BLOCK_SIZE)) {
            XMEMMOVE((byte*)aes->reg, iv, ivSz);
            aes->nonceSz = ivSz;
        }
        /* No IV passed in, check for cached IV. */
        if ((iv == NULL) && (aes->nonceSz != 0)) {
            /* Use the cached copy. */
            iv = (byte*)aes->reg;
            ivSz = aes->nonceSz;
        }
        if (iv != NULL) {
            /* Initialize with the IV - AES-NI/AVX path when available,
             * otherwise the portable C implementation. */
        #ifdef WOLFSSL_AESNI
            if (haveAESNI
            #ifdef HAVE_INTEL_AVX2
                || IS_INTEL_AVX2(intel_flags)
            #endif
            #ifdef HAVE_INTEL_AVX1
                || IS_INTEL_AVX1(intel_flags)
            #endif
                ) {
                ret = AesGcmInit_aesni(aes, iv, ivSz);
            }
            else
        #endif
            {
                ret = AesGcmInit_C(aes, iv, ivSz);
            }
            /* Mark the nonce as set for later Update/Final sanity checks. */
            aes->nonceSet = 1;
        }
    }

    return ret;
}
/* Initialize an AES GCM cipher for encryption.
 *
 * Must call wc_AesInit() before calling this function.
 *
 * Thin direction-specific wrapper - identical behavior to wc_AesGcmInit().
 *
 * @param [in, out] aes   AES object.
 * @param [in]      key   Buffer holding key.
 * @param [in]      len   Length of key in bytes.
 * @param [in]      iv    Buffer holding IV/nonce.
 * @param [in]      ivSz  Length of IV/nonce in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL, or the IV is NULL and no previous IV has been set.
 */
int wc_AesGcmEncryptInit(Aes* aes, const byte* key, word32 len, const byte* iv,
    word32 ivSz)
{
    return wc_AesGcmInit(aes, key, len, iv, ivSz);
}
  7556. /* Initialize an AES GCM cipher for encryption. Get IV.
  7557. *
  7558. * Must call wc_AesGcmSetIV() to generate part of IV before calling this
  7559. * function.
  7560. * Must call wc_AesInit() before calling this function.
  7561. *
  7562. * See wc_AesGcmEncrypt_ex() for non-streaming version of getting IV out.
  7563. *
  7564. * @param [in, out] aes AES object.
  7565. * @param [in] key Buffer holding key.
  7566. * @param [in] len Length of key in bytes.
  7567. * @param [in] iv Buffer holding IV/nonce.
  7568. * @param [in] ivSz Length of IV/nonce in bytes.
  7569. * @return 0 on success.
  7570. * @return BAD_FUNC_ARG when aes is NULL, key length is non-zero but key
  7571. * is NULL, or the IV is NULL or ivOutSz is not the same as cached
  7572. * nonce size.
  7573. */
  7574. int wc_AesGcmEncryptInit_ex(Aes* aes, const byte* key, word32 len, byte* ivOut,
  7575. word32 ivOutSz)
  7576. {
  7577. int ret;
  7578. /* Check validity of parameters. */
  7579. if ((aes == NULL) || (ivOut == NULL) || (ivOutSz != aes->nonceSz)) {
  7580. ret = BAD_FUNC_ARG;
  7581. }
  7582. else {
  7583. /* Copy out the IV including generated part for decryption. */
  7584. XMEMCPY(ivOut, aes->reg, ivOutSz);
  7585. /* Initialize AES GCM cipher with key and cached Iv. */
  7586. ret = wc_AesGcmInit(aes, key, len, NULL, 0);
  7587. }
  7588. return ret;
  7589. }
  7590. /* Update the AES GCM for encryption with data and/or authentication data.
  7591. *
  7592. * All the AAD must be passed to update before the plaintext.
  7593. * Last part of AAD can be passed with first part of plaintext.
  7594. *
  7595. * Must set key and IV before calling this function.
  7596. * Must call wc_AesGcmInit() before calling this function.
  7597. *
  7598. * @param [in, out] aes AES object.
  7599. * @param [out] out Buffer to hold cipher text.
  7600. * @param [in] in Buffer holding plaintext.
  7601. * @param [in] sz Length of plaintext in bytes.
  7602. * @param [in] authIn Buffer holding authentication data.
  7603. * @param [in] authInSz Length of authentication data in bytes.
  7604. * @return 0 on success.
  7605. * @return BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
  7606. * is NULL.
  7607. */
  7608. int wc_AesGcmEncryptUpdate(Aes* aes, byte* out, const byte* in, word32 sz,
  7609. const byte* authIn, word32 authInSz)
  7610. {
  7611. int ret = 0;
  7612. /* Check validity of parameters. */
  7613. if ((aes == NULL) || ((authInSz > 0) && (authIn == NULL)) || ((sz > 0) &&
  7614. ((out == NULL) || (in == NULL)))) {
  7615. ret = BAD_FUNC_ARG;
  7616. }
  7617. /* Check key has been set. */
  7618. if ((ret == 0) && (!aes->gcmKeySet)) {
  7619. ret = MISSING_KEY;
  7620. }
  7621. /* Check IV has been set. */
  7622. if ((ret == 0) && (!aes->nonceSet)) {
  7623. ret = MISSING_IV;
  7624. }
  7625. if ((ret == 0) && aes->ctrSet && (aes->aSz == 0) && (aes->cSz == 0)) {
  7626. aes->invokeCtr[0]++;
  7627. if (aes->invokeCtr[0] == 0) {
  7628. aes->invokeCtr[1]++;
  7629. if (aes->invokeCtr[1] == 0)
  7630. ret = AES_GCM_OVERFLOW_E;
  7631. }
  7632. }
  7633. if (ret == 0) {
  7634. /* Encrypt with AAD and/or plaintext. */
  7635. #if defined(WOLFSSL_AESNI)
  7636. if (haveAESNI
  7637. #ifdef HAVE_INTEL_AVX2
  7638. || IS_INTEL_AVX2(intel_flags)
  7639. #endif
  7640. #ifdef HAVE_INTEL_AVX1
  7641. || IS_INTEL_AVX1(intel_flags)
  7642. #endif
  7643. ) {
  7644. ret = AesGcmEncryptUpdate_aesni(aes, out, in, sz, authIn, authInSz);
  7645. }
  7646. else
  7647. #endif
  7648. {
  7649. /* Encrypt the plaintext. */
  7650. ret = AesGcmCryptUpdate_C(aes, out, in, sz);
  7651. if (ret != 0)
  7652. return ret;
  7653. /* Update the authenication tag with any authentication data and the
  7654. * new cipher text. */
  7655. GHASH_UPDATE(aes, authIn, authInSz, out, sz);
  7656. }
  7657. }
  7658. return ret;
  7659. }
/* Finalize the AES GCM for encryption and return the authentication tag.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes        AES object.
 * @param [out]     authTag    Buffer to hold authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 * @return  0 on success.
 */
int wc_AesGcmEncryptFinal(Aes* aes, byte* authTag, word32 authTagSz)
{
    int ret = 0;

    /* Check validity of parameters. Tag must be 1..AES_BLOCK_SIZE bytes. */
    if ((aes == NULL) || (authTag == NULL) || (authTagSz > AES_BLOCK_SIZE) ||
            (authTagSz == 0)) {
        ret = BAD_FUNC_ARG;
    }
    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }

    if (ret == 0) {
        /* Calculate authentication tag. */
    #ifdef WOLFSSL_AESNI
        if (haveAESNI
        #ifdef HAVE_INTEL_AVX2
            || IS_INTEL_AVX2(intel_flags)
        #endif
        #ifdef HAVE_INTEL_AVX1
            || IS_INTEL_AVX1(intel_flags)
        #endif
            ) {
            ret = AesGcmEncryptFinal_aesni(aes, authTag, authTagSz);
        }
        else
    #endif
        {
            ret = AesGcmFinal_C(aes, authTag, authTagSz);
        }
    }
    if ((ret == 0) && aes->ctrSet) {
        /* Increment the cached nonce - presumably so a subsequent
         * invocation with the same object uses a fresh IV (TODO confirm
         * against wc_AesGcmSetIV/wc_AesGcmEncrypt_ex usage). */
        IncCtr((byte*)aes->reg, aes->nonceSz);
    }
    return ret;
}
  7710. #if defined(HAVE_AES_DECRYPT) || defined(HAVE_AESGCM_DECRYPT)
/* Initialize an AES GCM cipher for decryption.
 *
 * Must call wc_AesInit() before calling this function.
 *
 * Call wc_AesGcmSetExtIV() before calling this function to use FIPS external
 * IV instead.
 *
 * Thin direction-specific wrapper - identical behavior to wc_AesGcmInit().
 *
 * @param [in, out] aes   AES object.
 * @param [in]      key   Buffer holding key.
 * @param [in]      len   Length of key in bytes.
 * @param [in]      iv    Buffer holding IV/nonce.
 * @param [in]      ivSz  Length of IV/nonce in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL, or the IV is NULL and no previous IV has been set.
 */
int wc_AesGcmDecryptInit(Aes* aes, const byte* key, word32 len, const byte* iv,
    word32 ivSz)
{
    return wc_AesGcmInit(aes, key, len, iv, ivSz);
}
/* Update the AES GCM for decryption with data and/or authentication data.
 *
 * All the AAD must be passed to update before the cipher text.
 * Last part of AAD can be passed with first part of cipher text.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes       AES object.
 * @param [out]     out       Buffer to hold plaintext.
 * @param [in]      in        Buffer holding cipher text.
 * @param [in]      sz        Length of cipher text in bytes.
 * @param [in]      authIn    Buffer holding authentication data.
 * @param [in]      authInSz  Length of authentication data in bytes.
 * @return  0 on success.
 * @return  BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer
 *          is NULL.
 */
int wc_AesGcmDecryptUpdate(Aes* aes, byte* out, const byte* in, word32 sz,
    const byte* authIn, word32 authInSz)
{
    int ret = 0;

    /* Check validity of parameters. */
    if ((aes == NULL) || ((authInSz > 0) && (authIn == NULL)) || ((sz > 0) &&
            ((out == NULL) || (in == NULL)))) {
        ret = BAD_FUNC_ARG;
    }
    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }

    if (ret == 0) {
        /* Decrypt with AAD and/or cipher text. */
    #if defined(WOLFSSL_AESNI)
        if (haveAESNI
        #ifdef HAVE_INTEL_AVX2
            || IS_INTEL_AVX2(intel_flags)
        #endif
        #ifdef HAVE_INTEL_AVX1
            || IS_INTEL_AVX1(intel_flags)
        #endif
            ) {
            ret = AesGcmDecryptUpdate_aesni(aes, out, in, sz, authIn, authInSz);
        }
        else
    #endif
        {
            /* Update the authentication tag with any authentication data and
             * cipher text. Note: GHASH runs over the input (cipher text)
             * before decryption - the reverse order of the encrypt path,
             * which hashes its output. */
            GHASH_UPDATE(aes, authIn, authInSz, in, sz);
            /* Decrypt the cipher text. */
            ret = AesGcmCryptUpdate_C(aes, out, in, sz);
        }
    }
    return ret;
}
/* Finalize the AES GCM for decryption and check the authentication tag.
 *
 * Must set key and IV before calling this function.
 * Must call wc_AesGcmInit() before calling this function.
 *
 * @param [in, out] aes        AES object.
 * @param [in]      authTag    Buffer holding authentication tag.
 * @param [in]      authTagSz  Length of authentication tag in bytes.
 * @return  0 on success.
 */
int wc_AesGcmDecryptFinal(Aes* aes, const byte* authTag, word32 authTagSz)
{
    int ret = 0;

    /* Check validity of parameters. Tag must be 1..AES_BLOCK_SIZE bytes. */
    if ((aes == NULL) || (authTag == NULL) || (authTagSz > AES_BLOCK_SIZE) ||
            (authTagSz == 0)) {
        ret = BAD_FUNC_ARG;
    }
    /* Check key has been set. */
    if ((ret == 0) && (!aes->gcmKeySet)) {
        ret = MISSING_KEY;
    }
    /* Check IV has been set. */
    if ((ret == 0) && (!aes->nonceSet)) {
        ret = MISSING_IV;
    }

    if (ret == 0) {
        /* Calculate authentication tag and compare with the one passed in. */
    #ifdef WOLFSSL_AESNI
        if (haveAESNI
        #ifdef HAVE_INTEL_AVX2
            || IS_INTEL_AVX2(intel_flags)
        #endif
        #ifdef HAVE_INTEL_AVX1
            || IS_INTEL_AVX1(intel_flags)
        #endif
            ) {
            ret = AesGcmDecryptFinal_aesni(aes, authTag, authTagSz);
        }
        else
    #endif
        {
            ALIGN32 byte calcTag[AES_BLOCK_SIZE];
            /* Calculate authentication tag. */
            ret = AesGcmFinal_C(aes, calcTag, authTagSz);
            if (ret == 0) {
                /* Check calculated tag matches the one passed in.
                 * ConstantCompare is the library's timing-resistant
                 * comparison helper. */
                if (ConstantCompare(authTag, calcTag, (int)authTagSz) != 0) {
                    ret = AES_GCM_AUTH_E;
                }
            }
        }
    }

    /* Reset the state on successful verification. */
    if (ret == 0)
        wc_AesFree(aes);
    return ret;
}
  7850. #endif /* HAVE_AES_DECRYPT || HAVE_AESGCM_DECRYPT */
  7851. #endif /* WOLFSSL_AESGCM_STREAM */
  7852. #endif /* WOLFSSL_XILINX_CRYPT */
  7853. #endif /* end of block for AESGCM implementation selection */
  7854. /* Common to all, abstract functions that build off of lower level AESGCM
  7855. * functions */
  7856. #ifndef WC_NO_RNG
  7857. static WARN_UNUSED_RESULT WC_INLINE int CheckAesGcmIvSize(int ivSz) {
  7858. return (ivSz == GCM_NONCE_MIN_SZ ||
  7859. ivSz == GCM_NONCE_MID_SZ ||
  7860. ivSz == GCM_NONCE_MAX_SZ);
  7861. }
  7862. int wc_AesGcmSetExtIV(Aes* aes, const byte* iv, word32 ivSz)
  7863. {
  7864. int ret = 0;
  7865. if (aes == NULL || iv == NULL || !CheckAesGcmIvSize((int)ivSz)) {
  7866. ret = BAD_FUNC_ARG;
  7867. }
  7868. if (ret == 0) {
  7869. XMEMCPY((byte*)aes->reg, iv, ivSz);
  7870. /* If the IV is 96, allow for a 2^64 invocation counter.
  7871. * For any other size for the nonce, limit the invocation
  7872. * counter to 32-bits. (SP 800-38D 8.3) */
  7873. aes->invokeCtr[0] = 0;
  7874. aes->invokeCtr[1] = (ivSz == GCM_NONCE_MID_SZ) ? 0 : 0xFFFFFFFF;
  7875. #ifdef WOLFSSL_AESGCM_STREAM
  7876. aes->ctrSet = 1;
  7877. #endif
  7878. aes->nonceSz = ivSz;
  7879. }
  7880. return ret;
  7881. }
  7882. int wc_AesGcmSetIV(Aes* aes, word32 ivSz,
  7883. const byte* ivFixed, word32 ivFixedSz,
  7884. WC_RNG* rng)
  7885. {
  7886. int ret = 0;
  7887. if (aes == NULL || rng == NULL || !CheckAesGcmIvSize((int)ivSz) ||
  7888. (ivFixed == NULL && ivFixedSz != 0) ||
  7889. (ivFixed != NULL && ivFixedSz != AES_IV_FIXED_SZ)) {
  7890. ret = BAD_FUNC_ARG;
  7891. }
  7892. if (ret == 0) {
  7893. byte* iv = (byte*)aes->reg;
  7894. if (ivFixedSz)
  7895. XMEMCPY(iv, ivFixed, ivFixedSz);
  7896. ret = wc_RNG_GenerateBlock(rng, iv + ivFixedSz, ivSz - ivFixedSz);
  7897. }
  7898. if (ret == 0) {
  7899. /* If the IV is 96, allow for a 2^64 invocation counter.
  7900. * For any other size for the nonce, limit the invocation
  7901. * counter to 32-bits. (SP 800-38D 8.3) */
  7902. aes->invokeCtr[0] = 0;
  7903. aes->invokeCtr[1] = (ivSz == GCM_NONCE_MID_SZ) ? 0 : 0xFFFFFFFF;
  7904. #ifdef WOLFSSL_AESGCM_STREAM
  7905. aes->ctrSet = 1;
  7906. #endif
  7907. aes->nonceSz = ivSz;
  7908. }
  7909. return ret;
  7910. }
  7911. int wc_AesGcmEncrypt_ex(Aes* aes, byte* out, const byte* in, word32 sz,
  7912. byte* ivOut, word32 ivOutSz,
  7913. byte* authTag, word32 authTagSz,
  7914. const byte* authIn, word32 authInSz)
  7915. {
  7916. int ret = 0;
  7917. if (aes == NULL || (sz != 0 && (in == NULL || out == NULL)) ||
  7918. ivOut == NULL || ivOutSz != aes->nonceSz ||
  7919. (authIn == NULL && authInSz != 0)) {
  7920. ret = BAD_FUNC_ARG;
  7921. }
  7922. if (ret == 0) {
  7923. aes->invokeCtr[0]++;
  7924. if (aes->invokeCtr[0] == 0) {
  7925. aes->invokeCtr[1]++;
  7926. if (aes->invokeCtr[1] == 0)
  7927. ret = AES_GCM_OVERFLOW_E;
  7928. }
  7929. }
  7930. if (ret == 0) {
  7931. XMEMCPY(ivOut, aes->reg, ivOutSz);
  7932. ret = wc_AesGcmEncrypt(aes, out, in, sz,
  7933. (byte*)aes->reg, ivOutSz,
  7934. authTag, authTagSz,
  7935. authIn, authInSz);
  7936. if (ret == 0)
  7937. IncCtr((byte*)aes->reg, ivOutSz);
  7938. }
  7939. return ret;
  7940. }
/* One-shot GMAC: computes an authentication tag over authIn using an
 * ephemeral AES-GCM context and a freshly generated random nonce, which
 * is written back to iv (ivSz bytes).  GMAC is GCM with no plaintext.
 * Returns 0 on success, BAD_FUNC_ARG, MEMORY_E, or an underlying
 * AES/RNG error code. */
int wc_Gmac(const byte* key, word32 keySz, byte* iv, word32 ivSz,
            const byte* authIn, word32 authInSz,
            byte* authTag, word32 authTagSz, WC_RNG* rng)
{
#ifdef WOLFSSL_SMALL_STACK
    Aes *aes = NULL;
#else
    Aes aes[1];
#endif
    int ret;

    if (key == NULL || iv == NULL || (authIn == NULL && authInSz != 0) ||
        authTag == NULL || authTagSz == 0 || rng == NULL) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_SMALL_STACK
    if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
                              DYNAMIC_TYPE_AES)) == NULL)
        return MEMORY_E;
#endif

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret == 0) {
        ret = wc_AesGcmSetKey(aes, key, keySz);
        if (ret == 0)
            /* Draw a random ivSz-byte nonce into the context. */
            ret = wc_AesGcmSetIV(aes, ivSz, NULL, 0, rng);
        if (ret == 0)
            /* Zero-length encrypt produces only the tag; the nonce that
             * was used is copied out to iv. */
            ret = wc_AesGcmEncrypt_ex(aes, NULL, NULL, 0, iv, ivSz,
                                      authTag, authTagSz, authIn, authInSz);
        wc_AesFree(aes);
    }
    /* Scrub key material from the (stack or heap) context. */
    ForceZero(aes, sizeof *aes);
#ifdef WOLFSSL_SMALL_STACK
    XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif

    return ret;
}
/* Verify a GMAC tag: runs an AES-GCM decrypt with no ciphertext so only
 * the tag over authIn is checked.  Returns 0 on match, AES_GCM_AUTH_E on
 * mismatch, BAD_FUNC_ARG/MEMORY_E on bad input, or NOT_COMPILED_IN when
 * built without AES decrypt support. */
int wc_GmacVerify(const byte* key, word32 keySz,
                  const byte* iv, word32 ivSz,
                  const byte* authIn, word32 authInSz,
                  const byte* authTag, word32 authTagSz)
{
    int ret;
#ifdef HAVE_AES_DECRYPT
#ifdef WOLFSSL_SMALL_STACK
    Aes *aes = NULL;
#else
    Aes aes[1];
#endif

    /* Tag must be present and no longer than one AES block. */
    if (key == NULL || iv == NULL || (authIn == NULL && authInSz != 0) ||
        authTag == NULL || authTagSz == 0 || authTagSz > AES_BLOCK_SIZE) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_SMALL_STACK
    if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
                              DYNAMIC_TYPE_AES)) == NULL)
        return MEMORY_E;
#endif

    ret = wc_AesInit(aes, NULL, INVALID_DEVID);
    if (ret == 0) {
        ret = wc_AesGcmSetKey(aes, key, keySz);
        if (ret == 0)
            /* Decrypt of zero bytes performs only the tag comparison. */
            ret = wc_AesGcmDecrypt(aes, NULL, NULL, 0, iv, ivSz,
                                   authTag, authTagSz, authIn, authInSz);
        wc_AesFree(aes);
    }
    /* Scrub key material before releasing the context. */
    ForceZero(aes, sizeof *aes);
#ifdef WOLFSSL_SMALL_STACK
    XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif
#else
    (void)key;
    (void)keySz;
    (void)iv;
    (void)ivSz;
    (void)authIn;
    (void)authInSz;
    (void)authTag;
    (void)authTagSz;
    ret = NOT_COMPILED_IN;
#endif
    return ret;
}
  8022. #endif /* WC_NO_RNG */
  8023. WOLFSSL_API int wc_GmacSetKey(Gmac* gmac, const byte* key, word32 len)
  8024. {
  8025. if (gmac == NULL || key == NULL) {
  8026. return BAD_FUNC_ARG;
  8027. }
  8028. return wc_AesGcmSetKey(&gmac->aes, key, len);
  8029. }
  8030. WOLFSSL_API int wc_GmacUpdate(Gmac* gmac, const byte* iv, word32 ivSz,
  8031. const byte* authIn, word32 authInSz,
  8032. byte* authTag, word32 authTagSz)
  8033. {
  8034. if (gmac == NULL) {
  8035. return BAD_FUNC_ARG;
  8036. }
  8037. return wc_AesGcmEncrypt(&gmac->aes, NULL, NULL, 0, iv, ivSz,
  8038. authTag, authTagSz, authIn, authInSz);
  8039. }
  8040. #endif /* HAVE_AESGCM */
  8041. #ifdef HAVE_AESCCM
  8042. int wc_AesCcmSetKey(Aes* aes, const byte* key, word32 keySz)
  8043. {
  8044. if (!((keySz == 16) || (keySz == 24) || (keySz == 32)))
  8045. return BAD_FUNC_ARG;
  8046. return wc_AesSetKey(aes, key, keySz, NULL, AES_ENCRYPTION);
  8047. }
  8048. /* Checks if the tag size is an accepted value based on RFC 3610 section 2
  8049. * returns 0 if tag size is ok
  8050. */
  8051. int wc_AesCcmCheckTagSize(int sz)
  8052. {
  8053. /* values here are from RFC 3610 section 2 */
  8054. if (sz != 4 && sz != 6 && sz != 8 && sz != 10 && sz != 12 && sz != 14
  8055. && sz != 16) {
  8056. WOLFSSL_MSG("Bad auth tag size AES-CCM");
  8057. return BAD_FUNC_ARG;
  8058. }
  8059. return 0;
  8060. }
  8061. #ifdef WOLFSSL_ARMASM
  8062. /* implementation located in wolfcrypt/src/port/arm/armv8-aes.c */
  8063. #elif defined(HAVE_COLDFIRE_SEC)
  8064. #error "Coldfire SEC doesn't currently support AES-CCM mode"
  8065. #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  8066. !defined(WOLFSSL_QNX_CAAM)
  8067. /* implemented in wolfcrypt/src/port/caam_aes.c */
  8068. #elif defined(WOLFSSL_SILABS_SE_ACCEL)
  8069. /* implemented in wolfcrypt/src/port/silabs/silabs_aes.c */
/* AES-CCM encrypt: thin wrapper forwarding to the Silicon Labs SE
 * accelerator implementation (see port/silabs/silabs_aes.c).  All
 * argument validation is performed by the port layer. */
int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
                     const byte* nonce, word32 nonceSz,
                     byte* authTag, word32 authTagSz,
                     const byte* authIn, word32 authInSz)
{
    return wc_AesCcmEncrypt_silabs(
        aes, out, in, inSz,
        nonce, nonceSz,
        authTag, authTagSz,
        authIn, authInSz);
}
  8081. #ifdef HAVE_AES_DECRYPT
/* AES-CCM decrypt: thin wrapper forwarding to the Silicon Labs SE
 * accelerator implementation (see port/silabs/silabs_aes.c).  All
 * argument validation is performed by the port layer. */
int wc_AesCcmDecrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
                     const byte* nonce, word32 nonceSz,
                     const byte* authTag, word32 authTagSz,
                     const byte* authIn, word32 authInSz)
{
    return wc_AesCcmDecrypt_silabs(
        aes, out, in, inSz,
        nonce, nonceSz,
        authTag, authTagSz,
        authIn, authInSz);
}
  8093. #endif
  8094. #elif defined(FREESCALE_LTC)
/* return 0 on success */
/* AES-CCM encrypt using the NXP LTC hardware engine. */
int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
                     const byte* nonce, word32 nonceSz,
                     byte* authTag, word32 authTagSz,
                     const byte* authIn, word32 authInSz)
{
    byte *key;
    word32 keySize;
    status_t status;

    /* sanity check on arguments */
    /* note, LTC_AES_EncryptTagCcm() doesn't allow null src or dst
     * ptrs even if inSz is zero (ltc_aes_ccm_check_input_args()), so
     * don't allow it here either.
     */
    if (aes == NULL || out == NULL || in == NULL || nonce == NULL
            || authTag == NULL || nonceSz < 7 || nonceSz > 13) {
        return BAD_FUNC_ARG;
    }

    /* Tag size must satisfy RFC 3610 (even, 4..16). */
    if (wc_AesCcmCheckTagSize(authTagSz) != 0) {
        return BAD_FUNC_ARG;
    }

    key = (byte*)aes->key;

    /* NOTE(review): wc_AesGetKeySize() returns an int error code which is
     * stored in a status_t here; the zero/non-zero check below is what
     * matters. */
    status = wc_AesGetKeySize(aes, &keySize);
    if (status != 0) {
        return status;
    }

    /* Serialize access to the crypto hardware. */
    status = wolfSSL_CryptHwMutexLock();
    if (status != 0)
        return status;
    status = LTC_AES_EncryptTagCcm(LTC_BASE, in, out, inSz,
        nonce, nonceSz, authIn, authInSz, key, keySize, authTag, authTagSz);
    wolfSSL_CryptHwMutexUnLock();

    return (kStatus_Success == status) ? 0 : BAD_FUNC_ARG;
}
  8129. #ifdef HAVE_AES_DECRYPT
  8130. int wc_AesCcmDecrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
  8131. const byte* nonce, word32 nonceSz,
  8132. const byte* authTag, word32 authTagSz,
  8133. const byte* authIn, word32 authInSz)
  8134. {
  8135. byte *key;
  8136. word32 keySize;
  8137. status_t status;
  8138. /* sanity check on arguments */
  8139. if (aes == NULL || out == NULL || in == NULL || nonce == NULL
  8140. || authTag == NULL || nonceSz < 7 || nonceSz > 13) {
  8141. return BAD_FUNC_ARG;
  8142. }
  8143. key = (byte*)aes->key;
  8144. status = wc_AesGetKeySize(aes, &keySize);
  8145. if (status != 0) {
  8146. return status;
  8147. }
  8148. status = wolfSSL_CryptHwMutexLock();
  8149. if (status != 0)
  8150. return status;
  8151. status = LTC_AES_DecryptTagCcm(LTC_BASE, in, out, inSz,
  8152. nonce, nonceSz, authIn, authInSz, key, keySize, authTag, authTagSz);
  8153. wolfSSL_CryptHwMutexUnLock();
  8154. if (status != kStatus_Success) {
  8155. XMEMSET(out, 0, inSz);
  8156. return AES_CCM_AUTH_E;
  8157. }
  8158. return 0;
  8159. }
  8160. #endif /* HAVE_AES_DECRYPT */
  8161. #else
  8162. /* Software CCM */
  8163. static WARN_UNUSED_RESULT int roll_x(
  8164. Aes* aes, const byte* in, word32 inSz, byte* out)
  8165. {
  8166. int ret;
  8167. /* process the bulk of the data */
  8168. while (inSz >= AES_BLOCK_SIZE) {
  8169. xorbuf(out, in, AES_BLOCK_SIZE);
  8170. in += AES_BLOCK_SIZE;
  8171. inSz -= AES_BLOCK_SIZE;
  8172. ret = wc_AesEncrypt(aes, out, out);
  8173. if (ret != 0)
  8174. return ret;
  8175. }
  8176. /* process remainder of the data */
  8177. if (inSz > 0) {
  8178. xorbuf(out, in, inSz);
  8179. ret = wc_AesEncrypt(aes, out, out);
  8180. if (ret != 0)
  8181. return ret;
  8182. }
  8183. return 0;
  8184. }
/* CBC-MAC absorption of the CCM additional authenticated data (AAD).
 * Encodes the AAD length per RFC 3610 section 2.2, XORs the encoding
 * plus the first chunk of data into the running MAC block 'out',
 * encrypts, then folds in any remaining AAD via roll_x().
 * Returns 0 or a wc_AesEncrypt() error code. */
static WARN_UNUSED_RESULT int roll_auth(
    Aes* aes, const byte* in, word32 inSz, byte* out)
{
    word32 authLenSz;   /* bytes used by the length encoding (2 or 6) */
    word32 remainder;
    int ret;

    /* encode the length in */
    if (inSz <= 0xFEFF) {
        /* l(a) < 2^16 - 2^8: plain two-byte big-endian length */
        authLenSz = 2;
        out[0] ^= (byte)(inSz >> 8);
        out[1] ^= (byte)inSz;
    }
    else {
        /* 2^16 - 2^8 <= l(a) < 2^32: 0xFF 0xFE marker + 4-byte length */
        authLenSz = 6;
        out[0] ^= 0xFF;
        out[1] ^= 0xFE;
        out[2] ^= (byte)(inSz >> 24);
        out[3] ^= (byte)(inSz >> 16);
        out[4] ^= (byte)(inSz >> 8);
        out[5] ^= (byte)inSz;
    }
    /* Note, the protocol handles auth data up to 2^64, but we are
     * using 32-bit sizes right now, so the bigger data isn't handled
     * else {}
     */

    /* start fill out the rest of the first block */
    remainder = AES_BLOCK_SIZE - authLenSz;
    if (inSz >= remainder) {
        /* plenty of bulk data to fill the remainder of this block */
        xorbuf(out + authLenSz, in, remainder);
        inSz -= remainder;
        in += remainder;
    }
    else {
        /* not enough bulk data, copy what is available, and pad zero */
        xorbuf(out + authLenSz, in, inSz);
        inSz = 0;
    }
    ret = wc_AesEncrypt(aes, out, out);

    /* Any AAD beyond the first block is absorbed block-by-block. */
    if ((ret == 0) && (inSz > 0)) {
        ret = roll_x(aes, in, inSz, out);
    }

    return ret;
}
  8229. static WC_INLINE void AesCcmCtrInc(byte* B, word32 lenSz)
  8230. {
  8231. word32 i;
  8232. for (i = 0; i < lenSz; i++) {
  8233. if (++B[AES_BLOCK_SIZE - 1 - i] != 0) return;
  8234. }
  8235. }
  8236. #ifdef WOLFSSL_AESNI
/* Expand counter block B into four consecutive CTR blocks for the
 * AES-NI four-way path: the 4*AES_BLOCK_SIZE buffer ends up holding
 * counters ctr, ctr+1, ctr+2, ctr+3 (big-endian in the low lenSz bytes
 * of each block).  B's own counter is left unchanged; AesCcmCtrInc4()
 * advances it afterwards. */
static WC_INLINE void AesCcmCtrIncSet4(byte* B, word32 lenSz)
{
    word32 i;

    /* B+1 = B */
    XMEMCPY(B + AES_BLOCK_SIZE * 1, B, AES_BLOCK_SIZE);
    /* B+2,B+3 = B,B+1 */
    XMEMCPY(B + AES_BLOCK_SIZE * 2, B, AES_BLOCK_SIZE * 2);

    /* Second block: counter + 1 (stop at the first non-wrapping byte). */
    for (i = 0; i < lenSz; i++) {
        if (++B[AES_BLOCK_SIZE * 2 - 1 - i] != 0) break;
    }
    /* Third block: counter + 2, propagating the carry if the low byte
     * wrapped. */
    B[AES_BLOCK_SIZE * 3 - 1] += 2;
    if (B[AES_BLOCK_SIZE * 3 - 1] < 2) {
        for (i = 1; i < lenSz; i++) {
            if (++B[AES_BLOCK_SIZE * 3 - 1 - i] != 0) break;
        }
    }
    /* Fourth block: counter + 3, propagating the carry if the low byte
     * wrapped. */
    B[AES_BLOCK_SIZE * 4 - 1] += 3;
    if (B[AES_BLOCK_SIZE * 4 - 1] < 3) {
        for (i = 1; i < lenSz; i++) {
            if (++B[AES_BLOCK_SIZE * 4 - 1 - i] != 0) break;
        }
    }
}
  8260. static WC_INLINE void AesCcmCtrInc4(byte* B, word32 lenSz)
  8261. {
  8262. word32 i;
  8263. B[AES_BLOCK_SIZE - 1] += 4;
  8264. if (B[AES_BLOCK_SIZE - 1] < 4) {
  8265. for (i = 1; i < lenSz; i++) {
  8266. if (++B[AES_BLOCK_SIZE - 1 - i] != 0) break;
  8267. }
  8268. }
  8269. }
  8270. #endif
/* Software AES - CCM Encrypt */
/* return 0 on success */
/* CTR-encrypts 'in' into 'out' and produces a CBC-MAC tag over the
 * flags block, AAD and plaintext (RFC 3610).  Nonce must be 7..13
 * bytes; tag size must be even and 4..16. */
int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
                     const byte* nonce, word32 nonceSz,
                     byte* authTag, word32 authTagSz,
                     const byte* authIn, word32 authInSz)
{
#ifndef WOLFSSL_AESNI
    byte A[AES_BLOCK_SIZE];   /* running CBC-MAC / keystream block */
    byte B[AES_BLOCK_SIZE];   /* B_0 / counter block */
#else
    /* AES-NI path encrypts four counter blocks per ECB call. */
    ALIGN128 byte A[AES_BLOCK_SIZE * 4];
    ALIGN128 byte B[AES_BLOCK_SIZE * 4];
#endif
    byte lenSz;               /* bytes of B holding the message length */
    word32 i;
    byte mask = 0xFF;
    const word32 wordSz = (word32)sizeof(word32);
    int ret;

    /* sanity check on arguments */
    if (aes == NULL || (inSz != 0 && (in == NULL || out == NULL)) ||
        nonce == NULL || authTag == NULL || nonceSz < 7 || nonceSz > 13 ||
        authTagSz > AES_BLOCK_SIZE)
        return BAD_FUNC_ARG;

    /* sanity check on tag size */
    if (wc_AesCcmCheckTagSize((int)authTagSz) != 0) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int crypto_cb_ret =
            wc_CryptoCb_AesCcmEncrypt(aes, out, in, inSz, nonce, nonceSz,
                                      authTag, authTagSz, authIn, authInSz);
        if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
            return crypto_cb_ret;
        /* fall-through when unavailable */
    }
#endif

    XMEMSET(A, 0, sizeof(A));
    /* Build B_0: flags byte, nonce, then big-endian message length
     * (RFC 3610 section 2.2). */
    XMEMCPY(B+1, nonce, nonceSz);
    lenSz = AES_BLOCK_SIZE - 1 - (byte)nonceSz;
    /* Flags: Adata bit (64), encoded tag length, encoded length-field
     * size. */
    B[0] = (byte)((authInSz > 0 ? 64 : 0)
                  + (8 * (((byte)authTagSz - 2) / 2))
                  + (lenSz - 1));
    /* Write inSz big-endian into the last lenSz bytes; once past the
     * 32-bit size we carry, mask forces the remaining bytes to zero. */
    for (i = 0; i < lenSz; i++) {
        if (mask && i >= wordSz)
            mask = 0x00;
        B[AES_BLOCK_SIZE - 1 - i] = (byte)((inSz >> ((8 * i) & mask)) & mask);
    }

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Add("wc_AesCcmEncrypt B", B, sizeof(B));
#endif

    /* Start the CBC-MAC with E(K, B_0). */
    ret = wc_AesEncrypt(aes, B, A);
    if (ret != 0) {
        ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Check(B, sizeof(B));
#endif
        return ret;
    }
#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Add("wc_AesCcmEncrypt A", A, sizeof(A));
#endif

    /* Absorb AAD, then plaintext, into the CBC-MAC. */
    if (authInSz > 0) {
        ret = roll_auth(aes, authIn, authInSz, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
#endif
            return ret;
        }
    }
    if (inSz > 0) {
        ret = roll_x(aes, in, inSz, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
#endif
            return ret;
        }
    }
    XMEMCPY(authTag, A, authTagSz);

    /* Turn B into A_0 (zero counter); E(K, A_0) masks the raw MAC. */
    B[0] = lenSz - 1;
    for (i = 0; i < lenSz; i++)
        B[AES_BLOCK_SIZE - 1 - i] = 0;
    ret = wc_AesEncrypt(aes, B, A);
    if (ret != 0) {
        ForceZero(A, sizeof(A));
        ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
        wc_MemZero_Check(A, sizeof(A));
        wc_MemZero_Check(B, sizeof(B));
#endif
        return ret;
    }
    xorbuf(authTag, A, authTagSz);

    /* Payload keystream starts at counter value 1. */
    B[15] = 1;
#ifdef WOLFSSL_AESNI
    if (haveAESNI && aes->use_aesni) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        /* Four counter blocks per AES-NI ECB call. */
        while (inSz >= AES_BLOCK_SIZE * 4) {
            AesCcmCtrIncSet4(B, lenSz);

            AES_ECB_encrypt(B, A, AES_BLOCK_SIZE * 4, (byte*)aes->key,
                            (int)aes->rounds);

            xorbuf(A, in, AES_BLOCK_SIZE * 4);
            XMEMCPY(out, A, AES_BLOCK_SIZE * 4);

            inSz -= AES_BLOCK_SIZE * 4;
            in += AES_BLOCK_SIZE * 4;
            out += AES_BLOCK_SIZE * 4;

            AesCcmCtrInc4(B, lenSz);
        }
        RESTORE_VECTOR_REGISTERS();
    }
#endif
    /* CTR-encrypt remaining whole blocks one at a time. */
    while (inSz >= AES_BLOCK_SIZE) {
        ret = wc_AesEncrypt(aes, B, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
#endif
            return ret;
        }
        xorbuf(A, in, AES_BLOCK_SIZE);
        XMEMCPY(out, A, AES_BLOCK_SIZE);

        AesCcmCtrInc(B, lenSz);
        inSz -= AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        out += AES_BLOCK_SIZE;
    }
    /* Final partial block. */
    if (inSz > 0) {
        ret = wc_AesEncrypt(aes, B, A);
        if (ret != 0) {
            ForceZero(A, sizeof(A));
            ForceZero(B, sizeof(B));
#ifdef WOLFSSL_CHECK_MEM_ZERO
            wc_MemZero_Check(A, sizeof(A));
            wc_MemZero_Check(B, sizeof(B));
#endif
            return ret;
        }
        xorbuf(A, in, inSz);
        XMEMCPY(out, A, inSz);
    }

    /* Scrub keystream and MAC working state. */
    ForceZero(A, sizeof(A));
    ForceZero(B, sizeof(B));

#ifdef WOLFSSL_CHECK_MEM_ZERO
    wc_MemZero_Check(A, sizeof(A));
    wc_MemZero_Check(B, sizeof(B));
#endif

    return 0;
}
  8434. #ifdef HAVE_AES_DECRYPT
  8435. /* Software AES - CCM Decrypt */
  8436. int wc_AesCcmDecrypt(Aes* aes, byte* out, const byte* in, word32 inSz,
  8437. const byte* nonce, word32 nonceSz,
  8438. const byte* authTag, word32 authTagSz,
  8439. const byte* authIn, word32 authInSz)
  8440. {
  8441. #ifndef WOLFSSL_AESNI
  8442. byte A[AES_BLOCK_SIZE];
  8443. byte B[AES_BLOCK_SIZE];
  8444. #else
  8445. ALIGN128 byte B[AES_BLOCK_SIZE * 4];
  8446. ALIGN128 byte A[AES_BLOCK_SIZE * 4];
  8447. #endif
  8448. byte* o;
  8449. byte lenSz;
  8450. word32 i, oSz;
  8451. byte mask = 0xFF;
  8452. const word32 wordSz = (word32)sizeof(word32);
  8453. int ret;
  8454. /* sanity check on arguments */
  8455. if (aes == NULL || (inSz != 0 && (in == NULL || out == NULL)) ||
  8456. nonce == NULL || authTag == NULL || nonceSz < 7 || nonceSz > 13 ||
  8457. authTagSz > AES_BLOCK_SIZE)
  8458. return BAD_FUNC_ARG;
  8459. /* sanity check on tag size */
  8460. if (wc_AesCcmCheckTagSize((int)authTagSz) != 0) {
  8461. return BAD_FUNC_ARG;
  8462. }
  8463. #ifdef WOLF_CRYPTO_CB
  8464. #ifndef WOLF_CRYPTO_CB_FIND
  8465. if (aes->devId != INVALID_DEVID)
  8466. #endif
  8467. {
  8468. int crypto_cb_ret =
  8469. wc_CryptoCb_AesCcmDecrypt(aes, out, in, inSz, nonce, nonceSz,
  8470. authTag, authTagSz, authIn, authInSz);
  8471. if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE)
  8472. return crypto_cb_ret;
  8473. /* fall-through when unavailable */
  8474. }
  8475. #endif
  8476. o = out;
  8477. oSz = inSz;
  8478. XMEMSET(A, 0, sizeof A);
  8479. XMEMCPY(B+1, nonce, nonceSz);
  8480. lenSz = AES_BLOCK_SIZE - 1 - (byte)nonceSz;
  8481. B[0] = lenSz - 1;
  8482. for (i = 0; i < lenSz; i++)
  8483. B[AES_BLOCK_SIZE - 1 - i] = 0;
  8484. B[15] = 1;
  8485. #ifdef WOLFSSL_CHECK_MEM_ZERO
  8486. wc_MemZero_Add("wc_AesCcmEncrypt A", A, sizeof(A));
  8487. wc_MemZero_Add("wc_AesCcmEncrypt B", B, sizeof(B));
  8488. #endif
  8489. #ifdef WOLFSSL_AESNI
  8490. if (haveAESNI && aes->use_aesni) {
  8491. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  8492. while (oSz >= AES_BLOCK_SIZE * 4) {
  8493. AesCcmCtrIncSet4(B, lenSz);
  8494. AES_ECB_encrypt(B, A, AES_BLOCK_SIZE * 4, (byte*)aes->key,
  8495. (int)aes->rounds);
  8496. xorbuf(A, in, AES_BLOCK_SIZE * 4);
  8497. XMEMCPY(o, A, AES_BLOCK_SIZE * 4);
  8498. oSz -= AES_BLOCK_SIZE * 4;
  8499. in += AES_BLOCK_SIZE * 4;
  8500. o += AES_BLOCK_SIZE * 4;
  8501. AesCcmCtrInc4(B, lenSz);
  8502. }
  8503. RESTORE_VECTOR_REGISTERS();
  8504. }
  8505. #endif
  8506. while (oSz >= AES_BLOCK_SIZE) {
  8507. ret = wc_AesEncrypt(aes, B, A);
  8508. if (ret != 0) {
  8509. ForceZero(A, sizeof(A));
  8510. ForceZero(B, sizeof(B));
  8511. #ifdef WOLFSSL_CHECK_MEM_ZERO
  8512. wc_MemZero_Check(A, sizeof(A));
  8513. wc_MemZero_Check(B, sizeof(B));
  8514. #endif
  8515. return ret;
  8516. }
  8517. xorbuf(A, in, AES_BLOCK_SIZE);
  8518. XMEMCPY(o, A, AES_BLOCK_SIZE);
  8519. AesCcmCtrInc(B, lenSz);
  8520. oSz -= AES_BLOCK_SIZE;
  8521. in += AES_BLOCK_SIZE;
  8522. o += AES_BLOCK_SIZE;
  8523. }
  8524. if (inSz > 0) {
  8525. ret = wc_AesEncrypt(aes, B, A);
  8526. if (ret != 0) {
  8527. ForceZero(A, sizeof(A));
  8528. ForceZero(B, sizeof(B));
  8529. #ifdef WOLFSSL_CHECK_MEM_ZERO
  8530. wc_MemZero_Check(A, sizeof(A));
  8531. wc_MemZero_Check(B, sizeof(B));
  8532. #endif
  8533. return ret;
  8534. }
  8535. xorbuf(A, in, oSz);
  8536. XMEMCPY(o, A, oSz);
  8537. }
  8538. for (i = 0; i < lenSz; i++)
  8539. B[AES_BLOCK_SIZE - 1 - i] = 0;
  8540. ret = wc_AesEncrypt(aes, B, A);
  8541. if (ret != 0) {
  8542. ForceZero(A, sizeof(A));
  8543. ForceZero(B, sizeof(B));
  8544. #ifdef WOLFSSL_CHECK_MEM_ZERO
  8545. wc_MemZero_Check(A, sizeof(A));
  8546. wc_MemZero_Check(B, sizeof(B));
  8547. #endif
  8548. return ret;
  8549. }
  8550. o = out;
  8551. oSz = inSz;
  8552. B[0] = (byte)((authInSz > 0 ? 64 : 0)
  8553. + (8 * (((byte)authTagSz - 2) / 2))
  8554. + (lenSz - 1));
  8555. for (i = 0; i < lenSz; i++) {
  8556. if (mask && i >= wordSz)
  8557. mask = 0x00;
  8558. B[AES_BLOCK_SIZE - 1 - i] = (byte)((inSz >> ((8 * i) & mask)) & mask);
  8559. }
  8560. ret = wc_AesEncrypt(aes, B, A);
  8561. if (ret != 0) {
  8562. ForceZero(A, sizeof(A));
  8563. ForceZero(B, sizeof(B));
  8564. #ifdef WOLFSSL_CHECK_MEM_ZERO
  8565. wc_MemZero_Check(A, sizeof(A));
  8566. wc_MemZero_Check(B, sizeof(B));
  8567. #endif
  8568. return ret;
  8569. }
  8570. if (authInSz > 0) {
  8571. ret = roll_auth(aes, authIn, authInSz, A);
  8572. if (ret != 0) {
  8573. ForceZero(A, sizeof(A));
  8574. ForceZero(B, sizeof(B));
  8575. #ifdef WOLFSSL_CHECK_MEM_ZERO
  8576. wc_MemZero_Check(A, sizeof(A));
  8577. wc_MemZero_Check(B, sizeof(B));
  8578. #endif
  8579. return ret;
  8580. }
  8581. }
  8582. if (inSz > 0) {
  8583. ret = roll_x(aes, o, oSz, A);
  8584. if (ret != 0) {
  8585. ForceZero(A, sizeof(A));
  8586. ForceZero(B, sizeof(B));
  8587. #ifdef WOLFSSL_CHECK_MEM_ZERO
  8588. wc_MemZero_Check(A, sizeof(A));
  8589. wc_MemZero_Check(B, sizeof(B));
  8590. #endif
  8591. return ret;
  8592. }
  8593. }
  8594. B[0] = lenSz - 1;
  8595. for (i = 0; i < lenSz; i++)
  8596. B[AES_BLOCK_SIZE - 1 - i] = 0;
  8597. ret = wc_AesEncrypt(aes, B, B);
  8598. if (ret != 0) {
  8599. ForceZero(A, sizeof(A));
  8600. ForceZero(B, sizeof(B));
  8601. #ifdef WOLFSSL_CHECK_MEM_ZERO
  8602. wc_MemZero_Check(A, sizeof(A));
  8603. wc_MemZero_Check(B, sizeof(B));
  8604. #endif
  8605. return ret;
  8606. }
  8607. xorbuf(A, B, authTagSz);
  8608. if (ConstantCompare(A, authTag, (int)authTagSz) != 0) {
  8609. /* If the authTag check fails, don't keep the decrypted data.
  8610. * Unfortunately, you need the decrypted data to calculate the
  8611. * check value. */
  8612. #if defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2) && \
  8613. defined(ACVP_VECTOR_TESTING)
  8614. WOLFSSL_MSG("Preserve output for vector responses");
  8615. #else
  8616. if (inSz > 0)
  8617. XMEMSET(out, 0, inSz);
  8618. #endif
  8619. ret = AES_CCM_AUTH_E;
  8620. }
  8621. ForceZero(A, sizeof(A));
  8622. ForceZero(B, sizeof(B));
  8623. o = NULL;
  8624. #ifdef WOLFSSL_CHECK_MEM_ZERO
  8625. wc_MemZero_Check(A, sizeof(A));
  8626. wc_MemZero_Check(B, sizeof(B));
  8627. #endif
  8628. return ret;
  8629. }
  8630. #endif /* HAVE_AES_DECRYPT */
  8631. #endif /* software CCM */
  8632. /* abstract functions that call lower level AESCCM functions */
  8633. #ifndef WC_NO_RNG
  8634. int wc_AesCcmSetNonce(Aes* aes, const byte* nonce, word32 nonceSz)
  8635. {
  8636. int ret = 0;
  8637. if (aes == NULL || nonce == NULL ||
  8638. nonceSz < CCM_NONCE_MIN_SZ || nonceSz > CCM_NONCE_MAX_SZ) {
  8639. ret = BAD_FUNC_ARG;
  8640. }
  8641. if (ret == 0) {
  8642. XMEMCPY(aes->reg, nonce, nonceSz);
  8643. aes->nonceSz = nonceSz;
  8644. /* Invocation counter should be 2^61 */
  8645. aes->invokeCtr[0] = 0;
  8646. aes->invokeCtr[1] = 0xE0000000;
  8647. }
  8648. return ret;
  8649. }
  8650. int wc_AesCcmEncrypt_ex(Aes* aes, byte* out, const byte* in, word32 sz,
  8651. byte* ivOut, word32 ivOutSz,
  8652. byte* authTag, word32 authTagSz,
  8653. const byte* authIn, word32 authInSz)
  8654. {
  8655. int ret = 0;
  8656. if (aes == NULL || out == NULL ||
  8657. (in == NULL && sz != 0) ||
  8658. ivOut == NULL ||
  8659. (authIn == NULL && authInSz != 0) ||
  8660. (ivOutSz != aes->nonceSz)) {
  8661. ret = BAD_FUNC_ARG;
  8662. }
  8663. if (ret == 0) {
  8664. aes->invokeCtr[0]++;
  8665. if (aes->invokeCtr[0] == 0) {
  8666. aes->invokeCtr[1]++;
  8667. if (aes->invokeCtr[1] == 0)
  8668. ret = AES_CCM_OVERFLOW_E;
  8669. }
  8670. }
  8671. if (ret == 0) {
  8672. ret = wc_AesCcmEncrypt(aes, out, in, sz,
  8673. (byte*)aes->reg, aes->nonceSz,
  8674. authTag, authTagSz,
  8675. authIn, authInSz);
  8676. if (ret == 0) {
  8677. XMEMCPY(ivOut, aes->reg, aes->nonceSz);
  8678. IncCtr((byte*)aes->reg, aes->nonceSz);
  8679. }
  8680. }
  8681. return ret;
  8682. }
  8683. #endif /* WC_NO_RNG */
  8684. #endif /* HAVE_AESCCM */
/* Initialize Aes for use with async hardware */
/* Stores heap/devId and resets whatever hardware-port and streaming
 * state the build enables.  Must be paired with wc_AesFree().
 * Returns 0, BAD_FUNC_ARG, or an error from the async/PSA layer. */
int wc_AesInit(Aes* aes, void* heap, int devId)
{
    int ret = 0;

    if (aes == NULL)
        return BAD_FUNC_ARG;

    aes->heap = heap;

#ifdef WOLF_CRYPTO_CB
    aes->devId = devId;
    aes->devCtx = NULL;
#else
    (void)devId;
#endif
#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    ret = wolfAsync_DevCtxInit(&aes->asyncDev, WOLFSSL_ASYNC_MARKER_AES,
                               aes->heap, devId);
#endif /* WOLFSSL_ASYNC_CRYPT */

#ifdef WOLFSSL_AFALG
    /* AF_ALG socket descriptors start out unset. */
    aes->alFd = WC_SOCK_NOTSET;
    aes->rdFd = WC_SOCK_NOTSET;
#endif
#ifdef WOLFSSL_KCAPI_AES
    aes->handle = NULL;
    aes->init = 0;
#endif
#if defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))
    aes->ctx.cfd = -1;
#endif
#if defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
    XMEMSET(&aes->ctx, 0, sizeof(aes->ctx));
#endif
#if defined(WOLFSSL_IMXRT_DCP)
    DCPAesInit(aes);
#endif
#ifdef WOLFSSL_MAXQ10XX_CRYPTO
    XMEMSET(&aes->maxq_ctx, 0, sizeof(aes->maxq_ctx));
#endif
#ifdef HAVE_AESGCM
#ifdef OPENSSL_EXTRA
    /* Clear the OpenSSL-compat AAD hash state. */
    XMEMSET(aes->aadH, 0, sizeof(aes->aadH));
    aes->aadLen = 0;
#endif
#endif

#ifdef WOLFSSL_AESGCM_STREAM
#if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_AESNI)
    aes->streamData = NULL;
#endif
    /* Streaming GCM starts with no key/nonce/counter configured. */
    aes->keylen = 0;
    aes->nonceSz = 0;
    aes->gcmKeySet = 0;
    aes->nonceSet = 0;
    aes->ctrSet = 0;
#endif

#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    ret = wc_psa_aes_init(aes);
#endif

    return ret;
}
  8744. #ifdef WOLF_PRIVATE_KEY_ID
  8745. int wc_AesInit_Id(Aes* aes, unsigned char* id, int len, void* heap, int devId)
  8746. {
  8747. int ret = 0;
  8748. if (aes == NULL)
  8749. ret = BAD_FUNC_ARG;
  8750. if (ret == 0 && (len < 0 || len > AES_MAX_ID_LEN))
  8751. ret = BUFFER_E;
  8752. if (ret == 0)
  8753. ret = wc_AesInit(aes, heap, devId);
  8754. if (ret == 0) {
  8755. XMEMCPY(aes->id, id, (size_t)len);
  8756. aes->idLen = len;
  8757. aes->labelLen = 0;
  8758. }
  8759. return ret;
  8760. }
  8761. int wc_AesInit_Label(Aes* aes, const char* label, void* heap, int devId)
  8762. {
  8763. int ret = 0;
  8764. size_t labelLen = 0;
  8765. if (aes == NULL || label == NULL)
  8766. ret = BAD_FUNC_ARG;
  8767. if (ret == 0) {
  8768. labelLen = XSTRLEN(label);
  8769. if (labelLen == 0 || labelLen > AES_MAX_LABEL_LEN)
  8770. ret = BUFFER_E;
  8771. }
  8772. if (ret == 0)
  8773. ret = wc_AesInit(aes, heap, devId);
  8774. if (ret == 0) {
  8775. XMEMCPY(aes->label, label, labelLen);
  8776. aes->labelLen = (int)labelLen;
  8777. aes->idLen = 0;
  8778. }
  8779. return ret;
  8780. }
  8781. #endif
/* Free Aes from use with async hardware */
/* Releases every resource a build-enabled port may have attached to the
 * context (async dev, AF_ALG sockets, KCAPI handle, devcrypto session,
 * streaming GCM buffer, ...) and scrubs cached key material.  Safe to
 * call with NULL. */
void wc_AesFree(Aes* aes)
{
    if (aes == NULL)
        return;

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)
    wolfAsync_DevCtxFree(&aes->asyncDev, WOLFSSL_ASYNC_MARKER_AES);
#endif /* WOLFSSL_ASYNC_CRYPT */
#if defined(WOLFSSL_AFALG) || defined(WOLFSSL_AFALG_XILINX_AES)
    if (aes->rdFd > 0) { /* negative is error case */
        close(aes->rdFd);
        aes->rdFd = WC_SOCK_NOTSET;
    }
    if (aes->alFd > 0) {
        close(aes->alFd);
        aes->alFd = WC_SOCK_NOTSET;
    }
#endif /* WOLFSSL_AFALG */
#ifdef WOLFSSL_KCAPI_AES
    /* Scrub the cached key before tearing down the kernel handle. */
    ForceZero((byte*)aes->devKey, AES_MAX_KEY_SIZE/WOLFSSL_BIT_SIZE);
    if (aes->init == 1) {
        kcapi_cipher_destroy(aes->handle);
    }
    aes->init = 0;
    aes->handle = NULL;
#endif
#if defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))
    wc_DevCryptoFree(&aes->ctx);
#endif
#if defined(WOLF_CRYPTO_CB) || (defined(WOLFSSL_DEVCRYPTO) && \
    (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))) || \
    (defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES))
    /* Scrub the device-key copy kept for callback/offload paths. */
    ForceZero((byte*)aes->devKey, AES_MAX_KEY_SIZE/WOLFSSL_BIT_SIZE);
#endif
#if defined(WOLFSSL_IMXRT_DCP)
    DCPAesFree(aes);
#endif
#if defined(WOLFSSL_AESGCM_STREAM) && defined(WOLFSSL_SMALL_STACK) && \
    !defined(WOLFSSL_AESNI)
    /* Release the heap-allocated streaming GCM scratch buffer. */
    if (aes->streamData != NULL) {
        XFREE(aes->streamData, aes->heap, DYNAMIC_TYPE_AES);
        aes->streamData = NULL;
    }
#endif

#if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT)
    if (aes->useSWCrypt == 0) {
        se050_aes_free(aes);
    }
#endif

#if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
    wc_psa_aes_free(aes);
#endif

#ifdef WOLFSSL_MAXQ10XX_CRYPTO
    wc_MAXQ10XX_AesFree(aes);
#endif

#ifdef WOLFSSL_CHECK_MEM_ZERO
    /* Verify all registered sensitive regions were zeroized. */
    wc_MemZero_Check(aes, sizeof(Aes));
#endif
}
  8842. int wc_AesGetKeySize(Aes* aes, word32* keySize)
  8843. {
  8844. int ret = 0;
  8845. if (aes == NULL || keySize == NULL) {
  8846. return BAD_FUNC_ARG;
  8847. }
  8848. #if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES)
  8849. return wc_psa_aes_get_key_size(aes, keySize);
  8850. #endif
  8851. #if defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES)
  8852. *keySize = aes->ctx.key.keySize;
  8853. return ret;
  8854. #endif
  8855. switch (aes->rounds) {
  8856. #ifdef WOLFSSL_AES_128
  8857. case 10:
  8858. *keySize = 16;
  8859. break;
  8860. #endif
  8861. #ifdef WOLFSSL_AES_192
  8862. case 12:
  8863. *keySize = 24;
  8864. break;
  8865. #endif
  8866. #ifdef WOLFSSL_AES_256
  8867. case 14:
  8868. *keySize = 32;
  8869. break;
  8870. #endif
  8871. default:
  8872. *keySize = 0;
  8873. ret = BAD_FUNC_ARG;
  8874. }
  8875. return ret;
  8876. }
  8877. #endif /* !WOLFSSL_TI_CRYPT */
  8878. #ifdef HAVE_AES_ECB
  8879. #if defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \
  8880. !defined(WOLFSSL_QNX_CAAM)
  8881. /* implemented in wolfcrypt/src/port/caam/caam_aes.c */
  8882. #elif defined(WOLFSSL_AFALG)
  8883. /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */
  8884. #elif defined(WOLFSSL_DEVCRYPTO_AES)
  8885. /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */
  8886. #elif defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES)
  8887. /* Software AES - ECB */
  8888. int wc_AesEcbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  8889. {
  8890. if ((in == NULL) || (out == NULL) || (aes == NULL))
  8891. return BAD_FUNC_ARG;
  8892. return AES_ECB_encrypt(aes, in, out, sz);
  8893. }
  8894. int wc_AesEcbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  8895. {
  8896. if ((in == NULL) || (out == NULL) || (aes == NULL))
  8897. return BAD_FUNC_ARG;
  8898. return AES_ECB_decrypt(aes, in, out, sz);
  8899. }
  8900. #else
  8901. /* Software AES - ECB */
/* Software AES-ECB encrypt of all whole AES_BLOCK_SIZE blocks in 'sz' bytes.
 * Dispatches to a crypto callback, DCP hardware or AES-NI when compiled in,
 * otherwise processes one block at a time with wc_AesEncryptDirect.
 *
 * aes  initialized AES context holding the encrypt key schedule
 * out  destination buffer, at least sz bytes
 * in   source buffer
 * sz   bytes to process; a trailing partial block is silently ignored
 *
 * returns 0 on success, negative on failure
 */
static WARN_UNUSED_RESULT int _AesEcbEncrypt(
    Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 blocks = sz / AES_BLOCK_SIZE;
#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int ret = wc_CryptoCb_AesEcbEncrypt(aes, out, in, sz);
        if (ret != CRYPTOCB_UNAVAILABLE)
            return ret;
        /* fall-through when unavailable */
    }
#endif
#ifdef WOLFSSL_IMXRT_DCP
    /* the DCP engine only supports 128-bit keys */
    if (aes->keylen == 16)
        return DCPAesEcbEncrypt(aes, out, in, sz);
#endif
#ifdef WOLFSSL_AESNI
    if (haveAESNI && aes->use_aesni) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_ECB_encrypt(in, out, sz, (byte*)aes->key, (int)aes->rounds);
        RESTORE_VECTOR_REGISTERS();
        /* AES-NI handled the whole buffer; skip the software loop */
        blocks = 0;
    }
#endif
    while (blocks > 0) {
        int ret = wc_AesEncryptDirect(aes, out, in);
        if (ret != 0)
            return ret;
        out += AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        blocks--;
    }
    return 0;
}
/* Software AES-ECB decrypt of all whole AES_BLOCK_SIZE blocks in 'sz' bytes.
 * Dispatches to a crypto callback, DCP hardware or AES-NI when compiled in,
 * otherwise processes one block at a time with wc_AesDecryptDirect.
 *
 * aes  initialized AES context holding the decrypt key schedule
 * out  destination buffer, at least sz bytes
 * in   source buffer
 * sz   bytes to process; a trailing partial block is silently ignored
 *
 * returns 0 on success, negative on failure
 */
static WARN_UNUSED_RESULT int _AesEcbDecrypt(
    Aes* aes, byte* out, const byte* in, word32 sz)
{
    word32 blocks = sz / AES_BLOCK_SIZE;
#ifdef WOLF_CRYPTO_CB
    #ifndef WOLF_CRYPTO_CB_FIND
    if (aes->devId != INVALID_DEVID)
    #endif
    {
        int ret = wc_CryptoCb_AesEcbDecrypt(aes, out, in, sz);
        if (ret != CRYPTOCB_UNAVAILABLE)
            return ret;
        /* fall-through when unavailable */
    }
#endif
#ifdef WOLFSSL_IMXRT_DCP
    /* the DCP engine only supports 128-bit keys */
    if (aes->keylen == 16)
        return DCPAesEcbDecrypt(aes, out, in, sz);
#endif
#ifdef WOLFSSL_AESNI
    if (haveAESNI && aes->use_aesni) {
        SAVE_VECTOR_REGISTERS(return _svr_ret;);
        AES_ECB_decrypt(in, out, sz, (byte*)aes->key, (int)aes->rounds);
        RESTORE_VECTOR_REGISTERS();
        /* AES-NI handled the whole buffer; skip the software loop */
        blocks = 0;
    }
#endif
    while (blocks > 0) {
        int ret = wc_AesDecryptDirect(aes, out, in);
        if (ret != 0)
            return ret;
        out += AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        blocks--;
    }
    return 0;
}
  8976. int wc_AesEcbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  8977. {
  8978. int ret;
  8979. if ((in == NULL) || (out == NULL) || (aes == NULL))
  8980. return BAD_FUNC_ARG;
  8981. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  8982. ret = _AesEcbEncrypt(aes, out, in, sz);
  8983. RESTORE_VECTOR_REGISTERS();
  8984. return ret;
  8985. }
  8986. int wc_AesEcbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  8987. {
  8988. int ret;
  8989. if ((in == NULL) || (out == NULL) || (aes == NULL))
  8990. return BAD_FUNC_ARG;
  8991. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  8992. ret = _AesEcbDecrypt(aes, out, in, sz);
  8993. RESTORE_VECTOR_REGISTERS();
  8994. return ret;
  8995. }
  8996. #endif
  8997. #endif /* HAVE_AES_ECB */
  8998. #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_OFB)
  8999. /* Feedback AES mode
  9000. *
  9001. * aes structure holding key to use for encryption
  9002. * out buffer to hold result of encryption (must be at least as large as input
  9003. * buffer)
  9004. * in buffer to encrypt
  9005. * sz size of input buffer
  9006. * mode flag to specify AES mode
  9007. *
  9008. * returns 0 on success and negative error values on failure
  9009. */
  9010. /* Software AES - CFB Encrypt */
/* Shared AES feedback-mode (CFB128 / OFB) encrypt.
 *
 * Streaming state lives in the Aes struct: aes->reg is the feedback/shift
 * register, aes->tmp holds the last keystream block, and aes->left counts
 * unused keystream bytes remaining in aes->tmp from a previous partial call.
 *
 * aes   initialized AES context (forward cipher is used for both modes)
 * out   destination buffer, at least sz bytes
 * in    source buffer
 * sz    bytes to process (any size; partial blocks carry over via aes->left)
 * mode  AES_CFB_MODE or AES_OFB_MODE
 *
 * returns 0 on success and negative error values on failure
 */
static WARN_UNUSED_RESULT int wc_AesFeedbackEncrypt(
    Aes* aes, byte* out, const byte* in, word32 sz, byte mode)
{
    byte* tmp = NULL;
    int ret = 0;
    word32 processed;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    /* consume any unused bytes left in aes->tmp */
    processed = min(aes->left, sz);
    xorbufout(out, in, (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left, processed);
#ifdef WOLFSSL_AES_CFB
    /* CFB feeds ciphertext back: patch the produced bytes into aes->reg */
    if (mode == AES_CFB_MODE) {
        XMEMCPY((byte*)aes->reg + AES_BLOCK_SIZE - aes->left, out, processed);
    }
#endif
    aes->left -= processed;
    out += processed;
    in += processed;
    sz -= processed;

    SAVE_VECTOR_REGISTERS(return _svr_ret;);
    while (sz >= AES_BLOCK_SIZE) {
        /* Using aes->tmp here for inline case i.e. in=out */
        ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
        if (ret != 0)
            break;
#ifdef WOLFSSL_AES_OFB
        /* OFB feeds the raw keystream block back */
        if (mode == AES_OFB_MODE) {
            XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
        }
#endif
        xorbuf((byte*)aes->tmp, in, AES_BLOCK_SIZE);
#ifdef WOLFSSL_AES_CFB
        /* CFB feeds the ciphertext block back (aes->tmp now holds it) */
        if (mode == AES_CFB_MODE) {
            XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
        }
#endif
        XMEMCPY(out, aes->tmp, AES_BLOCK_SIZE);
        out += AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        sz -= AES_BLOCK_SIZE;
        aes->left = 0;
    }

    /* encrypt left over data */
    if ((ret == 0) && sz) {
        ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
    }
    if ((ret == 0) && sz) {
        /* remember how much keystream remains for the next call */
        aes->left = AES_BLOCK_SIZE;
        tmp = (byte*)aes->tmp;
#ifdef WOLFSSL_AES_OFB
        if (mode == AES_OFB_MODE) {
            XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
        }
#endif
        xorbufout(out, in, tmp, sz);
#ifdef WOLFSSL_AES_CFB
        /* only sz ciphertext bytes exist; the rest of aes->reg is filled
         * in by the aes->left path on the next call */
        if (mode == AES_CFB_MODE) {
            XMEMCPY(aes->reg, out, sz);
        }
#endif
        aes->left -= sz;
    }
    RESTORE_VECTOR_REGISTERS();

    return ret;
}
  9078. #ifdef HAVE_AES_DECRYPT
  9079. /* CFB 128
  9080. *
  9081. * aes structure holding key to use for decryption
  9082. * out buffer to hold result of decryption (must be at least as large as input
  9083. * buffer)
  9084. * in buffer to decrypt
  9085. * sz size of input buffer
  9086. *
  9087. * returns 0 on success and negative error values on failure
  9088. */
  9089. /* Software AES - CFB Decrypt */
/* Shared AES feedback-mode (CFB128 / OFB) decrypt.
 *
 * Streaming state lives in the Aes struct: aes->reg is the feedback/shift
 * register, aes->tmp holds the last keystream block, and aes->left counts
 * unused keystream bytes remaining in aes->tmp from a previous partial call.
 * Both modes use the forward cipher for decryption.
 *
 * aes   initialized AES context
 * out   destination buffer, at least sz bytes
 * in    source buffer (ciphertext)
 * sz    bytes to process
 * mode  AES_CFB_MODE or AES_OFB_MODE
 *
 * returns 0 on success and negative error values on failure
 */
static WARN_UNUSED_RESULT int wc_AesFeedbackDecrypt(
    Aes* aes, byte* out, const byte* in, word32 sz, byte mode)
{
    int ret = 0;
    word32 processed;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

#ifdef WOLFSSL_AES_CFB
    /* check if more input needs copied over to aes->reg
     * (CFB feeds ciphertext back, so stash the incoming bytes before they
     * are consumed below — handles the inline in == out case) */
    if (aes->left && sz && mode == AES_CFB_MODE) {
        word32 size = min(aes->left, sz);
        XMEMCPY((byte*)aes->reg + AES_BLOCK_SIZE - aes->left, in, size);
    }
#endif
    /* consume any unused bytes left in aes->tmp */
    processed = min(aes->left, sz);
    xorbufout(out, in, (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left, processed);
    aes->left -= processed;
    out += processed;
    in += processed;
    sz -= processed;

    SAVE_VECTOR_REGISTERS(return _svr_ret;);
    /* note: strictly-greater loop; an exact final block goes through the
     * left-over path below so keystream state is retained for streaming */
    while (sz > AES_BLOCK_SIZE) {
        /* Using aes->tmp here for inline case i.e. in=out */
        ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
        if (ret != 0)
            break;
#ifdef WOLFSSL_AES_OFB
        /* OFB feeds the raw keystream block back */
        if (mode == AES_OFB_MODE) {
            XMEMCPY((byte*)aes->reg, (byte*)aes->tmp, AES_BLOCK_SIZE);
        }
#endif
        xorbuf((byte*)aes->tmp, in, AES_BLOCK_SIZE);
#ifdef WOLFSSL_AES_CFB
        /* CFB feeds the ciphertext (input) block back */
        if (mode == AES_CFB_MODE) {
            XMEMCPY(aes->reg, in, AES_BLOCK_SIZE);
        }
#endif
        XMEMCPY(out, (byte*)aes->tmp, AES_BLOCK_SIZE);
        out += AES_BLOCK_SIZE;
        in += AES_BLOCK_SIZE;
        sz -= AES_BLOCK_SIZE;
        aes->left = 0;
    }

    /* decrypt left over data */
    if ((ret == 0) && sz) {
        ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
    }
    if ((ret == 0) && sz) {
#ifdef WOLFSSL_AES_CFB
        if (mode == AES_CFB_MODE) {
            XMEMCPY(aes->reg, in, sz);
        }
#endif
#ifdef WOLFSSL_AES_OFB
        if (mode == AES_OFB_MODE) {
            XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE);
        }
#endif
        /* record unused keystream for the next streaming call */
        aes->left = AES_BLOCK_SIZE - sz;
        xorbufout(out, in, aes->tmp, sz);
    }
    RESTORE_VECTOR_REGISTERS();

    return ret;
}
  9156. #endif /* HAVE_AES_DECRYPT */
  9157. #endif /* WOLFSSL_AES_CFB */
  9158. #ifdef WOLFSSL_AES_CFB
  9159. /* CFB 128
  9160. *
  9161. * aes structure holding key to use for encryption
  9162. * out buffer to hold result of encryption (must be at least as large as input
  9163. * buffer)
  9164. * in buffer to encrypt
  9165. * sz size of input buffer
  9166. *
  9167. * returns 0 on success and negative error values on failure
  9168. */
  9169. /* Software AES - CFB Encrypt */
  9170. int wc_AesCfbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9171. {
  9172. return wc_AesFeedbackEncrypt(aes, out, in, sz, AES_CFB_MODE);
  9173. }
  9174. #ifdef HAVE_AES_DECRYPT
  9175. /* CFB 128
  9176. *
  9177. * aes structure holding key to use for decryption
  9178. * out buffer to hold result of decryption (must be at least as large as input
  9179. * buffer)
  9180. * in buffer to decrypt
  9181. * sz size of input buffer
  9182. *
  9183. * returns 0 on success and negative error values on failure
  9184. */
  9185. /* Software AES - CFB Decrypt */
  9186. int wc_AesCfbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9187. {
  9188. return wc_AesFeedbackDecrypt(aes, out, in, sz, AES_CFB_MODE);
  9189. }
  9190. #endif /* HAVE_AES_DECRYPT */
  9191. /* shift the whole AES_BLOCK_SIZE array left by 8 or 1 bits */
  9192. static void shiftLeftArray(byte* ary, byte shift)
  9193. {
  9194. int i;
  9195. if (shift == WOLFSSL_BIT_SIZE) {
  9196. /* shifting over by 8 bits */
  9197. for (i = 0; i < AES_BLOCK_SIZE - 1; i++) {
  9198. ary[i] = ary[i+1];
  9199. }
  9200. ary[i] = 0;
  9201. }
  9202. else {
  9203. /* shifting over by 7 or less bits */
  9204. for (i = 0; i < AES_BLOCK_SIZE - 1; i++) {
  9205. byte carry = ary[i+1] & (0XFF << (WOLFSSL_BIT_SIZE - shift));
  9206. carry >>= (WOLFSSL_BIT_SIZE - shift);
  9207. ary[i] = (byte)((ary[i] << shift) + carry);
  9208. }
  9209. ary[i] = ary[i] << shift;
  9210. }
  9211. }
/* AES-CFB, 8-bit feedback (CFB8) encrypt/decrypt.
 *
 * aes  initialized AES context (forward cipher used in both directions)
 * out  destination buffer, at least sz bytes
 * in   source buffer
 * sz   number of bytes to process
 * dir  AES_ENCRYPTION or AES_DECRYPTION
 *
 * returns 0 on success and negative values on failure */
static WARN_UNUSED_RESULT int wc_AesFeedbackCFB8(
    Aes* aes, byte* out, const byte* in, word32 sz, byte dir)
{
    byte *pt;
    int ret = 0;

    if (aes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    if (sz == 0) {
        return 0;
    }

    SAVE_VECTOR_REGISTERS(return _svr_ret;);
    /* one full AES pass per output byte */
    while (sz > 0) {
        ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
        if (ret != 0)
            break;
        if (dir == AES_DECRYPTION) {
            pt = (byte*)aes->reg;

            /* LSB + CAT: shift register up a byte, append input byte */
            shiftLeftArray(pt, WOLFSSL_BIT_SIZE);
            pt[AES_BLOCK_SIZE - 1] = in[0];
        }

        /* MSB + XOR: output byte = first keystream byte XOR input byte */
#ifdef BIG_ENDIAN_ORDER
        ByteReverseWords(aes->tmp, aes->tmp, AES_BLOCK_SIZE);
#endif
        out[0] = (byte)(aes->tmp[0] ^ in[0]);
        if (dir == AES_ENCRYPTION) {
            pt = (byte*)aes->reg;

            /* LSB + CAT: shift register up a byte, append output byte */
            shiftLeftArray(pt, WOLFSSL_BIT_SIZE);
            pt[AES_BLOCK_SIZE - 1] = out[0];
        }

        out += 1;
        in  += 1;
        sz  -= 1;
    }
    RESTORE_VECTOR_REGISTERS();

    return ret;
}
  9253. /* returns 0 on success and negative values on failure */
  9254. static WARN_UNUSED_RESULT int wc_AesFeedbackCFB1(
  9255. Aes* aes, byte* out, const byte* in, word32 sz, byte dir)
  9256. {
  9257. byte tmp;
  9258. byte cur = 0; /* hold current work in order to handle inline in=out */
  9259. byte* pt;
  9260. int bit = 7;
  9261. int ret = 0;
  9262. if (aes == NULL || out == NULL || in == NULL) {
  9263. return BAD_FUNC_ARG;
  9264. }
  9265. if (sz == 0) {
  9266. return 0;
  9267. }
  9268. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  9269. while (sz > 0) {
  9270. ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg);
  9271. if (ret != 0)
  9272. break;
  9273. if (dir == AES_DECRYPTION) {
  9274. pt = (byte*)aes->reg;
  9275. /* LSB + CAT */
  9276. tmp = (0X01 << bit) & in[0];
  9277. tmp = tmp >> bit;
  9278. tmp &= 0x01;
  9279. shiftLeftArray((byte*)aes->reg, 1);
  9280. pt[AES_BLOCK_SIZE - 1] |= tmp;
  9281. }
  9282. /* MSB + XOR */
  9283. tmp = (0X01 << bit) & in[0];
  9284. pt = (byte*)aes->tmp;
  9285. tmp = (pt[0] >> 7) ^ (tmp >> bit);
  9286. tmp &= 0x01;
  9287. cur |= (tmp << bit);
  9288. if (dir == AES_ENCRYPTION) {
  9289. pt = (byte*)aes->reg;
  9290. /* LSB + CAT */
  9291. shiftLeftArray((byte*)aes->reg, 1);
  9292. pt[AES_BLOCK_SIZE - 1] |= tmp;
  9293. }
  9294. bit--;
  9295. if (bit < 0) {
  9296. out[0] = cur;
  9297. out += 1;
  9298. in += 1;
  9299. sz -= 1;
  9300. bit = 7;
  9301. cur = 0;
  9302. }
  9303. else {
  9304. sz -= 1;
  9305. }
  9306. }
  9307. if (ret == 0) {
  9308. if (bit > 0 && bit < 7) {
  9309. out[0] = cur;
  9310. }
  9311. }
  9312. RESTORE_VECTOR_REGISTERS();
  9313. return ret;
  9314. }
  9315. /* CFB 1
  9316. *
  9317. * aes structure holding key to use for encryption
  9318. * out buffer to hold result of encryption (must be at least as large as input
  9319. * buffer)
  9320. * in buffer to encrypt (packed to left, i.e. 101 is 0x90)
  9321. * sz size of input buffer in bits (0x1 would be size of 1 and 0xFF size of 8)
  9322. *
  9323. * returns 0 on success and negative values on failure
  9324. */
  9325. int wc_AesCfb1Encrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9326. {
  9327. return wc_AesFeedbackCFB1(aes, out, in, sz, AES_ENCRYPTION);
  9328. }
  9329. /* CFB 8
  9330. *
  9331. * aes structure holding key to use for encryption
  9332. * out buffer to hold result of encryption (must be at least as large as input
  9333. * buffer)
  9334. * in buffer to encrypt
  9335. * sz size of input buffer
  9336. *
  9337. * returns 0 on success and negative values on failure
  9338. */
  9339. int wc_AesCfb8Encrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9340. {
  9341. return wc_AesFeedbackCFB8(aes, out, in, sz, AES_ENCRYPTION);
  9342. }
  9343. #ifdef HAVE_AES_DECRYPT
  9344. /* CFB 1
  9345. *
  9346. * aes structure holding key to use for encryption
  9347. * out buffer to hold result of encryption (must be at least as large as input
  9348. * buffer)
  9349. * in buffer to encrypt
  9350. * sz size of input buffer in bits (0x1 would be size of 1 and 0xFF size of 8)
  9351. *
  9352. * returns 0 on success and negative values on failure
  9353. */
  9354. int wc_AesCfb1Decrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9355. {
  9356. return wc_AesFeedbackCFB1(aes, out, in, sz, AES_DECRYPTION);
  9357. }
  9358. /* CFB 8
  9359. *
  9360. * aes structure holding key to use for encryption
  9361. * out buffer to hold result of encryption (must be at least as large as input
  9362. * buffer)
  9363. * in buffer to encrypt
  9364. * sz size of input buffer
  9365. *
  9366. * returns 0 on success and negative values on failure
  9367. */
  9368. int wc_AesCfb8Decrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9369. {
  9370. return wc_AesFeedbackCFB8(aes, out, in, sz, AES_DECRYPTION);
  9371. }
  9372. #endif /* HAVE_AES_DECRYPT */
  9373. #endif /* WOLFSSL_AES_CFB */
  9374. #ifdef WOLFSSL_AES_OFB
  9375. /* OFB
  9376. *
  9377. * aes structure holding key to use for encryption
  9378. * out buffer to hold result of encryption (must be at least as large as input
  9379. * buffer)
  9380. * in buffer to encrypt
  9381. * sz size of input buffer
  9382. *
  9383. * returns 0 on success and negative error values on failure
  9384. */
  9385. /* Software AES - CFB Encrypt */
  9386. int wc_AesOfbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9387. {
  9388. return wc_AesFeedbackEncrypt(aes, out, in, sz, AES_OFB_MODE);
  9389. }
  9390. #ifdef HAVE_AES_DECRYPT
  9391. /* OFB
  9392. *
  9393. * aes structure holding key to use for decryption
  9394. * out buffer to hold result of decryption (must be at least as large as input
  9395. * buffer)
  9396. * in buffer to decrypt
  9397. * sz size of input buffer
  9398. *
  9399. * returns 0 on success and negative error values on failure
  9400. */
  9401. /* Software AES - OFB Decrypt */
  9402. int wc_AesOfbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz)
  9403. {
  9404. return wc_AesFeedbackDecrypt(aes, out, in, sz, AES_OFB_MODE);
  9405. }
  9406. #endif /* HAVE_AES_DECRYPT */
  9407. #endif /* WOLFSSL_AES_OFB */
  9408. #ifdef HAVE_AES_KEYWRAP
  9409. /* Initialize key wrap counter with value */
  9410. static WC_INLINE void InitKeyWrapCounter(byte* inOutCtr, word32 value)
  9411. {
  9412. word32 i;
  9413. word32 bytes;
  9414. bytes = sizeof(word32);
  9415. for (i = 0; i < sizeof(word32); i++) {
  9416. inOutCtr[i+sizeof(word32)] = (byte)(value >> ((bytes - 1) * 8));
  9417. bytes--;
  9418. }
  9419. }
  9420. /* Increment key wrap counter */
  9421. static WC_INLINE void IncrementKeyWrapCounter(byte* inOutCtr)
  9422. {
  9423. int i;
  9424. /* in network byte order so start at end and work back */
  9425. for (i = KEYWRAP_BLOCK_SIZE - 1; i >= 0; i--) {
  9426. if (++inOutCtr[i]) /* we're done unless we overflow */
  9427. return;
  9428. }
  9429. }
  9430. /* Decrement key wrap counter */
  9431. static WC_INLINE void DecrementKeyWrapCounter(byte* inOutCtr)
  9432. {
  9433. int i;
  9434. for (i = KEYWRAP_BLOCK_SIZE - 1; i >= 0; i--) {
  9435. if (--inOutCtr[i] != 0xFF) /* we're done unless we underflow */
  9436. return;
  9437. }
  9438. }
  9439. int wc_AesKeyWrap_ex(Aes *aes, const byte* in, word32 inSz, byte* out,
  9440. word32 outSz, const byte* iv)
  9441. {
  9442. word32 i;
  9443. byte* r;
  9444. int j;
  9445. int ret = 0;
  9446. byte t[KEYWRAP_BLOCK_SIZE];
  9447. byte tmp[AES_BLOCK_SIZE];
  9448. /* n must be at least 2 64-bit blocks, output size is (n + 1) 8 bytes (64-bit) */
  9449. if (aes == NULL || in == NULL || inSz < 2*KEYWRAP_BLOCK_SIZE ||
  9450. out == NULL || outSz < (inSz + KEYWRAP_BLOCK_SIZE))
  9451. return BAD_FUNC_ARG;
  9452. /* input must be multiple of 64-bits */
  9453. if (inSz % KEYWRAP_BLOCK_SIZE != 0)
  9454. return BAD_FUNC_ARG;
  9455. r = out + 8;
  9456. XMEMCPY(r, in, inSz);
  9457. XMEMSET(t, 0, sizeof(t));
  9458. /* user IV is optional */
  9459. if (iv == NULL) {
  9460. XMEMSET(tmp, 0xA6, KEYWRAP_BLOCK_SIZE);
  9461. } else {
  9462. XMEMCPY(tmp, iv, KEYWRAP_BLOCK_SIZE);
  9463. }
  9464. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  9465. for (j = 0; j <= 5; j++) {
  9466. for (i = 1; i <= inSz / KEYWRAP_BLOCK_SIZE; i++) {
  9467. /* load R[i] */
  9468. XMEMCPY(tmp + KEYWRAP_BLOCK_SIZE, r, KEYWRAP_BLOCK_SIZE);
  9469. ret = wc_AesEncryptDirect(aes, tmp, tmp);
  9470. if (ret != 0)
  9471. break;
  9472. /* calculate new A */
  9473. IncrementKeyWrapCounter(t);
  9474. xorbuf(tmp, t, KEYWRAP_BLOCK_SIZE);
  9475. /* save R[i] */
  9476. XMEMCPY(r, tmp + KEYWRAP_BLOCK_SIZE, KEYWRAP_BLOCK_SIZE);
  9477. r += KEYWRAP_BLOCK_SIZE;
  9478. }
  9479. if (ret != 0)
  9480. break;
  9481. r = out + KEYWRAP_BLOCK_SIZE;
  9482. }
  9483. RESTORE_VECTOR_REGISTERS();
  9484. if (ret != 0)
  9485. return ret;
  9486. /* C[0] = A */
  9487. XMEMCPY(out, tmp, KEYWRAP_BLOCK_SIZE);
  9488. return (int)(inSz + KEYWRAP_BLOCK_SIZE);
  9489. }
  9490. /* perform AES key wrap (RFC3394), return out sz on success, negative on err */
  9491. int wc_AesKeyWrap(const byte* key, word32 keySz, const byte* in, word32 inSz,
  9492. byte* out, word32 outSz, const byte* iv)
  9493. {
  9494. #ifdef WOLFSSL_SMALL_STACK
  9495. Aes *aes = NULL;
  9496. #else
  9497. Aes aes[1];
  9498. #endif
  9499. int ret;
  9500. if (key == NULL)
  9501. return BAD_FUNC_ARG;
  9502. #ifdef WOLFSSL_SMALL_STACK
  9503. if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
  9504. DYNAMIC_TYPE_AES)) == NULL)
  9505. return MEMORY_E;
  9506. #endif
  9507. ret = wc_AesInit(aes, NULL, INVALID_DEVID);
  9508. if (ret != 0)
  9509. goto out;
  9510. ret = wc_AesSetKey(aes, key, keySz, NULL, AES_ENCRYPTION);
  9511. if (ret != 0) {
  9512. wc_AesFree(aes);
  9513. goto out;
  9514. }
  9515. ret = wc_AesKeyWrap_ex(aes, in, inSz, out, outSz, iv);
  9516. wc_AesFree(aes);
  9517. out:
  9518. #ifdef WOLFSSL_SMALL_STACK
  9519. if (aes != NULL)
  9520. XFREE(aes, NULL, DYNAMIC_TYPE_AES);
  9521. #endif
  9522. return ret;
  9523. }
/* AES key unwrap (RFC 3394) using an already-initialized AES context.
 *
 * aes    AES context with the KEK set for decryption
 * in     wrapped data; at least 3 64-bit blocks (A + 2 data blocks)
 * inSz   size of 'in' in bytes; must be a multiple of KEYWRAP_BLOCK_SIZE
 * out    output buffer; receives inSz - KEYWRAP_BLOCK_SIZE bytes
 * outSz  capacity of 'out' in bytes
 * iv     optional 8-byte alternative IV to verify against; NULL selects
 *        the RFC 3394 default 0xA6A6A6A6A6A6A6A6
 *
 * returns the unwrapped length (inSz - KEYWRAP_BLOCK_SIZE) on success,
 * BAD_KEYWRAP_IV_E if the recovered IV does not match, negative on error
 */
int wc_AesKeyUnWrap_ex(Aes *aes, const byte* in, word32 inSz, byte* out,
                       word32 outSz, const byte* iv)
{
    byte* r;
    word32 i, n;
    int j;
    int ret = 0;

    byte t[KEYWRAP_BLOCK_SIZE];
    byte tmp[AES_BLOCK_SIZE];

    const byte* expIv;
    const byte defaultIV[] = {
        0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6
    };

    if (aes == NULL || in == NULL || inSz < 3 * KEYWRAP_BLOCK_SIZE ||
        out == NULL || outSz < (inSz - KEYWRAP_BLOCK_SIZE))
        return BAD_FUNC_ARG;

    /* input must be multiple of 64-bits */
    if (inSz % KEYWRAP_BLOCK_SIZE != 0)
        return BAD_FUNC_ARG;

    /* user IV optional */
    if (iv != NULL)
        expIv = iv;
    else
        expIv = defaultIV;

    /* A = C[0], R[i] = C[i] */
    XMEMCPY(tmp, in, KEYWRAP_BLOCK_SIZE);
    XMEMCPY(out, in + KEYWRAP_BLOCK_SIZE, inSz - KEYWRAP_BLOCK_SIZE);
    XMEMSET(t, 0, sizeof(t));

    SAVE_VECTOR_REGISTERS(return _svr_ret;);

    /* initialize counter to 6n; since inSz is a multiple of 8, the
     * expression below equals inSz / KEYWRAP_BLOCK_SIZE - 1 = n */
    n = (inSz - 1) / KEYWRAP_BLOCK_SIZE;
    InitKeyWrapCounter(t, 6 * n);

    /* unwind the 6 wrapping rounds in reverse, RFC 3394 section 2.2.2 */
    for (j = 5; j >= 0; j--) {
        for (i = n; i >= 1; i--) {
            /* calculate A */
            xorbuf(tmp, t, KEYWRAP_BLOCK_SIZE);
            DecrementKeyWrapCounter(t);

            /* load R[i], starting at end of R */
            r = out + ((i - 1) * KEYWRAP_BLOCK_SIZE);
            XMEMCPY(tmp + KEYWRAP_BLOCK_SIZE, r, KEYWRAP_BLOCK_SIZE);
            ret = wc_AesDecryptDirect(aes, tmp, tmp);
            if (ret != 0)
                break;

            /* save R[i] */
            XMEMCPY(r, tmp + KEYWRAP_BLOCK_SIZE, KEYWRAP_BLOCK_SIZE);
        }
        if (ret != 0)
            break;
    }
    RESTORE_VECTOR_REGISTERS();

    if (ret != 0)
        return ret;

    /* verify IV */
    if (XMEMCMP(tmp, expIv, KEYWRAP_BLOCK_SIZE) != 0)
        return BAD_KEYWRAP_IV_E;

    return (int)(inSz - KEYWRAP_BLOCK_SIZE);
}
  9581. int wc_AesKeyUnWrap(const byte* key, word32 keySz, const byte* in, word32 inSz,
  9582. byte* out, word32 outSz, const byte* iv)
  9583. {
  9584. #ifdef WOLFSSL_SMALL_STACK
  9585. Aes *aes = NULL;
  9586. #else
  9587. Aes aes[1];
  9588. #endif
  9589. int ret;
  9590. (void)iv;
  9591. if (key == NULL)
  9592. return BAD_FUNC_ARG;
  9593. #ifdef WOLFSSL_SMALL_STACK
  9594. if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL,
  9595. DYNAMIC_TYPE_AES)) == NULL)
  9596. return MEMORY_E;
  9597. #endif
  9598. ret = wc_AesInit(aes, NULL, INVALID_DEVID);
  9599. if (ret != 0)
  9600. goto out;
  9601. ret = wc_AesSetKey(aes, key, keySz, NULL, AES_DECRYPTION);
  9602. if (ret != 0) {
  9603. wc_AesFree(aes);
  9604. goto out;
  9605. }
  9606. ret = wc_AesKeyUnWrap_ex(aes, in, inSz, out, outSz, iv);
  9607. wc_AesFree(aes);
  9608. out:
  9609. #ifdef WOLFSSL_SMALL_STACK
  9610. if (aes)
  9611. XFREE(aes, NULL, DYNAMIC_TYPE_AES);
  9612. #endif
  9613. return ret;
  9614. }
  9615. #endif /* HAVE_AES_KEYWRAP */
  9616. #ifdef WOLFSSL_AES_XTS
  9617. /* Galios Field to use */
  9618. #define GF_XTS 0x87
  9619. /* This is to help with setting keys to correct encrypt or decrypt type.
  9620. *
  9621. * tweak AES key for tweak in XTS
  9622. * aes AES key for encrypt/decrypt process
  9623. * key buffer holding aes key | tweak key
  9624. * len length of key buffer in bytes. Should be twice that of key size. i.e.
  9625. * 32 for a 16 byte key.
  9626. * dir direction, either AES_ENCRYPTION or AES_DECRYPTION
  9627. * heap heap hint to use for memory. Can be NULL
  9628. * devId id to use with async crypto. Can be 0
  9629. *
  9630. * Note: is up to user to call wc_AesFree on tweak and aes key when done.
  9631. *
  9632. * return 0 on success
  9633. */
  9634. int wc_AesXtsSetKey(XtsAes* aes, const byte* key, word32 len, int dir,
  9635. void* heap, int devId)
  9636. {
  9637. word32 keySz;
  9638. int ret = 0;
  9639. if (aes == NULL || key == NULL) {
  9640. return BAD_FUNC_ARG;
  9641. }
  9642. if ((ret = wc_AesInit(&aes->tweak, heap, devId)) != 0) {
  9643. return ret;
  9644. }
  9645. if ((ret = wc_AesInit(&aes->aes, heap, devId)) != 0) {
  9646. return ret;
  9647. }
  9648. keySz = len/2;
  9649. if (keySz != 16 && keySz != 32) {
  9650. WOLFSSL_MSG("Unsupported key size");
  9651. return WC_KEY_SIZE_E;
  9652. }
  9653. if ((ret = wc_AesSetKey(&aes->aes, key, keySz, NULL, dir)) == 0) {
  9654. ret = wc_AesSetKey(&aes->tweak, key + keySz, keySz, NULL,
  9655. AES_ENCRYPTION);
  9656. if (ret != 0) {
  9657. wc_AesFree(&aes->aes);
  9658. }
  9659. }
  9660. return ret;
  9661. }
  9662. /* This is used to free up resources used by Aes structs
  9663. *
  9664. * aes AES keys to free
  9665. *
  9666. * return 0 on success
  9667. */
  9668. int wc_AesXtsFree(XtsAes* aes)
  9669. {
  9670. if (aes != NULL) {
  9671. wc_AesFree(&aes->aes);
  9672. wc_AesFree(&aes->tweak);
  9673. }
  9674. return 0;
  9675. }
  9676. /* Same process as wc_AesXtsEncrypt but uses a word64 type as the tweak value
  9677. * instead of a byte array. This just converts the word64 to a byte array and
  9678. * calls wc_AesXtsEncrypt.
  9679. *
  9680. * aes AES keys to use for block encrypt/decrypt
  9681. * out output buffer to hold cipher text
  9682. * in input plain text buffer to encrypt
  9683. * sz size of both out and in buffers
  9684. * sector value to use for tweak
  9685. *
  9686. * returns 0 on success
  9687. */
  9688. int wc_AesXtsEncryptSector(XtsAes* aes, byte* out, const byte* in,
  9689. word32 sz, word64 sector)
  9690. {
  9691. byte* pt;
  9692. byte i[AES_BLOCK_SIZE];
  9693. XMEMSET(i, 0, AES_BLOCK_SIZE);
  9694. #ifdef BIG_ENDIAN_ORDER
  9695. sector = ByteReverseWord64(sector);
  9696. #endif
  9697. pt = (byte*)&sector;
  9698. XMEMCPY(i, pt, sizeof(word64));
  9699. return wc_AesXtsEncrypt(aes, out, in, sz, (const byte*)i, AES_BLOCK_SIZE);
  9700. }
  9701. /* Same process as wc_AesXtsDecrypt but uses a word64 type as the tweak value
  9702. * instead of a byte array. This just converts the word64 to a byte array.
  9703. *
  9704. * aes AES keys to use for block encrypt/decrypt
  9705. * out output buffer to hold plain text
  9706. * in input cipher text buffer to encrypt
  9707. * sz size of both out and in buffers
  9708. * sector value to use for tweak
  9709. *
  9710. * returns 0 on success
  9711. */
  9712. int wc_AesXtsDecryptSector(XtsAes* aes, byte* out, const byte* in, word32 sz,
  9713. word64 sector)
  9714. {
  9715. byte* pt;
  9716. byte i[AES_BLOCK_SIZE];
  9717. XMEMSET(i, 0, AES_BLOCK_SIZE);
  9718. #ifdef BIG_ENDIAN_ORDER
  9719. sector = ByteReverseWord64(sector);
  9720. #endif
  9721. pt = (byte*)&sector;
  9722. XMEMCPY(i, pt, sizeof(word64));
  9723. return wc_AesXtsDecrypt(aes, out, in, sz, (const byte*)i, AES_BLOCK_SIZE);
  9724. }
  9725. #ifdef HAVE_AES_ECB
/* Helper for encrypting / decrypting the full buffer at once with ECB
 * (only usable when out != in).
 *
 * On entry out[0..AES_BLOCK_SIZE-1] must already hold the first encrypted
 * tweak. The loop derives each subsequent block's tweak from the previous
 * one by multiplying by x in GF(2^128): shift left by one bit and fold any
 * carry out of the block back in with the GF_XTS polynomial. The whole
 * buffer of tweaks is then XORed with 'in' and run through a single ECB
 * pass; the caller applies the final tweak XOR per block afterwards.
 *
 * returns 0 on success, negative on failure
 */
static WARN_UNUSED_RESULT int _AesXtsHelper(
    Aes* aes, byte* out, const byte* in, word32 sz, int dir)
{
    word32 outSz = sz;
    word32 totalSz = (sz / AES_BLOCK_SIZE) * AES_BLOCK_SIZE; /* total bytes */
    byte* pt = out;

    /* the first tweak block is already in place */
    outSz -= AES_BLOCK_SIZE;

    while (outSz > 0) {
        word32 j;
        byte carry = 0;

        /* multiply by shift left and propagate carry */
        for (j = 0; j < AES_BLOCK_SIZE && outSz > 0; j++, outSz--) {
            byte tmpC;

            tmpC = (pt[j] >> 7) & 0x01;
            pt[j+AES_BLOCK_SIZE] = (byte)((pt[j] << 1) + carry);
            carry = tmpC;
        }
        if (carry) {
            /* reduce modulo the GF(2^128) field polynomial */
            pt[AES_BLOCK_SIZE] ^= GF_XTS;
        }

        pt += AES_BLOCK_SIZE;
    }

    xorbuf(out, in, totalSz);
    if (dir == AES_ENCRYPTION) {
        return _AesEcbEncrypt(aes, out, out, totalSz);
    }
    else {
        return _AesEcbDecrypt(aes, out, out, totalSz);
    }
}
  9757. #endif /* HAVE_AES_ECB */
  9758. /* AES with XTS mode. (XTS) XEX encryption with Tweak and cipher text Stealing.
  9759. *
  9760. * xaes AES keys to use for block encrypt/decrypt
  9761. * out output buffer to hold cipher text
  9762. * in input plain text buffer to encrypt
  9763. * sz size of both out and in buffers
  9764. * i value to use for tweak
  9765. * iSz size of i buffer, should always be AES_BLOCK_SIZE but having this input
  9766. * adds a sanity check on how the user calls the function.
  9767. *
  9768. * returns 0 on success
  9769. */
  9770. /* Software AES - XTS Encrypt */
int wc_AesXtsEncrypt(XtsAes* xaes, byte* out, const byte* in, word32 sz,
        const byte* i, word32 iSz)
{
    int ret = 0;
    word32 blocks = (sz / AES_BLOCK_SIZE); /* number of whole AES blocks */
    Aes *aes, *tweak;

    if (xaes == NULL || out == NULL || in == NULL) {
        return BAD_FUNC_ARG;
    }

    aes   = &xaes->aes;   /* data-encryption key schedule */
    tweak = &xaes->tweak; /* tweak-encryption key schedule */

    /* The tweak value i must supply at least one full block. */
    if (iSz < AES_BLOCK_SIZE) {
        return BAD_FUNC_ARG;
    }

    if (blocks > 0) {
        byte tmp[AES_BLOCK_SIZE]; /* running encrypted tweak T */

        XMEMSET(tmp, 0, AES_BLOCK_SIZE); /* set to 0's in case of improper AES
                                          * key setup passed to encrypt direct*/

        SAVE_VECTOR_REGISTERS(return _svr_ret;);

        /* T = AES-Enc(tweak key, i) — the initial XTS tweak block. */
        ret = wc_AesEncryptDirect(tweak, tmp, i);
        if (ret != 0) {
            RESTORE_VECTOR_REGISTERS();
            return ret;
        }

#ifdef HAVE_AES_ECB
        /* encrypt all of buffer at once when possible */
        if (in != out) { /* can not handle inline */
            /* Seed out[0..15] with T; _AesXtsHelper expands the tweak
             * sequence into out and bulk-processes via ECB. */
            XMEMCPY(out, tmp, AES_BLOCK_SIZE);
            if ((ret = _AesXtsHelper(aes, out, in, sz, AES_ENCRYPTION)) != 0) {
                RESTORE_VECTOR_REGISTERS();
                return ret;
            }
        }
#endif

        while (blocks > 0) {
            word32 j;
            byte carry = 0;

#ifdef HAVE_AES_ECB
            if (in == out) /* bulk ECB path above already handled in != out */
#endif
            { /* check for if inline */
                byte buf[AES_BLOCK_SIZE];

                /* C = E_K1(P ^ T) ... */
                XMEMCPY(buf, in, AES_BLOCK_SIZE);
                xorbuf(buf, tmp, AES_BLOCK_SIZE);
                ret = wc_AesEncryptDirect(aes, out, buf);
                if (ret != 0) {
                    RESTORE_VECTOR_REGISTERS();
                    return ret;
                }
            }
            /* ... ^ T (second XEX xor with the tweak) */
            xorbuf(out, tmp, AES_BLOCK_SIZE);

            /* multiply by shift left and propagate carry:
             * T <- T * alpha in GF(2^128), little-endian byte order */
            for (j = 0; j < AES_BLOCK_SIZE; j++) {
                byte tmpC;
                tmpC   = (tmp[j] >> 7) & 0x01;
                tmp[j] = (byte)((tmp[j] << 1) + carry);
                carry  = tmpC;
            }
            if (carry) {
                tmp[0] ^= GF_XTS; /* reduce by the GF(2^128) polynomial */
            }

            in  += AES_BLOCK_SIZE;
            out += AES_BLOCK_SIZE;
            sz  -= AES_BLOCK_SIZE;
            blocks--;
        }

        /* stealing operation of XTS to handle left overs:
         * sz is now the partial-block remainder (0 < sz < 16). The tail is
         * padded with the end of the previous ciphertext block, re-encrypted,
         * and swapped with it. */
        if (sz > 0) {
            byte buf[AES_BLOCK_SIZE];

            XMEMCPY(buf, out - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
            if (sz >= AES_BLOCK_SIZE) { /* extra sanity check before copy */
                RESTORE_VECTOR_REGISTERS();
                return BUFFER_E;
            }
            if (in != out) {
                /* final partial ciphertext = head of previous block's output */
                XMEMCPY(out, buf, sz);
                XMEMCPY(buf, in, sz);
            }
            else {
                /* inline: stage through buf2 so in/out overlap is safe */
                byte buf2[AES_BLOCK_SIZE];

                XMEMCPY(buf2, buf, sz);
                XMEMCPY(buf, in, sz);
                XMEMCPY(out, buf2, sz);
            }

            /* Encrypt the stitched block with the last tweak value. */
            xorbuf(buf, tmp, AES_BLOCK_SIZE);
            ret = wc_AesEncryptDirect(aes, out - AES_BLOCK_SIZE, buf);
            if (ret == 0)
                xorbuf(out - AES_BLOCK_SIZE, tmp, AES_BLOCK_SIZE);
        }
        RESTORE_VECTOR_REGISTERS();
    }
    else {
        /* XTS requires at least one full block of input. */
        WOLFSSL_MSG("Plain text input too small for encryption");
        return BAD_FUNC_ARG;
    }

    return ret;
}
  9868. /* Same process as encryption but Aes key is AES_DECRYPTION type.
  9869. *
  9870. * xaes AES keys to use for block encrypt/decrypt
  9871. * out output buffer to hold plain text
  9872. * in input cipher text buffer to decrypt
  9873. * sz size of both out and in buffers
  9874. * i value to use for tweak
  9875. * iSz size of i buffer, should always be AES_BLOCK_SIZE but having this input
  9876. * adds a sanity check on how the user calls the function.
  9877. *
  9878. * returns 0 on success
  9879. */
  9880. /* Software AES - XTS Decrypt */
  9881. int wc_AesXtsDecrypt(XtsAes* xaes, byte* out, const byte* in, word32 sz,
  9882. const byte* i, word32 iSz)
  9883. {
  9884. int ret = 0;
  9885. word32 blocks = (sz / AES_BLOCK_SIZE);
  9886. Aes *aes, *tweak;
  9887. if (xaes == NULL || out == NULL || in == NULL) {
  9888. return BAD_FUNC_ARG;
  9889. }
  9890. aes = &xaes->aes;
  9891. tweak = &xaes->tweak;
  9892. if (iSz < AES_BLOCK_SIZE) {
  9893. return BAD_FUNC_ARG;
  9894. }
  9895. if (blocks > 0) {
  9896. word32 j;
  9897. byte carry = 0;
  9898. byte tmp[AES_BLOCK_SIZE];
  9899. byte stl = (sz % AES_BLOCK_SIZE);
  9900. XMEMSET(tmp, 0, AES_BLOCK_SIZE); /* set to 0's in case of improper AES
  9901. * key setup passed to decrypt direct*/
  9902. SAVE_VECTOR_REGISTERS(return _svr_ret;);
  9903. ret = wc_AesEncryptDirect(tweak, tmp, i);
  9904. if (ret != 0) {
  9905. RESTORE_VECTOR_REGISTERS();
  9906. return ret;
  9907. }
  9908. /* if Stealing then break out of loop one block early to handle special
  9909. * case */
  9910. if (stl > 0) {
  9911. blocks--;
  9912. }
  9913. #ifdef HAVE_AES_ECB
  9914. /* decrypt all of buffer at once when possible */
  9915. if (in != out) { /* can not handle inline */
  9916. XMEMCPY(out, tmp, AES_BLOCK_SIZE);
  9917. if ((ret = _AesXtsHelper(aes, out, in, sz, AES_DECRYPTION)) != 0) {
  9918. RESTORE_VECTOR_REGISTERS();
  9919. return ret;
  9920. }
  9921. }
  9922. #endif
  9923. while (blocks > 0) {
  9924. #ifdef HAVE_AES_ECB
  9925. if (in == out)
  9926. #endif
  9927. { /* check for if inline */
  9928. byte buf[AES_BLOCK_SIZE];
  9929. XMEMCPY(buf, in, AES_BLOCK_SIZE);
  9930. xorbuf(buf, tmp, AES_BLOCK_SIZE);
  9931. ret = wc_AesDecryptDirect(aes, out, buf);
  9932. if (ret != 0) {
  9933. RESTORE_VECTOR_REGISTERS();
  9934. return ret;
  9935. }
  9936. }
  9937. xorbuf(out, tmp, AES_BLOCK_SIZE);
  9938. /* multiply by shift left and propagate carry */
  9939. for (j = 0; j < AES_BLOCK_SIZE; j++) {
  9940. byte tmpC;
  9941. tmpC = (tmp[j] >> 7) & 0x01;
  9942. tmp[j] = (byte)((tmp[j] << 1) + carry);
  9943. carry = tmpC;
  9944. }
  9945. if (carry) {
  9946. tmp[0] ^= GF_XTS;
  9947. }
  9948. carry = 0;
  9949. in += AES_BLOCK_SIZE;
  9950. out += AES_BLOCK_SIZE;
  9951. sz -= AES_BLOCK_SIZE;
  9952. blocks--;
  9953. }
  9954. /* stealing operation of XTS to handle left overs */
  9955. if (sz >= AES_BLOCK_SIZE) {
  9956. byte buf[AES_BLOCK_SIZE];
  9957. byte tmp2[AES_BLOCK_SIZE];
  9958. /* multiply by shift left and propagate carry */
  9959. for (j = 0; j < AES_BLOCK_SIZE; j++) {
  9960. byte tmpC;
  9961. tmpC = (tmp[j] >> 7) & 0x01;
  9962. tmp2[j] = (byte)((tmp[j] << 1) + carry);
  9963. carry = tmpC;
  9964. }
  9965. if (carry) {
  9966. tmp2[0] ^= GF_XTS;
  9967. }
  9968. XMEMCPY(buf, in, AES_BLOCK_SIZE);
  9969. xorbuf(buf, tmp2, AES_BLOCK_SIZE);
  9970. ret = wc_AesDecryptDirect(aes, out, buf);
  9971. if (ret != 0) {
  9972. RESTORE_VECTOR_REGISTERS();
  9973. return ret;
  9974. }
  9975. xorbuf(out, tmp2, AES_BLOCK_SIZE);
  9976. /* tmp2 holds partial | last */
  9977. XMEMCPY(tmp2, out, AES_BLOCK_SIZE);
  9978. in += AES_BLOCK_SIZE;
  9979. out += AES_BLOCK_SIZE;
  9980. sz -= AES_BLOCK_SIZE;
  9981. /* Make buffer with end of cipher text | last */
  9982. XMEMCPY(buf, tmp2, AES_BLOCK_SIZE);
  9983. if (sz >= AES_BLOCK_SIZE) { /* extra sanity check before copy */
  9984. RESTORE_VECTOR_REGISTERS();
  9985. return BUFFER_E;
  9986. }
  9987. XMEMCPY(buf, in, sz);
  9988. XMEMCPY(out, tmp2, sz);
  9989. xorbuf(buf, tmp, AES_BLOCK_SIZE);
  9990. ret = wc_AesDecryptDirect(aes, tmp2, buf);
  9991. if (ret != 0) {
  9992. RESTORE_VECTOR_REGISTERS();
  9993. return ret;
  9994. }
  9995. xorbuf(tmp2, tmp, AES_BLOCK_SIZE);
  9996. XMEMCPY(out - AES_BLOCK_SIZE, tmp2, AES_BLOCK_SIZE);
  9997. }
  9998. RESTORE_VECTOR_REGISTERS();
  9999. }
  10000. else {
  10001. WOLFSSL_MSG("Plain text input too small for encryption");
  10002. return BAD_FUNC_ARG;
  10003. }
  10004. return ret;
  10005. }
  10006. #endif /* WOLFSSL_AES_XTS */
  10007. #ifdef WOLFSSL_AES_SIV
  10008. /*
  10009. * See RFC 5297 Section 2.4.
  10010. */
/* S2V pseudo-random construction from RFC 5297 Section 2.4: folds the
 * associated data, nonce, and plaintext into the 128-bit SIV tag using
 * AES-CMAC with the doubling operation (ShiftAndXorRb).
 *
 * key/keySz      CMAC key (the S2V half of the AES-SIV key)
 * assoc/assocSz  single associated-data string (this implementation
 *                supports one AD input, not the RFC's vector of them)
 * nonce/nonceSz  optional nonce; skipped when nonceSz == 0
 * data/dataSz    plaintext input
 * out            receives the AES_BLOCK_SIZE-byte V value
 *
 * returns 0 on success, MEMORY_E on allocation failure (small stack),
 * or an error from the CMAC layer. */
static WARN_UNUSED_RESULT int S2V(
    const byte* key, word32 keySz, const byte* assoc, word32 assocSz,
    const byte* nonce, word32 nonceSz, const byte* data,
    word32 dataSz, byte* out)
{
#ifdef WOLFSSL_SMALL_STACK
    byte* tmp[3] = {NULL, NULL, NULL}; /* three scratch blocks on the heap */
    int i;
    Cmac* cmac;
#else
    byte tmp[3][AES_BLOCK_SIZE];
    Cmac cmac[1];
#endif
    word32 macSz = AES_BLOCK_SIZE;
    int ret = 0;
    word32 zeroBytes;

#ifdef WOLFSSL_SMALL_STACK
    for (i = 0; i < 3; ++i) {
        tmp[i] = (byte*)XMALLOC(AES_BLOCK_SIZE, NULL, DYNAMIC_TYPE_TMP_BUFFER);
        if (tmp[i] == NULL) {
            ret = MEMORY_E;
            break;
        }
    }
    if (ret == 0)
#endif
    {
        XMEMSET(tmp[1], 0, AES_BLOCK_SIZE);
        XMEMSET(tmp[2], 0, AES_BLOCK_SIZE);

        /* D = CMAC(K, zero block) */
        ret = wc_AesCmacGenerate(tmp[0], &macSz, tmp[1], AES_BLOCK_SIZE,
                                 key, keySz);
        if (ret == 0) {
            /* tmp[1] = dbl(D); then D = dbl(D) xor CMAC(K, assoc) */
            ShiftAndXorRb(tmp[1], tmp[0]);
            ret = wc_AesCmacGenerate(tmp[0], &macSz, assoc, assocSz, key,
                                     keySz);
            if (ret == 0) {
                xorbuf(tmp[1], tmp[0], AES_BLOCK_SIZE);
            }
        }
    }

    if (ret == 0) {
        if (nonceSz > 0) {
            /* D = dbl(D) xor CMAC(K, nonce) */
            ShiftAndXorRb(tmp[0], tmp[1]);
            ret = wc_AesCmacGenerate(tmp[1], &macSz, nonce, nonceSz, key,
                                     keySz);
            if (ret == 0) {
                xorbuf(tmp[0], tmp[1], AES_BLOCK_SIZE);
            }
        }
        else {
            /* No nonce: carry D forward unchanged in tmp[0]. */
            XMEMCPY(tmp[0], tmp[1], AES_BLOCK_SIZE);
        }
    }

    if (ret == 0) {
        if (dataSz >= AES_BLOCK_SIZE) {
            /* "xorend" case: xor D into the final 16 bytes of data and CMAC
             * the whole message incrementally. */
#ifdef WOLFSSL_SMALL_STACK
            cmac = (Cmac*)XMALLOC(sizeof(Cmac), NULL, DYNAMIC_TYPE_CMAC);
            if (cmac == NULL) {
                ret = MEMORY_E;
            }
            if (ret == 0)
#endif
            {
            #ifdef WOLFSSL_CHECK_MEM_ZERO
                /* Aes part is checked by wc_AesFree. */
                wc_MemZero_Add("wc_AesCmacGenerate cmac",
                    ((unsigned char *)cmac) + sizeof(Aes),
                    sizeof(Cmac) - sizeof(Aes));
            #endif
                /* tmp[0] = D xor last block of data */
                xorbuf(tmp[0], data + (dataSz - AES_BLOCK_SIZE),
                       AES_BLOCK_SIZE);
                ret = wc_InitCmac(cmac, key, keySz, WC_CMAC_AES, NULL);
                if (ret == 0) {
                    /* all of data except its final block ... */
                    ret = wc_CmacUpdate(cmac, data, dataSz - AES_BLOCK_SIZE);
                }
                if (ret == 0) {
                    /* ... then the xorend'ed final block */
                    ret = wc_CmacUpdate(cmac, tmp[0], AES_BLOCK_SIZE);
                }
                if (ret == 0) {
                    ret = wc_CmacFinal(cmac, out, &macSz);
                }
            }
#ifdef WOLFSSL_SMALL_STACK
            if (cmac != NULL) {
                XFREE(cmac, NULL, DYNAMIC_TYPE_CMAC);
            }
#elif defined(WOLFSSL_CHECK_MEM_ZERO)
            wc_MemZero_Check(cmac, sizeof(Cmac));
#endif
        }
        else {
            /* Short-message case: V = CMAC(K, dbl(D) xor pad(data)),
             * where pad() appends 0x80 then zeros to a full block. */
            XMEMCPY(tmp[2], data, dataSz);
            tmp[2][dataSz] |= 0x80;
            zeroBytes = AES_BLOCK_SIZE - (dataSz + 1);
            if (zeroBytes != 0) {
                XMEMSET(tmp[2] + dataSz + 1, 0, zeroBytes);
            }
            ShiftAndXorRb(tmp[1], tmp[0]);
            xorbuf(tmp[1], tmp[2], AES_BLOCK_SIZE);
            ret = wc_AesCmacGenerate(out, &macSz, tmp[1], AES_BLOCK_SIZE, key,
                                     keySz);
        }
    }

#ifdef WOLFSSL_SMALL_STACK
    for (i = 0; i < 3; ++i) {
        if (tmp[i] != NULL) {
            XFREE(tmp[i], NULL, DYNAMIC_TYPE_TMP_BUFFER);
        }
    }
#endif
    return ret;
}
/* Shared core for AES-SIV encrypt/decrypt (RFC 5297 Sections 2.6/2.7).
 *
 * key/keySz  combined key: first half feeds S2V (CMAC), second half AES-CTR;
 *            keySz must be 32, 48, or 64 bytes (256/384/512-bit combined)
 * assoc      associated data authenticated but not encrypted
 * nonce      optional nonce
 * data       input to encrypt (enc == 1) or decrypt (enc == 0)
 * siv        on encrypt: receives the computed SIV tag;
 *            on decrypt: supplies the received tag to verify
 * out        ciphertext (enc) or recovered plaintext (dec); must hold dataSz
 * enc        1 = encrypt, 0 = decrypt
 *
 * returns 0 on success, AES_SIV_AUTH_E on tag mismatch, else an error code. */
static WARN_UNUSED_RESULT int AesSivCipher(
    const byte* key, word32 keySz, const byte* assoc,
    word32 assocSz, const byte* nonce, word32 nonceSz,
    const byte* data, word32 dataSz, byte* siv, byte* out,
    int enc)
{
    int ret = 0;
#ifdef WOLFSSL_SMALL_STACK
    Aes* aes = NULL;
#else
    Aes aes[1];
#endif
    byte sivTmp[AES_BLOCK_SIZE]; /* working copy of the SIV / CTR IV */

    if (key == NULL || siv == NULL || out == NULL) {
        WOLFSSL_MSG("Bad parameter");
        ret = BAD_FUNC_ARG;
    }
    if (ret == 0 && keySz != 32 && keySz != 48 && keySz != 64) {
        WOLFSSL_MSG("Bad key size. Must be 256, 384, or 512 bits.");
        ret = BAD_FUNC_ARG;
    }

    if (ret == 0) {
        if (enc == 1) {
            /* Encrypt: derive the tag from the plaintext and publish it. */
            ret = S2V(key, keySz / 2, assoc, assocSz, nonce, nonceSz, data,
                      dataSz, sivTmp);
            if (ret != 0) {
                WOLFSSL_MSG("S2V failed.");
            }
            else {
                XMEMCPY(siv, sivTmp, AES_BLOCK_SIZE);
            }
        }
        else {
            /* Decrypt: use the received tag as the CTR IV. */
            XMEMCPY(sivTmp, siv, AES_BLOCK_SIZE);
        }
    }

#ifdef WOLFSSL_SMALL_STACK
    if (ret == 0) {
        aes = (Aes*)XMALLOC(sizeof(Aes), NULL, DYNAMIC_TYPE_AES);
        if (aes == NULL) {
            ret = MEMORY_E;
        }
    }
#endif

    if (ret == 0) {
        ret = wc_AesInit(aes, NULL, INVALID_DEVID);
        if (ret != 0) {
            WOLFSSL_MSG("Failed to initialized AES object.");
        }
    }

    if (ret == 0 && dataSz > 0) {
        /* Per RFC 5297 Section 2.5: clear the top bits of the 32-bit words
         * at offsets 8 and 12 before using the SIV as the CTR IV. */
        sivTmp[12] &= 0x7f;
        sivTmp[8]  &= 0x7f;
        /* Second half of the combined key is the CTR key. */
        ret = wc_AesSetKey(aes, key + keySz / 2, keySz / 2, sivTmp,
                           AES_ENCRYPTION);
        if (ret != 0) {
            WOLFSSL_MSG("Failed to set key for AES-CTR.");
        }
        else {
            ret = wc_AesCtrEncrypt(aes, out, data, dataSz);
            if (ret != 0) {
                WOLFSSL_MSG("AES-CTR encryption failed.");
            }
        }
    }

    if (ret == 0 && enc == 0) {
        /* Recompute the tag over the recovered plaintext and verify.
         * NOTE(review): sivTmp was masked above before CTR, so S2V here
         * writes a fresh full tag into sivTmp before the compare.
         * NOTE(review): XMEMCMP is not constant-time; a timing-safe compare
         * (e.g. ConstantCompare) would be preferable for tag checks —
         * confirm against project policy. */
        ret = S2V(key, keySz / 2, assoc, assocSz, nonce, nonceSz, out, dataSz,
                  sivTmp);
        if (ret != 0) {
            WOLFSSL_MSG("S2V failed.");
        }

        if (XMEMCMP(siv, sivTmp, AES_BLOCK_SIZE) != 0) {
            WOLFSSL_MSG("Computed SIV doesn't match received SIV.");
            ret = AES_SIV_AUTH_E;
        }
    }

    /* NOTE(review): on the small-stack early-error path aes may still be
     * NULL here; this relies on wc_AesFree tolerating NULL — verify. */
    wc_AesFree(aes);
#ifdef WOLFSSL_SMALL_STACK
    XFREE(aes, NULL, DYNAMIC_TYPE_AES);
#endif

    return ret;
}
  10205. /*
  10206. * See RFC 5297 Section 2.6.
  10207. */
  10208. int wc_AesSivEncrypt(const byte* key, word32 keySz, const byte* assoc,
  10209. word32 assocSz, const byte* nonce, word32 nonceSz,
  10210. const byte* in, word32 inSz, byte* siv, byte* out)
  10211. {
  10212. return AesSivCipher(key, keySz, assoc, assocSz, nonce, nonceSz, in, inSz,
  10213. siv, out, 1);
  10214. }
  10215. /*
  10216. * See RFC 5297 Section 2.7.
  10217. */
  10218. int wc_AesSivDecrypt(const byte* key, word32 keySz, const byte* assoc,
  10219. word32 assocSz, const byte* nonce, word32 nonceSz,
  10220. const byte* in, word32 inSz, byte* siv, byte* out)
  10221. {
  10222. return AesSivCipher(key, keySz, assoc, assocSz, nonce, nonceSz, in, inSz,
  10223. siv, out, 0);
  10224. }
  10225. #endif /* WOLFSSL_AES_SIV */
  10226. #endif /* HAVE_FIPS */
  10227. #endif /* !NO_AES */