// InstEmitSimdArithmetic.cs

// https://github.com/intel/ARM_NEON_2_x86_SSE/blob/master/NEON_2_SSE.h
// https://www.agner.org/optimize/#vectorclass @ vectori128.h

using ARMeilleure.Decoders;
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.State;
using ARMeilleure.Translation;
using System;
using System.Diagnostics;

using static ARMeilleure.Instructions.InstEmitHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper32;
using static ARMeilleure.IntermediateRepresentation.Operand.Factory;

namespace ARMeilleure.Instructions
{
    using Func2I = Func<Operand, Operand, Operand>;

    static partial class InstEmit
    {
        public static void Abs_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOp(context, Intrinsic.Arm64AbsS);
            }
            else
            {
                EmitScalarUnaryOpSx(context, (op1) => EmitAbs(context, op1));
            }
        }

        public static void Abs_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64AbsV);
            }
            else
            {
                EmitVectorUnaryOpSx(context, (op1) => EmitAbs(context, op1));
            }
        }

        public static void Add_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOp(context, Intrinsic.Arm64AddS);
            }
            else
            {
                EmitScalarBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Add_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64AddV);
            }
            else if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
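
                // X86PaddInstruction is a size-indexed table of the packed-add
                // variants (PADDB/PADDW/PADDD/PADDQ), selected by op.Size.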
                Intrinsic addInst = X86PaddInstruction[op.Size];

                Operand res = context.AddIntrinsic(addInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Addhn_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64AddhnV);
            }
            else
            {
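                // ADDHN: add each pair of wide elements from Rn and Rm, then keep only
                // the high half of every sum (round: false selects plain truncation).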
                EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: false);
            }
        }

        public static void Addp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOp(context, Intrinsic.Arm64AddpS);
            }
            else
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                Operand ne0 = EmitVectorExtractZx(context, op.Rn, 0, op.Size);
                Operand ne1 = EmitVectorExtractZx(context, op.Rn, 1, op.Size);

                Operand res = context.Add(ne0, ne1);

                context.Copy(GetVec(op.Rd), EmitVectorInsert(context, context.VectorZero(), res, 0, op.Size));
            }
        }

        public static void Addp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64AddpV);
            }
            else if (Optimizations.UseSsse3)
            {
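                // SSSE3 path: the helper gathers even and odd elements together with
                // byte shuffles, then applies the size-matched PADD from the table.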
                EmitSsse3VectorPairwiseOp(context, X86PaddInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Addv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64AddvV);
            }
            else
            {
                EmitVectorAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Cls_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64ClsV);
            }
            else
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                Operand res = context.VectorZero();

                int elems = op.GetBytesCount() >> op.Size;
                int eSize = 8 << op.Size;

                for (int index = 0; index < elems; index++)
                {
                    Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);

                    Operand de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountLeadingSigns)), ne, Const(eSize));

                    res = EmitVectorInsert(context, res, de, index, op.Size);
                }

                context.Copy(GetVec(op.Rd), res);
            }
        }

        public static void Clz_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64ClzV);
            }
            else
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                int eSize = 8 << op.Size;

                Operand res = eSize switch
                {
                    8 => Clz_V_I8 (context, GetVec(op.Rn)),
                    16 => Clz_V_I16(context, GetVec(op.Rn)),
                    32 => Clz_V_I32(context, GetVec(op.Rn)),
                    _ => default
                };

                if (res != default)
                {
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                }
                else
                {
                    int elems = op.GetBytesCount() >> op.Size;

                    res = context.VectorZero();

                    for (int index = 0; index < elems; index++)
                    {
                        Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);

                        Operand de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountLeadingZeros)), ne, Const(eSize));

                        res = EmitVectorInsert(context, res, de, index, op.Size);
                    }
                }

                context.Copy(GetVec(op.Rd), res);
            }
        }

        private static Operand Clz_V_I8(ArmEmitterContext context, Operand arg)
        {
            if (!Optimizations.UseSsse3)
            {
                return default;
            }

            // CLZ nibble table.
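            // Each table byte holds the 4-bit CLZ of its index: table[0] = 4, table[1] = 3,
            // table[2..3] = 2, table[4..7] = 1. The scalar load leaves bytes 8..15 zero,
            // so nibble values >= 8 look up a count of 0.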
            Operand clzTable = X86GetScalar(context, 0x01_01_01_01_02_02_03_04);

            Operand maskLow = X86GetAllElements(context, 0x0f_0f_0f_0f);
            Operand c04 = X86GetAllElements(context, 0x04_04_04_04);

            // CLZ of low 4 bits of elements in arg.
            Operand loClz = context.AddIntrinsic(Intrinsic.X86Pshufb, clzTable, arg);

            // Get the high 4 bits of elements in arg.
            Operand hiArg = context.AddIntrinsic(Intrinsic.X86Psrlw, arg, Const(4));

            hiArg = context.AddIntrinsic(Intrinsic.X86Pand, hiArg, maskLow);

            // CLZ of high 4 bits of elements in arg.
            Operand hiClz = context.AddIntrinsic(Intrinsic.X86Pshufb, clzTable, hiArg);

            // If high 4 bits are not all zero, we discard the CLZ of the low 4 bits.
            Operand mask = context.AddIntrinsic(Intrinsic.X86Pcmpeqb, hiClz, c04);

            loClz = context.AddIntrinsic(Intrinsic.X86Pand, loClz, mask);

            return context.AddIntrinsic(Intrinsic.X86Paddb, loClz, hiClz);
        }
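
        // The 16-bit variant reuses the per-byte result: when the high byte of a
        // halfword is all zero (its CLZ equals 8), the low byte's CLZ is added on
        // top of it; otherwise only the high byte's CLZ is kept.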
        private static Operand Clz_V_I16(ArmEmitterContext context, Operand arg)
        {
            if (!Optimizations.UseSsse3)
            {
                return default;
            }

            Operand maskSwap = X86GetElements(context, 0x80_0f_80_0d_80_0b_80_09, 0x80_07_80_05_80_03_80_01);
            Operand maskLow = X86GetAllElements(context, 0x00ff_00ff);
            Operand c0008 = X86GetAllElements(context, 0x0008_0008);

            // CLZ pair of high 8 and low 8 bits of elements in arg.
            Operand hiloClz = Clz_V_I8(context, arg);

            // Get CLZ of low 8 bits in each pair.
            Operand loClz = context.AddIntrinsic(Intrinsic.X86Pand, hiloClz, maskLow);

            // Get CLZ of high 8 bits in each pair.
            Operand hiClz = context.AddIntrinsic(Intrinsic.X86Pshufb, hiloClz, maskSwap);

            // If high 8 bits are not all zero, we discard the CLZ of the low 8 bits.
            Operand mask = context.AddIntrinsic(Intrinsic.X86Pcmpeqw, hiClz, c0008);

            loClz = context.AddIntrinsic(Intrinsic.X86Pand, loClz, mask);

            return context.AddIntrinsic(Intrinsic.X86Paddw, loClz, hiClz);
        }

        private static Operand Clz_V_I32(ArmEmitterContext context, Operand arg)
        {
            // TODO: Use vplzcntd when AVX-512 is supported.
            if (!Optimizations.UseSse2)
            {
                return default;
            }

            Operand AddVectorI32(Operand op0, Operand op1) => context.AddIntrinsic(Intrinsic.X86Paddd, op0, op1);
            Operand SubVectorI32(Operand op0, Operand op1) => context.AddIntrinsic(Intrinsic.X86Psubd, op0, op1);
            Operand ShiftRightVectorUI32(Operand op0, int imm8) => context.AddIntrinsic(Intrinsic.X86Psrld, op0, Const(imm8));
            Operand OrVector(Operand op0, Operand op1) => context.AddIntrinsic(Intrinsic.X86Por, op0, op1);
            Operand AndVector(Operand op0, Operand op1) => context.AddIntrinsic(Intrinsic.X86Pand, op0, op1);
            Operand NotVector(Operand op0) => context.AddIntrinsic(Intrinsic.X86Pandn, op0, context.VectorOne());
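
            // PANDN computes (~first & second), so AND-NOT against the all-ones vector
            // is a bitwise NOT. Scalar equivalent of the sequence below:
            //   x |= x >> 1; x |= x >> 2; x |= x >> 4; x |= x >> 8; x |= x >> 16;
            //   return PopCount(~x);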
            Operand c55555555 = X86GetAllElements(context, 0x55555555);
            Operand c33333333 = X86GetAllElements(context, 0x33333333);
            Operand c0f0f0f0f = X86GetAllElements(context, 0x0f0f0f0f);
            Operand c0000003f = X86GetAllElements(context, 0x0000003f);

            Operand tmp0;
            Operand tmp1;
            Operand res;

            // Set all bits after highest set bit to 1.
            res = OrVector(ShiftRightVectorUI32(arg, 1), arg);
            res = OrVector(ShiftRightVectorUI32(res, 2), res);
            res = OrVector(ShiftRightVectorUI32(res, 4), res);
            res = OrVector(ShiftRightVectorUI32(res, 8), res);
            res = OrVector(ShiftRightVectorUI32(res, 16), res);

            // Make leading 0s into leading 1s.
            res = NotVector(res);

            // Count leading 1s, which is the population count.
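            // Standard SWAR popcount: fold adjacent bit pairs, then nibbles, then bytes,
            // and finally mask each 32-bit lane to its 6-bit count (at most 32).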
            tmp0 = ShiftRightVectorUI32(res, 1);
            tmp0 = AndVector(tmp0, c55555555);
            res = SubVectorI32(res, tmp0);

            tmp0 = ShiftRightVectorUI32(res, 2);
            tmp0 = AndVector(tmp0, c33333333);
            tmp1 = AndVector(res, c33333333);
            res = AddVectorI32(tmp0, tmp1);

            tmp0 = ShiftRightVectorUI32(res, 4);
            tmp0 = AddVectorI32(tmp0, res);
            res = AndVector(tmp0, c0f0f0f0f);

            tmp0 = ShiftRightVectorUI32(res, 8);
            res = AddVectorI32(tmp0, res);

            tmp0 = ShiftRightVectorUI32(res, 16);
            res = AddVectorI32(tmp0, res);

            res = AndVector(res, c0000003f);

            return res;
        }

        public static void Cnt_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64CntV);
            }
            else
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                Operand res = context.VectorZero();

                int elems = op.RegisterSize == RegisterSize.Simd128 ? 16 : 8;
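
                // CNT is defined on byte elements only; count the set bits of each
                // of the 8 or 16 bytes independently.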
                for (int index = 0; index < elems; index++)
                {
                    Operand ne = EmitVectorExtractZx(context, op.Rn, index, 0);

                    Operand de;

                    if (Optimizations.UsePopCnt)
                    {
                        de = context.AddIntrinsicLong(Intrinsic.X86Popcnt, ne);
                    }
                    else
                    {
                        de = EmitCountSetBits8(context, ne);
                    }

                    res = EmitVectorInsert(context, res, de, index, 0);
                }

                context.Copy(GetVec(op.Rd), res);
            }
        }

        public static void Fabd_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FabdS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                int sizeF = op.Size & 1;
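                // sizeF: 0 = single precision, 1 = double precision.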

                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subss, GetVec(op.Rn), GetVec(op.Rm));

                    res = EmitFloatAbs(context, res, true, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subsd, GetVec(op.Rn), GetVec(op.Rm));

                    res = EmitFloatAbs(context, res, false, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);

                    return EmitUnaryMathCall(context, nameof(Math.Abs), res);
                });
            }
        }

        public static void Fabd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FabdV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subps, GetVec(op.Rn), GetVec(op.Rm));

                    res = EmitFloatAbs(context, res, true, true);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subpd, GetVec(op.Rn), GetVec(op.Rm));

                    res = EmitFloatAbs(context, res, false, true);

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);

                    return EmitUnaryMathCall(context, nameof(Math.Abs), res);
                });
            }
        }

        public static void Fabs_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FabsS);
            }
            else if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                if (op.Size == 0)
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), true, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), false, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Abs), op1);
                });
            }
        }

        public static void Fabs_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FabsV);
            }
            else if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), true, true);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), false, true);

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Abs), op1);
                });
            }
        }

        public static void Fadd_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FaddS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Addss, Intrinsic.X86Addsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Fadd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FaddV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Addps, Intrinsic.X86Addpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Faddp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FaddpS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse3)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                if ((op.Size & 1) == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddps, GetVec(op.Rn), GetVec(op.Rn));

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if ((op.Size & 1) == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddpd, GetVec(op.Rn), GetVec(op.Rn));

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Faddp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FaddpV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
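                // SSE4.1 path: pair up adjacent elements, resolve NaN inputs to ARM's
                // semantics, honor flush-to-zero mode, then perform the packed add.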
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;

                            Intrinsic addInst = (op.Size & 1) == 0 ? Intrinsic.X86Addps : Intrinsic.X86Addpd;

                            return context.AddIntrinsic(addInst, op1, op2);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Fdiv_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FdivS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Divss, Intrinsic.X86Divsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPDiv), op1, op2);
                });
            }
        }

        public static void Fdiv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FdivV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Divps, Intrinsic.X86Divpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPDiv), op1, op2);
                });
            }
        }

        public static void Fmadd_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarTernaryOpF(context, Intrinsic.Arm64FmaddS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
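                // FastFP: a separate multiply and add (two roundings) stands in for
                // the fused multiply-add the ARM instruction specifies.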
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.Size == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Addss, a, res);

                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Addsd, a, res);

                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fmax_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FmaxS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
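                // x86 MAXSS/MAXPS returns the second operand when an input is NaN, so
                // NaN inputs are resolved up front to match ARM's propagation rules.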
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                    }, scalar: true, op1, op2);
                }, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmax_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FmaxV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                    }, scalar: false, op1, op2);
                }, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmaxnm_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FmaxnmS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
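                // maxnum semantics: a quiet NaN operand is treated as missing and the
                // other operand is returned, unlike plain FMAX.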
                EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnm_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FmaxnmV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FmaxnmpS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2ScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: true, op1, op2);
                });
            }
            else
            {
                EmitScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FmaxnmpV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FmaxnmvV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FmaxpV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmaxv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FmaxvV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmin_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FminS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                    }, scalar: true, op1, op2);
                }, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fmin_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FminV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                    }, scalar: false, op1, op2);
                }, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fminnm_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FminnmS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnm_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FminnmV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FminnmpS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2ScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: true, op1, op2);
                });
            }
            else
            {
                EmitScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FminnmpV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FminnmvV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FminpV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fminv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FminvV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fmla_Se(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarTernaryOpFRdByElem(context, Intrinsic.Arm64FmlaSe);
            }
            else
            {
                EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Fmla_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpFRd(context, Intrinsic.Arm64FmlaV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);

                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fmla_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpFRdByElem(context, Intrinsic.Arm64FmlaVe);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
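                    // Broadcast the selected element of m to all four lanes: SHUFPS takes
                    // four 2-bit selectors, so the same index fills every field.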
                int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;

                Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(d, res);
            }
            else /* if (sizeF == 1) */
            {
                int shuffleMask = op.Index | op.Index << 1;

                Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);

                context.Copy(d, res);
            }
        }
        else
        {
            EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
            });
        }
    }

    public static void Fmls_Se(ArmEmitterContext context) // Fused.
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarTernaryOpFRdByElem(context, Intrinsic.Arm64FmlsSe);
        }
        else
        {
            EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }
    }

    public static void Fmls_V(ArmEmitterContext context) // Fused.
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorTernaryOpFRd(context, Intrinsic.Arm64FmlsV);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse2)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand d = GetVec(op.Rd);
            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            int sizeF = op.Size & 1;

            if (sizeF == 0)
            {
                Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(d, res);
            }
            else /* if (sizeF == 1) */
            {
                Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);

                context.Copy(d, res);
            }
        }
        else
        {
            EmitVectorTernaryOpF(context, (op1, op2, op3) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
            });
        }
    }

    public static void Fmls_Ve(ArmEmitterContext context) // Fused.
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorTernaryOpFRdByElem(context, Intrinsic.Arm64FmlsVe);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse2)
        {
            OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;

            Operand d = GetVec(op.Rd);
            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            int sizeF = op.Size & 1;

            if (sizeF == 0)
            {
                int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;

                Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(d, res);
            }
            else /* if (sizeF == 1) */
            {
                int shuffleMask = op.Index | op.Index << 1;

                Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);

                context.Copy(d, res);
            }
        }
        else
        {
            EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
            });
        }
    }

    public static void Fmsub_S(ArmEmitterContext context) // Fused.
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarTernaryOpF(context, Intrinsic.Arm64FmsubS);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse2)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand d = GetVec(op.Rd);
            Operand a = GetVec(op.Ra);
            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            if (op.Size == 0)
            {
                Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subss, a, res);

                context.Copy(d, context.VectorZeroUpper96(res));
            }
            else /* if (op.Size == 1) */
            {
                Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subsd, a, res);

                context.Copy(d, context.VectorZeroUpper64(res));
            }
        }
        else
        {
            EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
            });
        }
    }

    public static void Fmul_S(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FmulS);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse2)
        {
            EmitScalarBinaryOpF(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd);
        }
        else if (Optimizations.FastFP)
        {
            EmitScalarBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
        }
        else
        {
            EmitScalarBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
            });
        }
    }

    public static void Fmul_Se(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarBinaryOpFByElem(context, Intrinsic.Arm64FmulSe);
        }
        else
        {
            EmitScalarBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
        }
    }

    public static void Fmul_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FmulV);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse2)
        {
            EmitVectorBinaryOpF(context, Intrinsic.X86Mulps, Intrinsic.X86Mulpd);
        }
        else if (Optimizations.FastFP)
        {
            EmitVectorBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
        }
        else
        {
            EmitVectorBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
            });
        }
    }

    public static void Fmul_Ve(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOpFByElem(context, Intrinsic.Arm64FmulVe);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse2)
        {
            OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            int sizeF = op.Size & 1;

            if (sizeF == 0)
            {
                int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;

                Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else /* if (sizeF == 1) */
            {
                int shuffleMask = op.Index | op.Index << 1;

                Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);

                context.Copy(GetVec(op.Rd), res);
            }
        }
        else if (Optimizations.FastFP)
        {
            EmitVectorBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
        }
        else
        {
            EmitVectorBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
            });
        }
    }

    public static void Fmulx_S(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FmulxS);
        }
        else
        {
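            // FPMulX returns ±2.0 for 0 * infinity instead of NaN, which a plain x86
            // multiply cannot reproduce, hence no SSE fast path here.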
            EmitScalarBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }
    }

    public static void Fmulx_Se(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarBinaryOpFByElem(context, Intrinsic.Arm64FmulxSe);
        }
        else
        {
            EmitScalarBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }
    }

    public static void Fmulx_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FmulxV);
        }
        else
        {
            EmitVectorBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }
    }

    public static void Fmulx_Ve(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOpFByElem(context, Intrinsic.Arm64FmulxVe);
        }
        else
        {
            EmitVectorBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }
    }

    public static void Fneg_S(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FnegS);
        }
        else if (Optimizations.UseSse2)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            if (op.Size == 0)
            {
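                // XOR with -0.0 flips just the sign bit.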
                Operand mask = X86GetScalar(context, -0f);

                Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));

                context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
            }
            else /* if (op.Size == 1) */
            {
                Operand mask = X86GetScalar(context, -0d);

                Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));

                context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
            }
        }
        else
        {
            EmitScalarUnaryOpF(context, (op1) => context.Negate(op1));
        }
    }

    public static void Fneg_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FnegV);
        }
        else if (Optimizations.UseSse2)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            int sizeF = op.Size & 1;

            if (sizeF == 0)
            {
                Operand mask = X86GetAllElements(context, -0f);

                Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else /* if (sizeF == 1) */
            {
                Operand mask = X86GetAllElements(context, -0d);

                Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));

                context.Copy(GetVec(op.Rd), res);
            }
        }
        else
        {
            EmitVectorUnaryOpF(context, (op1) => context.Negate(op1));
        }
    }

    public static void Fnmadd_S(ArmEmitterContext context) // Fused.
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarTernaryOpF(context, Intrinsic.Arm64FnmaddS);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse2)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand d = GetVec(op.Rd);
            Operand a = GetVec(op.Ra);
            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);
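
            // FNMADD computes -(a + n * m): negate a via the sign bit, then subtract the product.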
            if (op.Size == 0)
            {
                Operand mask = X86GetScalar(context, -0f);

                Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorps, mask, a);

                Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subss, aNeg, res);

                context.Copy(d, context.VectorZeroUpper96(res));
            }
            else /* if (op.Size == 1) */
            {
                Operand mask = X86GetScalar(context, -0d);

                Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, a);

                Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subsd, aNeg, res);

                context.Copy(d, context.VectorZeroUpper64(res));
            }
        }
        else
        {
            EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPNegMulAdd), op1, op2, op3);
            });
        }
    }

    public static void Fnmsub_S(ArmEmitterContext context) // Fused.
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarTernaryOpF(context, Intrinsic.Arm64FnmsubS);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse2)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand d = GetVec(op.Rd);
            Operand a = GetVec(op.Ra);
            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);
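
            // FNMSUB computes (n * m) - a: negate a, then add the product.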
            if (op.Size == 0)
            {
                Operand mask = X86GetScalar(context, -0f);

                Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorps, mask, a);

                Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Addss, aNeg, res);

                context.Copy(d, context.VectorZeroUpper96(res));
            }
            else /* if (op.Size == 1) */
            {
                Operand mask = X86GetScalar(context, -0d);

                Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, a);

                Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Addsd, aNeg, res);

                context.Copy(d, context.VectorZeroUpper64(res));
            }
        }
        else
        {
            EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPNegMulSub), op1, op2, op3);
            });
        }
    }

    public static void Fnmul_S(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FnmulS);
        }
        else
        {
            EmitScalarBinaryOpF(context, (op1, op2) => context.Negate(context.Multiply(op1, op2)));
        }
    }

    public static void Frecpe_S(ArmEmitterContext context)
    {
        OpCodeSimd op = (OpCodeSimd)context.CurrOp;

        int sizeF = op.Size & 1;

        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrecpeS);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse41 && sizeF == 0)
        {
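            // Rcpss only produces an estimate; the Round32Exp8 helper appears to round it
            // toward the precision of ARM's FPRecipEstimate (an assumption based on its name).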
            Operand res = EmitSse41Round32Exp8OpF(context, context.AddIntrinsic(Intrinsic.X86Rcpss, GetVec(op.Rn)), scalar: true);

            context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
        }
        else
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipEstimate), op1);
            });
        }
    }

    public static void Frecpe_V(ArmEmitterContext context)
    {
        OpCodeSimd op = (OpCodeSimd)context.CurrOp;

        int sizeF = op.Size & 1;

        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrecpeV);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse41 && sizeF == 0)
        {
            Operand res = EmitSse41Round32Exp8OpF(context, context.AddIntrinsic(Intrinsic.X86Rcpps, GetVec(op.Rn)), scalar: false);

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
        else
        {
            EmitVectorUnaryOpF(context, (op1) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipEstimate), op1);
            });
        }
    }

    public static void Frecps_S(ArmEmitterContext context) // Fused.
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FrecpsS);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse41)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            int sizeF = op.Size & 1;

            if (sizeF == 0)
            {
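                // FPRecipStepFused: 2 - n * m, the Newton-Raphson reciprocal refinement step;
                // the select helper then patches the lanes hit by the 0 * infinity special cases.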
                Operand mask = X86GetScalar(context, 2f);

                Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subss, mask, res);

                res = EmitSse41RecipStepSelectOpF(context, n, m, res, mask, scalar: true, sizeF);

                context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
            }
            else /* if (sizeF == 1) */
            {
                Operand mask = X86GetScalar(context, 2d);

                Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subsd, mask, res);

                res = EmitSse41RecipStepSelectOpF(context, n, m, res, mask, scalar: true, sizeF);

                context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
            }
        }
        else
        {
            EmitScalarBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipStepFused), op1, op2);
            });
        }
    }

    public static void Frecps_V(ArmEmitterContext context) // Fused.
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FrecpsV);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse41)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            int sizeF = op.Size & 1;

            if (sizeF == 0)
            {
                Operand mask = X86GetAllElements(context, 2f);

                Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subps, mask, res);

                res = EmitSse41RecipStepSelectOpF(context, n, m, res, mask, scalar: false, sizeF);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else /* if (sizeF == 1) */
            {
                Operand mask = X86GetAllElements(context, 2d);

                Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subpd, mask, res);

                res = EmitSse41RecipStepSelectOpF(context, n, m, res, mask, scalar: false, sizeF);

                context.Copy(GetVec(op.Rd), res);
            }
        }
        else
        {
            EmitVectorBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipStepFused), op1, op2);
            });
        }
    }

    public static void Frecpx_S(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrecpxS);
        }
        else
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecpX), op1);
            });
        }
    }

    public static void Frinta_S(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintaS);
        }
        else if (Optimizations.UseSse41)
        {
            EmitSse41ScalarRoundOpF(context, FPRoundingMode.ToNearestAway);
        }
        else
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
            });
        }
    }

    public static void Frinta_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintaV);
        }
        else if (Optimizations.UseSse41)
        {
            EmitSse41VectorRoundOpF(context, FPRoundingMode.ToNearestAway);
        }
        else
        {
            EmitVectorUnaryOpF(context, (op1) =>
            {
                return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
            });
        }
    }

    public static void Frinti_S(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintiS);
        }
        else
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitRoundByRMode(context, op1);
            });
        }
    }

    public static void Frinti_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintiV);
        }
        else
        {
            EmitVectorUnaryOpF(context, (op1) =>
            {
                return EmitRoundByRMode(context, op1);
            });
        }
    }

    public static void Frintm_S(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintmS);
        }
        else if (Optimizations.UseSse41)
        {
            EmitSse41ScalarRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
        }
        else
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitUnaryMathCall(context, nameof(Math.Floor), op1);
            });
        }
    }

    public static void Frintm_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintmV);
        }
        else if (Optimizations.UseSse41)
        {
            EmitSse41VectorRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
        }
        else
        {
            EmitVectorUnaryOpF(context, (op1) =>
            {
                return EmitUnaryMathCall(context, nameof(Math.Floor), op1);
            });
        }
    }

    public static void Frintn_S(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintnS);
        }
        else if (Optimizations.UseSse41)
        {
            EmitSse41ScalarRoundOpF(context, FPRoundingMode.ToNearest);
        }
        else
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
            });
        }
    }

    public static void Frintn_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintnV);
        }
        else if (Optimizations.UseSse41)
        {
            EmitSse41VectorRoundOpF(context, FPRoundingMode.ToNearest);
        }
        else
        {
            EmitVectorUnaryOpF(context, (op1) =>
            {
                return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
            });
        }
    }

    public static void Frintp_S(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintpS);
        }
        else if (Optimizations.UseSse41)
        {
            EmitSse41ScalarRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
        }
        else
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitUnaryMathCall(context, nameof(Math.Ceiling), op1);
            });
        }
    }

    public static void Frintp_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintpV);
        }
        else if (Optimizations.UseSse41)
        {
            EmitSse41VectorRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
        }
        else
        {
            EmitVectorUnaryOpF(context, (op1) =>
            {
                return EmitUnaryMathCall(context, nameof(Math.Ceiling), op1);
            });
        }
    }

    public static void Frintx_S(ArmEmitterContext context)
    {
        // TODO Arm64: Fast path. Should we set host FPCR?
        EmitScalarUnaryOpF(context, (op1) =>
        {
            return EmitRoundByRMode(context, op1);
        });
    }

    public static void Frintx_V(ArmEmitterContext context)
    {
        // TODO Arm64: Fast path. Should we set host FPCR?
        EmitVectorUnaryOpF(context, (op1) =>
        {
            return EmitRoundByRMode(context, op1);
        });
    }

    public static void Frintz_S(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintzS);
        }
        else if (Optimizations.UseSse41)
        {
            EmitSse41ScalarRoundOpF(context, FPRoundingMode.TowardsZero);
        }
        else
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitUnaryMathCall(context, nameof(Math.Truncate), op1);
            });
        }
    }

    public static void Frintz_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintzV);
        }
        else if (Optimizations.UseSse41)
        {
            EmitSse41VectorRoundOpF(context, FPRoundingMode.TowardsZero);
        }
        else
        {
            EmitVectorUnaryOpF(context, (op1) =>
            {
                return EmitUnaryMathCall(context, nameof(Math.Truncate), op1);
            });
        }
    }

    public static void Frsqrte_S(ArmEmitterContext context)
    {
        OpCodeSimd op = (OpCodeSimd)context.CurrOp;

        int sizeF = op.Size & 1;

        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrsqrteS);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse41 && sizeF == 0)
        {
            Operand res = EmitSse41Round32Exp8OpF(context, context.AddIntrinsic(Intrinsic.X86Rsqrtss, GetVec(op.Rn)), scalar: true);

            context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
        }
        else
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtEstimate), op1);
            });
        }
    }

    public static void Frsqrte_V(ArmEmitterContext context)
    {
        OpCodeSimd op = (OpCodeSimd)context.CurrOp;

        int sizeF = op.Size & 1;

        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrsqrteV);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse41 && sizeF == 0)
        {
            Operand res = EmitSse41Round32Exp8OpF(context, context.AddIntrinsic(Intrinsic.X86Rsqrtps, GetVec(op.Rn)), scalar: false);

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
        else
        {
            EmitVectorUnaryOpF(context, (op1) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtEstimate), op1);
            });
        }
    }

    public static void Frsqrts_S(ArmEmitterContext context) // Fused.
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FrsqrtsS);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse41)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            int sizeF = op.Size & 1;

            if (sizeF == 0)
            {
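                // FPRSqrtStepFused: (3 - n * m) / 2, the Newton-Raphson reciprocal square root step;
                // the select helper patches the 0 * infinity special-case lanes (which yield 1.5).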
                Operand maskHalf = X86GetScalar(context, 0.5f);
                Operand maskThree = X86GetScalar(context, 3f);
                Operand maskOneHalf = X86GetScalar(context, 1.5f);

                Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subss, maskThree, res);
                res = context.AddIntrinsic(Intrinsic.X86Mulss, maskHalf, res);

                res = EmitSse41RecipStepSelectOpF(context, n, m, res, maskOneHalf, scalar: true, sizeF);

                context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
            }
            else /* if (sizeF == 1) */
            {
                Operand maskHalf = X86GetScalar(context, 0.5d);
                Operand maskThree = X86GetScalar(context, 3d);
                Operand maskOneHalf = X86GetScalar(context, 1.5d);

                Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subsd, maskThree, res);
                res = context.AddIntrinsic(Intrinsic.X86Mulsd, maskHalf, res);

                res = EmitSse41RecipStepSelectOpF(context, n, m, res, maskOneHalf, scalar: true, sizeF);

                context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
            }
        }
        else
        {
            EmitScalarBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtStepFused), op1, op2);
            });
        }
    }

    public static void Frsqrts_V(ArmEmitterContext context) // Fused.
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FrsqrtsV);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse41)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            int sizeF = op.Size & 1;

            if (sizeF == 0)
            {
                Operand maskHalf = X86GetAllElements(context, 0.5f);
                Operand maskThree = X86GetAllElements(context, 3f);
                Operand maskOneHalf = X86GetAllElements(context, 1.5f);

                Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subps, maskThree, res);
                res = context.AddIntrinsic(Intrinsic.X86Mulps, maskHalf, res);

                res = EmitSse41RecipStepSelectOpF(context, n, m, res, maskOneHalf, scalar: false, sizeF);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else /* if (sizeF == 1) */
            {
                Operand maskHalf = X86GetAllElements(context, 0.5d);
                Operand maskThree = X86GetAllElements(context, 3d);
                Operand maskOneHalf = X86GetAllElements(context, 1.5d);

                Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subpd, maskThree, res);
                res = context.AddIntrinsic(Intrinsic.X86Mulpd, maskHalf, res);

                res = EmitSse41RecipStepSelectOpF(context, n, m, res, maskOneHalf, scalar: false, sizeF);

                context.Copy(GetVec(op.Rd), res);
            }
        }
        else
        {
            EmitVectorBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtStepFused), op1, op2);
            });
        }
    }

    public static void Fsqrt_S(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FsqrtS);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse2)
        {
            EmitScalarUnaryOpF(context, Intrinsic.X86Sqrtss, Intrinsic.X86Sqrtsd);
        }
        else
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSqrt), op1);
            });
        }
    }

    public static void Fsqrt_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FsqrtV);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse2)
        {
            EmitVectorUnaryOpF(context, Intrinsic.X86Sqrtps, Intrinsic.X86Sqrtpd);
        }
        else
        {
            EmitVectorUnaryOpF(context, (op1) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSqrt), op1);
            });
        }
    }

    public static void Fsub_S(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FsubS);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse2)
        {
            EmitScalarBinaryOpF(context, Intrinsic.X86Subss, Intrinsic.X86Subsd);
        }
        else if (Optimizations.FastFP)
        {
            EmitScalarBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
        }
        else
        {
            EmitScalarBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
            });
        }
    }

    public static void Fsub_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FsubV);
        }
        else if (Optimizations.FastFP && Optimizations.UseSse2)
        {
            EmitVectorBinaryOpF(context, Intrinsic.X86Subps, Intrinsic.X86Subpd);
        }
        else if (Optimizations.FastFP)
        {
            EmitVectorBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
        }
        else
        {
            EmitVectorBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
            });
        }
    }

    public static void Mla_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64MlaV);
        }
        else if (Optimizations.UseSse41)
        {
            EmitSse41VectorMul_AddSub(context, AddSub.Add);
        }
        else
        {
            EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }
    }

    public static void Mla_Ve(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorTernaryOpRdByElem(context, Intrinsic.Arm64MlaVe);
        }
        else
        {
            EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }
    }

    public static void Mls_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64MlsV);
        }
        else if (Optimizations.UseSse41)
        {
            EmitSse41VectorMul_AddSub(context, AddSub.Subtract);
        }
        else
        {
            EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }
    }

    public static void Mls_Ve(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorTernaryOpRdByElem(context, Intrinsic.Arm64MlsVe);
        }
        else
        {
            EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }
    }

    public static void Mul_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64MulV);
        }
        else if (Optimizations.UseSse41)
        {
            EmitSse41VectorMul_AddSub(context, AddSub.None);
        }
        else
        {
            EmitVectorBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
        }
    }

    public static void Mul_Ve(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOpByElem(context, Intrinsic.Arm64MulVe);
        }
        else
        {
            EmitVectorBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
        }
    }

    public static void Neg_S(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarUnaryOp(context, Intrinsic.Arm64NegS);
        }
        else
        {
            EmitScalarUnaryOpSx(context, (op1) => context.Negate(op1));
        }
    }

    public static void Neg_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64NegV);
        }
        else if (Optimizations.UseSse2)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
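
            // x86 has no packed integer negate, so compute 0 - n instead.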
            Intrinsic subInst = X86PsubInstruction[op.Size];

            Operand res = context.AddIntrinsic(subInst, context.VectorZero(), GetVec(op.Rn));

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
        else
        {
            EmitVectorUnaryOpSx(context, (op1) => context.Negate(op1));
        }
    }

    public static void Pmull_V(ArmEmitterContext context)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

        if (Optimizations.UseArm64Pmull)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64PmullV);
        }
        else if (Optimizations.UsePclmulqdq && op.Size == 3)
        {
            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);
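
            // The immediate selects which 64-bit halves to multiply:
            // 0b0000_0000 = low halves (PMULL), 0b0001_0001 = high halves (PMULL2).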
            int imm8 = op.RegisterSize == RegisterSize.Simd64 ? 0b0000_0000 : 0b0001_0001;

            Operand res = context.AddIntrinsic(Intrinsic.X86Pclmulqdq, n, m, Const(imm8));

            context.Copy(GetVec(op.Rd), res);
        }
        else if (Optimizations.UseSse41)
        {
            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                n = context.VectorZeroUpper64(n);
                m = context.VectorZeroUpper64(m);
            }
            else /* if (op.RegisterSize == RegisterSize.Simd128) */
            {
                n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
            }

            Operand res = context.VectorZero();

            if (op.Size == 0)
            {
                n = context.AddIntrinsic(Intrinsic.X86Pmovzxbw, n);
                m = context.AddIntrinsic(Intrinsic.X86Pmovzxbw, m);
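
                // Carry-less multiply via shift-and-XOR: for each bit i of every n element,
                // XOR (m << i) into the result. The Psllw/Psraw pair replicates bit i across
                // the whole 16-bit lane to build the mask.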
                for (int i = 0; i < 8; i++)
                {
                    Operand mask = context.AddIntrinsic(Intrinsic.X86Psllw, n, Const(15 - i));
                    mask = context.AddIntrinsic(Intrinsic.X86Psraw, mask, Const(15));

                    Operand tmp = context.AddIntrinsic(Intrinsic.X86Psllw, m, Const(i));
                    tmp = context.AddIntrinsic(Intrinsic.X86Pand, tmp, mask);

                    res = context.AddIntrinsic(Intrinsic.X86Pxor, res, tmp);
                }
            }
            else /* if (op.Size == 3) */
            {
                Operand zero = context.VectorZero();

                for (int i = 0; i < 64; i++)
                {
                    Operand mask = context.AddIntrinsic(Intrinsic.X86Movlhps, n, n);
                    mask = context.AddIntrinsic(Intrinsic.X86Psllq, mask, Const(63 - i));
                    mask = context.AddIntrinsic(Intrinsic.X86Psrlq, mask, Const(63));
                    mask = context.AddIntrinsic(Intrinsic.X86Psubq, zero, mask);

                    Operand tmp = EmitSse2Sll_128(context, m, i);
                    tmp = context.AddIntrinsic(Intrinsic.X86Pand, tmp, mask);

                    res = context.AddIntrinsic(Intrinsic.X86Pxor, res, tmp);
                }
            }

            context.Copy(GetVec(op.Rd), res);
        }
        else
        {
            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            Operand res;

            if (op.Size == 0)
            {
                res = context.VectorZero();

                int part = op.RegisterSize == RegisterSize.Simd64 ? 0 : 8;

                for (int index = 0; index < 8; index++)
                {
                    Operand ne = context.VectorExtract8(n, part + index);
                    Operand me = context.VectorExtract8(m, part + index);

                    Operand de = EmitPolynomialMultiply(context, ne, me, 8);

                    res = EmitVectorInsert(context, res, de, index, 1);
                }
            }
            else /* if (op.Size == 3) */
            {
                int part = op.RegisterSize == RegisterSize.Simd64 ? 0 : 1;

                Operand ne = context.VectorExtract(OperandType.I64, n, part);
                Operand me = context.VectorExtract(OperandType.I64, m, part);

                res = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.PolynomialMult64_128)), ne, me);
            }

            context.Copy(GetVec(op.Rd), res);
        }
    }

    public static void Raddhn_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64RaddhnV);
        }
        else
        {
            EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: true);
        }
    }

    public static void Rsubhn_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64RsubhnV);
        }
        else
        {
            EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: true);
        }
    }

    public static void Saba_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64SabaV);
        }
        else
        {
            EmitVectorTernaryOpSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }
    }

    public static void Sabal_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64SabalV);
        }
        else
        {
            EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }
    }

    public static void Sabd_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SabdV);
        }
        else if (Optimizations.UseSse41)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            EmitSse41VectorSabdOp(context, op, n, m, isLong: false);
        }
        else
        {
            EmitVectorBinaryOpSx(context, (op1, op2) =>
            {
                return EmitAbs(context, context.Subtract(op1, op2));
            });
        }
    }

    public static void Sabdl_V(ArmEmitterContext context)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SabdlV);
        }
        else if (Optimizations.UseSse41 && op.Size < 2)
        {
            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            if (op.RegisterSize == RegisterSize.Simd128)
            {
                n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
            }

            Intrinsic movInst = op.Size == 0
                ? Intrinsic.X86Pmovsxbw
                : Intrinsic.X86Pmovsxwd;

            n = context.AddIntrinsic(movInst, n);
            m = context.AddIntrinsic(movInst, m);

            EmitSse41VectorSabdOp(context, op, n, m, isLong: true);
        }
        else
        {
            EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) =>
            {
                return EmitAbs(context, context.Subtract(op1, op2));
            });
        }
    }

    public static void Sadalp_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOpRd(context, Intrinsic.Arm64SadalpV);
        }
        else
        {
            EmitAddLongPairwise(context, signed: true, accumulate: true);
        }
    }

    public static void Saddl_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SaddlV);
        }
        else if (Optimizations.UseSse41)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);
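
            // For the SADDL2 (Simd128) form the source elements are the upper halves,
            // so shift them down into the low 64 bits first.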
            if (op.RegisterSize == RegisterSize.Simd128)
            {
                n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
            }

            Intrinsic movInst = X86PmovsxInstruction[op.Size];

            n = context.AddIntrinsic(movInst, n);
            m = context.AddIntrinsic(movInst, m);

            Intrinsic addInst = X86PaddInstruction[op.Size + 1];

            context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
        }
        else
        {
            EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
        }
    }

    public static void Saddlp_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64SaddlpV);
        }
        else
        {
            EmitAddLongPairwise(context, signed: true, accumulate: false);
        }
    }

    public static void Saddlv_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64SaddlvV);
        }
        else
        {
            EmitVectorLongAcrossVectorOpSx(context, (op1, op2) => context.Add(op1, op2));
        }
    }

    public static void Saddw_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SaddwV);
        }
        else if (Optimizations.UseSse41)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            if (op.RegisterSize == RegisterSize.Simd128)
            {
                m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
            }

            Intrinsic movInst = X86PmovsxInstruction[op.Size];

            m = context.AddIntrinsic(movInst, m);

            Intrinsic addInst = X86PaddInstruction[op.Size + 1];

            context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
        }
        else
        {
            EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
        }
    }

    public static void Shadd_V(ArmEmitterContext context)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64ShaddV);
        }
        else if (Optimizations.UseSse2 && op.Size > 0)
        {
            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);
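
            // Overflow-free halving add: a + b == 2 * (a & b) + (a ^ b),
            // so (a + b) >> 1 == (a & b) + ((a ^ b) >> 1).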
            Operand res = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
            Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);

            Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psraw : Intrinsic.X86Psrad;

            res2 = context.AddIntrinsic(shiftInst, res2, Const(1));

            Intrinsic addInst = X86PaddInstruction[op.Size];

            res = context.AddIntrinsic(addInst, res, res2);

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
        else
        {
            EmitVectorBinaryOpSx(context, (op1, op2) =>
            {
                return context.ShiftRightSI(context.Add(op1, op2), Const(1));
            });
        }
    }

    public static void Shsub_V(ArmEmitterContext context)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64ShsubV);
        }
        else if (Optimizations.UseSse2 && op.Size < 2)
        {
            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);
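
            // Signed halving subtract via unsigned average: bias both inputs by 0x80 (0x8000),
            // then nPlusMask - pavg(nPlusMask, mPlusMask) == (n - m) >> 1, the biases cancelling.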
            Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));

            Intrinsic addInst = X86PaddInstruction[op.Size];

            Operand nPlusMask = context.AddIntrinsic(addInst, n, mask);
            Operand mPlusMask = context.AddIntrinsic(addInst, m, mask);

            Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;

            Operand res = context.AddIntrinsic(avgInst, nPlusMask, mPlusMask);

            Intrinsic subInst = X86PsubInstruction[op.Size];

            res = context.AddIntrinsic(subInst, nPlusMask, res);

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
        else
        {
            EmitVectorBinaryOpSx(context, (op1, op2) =>
            {
                return context.ShiftRightSI(context.Subtract(op1, op2), Const(1));
            });
        }
    }

    public static void Smax_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SmaxV);
        }
        else if (Optimizations.UseSse41)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            Intrinsic maxInst = X86PmaxsInstruction[op.Size];

            Operand res = context.AddIntrinsic(maxInst, n, m);

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
        else
        {
            EmitVectorBinaryOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
        }
    }

    public static void Smaxp_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SmaxpV);
        }
        else if (Optimizations.UseSsse3)
        {
            EmitSsse3VectorPairwiseOp(context, X86PmaxsInstruction);
        }
        else
        {
            EmitVectorPairwiseOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
        }
    }

    public static void Smaxv_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64SmaxvV);
        }
        else
        {
            EmitVectorAcrossVectorOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
        }
    }

    public static void Smin_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SminV);
        }
        else if (Optimizations.UseSse41)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            Intrinsic minInst = X86PminsInstruction[op.Size];

            Operand res = context.AddIntrinsic(minInst, n, m);

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
        else
        {
            EmitVectorBinaryOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
        }
    }

    public static void Sminp_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SminpV);
        }
        else if (Optimizations.UseSsse3)
        {
            EmitSsse3VectorPairwiseOp(context, X86PminsInstruction);
        }
        else
        {
            EmitVectorPairwiseOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
        }
    }

    public static void Sminv_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64SminvV);
        }
        else
        {
            EmitVectorAcrossVectorOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
        }
    }

    public static void Smlal_V(ArmEmitterContext context)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64SmlalV);
        }
        else if (Optimizations.UseSse41 && op.Size < 2)
        {
            Operand d = GetVec(op.Rd);
            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            if (op.RegisterSize == RegisterSize.Simd128)
            {
                n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
            }

            Intrinsic movInst = X86PmovsxInstruction[op.Size];

            n = context.AddIntrinsic(movInst, n);
            m = context.AddIntrinsic(movInst, m);

            Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;

            Operand res = context.AddIntrinsic(mullInst, n, m);

            Intrinsic addInst = X86PaddInstruction[op.Size + 1];

            context.Copy(d, context.AddIntrinsic(addInst, d, res));
        }
        else
        {
            EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }
    }

    public static void Smlal_Ve(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorTernaryOpRdByElem(context, Intrinsic.Arm64SmlalVe);
        }
        else
        {
            EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }
    }

    public static void Smlsl_V(ArmEmitterContext context)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64SmlslV);
        }
        else if (Optimizations.UseSse41 && op.Size < 2)
        {
            Operand d = GetVec(op.Rd);
            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            if (op.RegisterSize == RegisterSize.Simd128)
            {
                n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
            }

            Intrinsic movInst = op.Size == 0 ? Intrinsic.X86Pmovsxbw : Intrinsic.X86Pmovsxwd;

            n = context.AddIntrinsic(movInst, n);
            m = context.AddIntrinsic(movInst, m);

            Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;

            Operand res = context.AddIntrinsic(mullInst, n, m);

            Intrinsic subInst = X86PsubInstruction[op.Size + 1];

            context.Copy(d, context.AddIntrinsic(subInst, d, res));
        }
        else
        {
            EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }
    }

    public static void Smlsl_Ve(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorTernaryOpRdByElem(context, Intrinsic.Arm64SmlslVe);
        }
        else
        {
            EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }
    }

    public static void Smull_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SmullV);
        }
        else
        {
            EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Multiply(op1, op2));
        }
    }

    public static void Smull_Ve(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorBinaryOpByElem(context, Intrinsic.Arm64SmullVe);
        }
        else
        {
            EmitVectorWidenBinaryOpByElemSx(context, (op1, op2) => context.Multiply(op1, op2));
        }
    }

    public static void Sqabs_S(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarSaturatingUnaryOp(context, Intrinsic.Arm64SqabsS);
        }
        else
        {
            EmitScalarSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }
    }

    public static void Sqabs_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorSaturatingUnaryOp(context, Intrinsic.Arm64SqabsV);
        }
        else
        {
            EmitVectorSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }
    }

    public static void Sqadd_S(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOp(context, Intrinsic.Arm64SqaddS);
        }
        else
        {
            EmitScalarSaturatingBinaryOpSx(context, flags: SaturatingFlags.Add);
        }
    }

    public static void Sqadd_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOp(context, Intrinsic.Arm64SqaddV);
        }
        else
        {
            EmitVectorSaturatingBinaryOpSx(context, flags: SaturatingFlags.Add);
        }
    }

    public static void Sqdmulh_S(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOp(context, Intrinsic.Arm64SqdmulhS);
        }
        else
        {
            EmitScalarSaturatingBinaryOpSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false));
        }
    }

    public static void Sqdmulh_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOp(context, Intrinsic.Arm64SqdmulhV);
        }
        else
        {
            EmitVectorSaturatingBinaryOpSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false));
        }
    }

    public static void Sqdmulh_Ve(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpByElem(context, Intrinsic.Arm64SqdmulhVe);
        }
        else
        {
            EmitVectorSaturatingBinaryOpByElemSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false));
        }
    }

    public static void Sqneg_S(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitScalarSaturatingUnaryOp(context, Intrinsic.Arm64SqnegS);
        }
        else
        {
            EmitScalarSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
        }
    }

    public static void Sqneg_V(ArmEmitterContext context)
    {
        if (Optimizations.UseAdvSimd)
        {
            InstEmitSimdHelperArm64.EmitVectorSaturatingUnaryOp(context, Intrinsic.Arm64SqnegV);
  2880. }
  2881. else
  2882. {
  2883. EmitVectorSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
  2884. }
  2885. }
  2886. public static void Sqrdmulh_S(ArmEmitterContext context)
  2887. {
  2888. if (Optimizations.UseAdvSimd)
  2889. {
  2890. InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOp(context, Intrinsic.Arm64SqrdmulhS);
  2891. }
  2892. else
  2893. {
  2894. EmitScalarSaturatingBinaryOpSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true));
  2895. }
  2896. }
  2897. public static void Sqrdmulh_V(ArmEmitterContext context)
  2898. {
  2899. if (Optimizations.UseAdvSimd)
  2900. {
  2901. InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOp(context, Intrinsic.Arm64SqrdmulhV);
  2902. }
  2903. else
  2904. {
  2905. EmitVectorSaturatingBinaryOpSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true));
  2906. }
  2907. }
  2908. public static void Sqrdmulh_Ve(ArmEmitterContext context)
  2909. {
  2910. if (Optimizations.UseAdvSimd)
  2911. {
  2912. InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpByElem(context, Intrinsic.Arm64SqrdmulhVe);
  2913. }
  2914. else
  2915. {
  2916. EmitVectorSaturatingBinaryOpByElemSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true));
  2917. }
  2918. }
  2919. public static void Sqsub_S(ArmEmitterContext context)
  2920. {
  2921. if (Optimizations.UseAdvSimd)
  2922. {
  2923. InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOp(context, Intrinsic.Arm64SqsubS);
  2924. }
  2925. else
  2926. {
  2927. EmitScalarSaturatingBinaryOpSx(context, flags: SaturatingFlags.Sub);
  2928. }
  2929. }
  2930. public static void Sqsub_V(ArmEmitterContext context)
  2931. {
  2932. if (Optimizations.UseAdvSimd)
  2933. {
  2934. InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOp(context, Intrinsic.Arm64SqsubV);
  2935. }
  2936. else
  2937. {
  2938. EmitVectorSaturatingBinaryOpSx(context, flags: SaturatingFlags.Sub);
  2939. }
  2940. }
  2941. public static void Sqxtn_S(ArmEmitterContext context)
  2942. {
  2943. if (Optimizations.UseAdvSimd)
  2944. {
  2945. InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOpRd(context, Intrinsic.Arm64SqxtnS);
  2946. }
  2947. else
  2948. {
  2949. EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxSx);
  2950. }
  2951. }
  2952. public static void Sqxtn_V(ArmEmitterContext context)
  2953. {
  2954. if (Optimizations.UseAdvSimd)
  2955. {
  2956. InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpRd(context, Intrinsic.Arm64SqxtnV);
  2957. }
  2958. else
  2959. {
  2960. EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxSx);
  2961. }
  2962. }
  2963. public static void Sqxtun_S(ArmEmitterContext context)
  2964. {
  2965. if (Optimizations.UseAdvSimd)
  2966. {
  2967. InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOpRd(context, Intrinsic.Arm64SqxtunS);
  2968. }
  2969. else
  2970. {
  2971. EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxZx);
  2972. }
  2973. }
  2974. public static void Sqxtun_V(ArmEmitterContext context)
  2975. {
  2976. if (Optimizations.UseAdvSimd)
  2977. {
  2978. InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpRd(context, Intrinsic.Arm64SqxtunV);
  2979. }
  2980. else
  2981. {
  2982. EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxZx);
  2983. }
  2984. }
        public static void Srhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SrhaddV);
            }
            else if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));
                Intrinsic subInst = X86PsubInstruction[op.Size];
                Operand nMinusMask = context.AddIntrinsic(subInst, n, mask);
                Operand mMinusMask = context.AddIntrinsic(subInst, m, mask);
                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
                Operand res = context.AddIntrinsic(avgInst, nMinusMask, mMinusMask);
                Intrinsic addInst = X86PaddInstruction[op.Size];
                res = context.AddIntrinsic(addInst, mask, res);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    Operand res = context.Add(op1, op2);
                    res = context.Add(res, Const(1L));
                    return context.ShiftRightSI(res, Const(1));
                });
            }
        }
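
        // The widening ("long"/"wide") forms below share one SSE4.1 idiom: for the
        // second-half (Simd128, "2" suffix) variants, PSRLDQ moves the upper 64 bits of a
        // source down, then PMOVSX/PMOVZX widens each element to double width before the
        // full-width operation runs.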
        public static void Ssubl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SsublV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovsxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Ssubw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SsubwV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovsxInstruction[op.Size];
                m = context.AddIntrinsic(movInst, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Sub_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOp(context, Intrinsic.Arm64SubS);
            }
            else
            {
                EmitScalarBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Sub_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SubV);
            }
            else if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic subInst = X86PsubInstruction[op.Size];
                Operand res = context.AddIntrinsic(subInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Subhn_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64SubhnV);
            }
            else
            {
                EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: false);
            }
        }

        public static void Suqadd_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOpRd(context, Intrinsic.Arm64SuqaddS);
            }
            else
            {
                EmitScalarSaturatingBinaryOpSx(context, flags: SaturatingFlags.Accumulate);
            }
        }

        public static void Suqadd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpRd(context, Intrinsic.Arm64SuqaddV);
            }
            else
            {
                EmitVectorSaturatingBinaryOpSx(context, flags: SaturatingFlags.Accumulate);
            }
        }

        public static void Uaba_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64UabaV);
            }
            else
            {
                EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
                });
            }
        }

        public static void Uabal_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64UabalV);
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
                });
            }
        }

        public static void Uabd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UabdV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                EmitSse41VectorUabdOp(context, op, n, m, isLong: false);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Uabdl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UabdlV);
            }
            else if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovzxbw
                    : Intrinsic.X86Pmovzxwd;
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                EmitSse41VectorUabdOp(context, op, n, m, isLong: true);
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Uadalp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpRd(context, Intrinsic.Arm64UadalpV);
            }
            else
            {
                EmitAddLongPairwise(context, signed: false, accumulate: true);
            }
        }

        public static void Uaddl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UaddlV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovzxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Uaddlp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64UaddlpV);
            }
            else
            {
                EmitAddLongPairwise(context, signed: false, accumulate: false);
            }
        }

        public static void Uaddlv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64UaddlvV);
            }
            else
            {
                EmitVectorLongAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Uaddw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UaddwV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovzxInstruction[op.Size];
                m = context.AddIntrinsic(movInst, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }
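
        // UHADD: unsigned halving add without intermediate overflow, using the carry-save
        // identity (a + b) >> 1 == (a & b) + ((a ^ b) >> 1). Restricted to Size > 0 because
        // SSE has no per-byte shift.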
        public static void Uhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UhaddV);
            }
            else if (Optimizations.UseSse2 && op.Size > 0)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Operand res = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);
                Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psrlw : Intrinsic.X86Psrld;
                res2 = context.AddIntrinsic(shiftInst, res2, Const(1));
                Intrinsic addInst = X86PaddInstruction[op.Size];
                res = context.AddIntrinsic(addInst, res, res2);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return context.ShiftRightUI(context.Add(op1, op2), Const(1));
                });
            }
        }
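
        // UHSUB: unsigned halving subtract. Since PAVG computes (a + b + 1) >> 1,
        // n - PAVG(n, m) == (n - m) >> 1 (mod 2^eSize), which is exactly the halving
        // subtract on each lane.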
        public static void Uhsub_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UhsubV);
            }
            else if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
                Operand res = context.AddIntrinsic(avgInst, n, m);
                Intrinsic subInst = X86PsubInstruction[op.Size];
                res = context.AddIntrinsic(subInst, n, res);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return context.ShiftRightUI(context.Subtract(op1, op2), Const(1));
                });
            }
        }

        public static void Umax_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UmaxV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic maxInst = X86PmaxuInstruction[op.Size];
                Operand res = context.AddIntrinsic(maxInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
            }
        }

        public static void Umaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UmaxpV);
            }
            else if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PmaxuInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
            }
        }

        public static void Umaxv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64UmaxvV);
            }
            else
            {
                EmitVectorAcrossVectorOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
            }
        }

        public static void Umin_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UminV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic minInst = X86PminuInstruction[op.Size];
                Operand res = context.AddIntrinsic(minInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
            }
        }

        public static void Uminp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UminpV);
            }
            else if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PminuInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
            }
        }

        public static void Uminv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64UminvV);
            }
            else
            {
                EmitVectorAcrossVectorOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
            }
        }

        public static void Umlal_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64UmlalV);
            }
            else if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovzxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
                Operand res = context.AddIntrinsic(mullInst, n, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(d, context.AddIntrinsic(addInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umlal_Ve(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRdByElem(context, Intrinsic.Arm64UmlalVe);
            }
            else
            {
                EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umlsl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64UmlslV);
            }
            else if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = op.Size == 0 ? Intrinsic.X86Pmovzxbw : Intrinsic.X86Pmovzxwd;
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
                Operand res = context.AddIntrinsic(mullInst, n, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(d, context.AddIntrinsic(subInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umlsl_Ve(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRdByElem(context, Intrinsic.Arm64UmlslVe);
            }
            else
            {
                EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umull_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UmullV);
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
            }
        }

        public static void Umull_Ve(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpByElem(context, Intrinsic.Arm64UmullVe);
            }
            else
            {
                EmitVectorWidenBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
            }
        }

        public static void Uqadd_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOp(context, Intrinsic.Arm64UqaddS);
            }
            else
            {
                EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Add);
            }
        }

        public static void Uqadd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOp(context, Intrinsic.Arm64UqaddV);
            }
            else
            {
                EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Add);
            }
        }

        public static void Uqsub_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOp(context, Intrinsic.Arm64UqsubS);
            }
            else
            {
                EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
            }
        }

        public static void Uqsub_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOp(context, Intrinsic.Arm64UqsubV);
            }
            else
            {
                EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
            }
        }

        public static void Uqxtn_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOpRd(context, Intrinsic.Arm64UqxtnS);
            }
            else
            {
                EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarZxZx);
            }
        }

        public static void Uqxtn_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpRd(context, Intrinsic.Arm64UqxtnV);
            }
            else
            {
                EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorZxZx);
            }
        }
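
        // URHADD: unsigned rounding halving add; PAVGB/PAVGW already compute
        // (a + b + 1) >> 1, so the intrinsic maps directly.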
        public static void Urhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UrhaddV);
            }
            else if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
                Operand res = context.AddIntrinsic(avgInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    Operand res = context.Add(op1, op2);
                    res = context.Add(res, Const(1L));
                    return context.ShiftRightUI(res, Const(1));
                });
            }
        }

        public static void Usqadd_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOpRd(context, Intrinsic.Arm64UsqaddS);
            }
            else
            {
                EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
            }
        }

        public static void Usqadd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpRd(context, Intrinsic.Arm64UsqaddV);
            }
            else
            {
                EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
            }
        }

        public static void Usubl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UsublV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovzxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Usubw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UsubwV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovzxInstruction[op.Size];
                m = context.AddIntrinsic(movInst, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        private static Operand EmitAbs(ArmEmitterContext context, Operand value)
        {
            Operand isPositive = context.ICompareGreaterOrEqual(value, Const(value.Type, 0));
            return context.ConditionalSelect(isPositive, value, context.Negate(value));
        }
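
        // Shared emitter for S/UADDLP and S/UADALP: sums adjacent element pairs into
        // double-width lanes, optionally accumulating the pair sums into the destination.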
        private static void EmitAddLongPairwise(ArmEmitterContext context, bool signed, bool accumulate)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int pairs = op.GetPairsCount() >> op.Size;
            for (int index = 0; index < pairs; index++)
            {
                int pairIndex = index << 1;
                Operand ne0 = EmitVectorExtract(context, op.Rn, pairIndex, op.Size, signed);
                Operand ne1 = EmitVectorExtract(context, op.Rn, pairIndex + 1, op.Size, signed);
                Operand e = context.Add(ne0, ne1);
                if (accumulate)
                {
                    Operand de = EmitVectorExtract(context, op.Rd, index, op.Size + 1, signed);
                    e = context.Add(e, de);
                }
                res = EmitVectorInsert(context, res, e, index, op.Size + 1);
            }
            context.Copy(GetVec(op.Rd), res);
        }
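
        // Core of SQDMULH/SQRDMULH: (2 * n * m) >> eSize, with optional rounding. For
        // 32-bit elements, doubling INT_MIN * INT_MIN wraps the 64-bit intermediate and the
        // rounded shift lands on int.MinValue; it is negated here so the caller's
        // saturating wrapper clamps it to int.MaxValue as the architecture requires.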
        private static Operand EmitDoublingMultiplyHighHalf(
            ArmEmitterContext context,
            Operand n,
            Operand m,
            bool round)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            int eSize = 8 << op.Size;
            Operand res = context.Multiply(n, m);
            if (!round)
            {
                res = context.ShiftRightSI(res, Const(eSize - 1));
            }
            else
            {
                long roundConst = 1L << (eSize - 1);
                res = context.ShiftLeft(res, Const(1));
                res = context.Add(res, Const(roundConst));
                res = context.ShiftRightSI(res, Const(eSize));
                Operand isIntMin = context.ICompareEqual(res, Const((long)int.MinValue));
                res = context.ConditionalSelect(isIntMin, context.Negate(res), res);
            }
            return res;
        }
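
        // Shared emitter for ADDHN/SUBHN (and the rounding RADDHN/RSUBHN): computes the
        // double-width result, keeps its high half, and writes it to the lower or upper
        // half of Rd depending on whether this is the "2" (second-part) encoding.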
        private static void EmitHighNarrow(ArmEmitterContext context, Func2I emit, bool round)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            int elems = 8 >> op.Size;
            int eSize = 8 << op.Size;
            int part = op.RegisterSize == RegisterSize.Simd128 ? elems : 0;
            Operand d = GetVec(op.Rd);
            Operand res = part == 0 ? context.VectorZero() : context.Copy(d);
            long roundConst = 1L << (eSize - 1);
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size + 1);
                Operand me = EmitVectorExtractZx(context, op.Rm, index, op.Size + 1);
                Operand de = emit(ne, me);
                if (round)
                {
                    de = context.Add(de, Const(roundConst));
                }
                de = context.ShiftRightUI(de, Const(eSize));
                res = EmitVectorInsert(context, res, de, part + index, op.Size);
            }
            context.Copy(d, res);
        }

        private static Operand EmitMax64Op(ArmEmitterContext context, Operand op1, Operand op2, bool signed)
        {
            Debug.Assert(op1.Type == OperandType.I64 && op2.Type == OperandType.I64);
            Operand cmp = signed
                ? context.ICompareGreaterOrEqual (op1, op2)
                : context.ICompareGreaterOrEqualUI(op1, op2);
            return context.ConditionalSelect(cmp, op1, op2);
        }

        private static Operand EmitMin64Op(ArmEmitterContext context, Operand op1, Operand op2, bool signed)
        {
            Debug.Assert(op1.Type == OperandType.I64 && op2.Type == OperandType.I64);
            Operand cmp = signed
                ? context.ICompareLessOrEqual (op1, op2)
                : context.ICompareLessOrEqualUI(op1, op2);
            return context.ConditionalSelect(cmp, op1, op2);
        }

        private static void EmitSse41ScalarRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand n = GetVec(op.Rn);
            Operand res;
            if (roundMode != FPRoundingMode.ToNearestAway)
            {
                Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundsd : Intrinsic.X86Roundss;
                res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));
            }
            else
            {
                res = EmitSse41RoundToNearestWithTiesToAwayOpF(context, n, scalar: true);
            }
            if ((op.Size & 1) != 0)
            {
                res = context.VectorZeroUpper64(res);
            }
            else
            {
                res = context.VectorZeroUpper96(res);
            }
            context.Copy(GetVec(op.Rd), res);
        }

        private static void EmitSse41VectorRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand n = GetVec(op.Rn);
            Operand res;
            if (roundMode != FPRoundingMode.ToNearestAway)
            {
                Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundpd : Intrinsic.X86Roundps;
                res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));
            }
            else
            {
                res = EmitSse41RoundToNearestWithTiesToAwayOpF(context, n, scalar: false);
            }
            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }
            context.Copy(GetVec(op.Rd), res);
        }
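
        // Rounds each single-precision value to 8 fraction bits: adding 0x4000 rounds at
        // bit 15 of the bit pattern, the 0xFFFF8000 mask truncates, and NaN/Inf inputs
        // (exponent all ones) are passed through unchanged via the blend. Presumably used
        // to reduce results to the 8-bit precision of ARM's estimate instructions.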
        private static Operand EmitSse41Round32Exp8OpF(ArmEmitterContext context, Operand value, bool scalar)
        {
            Operand roundMask;
            Operand truncMask;
            Operand expMask;
            if (scalar)
            {
                roundMask = X86GetScalar(context, 0x4000);
                truncMask = X86GetScalar(context, unchecked((int)0xFFFF8000));
                expMask = X86GetScalar(context, 0x7F800000);
            }
            else
            {
                roundMask = X86GetAllElements(context, 0x4000);
                truncMask = X86GetAllElements(context, unchecked((int)0xFFFF8000));
                expMask = X86GetAllElements(context, 0x7F800000);
            }
            Operand oValue = value;
            Operand masked = context.AddIntrinsic(Intrinsic.X86Pand, value, expMask);
            Operand isNaNInf = context.AddIntrinsic(Intrinsic.X86Pcmpeqd, masked, expMask);
            value = context.AddIntrinsic(Intrinsic.X86Paddd, value, roundMask);
            value = context.AddIntrinsic(Intrinsic.X86Pand, value, truncMask);
            return context.AddIntrinsic(Intrinsic.X86Blendvps, value, oValue, isNaNInf);
        }
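
        // FRECPS/FRSQRTS special case: ARM defines 0 * Inf operand pairs to produce the
        // step constant (2.0 or 1.5) rather than NaN. The shift left by one discards the
        // sign bit, lanes pairing a zero with an infinity are detected, and `mask` (the
        // constant vector supplied by the caller) is selected there instead of `res`.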
        private static Operand EmitSse41RecipStepSelectOpF(
            ArmEmitterContext context,
            Operand n,
            Operand m,
            Operand res,
            Operand mask,
            bool scalar,
            int sizeF)
        {
            Intrinsic cmpOp;
            Intrinsic shlOp;
            Intrinsic blendOp;
            Operand zero = context.VectorZero();
            Operand expMask;
            if (sizeF == 0)
            {
                cmpOp = Intrinsic.X86Pcmpeqd;
                shlOp = Intrinsic.X86Pslld;
                blendOp = Intrinsic.X86Blendvps;
                expMask = scalar ? X86GetScalar(context, 0x7F800000 << 1) : X86GetAllElements(context, 0x7F800000 << 1);
            }
            else /* if (sizeF == 1) */
            {
                cmpOp = Intrinsic.X86Pcmpeqq;
                shlOp = Intrinsic.X86Psllq;
                blendOp = Intrinsic.X86Blendvpd;
                expMask = scalar ? X86GetScalar(context, 0x7FF0000000000000L << 1) : X86GetAllElements(context, 0x7FF0000000000000L << 1);
            }
            n = context.AddIntrinsic(shlOp, n, Const(1));
            m = context.AddIntrinsic(shlOp, m, Const(1));
            Operand nZero = context.AddIntrinsic(cmpOp, n, zero);
            Operand mZero = context.AddIntrinsic(cmpOp, m, zero);
            Operand nInf = context.AddIntrinsic(cmpOp, n, expMask);
            Operand mInf = context.AddIntrinsic(cmpOp, m, expMask);
            Operand nmZero = context.AddIntrinsic(Intrinsic.X86Por, nZero, mZero);
            Operand nmInf = context.AddIntrinsic(Intrinsic.X86Por, nInf, mInf);
            Operand nmZeroInf = context.AddIntrinsic(Intrinsic.X86Pand, nmZero, nmInf);
            return context.AddIntrinsic(blendOp, res, mask, nmZeroInf);
        }
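
        // Builds per-lane masks of quiet and signaling NaNs: an unordered self-compare
        // flags every NaN, and the quiet bit (bit 22 for singles, bit 51 for doubles)
        // splits them into qNaN and sNaN masks. Pass isQNaN to compute only one of the two.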
        public static void EmitSse2VectorIsNaNOpF(
            ArmEmitterContext context,
            Operand opF,
            out Operand qNaNMask,
            out Operand sNaNMask,
            bool? isQNaN = null)
        {
            IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;
            if ((op.Size & 1) == 0)
            {
                const int QBit = 22;
                Operand qMask = X86GetAllElements(context, 1 << QBit);
                Operand mask1 = context.AddIntrinsic(Intrinsic.X86Cmpps, opF, opF, Const((int)CmpCondition.UnorderedQ));
                Operand mask2 = context.AddIntrinsic(Intrinsic.X86Pand, opF, qMask);
                mask2 = context.AddIntrinsic(Intrinsic.X86Cmpps, mask2, qMask, Const((int)CmpCondition.Equal));
                qNaNMask = isQNaN == null || (bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andps, mask2, mask1) : default;
                sNaNMask = isQNaN == null || !(bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andnps, mask2, mask1) : default;
            }
            else /* if ((op.Size & 1) == 1) */
            {
                const int QBit = 51;
                Operand qMask = X86GetAllElements(context, 1L << QBit);
                Operand mask1 = context.AddIntrinsic(Intrinsic.X86Cmppd, opF, opF, Const((int)CmpCondition.UnorderedQ));
                Operand mask2 = context.AddIntrinsic(Intrinsic.X86Pand, opF, qMask);
                mask2 = context.AddIntrinsic(Intrinsic.X86Cmppd, mask2, qMask, Const((int)CmpCondition.Equal));
                qNaNMask = isQNaN == null || (bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andpd, mask2, mask1) : default;
                sNaNMask = isQNaN == null || !(bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andnpd, mask2, mask1) : default;
            }
        }
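
        // Implements ARM's FPProcessNaNs priority (sNaN of n, else sNaN of m, else qNaN of
        // n, else qNaN of m), quieting the chosen NaN by setting its quiet bit; only fully
        // ordered lanes receive the result of `emit`. When explicit n/m operands are passed
        // in, the result is returned instead of written to Rd so callers can post-process it.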
        public static Operand EmitSse41ProcessNaNsOpF(
            ArmEmitterContext context,
            Func2I emit,
            bool scalar,
            Operand n = default,
            Operand m = default)
        {
            Operand nCopy = n == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rn)) : n;
            Operand mCopy = m == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rm)) : m;
            EmitSse2VectorIsNaNOpF(context, nCopy, out Operand nQNaNMask, out Operand nSNaNMask);
            EmitSse2VectorIsNaNOpF(context, mCopy, out _, out Operand mSNaNMask, isQNaN: false);
            int sizeF = ((IOpCodeSimd)context.CurrOp).Size & 1;
            if (sizeF == 0)
            {
                const int QBit = 22;
                Operand qMask = scalar ? X86GetScalar(context, 1 << QBit) : X86GetAllElements(context, 1 << QBit);
                Operand resNaNMask = context.AddIntrinsic(Intrinsic.X86Pandn, mSNaNMask, nQNaNMask);
                resNaNMask = context.AddIntrinsic(Intrinsic.X86Por, resNaNMask, nSNaNMask);
                Operand resNaN = context.AddIntrinsic(Intrinsic.X86Blendvps, mCopy, nCopy, resNaNMask);
                resNaN = context.AddIntrinsic(Intrinsic.X86Por, resNaN, qMask);
                Operand resMask = context.AddIntrinsic(Intrinsic.X86Cmpps, nCopy, mCopy, Const((int)CmpCondition.OrderedQ));
                Operand res = context.AddIntrinsic(Intrinsic.X86Blendvps, resNaN, emit(nCopy, mCopy), resMask);
                if (n != default || m != default)
                {
                    return res;
                }
                if (scalar)
                {
                    res = context.VectorZeroUpper96(res);
                }
                else if (((OpCodeSimdReg)context.CurrOp).RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);
                return default;
            }
            else /* if (sizeF == 1) */
            {
                const int QBit = 51;
                Operand qMask = scalar ? X86GetScalar(context, 1L << QBit) : X86GetAllElements(context, 1L << QBit);
                Operand resNaNMask = context.AddIntrinsic(Intrinsic.X86Pandn, mSNaNMask, nQNaNMask);
                resNaNMask = context.AddIntrinsic(Intrinsic.X86Por, resNaNMask, nSNaNMask);
                Operand resNaN = context.AddIntrinsic(Intrinsic.X86Blendvpd, mCopy, nCopy, resNaNMask);
                resNaN = context.AddIntrinsic(Intrinsic.X86Por, resNaN, qMask);
                Operand resMask = context.AddIntrinsic(Intrinsic.X86Cmppd, nCopy, mCopy, Const((int)CmpCondition.OrderedQ));
                Operand res = context.AddIntrinsic(Intrinsic.X86Blendvpd, resNaN, emit(nCopy, mCopy), resMask);
                if (n != default || m != default)
                {
                    return res;
                }
                if (scalar)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);
                return default;
            }
        }
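
        // Runs `emit` with the host's FTZ/DAZ MXCSR bits switched to mirror the guest's
        // flush-to-zero mode, then restores them, so subnormal inputs and results behave
        // as they would under FPCR.FZ.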
        public static Operand EmitSseOrAvxHandleFzModeOpF(
            ArmEmitterContext context,
            Func2I emit,
            bool scalar,
            Operand n = default,
            Operand m = default)
        {
            Operand nCopy = n == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rn)) : n;
            Operand mCopy = m == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rm)) : m;
            EmitSseOrAvxEnterFtzAndDazModesOpF(context, out Operand isTrue);
            Operand res = emit(nCopy, mCopy);
            EmitSseOrAvxExitFtzAndDazModesOpF(context, isTrue);
            if (n != default || m != default)
            {
                return res;
            }
            int sizeF = ((IOpCodeSimd)context.CurrOp).Size & 1;
            if (sizeF == 0)
            {
                if (scalar)
                {
                    res = context.VectorZeroUpper96(res);
                }
                else if (((OpCodeSimdReg)context.CurrOp).RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
            }
            else /* if (sizeF == 1) */
            {
                if (scalar)
                {
                    res = context.VectorZeroUpper64(res);
                }
            }
            context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);
            return default;
        }
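
        // FMAX/FMIN with correct signed-zero semantics: x86 MAXPS/MINPS simply return the
        // second source when the inputs compare equal (so -0 vs +0 depends on operand
        // order), so the magnitude is taken with the sign cleared and the result sign is
        // rebuilt as sign(n) AND sign(m) for max (negative only if both are) or
        // sign(n) OR sign(m) for min. NaN lanes are fixed up by the ProcessNaNs wrapper.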
        private static Operand EmitSse2VectorMaxMinOpF(ArmEmitterContext context, Operand n, Operand m, bool isMax)
        {
            IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;
            if ((op.Size & 1) == 0)
            {
                Operand mask = X86GetAllElements(context, -0f);
                Operand res = context.AddIntrinsic(isMax ? Intrinsic.X86Maxps : Intrinsic.X86Minps, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Andnps, mask, res);
                Operand resSign = context.AddIntrinsic(isMax ? Intrinsic.X86Pand : Intrinsic.X86Por, n, m);
                resSign = context.AddIntrinsic(Intrinsic.X86Andps, mask, resSign);
                return context.AddIntrinsic(Intrinsic.X86Por, res, resSign);
            }
            else /* if ((op.Size & 1) == 1) */
            {
                Operand mask = X86GetAllElements(context, -0d);
                Operand res = context.AddIntrinsic(isMax ? Intrinsic.X86Maxpd : Intrinsic.X86Minpd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Andnpd, mask, res);
                Operand resSign = context.AddIntrinsic(isMax ? Intrinsic.X86Pand : Intrinsic.X86Por, n, m);
                resSign = context.AddIntrinsic(Intrinsic.X86Andpd, mask, resSign);
                return context.AddIntrinsic(Intrinsic.X86Por, res, resSign);
            }
        }
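
        // FMAXNM/FMINNM: a quiet NaN paired with a number must lose, so lanes where exactly
        // one operand is a qNaN replace it with the "always loses" infinity (-Inf for max,
        // +Inf for min) before running the regular NaN-propagating max/min.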
        private static Operand EmitSse41MaxMinNumOpF(
            ArmEmitterContext context,
            bool isMaxNum,
            bool scalar,
            Operand n = default,
            Operand m = default)
        {
            Operand nCopy = n == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rn)) : n;
            Operand mCopy = m == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rm)) : m;
            EmitSse2VectorIsNaNOpF(context, nCopy, out Operand nQNaNMask, out _, isQNaN: true);
            EmitSse2VectorIsNaNOpF(context, mCopy, out Operand mQNaNMask, out _, isQNaN: true);
            int sizeF = ((IOpCodeSimd)context.CurrOp).Size & 1;
            if (sizeF == 0)
            {
                Operand negInfMask = scalar
                    ? X86GetScalar (context, isMaxNum ? float.NegativeInfinity : float.PositiveInfinity)
                    : X86GetAllElements(context, isMaxNum ? float.NegativeInfinity : float.PositiveInfinity);
                Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnps, mQNaNMask, nQNaNMask);
                Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnps, nQNaNMask, mQNaNMask);
                nCopy = context.AddIntrinsic(Intrinsic.X86Blendvps, nCopy, negInfMask, nMask);
                mCopy = context.AddIntrinsic(Intrinsic.X86Blendvps, mCopy, negInfMask, mMask);
                // The nested lambda parameters must not shadow the outer ones (CS0136).
                Operand res = EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op3, op4) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op3, op4, isMax: isMaxNum);
                    }, scalar: scalar, op1, op2);
                }, scalar: scalar, nCopy, mCopy);
                if (n != default || m != default)
                {
                    return res;
                }
                if (scalar)
                {
                    res = context.VectorZeroUpper96(res);
                }
                else if (((OpCodeSimdReg)context.CurrOp).RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);
                return default;
            }
            else /* if (sizeF == 1) */
            {
                Operand negInfMask = scalar
                    ? X86GetScalar (context, isMaxNum ? double.NegativeInfinity : double.PositiveInfinity)
                    : X86GetAllElements(context, isMaxNum ? double.NegativeInfinity : double.PositiveInfinity);
                Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnpd, mQNaNMask, nQNaNMask);
                Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnpd, nQNaNMask, mQNaNMask);
                nCopy = context.AddIntrinsic(Intrinsic.X86Blendvpd, nCopy, negInfMask, nMask);
                mCopy = context.AddIntrinsic(Intrinsic.X86Blendvpd, mCopy, negInfMask, mMask);
                Operand res = EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op3, op4) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op3, op4, isMax: isMaxNum);
                    }, scalar: scalar, op1, op2);
                }, scalar: scalar, nCopy, mCopy);
                if (n != default || m != default)
                {
                    return res;
                }
                if (scalar)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);
                return default;
            }
        }

        private enum AddSub
        {
            None,
            Add,
            Subtract
        }
        private static void EmitSse41VectorMul_AddSub(ArmEmitterContext context, AddSub addSub)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);
            Operand res;
            if (op.Size == 0)
            {
                Operand ns8 = context.AddIntrinsic(Intrinsic.X86Psrlw, n, Const(8));
                Operand ms8 = context.AddIntrinsic(Intrinsic.X86Psrlw, m, Const(8));
                res = context.AddIntrinsic(Intrinsic.X86Pmullw, ns8, ms8);
                res = context.AddIntrinsic(Intrinsic.X86Psllw, res, Const(8));
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);
                Operand mask = X86GetAllElements(context, 0x00FF00FF);
                res = context.AddIntrinsic(Intrinsic.X86Pblendvb, res, res2, mask);
            }
            else if (op.Size == 1)
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmulld, n, m);
            }
            Operand d = GetVec(op.Rd);
            if (addSub == AddSub.Add)
            {
                Intrinsic addInst = X86PaddInstruction[op.Size];
                res = context.AddIntrinsic(addInst, d, res);
            }
            else if (addSub == AddSub.Subtract)
            {
                Intrinsic subInst = X86PsubInstruction[op.Size];
                res = context.AddIntrinsic(subInst, d, res);
            }
            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }
            context.Copy(d, res);
        }
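
        // SABD/SABDL: signed absolute difference. PCMPGT builds an n > m lane mask that
        // selects between n - m and m - n.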
        private static void EmitSse41VectorSabdOp(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;
            Intrinsic cmpgtInst = X86PcmpgtInstruction[size];
            Operand cmpMask = context.AddIntrinsic(cmpgtInst, n, m);
            Intrinsic subInst = X86PsubInstruction[size];
            Operand res = context.AddIntrinsic(subInst, n, m);
            res = context.AddIntrinsic(Intrinsic.X86Pand, cmpMask, res);
            Operand res2 = context.AddIntrinsic(subInst, m, n);
            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);
            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);
            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }
            context.Copy(GetVec(op.Rd), res);
        }
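
        // UABD/UABDL: unsigned absolute difference. SSE lacks an unsigned greater-than
        // compare, so a mask of lanes where n > m is derived from PMAXU(m, n) == m
        // (i.e. m >= n), inverted with PANDN against all-ones; the mask then selects
        // between n - m and m - n.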
        private static void EmitSse41VectorUabdOp(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;
            Intrinsic maxInst = X86PmaxuInstruction[size];
            Operand max = context.AddIntrinsic(maxInst, m, n);
            Intrinsic cmpeqInst = X86PcmpeqInstruction[size];
            Operand cmpMask = context.AddIntrinsic(cmpeqInst, max, m);
            Operand onesMask = X86GetAllElements(context, -1L);
            cmpMask = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, onesMask);
            Intrinsic subInst = X86PsubInstruction[size];
            Operand res = context.AddIntrinsic(subInst, n, m);
            Operand res2 = context.AddIntrinsic(subInst, m, n);
            res = context.AddIntrinsic(Intrinsic.X86Pand, cmpMask, res);
            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);
            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);
            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }
            context.Copy(GetVec(op.Rd), res);
        }
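
        // 128-bit left shift built from 64-bit pieces: PSLLDQ moves the low qword up so
        // the bits shifted out of it can be recovered with a right shift, then ORed with
        // the whole-register PSLLQ result.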
        private static Operand EmitSse2Sll_128(ArmEmitterContext context, Operand op, int shift)
        {
            // The upper part of op is assumed to be zero.
            Debug.Assert(shift >= 0 && shift < 64);
            if (shift == 0)
            {
                return op;
            }
            Operand high = context.AddIntrinsic(Intrinsic.X86Pslldq, op, Const(8));
            high = context.AddIntrinsic(Intrinsic.X86Psrlq, high, Const(64 - shift));
            Operand low = context.AddIntrinsic(Intrinsic.X86Psllq, op, Const(shift));
            return context.AddIntrinsic(Intrinsic.X86Por, high, low);
        }
    }
}