InstEmitSimdArithmetic.cs

// https://github.com/intel/ARM_NEON_2_x86_SSE/blob/master/NEON_2_SSE.h
// https://www.agner.org/optimize/#vectorclass @ vectori128.h

using ARMeilleure.Decoders;
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.State;
using ARMeilleure.Translation;
using System;
using System.Diagnostics;

using static ARMeilleure.Instructions.InstEmitHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper32;
using static ARMeilleure.IntermediateRepresentation.Operand.Factory;

namespace ARMeilleure.Instructions
{
    using Func2I = Func<Operand, Operand, Operand>;

    static partial class InstEmit
    {
        public static void Abs_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOp(context, Intrinsic.Arm64AbsS);
            }
            else
            {
                EmitScalarUnaryOpSx(context, (op1) => EmitAbs(context, op1));
            }
        }

        public static void Abs_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64AbsV);
            }
            else
            {
                EmitVectorUnaryOpSx(context, (op1) => EmitAbs(context, op1));
            }
        }

        public static void Add_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOp(context, Intrinsic.Arm64AddS);
            }
            else
            {
                EmitScalarBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Add_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64AddV);
            }
            else if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic addInst = X86PaddInstruction[op.Size];

                Operand res = context.AddIntrinsic(addInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Addhn_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64AddhnV);
            }
            else
            {
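                // ADDHN adds each pair of wide elements and keeps only the upper half
                // of every sum, narrowing the element size in the destination.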
                EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: false);
            }
        }

        public static void Addp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOp(context, Intrinsic.Arm64AddpS);
            }
            else
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                Operand ne0 = EmitVectorExtractZx(context, op.Rn, 0, op.Size);
                Operand ne1 = EmitVectorExtractZx(context, op.Rn, 1, op.Size);

                Operand res = context.Add(ne0, ne1);

                context.Copy(GetVec(op.Rd), EmitVectorInsert(context, context.VectorZero(), res, 0, op.Size));
            }
        }

        public static void Addp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64AddpV);
            }
            else if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PaddInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Addv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64AddvV);
            }
            else
            {
                EmitVectorAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Cls_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64ClsV);
            }
            else
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                Operand res = context.VectorZero();

                int elems = op.GetBytesCount() >> op.Size;
                int eSize = 8 << op.Size;

                for (int index = 0; index < elems; index++)
                {
                    Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);

                    Operand de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountLeadingSigns)), ne, Const(eSize));

                    res = EmitVectorInsert(context, res, de, index, op.Size);
                }

                context.Copy(GetVec(op.Rd), res);
            }
        }

        public static void Clz_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64ClzV);
            }
            else
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                int eSize = 8 << op.Size;

                Operand res = eSize switch
                {
                    8 => Clz_V_I8(context, GetVec(op.Rn)),
                    16 => Clz_V_I16(context, GetVec(op.Rn)),
                    32 => Clz_V_I32(context, GetVec(op.Rn)),
                    _ => default,
                };

                if (res != default)
                {
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                }
                else
                {
                    int elems = op.GetBytesCount() >> op.Size;

                    res = context.VectorZero();

                    for (int index = 0; index < elems; index++)
                    {
                        Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);

                        Operand de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountLeadingZeros)), ne, Const(eSize));

                        res = EmitVectorInsert(context, res, de, index, op.Size);
                    }
                }

                context.Copy(GetVec(op.Rd), res);
            }
        }

        private static Operand Clz_V_I8(ArmEmitterContext context, Operand arg)
        {
            if (!Optimizations.UseSsse3)
            {
                return default;
            }

            // CLZ nibble table.
            Operand clzTable = X86GetScalar(context, 0x01_01_01_01_02_02_03_04);
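            // Little-endian byte i of the scalar is the CLZ of the 4-bit value i:
            // table[0] = 4, table[1] = 3, table[2..3] = 2, table[4..7] = 1, and the
            // remaining entries (indices 8..15) are zero since the scalar only fills
            // the low 64 bits of the vector.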
            Operand maskLow = X86GetAllElements(context, 0x0f_0f_0f_0f);
            Operand c04 = X86GetAllElements(context, 0x04_04_04_04);

            // CLZ of low 4 bits of elements in arg.
            Operand loClz = context.AddIntrinsic(Intrinsic.X86Pshufb, clzTable, arg);

            // Get the high 4 bits of elements in arg.
            Operand hiArg = context.AddIntrinsic(Intrinsic.X86Psrlw, arg, Const(4));

            hiArg = context.AddIntrinsic(Intrinsic.X86Pand, hiArg, maskLow);

            // CLZ of high 4 bits of elements in arg.
            Operand hiClz = context.AddIntrinsic(Intrinsic.X86Pshufb, clzTable, hiArg);

            // If high 4 bits are not all zero, we discard the CLZ of the low 4 bits.
            Operand mask = context.AddIntrinsic(Intrinsic.X86Pcmpeqb, hiClz, c04);

            loClz = context.AddIntrinsic(Intrinsic.X86Pand, loClz, mask);

            return context.AddIntrinsic(Intrinsic.X86Paddb, loClz, hiClz);
        }

        private static Operand Clz_V_I16(ArmEmitterContext context, Operand arg)
        {
            if (!Optimizations.UseSsse3)
            {
                return default;
            }

            Operand maskSwap = X86GetElements(context, 0x80_0f_80_0d_80_0b_80_09, 0x80_07_80_05_80_03_80_01);
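            // PSHUFB control: index bytes with bit 7 set (0x80) produce zero, so this
            // mask moves the high byte of each 16-bit lane down into the low byte
            // position and clears the high byte.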
            Operand maskLow = X86GetAllElements(context, 0x00ff_00ff);
            Operand c0008 = X86GetAllElements(context, 0x0008_0008);

            // CLZ pair of high 8 and low 8 bits of elements in arg.
            Operand hiloClz = Clz_V_I8(context, arg);

            // Get CLZ of low 8 bits in each pair.
            Operand loClz = context.AddIntrinsic(Intrinsic.X86Pand, hiloClz, maskLow);

            // Get CLZ of high 8 bits in each pair.
            Operand hiClz = context.AddIntrinsic(Intrinsic.X86Pshufb, hiloClz, maskSwap);

            // If high 8 bits are not all zero, we discard the CLZ of the low 8 bits.
            Operand mask = context.AddIntrinsic(Intrinsic.X86Pcmpeqw, hiClz, c0008);

            loClz = context.AddIntrinsic(Intrinsic.X86Pand, loClz, mask);

            return context.AddIntrinsic(Intrinsic.X86Paddw, loClz, hiClz);
        }

        private static Operand Clz_V_I32(ArmEmitterContext context, Operand arg)
        {
            // TODO: Use vplzcntd when AVX-512 is supported.
            if (!Optimizations.UseSse2)
            {
                return default;
            }

#pragma warning disable IDE0055 // Disable formatting
            Operand AddVectorI32(Operand op0, Operand op1) => context.AddIntrinsic(Intrinsic.X86Paddd, op0, op1);
            Operand SubVectorI32(Operand op0, Operand op1) => context.AddIntrinsic(Intrinsic.X86Psubd, op0, op1);
            Operand ShiftRightVectorUI32(Operand op0, int imm8) => context.AddIntrinsic(Intrinsic.X86Psrld, op0, Const(imm8));
            Operand OrVector(Operand op0, Operand op1) => context.AddIntrinsic(Intrinsic.X86Por, op0, op1);
            Operand AndVector(Operand op0, Operand op1) => context.AddIntrinsic(Intrinsic.X86Pand, op0, op1);
            Operand NotVector(Operand op0) => context.AddIntrinsic(Intrinsic.X86Pandn, op0, context.VectorOne());
#pragma warning restore IDE0055

            Operand c55555555 = X86GetAllElements(context, 0x55555555);
            Operand c33333333 = X86GetAllElements(context, 0x33333333);
            Operand c0f0f0f0f = X86GetAllElements(context, 0x0f0f0f0f);
            Operand c0000003f = X86GetAllElements(context, 0x0000003f);

            Operand tmp0;
            Operand tmp1;
            Operand res;
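            // clz(x) = popcount(~(x | x>>1 | x>>2 | ...)): smearing the highest set bit
            // rightwards and negating leaves exactly one 1 bit per leading zero.
            // E.g. x = 0x00010000 -> smear = 0x0001ffff -> ~smear = 0xfffe0000,
            // whose population count is 15 = clz(x).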
            // Set all bits after highest set bit to 1.
            res = OrVector(ShiftRightVectorUI32(arg, 1), arg);
            res = OrVector(ShiftRightVectorUI32(res, 2), res);
            res = OrVector(ShiftRightVectorUI32(res, 4), res);
            res = OrVector(ShiftRightVectorUI32(res, 8), res);
            res = OrVector(ShiftRightVectorUI32(res, 16), res);

            // Make leading 0s into leading 1s.
            res = NotVector(res);

            // Count leading 1s, which is the population count.
            tmp0 = ShiftRightVectorUI32(res, 1);
            tmp0 = AndVector(tmp0, c55555555);
            res = SubVectorI32(res, tmp0);

            tmp0 = ShiftRightVectorUI32(res, 2);
            tmp0 = AndVector(tmp0, c33333333);
            tmp1 = AndVector(res, c33333333);
            res = AddVectorI32(tmp0, tmp1);

            tmp0 = ShiftRightVectorUI32(res, 4);
            tmp0 = AddVectorI32(tmp0, res);
            res = AndVector(tmp0, c0f0f0f0f);

            tmp0 = ShiftRightVectorUI32(res, 8);
            res = AddVectorI32(tmp0, res);

            tmp0 = ShiftRightVectorUI32(res, 16);
            res = AddVectorI32(tmp0, res);

            res = AndVector(res, c0000003f);

            return res;
        }

        public static void Cnt_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64CntV);
            }
            else
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                Operand res = context.VectorZero();

                int elems = op.RegisterSize == RegisterSize.Simd128 ? 16 : 8;
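                // CNT counts set bits per byte, so there is one element per byte lane.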
                for (int index = 0; index < elems; index++)
                {
                    Operand ne = EmitVectorExtractZx(context, op.Rn, index, 0);

                    Operand de;

                    if (Optimizations.UsePopCnt)
                    {
                        de = context.AddIntrinsicLong(Intrinsic.X86Popcnt, ne);
                    }
                    else
                    {
                        de = EmitCountSetBits8(context, ne);
                    }

                    res = EmitVectorInsert(context, res, de, index, 0);
                }

                context.Copy(GetVec(op.Rd), res);
            }
        }

        public static void Fabd_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FabdS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subss, GetVec(op.Rn), GetVec(op.Rm));

                    res = EmitFloatAbs(context, res, true, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subsd, GetVec(op.Rn), GetVec(op.Rm));

                    res = EmitFloatAbs(context, res, false, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);

                    return EmitUnaryMathCall(context, nameof(Math.Abs), res);
                });
            }
        }

        public static void Fabd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FabdV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subps, GetVec(op.Rn), GetVec(op.Rm));

                    res = EmitFloatAbs(context, res, true, true);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subpd, GetVec(op.Rn), GetVec(op.Rm));

                    res = EmitFloatAbs(context, res, false, true);

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);

                    return EmitUnaryMathCall(context, nameof(Math.Abs), res);
                });
            }
        }

        public static void Fabs_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FabsS);
            }
            else if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                if (op.Size == 0)
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), true, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), false, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Abs), op1);
                });
            }
        }

        public static void Fabs_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FabsV);
            }
            else if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), true, true);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), false, true);

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Abs), op1);
                });
            }
        }

        public static void Fadd_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FaddS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Addss, Intrinsic.X86Addsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Fadd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FaddV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Addps, Intrinsic.X86Addpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Faddp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FaddpS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse3)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                if ((op.Size & 1) == 0)
                {
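                    // HADDPS(n, n) yields [n0+n1, n2+n3, n0+n1, n2+n3]; lane 0 holds
                    // the pairwise sum that scalar FADDP needs.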
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddps, GetVec(op.Rn), GetVec(op.Rn));

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if ((op.Size & 1) == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddpd, GetVec(op.Rn), GetVec(op.Rn));

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Faddp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FaddpV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;

                        Intrinsic addInst = (op.Size & 1) == 0 ? Intrinsic.X86Addps : Intrinsic.X86Addpd;

                        return context.AddIntrinsic(addInst, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Fdiv_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FdivS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Divss, Intrinsic.X86Divsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPDiv), op1, op2);
                });
            }
        }

        public static void Fdiv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FdivV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Divps, Intrinsic.X86Divpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPDiv), op1, op2);
                });
            }
        }

        public static void Fmadd_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarTernaryOpF(context, Intrinsic.Arm64FmaddS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand res;

                if (op.Size == 0)
                {
                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfmadd231ss, a, n, m);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Addss, a, res);
                    }

                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfmadd231sd, a, n, m);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Addsd, a, res);
                    }

                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fmax_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FmaxS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                }, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmax_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FmaxV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                }, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmaxnm_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FmaxnmS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnm_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FmaxnmV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FmaxnmpS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2ScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: true, op1, op2);
                });
            }
            else
            {
                EmitScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FmaxnmpV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FmaxnmvV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FmaxpS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2ScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                    }, scalar: true, op1, op2);
                });
            }
            else
            {
                EmitScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FmaxpV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmaxv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FmaxvV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmin_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FminS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                }, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fmin_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FminV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                }, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fminnm_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FminnmS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnm_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FminnmV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FminnmpS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2ScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: true, op1, op2);
                });
            }
            else
            {
                EmitScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FminnmpV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FminnmvV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FminpS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2ScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                    }, scalar: true, op1, op2);
                });
            }
            else
            {
                EmitScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fminp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FminpV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fminv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FminvV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fmla_Se(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarTernaryOpFRdByElem(context, Intrinsic.Arm64FmlaSe);
            }
            else if (Optimizations.UseFma)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
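                    // SHUFPS selects one source lane per 2-bit immediate field, so
                    // replicating Index into all four fields broadcasts element Index
                    // across the vector.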
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));

                    res = context.AddIntrinsic(Intrinsic.X86Vfmadd231ss, d, n, res);

                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
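                    // SHUFPD uses one selector bit per lane; duplicating Index into
                    // both bits broadcasts the chosen double.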
  1073. Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
  1074. res = context.AddIntrinsic(Intrinsic.X86Vfmadd231sd, d, n, res);
  1075. context.Copy(d, context.VectorZeroUpper64(res));
  1076. }
  1077. }
  1078. else
  1079. {
  1080. EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
  1081. {
  1082. return context.Add(op1, context.Multiply(op2, op3));
  1083. });
  1084. }
  1085. }
  1086. public static void Fmla_V(ArmEmitterContext context) // Fused.
  1087. {
  1088. if (Optimizations.UseAdvSimd)
  1089. {
  1090. InstEmitSimdHelperArm64.EmitVectorTernaryOpFRd(context, Intrinsic.Arm64FmlaV);
  1091. }
  1092. else if (Optimizations.FastFP && Optimizations.UseSse2)
  1093. {
  1094. OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
  1095. Operand d = GetVec(op.Rd);
  1096. Operand n = GetVec(op.Rn);
  1097. Operand m = GetVec(op.Rm);
  1098. int sizeF = op.Size & 1;
  1099. Operand res;
  1100. if (sizeF == 0)
  1101. {
  1102. if (Optimizations.UseFma)
  1103. {
  1104. res = context.AddIntrinsic(Intrinsic.X86Vfmadd231ps, d, n, m);
  1105. }
  1106. else
  1107. {
  1108. res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
  1109. res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);
  1110. }
  1111. if (op.RegisterSize == RegisterSize.Simd64)
  1112. {
  1113. res = context.VectorZeroUpper64(res);
  1114. }
  1115. context.Copy(d, res);
  1116. }
  1117. else /* if (sizeF == 1) */
  1118. {
  1119. if (Optimizations.UseFma)
  1120. {
  1121. res = context.AddIntrinsic(Intrinsic.X86Vfmadd231pd, d, n, m);
  1122. }
  1123. else
  1124. {
  1125. res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
  1126. res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);
  1127. }
  1128. context.Copy(d, res);
  1129. }
  1130. }
  1131. else
  1132. {
  1133. EmitVectorTernaryOpF(context, (op1, op2, op3) =>
  1134. {
  1135. return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
  1136. });
  1137. }
  1138. }
  1139. public static void Fmla_Ve(ArmEmitterContext context) // Fused.
  1140. {
  1141. if (Optimizations.UseAdvSimd)
  1142. {
  1143. InstEmitSimdHelperArm64.EmitVectorTernaryOpFRdByElem(context, Intrinsic.Arm64FmlaVe);
  1144. }
  1145. else if (Optimizations.FastFP && Optimizations.UseSse2)
  1146. {
  1147. OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
  1148. Operand d = GetVec(op.Rd);
  1149. Operand n = GetVec(op.Rn);
  1150. Operand m = GetVec(op.Rm);
  1151. int sizeF = op.Size & 1;
  1152. if (sizeF == 0)
  1153. {
  1154. int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
  1155. Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
  1156. if (Optimizations.UseFma)
  1157. {
  1158. res = context.AddIntrinsic(Intrinsic.X86Vfmadd231ps, d, n, res);
  1159. }
  1160. else
  1161. {
  1162. res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
  1163. res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);
  1164. }
  1165. if (op.RegisterSize == RegisterSize.Simd64)
  1166. {
  1167. res = context.VectorZeroUpper64(res);
  1168. }
  1169. context.Copy(d, res);
  1170. }
  1171. else /* if (sizeF == 1) */
  1172. {
  1173. int shuffleMask = op.Index | op.Index << 1;
  1174. Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
  1175. if (Optimizations.UseFma)
  1176. {
  1177. res = context.AddIntrinsic(Intrinsic.X86Vfmadd231pd, d, n, res);
  1178. }
  1179. else
  1180. {
  1181. res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
  1182. res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);
  1183. }
  1184. context.Copy(d, res);
  1185. }
  1186. }
  1187. else
  1188. {
  1189. EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
  1190. {
  1191. return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
  1192. });
  1193. }
  1194. }
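
// FMLS (by element), d -= n * m[index]. VFNMADD231 computes
// dst = -(src2 * src3) + dst, i.e. exactly d - n * m in a single rounding,
// so the FMA path needs no explicit negation.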
public static void Fmls_Se(ArmEmitterContext context) // Fused.
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarTernaryOpFRdByElem(context, Intrinsic.Arm64FmlsSe);
    }
    else if (Optimizations.UseFma)
    {
        OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;

        Operand d = GetVec(op.Rd);
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        int sizeF = op.Size & 1;

        if (sizeF == 0)
        {
            int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
            Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));

            res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231ss, d, n, res);

            context.Copy(d, context.VectorZeroUpper96(res));
        }
        else /* if (sizeF == 1) */
        {
            int shuffleMask = op.Index | op.Index << 1;
            Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));

            res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231sd, d, n, res);

            context.Copy(d, context.VectorZeroUpper64(res));
        }
    }
    else
    {
        EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
        {
            return context.Subtract(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Fmls_V(ArmEmitterContext context) // Fused.
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpFRd(context, Intrinsic.Arm64FmlsV);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

        Operand d = GetVec(op.Rd);
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        int sizeF = op.Size & 1;

        Operand res;

        if (sizeF == 0)
        {
            if (Optimizations.UseFma)
            {
                res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231ps, d, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);
            }

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(d, res);
        }
        else /* if (sizeF == 1) */
        {
            if (Optimizations.UseFma)
            {
                res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231pd, d, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);
            }

            context.Copy(d, res);
        }
    }
    else
    {
        EmitVectorTernaryOpF(context, (op1, op2, op3) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
        });
    }
}

public static void Fmls_Ve(ArmEmitterContext context) // Fused.
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpFRdByElem(context, Intrinsic.Arm64FmlsVe);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;

        Operand d = GetVec(op.Rd);
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        int sizeF = op.Size & 1;

        if (sizeF == 0)
        {
            int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
            Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));

            if (Optimizations.UseFma)
            {
                res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231ps, d, n, res);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);
            }

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(d, res);
        }
        else /* if (sizeF == 1) */
        {
            int shuffleMask = op.Index | op.Index << 1;
            Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));

            if (Optimizations.UseFma)
            {
                res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231pd, d, n, res);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);
            }

            context.Copy(d, res);
        }
    }
    else
    {
        EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
        });
    }
}
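
// FMSUB, d = a - n * m (the accumulator comes from Ra, not Rd). This is again
// VFNMADD231 (dst = -(src2 * src3) + dst) with a as the accumulator; the SSE2
// fallback multiplies and subtracts in two roundings, hence the FastFP gate.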
public static void Fmsub_S(ArmEmitterContext context) // Fused.
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarTernaryOpF(context, Intrinsic.Arm64FmsubS);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

        Operand d = GetVec(op.Rd);
        Operand a = GetVec(op.Ra);
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        Operand res;

        if (op.Size == 0)
        {
            if (Optimizations.UseFma)
            {
                res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231ss, a, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subss, a, res);
            }

            context.Copy(d, context.VectorZeroUpper96(res));
        }
        else /* if (op.Size == 1) */
        {
            if (Optimizations.UseFma)
            {
                res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231sd, a, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subsd, a, res);
            }

            context.Copy(d, context.VectorZeroUpper64(res));
        }
    }
    else
    {
        EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
        });
    }
}

public static void Fmul_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FmulS);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        EmitScalarBinaryOpF(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd);
    }
    else if (Optimizations.FastFP)
    {
        EmitScalarBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
    }
    else
    {
        EmitScalarBinaryOpF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
        });
    }
}

public static void Fmul_Se(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarBinaryOpFByElem(context, Intrinsic.Arm64FmulSe);
    }
    else
    {
        EmitScalarBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
    }
}

public static void Fmul_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FmulV);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        EmitVectorBinaryOpF(context, Intrinsic.X86Mulps, Intrinsic.X86Mulpd);
    }
    else if (Optimizations.FastFP)
    {
        EmitVectorBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
    }
    else
    {
        EmitVectorBinaryOpF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
        });
    }
}

public static void Fmul_Ve(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOpFByElem(context, Intrinsic.Arm64FmulVe);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;

        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        int sizeF = op.Size & 1;

        if (sizeF == 0)
        {
            int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
            Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));

            res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
        else /* if (sizeF == 1) */
        {
            int shuffleMask = op.Index | op.Index << 1;
            Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));

            res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);

            context.Copy(GetVec(op.Rd), res);
        }
    }
    else if (Optimizations.FastFP)
    {
        EmitVectorBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
    }
    else
    {
        EmitVectorBinaryOpByElemF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
        });
    }
}

public static void Fmulx_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FmulxS);
    }
    else
    {
        EmitScalarBinaryOpF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
        });
    }
}

public static void Fmulx_Se(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarBinaryOpFByElem(context, Intrinsic.Arm64FmulxSe);
    }
    else
    {
        EmitScalarBinaryOpByElemF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
        });
    }
}

public static void Fmulx_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FmulxV);
    }
    else
    {
        EmitVectorBinaryOpF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
        });
    }
}

public static void Fmulx_Ve(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOpFByElem(context, Intrinsic.Arm64FmulxVe);
    }
    else
    {
        EmitVectorBinaryOpByElemF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
        });
    }
}
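
// FNEG only flips the sign bit: XOR with a vector of -0.0 (0x8000_0000 per
// float, 0x8000_0000_0000_0000 per double) negates every encoding, including
// NaNs, without touching the rest of the payload.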
public static void Fneg_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FnegS);
    }
    else if (Optimizations.UseSse2)
    {
        OpCodeSimd op = (OpCodeSimd)context.CurrOp;

        if (op.Size == 0)
        {
            Operand mask = X86GetScalar(context, -0f);

            Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));

            context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
        }
        else /* if (op.Size == 1) */
        {
            Operand mask = X86GetScalar(context, -0d);

            Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));

            context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
        }
    }
    else
    {
        EmitScalarUnaryOpF(context, (op1) => context.Negate(op1));
    }
}

public static void Fneg_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FnegV);
    }
    else if (Optimizations.UseSse2)
    {
        OpCodeSimd op = (OpCodeSimd)context.CurrOp;

        int sizeF = op.Size & 1;

        if (sizeF == 0)
        {
            Operand mask = X86GetAllElements(context, -0f);

            Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
        else /* if (sizeF == 1) */
        {
            Operand mask = X86GetAllElements(context, -0d);

            Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));

            context.Copy(GetVec(op.Rd), res);
        }
    }
    else
    {
        EmitVectorUnaryOpF(context, (op1) => context.Negate(op1));
    }
}
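
// FNMADD computes d = -a - n * m and FNMSUB computes d = n * m - a; they map
// to VFNMSUB231 (dst = -(src2 * src3) - dst) and VFMSUB231
// (dst = (src2 * src3) - dst) respectively. Without FMA, the accumulator is
// negated up front by XORing in the -0.0 sign mask, then an ordinary multiply
// followed by sub/add reproduces the value up to double rounding.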
public static void Fnmadd_S(ArmEmitterContext context) // Fused.
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarTernaryOpF(context, Intrinsic.Arm64FnmaddS);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

        Operand d = GetVec(op.Rd);
        Operand a = GetVec(op.Ra);
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        Operand res;

        if (op.Size == 0)
        {
            if (Optimizations.UseFma)
            {
                res = context.AddIntrinsic(Intrinsic.X86Vfnmsub231ss, a, n, m);
            }
            else
            {
                Operand mask = X86GetScalar(context, -0f);
                Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorps, mask, a);

                res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subss, aNeg, res);
            }

            context.Copy(d, context.VectorZeroUpper96(res));
        }
        else /* if (op.Size == 1) */
        {
            if (Optimizations.UseFma)
            {
                res = context.AddIntrinsic(Intrinsic.X86Vfnmsub231sd, a, n, m);
            }
            else
            {
                Operand mask = X86GetScalar(context, -0d);
                Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, a);

                res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subsd, aNeg, res);
            }

            context.Copy(d, context.VectorZeroUpper64(res));
        }
    }
    else
    {
        EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPNegMulAdd), op1, op2, op3);
        });
    }
}

public static void Fnmsub_S(ArmEmitterContext context) // Fused.
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarTernaryOpF(context, Intrinsic.Arm64FnmsubS);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

        Operand d = GetVec(op.Rd);
        Operand a = GetVec(op.Ra);
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        Operand res;

        if (op.Size == 0)
        {
            if (Optimizations.UseFma)
            {
                res = context.AddIntrinsic(Intrinsic.X86Vfmsub231ss, a, n, m);
            }
            else
            {
                Operand mask = X86GetScalar(context, -0f);
                Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorps, mask, a);

                res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Addss, aNeg, res);
            }

            context.Copy(d, context.VectorZeroUpper96(res));
        }
        else /* if (op.Size == 1) */
        {
            if (Optimizations.UseFma)
            {
                res = context.AddIntrinsic(Intrinsic.X86Vfmsub231sd, a, n, m);
            }
            else
            {
                Operand mask = X86GetScalar(context, -0d);
                Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, a);

                res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Addsd, aNeg, res);
            }

            context.Copy(d, context.VectorZeroUpper64(res));
        }
    }
    else
    {
        EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPNegMulSub), op1, op2, op3);
        });
    }
}

public static void Fnmul_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FnmulS);
    }
    else
    {
        EmitScalarBinaryOpF(context, (op1, op2) => context.Negate(context.Multiply(op1, op2)));
    }
}
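
// FRECPE: Arm's reciprocal estimate comes from a small lookup table with only
// a few significand bits of precision, while RCPSS/RCPPS deliver a roughly
// 12-bit estimate. The EmitSse41Round32Exp8OpF helper is presumably what
// reconciles the two, trimming the x86 result down to the Arm estimate
// format; that reading is inferred from its name rather than from anything
// documented here.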
public static void Frecpe_S(ArmEmitterContext context)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;

    int sizeF = op.Size & 1;

    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrecpeS);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse41 && sizeF == 0)
    {
        Operand res = EmitSse41Round32Exp8OpF(context, context.AddIntrinsic(Intrinsic.X86Rcpss, GetVec(op.Rn)), scalar: true);

        context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
    }
    else
    {
        EmitScalarUnaryOpF(context, (op1) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipEstimate), op1);
        });
    }
}

public static void Frecpe_V(ArmEmitterContext context)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;

    int sizeF = op.Size & 1;

    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrecpeV);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse41 && sizeF == 0)
    {
        Operand res = EmitSse41Round32Exp8OpF(context, context.AddIntrinsic(Intrinsic.X86Rcpps, GetVec(op.Rn)), scalar: false);

        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }

        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorUnaryOpF(context, (op1) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipEstimate), op1);
        });
    }
}
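
// FRECPS is one Newton-Raphson step for the reciprocal: given an estimate x
// of 1/v, the refinement multiplies x by (2 - v * x), and this instruction
// supplies that fused (2 - n * m) factor. VFNMADD231 against the constant 2
// computes it in one rounding; EmitSse41RecipStepSelectOpF appears to patch
// in the mask value (2) for the 0 * infinity special case that the Arm
// pseudocode defines.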
public static void Frecps_S(ArmEmitterContext context) // Fused.
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FrecpsS);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        int sizeF = op.Size & 1;

        Operand res;

        if (sizeF == 0)
        {
            Operand mask = X86GetScalar(context, 2f);

            if (Optimizations.UseFma)
            {
                res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231ss, mask, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subss, mask, res);
            }

            res = EmitSse41RecipStepSelectOpF(context, n, m, res, mask, scalar: true, sizeF);

            context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
        }
        else /* if (sizeF == 1) */
        {
            Operand mask = X86GetScalar(context, 2d);

            if (Optimizations.UseFma)
            {
                res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231sd, mask, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subsd, mask, res);
            }

            res = EmitSse41RecipStepSelectOpF(context, n, m, res, mask, scalar: true, sizeF);

            context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
        }
    }
    else
    {
        EmitScalarBinaryOpF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipStepFused), op1, op2);
        });
    }
}

public static void Frecps_V(ArmEmitterContext context) // Fused.
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FrecpsV);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        int sizeF = op.Size & 1;

        Operand res;

        if (sizeF == 0)
        {
            Operand mask = X86GetAllElements(context, 2f);

            if (Optimizations.UseFma)
            {
                res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231ps, mask, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subps, mask, res);
            }

            res = EmitSse41RecipStepSelectOpF(context, n, m, res, mask, scalar: false, sizeF);

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
        else /* if (sizeF == 1) */
        {
            Operand mask = X86GetAllElements(context, 2d);

            if (Optimizations.UseFma)
            {
                res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231pd, mask, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subpd, mask, res);
            }

            res = EmitSse41RecipStepSelectOpF(context, n, m, res, mask, scalar: false, sizeF);

            context.Copy(GetVec(op.Rd), res);
        }
    }
    else
    {
        EmitVectorBinaryOpF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipStepFused), op1, op2);
        });
    }
}

public static void Frecpx_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrecpxS);
    }
    else
    {
        EmitScalarUnaryOpF(context, (op1) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecpX), op1);
        });
    }
}

public static void Frinta_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintaS);
    }
    else if (Optimizations.UseSse41)
    {
        EmitSse41ScalarRoundOpF(context, FPRoundingMode.ToNearestAway);
    }
    else
    {
        EmitScalarUnaryOpF(context, (op1) =>
        {
            return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
        });
    }
}

public static void Frinta_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintaV);
    }
    else if (Optimizations.UseSse41)
    {
        EmitSse41VectorRoundOpF(context, FPRoundingMode.ToNearestAway);
    }
    else
    {
        EmitVectorUnaryOpF(context, (op1) =>
        {
            return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
        });
    }
}

public static void Frinti_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintiS);
    }
    else
    {
        EmitScalarUnaryOpF(context, (op1) =>
        {
            return EmitRoundByRMode(context, op1);
        });
    }
}

public static void Frinti_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintiV);
    }
    else
    {
        EmitVectorUnaryOpF(context, (op1) =>
        {
            return EmitRoundByRMode(context, op1);
        });
    }
}

public static void Frintm_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintmS);
    }
    else if (Optimizations.UseSse41)
    {
        EmitSse41ScalarRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
    }
    else
    {
        EmitScalarUnaryOpF(context, (op1) =>
        {
            return EmitUnaryMathCall(context, nameof(Math.Floor), op1);
        });
    }
}

public static void Frintm_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintmV);
    }
    else if (Optimizations.UseSse41)
    {
        EmitSse41VectorRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
    }
    else
    {
        EmitVectorUnaryOpF(context, (op1) =>
        {
            return EmitUnaryMathCall(context, nameof(Math.Floor), op1);
        });
    }
}

public static void Frintn_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintnS);
    }
    else if (Optimizations.UseSse41)
    {
        EmitSse41ScalarRoundOpF(context, FPRoundingMode.ToNearest);
    }
    else
    {
        EmitScalarUnaryOpF(context, (op1) =>
        {
            return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
        });
    }
}

public static void Frintn_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintnV);
    }
    else if (Optimizations.UseSse41)
    {
        EmitSse41VectorRoundOpF(context, FPRoundingMode.ToNearest);
    }
    else
    {
        EmitVectorUnaryOpF(context, (op1) =>
        {
            return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
        });
    }
}

public static void Frintp_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintpS);
    }
    else if (Optimizations.UseSse41)
    {
        EmitSse41ScalarRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
    }
    else
    {
        EmitScalarUnaryOpF(context, (op1) =>
        {
            return EmitUnaryMathCall(context, nameof(Math.Ceiling), op1);
        });
    }
}

public static void Frintp_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintpV);
    }
    else if (Optimizations.UseSse41)
    {
        EmitSse41VectorRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
    }
    else
    {
        EmitVectorUnaryOpF(context, (op1) =>
        {
            return EmitUnaryMathCall(context, nameof(Math.Ceiling), op1);
        });
    }
}

public static void Frintx_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintxS);
    }
    else
    {
        EmitScalarUnaryOpF(context, (op1) =>
        {
            return EmitRoundByRMode(context, op1);
        });
    }
}

public static void Frintx_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintxV);
    }
    else
    {
        EmitVectorUnaryOpF(context, (op1) =>
        {
            return EmitRoundByRMode(context, op1);
        });
    }
}

public static void Frintz_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintzS);
    }
    else if (Optimizations.UseSse41)
    {
        EmitSse41ScalarRoundOpF(context, FPRoundingMode.TowardsZero);
    }
    else
    {
        EmitScalarUnaryOpF(context, (op1) =>
        {
            return EmitUnaryMathCall(context, nameof(Math.Truncate), op1);
        });
    }
}

public static void Frintz_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintzV);
    }
    else if (Optimizations.UseSse41)
    {
        EmitSse41VectorRoundOpF(context, FPRoundingMode.TowardsZero);
    }
    else
    {
        EmitVectorUnaryOpF(context, (op1) =>
        {
            return EmitUnaryMathCall(context, nameof(Math.Truncate), op1);
        });
    }
}
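
// FRSQRTE is composed as RCP(SQRT(x)) instead of RSQRT: as the comments below
// note, RSQRTSS/RSQRTPS flush subnormal inputs to zero where Arm does not,
// while SQRTSS handles subnormals correctly and always feeds RCPSS a normal
// (or special) value. The result gets the same estimate-precision trim as
// FRECPE.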
public static void Frsqrte_S(ArmEmitterContext context)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;

    int sizeF = op.Size & 1;

    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrsqrteS);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse41 && sizeF == 0)
    {
        // RSQRTSS handles subnormals as zero, which differs from Arm, so we can't use it here.
        Operand res = context.AddIntrinsic(Intrinsic.X86Sqrtss, GetVec(op.Rn));
        res = context.AddIntrinsic(Intrinsic.X86Rcpss, res);
        res = EmitSse41Round32Exp8OpF(context, res, scalar: true);

        context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
    }
    else
    {
        EmitScalarUnaryOpF(context, (op1) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtEstimate), op1);
        });
    }
}

public static void Frsqrte_V(ArmEmitterContext context)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;

    int sizeF = op.Size & 1;

    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrsqrteV);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse41 && sizeF == 0)
    {
        // RSQRTPS handles subnormals as zero, which differs from Arm, so we can't use it here.
        Operand res = context.AddIntrinsic(Intrinsic.X86Sqrtps, GetVec(op.Rn));
        res = context.AddIntrinsic(Intrinsic.X86Rcpps, res);
        res = EmitSse41Round32Exp8OpF(context, res, scalar: false);

        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }

        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorUnaryOpF(context, (op1) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtEstimate), op1);
        });
    }
}
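
// FRSQRTS is the Newton-Raphson step for the reciprocal square root: the
// refinement factor is (3 - n * m) / 2, built here as 0.5 * (3 - n * m) with
// the subtraction fused when FMA is available. The 1.5 constant is the value
// the step must return for the 0 * infinity special case, which the select
// helper appears to substitute afterwards.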
public static void Frsqrts_S(ArmEmitterContext context) // Fused.
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FrsqrtsS);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        int sizeF = op.Size & 1;

        Operand res;

        if (sizeF == 0)
        {
            Operand maskHalf = X86GetScalar(context, 0.5f);
            Operand maskThree = X86GetScalar(context, 3f);
            Operand maskOneHalf = X86GetScalar(context, 1.5f);

            if (Optimizations.UseFma)
            {
                res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231ss, maskThree, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subss, maskThree, res);
            }

            res = context.AddIntrinsic(Intrinsic.X86Mulss, maskHalf, res);
            res = EmitSse41RecipStepSelectOpF(context, n, m, res, maskOneHalf, scalar: true, sizeF);

            context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
        }
        else /* if (sizeF == 1) */
        {
            Operand maskHalf = X86GetScalar(context, 0.5d);
            Operand maskThree = X86GetScalar(context, 3d);
            Operand maskOneHalf = X86GetScalar(context, 1.5d);

            if (Optimizations.UseFma)
            {
                res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231sd, maskThree, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subsd, maskThree, res);
            }

            res = context.AddIntrinsic(Intrinsic.X86Mulsd, maskHalf, res);
            res = EmitSse41RecipStepSelectOpF(context, n, m, res, maskOneHalf, scalar: true, sizeF);

            context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
        }
    }
    else
    {
        EmitScalarBinaryOpF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtStepFused), op1, op2);
        });
    }
}

public static void Frsqrts_V(ArmEmitterContext context) // Fused.
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FrsqrtsV);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        int sizeF = op.Size & 1;

        Operand res;

        if (sizeF == 0)
        {
            Operand maskHalf = X86GetAllElements(context, 0.5f);
            Operand maskThree = X86GetAllElements(context, 3f);
            Operand maskOneHalf = X86GetAllElements(context, 1.5f);

            if (Optimizations.UseFma)
            {
                res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231ps, maskThree, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subps, maskThree, res);
            }

            res = context.AddIntrinsic(Intrinsic.X86Mulps, maskHalf, res);
            res = EmitSse41RecipStepSelectOpF(context, n, m, res, maskOneHalf, scalar: false, sizeF);

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
        else /* if (sizeF == 1) */
        {
            Operand maskHalf = X86GetAllElements(context, 0.5d);
            Operand maskThree = X86GetAllElements(context, 3d);
            Operand maskOneHalf = X86GetAllElements(context, 1.5d);

            if (Optimizations.UseFma)
            {
                res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231pd, maskThree, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Subpd, maskThree, res);
            }

            res = context.AddIntrinsic(Intrinsic.X86Mulpd, maskHalf, res);
            res = EmitSse41RecipStepSelectOpF(context, n, m, res, maskOneHalf, scalar: false, sizeF);

            context.Copy(GetVec(op.Rd), res);
        }
    }
    else
    {
        EmitVectorBinaryOpF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtStepFused), op1, op2);
        });
    }
}

public static void Fsqrt_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FsqrtS);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        EmitScalarUnaryOpF(context, Intrinsic.X86Sqrtss, Intrinsic.X86Sqrtsd);
    }
    else
    {
        EmitScalarUnaryOpF(context, (op1) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSqrt), op1);
        });
    }
}

public static void Fsqrt_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FsqrtV);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        EmitVectorUnaryOpF(context, Intrinsic.X86Sqrtps, Intrinsic.X86Sqrtpd);
    }
    else
    {
        EmitVectorUnaryOpF(context, (op1) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSqrt), op1);
        });
    }
}

public static void Fsub_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FsubS);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        EmitScalarBinaryOpF(context, Intrinsic.X86Subss, Intrinsic.X86Subsd);
    }
    else if (Optimizations.FastFP)
    {
        EmitScalarBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
    }
    else
    {
        EmitScalarBinaryOpF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
        });
    }
}

public static void Fsub_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FsubV);
    }
    else if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        EmitVectorBinaryOpF(context, Intrinsic.X86Subps, Intrinsic.X86Subpd);
    }
    else if (Optimizations.FastFP)
    {
        EmitVectorBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
    }
    else
    {
        EmitVectorBinaryOpF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
        });
    }
}

public static void Mla_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64MlaV);
    }
    else if (Optimizations.UseSse41)
    {
        EmitSse41VectorMul_AddSub(context, AddSub.Add);
    }
    else
    {
        EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
        {
            return context.Add(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Mla_Ve(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRdByElem(context, Intrinsic.Arm64MlaVe);
    }
    else
    {
        EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
        {
            return context.Add(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Mls_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64MlsV);
    }
    else if (Optimizations.UseSse41)
    {
        EmitSse41VectorMul_AddSub(context, AddSub.Subtract);
    }
    else
    {
        EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
        {
            return context.Subtract(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Mls_Ve(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRdByElem(context, Intrinsic.Arm64MlsVe);
    }
    else
    {
        EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
        {
            return context.Subtract(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Mul_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64MulV);
    }
    else if (Optimizations.UseSse41)
    {
        EmitSse41VectorMul_AddSub(context, AddSub.None);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
    }
}

public static void Mul_Ve(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOpByElem(context, Intrinsic.Arm64MulVe);
    }
    else
    {
        EmitVectorBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
    }
}

public static void Neg_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarUnaryOp(context, Intrinsic.Arm64NegS);
    }
    else
    {
        EmitScalarUnaryOpSx(context, (op1) => context.Negate(op1));
    }
}

public static void Neg_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64NegV);
    }
    else if (Optimizations.UseSse2)
    {
        OpCodeSimd op = (OpCodeSimd)context.CurrOp;

        Intrinsic subInst = X86PsubInstruction[op.Size];

        Operand res = context.AddIntrinsic(subInst, context.VectorZero(), GetVec(op.Rn));

        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }

        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorUnaryOpSx(context, (op1) => context.Negate(op1));
    }
}
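
// PMULL is a carry-less (polynomial, GF(2)) multiply: partial products are
// combined with XOR rather than addition. With PCLMULQDQ the immediate
// selects which 64-bit halves to multiply (0b0000_0000 = low x low for the
// lower-half encoding, 0b0001_0001 = high x high for PMULL2). The SSE4.1
// fallback builds the product bit by bit: for each bit i of n it forms an
// all-ones-or-zero lane mask from that bit and XORs in (m << i) under the
// mask.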
public static void Pmull_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

    if (Optimizations.UseArm64Pmull)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64PmullV);
    }
    else if (Optimizations.UsePclmulqdq && op.Size == 3)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        int imm8 = op.RegisterSize == RegisterSize.Simd64 ? 0b0000_0000 : 0b0001_0001;

        Operand res = context.AddIntrinsic(Intrinsic.X86Pclmulqdq, n, m, Const(imm8));

        context.Copy(GetVec(op.Rd), res);
    }
    else if (Optimizations.UseSse41)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        if (op.RegisterSize == RegisterSize.Simd64)
        {
            n = context.VectorZeroUpper64(n);
            m = context.VectorZeroUpper64(m);
        }
        else /* if (op.RegisterSize == RegisterSize.Simd128) */
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }

        Operand res = context.VectorZero();

        if (op.Size == 0)
        {
            n = context.AddIntrinsic(Intrinsic.X86Pmovzxbw, n);
            m = context.AddIntrinsic(Intrinsic.X86Pmovzxbw, m);

            for (int i = 0; i < 8; i++)
            {
                Operand mask = context.AddIntrinsic(Intrinsic.X86Psllw, n, Const(15 - i));
                mask = context.AddIntrinsic(Intrinsic.X86Psraw, mask, Const(15));

                Operand tmp = context.AddIntrinsic(Intrinsic.X86Psllw, m, Const(i));
                tmp = context.AddIntrinsic(Intrinsic.X86Pand, tmp, mask);

                res = context.AddIntrinsic(Intrinsic.X86Pxor, res, tmp);
            }
        }
        else /* if (op.Size == 3) */
        {
            Operand zero = context.VectorZero();

            for (int i = 0; i < 64; i++)
            {
                Operand mask = context.AddIntrinsic(Intrinsic.X86Movlhps, n, n);
                mask = context.AddIntrinsic(Intrinsic.X86Psllq, mask, Const(63 - i));
                mask = context.AddIntrinsic(Intrinsic.X86Psrlq, mask, Const(63));
                mask = context.AddIntrinsic(Intrinsic.X86Psubq, zero, mask);

                Operand tmp = EmitSse2Sll_128(context, m, i);
                tmp = context.AddIntrinsic(Intrinsic.X86Pand, tmp, mask);

                res = context.AddIntrinsic(Intrinsic.X86Pxor, res, tmp);
            }
        }

        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        Operand res;

        if (op.Size == 0)
        {
            res = context.VectorZero();

            int part = op.RegisterSize == RegisterSize.Simd64 ? 0 : 8;

            for (int index = 0; index < 8; index++)
            {
                Operand ne = context.VectorExtract8(n, part + index);
                Operand me = context.VectorExtract8(m, part + index);

                Operand de = EmitPolynomialMultiply(context, ne, me, 8);

                res = EmitVectorInsert(context, res, de, index, 1);
            }
        }
        else /* if (op.Size == 3) */
        {
            int part = op.RegisterSize == RegisterSize.Simd64 ? 0 : 1;

            Operand ne = context.VectorExtract(OperandType.I64, n, part);
            Operand me = context.VectorExtract(OperandType.I64, m, part);

            res = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.PolynomialMult64_128)), ne, me);
        }

        context.Copy(GetVec(op.Rd), res);
    }
}

public static void Raddhn_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64RaddhnV);
    }
    else
    {
        EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: true);
    }
}

public static void Rsubhn_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64RsubhnV);
    }
    else
    {
        EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: true);
    }
}

public static void Saba_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64SabaV);
    }
    else
    {
        EmitVectorTernaryOpSx(context, (op1, op2, op3) =>
        {
            return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
        });
    }
}

public static void Sabal_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64SabalV);
    }
    else
    {
        EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
        {
            return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
        });
    }
}

public static void Sabd_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SabdV);
    }
    else if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        EmitSse41VectorSabdOp(context, op, n, m, isLong: false);
    }
    else
    {
        EmitVectorBinaryOpSx(context, (op1, op2) =>
        {
            return EmitAbs(context, context.Subtract(op1, op2));
        });
    }
}
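
// The widening "long" variants share one SSE pattern: for the upper-half
// encodings (SABDL2 and friends) PSRLDQ moves the high 64 bits down first,
// then PMOVSXBW/PMOVSXWD sign-extends each element to twice its width, so the
// operation itself can run at the widened element size.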
public static void Sabdl_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SabdlV);
    }
    else if (Optimizations.UseSse41 && op.Size < 2)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }

        Intrinsic movInst = op.Size == 0
            ? Intrinsic.X86Pmovsxbw
            : Intrinsic.X86Pmovsxwd;

        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);

        EmitSse41VectorSabdOp(context, op, n, m, isLong: true);
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) =>
        {
            return EmitAbs(context, context.Subtract(op1, op2));
        });
    }
}

public static void Sadalp_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOpRd(context, Intrinsic.Arm64SadalpV);
    }
    else
    {
        EmitAddLongPairwise(context, signed: true, accumulate: true);
    }
}

public static void Saddl_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SaddlV);
    }
    else if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }

        Intrinsic movInst = X86PmovsxInstruction[op.Size];

        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);

        Intrinsic addInst = X86PaddInstruction[op.Size + 1];

        context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
    }
}

public static void Saddlp_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64SaddlpV);
    }
    else
    {
        EmitAddLongPairwise(context, signed: true, accumulate: false);
    }
}

public static void Saddlv_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64SaddlvV);
    }
    else
    {
        EmitVectorLongAcrossVectorOpSx(context, (op1, op2) => context.Add(op1, op2));
    }
}

public static void Saddw_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SaddwV);
    }
    else if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        if (op.RegisterSize == RegisterSize.Simd128)
        {
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }

        Intrinsic movInst = X86PmovsxInstruction[op.Size];

        m = context.AddIntrinsic(movInst, m);

        Intrinsic addInst = X86PaddInstruction[op.Size + 1];

        context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
    }
    else
    {
        EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
    }
}
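
// SHADD: signed halving add with no widening, via the carry-save identity
// (a + b) >> 1 == (a & b) + ((a ^ b) >> 1). The AND term carries the bits
// both inputs share (each worth two after the halving) and the shifted XOR
// term is the half-sum, so the extra top bit a plain add would need never
// exists; the arithmetic shift keeps the identity valid for signed lanes.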
public static void Shadd_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64ShaddV);
    }
    else if (Optimizations.UseSse2 && op.Size > 0)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);

        Operand res = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
        Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);

        Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psraw : Intrinsic.X86Psrad;

        res2 = context.AddIntrinsic(shiftInst, res2, Const(1));

        Intrinsic addInst = X86PaddInstruction[op.Size];

        res = context.AddIntrinsic(addInst, res, res2);

        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }

        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpSx(context, (op1, op2) =>
        {
            return context.ShiftRightSI(context.Add(op1, op2), Const(1));
        });
    }
}
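
// SHSUB: signed halving subtract built from the unsigned PAVG instructions.
// Adding the 0x80.. bias per lane converts signed values to biased unsigned;
// PAVGB/PAVGW compute avg(x, y) = (x + y + 1) >> 1, and
// n' - avg(n', m') == (n - m) >> 1 exactly (the rounding +1 cancels the floor
// in the right direction), so again no widening is required.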
public static void Shsub_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64ShsubV);
    }
    else if (Optimizations.UseSse2 && op.Size < 2)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));
        Intrinsic addInst = X86PaddInstruction[op.Size];
        Operand nPlusMask = context.AddIntrinsic(addInst, n, mask);
        Operand mPlusMask = context.AddIntrinsic(addInst, m, mask);
        Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
        Operand res = context.AddIntrinsic(avgInst, nPlusMask, mPlusMask);
        Intrinsic subInst = X86PsubInstruction[op.Size];
        res = context.AddIntrinsic(subInst, nPlusMask, res);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpSx(context, (op1, op2) =>
        {
            return context.ShiftRightSI(context.Subtract(op1, op2), Const(1));
        });
    }
}

public static void Smax_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SmaxV);
    }
    else if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic maxInst = X86PmaxsInstruction[op.Size];
        Operand res = context.AddIntrinsic(maxInst, n, m);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
    }
}

public static void Smaxp_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SmaxpV);
    }
    else if (Optimizations.UseSsse3)
    {
        EmitSsse3VectorPairwiseOp(context, X86PmaxsInstruction);
    }
    else
    {
        EmitVectorPairwiseOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
    }
}

public static void Smaxv_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64SmaxvV);
    }
    else
    {
        EmitVectorAcrossVectorOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
    }
}

public static void Smin_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SminV);
    }
    else if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic minInst = X86PminsInstruction[op.Size];
        Operand res = context.AddIntrinsic(minInst, n, m);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
    }
}

public static void Sminp_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SminpV);
    }
    else if (Optimizations.UseSsse3)
    {
        EmitSsse3VectorPairwiseOp(context, X86PminsInstruction);
    }
    else
    {
        EmitVectorPairwiseOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
    }
}

public static void Sminv_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64SminvV);
    }
    else
    {
        EmitVectorAcrossVectorOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
    }
}
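
// Widening multiply-accumulate (SMLAL{2}): sign-extend both source halves,
// multiply at the doubled element width (PMULLW/PMULLD), then accumulate into Rd.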
public static void Smlal_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64SmlalV);
    }
    else if (Optimizations.UseSse41 && op.Size < 2)
    {
        Operand d = GetVec(op.Rd);
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovsxInstruction[op.Size];
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
        Operand res = context.AddIntrinsic(mullInst, n, m);
        Intrinsic addInst = X86PaddInstruction[op.Size + 1];
        context.Copy(d, context.AddIntrinsic(addInst, d, res));
    }
    else
    {
        EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
        {
            return context.Add(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Smlal_Ve(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRdByElem(context, Intrinsic.Arm64SmlalVe);
    }
    else
    {
        EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
        {
            return context.Add(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Smlsl_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64SmlslV);
    }
    else if (Optimizations.UseSse41 && op.Size < 2)
    {
        Operand d = GetVec(op.Rd);
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = op.Size == 0 ? Intrinsic.X86Pmovsxbw : Intrinsic.X86Pmovsxwd;
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
        Operand res = context.AddIntrinsic(mullInst, n, m);
        Intrinsic subInst = X86PsubInstruction[op.Size + 1];
        context.Copy(d, context.AddIntrinsic(subInst, d, res));
    }
    else
    {
        EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
        {
            return context.Subtract(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Smlsl_Ve(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRdByElem(context, Intrinsic.Arm64SmlslVe);
    }
    else
    {
        EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
        {
            return context.Subtract(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Smull_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SmullV);
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Multiply(op1, op2));
    }
}

public static void Smull_Ve(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOpByElem(context, Intrinsic.Arm64SmullVe);
    }
    else
    {
        EmitVectorWidenBinaryOpByElemSx(context, (op1, op2) => context.Multiply(op1, op2));
    }
}

public static void Sqabs_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarSaturatingUnaryOp(context, Intrinsic.Arm64SqabsS);
    }
    else
    {
        EmitScalarSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
    }
}

public static void Sqabs_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorSaturatingUnaryOp(context, Intrinsic.Arm64SqabsV);
    }
    else
    {
        EmitVectorSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
    }
}

public static void Sqadd_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOp(context, Intrinsic.Arm64SqaddS);
    }
    else
    {
        EmitScalarSaturatingBinaryOpSx(context, flags: SaturatingFlags.Add);
    }
}

public static void Sqadd_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOp(context, Intrinsic.Arm64SqaddV);
    }
    else
    {
        EmitVectorSaturatingBinaryOpSx(context, flags: SaturatingFlags.Add);
    }
}

public static void Sqdmulh_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOp(context, Intrinsic.Arm64SqdmulhS);
    }
    else
    {
        EmitScalarSaturatingBinaryOpSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false));
    }
}

public static void Sqdmulh_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOp(context, Intrinsic.Arm64SqdmulhV);
    }
    else
    {
        EmitVectorSaturatingBinaryOpSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false));
    }
}

public static void Sqdmulh_Ve(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpByElem(context, Intrinsic.Arm64SqdmulhVe);
    }
    else
    {
        EmitVectorSaturatingBinaryOpByElemSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false));
    }
}

public static void Sqneg_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarSaturatingUnaryOp(context, Intrinsic.Arm64SqnegS);
    }
    else
    {
        EmitScalarSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
    }
}

public static void Sqneg_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorSaturatingUnaryOp(context, Intrinsic.Arm64SqnegV);
    }
    else
    {
        EmitVectorSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
    }
}

public static void Sqrdmulh_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOp(context, Intrinsic.Arm64SqrdmulhS);
    }
    else
    {
        EmitScalarSaturatingBinaryOpSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true));
    }
}

public static void Sqrdmulh_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOp(context, Intrinsic.Arm64SqrdmulhV);
    }
    else
    {
        EmitVectorSaturatingBinaryOpSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true));
    }
}

public static void Sqrdmulh_Ve(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpByElem(context, Intrinsic.Arm64SqrdmulhVe);
    }
    else
    {
        EmitVectorSaturatingBinaryOpByElemSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true));
    }
}

public static void Sqsub_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOp(context, Intrinsic.Arm64SqsubS);
    }
    else
    {
        EmitScalarSaturatingBinaryOpSx(context, flags: SaturatingFlags.Sub);
    }
}

public static void Sqsub_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOp(context, Intrinsic.Arm64SqsubV);
    }
    else
    {
        EmitVectorSaturatingBinaryOpSx(context, flags: SaturatingFlags.Sub);
    }
}

public static void Sqxtn_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOpRd(context, Intrinsic.Arm64SqxtnS);
    }
    else
    {
        EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxSx);
    }
}

public static void Sqxtn_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpRd(context, Intrinsic.Arm64SqxtnV);
    }
    else
    {
        EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxSx);
    }
}

public static void Sqxtun_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOpRd(context, Intrinsic.Arm64SqxtunS);
    }
    else
    {
        EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxZx);
    }
}

public static void Sqxtun_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpRd(context, Intrinsic.Arm64SqxtunV);
    }
    else
    {
        EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxZx);
    }
}
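
// SRHADD: signed rounding halving add, (a + b + 1) >> 1. The inputs are biased
// down by 0x80.. so the unsigned PAVG (which already rounds up) can be used,
// then the bias is added back.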
public static void Srhadd_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SrhaddV);
    }
    else if (Optimizations.UseSse2 && op.Size < 2)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));
        Intrinsic subInst = X86PsubInstruction[op.Size];
        Operand nMinusMask = context.AddIntrinsic(subInst, n, mask);
        Operand mMinusMask = context.AddIntrinsic(subInst, m, mask);
        Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
        Operand res = context.AddIntrinsic(avgInst, nMinusMask, mMinusMask);
        Intrinsic addInst = X86PaddInstruction[op.Size];
        res = context.AddIntrinsic(addInst, mask, res);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpSx(context, (op1, op2) =>
        {
            Operand res = context.Add(op1, op2);
            res = context.Add(res, Const(1L));
            return context.ShiftRightSI(res, Const(1));
        });
    }
}

public static void Ssubl_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SsublV);
    }
    else if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovsxInstruction[op.Size];
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic subInst = X86PsubInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
    }
}

public static void Ssubw_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SsubwV);
    }
    else if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovsxInstruction[op.Size];
        m = context.AddIntrinsic(movInst, m);
        Intrinsic subInst = X86PsubInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
    }
    else
    {
        EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
    }
}

public static void Sub_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarBinaryOp(context, Intrinsic.Arm64SubS);
    }
    else
    {
        EmitScalarBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
    }
}

public static void Sub_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SubV);
    }
    else if (Optimizations.UseSse2)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic subInst = X86PsubInstruction[op.Size];
        Operand res = context.AddIntrinsic(subInst, n, m);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
    }
}

public static void Subhn_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64SubhnV);
    }
    else
    {
        EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: false);
    }
}

public static void Suqadd_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOpRd(context, Intrinsic.Arm64SuqaddS);
    }
    else
    {
        EmitScalarSaturatingBinaryOpSx(context, flags: SaturatingFlags.Accumulate);
    }
}

public static void Suqadd_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpRd(context, Intrinsic.Arm64SuqaddV);
    }
    else
    {
        EmitVectorSaturatingBinaryOpSx(context, flags: SaturatingFlags.Accumulate);
    }
}

public static void Uaba_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64UabaV);
    }
    else
    {
        EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
        {
            return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
        });
    }
}

public static void Uabal_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64UabalV);
    }
    else
    {
        EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
        {
            return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
        });
    }
}

public static void Uabd_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UabdV);
    }
    else if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        EmitSse41VectorUabdOp(context, op, n, m, isLong: false);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) =>
        {
            return EmitAbs(context, context.Subtract(op1, op2));
        });
    }
}

public static void Uabdl_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UabdlV);
    }
    else if (Optimizations.UseSse41 && op.Size < 2)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = op.Size == 0
            ? Intrinsic.X86Pmovzxbw
            : Intrinsic.X86Pmovzxwd;
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        EmitSse41VectorUabdOp(context, op, n, m, isLong: true);
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) =>
        {
            return EmitAbs(context, context.Subtract(op1, op2));
        });
    }
}

public static void Uadalp_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOpRd(context, Intrinsic.Arm64UadalpV);
    }
    else
    {
        EmitAddLongPairwise(context, signed: false, accumulate: true);
    }
}

public static void Uaddl_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UaddlV);
    }
    else if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovzxInstruction[op.Size];
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic addInst = X86PaddInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
    }
}

public static void Uaddlp_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64UaddlpV);
    }
    else
    {
        EmitAddLongPairwise(context, signed: false, accumulate: false);
    }
}

public static void Uaddlv_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64UaddlvV);
    }
    else
    {
        EmitVectorLongAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
    }
}

public static void Uaddw_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UaddwV);
    }
    else if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovzxInstruction[op.Size];
        m = context.AddIntrinsic(movInst, m);
        Intrinsic addInst = X86PaddInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
    }
    else
    {
        EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
    }
}
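
// UHADD: unsigned halving add, same identity as SHADD but with a logical shift
// on the XOR term: (a + b) >> 1 == (a & b) + ((a ^ b) >> 1).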
public static void Uhadd_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UhaddV);
    }
    else if (Optimizations.UseSse2 && op.Size > 0)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Operand res = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
        Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);
        Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psrlw : Intrinsic.X86Psrld;
        res2 = context.AddIntrinsic(shiftInst, res2, Const(1));
        Intrinsic addInst = X86PaddInstruction[op.Size];
        res = context.AddIntrinsic(addInst, res, res2);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) =>
        {
            return context.ShiftRightUI(context.Add(op1, op2), Const(1));
        });
    }
}
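
// UHSUB: unsigned halving subtract. No bias is needed in the unsigned domain:
// with PAVG rounding up, n - PAVG(n, m) == (n - m) >> 1 directly.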
public static void Uhsub_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UhsubV);
    }
    else if (Optimizations.UseSse2 && op.Size < 2)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
        Operand res = context.AddIntrinsic(avgInst, n, m);
        Intrinsic subInst = X86PsubInstruction[op.Size];
        res = context.AddIntrinsic(subInst, n, res);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) =>
        {
            return context.ShiftRightUI(context.Subtract(op1, op2), Const(1));
        });
    }
}

public static void Umax_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UmaxV);
    }
    else if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic maxInst = X86PmaxuInstruction[op.Size];
        Operand res = context.AddIntrinsic(maxInst, n, m);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
    }
}

public static void Umaxp_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UmaxpV);
    }
    else if (Optimizations.UseSsse3)
    {
        EmitSsse3VectorPairwiseOp(context, X86PmaxuInstruction);
    }
    else
    {
        EmitVectorPairwiseOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
    }
}

public static void Umaxv_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64UmaxvV);
    }
    else
    {
        EmitVectorAcrossVectorOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
    }
}

public static void Umin_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UminV);
    }
    else if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic minInst = X86PminuInstruction[op.Size];
        Operand res = context.AddIntrinsic(minInst, n, m);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
    }
}

public static void Uminp_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UminpV);
    }
    else if (Optimizations.UseSsse3)
    {
        EmitSsse3VectorPairwiseOp(context, X86PminuInstruction);
    }
    else
    {
        EmitVectorPairwiseOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
    }
}

public static void Uminv_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64UminvV);
    }
    else
    {
        EmitVectorAcrossVectorOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
    }
}

public static void Umlal_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64UmlalV);
    }
    else if (Optimizations.UseSse41 && op.Size < 2)
    {
        Operand d = GetVec(op.Rd);
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovzxInstruction[op.Size];
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
        Operand res = context.AddIntrinsic(mullInst, n, m);
        Intrinsic addInst = X86PaddInstruction[op.Size + 1];
        context.Copy(d, context.AddIntrinsic(addInst, d, res));
    }
    else
    {
        EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
        {
            return context.Add(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Umlal_Ve(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRdByElem(context, Intrinsic.Arm64UmlalVe);
    }
    else
    {
        EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
        {
            return context.Add(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Umlsl_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64UmlslV);
    }
    else if (Optimizations.UseSse41 && op.Size < 2)
    {
        Operand d = GetVec(op.Rd);
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = op.Size == 0 ? Intrinsic.X86Pmovzxbw : Intrinsic.X86Pmovzxwd;
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
        Operand res = context.AddIntrinsic(mullInst, n, m);
        Intrinsic subInst = X86PsubInstruction[op.Size + 1];
        context.Copy(d, context.AddIntrinsic(subInst, d, res));
    }
    else
    {
        EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
        {
            return context.Subtract(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Umlsl_Ve(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorTernaryOpRdByElem(context, Intrinsic.Arm64UmlslVe);
    }
    else
    {
        EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
        {
            return context.Subtract(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Umull_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UmullV);
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
    }
}

public static void Umull_Ve(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOpByElem(context, Intrinsic.Arm64UmullVe);
    }
    else
    {
        EmitVectorWidenBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
    }
}

public static void Uqadd_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOp(context, Intrinsic.Arm64UqaddS);
    }
    else
    {
        EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Add);
    }
}

public static void Uqadd_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOp(context, Intrinsic.Arm64UqaddV);
    }
    else
    {
        EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Add);
    }
}

public static void Uqsub_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOp(context, Intrinsic.Arm64UqsubS);
    }
    else
    {
        EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
    }
}

public static void Uqsub_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOp(context, Intrinsic.Arm64UqsubV);
    }
    else
    {
        EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
    }
}

public static void Uqxtn_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOpRd(context, Intrinsic.Arm64UqxtnS);
    }
    else
    {
        EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarZxZx);
    }
}

public static void Uqxtn_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpRd(context, Intrinsic.Arm64UqxtnV);
    }
    else
    {
        EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorZxZx);
    }
}
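
// URHADD maps directly onto PAVGB/PAVGW, which compute exactly (a + b + 1) >> 1.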
public static void Urhadd_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UrhaddV);
    }
    else if (Optimizations.UseSse2 && op.Size < 2)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
        Operand res = context.AddIntrinsic(avgInst, n, m);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) =>
        {
            Operand res = context.Add(op1, op2);
            res = context.Add(res, Const(1L));
            return context.ShiftRightUI(res, Const(1));
        });
    }
}

public static void Usqadd_S(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOpRd(context, Intrinsic.Arm64UsqaddS);
    }
    else
    {
        EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
    }
}

public static void Usqadd_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpRd(context, Intrinsic.Arm64UsqaddV);
    }
    else
    {
        EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
    }
}

public static void Usubl_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UsublV);
    }
    else if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovzxInstruction[op.Size];
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic subInst = X86PsubInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
    }
}

public static void Usubw_V(ArmEmitterContext context)
{
    if (Optimizations.UseAdvSimd)
    {
        InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UsubwV);
    }
    else if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovzxInstruction[op.Size];
        m = context.AddIntrinsic(movInst, m);
        Intrinsic subInst = X86PsubInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
    }
    else
    {
        EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
    }
}

private static Operand EmitAbs(ArmEmitterContext context, Operand value)
{
    Operand isPositive = context.ICompareGreaterOrEqual(value, Const(value.Type, 0));
    return context.ConditionalSelect(isPositive, value, context.Negate(value));
}
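
// Shared fallback for S/UADDLP and S/UADALP: sums adjacent element pairs into
// elements of twice the size, optionally accumulating into the existing Rd value.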
private static void EmitAddLongPairwise(ArmEmitterContext context, bool signed, bool accumulate)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;
    Operand res = context.VectorZero();
    int pairs = op.GetPairsCount() >> op.Size;
    for (int index = 0; index < pairs; index++)
    {
        int pairIndex = index << 1;
        Operand ne0 = EmitVectorExtract(context, op.Rn, pairIndex, op.Size, signed);
        Operand ne1 = EmitVectorExtract(context, op.Rn, pairIndex + 1, op.Size, signed);
        Operand e = context.Add(ne0, ne1);
        if (accumulate)
        {
            Operand de = EmitVectorExtract(context, op.Rd, index, op.Size + 1, signed);
            e = context.Add(e, de);
        }
        res = EmitVectorInsert(context, res, e, index, op.Size + 1);
    }
    context.Copy(GetVec(op.Rd), res);
}
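
// SQDMULH/SQRDMULH core: computes (2 * n * m [+ round]) >> eSize on 64-bit
// intermediates. In the rounding path the left shift can wrap the 64-bit value
// for the single case n == m == -2^(eSize-1); the compare-and-negate below
// appears to restore the out-of-range positive result so the saturating
// wrapper clamps it and sets QC.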
private static Operand EmitDoublingMultiplyHighHalf(
    ArmEmitterContext context,
    Operand n,
    Operand m,
    bool round)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    int eSize = 8 << op.Size;
    Operand res = context.Multiply(n, m);
    if (!round)
    {
        res = context.ShiftRightSI(res, Const(eSize - 1));
    }
    else
    {
        long roundConst = 1L << (eSize - 1);
        res = context.ShiftLeft(res, Const(1));
        res = context.Add(res, Const(roundConst));
        res = context.ShiftRightSI(res, Const(eSize));
        Operand isIntMin = context.ICompareEqual(res, Const((long)int.MinValue));
        res = context.ConditionalSelect(isIntMin, context.Negate(res), res);
    }
    return res;
}
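
// {R}ADDHN/{R}SUBHN fallback: compute at the wide size, optionally add the
// rounding constant 1 << (eSize - 1), then keep only the high half of each
// element; the *2 forms write the upper half of Rd (part != 0).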
private static void EmitHighNarrow(ArmEmitterContext context, Func2I emit, bool round)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    int elems = 8 >> op.Size;
    int eSize = 8 << op.Size;
    int part = op.RegisterSize == RegisterSize.Simd128 ? elems : 0;
    Operand d = GetVec(op.Rd);
    Operand res = part == 0 ? context.VectorZero() : context.Copy(d);
    long roundConst = 1L << (eSize - 1);
    for (int index = 0; index < elems; index++)
    {
        Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size + 1);
        Operand me = EmitVectorExtractZx(context, op.Rm, index, op.Size + 1);
        Operand de = emit(ne, me);
        if (round)
        {
            de = context.Add(de, Const(roundConst));
        }
        de = context.ShiftRightUI(de, Const(eSize));
        res = EmitVectorInsert(context, res, de, part + index, op.Size);
    }
    context.Copy(d, res);
}

private static Operand EmitMax64Op(ArmEmitterContext context, Operand op1, Operand op2, bool signed)
{
    Debug.Assert(op1.Type == OperandType.I64 && op2.Type == OperandType.I64);
    Operand cmp = signed
        ? context.ICompareGreaterOrEqual(op1, op2)
        : context.ICompareGreaterOrEqualUI(op1, op2);
    return context.ConditionalSelect(cmp, op1, op2);
}

private static Operand EmitMin64Op(ArmEmitterContext context, Operand op1, Operand op2, bool signed)
{
    Debug.Assert(op1.Type == OperandType.I64 && op2.Type == OperandType.I64);
    Operand cmp = signed
        ? context.ICompareLessOrEqual(op1, op2)
        : context.ICompareLessOrEqualUI(op1, op2);
    return context.ConditionalSelect(cmp, op1, op2);
}

private static void EmitSse41ScalarRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;
    Operand n = GetVec(op.Rn);
    Operand res;
    if (roundMode != FPRoundingMode.ToNearestAway)
    {
        Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundsd : Intrinsic.X86Roundss;
        res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));
    }
    else
    {
        res = EmitSse41RoundToNearestWithTiesToAwayOpF(context, n, scalar: true);
    }
    if ((op.Size & 1) != 0)
    {
        res = context.VectorZeroUpper64(res);
    }
    else
    {
        res = context.VectorZeroUpper96(res);
    }
    context.Copy(GetVec(op.Rd), res);
}

private static void EmitSse41VectorRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;
    Operand n = GetVec(op.Rn);
    Operand res;
    if (roundMode != FPRoundingMode.ToNearestAway)
    {
        Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundpd : Intrinsic.X86Roundps;
        res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));
    }
    else
    {
        res = EmitSse41RoundToNearestWithTiesToAwayOpF(context, n, scalar: false);
    }
    if (op.RegisterSize == RegisterSize.Simd64)
    {
        res = context.VectorZeroUpper64(res);
    }
    context.Copy(GetVec(op.Rd), res);
}
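
// Rounds a single-precision value to its top 8 mantissa bits (add 1 << 14, then
// clear the low 15 bits), passing NaN/Inf inputs through unchanged via the
// exponent-mask compare and blend; this appears to serve the estimate-style
// (reciprocal/reciprocal-sqrt) paths.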
private static Operand EmitSse41Round32Exp8OpF(ArmEmitterContext context, Operand value, bool scalar)
{
    Operand roundMask;
    Operand truncMask;
    Operand expMask;
    if (scalar)
    {
        roundMask = X86GetScalar(context, 0x4000);
        truncMask = X86GetScalar(context, unchecked((int)0xFFFF8000));
        expMask = X86GetScalar(context, 0x7F800000);
    }
    else
    {
        roundMask = X86GetAllElements(context, 0x4000);
        truncMask = X86GetAllElements(context, unchecked((int)0xFFFF8000));
        expMask = X86GetAllElements(context, 0x7F800000);
    }
    Operand oValue = value;
    Operand masked = context.AddIntrinsic(Intrinsic.X86Pand, value, expMask);
    Operand isNaNInf = context.AddIntrinsic(Intrinsic.X86Pcmpeqd, masked, expMask);
    value = context.AddIntrinsic(Intrinsic.X86Paddd, value, roundMask);
    value = context.AddIntrinsic(Intrinsic.X86Pand, value, truncMask);
    return context.AddIntrinsic(Intrinsic.X86Blendvps, value, oValue, isNaNInf);
}
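
// FRECPS/FRSQRTS special-case handling: after shifting out the sign bit, this
// builds a mask of lanes where one operand is zero and the other infinite (the
// 0 * Inf case) and blends `mask` (the caller's precomputed default result for
// that case) over `res` there.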
private static Operand EmitSse41RecipStepSelectOpF(
    ArmEmitterContext context,
    Operand n,
    Operand m,
    Operand res,
    Operand mask,
    bool scalar,
    int sizeF)
{
    Intrinsic cmpOp;
    Intrinsic shlOp;
    Intrinsic blendOp;
    Operand zero = context.VectorZero();
    Operand expMask;
    if (sizeF == 0)
    {
        cmpOp = Intrinsic.X86Pcmpeqd;
        shlOp = Intrinsic.X86Pslld;
        blendOp = Intrinsic.X86Blendvps;
        expMask = scalar ? X86GetScalar(context, 0x7F800000 << 1) : X86GetAllElements(context, 0x7F800000 << 1);
    }
    else /* if (sizeF == 1) */
    {
        cmpOp = Intrinsic.X86Pcmpeqq;
        shlOp = Intrinsic.X86Psllq;
        blendOp = Intrinsic.X86Blendvpd;
        expMask = scalar ? X86GetScalar(context, 0x7FF0000000000000L << 1) : X86GetAllElements(context, 0x7FF0000000000000L << 1);
    }
    n = context.AddIntrinsic(shlOp, n, Const(1));
    m = context.AddIntrinsic(shlOp, m, Const(1));
    Operand nZero = context.AddIntrinsic(cmpOp, n, zero);
    Operand mZero = context.AddIntrinsic(cmpOp, m, zero);
    Operand nInf = context.AddIntrinsic(cmpOp, n, expMask);
    Operand mInf = context.AddIntrinsic(cmpOp, m, expMask);
    Operand nmZero = context.AddIntrinsic(Intrinsic.X86Por, nZero, mZero);
    Operand nmInf = context.AddIntrinsic(Intrinsic.X86Por, nInf, mInf);
    Operand nmZeroInf = context.AddIntrinsic(Intrinsic.X86Pand, nmZero, nmInf);
    return context.AddIntrinsic(blendOp, res, mask, nmZeroInf);
}
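
// Classifies NaNs per lane: an unordered self-compare (UnorderedQ) finds all
// NaNs, and testing the quiet bit (bit 22 single, bit 51 double) splits them
// into quiet and signaling masks.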
public static void EmitSse2VectorIsNaNOpF(
    ArmEmitterContext context,
    Operand opF,
    out Operand qNaNMask,
    out Operand sNaNMask,
    bool? isQNaN = null)
{
    IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;
    if ((op.Size & 1) == 0)
    {
        const int QBit = 22;
        Operand qMask = X86GetAllElements(context, 1 << QBit);
        Operand mask1 = context.AddIntrinsic(Intrinsic.X86Cmpps, opF, opF, Const((int)CmpCondition.UnorderedQ));
        Operand mask2 = context.AddIntrinsic(Intrinsic.X86Pand, opF, qMask);
        mask2 = context.AddIntrinsic(Intrinsic.X86Cmpps, mask2, qMask, Const((int)CmpCondition.Equal));
        qNaNMask = isQNaN == null || (bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andps, mask2, mask1) : default;
        sNaNMask = isQNaN == null || !(bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andnps, mask2, mask1) : default;
    }
    else /* if ((op.Size & 1) == 1) */
    {
        const int QBit = 51;
        Operand qMask = X86GetAllElements(context, 1L << QBit);
        Operand mask1 = context.AddIntrinsic(Intrinsic.X86Cmppd, opF, opF, Const((int)CmpCondition.UnorderedQ));
        Operand mask2 = context.AddIntrinsic(Intrinsic.X86Pand, opF, qMask);
        mask2 = context.AddIntrinsic(Intrinsic.X86Cmppd, mask2, qMask, Const((int)CmpCondition.Equal));
        qNaNMask = isQNaN == null || (bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andpd, mask2, mask1) : default;
        sNaNMask = isQNaN == null || !(bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andnpd, mask2, mask1) : default;
    }
}
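
// ARM NaN propagation (FPProcessNaNs) on top of SSE4.1: a signaling NaN in n
// beats one in m, which beats a quiet NaN in n, then in m; the selected NaN is
// quieted by OR-ing in the quiet bit, and fully ordered lanes take emit(n, m).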
public static Operand EmitSse41ProcessNaNsOpF(
    ArmEmitterContext context,
    Func2I emit,
    bool scalar,
    Operand n = default,
    Operand m = default)
{
    Operand nCopy = n == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rn)) : n;
    Operand mCopy = m == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rm)) : m;
    EmitSse2VectorIsNaNOpF(context, nCopy, out Operand nQNaNMask, out Operand nSNaNMask);
    EmitSse2VectorIsNaNOpF(context, mCopy, out _, out Operand mSNaNMask, isQNaN: false);
    int sizeF = ((IOpCodeSimd)context.CurrOp).Size & 1;
    if (sizeF == 0)
    {
        const int QBit = 22;
        Operand qMask = scalar ? X86GetScalar(context, 1 << QBit) : X86GetAllElements(context, 1 << QBit);
        Operand resNaNMask = context.AddIntrinsic(Intrinsic.X86Pandn, mSNaNMask, nQNaNMask);
        resNaNMask = context.AddIntrinsic(Intrinsic.X86Por, resNaNMask, nSNaNMask);
        Operand resNaN = context.AddIntrinsic(Intrinsic.X86Blendvps, mCopy, nCopy, resNaNMask);
        resNaN = context.AddIntrinsic(Intrinsic.X86Por, resNaN, qMask);
        Operand resMask = context.AddIntrinsic(Intrinsic.X86Cmpps, nCopy, mCopy, Const((int)CmpCondition.OrderedQ));
        Operand res = context.AddIntrinsic(Intrinsic.X86Blendvps, resNaN, emit(nCopy, mCopy), resMask);
        if (n != default || m != default)
        {
            return res;
        }
        if (scalar)
        {
            res = context.VectorZeroUpper96(res);
        }
        else if (((OpCodeSimdReg)context.CurrOp).RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);
        return default;
    }
    else /* if (sizeF == 1) */
    {
        const int QBit = 51;
        Operand qMask = scalar ? X86GetScalar(context, 1L << QBit) : X86GetAllElements(context, 1L << QBit);
        Operand resNaNMask = context.AddIntrinsic(Intrinsic.X86Pandn, mSNaNMask, nQNaNMask);
        resNaNMask = context.AddIntrinsic(Intrinsic.X86Por, resNaNMask, nSNaNMask);
        Operand resNaN = context.AddIntrinsic(Intrinsic.X86Blendvpd, mCopy, nCopy, resNaNMask);
        resNaN = context.AddIntrinsic(Intrinsic.X86Por, resNaN, qMask);
        Operand resMask = context.AddIntrinsic(Intrinsic.X86Cmppd, nCopy, mCopy, Const((int)CmpCondition.OrderedQ));
        Operand res = context.AddIntrinsic(Intrinsic.X86Blendvpd, resNaN, emit(nCopy, mCopy), resMask);
        if (n != default || m != default)
        {
            return res;
        }
        if (scalar)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);
        return default;
    }
}
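
// SSE MAXPS/MINPS are not symmetric and treat +/-0 as equal, so the sign is
// recomputed separately: AND of the sign bits for max, OR for min, giving
// max(+0, -0) == +0 and min(+0, -0) == -0 as ARM requires.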
private static Operand EmitSse2VectorMaxMinOpF(ArmEmitterContext context, Operand n, Operand m, bool isMax)
{
    IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;
    if ((op.Size & 1) == 0)
    {
        Operand mask = X86GetAllElements(context, -0f);
        Operand res = context.AddIntrinsic(isMax ? Intrinsic.X86Maxps : Intrinsic.X86Minps, n, m);
        res = context.AddIntrinsic(Intrinsic.X86Andnps, mask, res);
        Operand resSign = context.AddIntrinsic(isMax ? Intrinsic.X86Pand : Intrinsic.X86Por, n, m);
        resSign = context.AddIntrinsic(Intrinsic.X86Andps, mask, resSign);
        return context.AddIntrinsic(Intrinsic.X86Por, res, resSign);
    }
    else /* if ((op.Size & 1) == 1) */
    {
        Operand mask = X86GetAllElements(context, -0d);
        Operand res = context.AddIntrinsic(isMax ? Intrinsic.X86Maxpd : Intrinsic.X86Minpd, n, m);
        res = context.AddIntrinsic(Intrinsic.X86Andnpd, mask, res);
        Operand resSign = context.AddIntrinsic(isMax ? Intrinsic.X86Pand : Intrinsic.X86Por, n, m);
        resSign = context.AddIntrinsic(Intrinsic.X86Andpd, mask, resSign);
        return context.AddIntrinsic(Intrinsic.X86Por, res, resSign);
    }
}
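
// FMAXNM/FMINNM: when exactly one operand is a quiet NaN it is replaced with
// -Inf (max) or +Inf (min) so the numeric operand always wins; the result then
// flows through the normal NaN-propagating max/min path.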
        private static Operand EmitSse41MaxMinNumOpF(
            ArmEmitterContext context,
            bool isMaxNum,
            bool scalar,
            Operand n = default,
            Operand m = default)
        {
            Operand nCopy = n == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rn)) : n;
            Operand mCopy = m == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rm)) : m;

            EmitSse2VectorIsNaNOpF(context, nCopy, out Operand nQNaNMask, out _, isQNaN: true);
            EmitSse2VectorIsNaNOpF(context, mCopy, out Operand mQNaNMask, out _, isQNaN: true);

            int sizeF = ((IOpCodeSimd)context.CurrOp).Size & 1;

            if (sizeF == 0)
            {
                Operand negInfMask = scalar
                    ? X86GetScalar(context, isMaxNum ? float.NegativeInfinity : float.PositiveInfinity)
                    : X86GetAllElements(context, isMaxNum ? float.NegativeInfinity : float.PositiveInfinity);

                Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnps, mQNaNMask, nQNaNMask);
                Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnps, nQNaNMask, mQNaNMask);

                nCopy = context.AddIntrinsic(Intrinsic.X86Blendvps, nCopy, negInfMask, nMask);
                mCopy = context.AddIntrinsic(Intrinsic.X86Blendvps, mCopy, negInfMask, mMask);

                Operand res = EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: isMaxNum);
                }, scalar: scalar, nCopy, mCopy);

                if (n != default || m != default)
                {
                    return res;
                }

                if (scalar)
                {
                    res = context.VectorZeroUpper96(res);
                }
                else if (((OpCodeSimdReg)context.CurrOp).RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);

                return default;
            }
            else /* if (sizeF == 1) */
            {
                Operand negInfMask = scalar
                    ? X86GetScalar(context, isMaxNum ? double.NegativeInfinity : double.PositiveInfinity)
                    : X86GetAllElements(context, isMaxNum ? double.NegativeInfinity : double.PositiveInfinity);

                Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnpd, mQNaNMask, nQNaNMask);
                Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnpd, nQNaNMask, mQNaNMask);

                nCopy = context.AddIntrinsic(Intrinsic.X86Blendvpd, nCopy, negInfMask, nMask);
                mCopy = context.AddIntrinsic(Intrinsic.X86Blendvpd, mCopy, negInfMask, mMask);

                Operand res = EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: isMaxNum);
                }, scalar: scalar, nCopy, mCopy);

                if (n != default || m != default)
                {
                    return res;
                }

                if (scalar)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);

                return default;
            }
        }
        private enum AddSub
        {
            None,
            Add,
            Subtract,
        }
        private static void EmitSse41VectorMul_AddSub(ArmEmitterContext context, AddSub addSub)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            Operand res;

            if (op.Size == 0)
            {
                Operand ns8 = context.AddIntrinsic(Intrinsic.X86Psrlw, n, Const(8));
                Operand ms8 = context.AddIntrinsic(Intrinsic.X86Psrlw, m, Const(8));

                res = context.AddIntrinsic(Intrinsic.X86Pmullw, ns8, ms8);
                res = context.AddIntrinsic(Intrinsic.X86Psllw, res, Const(8));

                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);

                Operand mask = X86GetAllElements(context, 0x00FF00FF);

                res = context.AddIntrinsic(Intrinsic.X86Pblendvb, res, res2, mask);
            }
            else if (op.Size == 1)
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmulld, n, m);
            }

            Operand d = GetVec(op.Rd);

            if (addSub == AddSub.Add)
            {
                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, d, res);
            }
            else if (addSub == AddSub.Subtract)
            {
                Intrinsic subInst = X86PsubInstruction[op.Size];

                res = context.AddIntrinsic(subInst, d, res);
            }

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(d, res);
        }
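
        // Signed absolute difference: cmpMask is all-ones in lanes where n > m, so the
        // result selects (n - m) in those lanes and (m - n) elsewhere, i.e. |n - m|
        // without a dedicated absolute-difference instruction.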
        private static void EmitSse41VectorSabdOp(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;

            Intrinsic cmpgtInst = X86PcmpgtInstruction[size];

            Operand cmpMask = context.AddIntrinsic(cmpgtInst, n, m);

            Intrinsic subInst = X86PsubInstruction[size];

            Operand res = context.AddIntrinsic(subInst, n, m);
            res = context.AddIntrinsic(Intrinsic.X86Pand, cmpMask, res);

            Operand res2 = context.AddIntrinsic(subInst, m, n);
            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);

            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);

            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
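
        // Unsigned absolute difference: there is no unsigned Pcmpgt, so max(m, n) == m
        // identifies the lanes where m >= n; Pandn against an all-ones vector inverts that
        // into an "n > m" mask, and the two subtraction orders are then blended exactly as
        // in the signed case above.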
        private static void EmitSse41VectorUabdOp(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;

            Intrinsic maxInst = X86PmaxuInstruction[size];

            Operand max = context.AddIntrinsic(maxInst, m, n);

            Intrinsic cmpeqInst = X86PcmpeqInstruction[size];

            Operand cmpMask = context.AddIntrinsic(cmpeqInst, max, m);

            Operand onesMask = X86GetAllElements(context, -1L);

            cmpMask = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, onesMask);

            Intrinsic subInst = X86PsubInstruction[size];

            Operand res = context.AddIntrinsic(subInst, n, m);
            Operand res2 = context.AddIntrinsic(subInst, m, n);

            res = context.AddIntrinsic(Intrinsic.X86Pand, cmpMask, res);
            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);

            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);

            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
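
        // Composes a 128-bit left shift (shift < 64) from 64-bit operations: Pslldq moves
        // the low qword into the high position, Psrlq extracts the bits that carry across
        // the 64-bit boundary, and Por merges them with the per-qword Psllq result.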
        private static Operand EmitSse2Sll_128(ArmEmitterContext context, Operand op, int shift)
        {
            // The upper part of op is assumed to be zero.
            Debug.Assert(shift >= 0 && shift < 64);

            if (shift == 0)
            {
                return op;
            }

            Operand high = context.AddIntrinsic(Intrinsic.X86Pslldq, op, Const(8));
            high = context.AddIntrinsic(Intrinsic.X86Psrlq, high, Const(64 - shift));

            Operand low = context.AddIntrinsic(Intrinsic.X86Psllq, op, Const(shift));

            return context.AddIntrinsic(Intrinsic.X86Por, high, low);
        }
    }
}