// InstEmitSimdArithmetic.cs
// https://github.com/intel/ARM_NEON_2_x86_SSE/blob/master/NEON_2_SSE.h
// https://www.agner.org/optimize/#vectorclass @ vectori128.h

using ARMeilleure.Decoders;
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.State;
using ARMeilleure.Translation;
using System;
using System.Diagnostics;

using static ARMeilleure.Instructions.InstEmitHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper32;
using static ARMeilleure.IntermediateRepresentation.Operand.Factory;

namespace ARMeilleure.Instructions
{
    using Func2I = Func<Operand, Operand, Operand>;

    static partial class InstEmit
    {
        public static void Abs_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOp(context, Intrinsic.Arm64AbsS);
            }
            else
            {
                EmitScalarUnaryOpSx(context, (op1) => EmitAbs(context, op1));
            }
        }

        public static void Abs_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64AbsV);
            }
            else
            {
                EmitVectorUnaryOpSx(context, (op1) => EmitAbs(context, op1));
            }
        }

        public static void Add_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOp(context, Intrinsic.Arm64AddS);
            }
            else
            {
                EmitScalarBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Add_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64AddV);
            }
            else if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic addInst = X86PaddInstruction[op.Size];

                Operand res = context.AddIntrinsic(addInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Addhn_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64AddhnV);
            }
            else
            {
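                // ADDHN adds each pair of elements and narrows the result to the
                // most significant half of each sum; round: false selects ADDHN
                // rather than RADDHN, which adds a rounding constant before narrowing.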
                EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: false);
            }
        }

        public static void Addp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOp(context, Intrinsic.Arm64AddpS);
            }
            else
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                Operand ne0 = EmitVectorExtractZx(context, op.Rn, 0, op.Size);
                Operand ne1 = EmitVectorExtractZx(context, op.Rn, 1, op.Size);

                Operand res = context.Add(ne0, ne1);

                context.Copy(GetVec(op.Rd), EmitVectorInsert(context, context.VectorZero(), res, 0, op.Size));
            }
        }

        public static void Addp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64AddpV);
            }
            else if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PaddInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Addv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64AddvV);
            }
            else
            {
                EmitVectorAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Cls_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64ClsV);
            }
            else
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                Operand res = context.VectorZero();

                int elems = op.GetBytesCount() >> op.Size;
                int eSize = 8 << op.Size;

                for (int index = 0; index < elems; index++)
                {
                    Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);
                    Operand de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountLeadingSigns)), ne, Const(eSize));

                    res = EmitVectorInsert(context, res, de, index, op.Size);
                }

                context.Copy(GetVec(op.Rd), res);
            }
        }

        public static void Clz_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64ClzV);
            }
            else
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                int eSize = 8 << op.Size;

                Operand res = eSize switch
                {
                    8  => Clz_V_I8 (context, GetVec(op.Rn)),
                    16 => Clz_V_I16(context, GetVec(op.Rn)),
                    32 => Clz_V_I32(context, GetVec(op.Rn)),
                    _  => default,
                };

                if (res != default)
                {
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                }
                else
                {
                    int elems = op.GetBytesCount() >> op.Size;

                    res = context.VectorZero();

                    for (int index = 0; index < elems; index++)
                    {
                        Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);
                        Operand de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountLeadingZeros)), ne, Const(eSize));

                        res = EmitVectorInsert(context, res, de, index, op.Size);
                    }
                }

                context.Copy(GetVec(op.Rd), res);
            }
        }

        private static Operand Clz_V_I8(ArmEmitterContext context, Operand arg)
        {
            if (!Optimizations.UseSsse3)
            {
                return default;
            }

            // CLZ nibble table.
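            // One byte per nibble value i, used as a PSHUFB lookup table: bytes 0..7
            // hold clz4(i) = 4, 3, 2, 2, 1, 1, 1, 1, and indexes 8..15 fall in the
            // zeroed upper half of the scalar, giving clz4(i) = 0 (top bit already set).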
            Operand clzTable = X86GetScalar(context, 0x01_01_01_01_02_02_03_04);

            Operand maskLow = X86GetAllElements(context, 0x0f_0f_0f_0f);
            Operand c04 = X86GetAllElements(context, 0x04_04_04_04);

            // CLZ of low 4 bits of elements in arg.
            Operand loClz = context.AddIntrinsic(Intrinsic.X86Pshufb, clzTable, arg);

            // Get the high 4 bits of elements in arg.
            Operand hiArg = context.AddIntrinsic(Intrinsic.X86Psrlw, arg, Const(4));

            hiArg = context.AddIntrinsic(Intrinsic.X86Pand, hiArg, maskLow);

            // CLZ of high 4 bits of elements in arg.
            Operand hiClz = context.AddIntrinsic(Intrinsic.X86Pshufb, clzTable, hiArg);

            // If high 4 bits are not all zero, we discard the CLZ of the low 4 bits.
            Operand mask = context.AddIntrinsic(Intrinsic.X86Pcmpeqb, hiClz, c04);

            loClz = context.AddIntrinsic(Intrinsic.X86Pand, loClz, mask);

            return context.AddIntrinsic(Intrinsic.X86Paddb, loClz, hiClz);
        }

        private static Operand Clz_V_I16(ArmEmitterContext context, Operand arg)
        {
            if (!Optimizations.UseSsse3)
            {
                return default;
            }

            Operand maskSwap = X86GetElements(context, 0x80_0f_80_0d_80_0b_80_09, 0x80_07_80_05_80_03_80_01);
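            // PSHUFB control that moves each 16-bit element's high-byte CLZ down into
            // the low byte position and zeroes the high byte (0x80 entries), assuming
            // X86GetElements packs its second argument into the low 64 bits.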
            Operand maskLow = X86GetAllElements(context, 0x00ff_00ff);
            Operand c0008 = X86GetAllElements(context, 0x0008_0008);

            // CLZ pair of high 8 and low 8 bits of elements in arg.
            Operand hiloClz = Clz_V_I8(context, arg);

            // Get CLZ of low 8 bits in each pair.
            Operand loClz = context.AddIntrinsic(Intrinsic.X86Pand, hiloClz, maskLow);

            // Get CLZ of high 8 bits in each pair.
            Operand hiClz = context.AddIntrinsic(Intrinsic.X86Pshufb, hiloClz, maskSwap);

            // If high 8 bits are not all zero, we discard the CLZ of the low 8 bits.
            Operand mask = context.AddIntrinsic(Intrinsic.X86Pcmpeqw, hiClz, c0008);

            loClz = context.AddIntrinsic(Intrinsic.X86Pand, loClz, mask);

            return context.AddIntrinsic(Intrinsic.X86Paddw, loClz, hiClz);
        }

        private static Operand Clz_V_I32(ArmEmitterContext context, Operand arg)
        {
            // TODO: Use vplzcntd when AVX-512 is supported.
            if (!Optimizations.UseSse2)
            {
                return default;
            }

            Operand AddVectorI32(Operand op0, Operand op1) => context.AddIntrinsic(Intrinsic.X86Paddd, op0, op1);
            Operand SubVectorI32(Operand op0, Operand op1) => context.AddIntrinsic(Intrinsic.X86Psubd, op0, op1);
            Operand ShiftRightVectorUI32(Operand op0, int imm8) => context.AddIntrinsic(Intrinsic.X86Psrld, op0, Const(imm8));
            Operand OrVector(Operand op0, Operand op1) => context.AddIntrinsic(Intrinsic.X86Por, op0, op1);
            Operand AndVector(Operand op0, Operand op1) => context.AddIntrinsic(Intrinsic.X86Pand, op0, op1);
            Operand NotVector(Operand op0) => context.AddIntrinsic(Intrinsic.X86Pandn, op0, context.VectorOne());
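            // PANDN computes ~op0 & op1; with an all-ones second operand it is a bitwise NOT.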
            Operand c55555555 = X86GetAllElements(context, 0x55555555);
            Operand c33333333 = X86GetAllElements(context, 0x33333333);
            Operand c0f0f0f0f = X86GetAllElements(context, 0x0f0f0f0f);
            Operand c0000003f = X86GetAllElements(context, 0x0000003f);

            Operand tmp0;
            Operand tmp1;
            Operand res;

            // Set all bits after highest set bit to 1.
            res = OrVector(ShiftRightVectorUI32(arg, 1), arg);
            res = OrVector(ShiftRightVectorUI32(res, 2), res);
            res = OrVector(ShiftRightVectorUI32(res, 4), res);
            res = OrVector(ShiftRightVectorUI32(res, 8), res);
            res = OrVector(ShiftRightVectorUI32(res, 16), res);

            // Make leading 0s into leading 1s.
            res = NotVector(res);

            // Count leading 1s, which is the population count.
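            // Classic SWAR popcount (see Hacker's Delight, ch. 5): sum adjacent bits
            // into 2-bit fields, then 2-bit fields into 4-bit fields, then fold the
            // byte and halfword sums; the final mask keeps the 0..32 count per element.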
            tmp0 = ShiftRightVectorUI32(res, 1);
            tmp0 = AndVector(tmp0, c55555555);
            res = SubVectorI32(res, tmp0);

            tmp0 = ShiftRightVectorUI32(res, 2);
            tmp0 = AndVector(tmp0, c33333333);
            tmp1 = AndVector(res, c33333333);
            res = AddVectorI32(tmp0, tmp1);

            tmp0 = ShiftRightVectorUI32(res, 4);
            tmp0 = AddVectorI32(tmp0, res);
            res = AndVector(tmp0, c0f0f0f0f);

            tmp0 = ShiftRightVectorUI32(res, 8);
            res = AddVectorI32(tmp0, res);

            tmp0 = ShiftRightVectorUI32(res, 16);
            res = AddVectorI32(tmp0, res);

            res = AndVector(res, c0000003f);

            return res;
        }

        public static void Cnt_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64CntV);
            }
            else
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                Operand res = context.VectorZero();

                int elems = op.RegisterSize == RegisterSize.Simd128 ? 16 : 8;

                for (int index = 0; index < elems; index++)
                {
                    Operand ne = EmitVectorExtractZx(context, op.Rn, index, 0);

                    Operand de;

                    if (Optimizations.UsePopCnt)
                    {
                        de = context.AddIntrinsicLong(Intrinsic.X86Popcnt, ne);
                    }
                    else
                    {
                        de = EmitCountSetBits8(context, ne);
                    }

                    res = EmitVectorInsert(context, res, de, index, 0);
                }

                context.Copy(GetVec(op.Rd), res);
            }
        }

        public static void Fabd_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FabdS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subss, GetVec(op.Rn), GetVec(op.Rm));
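                    // EmitFloatAbs clears the sign bit; judging by the call sites in this
                    // file, the two flags select single vs. double precision and scalar
                    // vs. full-vector operation (here: single precision, scalar).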
                    res = EmitFloatAbs(context, res, true, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subsd, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, false, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);

                    return EmitUnaryMathCall(context, nameof(Math.Abs), res);
                });
            }
        }

        public static void Fabd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FabdV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subps, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, true, true);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subpd, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, false, true);

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);

                    return EmitUnaryMathCall(context, nameof(Math.Abs), res);
                });
            }
        }

        public static void Fabs_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FabsS);
            }
            else if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                if (op.Size == 0)
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), true, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), false, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Abs), op1);
                });
            }
        }

        public static void Fabs_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FabsV);
            }
            else if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), true, true);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), false, true);

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Abs), op1);
                });
            }
        }

        public static void Fadd_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FaddS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Addss, Intrinsic.X86Addsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Fadd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FaddV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Addps, Intrinsic.X86Addpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Faddp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FaddpS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse3)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                if ((op.Size & 1) == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddps, GetVec(op.Rn), GetVec(op.Rn));

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if ((op.Size & 1) == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddpd, GetVec(op.Rn), GetVec(op.Rn));

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Faddp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FaddpV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;

                        Intrinsic addInst = (op.Size & 1) == 0 ? Intrinsic.X86Addps : Intrinsic.X86Addpd;

                        return context.AddIntrinsic(addInst, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Fdiv_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FdivS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Divss, Intrinsic.X86Divsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPDiv), op1, op2);
                });
            }
        }

        public static void Fdiv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FdivV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Divps, Intrinsic.X86Divpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPDiv), op1, op2);
                });
            }
        }

        public static void Fmadd_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarTernaryOpF(context, Intrinsic.Arm64FmaddS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand res;

                if (op.Size == 0)
                {
                    if (Optimizations.UseFma)
                    {
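                        // VFMADD231SS: res = n * m + a with a single rounding step,
                        // matching the fused semantics of the ARM FMADD instruction.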
                        res = context.AddIntrinsic(Intrinsic.X86Vfmadd231ss, a, n, m);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Addss, a, res);
                    }

                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfmadd231sd, a, n, m);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Addsd, a, res);
                    }

                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fmax_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FmaxS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                }, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmax_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FmaxV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                }, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmaxnm_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FmaxnmS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnm_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FmaxnmV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FmaxnmpS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2ScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: true, op1, op2);
                });
            }
            else
            {
                EmitScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FmaxnmpV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FmaxnmvV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FmaxpV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmaxv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FmaxvV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmin_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FminS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                }, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fmin_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FminV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                }, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fminnm_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FminnmS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnm_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FminnmV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FminnmpS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2ScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: true, op1, op2);
                });
            }
            else
            {
                EmitScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FminnmpV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FminnmvV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FminpV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fminv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FminvV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fmla_Se(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarTernaryOpFRdByElem(context, Intrinsic.Arm64FmlaSe);
            }
            else if (Optimizations.UseFma)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
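                    // SHUFPS immediate with Index in all four 2-bit fields broadcasts
                    // element [Index] of m to every lane (lane 0 feeds the scalar FMA).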
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));

                    res = context.AddIntrinsic(Intrinsic.X86Vfmadd231ss, d, n, res);

                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
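                    // SHUFPD immediate with Index in both selector bits broadcasts
                    // element [Index] of m to both double-precision lanes.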
  1022. Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
  1023. res = context.AddIntrinsic(Intrinsic.X86Vfmadd231sd, d, n, res);
  1024. context.Copy(d, context.VectorZeroUpper64(res));
  1025. }
  1026. }
  1027. else
  1028. {
  1029. EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
  1030. {
  1031. return context.Add(op1, context.Multiply(op2, op3));
  1032. });
  1033. }
  1034. }
        public static void Fmla_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpFRd(context, Intrinsic.Arm64FmlaV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                Operand res;

                if (sizeF == 0)
                {
                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfmadd231ps, d, n, m);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);
                    }

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfmadd231pd, d, n, m);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);
                    }

                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fmla_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpFRdByElem(context, Intrinsic.Arm64FmlaVe);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));

                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfmadd231ps, d, n, res);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                        res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);
                    }

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));

                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfmadd231pd, d, n, res);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                        res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);
                    }

                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }
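        // FMLS negates the product: d <- d - n * m. VFNMADD231 computes dst - src1 * src2 in a
        // single rounding, so the FMA path needs no explicit negation.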
        public static void Fmls_Se(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarTernaryOpFRdByElem(context, Intrinsic.Arm64FmlsSe);
            }
            else if (Optimizations.UseFma)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231ss, d, n, res);

                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231sd, d, n, res);

                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Fmls_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpFRd(context, Intrinsic.Arm64FmlsV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                Operand res;

                if (sizeF == 0)
                {
                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231ps, d, n, m);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);
                    }

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231pd, d, n, m);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);
                    }

                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fmls_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpFRdByElem(context, Intrinsic.Arm64FmlsVe);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));

                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231ps, d, n, res);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                        res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);
                    }

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));

                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231pd, d, n, res);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                        res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);
                    }

                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }
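        // FMSUB computes a - n * m, so it also maps onto VFNMADD231, this time with a as the accumulator.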
        public static void Fmsub_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarTernaryOpF(context, Intrinsic.Arm64FmsubS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand res;

                if (op.Size == 0)
                {
                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231ss, a, n, m);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Subss, a, res);
                    }

                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231sd, a, n, m);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Subsd, a, res);
                    }

                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fmul_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FmulS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }

        public static void Fmul_Se(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpFByElem(context, Intrinsic.Arm64FmulSe);
            }
            else
            {
                EmitScalarBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
            }
        }

        public static void Fmul_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FmulV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Mulps, Intrinsic.X86Mulpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }

        public static void Fmul_Ve(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpFByElem(context, Intrinsic.Arm64FmulVe);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }
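        // FMULX differs from FMUL only on 0 * infinity, where it returns +/-2.0 instead of a NaN,
        // so there is no x86 fast path and the soft-float FPMulX fallback is always used.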
        public static void Fmulx_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FmulxS);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
                });
            }
        }

        public static void Fmulx_Se(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpFByElem(context, Intrinsic.Arm64FmulxSe);
            }
            else
            {
                EmitScalarBinaryOpByElemF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
                });
            }
        }

        public static void Fmulx_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FmulxV);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
                });
            }
        }

        public static void Fmulx_Ve(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpFByElem(context, Intrinsic.Arm64FmulxVe);
            }
            else
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
                });
            }
        }
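        // FNEG just flips the sign bit: XOR-ing with a -0.0 mask leaves the magnitude and any NaN payload intact.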
        public static void Fneg_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FnegS);
            }
            else if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) => context.Negate(op1));
            }
        }

        public static void Fneg_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FnegV);
            }
            else if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, -0f);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, -0d);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) => context.Negate(op1));
            }
        }
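        // FNMADD computes -(a + n * m), i.e. -a - n * m. VFNMSUB231 (-(src1 * src2) - dst) matches
        // that in one rounding; the non-FMA path negates a up front instead.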
        public static void Fnmadd_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarTernaryOpF(context, Intrinsic.Arm64FnmaddS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand res;

                if (op.Size == 0)
                {
                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfnmsub231ss, a, n, m);
                    }
                    else
                    {
                        Operand mask = X86GetScalar(context, -0f);
                        Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorps, mask, a);

                        res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Subss, aNeg, res);
                    }

                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfnmsub231sd, a, n, m);
                    }
                    else
                    {
                        Operand mask = X86GetScalar(context, -0d);
                        Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, a);

                        res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Subsd, aNeg, res);
                    }

                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPNegMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fnmsub_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarTernaryOpF(context, Intrinsic.Arm64FnmsubS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand res;

                if (op.Size == 0)
                {
                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfmsub231ss, a, n, m);
                    }
                    else
                    {
                        Operand mask = X86GetScalar(context, -0f);
                        Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorps, mask, a);

                        res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Addss, aNeg, res);
                    }

                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfmsub231sd, a, n, m);
                    }
                    else
                    {
                        Operand mask = X86GetScalar(context, -0d);
                        Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, a);

                        res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Addsd, aNeg, res);
                    }

                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPNegMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fnmul_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FnmulS);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Negate(context.Multiply(op1, op2)));
            }
        }
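        // X86Rcpss/Rcpps only provide an estimate good to about 12 bits, and (judging by its name)
        // EmitSse41Round32Exp8OpF coarsens that toward the architected 8-bit AArch64 estimate. The
        // fast path is single-precision only (sizeF == 0) since x86 has no double-precision RCP.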
        public static void Frecpe_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            int sizeF = op.Size & 1;

            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrecpeS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41 && sizeF == 0)
            {
                Operand res = EmitSse41Round32Exp8OpF(context, context.AddIntrinsic(Intrinsic.X86Rcpss, GetVec(op.Rn)), scalar: true);

                context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipEstimate), op1);
                });
            }
        }

        public static void Frecpe_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            int sizeF = op.Size & 1;

            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrecpeV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41 && sizeF == 0)
            {
                Operand res = EmitSse41Round32Exp8OpF(context, context.AddIntrinsic(Intrinsic.X86Rcpps, GetVec(op.Rn)), scalar: false);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipEstimate), op1);
                });
            }
        }
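        // FRECPS is the fused Newton-Raphson step 2.0 - n * m. EmitSse41RecipStepSelectOpF is then
        // expected to supply the architected 2.0 result for the 0 * infinity special cases that the
        // plain multiply would turn into a NaN.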
        public static void Frecps_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FrecpsS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                Operand res;

                if (sizeF == 0)
                {
                    Operand mask = X86GetScalar(context, 2f);

                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231ss, mask, n, m);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Subss, mask, res);
                    }

                    res = EmitSse41RecipStepSelectOpF(context, n, m, res, mask, scalar: true, sizeF);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetScalar(context, 2d);

                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231sd, mask, n, m);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Subsd, mask, res);
                    }

                    res = EmitSse41RecipStepSelectOpF(context, n, m, res, mask, scalar: true, sizeF);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipStepFused), op1, op2);
                });
            }
        }

        public static void Frecps_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FrecpsV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                Operand res;

                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, 2f);

                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231ps, mask, n, m);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Subps, mask, res);
                    }

                    res = EmitSse41RecipStepSelectOpF(context, n, m, res, mask, scalar: false, sizeF);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, 2d);

                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231pd, mask, n, m);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Subpd, mask, res);
                    }

                    res = EmitSse41RecipStepSelectOpF(context, n, m, res, mask, scalar: false, sizeF);

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipStepFused), op1, op2);
                });
            }
        }
        public static void Frecpx_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                // FRECPX has a single source register, so it must go through the unary helper.
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrecpxS);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecpX), op1);
                });
            }
        }
        public static void Frinta_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintaS);
            }
            else if (Optimizations.UseSse41)
            {
                EmitSse41ScalarRoundOpF(context, FPRoundingMode.ToNearestAway);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
                });
            }
        }

        public static void Frinta_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintaV);
            }
            else if (Optimizations.UseSse41)
            {
                EmitSse41VectorRoundOpF(context, FPRoundingMode.ToNearestAway);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
                });
            }
        }

        public static void Frinti_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintiS);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitRoundByRMode(context, op1);
                });
            }
        }

        public static void Frinti_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintiV);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitRoundByRMode(context, op1);
                });
            }
        }

        public static void Frintm_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintmS);
            }
            else if (Optimizations.UseSse41)
            {
                EmitSse41ScalarRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Floor), op1);
                });
            }
        }

        public static void Frintm_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintmV);
            }
            else if (Optimizations.UseSse41)
            {
                EmitSse41VectorRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Floor), op1);
                });
            }
        }

        public static void Frintn_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintnS);
            }
            else if (Optimizations.UseSse41)
            {
                EmitSse41ScalarRoundOpF(context, FPRoundingMode.ToNearest);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
                });
            }
        }

        public static void Frintn_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintnV);
            }
            else if (Optimizations.UseSse41)
            {
                EmitSse41VectorRoundOpF(context, FPRoundingMode.ToNearest);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
                });
            }
        }

        public static void Frintp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintpS);
            }
            else if (Optimizations.UseSse41)
            {
                EmitSse41ScalarRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Ceiling), op1);
                });
            }
        }

        public static void Frintp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintpV);
            }
            else if (Optimizations.UseSse41)
            {
                EmitSse41VectorRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Ceiling), op1);
                });
            }
        }

        public static void Frintx_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintxS);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitRoundByRMode(context, op1);
                });
            }
        }

        public static void Frintx_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintxV);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitRoundByRMode(context, op1);
                });
            }
        }

        public static void Frintz_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrintzS);
            }
            else if (Optimizations.UseSse41)
            {
                EmitSse41ScalarRoundOpF(context, FPRoundingMode.TowardsZero);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Truncate), op1);
                });
            }
        }

        public static void Frintz_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrintzV);
            }
            else if (Optimizations.UseSse41)
            {
                EmitSse41VectorRoundOpF(context, FPRoundingMode.TowardsZero);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Truncate), op1);
                });
            }
        }
        public static void Frsqrte_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            int sizeF = op.Size & 1;

            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FrsqrteS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41 && sizeF == 0)
            {
                Operand res = EmitSse41Round32Exp8OpF(context, context.AddIntrinsic(Intrinsic.X86Rsqrtss, GetVec(op.Rn)), scalar: true);

                context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtEstimate), op1);
                });
            }
        }

        public static void Frsqrte_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            int sizeF = op.Size & 1;

            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FrsqrteV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41 && sizeF == 0)
            {
                Operand res = EmitSse41Round32Exp8OpF(context, context.AddIntrinsic(Intrinsic.X86Rsqrtps, GetVec(op.Rn)), scalar: false);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtEstimate), op1);
                });
            }
        }
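        // FRSQRTS is the fused step (3.0 - n * m) / 2.0; for the 0 * infinity special cases the
        // architected result is 1.5, hence the maskOneHalf operand fed to the select helper.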
        public static void Frsqrts_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FrsqrtsS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                Operand res;

                if (sizeF == 0)
                {
                    Operand maskHalf = X86GetScalar(context, 0.5f);
                    Operand maskThree = X86GetScalar(context, 3f);
                    Operand maskOneHalf = X86GetScalar(context, 1.5f);

                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231ss, maskThree, n, m);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Subss, maskThree, res);
                    }

                    res = context.AddIntrinsic(Intrinsic.X86Mulss, maskHalf, res);
                    res = EmitSse41RecipStepSelectOpF(context, n, m, res, maskOneHalf, scalar: true, sizeF);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand maskHalf = X86GetScalar(context, 0.5d);
                    Operand maskThree = X86GetScalar(context, 3d);
                    Operand maskOneHalf = X86GetScalar(context, 1.5d);

                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231sd, maskThree, n, m);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Subsd, maskThree, res);
                    }

                    res = context.AddIntrinsic(Intrinsic.X86Mulsd, maskHalf, res);
                    res = EmitSse41RecipStepSelectOpF(context, n, m, res, maskOneHalf, scalar: true, sizeF);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtStepFused), op1, op2);
                });
            }
        }

        public static void Frsqrts_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FrsqrtsV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                Operand res;

                if (sizeF == 0)
                {
                    Operand maskHalf = X86GetAllElements(context, 0.5f);
                    Operand maskThree = X86GetAllElements(context, 3f);
                    Operand maskOneHalf = X86GetAllElements(context, 1.5f);

                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231ps, maskThree, n, m);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Subps, maskThree, res);
                    }

                    res = context.AddIntrinsic(Intrinsic.X86Mulps, maskHalf, res);
                    res = EmitSse41RecipStepSelectOpF(context, n, m, res, maskOneHalf, scalar: false, sizeF);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand maskHalf = X86GetAllElements(context, 0.5d);
                    Operand maskThree = X86GetAllElements(context, 3d);
                    Operand maskOneHalf = X86GetAllElements(context, 1.5d);

                    if (Optimizations.UseFma)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Vfnmadd231pd, maskThree, n, m);
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                        res = context.AddIntrinsic(Intrinsic.X86Subpd, maskThree, res);
                    }

                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, maskHalf, res);
                    res = EmitSse41RecipStepSelectOpF(context, n, m, res, maskOneHalf, scalar: false, sizeF);

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtStepFused), op1, op2);
                });
            }
        }
        public static void Fsqrt_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOpF(context, Intrinsic.Arm64FsqrtS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarUnaryOpF(context, Intrinsic.X86Sqrtss, Intrinsic.X86Sqrtsd);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSqrt), op1);
                });
            }
        }

        public static void Fsqrt_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOpF(context, Intrinsic.Arm64FsqrtV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorUnaryOpF(context, Intrinsic.X86Sqrtps, Intrinsic.X86Sqrtpd);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSqrt), op1);
                });
            }
        }

        public static void Fsub_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOpF(context, Intrinsic.Arm64FsubS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Subss, Intrinsic.X86Subsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
                });
            }
        }

        public static void Fsub_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpF(context, Intrinsic.Arm64FsubV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Subps, Intrinsic.X86Subpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
                });
            }
        }

        public static void Mla_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64MlaV);
            }
            else if (Optimizations.UseSse41)
            {
                EmitSse41VectorMul_AddSub(context, AddSub.Add);
            }
            else
            {
                EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Mla_Ve(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRdByElem(context, Intrinsic.Arm64MlaVe);
            }
            else
            {
                EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Mls_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64MlsV);
            }
            else if (Optimizations.UseSse41)
            {
                EmitSse41VectorMul_AddSub(context, AddSub.Subtract);
            }
            else
            {
                EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Mls_Ve(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRdByElem(context, Intrinsic.Arm64MlsVe);
            }
            else
            {
                EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Mul_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64MulV);
            }
            else if (Optimizations.UseSse41)
            {
                EmitSse41VectorMul_AddSub(context, AddSub.None);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
            }
        }

        public static void Mul_Ve(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpByElem(context, Intrinsic.Arm64MulVe);
            }
            else
            {
                EmitVectorBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
            }
        }

        public static void Neg_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarUnaryOp(context, Intrinsic.Arm64NegS);
            }
            else
            {
                EmitScalarUnaryOpSx(context, (op1) => context.Negate(op1));
            }
        }

        public static void Neg_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64NegV);
            }
            else if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                Intrinsic subInst = X86PsubInstruction[op.Size];

                Operand res = context.AddIntrinsic(subInst, context.VectorZero(), GetVec(op.Rn));

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorUnaryOpSx(context, (op1) => context.Negate(op1));
            }
        }
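        // PMULL is a carry-less (polynomial, GF(2)) multiply. PCLMULQDQ implements the 64x64 form
        // directly; the SSE4.1 fallback builds the product the schoolbook way, masking on each bit
        // of n and XOR-accumulating shifted copies of m.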
        public static void Pmull_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseArm64Pmull)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64PmullV);
            }
            else if (Optimizations.UsePclmulqdq && op.Size == 3)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int imm8 = op.RegisterSize == RegisterSize.Simd64 ? 0b0000_0000 : 0b0001_0001;

                Operand res = context.AddIntrinsic(Intrinsic.X86Pclmulqdq, n, m, Const(imm8));

                context.Copy(GetVec(op.Rd), res);
            }
            else if (Optimizations.UseSse41)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    n = context.VectorZeroUpper64(n);
                    m = context.VectorZeroUpper64(m);
                }
                else /* if (op.RegisterSize == RegisterSize.Simd128) */
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Operand res = context.VectorZero();

                if (op.Size == 0)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Pmovzxbw, n);
                    m = context.AddIntrinsic(Intrinsic.X86Pmovzxbw, m);

                    for (int i = 0; i < 8; i++)
                    {
                        Operand mask = context.AddIntrinsic(Intrinsic.X86Psllw, n, Const(15 - i));
                        mask = context.AddIntrinsic(Intrinsic.X86Psraw, mask, Const(15));

                        Operand tmp = context.AddIntrinsic(Intrinsic.X86Psllw, m, Const(i));
                        tmp = context.AddIntrinsic(Intrinsic.X86Pand, tmp, mask);

                        res = context.AddIntrinsic(Intrinsic.X86Pxor, res, tmp);
                    }
                }
                else /* if (op.Size == 3) */
                {
                    Operand zero = context.VectorZero();

                    for (int i = 0; i < 64; i++)
                    {
                        Operand mask = context.AddIntrinsic(Intrinsic.X86Movlhps, n, n);
                        mask = context.AddIntrinsic(Intrinsic.X86Psllq, mask, Const(63 - i));
                        mask = context.AddIntrinsic(Intrinsic.X86Psrlq, mask, Const(63));
                        mask = context.AddIntrinsic(Intrinsic.X86Psubq, zero, mask);

                        Operand tmp = EmitSse2Sll_128(context, m, i);
                        tmp = context.AddIntrinsic(Intrinsic.X86Pand, tmp, mask);

                        res = context.AddIntrinsic(Intrinsic.X86Pxor, res, tmp);
                    }
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand res;

                if (op.Size == 0)
                {
                    res = context.VectorZero();

                    int part = op.RegisterSize == RegisterSize.Simd64 ? 0 : 8;

                    for (int index = 0; index < 8; index++)
                    {
                        Operand ne = context.VectorExtract8(n, part + index);
                        Operand me = context.VectorExtract8(m, part + index);

                        Operand de = EmitPolynomialMultiply(context, ne, me, 8);

                        res = EmitVectorInsert(context, res, de, index, 1);
                    }
                }
                else /* if (op.Size == 3) */
                {
                    int part = op.RegisterSize == RegisterSize.Simd64 ? 0 : 1;

                    Operand ne = context.VectorExtract(OperandType.I64, n, part);
                    Operand me = context.VectorExtract(OperandType.I64, m, part);

                    res = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.PolynomialMult64_128)), ne, me);
                }

                context.Copy(GetVec(op.Rd), res);
            }
        }
        public static void Raddhn_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64RaddhnV);
            }
            else
            {
                EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: true);
            }
        }

        public static void Rsubhn_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64RsubhnV);
            }
            else
            {
                EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: true);
            }
        }

        public static void Saba_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64SabaV);
            }
            else
            {
                EmitVectorTernaryOpSx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
                });
            }
        }

        public static void Sabal_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64SabalV);
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
                });
            }
        }

        public static void Sabd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SabdV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                EmitSse41VectorSabdOp(context, op, n, m, isLong: false);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Sabdl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SabdlV);
            }
            else if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovsxbw
                    : Intrinsic.X86Pmovsxwd;

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                EmitSse41VectorSabdOp(context, op, n, m, isLong: true);
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Sadalp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpRd(context, Intrinsic.Arm64SadalpV);
            }
            else
            {
                EmitAddLongPairwise(context, signed: true, accumulate: true);
            }
        }

        public static void Saddl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SaddlV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Saddlp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64SaddlpV);
            }
            else
            {
                EmitAddLongPairwise(context, signed: true, accumulate: false);
            }
        }

        public static void Saddlv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64SaddlvV);
            }
            else
            {
                EmitVectorLongAcrossVectorOpSx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Saddw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SaddwV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                m = context.AddIntrinsic(movInst, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
            }
        }
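        // Signed halving add without widening: a + b == 2 * (a & b) + (a ^ b), so
        // (a + b) >> 1 == (a & b) + ((a ^ b) >> 1), with an arithmetic shift for the signed case.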
        public static void Shadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64ShaddV);
            }
            else if (Optimizations.UseSse2 && op.Size > 0)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand res = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);

                Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psraw : Intrinsic.X86Psrad;

                res2 = context.AddIntrinsic(shiftInst, res2, Const(1));

                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, res, res2);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return context.ShiftRightSI(context.Add(op1, op2), Const(1));
                });
            }
        }
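        // Signed halving subtract via unsigned averages: adding the 0x80.. bias maps signed values
        // into unsigned range (the biases cancel in the difference), and because PAVG rounds up,
        // n' - pavg(n', m') is exactly (n - m) >> 1 rounded toward minus infinity.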
        public static void Shsub_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64ShsubV);
            }
            else if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));

                Intrinsic addInst = X86PaddInstruction[op.Size];

                Operand nPlusMask = context.AddIntrinsic(addInst, n, mask);
                Operand mPlusMask = context.AddIntrinsic(addInst, m, mask);

                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;

                Operand res = context.AddIntrinsic(avgInst, nPlusMask, mPlusMask);

                Intrinsic subInst = X86PsubInstruction[op.Size];

                res = context.AddIntrinsic(subInst, nPlusMask, res);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return context.ShiftRightSI(context.Subtract(op1, op2), Const(1));
                });
            }
        }

        public static void Smax_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SmaxV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic maxInst = X86PmaxsInstruction[op.Size];

                Operand res = context.AddIntrinsic(maxInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
            }
        }

        public static void Smaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SmaxpV);
            }
            else if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PmaxsInstruction);
            }
            else
            {
                EmitVectorPairwiseOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
            }
        }

        public static void Smaxv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64SmaxvV);
            }
            else
            {
                EmitVectorAcrossVectorOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
            }
        }

        public static void Smin_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SminV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic minInst = X86PminsInstruction[op.Size];

                Operand res = context.AddIntrinsic(minInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
            }
        }

        public static void Sminp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SminpV);
            }
            else if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PminsInstruction);
            }
            else
            {
                EmitVectorPairwiseOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
            }
        }

        public static void Sminv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64SminvV);
            }
            else
            {
                EmitVectorAcrossVectorOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
            }
        }
  2878. public static void Smlal_V(ArmEmitterContext context)
  2879. {
  2880. OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
  2881. if (Optimizations.UseAdvSimd)
  2882. {
  2883. InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64SmlalV);
  2884. }
  2885. else if (Optimizations.UseSse41 && op.Size < 2)
  2886. {
  2887. Operand d = GetVec(op.Rd);
  2888. Operand n = GetVec(op.Rn);
  2889. Operand m = GetVec(op.Rm);
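                // The "2" form (Simd128) operates on the upper halves, so shift them down
                // before widening; the same path then serves both variants.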
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovsxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
                Operand res = context.AddIntrinsic(mullInst, n, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(d, context.AddIntrinsic(addInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Smlal_Ve(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRdByElem(context, Intrinsic.Arm64SmlalVe);
            }
            else
            {
                EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Smlsl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64SmlslV);
            }
            else if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = op.Size == 0 ? Intrinsic.X86Pmovsxbw : Intrinsic.X86Pmovsxwd;
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
                Operand res = context.AddIntrinsic(mullInst, n, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(d, context.AddIntrinsic(subInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Smlsl_Ve(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRdByElem(context, Intrinsic.Arm64SmlslVe);
            }
            else
            {
                EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Smull_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SmullV);
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Multiply(op1, op2));
            }
        }

        public static void Smull_Ve(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpByElem(context, Intrinsic.Arm64SmullVe);
            }
            else
            {
                EmitVectorWidenBinaryOpByElemSx(context, (op1, op2) => context.Multiply(op1, op2));
            }
        }

        public static void Sqabs_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarSaturatingUnaryOp(context, Intrinsic.Arm64SqabsS);
            }
            else
            {
                EmitScalarSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
            }
        }

        public static void Sqabs_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingUnaryOp(context, Intrinsic.Arm64SqabsV);
            }
            else
            {
                EmitVectorSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
            }
        }

        public static void Sqadd_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOp(context, Intrinsic.Arm64SqaddS);
            }
            else
            {
                EmitScalarSaturatingBinaryOpSx(context, flags: SaturatingFlags.Add);
            }
        }

        public static void Sqadd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOp(context, Intrinsic.Arm64SqaddV);
            }
            else
            {
                EmitVectorSaturatingBinaryOpSx(context, flags: SaturatingFlags.Add);
            }
        }

        public static void Sqdmulh_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOp(context, Intrinsic.Arm64SqdmulhS);
            }
            else
            {
                EmitScalarSaturatingBinaryOpSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false));
            }
        }

        public static void Sqdmulh_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOp(context, Intrinsic.Arm64SqdmulhV);
            }
            else
            {
                EmitVectorSaturatingBinaryOpSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false));
            }
        }

        public static void Sqdmulh_Ve(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpByElem(context, Intrinsic.Arm64SqdmulhVe);
            }
            else
            {
                EmitVectorSaturatingBinaryOpByElemSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false));
            }
        }

        public static void Sqneg_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarSaturatingUnaryOp(context, Intrinsic.Arm64SqnegS);
            }
            else
            {
                EmitScalarSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
            }
        }

        public static void Sqneg_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingUnaryOp(context, Intrinsic.Arm64SqnegV);
            }
            else
            {
                EmitVectorSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
            }
        }

        public static void Sqrdmulh_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOp(context, Intrinsic.Arm64SqrdmulhS);
            }
            else
            {
                EmitScalarSaturatingBinaryOpSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true));
            }
        }

        public static void Sqrdmulh_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOp(context, Intrinsic.Arm64SqrdmulhV);
            }
            else
            {
                EmitVectorSaturatingBinaryOpSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true));
            }
        }

        public static void Sqrdmulh_Ve(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpByElem(context, Intrinsic.Arm64SqrdmulhVe);
            }
            else
            {
                EmitVectorSaturatingBinaryOpByElemSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true));
            }
        }

        public static void Sqsub_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOp(context, Intrinsic.Arm64SqsubS);
            }
            else
            {
                EmitScalarSaturatingBinaryOpSx(context, flags: SaturatingFlags.Sub);
            }
        }

        public static void Sqsub_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOp(context, Intrinsic.Arm64SqsubV);
            }
            else
            {
                EmitVectorSaturatingBinaryOpSx(context, flags: SaturatingFlags.Sub);
            }
        }

        public static void Sqxtn_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOpRd(context, Intrinsic.Arm64SqxtnS);
            }
            else
            {
                EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxSx);
            }
        }

        public static void Sqxtn_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpRd(context, Intrinsic.Arm64SqxtnV);
            }
            else
            {
                EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxSx);
            }
        }

        public static void Sqxtun_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOpRd(context, Intrinsic.Arm64SqxtunS);
            }
            else
            {
                EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxZx);
            }
        }

        public static void Sqxtun_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpRd(context, Intrinsic.Arm64SqxtunV);
            }
            else
            {
                EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxZx);
            }
        }

        public static void Srhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SrhaddV);
            }
            else if (Optimizations.UseSse2 && op.Size < 2)
            {
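                // Rounding halving add: subtract the sign bias to reach the unsigned
                // domain, use PAVG{B,W} for (a + b + 1) >> 1, then add the bias back.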
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));
                Intrinsic subInst = X86PsubInstruction[op.Size];
                Operand nMinusMask = context.AddIntrinsic(subInst, n, mask);
                Operand mMinusMask = context.AddIntrinsic(subInst, m, mask);
                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
                Operand res = context.AddIntrinsic(avgInst, nMinusMask, mMinusMask);
                Intrinsic addInst = X86PaddInstruction[op.Size];
                res = context.AddIntrinsic(addInst, mask, res);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    Operand res = context.Add(op1, op2);
                    res = context.Add(res, Const(1L));
                    return context.ShiftRightSI(res, Const(1));
                });
            }
        }

        public static void Ssubl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SsublV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovsxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Ssubw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SsubwV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovsxInstruction[op.Size];
                m = context.AddIntrinsic(movInst, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Sub_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarBinaryOp(context, Intrinsic.Arm64SubS);
            }
            else
            {
                EmitScalarBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Sub_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64SubV);
            }
            else if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic subInst = X86PsubInstruction[op.Size];
                Operand res = context.AddIntrinsic(subInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Subhn_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64SubhnV);
            }
            else
            {
                EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: false);
            }
        }

        public static void Suqadd_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOpRd(context, Intrinsic.Arm64SuqaddS);
            }
            else
            {
                EmitScalarSaturatingBinaryOpSx(context, flags: SaturatingFlags.Accumulate);
            }
        }

        public static void Suqadd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpRd(context, Intrinsic.Arm64SuqaddV);
            }
            else
            {
                EmitVectorSaturatingBinaryOpSx(context, flags: SaturatingFlags.Accumulate);
            }
        }

        public static void Uaba_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64UabaV);
            }
            else
            {
                EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
                });
            }
        }

        public static void Uabal_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64UabalV);
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
                });
            }
        }

        public static void Uabd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UabdV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                EmitSse41VectorUabdOp(context, op, n, m, isLong: false);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Uabdl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UabdlV);
            }
            else if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovzxbw
                    : Intrinsic.X86Pmovzxwd;
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                EmitSse41VectorUabdOp(context, op, n, m, isLong: true);
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Uadalp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpRd(context, Intrinsic.Arm64UadalpV);
            }
            else
            {
                EmitAddLongPairwise(context, signed: false, accumulate: true);
            }
        }

        public static void Uaddl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UaddlV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovzxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Uaddlp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64UaddlpV);
            }
            else
            {
                EmitAddLongPairwise(context, signed: false, accumulate: false);
            }
        }

        public static void Uaddlv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64UaddlvV);
            }
            else
            {
                EmitVectorLongAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Uaddw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UaddwV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovzxInstruction[op.Size];
                m = context.AddIntrinsic(movInst, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Uhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UhaddV);
            }
            else if (Optimizations.UseSse2 && op.Size > 0)
            {
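                // Unsigned halving add without intermediate overflow:
                // (n & m) + ((n ^ m) >> 1), using a logical per-element shift.
                // op.Size > 0 because x86 has no per-byte shift.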
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Operand res = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);
                Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psrlw : Intrinsic.X86Psrld;
                res2 = context.AddIntrinsic(shiftInst, res2, Const(1));
                Intrinsic addInst = X86PaddInstruction[op.Size];
                res = context.AddIntrinsic(addInst, res, res2);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return context.ShiftRightUI(context.Add(op1, op2), Const(1));
                });
            }
        }

        public static void Uhsub_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UhsubV);
            }
            else if (Optimizations.UseSse2 && op.Size < 2)
            {
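                // n - pavg(n, m) == n - ((n + m + 1) >> 1) == (n - m) >> 1 in modular
                // arithmetic, which is exactly UHSUB.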
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
                Operand res = context.AddIntrinsic(avgInst, n, m);
                Intrinsic subInst = X86PsubInstruction[op.Size];
                res = context.AddIntrinsic(subInst, n, res);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return context.ShiftRightUI(context.Subtract(op1, op2), Const(1));
                });
            }
        }

        public static void Umax_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UmaxV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic maxInst = X86PmaxuInstruction[op.Size];
                Operand res = context.AddIntrinsic(maxInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
            }
        }

        public static void Umaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UmaxpV);
            }
            else if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PmaxuInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
            }
        }

        public static void Umaxv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64UmaxvV);
            }
            else
            {
                EmitVectorAcrossVectorOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
            }
        }

        public static void Umin_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UminV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic minInst = X86PminuInstruction[op.Size];
                Operand res = context.AddIntrinsic(minInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
            }
        }

        public static void Uminp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UminpV);
            }
            else if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PminuInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
            }
        }

        public static void Uminv_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorUnaryOp(context, Intrinsic.Arm64UminvV);
            }
            else
            {
                EmitVectorAcrossVectorOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
            }
        }

        public static void Umlal_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64UmlalV);
            }
            else if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovzxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
                Operand res = context.AddIntrinsic(mullInst, n, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(d, context.AddIntrinsic(addInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umlal_Ve(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRdByElem(context, Intrinsic.Arm64UmlalVe);
            }
            else
            {
                EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umlsl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRd(context, Intrinsic.Arm64UmlslV);
            }
            else if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = op.Size == 0 ? Intrinsic.X86Pmovzxbw : Intrinsic.X86Pmovzxwd;
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
                Operand res = context.AddIntrinsic(mullInst, n, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(d, context.AddIntrinsic(subInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umlsl_Ve(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorTernaryOpRdByElem(context, Intrinsic.Arm64UmlslVe);
            }
            else
            {
                EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umull_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UmullV);
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
            }
        }

        public static void Umull_Ve(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOpByElem(context, Intrinsic.Arm64UmullVe);
            }
            else
            {
                EmitVectorWidenBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
            }
        }

        public static void Uqadd_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOp(context, Intrinsic.Arm64UqaddS);
            }
            else
            {
                EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Add);
            }
        }

        public static void Uqadd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOp(context, Intrinsic.Arm64UqaddV);
            }
            else
            {
                EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Add);
            }
        }

        public static void Uqsub_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOp(context, Intrinsic.Arm64UqsubS);
            }
            else
            {
                EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
            }
        }

        public static void Uqsub_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOp(context, Intrinsic.Arm64UqsubV);
            }
            else
            {
                EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
            }
        }

        public static void Uqxtn_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOpRd(context, Intrinsic.Arm64UqxtnS);
            }
            else
            {
                EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarZxZx);
            }
        }

        public static void Uqxtn_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpRd(context, Intrinsic.Arm64UqxtnV);
            }
            else
            {
                EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorZxZx);
            }
        }

        public static void Urhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UrhaddV);
            }
            else if (Optimizations.UseSse2 && op.Size < 2)
            {
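                // PAVG{B,W} computes the unsigned (a + b + 1) >> 1 directly, which is
                // exactly URHADD.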
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
                Operand res = context.AddIntrinsic(avgInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    Operand res = context.Add(op1, op2);
                    res = context.Add(res, Const(1L));
                    return context.ShiftRightUI(res, Const(1));
                });
            }
        }

        public static void Usqadd_S(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitScalarSaturatingBinaryOpRd(context, Intrinsic.Arm64UsqaddS);
            }
            else
            {
                EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
            }
        }

        public static void Usqadd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorSaturatingBinaryOpRd(context, Intrinsic.Arm64UsqaddV);
            }
            else
            {
                EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
            }
        }

        public static void Usubl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UsublV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovzxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Usubw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelperArm64.EmitVectorBinaryOp(context, Intrinsic.Arm64UsubwV);
            }
            else if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovzxInstruction[op.Size];
                m = context.AddIntrinsic(movInst, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        private static Operand EmitAbs(ArmEmitterContext context, Operand value)
        {
            Operand isPositive = context.ICompareGreaterOrEqual(value, Const(value.Type, 0));
            return context.ConditionalSelect(isPositive, value, context.Negate(value));
        }
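
        // Scalar fallback for the [S|U]ADDLP and [S|U]ADALP pairwise long adds: sums
        // adjacent element pairs into double-width lanes, optionally accumulating into Rd.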
        private static void EmitAddLongPairwise(ArmEmitterContext context, bool signed, bool accumulate)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int pairs = op.GetPairsCount() >> op.Size;
            for (int index = 0; index < pairs; index++)
            {
                int pairIndex = index << 1;
                Operand ne0 = EmitVectorExtract(context, op.Rn, pairIndex, op.Size, signed);
                Operand ne1 = EmitVectorExtract(context, op.Rn, pairIndex + 1, op.Size, signed);
                Operand e = context.Add(ne0, ne1);
                if (accumulate)
                {
                    Operand de = EmitVectorExtract(context, op.Rd, index, op.Size + 1, signed);
                    e = context.Add(e, de);
                }
                res = EmitVectorInsert(context, res, e, index, op.Size + 1);
            }
            context.Copy(GetVec(op.Rd), res);
        }
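
        // SQDMULH/SQRDMULH core: (2 * n * m [+ rounding constant]) >> eSize, with the
        // non-rounding path folded to n * m >> (eSize - 1). The int.MinValue fixup appears
        // to undo the 64-bit wrap that occurs when both 32-bit operands are the minimum
        // negative value, so the caller's saturating helper can clamp the result.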
        private static Operand EmitDoublingMultiplyHighHalf(
            ArmEmitterContext context,
            Operand n,
            Operand m,
            bool round)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            int eSize = 8 << op.Size;
            Operand res = context.Multiply(n, m);
            if (!round)
            {
                res = context.ShiftRightSI(res, Const(eSize - 1));
            }
            else
            {
                long roundConst = 1L << (eSize - 1);
                res = context.ShiftLeft(res, Const(1));
                res = context.Add(res, Const(roundConst));
                res = context.ShiftRightSI(res, Const(eSize));
                Operand isIntMin = context.ICompareEqual(res, Const((long)int.MinValue));
                res = context.ConditionalSelect(isIntMin, context.Negate(res), res);
            }
            return res;
        }
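
        // Fallback for the [R]ADDHN/[R]SUBHN family: keeps the high half of each
        // double-width result; `part` targets the upper half of Rd for the "2" variants.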
        private static void EmitHighNarrow(ArmEmitterContext context, Func2I emit, bool round)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            int elems = 8 >> op.Size;
            int eSize = 8 << op.Size;
            int part = op.RegisterSize == RegisterSize.Simd128 ? elems : 0;
            Operand d = GetVec(op.Rd);
            Operand res = part == 0 ? context.VectorZero() : context.Copy(d);
            long roundConst = 1L << (eSize - 1);
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size + 1);
                Operand me = EmitVectorExtractZx(context, op.Rm, index, op.Size + 1);
                Operand de = emit(ne, me);
                if (round)
                {
                    de = context.Add(de, Const(roundConst));
                }
                de = context.ShiftRightUI(de, Const(eSize));
                res = EmitVectorInsert(context, res, de, part + index, op.Size);
            }
            context.Copy(d, res);
        }

        private static Operand EmitMax64Op(ArmEmitterContext context, Operand op1, Operand op2, bool signed)
        {
            Debug.Assert(op1.Type == OperandType.I64 && op2.Type == OperandType.I64);
            Operand cmp = signed
                ? context.ICompareGreaterOrEqual (op1, op2)
                : context.ICompareGreaterOrEqualUI(op1, op2);
            return context.ConditionalSelect(cmp, op1, op2);
        }

        private static Operand EmitMin64Op(ArmEmitterContext context, Operand op1, Operand op2, bool signed)
        {
            Debug.Assert(op1.Type == OperandType.I64 && op2.Type == OperandType.I64);
            Operand cmp = signed
                ? context.ICompareLessOrEqual (op1, op2)
                : context.ICompareLessOrEqualUI(op1, op2);
            return context.ConditionalSelect(cmp, op1, op2);
        }
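
        // FRINT* helpers: the rounding modes map onto ROUNDSS/ROUNDSD/ROUNDPS/ROUNDPD,
        // except "to nearest, ties away from zero", which SSE4.1 cannot encode and which
        // takes a separate emulation path.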
        private static void EmitSse41ScalarRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand n = GetVec(op.Rn);
            Operand res;
            if (roundMode != FPRoundingMode.ToNearestAway)
            {
                Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundsd : Intrinsic.X86Roundss;
                res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));
            }
            else
            {
                res = EmitSse41RoundToNearestWithTiesToAwayOpF(context, n, scalar: true);
            }
            if ((op.Size & 1) != 0)
            {
                res = context.VectorZeroUpper64(res);
            }
            else
            {
                res = context.VectorZeroUpper96(res);
            }
            context.Copy(GetVec(op.Rd), res);
        }

        private static void EmitSse41VectorRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand n = GetVec(op.Rn);
            Operand res;
            if (roundMode != FPRoundingMode.ToNearestAway)
            {
                Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundpd : Intrinsic.X86Roundps;
                res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));
            }
            else
            {
                res = EmitSse41RoundToNearestWithTiesToAwayOpF(context, n, scalar: false);
            }
            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }
            context.Copy(GetVec(op.Rd), res);
        }

        private static Operand EmitSse41Round32Exp8OpF(ArmEmitterContext context, Operand value, bool scalar)
        {
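            // Rounds each single-precision value to the nearest value with the low 15
            // mantissa bits clear; NaNs and infinities (exponent all ones) pass through
            // unchanged.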
            Operand roundMask;
            Operand truncMask;
            Operand expMask;
            if (scalar)
            {
                roundMask = X86GetScalar(context, 0x4000);
                truncMask = X86GetScalar(context, unchecked((int)0xFFFF8000));
                expMask = X86GetScalar(context, 0x7F800000);
            }
            else
            {
                roundMask = X86GetAllElements(context, 0x4000);
                truncMask = X86GetAllElements(context, unchecked((int)0xFFFF8000));
                expMask = X86GetAllElements(context, 0x7F800000);
            }
            Operand oValue = value;
            Operand masked = context.AddIntrinsic(Intrinsic.X86Pand, value, expMask);
            Operand isNaNInf = context.AddIntrinsic(Intrinsic.X86Pcmpeqd, masked, expMask);
            value = context.AddIntrinsic(Intrinsic.X86Paddd, value, roundMask);
            value = context.AddIntrinsic(Intrinsic.X86Pand, value, truncMask);
            return context.AddIntrinsic(Intrinsic.X86Blendvps, value, oValue, isNaNInf);
        }
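
        // FRECPS/FRSQRTS special case: where one operand is zero and the other infinity,
        // the fused constant in `mask` is selected instead of the computed result. The
        // left shift by one discards the sign bit so +/-0 and +/-infinity all match.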
        private static Operand EmitSse41RecipStepSelectOpF(
            ArmEmitterContext context,
            Operand n,
            Operand m,
            Operand res,
            Operand mask,
            bool scalar,
            int sizeF)
        {
            Intrinsic cmpOp;
            Intrinsic shlOp;
            Intrinsic blendOp;
            Operand zero = context.VectorZero();
            Operand expMask;
            if (sizeF == 0)
            {
                cmpOp = Intrinsic.X86Pcmpeqd;
                shlOp = Intrinsic.X86Pslld;
                blendOp = Intrinsic.X86Blendvps;
                expMask = scalar ? X86GetScalar(context, 0x7F800000 << 1) : X86GetAllElements(context, 0x7F800000 << 1);
            }
            else /* if (sizeF == 1) */
            {
                cmpOp = Intrinsic.X86Pcmpeqq;
                shlOp = Intrinsic.X86Psllq;
                blendOp = Intrinsic.X86Blendvpd;
                expMask = scalar ? X86GetScalar(context, 0x7FF0000000000000L << 1) : X86GetAllElements(context, 0x7FF0000000000000L << 1);
            }
            n = context.AddIntrinsic(shlOp, n, Const(1));
            m = context.AddIntrinsic(shlOp, m, Const(1));
            Operand nZero = context.AddIntrinsic(cmpOp, n, zero);
            Operand mZero = context.AddIntrinsic(cmpOp, m, zero);
            Operand nInf = context.AddIntrinsic(cmpOp, n, expMask);
            Operand mInf = context.AddIntrinsic(cmpOp, m, expMask);
            Operand nmZero = context.AddIntrinsic(Intrinsic.X86Por, nZero, mZero);
            Operand nmInf = context.AddIntrinsic(Intrinsic.X86Por, nInf, mInf);
            Operand nmZeroInf = context.AddIntrinsic(Intrinsic.X86Pand, nmZero, nmInf);
            return context.AddIntrinsic(blendOp, res, mask, nmZeroInf);
        }
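
        // NaN classification: comparing a value with itself as UnorderedQ flags every NaN;
        // the quiet bit (bit 22 for singles, bit 51 for doubles) then separates quiet NaNs
        // from signaling ones.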
        public static void EmitSse2VectorIsNaNOpF(
            ArmEmitterContext context,
            Operand opF,
            out Operand qNaNMask,
            out Operand sNaNMask,
            bool? isQNaN = null)
        {
            IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;
            if ((op.Size & 1) == 0)
            {
                const int QBit = 22;
                Operand qMask = X86GetAllElements(context, 1 << QBit);
                Operand mask1 = context.AddIntrinsic(Intrinsic.X86Cmpps, opF, opF, Const((int)CmpCondition.UnorderedQ));
                Operand mask2 = context.AddIntrinsic(Intrinsic.X86Pand, opF, qMask);
                mask2 = context.AddIntrinsic(Intrinsic.X86Cmpps, mask2, qMask, Const((int)CmpCondition.Equal));
                qNaNMask = isQNaN == null || (bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andps, mask2, mask1) : default;
                sNaNMask = isQNaN == null || !(bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andnps, mask2, mask1) : default;
            }
            else /* if ((op.Size & 1) == 1) */
            {
                const int QBit = 51;
                Operand qMask = X86GetAllElements(context, 1L << QBit);
                Operand mask1 = context.AddIntrinsic(Intrinsic.X86Cmppd, opF, opF, Const((int)CmpCondition.UnorderedQ));
                Operand mask2 = context.AddIntrinsic(Intrinsic.X86Pand, opF, qMask);
                mask2 = context.AddIntrinsic(Intrinsic.X86Cmppd, mask2, qMask, Const((int)CmpCondition.Equal));
                qNaNMask = isQNaN == null || (bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andpd, mask2, mask1) : default;
                sNaNMask = isQNaN == null || !(bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andnpd, mask2, mask1) : default;
            }
        }
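
        // Mirrors the priority of the ARM FPProcessNaNs pseudocode: a signaling NaN in n
        // wins, then a signaling NaN in m, then a quiet NaN in n; the selected NaN is
        // quieted by ORing in the quiet bit.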
        public static Operand EmitSse41ProcessNaNsOpF(
            ArmEmitterContext context,
            Func2I emit,
            bool scalar,
            Operand n = default,
            Operand m = default)
        {
            Operand nCopy = n == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rn)) : n;
            Operand mCopy = m == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rm)) : m;
            EmitSse2VectorIsNaNOpF(context, nCopy, out Operand nQNaNMask, out Operand nSNaNMask);
            EmitSse2VectorIsNaNOpF(context, mCopy, out _, out Operand mSNaNMask, isQNaN: false);
            int sizeF = ((IOpCodeSimd)context.CurrOp).Size & 1;
            if (sizeF == 0)
            {
                const int QBit = 22;
                Operand qMask = scalar ? X86GetScalar(context, 1 << QBit) : X86GetAllElements(context, 1 << QBit);
                Operand resNaNMask = context.AddIntrinsic(Intrinsic.X86Pandn, mSNaNMask, nQNaNMask);
                resNaNMask = context.AddIntrinsic(Intrinsic.X86Por, resNaNMask, nSNaNMask);
                Operand resNaN = context.AddIntrinsic(Intrinsic.X86Blendvps, mCopy, nCopy, resNaNMask);
                resNaN = context.AddIntrinsic(Intrinsic.X86Por, resNaN, qMask);
                Operand resMask = context.AddIntrinsic(Intrinsic.X86Cmpps, nCopy, mCopy, Const((int)CmpCondition.OrderedQ));
                Operand res = context.AddIntrinsic(Intrinsic.X86Blendvps, resNaN, emit(nCopy, mCopy), resMask);
                if (n != default || m != default)
                {
                    return res;
                }
                if (scalar)
                {
                    res = context.VectorZeroUpper96(res);
                }
                else if (((OpCodeSimdReg)context.CurrOp).RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);
                return default;
            }
            else /* if (sizeF == 1) */
            {
                const int QBit = 51;
                Operand qMask = scalar ? X86GetScalar(context, 1L << QBit) : X86GetAllElements(context, 1L << QBit);
                Operand resNaNMask = context.AddIntrinsic(Intrinsic.X86Pandn, mSNaNMask, nQNaNMask);
                resNaNMask = context.AddIntrinsic(Intrinsic.X86Por, resNaNMask, nSNaNMask);
                Operand resNaN = context.AddIntrinsic(Intrinsic.X86Blendvpd, mCopy, nCopy, resNaNMask);
                resNaN = context.AddIntrinsic(Intrinsic.X86Por, resNaN, qMask);
                Operand resMask = context.AddIntrinsic(Intrinsic.X86Cmppd, nCopy, mCopy, Const((int)CmpCondition.OrderedQ));
                Operand res = context.AddIntrinsic(Intrinsic.X86Blendvpd, resNaN, emit(nCopy, mCopy), resMask);
                if (n != default || m != default)
                {
                    return res;
                }
                if (scalar)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);
                return default;
            }
        }
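
        // x86 MAXPS/MINPS return the second operand when the inputs compare equal, so they
        // do not honor the sign of zero. The magnitude is taken from the x86 result and the
        // sign bit recomputed separately: AND of the input signs for max, OR for min.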
        private static Operand EmitSse2VectorMaxMinOpF(ArmEmitterContext context, Operand n, Operand m, bool isMax)
        {
            IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;
            if ((op.Size & 1) == 0)
            {
                Operand mask = X86GetAllElements(context, -0f);
                Operand res = context.AddIntrinsic(isMax ? Intrinsic.X86Maxps : Intrinsic.X86Minps, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Andnps, mask, res);
                Operand resSign = context.AddIntrinsic(isMax ? Intrinsic.X86Pand : Intrinsic.X86Por, n, m);
                resSign = context.AddIntrinsic(Intrinsic.X86Andps, mask, resSign);
                return context.AddIntrinsic(Intrinsic.X86Por, res, resSign);
            }
            else /* if ((op.Size & 1) == 1) */
            {
                Operand mask = X86GetAllElements(context, -0d);
                Operand res = context.AddIntrinsic(isMax ? Intrinsic.X86Maxpd : Intrinsic.X86Minpd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Andnpd, mask, res);
                Operand resSign = context.AddIntrinsic(isMax ? Intrinsic.X86Pand : Intrinsic.X86Por, n, m);
                resSign = context.AddIntrinsic(Intrinsic.X86Andpd, mask, resSign);
                return context.AddIntrinsic(Intrinsic.X86Por, res, resSign);
            }
        }
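
        // FMAXNM/FMINNM: per FPMaxNum/FPMinNum, an operand that is a quiet NaN is first
        // replaced with negative (max) or positive (min) infinity, so the regular max/min
        // then picks the numeric operand.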
        private static Operand EmitSse41MaxMinNumOpF(
            ArmEmitterContext context,
            bool isMaxNum,
            bool scalar,
            Operand n = default,
            Operand m = default)
        {
            Operand nCopy = n == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rn)) : n;
            Operand mCopy = m == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rm)) : m;
            EmitSse2VectorIsNaNOpF(context, nCopy, out Operand nQNaNMask, out _, isQNaN: true);
            EmitSse2VectorIsNaNOpF(context, mCopy, out Operand mQNaNMask, out _, isQNaN: true);
            int sizeF = ((IOpCodeSimd)context.CurrOp).Size & 1;
            if (sizeF == 0)
            {
                Operand negInfMask = scalar
                    ? X86GetScalar (context, isMaxNum ? float.NegativeInfinity : float.PositiveInfinity)
                    : X86GetAllElements(context, isMaxNum ? float.NegativeInfinity : float.PositiveInfinity);
                Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnps, mQNaNMask, nQNaNMask);
                Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnps, nQNaNMask, mQNaNMask);
                nCopy = context.AddIntrinsic(Intrinsic.X86Blendvps, nCopy, negInfMask, nMask);
                mCopy = context.AddIntrinsic(Intrinsic.X86Blendvps, mCopy, negInfMask, mMask);
                Operand res = EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: isMaxNum);
                }, scalar: scalar, nCopy, mCopy);
                if (n != default || m != default)
                {
                    return res;
                }
                if (scalar)
                {
                    res = context.VectorZeroUpper96(res);
                }
                else if (((OpCodeSimdReg)context.CurrOp).RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);
                return default;
            }
            else /* if (sizeF == 1) */
            {
                Operand negInfMask = scalar
                    ? X86GetScalar (context, isMaxNum ? double.NegativeInfinity : double.PositiveInfinity)
                    : X86GetAllElements(context, isMaxNum ? double.NegativeInfinity : double.PositiveInfinity);
                Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnpd, mQNaNMask, nQNaNMask);
                Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnpd, nQNaNMask, mQNaNMask);
                nCopy = context.AddIntrinsic(Intrinsic.X86Blendvpd, nCopy, negInfMask, nMask);
                mCopy = context.AddIntrinsic(Intrinsic.X86Blendvpd, mCopy, negInfMask, mMask);
                Operand res = EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: isMaxNum);
                }, scalar: scalar, nCopy, mCopy);
                if (n != default || m != default)
                {
                    return res;
                }
                if (scalar)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);
                return default;
            }
        }

        private enum AddSub
        {
            None,
            Add,
            Subtract
        }
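
        // SSE has no byte multiply: odd bytes are computed with PMULLW on inputs shifted
        // right by eight, even bytes with PMULLW on the raw inputs, and the two results
        // are merged with PBLENDVB.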
        private static void EmitSse41VectorMul_AddSub(ArmEmitterContext context, AddSub addSub)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);
            Operand res;
            if (op.Size == 0)
            {
                Operand ns8 = context.AddIntrinsic(Intrinsic.X86Psrlw, n, Const(8));
                Operand ms8 = context.AddIntrinsic(Intrinsic.X86Psrlw, m, Const(8));
                res = context.AddIntrinsic(Intrinsic.X86Pmullw, ns8, ms8);
                res = context.AddIntrinsic(Intrinsic.X86Psllw, res, Const(8));
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);
                Operand mask = X86GetAllElements(context, 0x00FF00FF);
                res = context.AddIntrinsic(Intrinsic.X86Pblendvb, res, res2, mask);
            }
            else if (op.Size == 1)
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmulld, n, m);
            }
            Operand d = GetVec(op.Rd);
            if (addSub == AddSub.Add)
            {
                Intrinsic addInst = X86PaddInstruction[op.Size];
                res = context.AddIntrinsic(addInst, d, res);
            }
            else if (addSub == AddSub.Subtract)
            {
                Intrinsic subInst = X86PsubInstruction[op.Size];
                res = context.AddIntrinsic(subInst, d, res);
            }
            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }
            context.Copy(d, res);
        }
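
        // Signed |n - m|: a compare-greater mask selects n - m where n > m and m - n
        // elsewhere.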
        private static void EmitSse41VectorSabdOp(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;
            Intrinsic cmpgtInst = X86PcmpgtInstruction[size];
            Operand cmpMask = context.AddIntrinsic(cmpgtInst, n, m);
            Intrinsic subInst = X86PsubInstruction[size];
            Operand res = context.AddIntrinsic(subInst, n, m);
            res = context.AddIntrinsic(Intrinsic.X86Pand, cmpMask, res);
            Operand res2 = context.AddIntrinsic(subInst, m, n);
            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);
            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);
            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }
            context.Copy(GetVec(op.Rd), res);
        }

        private static void EmitSse41VectorUabdOp(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
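            // x86 has no unsigned compare-greater: PMAXU followed by PCMPEQ yields an
            // n <= m mask, which PANDN against all-ones inverts into an n > m mask.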
            int size = isLong ? op.Size + 1 : op.Size;
            Intrinsic maxInst = X86PmaxuInstruction[size];
            Operand max = context.AddIntrinsic(maxInst, m, n);
            Intrinsic cmpeqInst = X86PcmpeqInstruction[size];
            Operand cmpMask = context.AddIntrinsic(cmpeqInst, max, m);
            Operand onesMask = X86GetAllElements(context, -1L);
            cmpMask = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, onesMask);
            Intrinsic subInst = X86PsubInstruction[size];
            Operand res = context.AddIntrinsic(subInst, n, m);
            Operand res2 = context.AddIntrinsic(subInst, m, n);
            res = context.AddIntrinsic(Intrinsic.X86Pand, cmpMask, res);
            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);
            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);
            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }
            context.Copy(GetVec(op.Rd), res);
        }

        private static Operand EmitSse2Sll_128(ArmEmitterContext context, Operand op, int shift)
        {
            // The upper part of op is assumed to be zero.
            Debug.Assert(shift >= 0 && shift < 64);
            if (shift == 0)
            {
                return op;
            }
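            // Bits that cross the 64-bit lane boundary are recovered by shifting the whole
            // vector left one lane (PSLLDQ by 8 bytes) and then right within the lane.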
            Operand high = context.AddIntrinsic(Intrinsic.X86Pslldq, op, Const(8));
            high = context.AddIntrinsic(Intrinsic.X86Psrlq, high, Const(64 - shift));
            Operand low = context.AddIntrinsic(Intrinsic.X86Psllq, op, Const(shift));
            return context.AddIntrinsic(Intrinsic.X86Por, high, low);
        }
    }
}