// InstEmitSimdArithmetic.cs

// https://github.com/intel/ARM_NEON_2_x86_SSE/blob/master/NEON_2_SSE.h
// https://www.agner.org/optimize/#vectorclass @ vectori128.h

using ARMeilleure.Decoders;
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.State;
using ARMeilleure.Translation;
using System;
using System.Diagnostics;

using static ARMeilleure.Instructions.InstEmitHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper;
using static ARMeilleure.IntermediateRepresentation.OperandHelper;

namespace ARMeilleure.Instructions
{
    using Func2I = Func<Operand, Operand, Operand>;

    static partial class InstEmit
    {
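        // Each emitter below follows the same pattern: when a suitable x86 extension is
        // available (Optimizations.UseSse2/UseSsse3/UseSse41/...), the A64 instruction is
        // lowered directly to host intrinsics; otherwise it falls back to generic IR
        // operations or to managed helpers in SoftFallback/SoftFloat32 that model the
        // ARM semantics exactly.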
        public static void Abs_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Abs_V(ArmEmitterContext context)
        {
            EmitVectorUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Add_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
        }
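
        // ADD (vector): the SSE2 path maps the element size directly to PADDB/W/D/Q via
        // the X86PaddInstruction table; for 64-bit (Simd64) operands the upper half of the
        // destination is zeroed, matching the A64 rule that a write to a 64-bit vector
        // clears the top 64 bits of the register.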
        public static void Add_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic addInst = X86PaddInstruction[op.Size];
                Operand res = context.AddIntrinsic(addInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }
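
        // ADDHN: adds each pair of elements and narrows the result to the high half of the
        // double-width sum (round: false distinguishes it from RADDHN, which adds a rounding
        // constant before narrowing).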
        public static void Addhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: false);
        }

        public static void Addp_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand ne0 = EmitVectorExtractZx(context, op.Rn, 0, op.Size);
            Operand ne1 = EmitVectorExtractZx(context, op.Rn, 1, op.Size);
            Operand res = context.Add(ne0, ne1);
            context.Copy(GetVec(op.Rd), EmitVectorInsert(context, context.VectorZero(), res, 0, op.Size));
        }

        public static void Addp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PaddInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Addv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Cls_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int elems = op.GetBytesCount() >> op.Size;
            int eSize = 8 << op.Size;
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);
                Operand de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountLeadingSigns)), ne, Const(eSize));
                res = EmitVectorInsert(context, res, de, index, op.Size);
            }
            context.Copy(GetVec(op.Rd), res);
        }

        public static void Clz_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int elems = op.GetBytesCount() >> op.Size;
            int eSize = 8 << op.Size;
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);
                Operand de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountLeadingZeros)), ne, Const(eSize));
                res = EmitVectorInsert(context, res, de, index, op.Size);
            }
            context.Copy(GetVec(op.Rd), res);
        }
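
        // CNT: population count over the 8 or 16 bytes of the source. Uses the host POPCNT
        // instruction per byte when available, otherwise a SoftFallback bit-count helper.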
        public static void Cnt_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int elems = op.RegisterSize == RegisterSize.Simd128 ? 16 : 8;
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, 0);
                Operand de;
                if (Optimizations.UsePopCnt)
                {
                    de = context.AddIntrinsicLong(Intrinsic.X86Popcnt, ne);
                }
                else
                {
                    de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountSetBits8)), ne);
                }
                res = EmitVectorInsert(context, res, de, index, 0);
            }
            context.Copy(GetVec(op.Rd), res);
        }
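
        // FABD: computes abs(n - m). The SSE2 path subtracts, then strips the sign bit via
        // the EmitFloatAbs helper (defined elsewhere; presumably a bitwise AND against a
        // sign-mask constant). Scalar single-precision results keep only the low 32 bits,
        // hence VectorZeroUpper96.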
        public static void Fabd_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subss, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, true, false);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subsd, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, false, false);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
                    return EmitUnaryMathCall(context, nameof(Math.Abs), res);
                });
            }
        }

        public static void Fabd_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subps, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, true, true);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subpd, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, false, true);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
                    return EmitUnaryMathCall(context, nameof(Math.Abs), res);
                });
            }
        }

        public static void Fabs_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                if (op.Size == 0)
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), true, false);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), false, false);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Abs), op1);
                });
            }
        }

        public static void Fabs_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), true, true);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), false, true);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Abs), op1);
                });
            }
        }
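
        // The floating-point arithmetic emitters below come in up to three tiers:
        // FastFP + SSE2 lowers straight to the host instruction, FastFP alone uses the
        // generic IR add/multiply/divide, and the precise path calls SoftFloat32 helpers
        // that implement the full ARM FP behavior (NaN propagation, FPCR/FPSR handling).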
        public static void Fadd_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Addss, Intrinsic.X86Addsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Fadd_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Addps, Intrinsic.X86Addpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Faddp_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse3)
            {
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddps, GetVec(op.Rn), GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddpd, GetVec(op.Rn), GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                OperandType type = sizeF != 0 ? OperandType.FP64 : OperandType.FP32;
                Operand ne0 = context.VectorExtract(type, GetVec(op.Rn), 0);
                Operand ne1 = context.VectorExtract(type, GetVec(op.Rn), 1);
                Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), ne0, ne1);
                context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
            }
        }

        public static void Faddp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;
                            Intrinsic addInst = (op.Size & 1) == 0 ? Intrinsic.X86Addps : Intrinsic.X86Addpd;
                            return context.AddIntrinsic(addInst, op1, op2);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Fdiv_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Divss, Intrinsic.X86Divsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPDiv), op1, op2);
                });
            }
        }

        public static void Fdiv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Divps, Intrinsic.X86Divpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPDiv), op1, op2);
                });
            }
        }
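
        // FMADD is architecturally fused (a + n * m with a single rounding), but the SSE2
        // fast path below issues a separate multiply and add, rounding twice; the fused
        // semantics are only preserved by the SoftFloat32.FPMulAdd fallback.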
        public static void Fmadd_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addss, a, res);
                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addsd, a, res);
                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }
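
        // FMAX/FMIN family: x86 MAXSS/MINSS return the second operand when an input is NaN
        // and do not match ARM's NaN propagation or flush-to-zero rules, so the fast path
        // wraps the raw max/min in EmitSse41ProcessNaNsOpF and EmitSseOrAvxHandleFzModeOpF
        // (helpers defined elsewhere) to patch up those cases.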
        public static void Fmax_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                    }, scalar: true, op1, op2);
                }, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmax_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                    }, scalar: false, op1, op2);
                }, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmaxnm_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnm_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return context.Call(typeof(SoftFloat32).GetMethod(nameof(SoftFloat32.FPMaxNum)), op1, op2);
                });
            }
        }

        public static void Fmaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmaxv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return context.Call(typeof(SoftFloat32).GetMethod(nameof(SoftFloat32.FPMax)), op1, op2);
                });
            }
        }

        public static void Fmin_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                    }, scalar: true, op1, op2);
                }, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fmin_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                    }, scalar: false, op1, op2);
                }, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fminnm_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnm_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return context.Call(typeof(SoftFloat32).GetMethod(nameof(SoftFloat32.FPMinNum)), op1, op2);
                });
            }
        }

        public static void Fminp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fminv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return context.Call(typeof(SoftFloat32).GetMethod(nameof(SoftFloat32.FPMin)), op1, op2);
                });
            }
        }

        public static void Fmla_Se(ArmEmitterContext context) // Fused.
        {
            EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Fmla_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);
                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }
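
        // The by-element forms first broadcast lane Index of m across the vector with a
        // shuffle: for 32-bit lanes the SHUFPS immediate packs Index into all four 2-bit
        // selector fields (Index * 0b01010101, e.g. Index == 3 -> 0xFF), and for 64-bit
        // lanes SHUFPD uses two 1-bit fields (Index | Index << 1).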
        public static void Fmla_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);
                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fmls_Se(ArmEmitterContext context) // Fused.
        {
            EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Fmls_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);
                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fmls_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);
                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fmsub_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subss, a, res);
                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, a, res);
                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fmul_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }

        public static void Fmul_Se(ArmEmitterContext context)
        {
            EmitScalarBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Fmul_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Mulps, Intrinsic.X86Mulpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }

        public static void Fmul_Ve(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }

        public static void Fmulx_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }

        public static void Fmulx_Se(ArmEmitterContext context)
        {
            EmitScalarBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }

        public static void Fmulx_V(ArmEmitterContext context)
        {
            EmitVectorBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }

        public static void Fmulx_Ve(ArmEmitterContext context)
        {
            EmitVectorBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }
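
        // FNEG: negation is an XOR against a vector of -0.0 (only the sign bit set), which
        // flips the sign of every element without touching exponent or mantissa bits.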
        public static void Fneg_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) => context.Negate(op1));
            }
        }

        public static void Fneg_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, -0f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, -0d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) => context.Negate(op1));
            }
        }

        public static void Fnmadd_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorps, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subss, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPNegMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fnmsub_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorps, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addss, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addsd, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPNegMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fnmul_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) => context.Negate(context.Multiply(op1, op2)));
        }
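
        // FRECPE: the single-precision fast path uses RCPSS/RCPPS, a hardware approximation
        // with roughly 12 bits of precision (Intel documents a relative error bound of
        // 1.5 * 2^-12), which is close to but not bit-exact against the architecturally
        // defined estimate that SoftFloat32.FPRecipEstimate computes; doubles always take
        // the fallback.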
        public static void Frecpe_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitScalarUnaryOpF(context, Intrinsic.X86Rcpss, 0);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipEstimate), op1);
                });
            }
        }

        public static void Frecpe_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitVectorUnaryOpF(context, Intrinsic.X86Rcpps, 0);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipEstimate), op1);
                });
            }
        }
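
        // FRECPS: the Newton-Raphson step 2 - n * m used to refine a reciprocal estimate.
        // Architecturally this is fused; the SSE2 path below rounds the multiply and the
        // subtract separately.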
        public static void Frecps_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetScalar(context, 2f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subss, mask, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetScalar(context, 2d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, mask, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipStepFused), op1, op2);
                });
            }
        }

        public static void Frecps_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, 2f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subps, mask, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, 2d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, mask, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipStepFused), op1, op2);
                });
            }
        }

        public static void Frecpx_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecpX), op1);
            });
        }
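
        // FRINT*: each variant fixes a rounding mode. With SSE4.1 these go through
        // EmitScalarRoundOpF/EmitVectorRoundOpF (helpers defined elsewhere, presumably
        // lowering to ROUNDSS/ROUNDPS with the matching immediate); otherwise they call
        // Math.Floor/Ceiling/Truncate or a midpoint-rounding helper. FRINTI/FRINTX round
        // using the current FPCR mode, so they always use the SoftFallback rounders.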
  1206. public static void Frinta_S(ArmEmitterContext context)
  1207. {
  1208. EmitScalarUnaryOpF(context, (op1) =>
  1209. {
  1210. return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
  1211. });
  1212. }
  1213. public static void Frinta_V(ArmEmitterContext context)
  1214. {
  1215. EmitVectorUnaryOpF(context, (op1) =>
  1216. {
  1217. return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
  1218. });
  1219. }
  1220. public static void Frinti_S(ArmEmitterContext context)
  1221. {
  1222. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  1223. EmitScalarUnaryOpF(context, (op1) =>
  1224. {
  1225. if (op.Size == 0)
  1226. {
  1227. return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.RoundF)), op1);
  1228. }
  1229. else /* if (op.Size == 1) */
  1230. {
  1231. return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.Round)), op1);
  1232. }
  1233. });
  1234. }
  1235. public static void Frinti_V(ArmEmitterContext context)
  1236. {
  1237. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  1238. int sizeF = op.Size & 1;
  1239. EmitVectorUnaryOpF(context, (op1) =>
  1240. {
  1241. if (sizeF == 0)
  1242. {
  1243. return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.RoundF)), op1);
  1244. }
  1245. else /* if (sizeF == 1) */
  1246. {
  1247. return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.Round)), op1);
  1248. }
  1249. });
  1250. }
  1251. public static void Frintm_S(ArmEmitterContext context)
  1252. {
  1253. if (Optimizations.UseSse41)
  1254. {
  1255. EmitScalarRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
  1256. }
  1257. else
  1258. {
  1259. EmitScalarUnaryOpF(context, (op1) =>
  1260. {
  1261. return EmitUnaryMathCall(context, nameof(Math.Floor), op1);
  1262. });
  1263. }
  1264. }
  1265. public static void Frintm_V(ArmEmitterContext context)
  1266. {
  1267. if (Optimizations.UseSse41)
  1268. {
  1269. EmitVectorRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
  1270. }
  1271. else
  1272. {
  1273. EmitVectorUnaryOpF(context, (op1) =>
  1274. {
  1275. return EmitUnaryMathCall(context, nameof(Math.Floor), op1);
  1276. });
  1277. }
  1278. }
public static void Frintn_S(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        EmitScalarRoundOpF(context, FPRoundingMode.ToNearest);
    }
    else
    {
        EmitScalarUnaryOpF(context, (op1) =>
        {
            return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
        });
    }
}

public static void Frintn_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        EmitVectorRoundOpF(context, FPRoundingMode.ToNearest);
    }
    else
    {
        EmitVectorUnaryOpF(context, (op1) =>
        {
            return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
        });
    }
}
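
// FRINTP: round towards plus infinity (ceiling).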
public static void Frintp_S(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        EmitScalarRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
    }
    else
    {
        EmitScalarUnaryOpF(context, (op1) =>
        {
            return EmitUnaryMathCall(context, nameof(Math.Ceiling), op1);
        });
    }
}

public static void Frintp_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        EmitVectorRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
    }
    else
    {
        EmitVectorUnaryOpF(context, (op1) =>
        {
            return EmitUnaryMathCall(context, nameof(Math.Ceiling), op1);
        });
    }
}
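
// FRINTX: architecturally FRINTI plus the Inexact exception; emitted identically here.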
public static void Frintx_S(ArmEmitterContext context)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;
    EmitScalarUnaryOpF(context, (op1) =>
    {
        if (op.Size == 0)
        {
            return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.RoundF)), op1);
        }
        else /* if (op.Size == 1) */
        {
            return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.Round)), op1);
        }
    });
}

public static void Frintx_V(ArmEmitterContext context)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;
    int sizeF = op.Size & 1;
    EmitVectorUnaryOpF(context, (op1) =>
    {
        if (sizeF == 0)
        {
            return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.RoundF)), op1);
        }
        else /* if (sizeF == 1) */
        {
            return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.Round)), op1);
        }
    });
}
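
// FRINTZ: round towards zero (truncate).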
public static void Frintz_S(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        EmitScalarRoundOpF(context, FPRoundingMode.TowardsZero);
    }
    else
    {
        EmitScalarUnaryOpF(context, (op1) =>
        {
            return EmitUnaryMathCall(context, nameof(Math.Truncate), op1);
        });
    }
}

public static void Frintz_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        EmitVectorRoundOpF(context, FPRoundingMode.TowardsZero);
    }
    else
    {
        EmitVectorUnaryOpF(context, (op1) =>
        {
            return EmitUnaryMathCall(context, nameof(Math.Truncate), op1);
        });
    }
}

public static void Frsqrte_S(ArmEmitterContext context)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;
    int sizeF = op.Size & 1;
    if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
    {
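        // X86Rsqrtss is a ~12-bit approximation; FastFP accepts it as the FRSQRTE estimate.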
        EmitScalarUnaryOpF(context, Intrinsic.X86Rsqrtss, 0);
    }
    else
    {
        EmitScalarUnaryOpF(context, (op1) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtEstimate), op1);
        });
    }
}

public static void Frsqrte_V(ArmEmitterContext context)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;
    int sizeF = op.Size & 1;
    if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
    {
        EmitVectorUnaryOpF(context, Intrinsic.X86Rsqrtps, 0);
    }
    else
    {
        EmitVectorUnaryOpF(context, (op1) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtEstimate), op1);
        });
    }
}

public static void Frsqrts_S(ArmEmitterContext context) // Fused.
{
    if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        int sizeF = op.Size & 1;
        if (sizeF == 0)
        {
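            // FastFP FRSQRTS step: computes (3.0 - n * m) / 2 directly, without the
            // special-case handling of the softfloat path below.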
            Operand maskHalf = X86GetScalar(context, 0.5f);
            Operand maskThree = X86GetScalar(context, 3f);
            Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, GetVec(op.Rn), GetVec(op.Rm));
            res = context.AddIntrinsic(Intrinsic.X86Subss, maskThree, res);
            res = context.AddIntrinsic(Intrinsic.X86Mulss, maskHalf, res);
            context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
        }
        else /* if (sizeF == 1) */
        {
            Operand maskHalf = X86GetScalar(context, 0.5d);
            Operand maskThree = X86GetScalar(context, 3d);
            Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, GetVec(op.Rn), GetVec(op.Rm));
            res = context.AddIntrinsic(Intrinsic.X86Subsd, maskThree, res);
            res = context.AddIntrinsic(Intrinsic.X86Mulsd, maskHalf, res);
            context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
        }
    }
    else
    {
        EmitScalarBinaryOpF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtStepFused), op1, op2);
        });
    }
}

public static void Frsqrts_V(ArmEmitterContext context) // Fused.
{
    if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        int sizeF = op.Size & 1;
        if (sizeF == 0)
        {
            Operand maskHalf = X86GetAllElements(context, 0.5f);
            Operand maskThree = X86GetAllElements(context, 3f);
            Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, GetVec(op.Rn), GetVec(op.Rm));
            res = context.AddIntrinsic(Intrinsic.X86Subps, maskThree, res);
            res = context.AddIntrinsic(Intrinsic.X86Mulps, maskHalf, res);
            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }
            context.Copy(GetVec(op.Rd), res);
        }
        else /* if (sizeF == 1) */
        {
            Operand maskHalf = X86GetAllElements(context, 0.5d);
            Operand maskThree = X86GetAllElements(context, 3d);
            Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, GetVec(op.Rn), GetVec(op.Rm));
            res = context.AddIntrinsic(Intrinsic.X86Subpd, maskThree, res);
            res = context.AddIntrinsic(Intrinsic.X86Mulpd, maskHalf, res);
            context.Copy(GetVec(op.Rd), res);
        }
    }
    else
    {
        EmitVectorBinaryOpF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtStepFused), op1, op2);
        });
    }
}

public static void Fsqrt_S(ArmEmitterContext context)
{
    if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        EmitScalarUnaryOpF(context, Intrinsic.X86Sqrtss, Intrinsic.X86Sqrtsd);
    }
    else
    {
        EmitScalarUnaryOpF(context, (op1) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSqrt), op1);
        });
    }
}

public static void Fsqrt_V(ArmEmitterContext context)
{
    if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        EmitVectorUnaryOpF(context, Intrinsic.X86Sqrtps, Intrinsic.X86Sqrtpd);
    }
    else
    {
        EmitVectorUnaryOpF(context, (op1) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSqrt), op1);
        });
    }
}

public static void Fsub_S(ArmEmitterContext context)
{
    if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        EmitScalarBinaryOpF(context, Intrinsic.X86Subss, Intrinsic.X86Subsd);
    }
    else if (Optimizations.FastFP)
    {
        EmitScalarBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
    }
    else
    {
        EmitScalarBinaryOpF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
        });
    }
}

public static void Fsub_V(ArmEmitterContext context)
{
    if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        EmitVectorBinaryOpF(context, Intrinsic.X86Subps, Intrinsic.X86Subpd);
    }
    else if (Optimizations.FastFP)
    {
        EmitVectorBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
    }
    else
    {
        EmitVectorBinaryOpF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
        });
    }
}

public static void Mla_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        EmitSse41VectorMul_AddSub(context, AddSub.Add);
    }
    else
    {
        EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
        {
            return context.Add(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Mla_Ve(ArmEmitterContext context)
{
    EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
    {
        return context.Add(op1, context.Multiply(op2, op3));
    });
}

public static void Mls_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        EmitSse41VectorMul_AddSub(context, AddSub.Subtract);
    }
    else
    {
        EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
        {
            return context.Subtract(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Mls_Ve(ArmEmitterContext context)
{
    EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
    {
        return context.Subtract(op1, context.Multiply(op2, op3));
    });
}

public static void Mul_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        EmitSse41VectorMul_AddSub(context, AddSub.None);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
    }
}

public static void Mul_Ve(ArmEmitterContext context)
{
    EmitVectorBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
}

public static void Neg_S(ArmEmitterContext context)
{
    EmitScalarUnaryOpSx(context, (op1) => context.Negate(op1));
}

public static void Neg_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse2)
    {
        OpCodeSimd op = (OpCodeSimd)context.CurrOp;
        Intrinsic subInst = X86PsubInstruction[op.Size];
        Operand res = context.AddIntrinsic(subInst, context.VectorZero(), GetVec(op.Rn));
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorUnaryOpSx(context, (op1) => context.Negate(op1));
    }
}

public static void Raddhn_V(ArmEmitterContext context)
{
    EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: true);
}

public static void Rsubhn_V(ArmEmitterContext context)
{
    EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: true);
}

public static void Saba_V(ArmEmitterContext context)
{
    EmitVectorTernaryOpSx(context, (op1, op2, op3) =>
    {
        return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
    });
}

public static void Sabal_V(ArmEmitterContext context)
{
    EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
    {
        return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
    });
}

public static void Sabd_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        EmitSse41VectorSabdOp(context, op, n, m, isLong: false);
    }
    else
    {
        EmitVectorBinaryOpSx(context, (op1, op2) =>
        {
            return EmitAbs(context, context.Subtract(op1, op2));
        });
    }
}

public static void Sabdl_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse41 && op.Size < 2)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
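        // The "2" form (SABDL2) reads its source elements from the high half, so shift them down first.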
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = op.Size == 0
            ? Intrinsic.X86Pmovsxbw
            : Intrinsic.X86Pmovsxwd;
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        EmitSse41VectorSabdOp(context, op, n, m, isLong: true);
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) =>
        {
            return EmitAbs(context, context.Subtract(op1, op2));
        });
    }
}

public static void Sadalp_V(ArmEmitterContext context)
{
    EmitAddLongPairwise(context, signed: true, accumulate: true);
}

public static void Saddl_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovsxInstruction[op.Size];
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic addInst = X86PaddInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
    }
}

public static void Saddlp_V(ArmEmitterContext context)
{
    EmitAddLongPairwise(context, signed: true, accumulate: false);
}

public static void Saddlv_V(ArmEmitterContext context)
{
    EmitVectorLongAcrossVectorOpSx(context, (op1, op2) => context.Add(op1, op2));
}

public static void Saddw_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovsxInstruction[op.Size];
        m = context.AddIntrinsic(movInst, m);
        Intrinsic addInst = X86PaddInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
    }
    else
    {
        EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
    }
}

public static void Shadd_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse2 && op.Size > 0)
    {
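        // Overflow-free halving add: (n & m) + ((n ^ m) >> 1) == (n + m) >> 1.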
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Operand res = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
        Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);
        Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psraw : Intrinsic.X86Psrad;
        res2 = context.AddIntrinsic(shiftInst, res2, Const(1));
        Intrinsic addInst = X86PaddInstruction[op.Size];
        res = context.AddIntrinsic(addInst, res, res2);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpSx(context, (op1, op2) =>
        {
            return context.ShiftRightSI(context.Add(op1, op2), Const(1));
        });
    }
}

public static void Shsub_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse2 && op.Size < 2)
    {
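        // Signed halving sub via unsigned PAVG: bias both inputs into unsigned range,
        // then n' - pavg(n', m') == (n - m) >> 1 for the original signed values.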
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));
        Intrinsic addInst = X86PaddInstruction[op.Size];
        Operand nPlusMask = context.AddIntrinsic(addInst, n, mask);
        Operand mPlusMask = context.AddIntrinsic(addInst, m, mask);
        Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
        Operand res = context.AddIntrinsic(avgInst, nPlusMask, mPlusMask);
        Intrinsic subInst = X86PsubInstruction[op.Size];
        res = context.AddIntrinsic(subInst, nPlusMask, res);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpSx(context, (op1, op2) =>
        {
            return context.ShiftRightSI(context.Subtract(op1, op2), Const(1));
        });
    }
}

public static void Smax_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic maxInst = X86PmaxsInstruction[op.Size];
        Operand res = context.AddIntrinsic(maxInst, n, m);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
    }
}

public static void Smaxp_V(ArmEmitterContext context)
{
    if (Optimizations.UseSsse3)
    {
        EmitSsse3VectorPairwiseOp(context, X86PmaxsInstruction);
    }
    else
    {
        EmitVectorPairwiseOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
    }
}

public static void Smaxv_V(ArmEmitterContext context)
{
    EmitVectorAcrossVectorOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
}

public static void Smin_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic minInst = X86PminsInstruction[op.Size];
        Operand res = context.AddIntrinsic(minInst, n, m);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
    }
}

public static void Sminp_V(ArmEmitterContext context)
{
    if (Optimizations.UseSsse3)
    {
        EmitSsse3VectorPairwiseOp(context, X86PminsInstruction);
    }
    else
    {
        EmitVectorPairwiseOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
    }
}

public static void Sminv_V(ArmEmitterContext context)
{
    EmitVectorAcrossVectorOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
}

public static void Smlal_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse41 && op.Size < 2)
    {
        Operand d = GetVec(op.Rd);
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovsxInstruction[op.Size];
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
        Operand res = context.AddIntrinsic(mullInst, n, m);
        Intrinsic addInst = X86PaddInstruction[op.Size + 1];
        context.Copy(d, context.AddIntrinsic(addInst, d, res));
    }
    else
    {
        EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
        {
            return context.Add(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Smlal_Ve(ArmEmitterContext context)
{
    EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
    {
        return context.Add(op1, context.Multiply(op2, op3));
    });
}

public static void Smlsl_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse41 && op.Size < 2)
    {
        Operand d = GetVec(op.Rd);
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = op.Size == 0 ? Intrinsic.X86Pmovsxbw : Intrinsic.X86Pmovsxwd;
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
        Operand res = context.AddIntrinsic(mullInst, n, m);
        Intrinsic subInst = X86PsubInstruction[op.Size + 1];
        context.Copy(d, context.AddIntrinsic(subInst, d, res));
    }
    else
    {
        EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
        {
            return context.Subtract(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Smlsl_Ve(ArmEmitterContext context)
{
    EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
    {
        return context.Subtract(op1, context.Multiply(op2, op3));
    });
}

public static void Smull_V(ArmEmitterContext context)
{
    EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Multiply(op1, op2));
}

public static void Smull_Ve(ArmEmitterContext context)
{
    EmitVectorWidenBinaryOpByElemSx(context, (op1, op2) => context.Multiply(op1, op2));
}

public static void Sqabs_S(ArmEmitterContext context)
{
    EmitScalarSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
}

public static void Sqabs_V(ArmEmitterContext context)
{
    EmitVectorSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
}

public static void Sqadd_S(ArmEmitterContext context)
{
    EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Add);
}

public static void Sqadd_V(ArmEmitterContext context)
{
    EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Add);
}

public static void Sqdmulh_S(ArmEmitterContext context)
{
    EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false), SaturatingFlags.ScalarSx);
}

public static void Sqdmulh_V(ArmEmitterContext context)
{
    EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false), SaturatingFlags.VectorSx);
}

public static void Sqneg_S(ArmEmitterContext context)
{
    EmitScalarSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
}

public static void Sqneg_V(ArmEmitterContext context)
{
    EmitVectorSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
}

public static void Sqrdmulh_S(ArmEmitterContext context)
{
    EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true), SaturatingFlags.ScalarSx);
}

public static void Sqrdmulh_V(ArmEmitterContext context)
{
    EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true), SaturatingFlags.VectorSx);
}

public static void Sqsub_S(ArmEmitterContext context)
{
    EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Sub);
}

public static void Sqsub_V(ArmEmitterContext context)
{
    EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Sub);
}

public static void Sqxtn_S(ArmEmitterContext context)
{
    EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxSx);
}

public static void Sqxtn_V(ArmEmitterContext context)
{
    EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxSx);
}

public static void Sqxtun_S(ArmEmitterContext context)
{
    EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxZx);
}

public static void Sqxtun_V(ArmEmitterContext context)
{
    EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxZx);
}

public static void Srhadd_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse2 && op.Size < 2)
    {
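        // Signed rounding halving add: bias into unsigned range, let PAVG do the
        // round-up average, then add the bias back.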
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));
        Intrinsic subInst = X86PsubInstruction[op.Size];
        Operand nMinusMask = context.AddIntrinsic(subInst, n, mask);
        Operand mMinusMask = context.AddIntrinsic(subInst, m, mask);
        Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
        Operand res = context.AddIntrinsic(avgInst, nMinusMask, mMinusMask);
        Intrinsic addInst = X86PaddInstruction[op.Size];
        res = context.AddIntrinsic(addInst, mask, res);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpSx(context, (op1, op2) =>
        {
            Operand res = context.Add(op1, op2);
            res = context.Add(res, Const(1L));
            return context.ShiftRightSI(res, Const(1));
        });
    }
}

public static void Ssubl_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovsxInstruction[op.Size];
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic subInst = X86PsubInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
    }
}

public static void Ssubw_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovsxInstruction[op.Size];
        m = context.AddIntrinsic(movInst, m);
        Intrinsic subInst = X86PsubInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
    }
    else
    {
        EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
    }
}

public static void Sub_S(ArmEmitterContext context)
{
    EmitScalarBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
}

public static void Sub_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse2)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic subInst = X86PsubInstruction[op.Size];
        Operand res = context.AddIntrinsic(subInst, n, m);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
    }
}

public static void Subhn_V(ArmEmitterContext context)
{
    EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: false);
}

public static void Suqadd_S(ArmEmitterContext context)
{
    EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Accumulate);
}

public static void Suqadd_V(ArmEmitterContext context)
{
    EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Accumulate);
}

public static void Uaba_V(ArmEmitterContext context)
{
    EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
    {
        return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
    });
}

public static void Uabal_V(ArmEmitterContext context)
{
    EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
    {
        return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
    });
}

public static void Uabd_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        EmitSse41VectorUabdOp(context, op, n, m, isLong: false);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) =>
        {
            return EmitAbs(context, context.Subtract(op1, op2));
        });
    }
}

public static void Uabdl_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse41 && op.Size < 2)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = op.Size == 0
            ? Intrinsic.X86Pmovzxbw
            : Intrinsic.X86Pmovzxwd;
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        EmitSse41VectorUabdOp(context, op, n, m, isLong: true);
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) =>
        {
            return EmitAbs(context, context.Subtract(op1, op2));
        });
    }
}

public static void Uadalp_V(ArmEmitterContext context)
{
    EmitAddLongPairwise(context, signed: false, accumulate: true);
}

public static void Uaddl_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovzxInstruction[op.Size];
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic addInst = X86PaddInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
    }
}

public static void Uaddlp_V(ArmEmitterContext context)
{
    EmitAddLongPairwise(context, signed: false, accumulate: false);
}

public static void Uaddlv_V(ArmEmitterContext context)
{
    EmitVectorLongAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
}

public static void Uaddw_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovzxInstruction[op.Size];
        m = context.AddIntrinsic(movInst, m);
        Intrinsic addInst = X86PaddInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
    }
    else
    {
        EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
    }
}

public static void Uhadd_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse2 && op.Size > 0)
    {
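        // Same overflow-free halving trick as Shadd_V, with logical shifts for the unsigned case.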
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Operand res = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
        Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);
        Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psrlw : Intrinsic.X86Psrld;
        res2 = context.AddIntrinsic(shiftInst, res2, Const(1));
        Intrinsic addInst = X86PaddInstruction[op.Size];
        res = context.AddIntrinsic(addInst, res, res2);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) =>
        {
            return context.ShiftRightUI(context.Add(op1, op2), Const(1));
        });
    }
}

public static void Uhsub_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse2 && op.Size < 2)
    {
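        // For unsigned inputs no bias is needed: n - pavg(n, m) == (n - m) >> 1.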
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
        Operand res = context.AddIntrinsic(avgInst, n, m);
        Intrinsic subInst = X86PsubInstruction[op.Size];
        res = context.AddIntrinsic(subInst, n, res);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) =>
        {
            return context.ShiftRightUI(context.Subtract(op1, op2), Const(1));
        });
    }
}

public static void Umax_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic maxInst = X86PmaxuInstruction[op.Size];
        Operand res = context.AddIntrinsic(maxInst, n, m);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
    }
}

public static void Umaxp_V(ArmEmitterContext context)
{
    if (Optimizations.UseSsse3)
    {
        EmitSsse3VectorPairwiseOp(context, X86PmaxuInstruction);
    }
    else
    {
        EmitVectorPairwiseOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
    }
}

public static void Umaxv_V(ArmEmitterContext context)
{
    EmitVectorAcrossVectorOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
}

public static void Umin_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic minInst = X86PminuInstruction[op.Size];
        Operand res = context.AddIntrinsic(minInst, n, m);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
    }
}

public static void Uminp_V(ArmEmitterContext context)
{
    if (Optimizations.UseSsse3)
    {
        EmitSsse3VectorPairwiseOp(context, X86PminuInstruction);
    }
    else
    {
        EmitVectorPairwiseOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
    }
}

public static void Uminv_V(ArmEmitterContext context)
{
    EmitVectorAcrossVectorOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
}

public static void Umlal_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse41 && op.Size < 2)
    {
        Operand d = GetVec(op.Rd);
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovzxInstruction[op.Size];
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
        Operand res = context.AddIntrinsic(mullInst, n, m);
        Intrinsic addInst = X86PaddInstruction[op.Size + 1];
        context.Copy(d, context.AddIntrinsic(addInst, d, res));
    }
    else
    {
        EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
        {
            return context.Add(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Umlal_Ve(ArmEmitterContext context)
{
    EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
    {
        return context.Add(op1, context.Multiply(op2, op3));
    });
}

public static void Umlsl_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse41 && op.Size < 2)
    {
        Operand d = GetVec(op.Rd);
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = op.Size == 0 ? Intrinsic.X86Pmovzxbw : Intrinsic.X86Pmovzxwd;
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
        Operand res = context.AddIntrinsic(mullInst, n, m);
        Intrinsic subInst = X86PsubInstruction[op.Size + 1];
        context.Copy(d, context.AddIntrinsic(subInst, d, res));
    }
    else
    {
        EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
        {
            return context.Subtract(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Umlsl_Ve(ArmEmitterContext context)
{
    EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
    {
        return context.Subtract(op1, context.Multiply(op2, op3));
    });
}

public static void Umull_V(ArmEmitterContext context)
{
    EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
}

public static void Umull_Ve(ArmEmitterContext context)
{
    EmitVectorWidenBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
}

public static void Uqadd_S(ArmEmitterContext context)
{
    EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Add);
}

public static void Uqadd_V(ArmEmitterContext context)
{
    EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Add);
}

public static void Uqsub_S(ArmEmitterContext context)
{
    EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
}

public static void Uqsub_V(ArmEmitterContext context)
{
    EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
}

public static void Uqxtn_S(ArmEmitterContext context)
{
    EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarZxZx);
}

public static void Uqxtn_V(ArmEmitterContext context)
{
    EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorZxZx);
}

public static void Urhadd_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse2 && op.Size < 2)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
        Operand res = context.AddIntrinsic(avgInst, n, m);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) =>
        {
            Operand res = context.Add(op1, op2);
            res = context.Add(res, Const(1L));
            return context.ShiftRightUI(res, Const(1));
        });
    }
}

public static void Usqadd_S(ArmEmitterContext context)
{
    EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
}

public static void Usqadd_V(ArmEmitterContext context)
{
    EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
}

public static void Usubl_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovzxInstruction[op.Size];
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic subInst = X86PsubInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
    }
}

public static void Usubw_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovzxInstruction[op.Size];
        m = context.AddIntrinsic(movInst, m);
        Intrinsic subInst = X86PsubInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
    }
    else
    {
        EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
    }
}

private static Operand EmitAbs(ArmEmitterContext context, Operand value)
{
    Operand isPositive = context.ICompareGreaterOrEqual(value, Const(value.Type, 0));
    return context.ConditionalSelect(isPositive, value, context.Negate(value));
}

private static void EmitAddLongPairwise(ArmEmitterContext context, bool signed, bool accumulate)
{
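    // SADDLP/UADDLP and SADALP/UADALP: sums adjacent element pairs into double-width
    // lanes, optionally accumulating into the existing contents of Rd.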
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;
    Operand res = context.VectorZero();
    int pairs = op.GetPairsCount() >> op.Size;
    for (int index = 0; index < pairs; index++)
    {
        int pairIndex = index << 1;
        Operand ne0 = EmitVectorExtract(context, op.Rn, pairIndex, op.Size, signed);
        Operand ne1 = EmitVectorExtract(context, op.Rn, pairIndex + 1, op.Size, signed);
        Operand e = context.Add(ne0, ne1);
        if (accumulate)
        {
            Operand de = EmitVectorExtract(context, op.Rd, index, op.Size + 1, signed);
            e = context.Add(e, de);
        }
        res = EmitVectorInsert(context, res, e, index, op.Size + 1);
    }
    context.Copy(GetVec(op.Rd), res);
}

private static Operand EmitDoublingMultiplyHighHalf(
    ArmEmitterContext context,
    Operand n,
    Operand m,
    bool round)
{
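    // SQDMULH/SQRDMULH core: high half of 2 * n * m, optionally rounded. When both
    // inputs are the minimum negative 32-bit value the rounded result wraps to
    // int.MinValue; the compare-and-negate below restores the positive value so the
    // saturating wrapper can clamp it.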
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    int eSize = 8 << op.Size;
    Operand res = context.Multiply(n, m);
    if (!round)
    {
        res = context.ShiftRightSI(res, Const(eSize - 1));
    }
    else
    {
        long roundConst = 1L << (eSize - 1);
        res = context.ShiftLeft(res, Const(1));
        res = context.Add(res, Const(roundConst));
        res = context.ShiftRightSI(res, Const(eSize));
        Operand isIntMin = context.ICompareEqual(res, Const((long)int.MinValue));
        res = context.ConditionalSelect(isIntMin, context.Negate(res), res);
    }
    return res;
}

private static void EmitHighNarrow(ArmEmitterContext context, Func2I emit, bool round)
{
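    // ADDHN/SUBHN family: keep the high eSize bits of each 2*eSize result, with
    // optional rounding; the "2" forms (part != 0) write into the upper half of Rd.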
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    int elems = 8 >> op.Size;
    int eSize = 8 << op.Size;
    int part = op.RegisterSize == RegisterSize.Simd128 ? elems : 0;
    Operand d = GetVec(op.Rd);
    Operand res = part == 0 ? context.VectorZero() : context.Copy(d);
    long roundConst = 1L << (eSize - 1);
    for (int index = 0; index < elems; index++)
    {
        Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size + 1);
        Operand me = EmitVectorExtractZx(context, op.Rm, index, op.Size + 1);
        Operand de = emit(ne, me);
        if (round)
        {
            de = context.Add(de, Const(roundConst));
        }
        de = context.ShiftRightUI(de, Const(eSize));
        res = EmitVectorInsert(context, res, de, part + index, op.Size);
    }
    context.Copy(d, res);
}

private static Operand EmitMax64Op(ArmEmitterContext context, Operand op1, Operand op2, bool signed)
{
    Debug.Assert(op1.Type == OperandType.I64 && op2.Type == OperandType.I64);
    Operand cmp = signed
        ? context.ICompareGreaterOrEqual  (op1, op2)
        : context.ICompareGreaterOrEqualUI(op1, op2);
    return context.ConditionalSelect(cmp, op1, op2);
}

private static Operand EmitMin64Op(ArmEmitterContext context, Operand op1, Operand op2, bool signed)
{
    Debug.Assert(op1.Type == OperandType.I64 && op2.Type == OperandType.I64);
    Operand cmp = signed
        ? context.ICompareLessOrEqual  (op1, op2)
        : context.ICompareLessOrEqualUI(op1, op2);
    return context.ConditionalSelect(cmp, op1, op2);
}

private static void EmitScalarRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;
    Operand n = GetVec(op.Rn);
    Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundsd : Intrinsic.X86Roundss;
    Operand res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));
    if ((op.Size & 1) != 0)
    {
        res = context.VectorZeroUpper64(res);
    }
    else
    {
        res = context.VectorZeroUpper96(res);
    }
    context.Copy(GetVec(op.Rd), res);
}

private static void EmitVectorRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;
    Operand n = GetVec(op.Rn);
    Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundpd : Intrinsic.X86Roundps;
    Operand res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));
    if (op.RegisterSize == RegisterSize.Simd64)
    {
        res = context.VectorZeroUpper64(res);
    }
    context.Copy(GetVec(op.Rd), res);
}

public static void EmitSse2VectorIsNaNOpF(
    ArmEmitterContext context,
    Operand opF,
    out Operand qNaNMask,
    out Operand sNaNMask,
    bool? isQNaN = null)
{
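    // A NaN is quiet when the top fraction bit is set: bit 22 for singles, bit 51 for doubles.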
    IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;
    if ((op.Size & 1) == 0)
    {
        const int QBit = 22;
        Operand qMask = X86GetAllElements(context, 1 << QBit);
        Operand mask1 = context.AddIntrinsic(Intrinsic.X86Cmpps, opF, opF, Const((int)CmpCondition.UnorderedQ));
        Operand mask2 = context.AddIntrinsic(Intrinsic.X86Pand, opF, qMask);
        mask2 = context.AddIntrinsic(Intrinsic.X86Cmpps, mask2, qMask, Const((int)CmpCondition.Equal));
        qNaNMask = isQNaN == null || (bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andps, mask2, mask1) : null;
        sNaNMask = isQNaN == null || !(bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andnps, mask2, mask1) : null;
    }
    else /* if ((op.Size & 1) == 1) */
    {
        const int QBit = 51;
        Operand qMask = X86GetAllElements(context, 1L << QBit);
        Operand mask1 = context.AddIntrinsic(Intrinsic.X86Cmppd, opF, opF, Const((int)CmpCondition.UnorderedQ));
        Operand mask2 = context.AddIntrinsic(Intrinsic.X86Pand, opF, qMask);
        mask2 = context.AddIntrinsic(Intrinsic.X86Cmppd, mask2, qMask, Const((int)CmpCondition.Equal));
        qNaNMask = isQNaN == null || (bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andpd, mask2, mask1) : null;
        sNaNMask = isQNaN == null || !(bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andnpd, mask2, mask1) : null;
    }
}

public static Operand EmitSse41ProcessNaNsOpF(
    ArmEmitterContext context,
    Func2I emit,
    bool scalar,
    Operand n = null,
    Operand m = null)
{
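    // ARM FPProcessNaNs priority: an sNaN in n wins over an sNaN in m, which wins over
    // a qNaN in n, then m; the selected NaN is quieted by ORing in the quiet bit.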
    Operand nCopy = n ?? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rn));
    Operand mCopy = m ?? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rm));
    EmitSse2VectorIsNaNOpF(context, nCopy, out Operand nQNaNMask, out Operand nSNaNMask);
    EmitSse2VectorIsNaNOpF(context, mCopy, out _, out Operand mSNaNMask, isQNaN: false);
    int sizeF = ((IOpCodeSimd)context.CurrOp).Size & 1;
    if (sizeF == 0)
    {
        const int QBit = 22;
        Operand qMask = scalar ? X86GetScalar(context, 1 << QBit) : X86GetAllElements(context, 1 << QBit);
        Operand resNaNMask = context.AddIntrinsic(Intrinsic.X86Pandn, mSNaNMask, nQNaNMask);
        resNaNMask = context.AddIntrinsic(Intrinsic.X86Por, resNaNMask, nSNaNMask);
        Operand resNaN = context.AddIntrinsic(Intrinsic.X86Blendvps, mCopy, nCopy, resNaNMask);
        resNaN = context.AddIntrinsic(Intrinsic.X86Por, resNaN, qMask);
        Operand resMask = context.AddIntrinsic(Intrinsic.X86Cmpps, nCopy, mCopy, Const((int)CmpCondition.OrderedQ));
        Operand res = context.AddIntrinsic(Intrinsic.X86Blendvps, resNaN, emit(nCopy, mCopy), resMask);
        if (n != null || m != null)
        {
            return res;
        }
        if (scalar)
        {
            res = context.VectorZeroUpper96(res);
        }
        else if (((OpCodeSimdReg)context.CurrOp).RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);
        return null;
    }
    else /* if (sizeF == 1) */
    {
        const int QBit = 51;
        Operand qMask = scalar ? X86GetScalar(context, 1L << QBit) : X86GetAllElements(context, 1L << QBit);
        Operand resNaNMask = context.AddIntrinsic(Intrinsic.X86Pandn, mSNaNMask, nQNaNMask);
        resNaNMask = context.AddIntrinsic(Intrinsic.X86Por, resNaNMask, nSNaNMask);
        Operand resNaN = context.AddIntrinsic(Intrinsic.X86Blendvpd, mCopy, nCopy, resNaNMask);
        resNaN = context.AddIntrinsic(Intrinsic.X86Por, resNaN, qMask);
        Operand resMask = context.AddIntrinsic(Intrinsic.X86Cmppd, nCopy, mCopy, Const((int)CmpCondition.OrderedQ));
        Operand res = context.AddIntrinsic(Intrinsic.X86Blendvpd, resNaN, emit(nCopy, mCopy), resMask);
        if (n != null || m != null)
        {
            return res;
        }
        if (scalar)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);
        return null;
    }
}

public static Operand EmitSseOrAvxHandleFzModeOpF(
    ArmEmitterContext context,
    Func2I emit,
    bool scalar,
    Operand n = null,
    Operand m = null)
{
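    // Temporarily sets FTZ/DAZ in MXCSR so the x86 operation matches ARM flush-to-zero
    // behaviour, then restores the previous state around the emitted op.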
    Operand nCopy = n ?? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rn));
    Operand mCopy = m ?? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rm));
    EmitSseOrAvxEnterFtzAndDazModesOpF(context, out Operand isTrue);
    Operand res = emit(nCopy, mCopy);
    EmitSseOrAvxExitFtzAndDazModesOpF(context, isTrue);
    if (n != null || m != null)
    {
        return res;
    }
    int sizeF = ((IOpCodeSimd)context.CurrOp).Size & 1;
    if (sizeF == 0)
    {
        if (scalar)
        {
            res = context.VectorZeroUpper96(res);
        }
        else if (((OpCodeSimdReg)context.CurrOp).RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
    }
    else /* if (sizeF == 1) */
    {
        if (scalar)
        {
            res = context.VectorZeroUpper64(res);
        }
    }
    context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);
    return null;
}

private static Operand EmitSse2VectorMaxMinOpF(ArmEmitterContext context, Operand n, Operand m, bool isMax)
{
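    // SSE max/min do not honour the ARM rules for signed zeros, so the magnitude comes
    // from Max/Minps while the sign is recombined separately: AND of the sign bits for
    // max (negative only if both are), OR for min.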
  2803. IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;
  2804. if ((op.Size & 1) == 0)
  2805. {
  2806. Operand mask = X86GetAllElements(context, -0f);
  2807. Operand res = context.AddIntrinsic(isMax ? Intrinsic.X86Maxps : Intrinsic.X86Minps, n, m);
  2808. res = context.AddIntrinsic(Intrinsic.X86Andnps, mask, res);
  2809. Operand resSign = context.AddIntrinsic(isMax ? Intrinsic.X86Pand : Intrinsic.X86Por, n, m);
  2810. resSign = context.AddIntrinsic(Intrinsic.X86Andps, mask, resSign);
  2811. return context.AddIntrinsic(Intrinsic.X86Por, res, resSign);
  2812. }
  2813. else /* if ((op.Size & 1) == 1) */
  2814. {
  2815. Operand mask = X86GetAllElements(context, -0d);
  2816. Operand res = context.AddIntrinsic(isMax ? Intrinsic.X86Maxpd : Intrinsic.X86Minpd, n, m);
  2817. res = context.AddIntrinsic(Intrinsic.X86Andnpd, mask, res);
  2818. Operand resSign = context.AddIntrinsic(isMax ? Intrinsic.X86Pand : Intrinsic.X86Por, n, m);
  2819. resSign = context.AddIntrinsic(Intrinsic.X86Andpd, mask, resSign);
  2820. return context.AddIntrinsic(Intrinsic.X86Por, res, resSign);
  2821. }
  2822. }
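
        // FMAXNM/FMINNM semantics: when exactly one operand is a quiet NaN, the numeric
        // operand wins. A lone quiet NaN is replaced with -Inf (maxNum) or +Inf (minNum)
        // below, so the ordinary NaN-propagating max/min then selects the other operand.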
        private static Operand EmitSse41MaxMinNumOpF(
            ArmEmitterContext context,
            bool isMaxNum,
            bool scalar,
            Operand n = null,
            Operand m = null)
        {
            Operand nCopy = n ?? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rn));
            Operand mCopy = m ?? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rm));

            EmitSse2VectorIsNaNOpF(context, nCopy, out Operand nQNaNMask, out _, isQNaN: true);
            EmitSse2VectorIsNaNOpF(context, mCopy, out Operand mQNaNMask, out _, isQNaN: true);

            int sizeF = ((IOpCodeSimd)context.CurrOp).Size & 1;

            if (sizeF == 0)
            {
                Operand negInfMask = scalar
                    ? X86GetScalar     (context, isMaxNum ? float.NegativeInfinity : float.PositiveInfinity)
                    : X86GetAllElements(context, isMaxNum ? float.NegativeInfinity : float.PositiveInfinity);

                Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnps, mQNaNMask, nQNaNMask);
                Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnps, nQNaNMask, mQNaNMask);

                nCopy = context.AddIntrinsic(Intrinsic.X86Blendvps, nCopy, negInfMask, nMask);
                mCopy = context.AddIntrinsic(Intrinsic.X86Blendvps, mCopy, negInfMask, mMask);

                Operand res = EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: isMaxNum);
                    }, scalar: scalar, op1, op2);
                }, scalar: scalar, nCopy, mCopy);

                if (n != null || m != null)
                {
                    return res;
                }

                if (scalar)
                {
                    res = context.VectorZeroUpper96(res);
                }
                else if (((OpCodeSimdReg)context.CurrOp).RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);

                return null;
            }
            else /* if (sizeF == 1) */
            {
                Operand negInfMask = scalar
                    ? X86GetScalar     (context, isMaxNum ? double.NegativeInfinity : double.PositiveInfinity)
                    : X86GetAllElements(context, isMaxNum ? double.NegativeInfinity : double.PositiveInfinity);

                Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnpd, mQNaNMask, nQNaNMask);
                Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnpd, nQNaNMask, mQNaNMask);

                nCopy = context.AddIntrinsic(Intrinsic.X86Blendvpd, nCopy, negInfMask, nMask);
                mCopy = context.AddIntrinsic(Intrinsic.X86Blendvpd, mCopy, negInfMask, mMask);

                Operand res = EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: isMaxNum);
                    }, scalar: scalar, op1, op2);
                }, scalar: scalar, nCopy, mCopy);

                if (n != null || m != null)
                {
                    return res;
                }

                if (scalar)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);

                return null;
            }
        }
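
        // Selects the accumulate behavior for the multiply emitter below: plain multiply
        // (None), multiply-add (MLA-style) or multiply-subtract (MLS-style) against Rd.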
        private enum AddSub
        {
            None,
            Add,
            Subtract
        }
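
        // Element-wise integer multiply with optional accumulation. x86 has no packed
        // 8-bit multiply, so the byte case computes the odd bytes via 16-bit shifts and
        // pmullw, the even bytes via a plain pmullw, and blends the two halves with a
        // 0x00FF00FF byte mask.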
        private static void EmitSse41VectorMul_AddSub(ArmEmitterContext context, AddSub addSub)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            Operand res;

            if (op.Size == 0)
            {
                Operand ns8 = context.AddIntrinsic(Intrinsic.X86Psrlw, n, Const(8));
                Operand ms8 = context.AddIntrinsic(Intrinsic.X86Psrlw, m, Const(8));

                res = context.AddIntrinsic(Intrinsic.X86Pmullw, ns8, ms8);
                res = context.AddIntrinsic(Intrinsic.X86Psllw, res, Const(8));

                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);

                Operand mask = X86GetAllElements(context, 0x00FF00FF);

                res = context.AddIntrinsic(Intrinsic.X86Pblendvb, res, res2, mask);
            }
            else if (op.Size == 1)
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmulld, n, m);
            }

            Operand d = GetVec(op.Rd);

            if (addSub == AddSub.Add)
            {
                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, d, res);
            }
            else if (addSub == AddSub.Subtract)
            {
                Intrinsic subInst = X86PsubInstruction[op.Size];

                res = context.AddIntrinsic(subInst, d, res);
            }

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(d, res);
        }
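
        // Signed absolute difference (SABD/SABDL): a pcmpgt mask selects n - m for the
        // elements where n > m and m - n everywhere else.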
        private static void EmitSse41VectorSabdOp(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;

            Intrinsic cmpgtInst = X86PcmpgtInstruction[size];

            Operand cmpMask = context.AddIntrinsic(cmpgtInst, n, m);

            Intrinsic subInst = X86PsubInstruction[size];

            Operand res = context.AddIntrinsic(subInst, n, m);
            res = context.AddIntrinsic(Intrinsic.X86Pand, cmpMask, res);

            Operand res2 = context.AddIntrinsic(subInst, m, n);
            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);

            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);

            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
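
        // Unsigned absolute difference (UABD/UABDL): there is no unsigned pcmpgt, so the
        // n > m mask is built from pmaxu followed by pcmpeq (the maximum equals m exactly
        // when m >= n) and then inverted via pandn against an all-ones vector.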
        private static void EmitSse41VectorUabdOp(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;

            Intrinsic maxInst = X86PmaxuInstruction[size];

            Operand max = context.AddIntrinsic(maxInst, m, n);

            Intrinsic cmpeqInst = X86PcmpeqInstruction[size];

            Operand cmpMask = context.AddIntrinsic(cmpeqInst, max, m);

            Operand onesMask = X86GetAllElements(context, -1L);

            cmpMask = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, onesMask);

            Intrinsic subInst = X86PsubInstruction[size];

            Operand res  = context.AddIntrinsic(subInst, n, m);
            Operand res2 = context.AddIntrinsic(subInst, m, n);

            res  = context.AddIntrinsic(Intrinsic.X86Pand,  cmpMask, res);
            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);

            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);

            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
    }
}