// https://github.com/intel/ARM_NEON_2_x86_SSE/blob/master/NEON_2_SSE.h
// https://www.agner.org/optimize/#vectorclass @ vectori128.h

using ARMeilleure.Decoders;
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.State;
using ARMeilleure.Translation;
using System;

using static ARMeilleure.Instructions.InstEmitHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper;
using static ARMeilleure.IntermediateRepresentation.OperandHelper;

namespace ARMeilleure.Instructions
{
    using Func2I = Func<Operand, Operand, Operand>;

    static partial class InstEmit
    {
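        // Each emitter below follows the same two-tier pattern: when the host
        // supports a suitable x86 extension (and, for floating-point ops, FastFP
        // permits relaxed semantics), the operation is lowered to SSE/SSE2+
        // intrinsics; otherwise it falls back to generic IR or to a SoftFloat/
        // SoftFallback helper call that follows the ARM pseudocode.
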
        public static void Abs_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Abs_V(ArmEmitterContext context)
        {
            EmitVectorUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Add_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Add_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic addInst = X86PaddInstruction[op.Size];
                Operand res = context.AddIntrinsic(addInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

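        // ADDHN: add the two source vectors and keep only the high half of each
        // wider element, narrowing the result (the round parameter of
        // EmitHighNarrow selects the rounding variant).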
        public static void Addhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: false);
        }

        public static void Addp_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand ne0 = EmitVectorExtractZx(context, op.Rn, 0, op.Size);
            Operand ne1 = EmitVectorExtractZx(context, op.Rn, 1, op.Size);
            Operand res = context.Add(ne0, ne1);
            context.Copy(GetVec(op.Rd), EmitVectorInsert(context, context.VectorZero(), res, 0, op.Size));
        }

        public static void Addp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PaddInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Addv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
        }

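        // CLS and CLZ have no direct SSE equivalent, so both are emitted element
        // by element; only CLZ on 64-bit elements can use the IR's native
        // CountLeadingZeros, the rest go through SoftFallback helpers.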
        public static void Cls_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int elems = op.GetBytesCount() >> op.Size;
            int eSize = 8 << op.Size;
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);
                Operand de = context.Call(new _U64_U64_S32(SoftFallback.CountLeadingSigns), ne, Const(eSize));
                res = EmitVectorInsert(context, res, de, index, op.Size);
            }
            context.Copy(GetVec(op.Rd), res);
        }

        public static void Clz_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int elems = op.GetBytesCount() >> op.Size;
            int eSize = 8 << op.Size;
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);
                Operand de;
                if (eSize == 64)
                {
                    de = context.CountLeadingZeros(ne);
                }
                else
                {
                    de = context.Call(new _U64_U64_S32(SoftFallback.CountLeadingZeros), ne, Const(eSize));
                }
                res = EmitVectorInsert(context, res, de, index, op.Size);
            }
            context.Copy(GetVec(op.Rd), res);
        }

        public static void Cnt_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int elems = op.RegisterSize == RegisterSize.Simd128 ? 16 : 8;
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, 0);
                Operand de;
                if (Optimizations.UsePopCnt)
                {
                    de = context.AddIntrinsicLong(Intrinsic.X86Popcnt, ne);
                }
                else
                {
                    de = context.Call(new _U64_U64(SoftFallback.CountSetBits8), ne);
                }
                res = EmitVectorInsert(context, res, de, index, 0);
            }
            context.Copy(GetVec(op.Rd), res);
        }

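        // FABD computes abs(n - m); the SSE path subtracts and then clears the
        // sign bit via EmitFloatAbs rather than calling the soft-float helpers.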
        public static void Fabd_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subss, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, true, false);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subsd, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, false, false);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, SoftFloat32.FPSub, SoftFloat64.FPSub, op1, op2);
                    return EmitUnaryMathCall(context, MathF.Abs, Math.Abs, res);
                });
            }
        }

        public static void Fabd_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subps, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, true, true);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subpd, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, false, true);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, SoftFloat32.FPSub, SoftFloat64.FPSub, op1, op2);
                    return EmitUnaryMathCall(context, MathF.Abs, Math.Abs, res);
                });
            }
        }

        public static void Fabs_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                if (op.Size == 0)
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), true, false);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), false, false);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Abs, Math.Abs, op1);
                });
            }
        }

        public static void Fabs_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), true, true);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), false, true);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Abs, Math.Abs, op1);
                });
            }
        }

        public static void Fadd_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Addss, Intrinsic.X86Addsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPAdd, SoftFloat64.FPAdd, op1, op2);
                });
            }
        }

        public static void Fadd_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Addps, Intrinsic.X86Addpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPAdd, SoftFloat64.FPAdd, op1, op2);
                });
            }
        }

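        // FADDP (scalar) sums the two elements of the source pair; SSE3's
        // HADDPS/HADDPD perform the horizontal add in a single instruction.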
        public static void Faddp_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse3)
            {
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddps, GetVec(op.Rn), GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddpd, GetVec(op.Rn), GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                OperandType type = sizeF != 0 ? OperandType.FP64 : OperandType.FP32;
                Operand ne0 = context.VectorExtract(type, GetVec(op.Rn), 0);
                Operand ne1 = context.VectorExtract(type, GetVec(op.Rn), 1);
                Operand res = EmitSoftFloatCall(context, SoftFloat32.FPAdd, SoftFloat64.FPAdd, ne0, ne1);
                context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
            }
        }

        public static void Faddp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitSse2VectorPairwiseOpF(context, Intrinsic.X86Addps, Intrinsic.X86Addpd);
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPAdd, SoftFloat64.FPAdd, op1, op2);
                });
            }
        }

        public static void Fdiv_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Divss, Intrinsic.X86Divsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPDiv, SoftFloat64.FPDiv, op1, op2);
                });
            }
        }

        public static void Fdiv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Divps, Intrinsic.X86Divpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPDiv, SoftFloat64.FPDiv, op1, op2);
                });
            }
        }

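        // Note that this SSE2 path emits a separate multiply and add, which can
        // round differently from a true fused multiply-add; that is presumably
        // why it is only taken under FastFP, while the precise fallback uses
        // SoftFloat FPMulAdd.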
        public static void Fmadd_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addss, a, res);
                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addsd, a, res);
                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMulAdd, SoftFloat64.FPMulAdd, op1, op2, op3);
                });
            }
        }

        public static void Fmax_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Maxss, Intrinsic.X86Maxsd);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMax, SoftFloat64.FPMax, op1, op2);
                });
            }
        }

        public static void Fmax_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Maxps, Intrinsic.X86Maxpd);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMax, SoftFloat64.FPMax, op1, op2);
                });
            }
        }

        public static void Fmaxnm_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMaxNum, SoftFloat64.FPMaxNum, op1, op2);
                });
            }
        }

        public static void Fmaxnm_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMaxNum, SoftFloat64.FPMaxNum, op1, op2);
                });
            }
        }

        public static void Fmaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitSse2VectorPairwiseOpF(context, Intrinsic.X86Maxps, Intrinsic.X86Maxpd);
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMax, SoftFloat64.FPMax, op1, op2);
                });
            }
        }

        public static void Fmin_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Minss, Intrinsic.X86Minsd);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMin, SoftFloat64.FPMin, op1, op2);
                });
            }
        }

        public static void Fmin_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Minps, Intrinsic.X86Minpd);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMin, SoftFloat64.FPMin, op1, op2);
                });
            }
        }

        public static void Fminnm_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMinNum, SoftFloat64.FPMinNum, op1, op2);
                });
            }
        }

        public static void Fminnm_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMinNum, SoftFloat64.FPMinNum, op1, op2);
                });
            }
        }

        public static void Fminp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitSse2VectorPairwiseOpF(context, Intrinsic.X86Minps, Intrinsic.X86Minpd);
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMin, SoftFloat64.FPMin, op1, op2);
                });
            }
        }

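        // The *_Ve (by-element) forms broadcast lane op.Index of Rm across the
        // vector with SHUFPS/SHUFPD before multiplying; the shuffle masks below
        // simply replicate that index into every lane.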
        public static void Fmla_Se(ArmEmitterContext context) // Fused.
        {
            EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Fmla_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);
                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMulAdd, SoftFloat64.FPMulAdd, op1, op2, op3);
                });
            }
        }

        public static void Fmla_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);
                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMulAdd, SoftFloat64.FPMulAdd, op1, op2, op3);
                });
            }
        }

        public static void Fmls_Se(ArmEmitterContext context) // Fused.
        {
            EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Fmls_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);
                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMulSub, SoftFloat64.FPMulSub, op1, op2, op3);
                });
            }
        }

        public static void Fmls_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);
                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMulSub, SoftFloat64.FPMulSub, op1, op2, op3);
                });
            }
        }

        public static void Fmsub_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subss, a, res);
                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, a, res);
                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMulSub, SoftFloat64.FPMulSub, op1, op2, op3);
                });
            }
        }

        public static void Fmul_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMul, SoftFloat64.FPMul, op1, op2);
                });
            }
        }

        public static void Fmul_Se(ArmEmitterContext context)
        {
            EmitScalarBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Fmul_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Mulps, Intrinsic.X86Mulpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMul, SoftFloat64.FPMul, op1, op2);
                });
            }
        }

        public static void Fmul_Ve(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMul, SoftFloat64.FPMul, op1, op2);
                });
            }
        }

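        // FMULX always goes through SoftFloat: it must return +/-2.0 for
        // 0 * infinity instead of NaN, a special case plain SSE multiplies
        // cannot reproduce.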
        public static void Fmulx_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMulX, SoftFloat64.FPMulX, op1, op2);
            });
        }

        public static void Fmulx_Se(ArmEmitterContext context)
        {
            EmitScalarBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMulX, SoftFloat64.FPMulX, op1, op2);
            });
        }

        public static void Fmulx_V(ArmEmitterContext context)
        {
            EmitVectorBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMulX, SoftFloat64.FPMulX, op1, op2);
            });
        }

        public static void Fmulx_Ve(ArmEmitterContext context)
        {
            EmitVectorBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMulX, SoftFloat64.FPMulX, op1, op2);
            });
        }

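        // FNEG flips the sign bit by XORing with -0.0 (a sign-bit-only mask).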
        public static void Fneg_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) => context.Negate(op1));
            }
        }

        public static void Fneg_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, -0f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, -0d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) => context.Negate(op1));
            }
        }

        public static void Fnmadd_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorps, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subss, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPNegMulAdd, SoftFloat64.FPNegMulAdd, op1, op2, op3);
                });
            }
        }

        public static void Fnmsub_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorps, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addss, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addsd, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPNegMulSub, SoftFloat64.FPNegMulSub, op1, op2, op3);
                });
            }
        }

        public static void Fnmul_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) => context.Negate(context.Multiply(op1, op2)));
        }

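        // FRECPE/FRSQRTE map to the x86 RCP/RSQRT estimate instructions only for
        // single precision (sizeF == 0); x86 has no double-precision estimates,
        // so the 64-bit forms always use the SoftFloat estimate routines.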
        public static void Frecpe_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitScalarUnaryOpF(context, Intrinsic.X86Rcpss, 0);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRecipEstimate, SoftFloat64.FPRecipEstimate, op1);
                });
            }
        }

        public static void Frecpe_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitVectorUnaryOpF(context, Intrinsic.X86Rcpps, 0);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRecipEstimate, SoftFloat64.FPRecipEstimate, op1);
                });
            }
        }

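        // FRECPS computes 2.0 - n * m, the Newton-Raphson step used to refine a
        // reciprocal estimate.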
        public static void Frecps_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetScalar(context, 2f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subss, mask, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetScalar(context, 2d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, mask, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRecipStepFused, SoftFloat64.FPRecipStepFused, op1, op2);
                });
            }
        }

        public static void Frecps_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, 2f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subps, mask, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, 2d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, mask, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRecipStepFused, SoftFloat64.FPRecipStepFused, op1, op2);
                });
            }
        }

        public static void Frecpx_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPRecpX, SoftFloat64.FPRecpX, op1);
            });
        }

        public static void Frinta_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
            });
        }

        public static void Frinta_V(ArmEmitterContext context)
        {
            EmitVectorUnaryOpF(context, (op1) =>
            {
                return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
            });
        }

        public static void Frinti_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            EmitScalarUnaryOpF(context, (op1) =>
            {
                if (op.Size == 0)
                {
                    return context.Call(new _F32_F32(SoftFallback.RoundF), op1);
                }
                else /* if (op.Size == 1) */
                {
                    return context.Call(new _F64_F64(SoftFallback.Round), op1);
                }
            });
        }

        public static void Frinti_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            EmitVectorUnaryOpF(context, (op1) =>
            {
                if (sizeF == 0)
                {
                    return context.Call(new _F32_F32(SoftFallback.RoundF), op1);
                }
                else /* if (sizeF == 1) */
                {
                    return context.Call(new _F64_F64(SoftFallback.Round), op1);
                }
            });
        }

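        // With SSE4.1 the FRINT* family is lowered through EmitScalarRoundOpF/
        // EmitVectorRoundOpF with the target FPRoundingMode; otherwise each
        // rounding mode maps to the matching MathF/Math call (Floor, Ceiling,
        // Truncate, or round-to-even).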
        public static void Frintm_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Floor, Math.Floor, op1);
                });
            }
        }

        public static void Frintm_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Floor, Math.Floor, op1);
                });
            }
        }

        public static void Frintn_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.ToNearest);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
                });
            }
        }

        public static void Frintn_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.ToNearest);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
                });
            }
        }

        public static void Frintp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Ceiling, Math.Ceiling, op1);
                });
            }
        }

        public static void Frintp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Ceiling, Math.Ceiling, op1);
                });
            }
        }

        public static void Frintx_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            EmitScalarUnaryOpF(context, (op1) =>
            {
                if (op.Size == 0)
                {
                    return context.Call(new _F32_F32(SoftFallback.RoundF), op1);
                }
                else /* if (op.Size == 1) */
                {
                    return context.Call(new _F64_F64(SoftFallback.Round), op1);
                }
            });
        }

        public static void Frintx_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            EmitVectorUnaryOpF(context, (op1) =>
            {
                if (sizeF == 0)
                {
                    return context.Call(new _F32_F32(SoftFallback.RoundF), op1);
                }
                else /* if (sizeF == 1) */
                {
                    return context.Call(new _F64_F64(SoftFallback.Round), op1);
                }
            });
        }

        public static void Frintz_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.TowardsZero);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Truncate, Math.Truncate, op1);
                });
            }
        }

        public static void Frintz_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.TowardsZero);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Truncate, Math.Truncate, op1);
                });
            }
        }

        public static void Frsqrte_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitScalarUnaryOpF(context, Intrinsic.X86Rsqrtss, 0);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRSqrtEstimate, SoftFloat64.FPRSqrtEstimate, op1);
                });
            }
        }

        public static void Frsqrte_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitVectorUnaryOpF(context, Intrinsic.X86Rsqrtps, 0);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRSqrtEstimate, SoftFloat64.FPRSqrtEstimate, op1);
                });
            }
        }

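        // FRSQRTS computes (3.0 - n * m) / 2, the Newton-Raphson step for
        // refining a reciprocal square root estimate.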
        public static void Frsqrts_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand maskHalf = X86GetScalar(context, 0.5f);
                    Operand maskThree = X86GetScalar(context, 3f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subss, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulss, maskHalf, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand maskHalf = X86GetScalar(context, 0.5d);
                    Operand maskThree = X86GetScalar(context, 3d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulsd, maskHalf, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRSqrtStepFused, SoftFloat64.FPRSqrtStepFused, op1, op2);
                });
            }
        }

        public static void Frsqrts_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand maskHalf = X86GetAllElements(context, 0.5f);
                    Operand maskThree = X86GetAllElements(context, 3f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subps, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, maskHalf, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand maskHalf = X86GetAllElements(context, 0.5d);
                    Operand maskThree = X86GetAllElements(context, 3d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, maskHalf, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRSqrtStepFused, SoftFloat64.FPRSqrtStepFused, op1, op2);
                });
            }
        }

        public static void Fsqrt_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarUnaryOpF(context, Intrinsic.X86Sqrtss, Intrinsic.X86Sqrtsd);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPSqrt, SoftFloat64.FPSqrt, op1);
                });
            }
        }

        public static void Fsqrt_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorUnaryOpF(context, Intrinsic.X86Sqrtps, Intrinsic.X86Sqrtpd);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPSqrt, SoftFloat64.FPSqrt, op1);
                });
            }
        }

        public static void Fsub_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Subss, Intrinsic.X86Subsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPSub, SoftFloat64.FPSub, op1, op2);
                });
            }
        }

        public static void Fsub_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Subps, Intrinsic.X86Subpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPSub, SoftFloat64.FPSub, op1, op2);
                });
            }
        }

        public static void Mla_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41Mul_AddSub(context, AddSub.Add);
            }
            else
            {
                EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Mla_Ve(ArmEmitterContext context)
        {
            EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Mls_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41Mul_AddSub(context, AddSub.Subtract);
            }
            else
            {
                EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Mls_Ve(ArmEmitterContext context)
        {
            EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Mul_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41Mul_AddSub(context, AddSub.None);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
            }
        }

        public static void Mul_Ve(ArmEmitterContext context)
        {
            EmitVectorBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Neg_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpSx(context, (op1) => context.Negate(op1));
        }

        public static void Neg_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                Intrinsic subInst = X86PsubInstruction[op.Size];

                Operand res = context.AddIntrinsic(subInst, context.VectorZero(), GetVec(op.Rn));

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorUnaryOpSx(context, (op1) => context.Negate(op1));
            }
        }

        public static void Raddhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: true);
        }

        public static void Rsubhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: true);
        }

        public static void Saba_V(ArmEmitterContext context)
        {
            EmitVectorTernaryOpSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Sabal_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Sabd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                EmitSse41Sabd(context, op, n, m, isLong: false);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

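        // Widening ops (SABDL/SADDL/etc.) share a pattern on the SSE path: for the
        // "2" variants (RegisterSize.Simd128) PSRLDQ shifts the upper 64 bits of
        // each source down, then PMOVSX/PMOVZX widens every element to the next
        // element size before the actual operation.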
        public static void Sabdl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovsxbw
                    : Intrinsic.X86Pmovsxwd;

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                EmitSse41Sabd(context, op, n, m, isLong: true);
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Sadalp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: true, accumulate: true);
        }

        public static void Saddl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Saddlp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: true, accumulate: false);
        }

        public static void Saddlv_V(ArmEmitterContext context)
        {
            EmitVectorLongAcrossVectorOpSx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Saddw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                m = context.AddIntrinsic(movInst, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

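        // Signed halving add. The SSE path uses the overflow-free identity
        // (n + m) >> 1 == (n & m) + ((n ^ m) >> 1): the AND keeps the carry bits
        // (each worth 2) and the XOR keeps the differing bits, so no lane can
        // overflow before the shift.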
        public static void Shadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size > 0)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand res = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);

                Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psraw : Intrinsic.X86Psrad;

                res2 = context.AddIntrinsic(shiftInst, res2, Const(1));

                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, res, res2);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return context.ShiftRightSI(context.Add(op1, op2), Const(1));
                });
            }
        }

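        // Signed halving subtract. Adding 0x80.. flips the sign bit, mapping the
        // signed lanes onto the unsigned domain where PAVG is available. Since
        // PAVG(a, b) == (a + b + 1) >> 1, a - PAVG(a, b) == (a - b) >> 1 with floor
        // rounding, which is exactly SHSUB.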
        public static void Shsub_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));

                Intrinsic addInst = X86PaddInstruction[op.Size];

                Operand nPlusMask = context.AddIntrinsic(addInst, n, mask);
                Operand mPlusMask = context.AddIntrinsic(addInst, m, mask);

                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;

                Operand res = context.AddIntrinsic(avgInst, nPlusMask, mPlusMask);

                Intrinsic subInst = X86PsubInstruction[op.Size];

                res = context.AddIntrinsic(subInst, nPlusMask, res);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return context.ShiftRightSI(context.Subtract(op1, op2), Const(1));
                });
            }
        }

        public static void Smax_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic maxInst = X86PmaxsInstruction[op.Size];

                Operand res = context.AddIntrinsic(maxInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Delegate dlg = new _S64_S64_S64(Math.Max);

                EmitVectorBinaryOpSx(context, (op1, op2) => context.Call(dlg, op1, op2));
            }
        }

        public static void Smaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PmaxsInstruction);
            }
            else
            {
                Delegate dlg = new _S64_S64_S64(Math.Max);

                EmitVectorPairwiseOpSx(context, (op1, op2) => context.Call(dlg, op1, op2));
            }
        }

        public static void Smaxv_V(ArmEmitterContext context)
        {
            Delegate dlg = new _S64_S64_S64(Math.Max);

            EmitVectorAcrossVectorOpSx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Smin_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic minInst = X86PminsInstruction[op.Size];

                Operand res = context.AddIntrinsic(minInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Delegate dlg = new _S64_S64_S64(Math.Min);

                EmitVectorBinaryOpSx(context, (op1, op2) => context.Call(dlg, op1, op2));
            }
        }

        public static void Sminp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PminsInstruction);
            }
            else
            {
                Delegate dlg = new _S64_S64_S64(Math.Min);

                EmitVectorPairwiseOpSx(context, (op1, op2) => context.Call(dlg, op1, op2));
            }
        }

        public static void Sminv_V(ArmEmitterContext context)
        {
            Delegate dlg = new _S64_S64_S64(Math.Min);

            EmitVectorAcrossVectorOpSx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Smlal_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;

                Operand res = context.AddIntrinsic(mullInst, n, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(d, context.AddIntrinsic(addInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Smlal_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Smlsl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = op.Size == 0 ? Intrinsic.X86Pmovsxbw : Intrinsic.X86Pmovsxwd;

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;

                Operand res = context.AddIntrinsic(mullInst, n, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(d, context.AddIntrinsic(subInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Smlsl_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Smull_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Smull_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenBinaryOpByElemSx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Sqabs_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Sqabs_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Sqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Add);
        }

        public static void Sqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Add);
        }

        public static void Sqdmulh_S(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false), SaturatingFlags.ScalarSx);
        }

        public static void Sqdmulh_V(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false), SaturatingFlags.VectorSx);
        }

        public static void Sqneg_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
        }

        public static void Sqneg_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
        }

        public static void Sqrdmulh_S(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true), SaturatingFlags.ScalarSx);
        }

        public static void Sqrdmulh_V(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true), SaturatingFlags.VectorSx);
        }

        public static void Sqsub_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Sub);
        }

        public static void Sqsub_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Sub);
        }

        public static void Sqxtn_S(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxSx);
        }

        public static void Sqxtn_V(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxSx);
        }

        public static void Sqxtun_S(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxZx);
        }

        public static void Sqxtun_V(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxZx);
        }

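        // Signed rounding halving add. Subtracting the 0x80.. mask flips the sign
        // bit (the mask is self-inverse modulo the lane size), moving the lanes into
        // the unsigned domain; PAVG then computes the rounding average
        // (n + m + 1) >> 1, and adding the mask back restores the sign bit.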
        public static void Srhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));

                Intrinsic subInst = X86PsubInstruction[op.Size];

                Operand nMinusMask = context.AddIntrinsic(subInst, n, mask);
                Operand mMinusMask = context.AddIntrinsic(subInst, m, mask);

                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;

                Operand res = context.AddIntrinsic(avgInst, nMinusMask, mMinusMask);

                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, mask, res);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    Operand res = context.Add(op1, op2);

                    res = context.Add(res, Const(1L));

                    return context.ShiftRightSI(res, Const(1));
                });
            }
        }

        public static void Ssubl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Ssubw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                m = context.AddIntrinsic(movInst, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Sub_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
        }

        public static void Sub_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic subInst = X86PsubInstruction[op.Size];

                Operand res = context.AddIntrinsic(subInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Subhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: false);
        }

        public static void Suqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Accumulate);
        }

        public static void Suqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Accumulate);
        }

        public static void Uaba_V(ArmEmitterContext context)
        {
            EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Uabal_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Uabd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                EmitSse41Uabd(context, op, n, m, isLong: false);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Uabdl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovzxbw
                    : Intrinsic.X86Pmovzxwd;

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                EmitSse41Uabd(context, op, n, m, isLong: true);
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Uadalp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: false, accumulate: true);
        }

        public static void Uaddl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Uaddlp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: false, accumulate: false);
        }

        public static void Uaddlv_V(ArmEmitterContext context)
        {
            EmitVectorLongAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Uaddw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                m = context.AddIntrinsic(movInst, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

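        // Unsigned halving add: same carry-preserving identity as Shadd_V,
        // (n + m) >> 1 == (n & m) + ((n ^ m) >> 1), using logical shifts.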
        public static void Uhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size > 0)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand res = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);

                Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psrlw : Intrinsic.X86Psrld;

                res2 = context.AddIntrinsic(shiftInst, res2, Const(1));

                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, res, res2);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return context.ShiftRightUI(context.Add(op1, op2), Const(1));
                });
            }
        }

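        // Unsigned halving subtract: since PAVG(n, m) == (n + m + 1) >> 1,
        // n - PAVG(n, m) == (n - m) >> 1 truncated to the lane, which is UHSUB
        // directly; no sign-bit bias is needed in the unsigned domain.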
        public static void Uhsub_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;

                Operand res = context.AddIntrinsic(avgInst, n, m);

                Intrinsic subInst = X86PsubInstruction[op.Size];

                res = context.AddIntrinsic(subInst, n, res);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return context.ShiftRightUI(context.Subtract(op1, op2), Const(1));
                });
            }
        }

        public static void Umax_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic maxInst = X86PmaxuInstruction[op.Size];

                Operand res = context.AddIntrinsic(maxInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Delegate dlg = new _U64_U64_U64(Math.Max);

                EmitVectorBinaryOpZx(context, (op1, op2) => context.Call(dlg, op1, op2));
            }
        }

        public static void Umaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PmaxuInstruction);
            }
            else
            {
                Delegate dlg = new _U64_U64_U64(Math.Max);

                EmitVectorPairwiseOpZx(context, (op1, op2) => context.Call(dlg, op1, op2));
            }
        }

        public static void Umaxv_V(ArmEmitterContext context)
        {
            Delegate dlg = new _U64_U64_U64(Math.Max);

            EmitVectorAcrossVectorOpZx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Umin_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic minInst = X86PminuInstruction[op.Size];

                Operand res = context.AddIntrinsic(minInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Delegate dlg = new _U64_U64_U64(Math.Min);

                EmitVectorBinaryOpZx(context, (op1, op2) => context.Call(dlg, op1, op2));
            }
        }

        public static void Uminp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PminuInstruction);
            }
            else
            {
                Delegate dlg = new _U64_U64_U64(Math.Min);

                EmitVectorPairwiseOpZx(context, (op1, op2) => context.Call(dlg, op1, op2));
            }
        }

        public static void Uminv_V(ArmEmitterContext context)
        {
            Delegate dlg = new _U64_U64_U64(Math.Min);

            EmitVectorAcrossVectorOpZx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Umlal_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;

                Operand res = context.AddIntrinsic(mullInst, n, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(d, context.AddIntrinsic(addInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umlal_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Umlsl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = op.Size == 0 ? Intrinsic.X86Pmovzxbw : Intrinsic.X86Pmovzxwd;

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;

                Operand res = context.AddIntrinsic(mullInst, n, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(d, context.AddIntrinsic(subInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umlsl_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Umull_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Umull_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Uqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Add);
        }

        public static void Uqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Add);
        }

        public static void Uqsub_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
        }

        public static void Uqsub_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
        }

        public static void Uqxtn_S(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarZxZx);
        }

        public static void Uqxtn_V(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorZxZx);
        }

        public static void Urhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;

                Operand res = context.AddIntrinsic(avgInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    Operand res = context.Add(op1, op2);

                    res = context.Add(res, Const(1L));

                    return context.ShiftRightUI(res, Const(1));
                });
            }
        }

        public static void Usqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
        }

        public static void Usqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
        }

        public static void Usubl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Usubw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                m = context.AddIntrinsic(movInst, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        private static Operand EmitAbs(ArmEmitterContext context, Operand value)
        {
            Operand isPositive = context.ICompareGreaterOrEqual(value, Const(value.Type, 0));

            return context.ConditionalSelect(isPositive, value, context.Negate(value));
        }

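        // Shared helper for S/UADDLP and S/UADALP: sums adjacent element pairs into
        // double-width elements; when accumulate is set, the pair sums are added to
        // the existing destination elements instead of replacing them.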
        private static void EmitAddLongPairwise(ArmEmitterContext context, bool signed, bool accumulate)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand res = context.VectorZero();

            int pairs = op.GetPairsCount() >> op.Size;

            for (int index = 0; index < pairs; index++)
            {
                int pairIndex = index << 1;

                Operand ne0 = EmitVectorExtract(context, op.Rn, pairIndex, op.Size, signed);
                Operand ne1 = EmitVectorExtract(context, op.Rn, pairIndex + 1, op.Size, signed);

                Operand e = context.Add(ne0, ne1);

                if (accumulate)
                {
                    Operand de = EmitVectorExtract(context, op.Rd, index, op.Size + 1, signed);

                    e = context.Add(e, de);
                }

                res = EmitVectorInsert(context, res, e, index, op.Size + 1);
            }

            context.Copy(GetVec(op.Rd), res);
        }

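        // SQDMULH/SQRDMULH core: returns (2 * n * m) >> eSize, optionally rounded
        // to nearest (roundConst is the half-ulp bias). When eSize == 32 and both
        // operands are int.MinValue, the shift-left overflows the 64-bit
        // intermediate and the shift yields int.MinValue; the compare/negate at the
        // end flips that single case so the saturating caller clamps it correctly.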
        private static Operand EmitDoublingMultiplyHighHalf(
            ArmEmitterContext context,
            Operand n,
            Operand m,
            bool round)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            int eSize = 8 << op.Size;

            Operand res = context.Multiply(n, m);

            if (!round)
            {
                res = context.ShiftRightSI(res, Const(eSize - 1));
            }
            else
            {
                long roundConst = 1L << (eSize - 1);

                res = context.ShiftLeft(res, Const(1));
                res = context.Add(res, Const(roundConst));
                res = context.ShiftRightSI(res, Const(eSize));

                Operand isIntMin = context.ICompareEqual(res, Const((long)int.MinValue));

                res = context.ConditionalSelect(isIntMin, context.Negate(res), res);
            }

            return res;
        }

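        // ADDHN/SUBHN/RADDHN/RSUBHN core: applies emit to the double-width elements
        // of Rn and Rm, keeps the high half of each result (>> eSize, after an
        // optional half-ulp rounding bias), and narrows. part selects the upper
        // half of Rd for the "2" variants, which is why Rd is read first.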
        private static void EmitHighNarrow(ArmEmitterContext context, Func2I emit, bool round)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            int elems = 8 >> op.Size;
            int eSize = 8 << op.Size;

            int part = op.RegisterSize == RegisterSize.Simd128 ? elems : 0;

            Operand d = GetVec(op.Rd);

            Operand res = part == 0 ? context.VectorZero() : context.Copy(d);

            long roundConst = 1L << (eSize - 1);

            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size + 1);
                Operand me = EmitVectorExtractZx(context, op.Rm, index, op.Size + 1);

                Operand de = emit(ne, me);

                if (round)
                {
                    de = context.Add(de, Const(roundConst));
                }

                de = context.ShiftRightUI(de, Const(eSize));

                res = EmitVectorInsert(context, res, de, part + index, op.Size);
            }

            context.Copy(d, res);
        }

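        // Rounds using ROUNDSS/ROUNDSD (or the packed forms below);
        // X86GetRoundControl translates the Arm rounding mode into the SSE4.1
        // rounding-control immediate.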
        public static void EmitScalarRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand n = GetVec(op.Rn);

            Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundsd : Intrinsic.X86Roundss;

            Operand res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));

            if ((op.Size & 1) != 0)
            {
                res = context.VectorZeroUpper64(res);
            }
            else
            {
                res = context.VectorZeroUpper96(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }

        public static void EmitVectorRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand n = GetVec(op.Rn);

            Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundpd : Intrinsic.X86Roundps;

            Operand res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }

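        // Builds a per-element mask that is all-ones where opF holds a quiet NaN:
        // an unordered self-compare is true for any NaN, and testing the top
        // mantissa bit (bit 22 single, bit 51 double) filters out signaling NaNs.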
        public static Operand EmitSse2VectorIsQNaNOpF(ArmEmitterContext context, Operand opF)
        {
            IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;

            if ((op.Size & 1) == 0)
            {
                const int QBit = 22;

                Operand qMask = X86GetAllElements(context, 1 << QBit);

                Operand mask1 = context.AddIntrinsic(Intrinsic.X86Cmpps, opF, opF, Const((int)CmpCondition.UnorderedQ));

                Operand mask2 = context.AddIntrinsic(Intrinsic.X86Pand, opF, qMask);

                mask2 = context.AddIntrinsic(Intrinsic.X86Cmpps, mask2, qMask, Const((int)CmpCondition.Equal));

                return context.AddIntrinsic(Intrinsic.X86Andps, mask1, mask2);
            }
            else /* if ((op.Size & 1) == 1) */
            {
                const int QBit = 51;

                Operand qMask = X86GetAllElements(context, 1L << QBit);

                Operand mask1 = context.AddIntrinsic(Intrinsic.X86Cmppd, opF, opF, Const((int)CmpCondition.UnorderedQ));

                Operand mask2 = context.AddIntrinsic(Intrinsic.X86Pand, opF, qMask);

                mask2 = context.AddIntrinsic(Intrinsic.X86Cmppd, mask2, qMask, Const((int)CmpCondition.Equal));

                return context.AddIntrinsic(Intrinsic.X86Andpd, mask1, mask2);
            }
        }

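        // FMAXNM/FMINNM semantics: when exactly one operand is a quiet NaN, the
        // other operand wins. Emulated by replacing a lone QNaN lane with -Inf
        // (maxnum) or +Inf (minnum) before the ordinary max/min, which then always
        // selects the numeric lane.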
        private static void EmitSse41MaxMinNumOpF(ArmEmitterContext context, bool isMaxNum, bool scalar)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand d = GetVec(op.Rd);
            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            Operand nNum = context.Copy(n);
            Operand mNum = context.Copy(m);

            Operand nQNaNMask = EmitSse2VectorIsQNaNOpF(context, nNum);
            Operand mQNaNMask = EmitSse2VectorIsQNaNOpF(context, mNum);

            int sizeF = op.Size & 1;

            if (sizeF == 0)
            {
                Operand negInfMask = X86GetAllElements(context, isMaxNum ? float.NegativeInfinity : float.PositiveInfinity);

                Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnps, mQNaNMask, nQNaNMask);
                Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnps, nQNaNMask, mQNaNMask);

                nNum = context.AddIntrinsic(Intrinsic.X86Blendvps, nNum, negInfMask, nMask);
                mNum = context.AddIntrinsic(Intrinsic.X86Blendvps, mNum, negInfMask, mMask);

                Operand res = context.AddIntrinsic(isMaxNum ? Intrinsic.X86Maxps : Intrinsic.X86Minps, nNum, mNum);

                if (scalar)
                {
                    res = context.VectorZeroUpper96(res);
                }
                else if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(d, res);
            }
            else /* if (sizeF == 1) */
            {
                Operand negInfMask = X86GetAllElements(context, isMaxNum ? double.NegativeInfinity : double.PositiveInfinity);

                Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnpd, mQNaNMask, nQNaNMask);
                Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnpd, nQNaNMask, mQNaNMask);

                nNum = context.AddIntrinsic(Intrinsic.X86Blendvpd, nNum, negInfMask, nMask);
                mNum = context.AddIntrinsic(Intrinsic.X86Blendvpd, mNum, negInfMask, mMask);

                Operand res = context.AddIntrinsic(isMaxNum ? Intrinsic.X86Maxpd : Intrinsic.X86Minpd, nNum, mNum);

                if (scalar)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(d, res);
            }
        }

        private enum AddSub
        {
            None,
            Add,
            Subtract
        }

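        // MUL/MLA/MLS vector core. SSE has no 8-bit multiply, so for Size == 0 the
        // odd bytes are multiplied as 16-bit lanes after a shift-down, the even
        // bytes with a plain PMULLW, and PBLENDVB keyed on the 0x00FF00FF mask
        // merges the two partial results into per-byte products.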
        private static void EmitSse41Mul_AddSub(ArmEmitterContext context, AddSub addSub)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            Operand res = null;

            if (op.Size == 0)
            {
                Operand ns8 = context.AddIntrinsic(Intrinsic.X86Psrlw, n, Const(8));
                Operand ms8 = context.AddIntrinsic(Intrinsic.X86Psrlw, m, Const(8));

                res = context.AddIntrinsic(Intrinsic.X86Pmullw, ns8, ms8);
                res = context.AddIntrinsic(Intrinsic.X86Psllw, res, Const(8));

                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);

                Operand mask = X86GetAllElements(context, 0x00FF00FF);

                res = context.AddIntrinsic(Intrinsic.X86Pblendvb, res, res2, mask);
            }
            else if (op.Size == 1)
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmulld, n, m);
            }

            Operand d = GetVec(op.Rd);

            if (addSub == AddSub.Add)
            {
                switch (op.Size)
                {
                    case 0: res = context.AddIntrinsic(Intrinsic.X86Paddb, d, res); break;
                    case 1: res = context.AddIntrinsic(Intrinsic.X86Paddw, d, res); break;
                    case 2: res = context.AddIntrinsic(Intrinsic.X86Paddd, d, res); break;
                    case 3: res = context.AddIntrinsic(Intrinsic.X86Paddq, d, res); break;
                }
            }
            else if (addSub == AddSub.Subtract)
            {
                switch (op.Size)
                {
                    case 0: res = context.AddIntrinsic(Intrinsic.X86Psubb, d, res); break;
                    case 1: res = context.AddIntrinsic(Intrinsic.X86Psubw, d, res); break;
                    case 2: res = context.AddIntrinsic(Intrinsic.X86Psubd, d, res); break;
                    case 3: res = context.AddIntrinsic(Intrinsic.X86Psubq, d, res); break;
                }
            }

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(d, res);
        }

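        // Signed absolute difference: PCMPGT builds an n > m lane mask, which then
        // selects between n - m and m - n; the PAND/PANDN/POR triple is a
        // branchless per-lane select.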
        private static void EmitSse41Sabd(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;

            Intrinsic cmpgtInst = X86PcmpgtInstruction[size];

            Operand cmpMask = context.AddIntrinsic(cmpgtInst, n, m);

            Intrinsic subInst = X86PsubInstruction[size];

            Operand res = context.AddIntrinsic(subInst, n, m);

            res = context.AddIntrinsic(Intrinsic.X86Pand, cmpMask, res);

            Operand res2 = context.AddIntrinsic(subInst, m, n);

            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);

            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);

            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }

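        // Unsigned absolute difference: SSE has no unsigned compare, so the n > m
        // mask is derived from PMAXU followed by PCMPEQ against m (inverted via
        // PANDN against all-ones), then used to select between n - m and m - n as
        // in EmitSse41Sabd.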
        private static void EmitSse41Uabd(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;

            Intrinsic maxInst = X86PmaxuInstruction[size];

            Operand max = context.AddIntrinsic(maxInst, m, n);

            Intrinsic cmpeqInst = X86PcmpeqInstruction[size];

            Operand cmpMask = context.AddIntrinsic(cmpeqInst, max, m);

            Operand onesMask = X86GetAllElements(context, -1L);

            cmpMask = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, onesMask);

            Intrinsic subInst = X86PsubInstruction[size];

            Operand res = context.AddIntrinsic(subInst, n, m);
            Operand res2 = context.AddIntrinsic(subInst, m, n);

            res = context.AddIntrinsic(Intrinsic.X86Pand, cmpMask, res);
            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);

            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);

            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
    }
}