// InstEmitSimdArithmetic.cs

// https://github.com/intel/ARM_NEON_2_x86_SSE/blob/master/NEON_2_SSE.h
// https://www.agner.org/optimize/#vectorclass @ vectori128.h

using ARMeilleure.Decoders;
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.State;
using ARMeilleure.Translation;
using System;

using static ARMeilleure.Instructions.InstEmitHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper;
using static ARMeilleure.IntermediateRepresentation.OperandHelper;

namespace ARMeilleure.Instructions
{
    using Func2I = Func<Operand, Operand, Operand>;

    static partial class InstEmit
    {
        public static void Abs_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Abs_V(ArmEmitterContext context)
        {
            EmitVectorUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Add_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
        }
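
        // Add_V: on the SSE2 path the packed add is picked by element size through
        // the X86PaddInstruction lookup table; indexed by op.Size (0..3), it should
        // resolve to paddb/paddw/paddd/paddq, so one emitter covers all four sizes.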
        public static void Add_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic addInst = X86PaddInstruction[op.Size];
                Operand res = context.AddIntrinsic(addInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }
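
        // ADDHN/SUBHN narrow: each pair of 2*esize source elements is added or
        // subtracted and the high esize bits of the result are kept. The 'round'
        // flag of EmitHighNarrow selects the rounding (RADDHN/RSUBHN) forms below.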
        public static void Addhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: false);
        }

        public static void Addp_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand ne0 = EmitVectorExtractZx(context, op.Rn, 0, op.Size);
            Operand ne1 = EmitVectorExtractZx(context, op.Rn, 1, op.Size);
            Operand res = context.Add(ne0, ne1);
            context.Copy(GetVec(op.Rd), EmitVectorInsert(context, context.VectorZero(), res, 0, op.Size));
        }

        public static void Addp_V(ArmEmitterContext context)
        {
            EmitVectorPairwiseOpZx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Addv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
        }
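
        // CLS counts leading sign bits (bits below the sign bit that match it); x86
        // has no equivalent, so every element goes through SoftFallback. CLZ below
        // has a native path only for 64-bit elements, via CountLeadingZeros.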
        public static void Cls_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int elems = op.GetBytesCount() >> op.Size;
            int eSize = 8 << op.Size;
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);
                Operand de = context.Call(new _U64_U64_S32(SoftFallback.CountLeadingSigns), ne, Const(eSize));
                res = EmitVectorInsert(context, res, de, index, op.Size);
            }
            context.Copy(GetVec(op.Rd), res);
        }

        public static void Clz_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int elems = op.GetBytesCount() >> op.Size;
            int eSize = 8 << op.Size;
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);
                Operand de;
                if (eSize == 64)
                {
                    de = context.CountLeadingZeros(ne);
                }
                else
                {
                    de = context.Call(new _U64_U64_S32(SoftFallback.CountLeadingZeros), ne, Const(eSize));
                }
                res = EmitVectorInsert(context, res, de, index, op.Size);
            }
            context.Copy(GetVec(op.Rd), res);
        }
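
        // CNT is a per-byte population count: with POPCNT available each byte is
        // counted in hardware, otherwise SoftFallback.CountSetBits8 is called.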
        public static void Cnt_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int elems = op.RegisterSize == RegisterSize.Simd128 ? 16 : 8;
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, 0);
                Operand de;
                if (Optimizations.UsePopCnt)
                {
                    de = context.AddIntrinsicLong(Intrinsic.X86Popcnt, ne);
                }
                else
                {
                    de = context.Call(new _U64_U64(SoftFallback.CountSetBits8), ne);
                }
                res = EmitVectorInsert(context, res, de, index, 0);
            }
            context.Copy(GetVec(op.Rd), res);
        }
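
        // FABD computes |n - m|. The SSE path subtracts and then strips the sign
        // bit: andn(-0.0, x) evaluates ~(-0.0) & x, i.e. x with only the sign bit
        // cleared. The same mask trick implements FABS further down.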
        public static void Fabd_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subss, GetVec(op.Rn), GetVec(op.Rm));
                    Operand mask = X86GetScalar(context, -0f);
                    res = context.AddIntrinsic(Intrinsic.X86Andnps, mask, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subsd, GetVec(op.Rn), GetVec(op.Rm));
                    Operand mask = X86GetScalar(context, -0d);
                    res = context.AddIntrinsic(Intrinsic.X86Andnpd, mask, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, SoftFloat32.FPSub, SoftFloat64.FPSub, op1, op2);
                    return EmitUnaryMathCall(context, MathF.Abs, Math.Abs, res);
                });
            }
        }

        public static void Fabd_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subps, GetVec(op.Rn), GetVec(op.Rm));
                    Operand mask = X86GetAllElements(context, -0f);
                    res = context.AddIntrinsic(Intrinsic.X86Andnps, mask, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subpd, GetVec(op.Rn), GetVec(op.Rm));
                    Operand mask = X86GetAllElements(context, -0d);
                    res = context.AddIntrinsic(Intrinsic.X86Andnpd, mask, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, SoftFloat32.FPSub, SoftFloat64.FPSub, op1, op2);
                    return EmitUnaryMathCall(context, MathF.Abs, Math.Abs, res);
                });
            }
        }

        public static void Fabs_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Andnps, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Andnpd, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Abs, Math.Abs, op1);
                });
            }
        }

        public static void Fabs_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, -0f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Andnps, mask, GetVec(op.Rn));
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, -0d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Andnpd, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Abs, Math.Abs, op1);
                });
            }
        }

        public static void Fadd_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Addss, Intrinsic.X86Addsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPAdd, SoftFloat64.FPAdd, op1, op2);
                });
            }
        }

        public static void Fadd_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Addps, Intrinsic.X86Addpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPAdd, SoftFloat64.FPAdd, op1, op2);
                });
            }
        }
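
        // Scalar FADDP sums the two lowest elements of Rn; with SSE3 this is a
        // single horizontal add (haddps/haddpd) of the register with itself.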
        public static void Faddp_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse3)
            {
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddps, GetVec(op.Rn), GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddpd, GetVec(op.Rn), GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                OperandType type = sizeF != 0 ? OperandType.FP64 : OperandType.FP32;
                Operand ne0 = context.VectorExtract(type, GetVec(op.Rn), 0);
                Operand ne1 = context.VectorExtract(type, GetVec(op.Rn), 1);
                Operand res = EmitSoftFloatCall(context, SoftFloat32.FPAdd, SoftFloat64.FPAdd, ne0, ne1);
                context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
            }
        }

        public static void Faddp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorPairwiseOpF(context, Intrinsic.X86Addps, Intrinsic.X86Addpd);
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPAdd, SoftFloat64.FPAdd, op1, op2);
                });
            }
        }

        public static void Fdiv_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Divss, Intrinsic.X86Divsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPDiv, SoftFloat64.FPDiv, op1, op2);
                });
            }
        }

        public static void Fdiv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Divps, Intrinsic.X86Divpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPDiv, SoftFloat64.FPDiv, op1, op2);
                });
            }
        }

        public static void Fmadd_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addss, a, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addsd, a, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMulAdd, SoftFloat64.FPMulAdd, op1, op2, op3);
                });
            }
        }

        public static void Fmax_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Maxss, Intrinsic.X86Maxsd);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMax, SoftFloat64.FPMax, op1, op2);
                });
            }
        }

        public static void Fmax_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Maxps, Intrinsic.X86Maxpd);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMax, SoftFloat64.FPMax, op1, op2);
                });
            }
        }

        public static void Fmaxnm_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMaxNum, SoftFloat64.FPMaxNum, op1, op2);
            });
        }

        public static void Fmaxnm_V(ArmEmitterContext context)
        {
            EmitVectorBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMaxNum, SoftFloat64.FPMaxNum, op1, op2);
            });
        }

        public static void Fmaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorPairwiseOpF(context, Intrinsic.X86Maxps, Intrinsic.X86Maxpd);
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMax, SoftFloat64.FPMax, op1, op2);
                });
            }
        }

        public static void Fmin_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Minss, Intrinsic.X86Minsd);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMin, SoftFloat64.FPMin, op1, op2);
                });
            }
        }

        public static void Fmin_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Minps, Intrinsic.X86Minpd);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMin, SoftFloat64.FPMin, op1, op2);
                });
            }
        }

        public static void Fminnm_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMinNum, SoftFloat64.FPMinNum, op1, op2);
            });
        }

        public static void Fminnm_V(ArmEmitterContext context)
        {
            EmitVectorBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMinNum, SoftFloat64.FPMinNum, op1, op2);
            });
        }

        public static void Fminp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorPairwiseOpF(context, Intrinsic.X86Minps, Intrinsic.X86Minpd);
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMin, SoftFloat64.FPMin, op1, op2);
                });
            }
        }

        public static void Fmla_Se(ArmEmitterContext context) // Fused.
        {
            EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Fmla_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMulAdd, SoftFloat64.FPMulAdd, op1, op2, op3);
                });
            }
        }
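
        // The by-element (_Ve) forms first broadcast lane Index of m. For 32-bit
        // lanes the shufps selector packs the index into all four 2-bit fields,
        // e.g. Index == 2 gives 0b10_10_10_10 = 0xAA; for 64-bit lanes shufpd uses
        // one selector bit per lane, hence the mask Index | Index << 1.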
        public static void Fmla_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMulAdd, SoftFloat64.FPMulAdd, op1, op2, op3);
                });
            }
        }

        public static void Fmls_Se(ArmEmitterContext context) // Fused.
        {
            EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Fmls_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMulSub, SoftFloat64.FPMulSub, op1, op2, op3);
                });
            }
        }

        public static void Fmls_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMulSub, SoftFloat64.FPMulSub, op1, op2, op3);
                });
            }
        }

        public static void Fmsub_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subss, a, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, a, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMulSub, SoftFloat64.FPMulSub, op1, op2, op3);
                });
            }
        }

        public static void Fmul_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMul, SoftFloat64.FPMul, op1, op2);
                });
            }
        }

        public static void Fmul_Se(ArmEmitterContext context)
        {
            EmitScalarBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Fmul_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Mulps, Intrinsic.X86Mulpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMul, SoftFloat64.FPMul, op1, op2);
                });
            }
        }

        public static void Fmul_Ve(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMul, SoftFloat64.FPMul, op1, op2);
                });
            }
        }

        public static void Fmulx_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMulX, SoftFloat64.FPMulX, op1, op2);
            });
        }

        public static void Fmulx_Se(ArmEmitterContext context)
        {
            EmitScalarBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMulX, SoftFloat64.FPMulX, op1, op2);
            });
        }

        public static void Fmulx_V(ArmEmitterContext context)
        {
            EmitVectorBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMulX, SoftFloat64.FPMulX, op1, op2);
            });
        }

        public static void Fmulx_Ve(ArmEmitterContext context)
        {
            EmitVectorBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMulX, SoftFloat64.FPMulX, op1, op2);
            });
        }
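
        // FNEG is another sign-bit trick: xor with -0.0 flips the sign bit
        // (compare FABS above, where andn clears it); no FP arithmetic is involved.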
        public static void Fneg_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) => context.Negate(op1));
            }
        }

        public static void Fneg_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, -0f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, -0d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) => context.Negate(op1));
            }
        }

        public static void Fnmadd_S(ArmEmitterContext context) // Fused.
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            int sizeF = op.Size & 1;
            OperandType type = sizeF != 0 ? OperandType.FP64 : OperandType.FP32;
            Operand ne = context.VectorExtract(type, GetVec(op.Rn), 0);
            Operand me = context.VectorExtract(type, GetVec(op.Rm), 0);
            Operand ae = context.VectorExtract(type, GetVec(op.Ra), 0);
            Operand res = context.Subtract(context.Multiply(context.Negate(ne), me), ae);
            context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
        }

        public static void Fnmsub_S(ArmEmitterContext context) // Fused.
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            int sizeF = op.Size & 1;
            OperandType type = sizeF != 0 ? OperandType.FP64 : OperandType.FP32;
            Operand ne = context.VectorExtract(type, GetVec(op.Rn), 0);
            Operand me = context.VectorExtract(type, GetVec(op.Rm), 0);
            Operand ae = context.VectorExtract(type, GetVec(op.Ra), 0);
            Operand res = context.Subtract(context.Multiply(ne, me), ae);
            context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
        }

        public static void Fnmul_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) => context.Negate(context.Multiply(op1, op2)));
        }
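
        // FRECPE is architecturally only an estimate, so when FastFP is enabled the
        // single-precision forms map straight to rcpss/rcpps (Intel documents a
        // relative error of at most 1.5 * 2^-12 for these); double precision always
        // takes the soft-float path.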
        public static void Frecpe_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitScalarUnaryOpF(context, Intrinsic.X86Rcpss, 0);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRecipEstimate, SoftFloat64.FPRecipEstimate, op1);
                });
            }
        }

        public static void Frecpe_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitVectorUnaryOpF(context, Intrinsic.X86Rcpps, 0);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRecipEstimate, SoftFloat64.FPRecipEstimate, op1);
                });
            }
        }
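
        // FRECPS computes 2 - n*m, the correction factor of a Newton-Raphson
        // reciprocal step: given an estimate x of 1/d, x * (2 - d*x) roughly
        // doubles the number of accurate bits.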
        public static void Frecps_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetScalar(context, 2f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subss, mask, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetScalar(context, 2d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, mask, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRecipStepFused, SoftFloat64.FPRecipStepFused, op1, op2);
                });
            }
        }

        public static void Frecps_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, 2f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subps, mask, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, 2d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, mask, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRecipStepFused, SoftFloat64.FPRecipStepFused, op1, op2);
                });
            }
        }

        public static void Frecpx_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPRecpX, SoftFloat64.FPRecpX, op1);
            });
        }
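
        // The FRINT* family rounds to an integral value in floating-point; the
        // variants differ only in rounding mode (A: nearest, ties away from zero;
        // M: toward -infinity; N: nearest, ties to even; P: toward +infinity;
        // Z: toward zero; I/X: the current FPCR mode). With SSE4.1 the directed
        // modes become a single roundss/roundps via Emit*RoundOpF.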
        public static void Frinta_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
            });
        }

        public static void Frinta_V(ArmEmitterContext context)
        {
            EmitVectorUnaryOpF(context, (op1) =>
            {
                return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
            });
        }

        public static void Frinti_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            EmitScalarUnaryOpF(context, (op1) =>
            {
                if (op.Size == 0)
                {
                    return context.Call(new _F32_F32(SoftFallback.RoundF), op1);
                }
                else /* if (op.Size == 1) */
                {
                    return context.Call(new _F64_F64(SoftFallback.Round), op1);
                }
            });
        }

        public static void Frinti_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            EmitVectorUnaryOpF(context, (op1) =>
            {
                if (sizeF == 0)
                {
                    return context.Call(new _F32_F32(SoftFallback.RoundF), op1);
                }
                else /* if (sizeF == 1) */
                {
                    return context.Call(new _F64_F64(SoftFallback.Round), op1);
                }
            });
        }

        public static void Frintm_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Floor, Math.Floor, op1);
                });
            }
        }

        public static void Frintm_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Floor, Math.Floor, op1);
                });
            }
        }

        public static void Frintn_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.ToNearest);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
                });
            }
        }

        public static void Frintn_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.ToNearest);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
                });
            }
        }

        public static void Frintp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Ceiling, Math.Ceiling, op1);
                });
            }
        }

        public static void Frintp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Ceiling, Math.Ceiling, op1);
                });
            }
        }

        public static void Frintx_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            EmitScalarUnaryOpF(context, (op1) =>
            {
                if (op.Size == 0)
                {
                    return context.Call(new _F32_F32(SoftFallback.RoundF), op1);
                }
                else /* if (op.Size == 1) */
                {
                    return context.Call(new _F64_F64(SoftFallback.Round), op1);
                }
            });
        }

        public static void Frintx_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            EmitVectorUnaryOpF(context, (op1) =>
            {
                if (sizeF == 0)
                {
                    return context.Call(new _F32_F32(SoftFallback.RoundF), op1);
                }
                else /* if (sizeF == 1) */
                {
                    return context.Call(new _F64_F64(SoftFallback.Round), op1);
                }
            });
        }

        public static void Frintz_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.TowardsZero);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Truncate, Math.Truncate, op1);
                });
            }
        }

        public static void Frintz_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.TowardsZero);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Truncate, Math.Truncate, op1);
                });
            }
        }

        public static void Frsqrte_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitScalarUnaryOpF(context, Intrinsic.X86Rsqrtss, 0);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRSqrtEstimate, SoftFloat64.FPRSqrtEstimate, op1);
                });
            }
        }

        public static void Frsqrte_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitVectorUnaryOpF(context, Intrinsic.X86Rsqrtps, 0);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRSqrtEstimate, SoftFloat64.FPRSqrtEstimate, op1);
                });
            }
        }
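
        // FRSQRTS computes (3 - n*m) / 2, the Newton-Raphson factor for the
        // reciprocal square root: given an estimate x of 1/sqrt(d), the refinement
        // is x * (3 - d*x*x) / 2.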
        public static void Frsqrts_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand maskHalf = X86GetScalar(context, 0.5f);
                    Operand maskThree = X86GetScalar(context, 3f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subss, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulss, maskHalf, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand maskHalf = X86GetScalar(context, 0.5d);
                    Operand maskThree = X86GetScalar(context, 3d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulsd, maskHalf, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRSqrtStepFused, SoftFloat64.FPRSqrtStepFused, op1, op2);
                });
            }
        }

        public static void Frsqrts_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand maskHalf = X86GetAllElements(context, 0.5f);
                    Operand maskThree = X86GetAllElements(context, 3f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subps, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, maskHalf, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand maskHalf = X86GetAllElements(context, 0.5d);
                    Operand maskThree = X86GetAllElements(context, 3d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, maskHalf, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRSqrtStepFused, SoftFloat64.FPRSqrtStepFused, op1, op2);
                });
            }
        }

        public static void Fsqrt_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarUnaryOpF(context, Intrinsic.X86Sqrtss, Intrinsic.X86Sqrtsd);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPSqrt, SoftFloat64.FPSqrt, op1);
                });
            }
        }

        public static void Fsqrt_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorUnaryOpF(context, Intrinsic.X86Sqrtps, Intrinsic.X86Sqrtpd);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPSqrt, SoftFloat64.FPSqrt, op1);
                });
            }
        }

        public static void Fsub_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Subss, Intrinsic.X86Subsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPSub, SoftFloat64.FPSub, op1, op2);
                });
            }
        }

        public static void Fsub_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Subps, Intrinsic.X86Subpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPSub, SoftFloat64.FPSub, op1, op2);
                });
            }
        }
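
        // MLA/MLS/MUL share one SSE4.1 helper: EmitSse41Mul_AddSub presumably
        // multiplies per element and then, per the AddSub argument, adds the product
        // to Rd, subtracts it from Rd, or writes it directly.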

        public static void Mla_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41Mul_AddSub(context, AddSub.Add);
            }
            else
            {
                EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Mla_Ve(ArmEmitterContext context)
        {
            EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Mls_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41Mul_AddSub(context, AddSub.Subtract);
            }
            else
            {
                EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Mls_Ve(ArmEmitterContext context)
        {
            EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Mul_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41Mul_AddSub(context, AddSub.None);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
            }
        }

        public static void Mul_Ve(ArmEmitterContext context)
        {
            EmitVectorBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Neg_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpSx(context, (op1) => context.Negate(op1));
        }

        public static void Neg_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                Intrinsic subInst = X86PsubInstruction[op.Size];

                Operand res = context.AddIntrinsic(subInst, context.VectorZero(), GetVec(op.Rn));

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorUnaryOpSx(context, (op1) => context.Negate(op1));
            }
        }

        public static void Raddhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: true);
        }

        public static void Rsubhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: true);
        }

        public static void Saba_V(ArmEmitterContext context)
        {
            EmitVectorTernaryOpSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Sabal_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Sabd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                EmitSse41Sabd(context, op, n, m, isLong: false);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }
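
        // The widening (..l/..l2) forms below operate on the low half of the inputs, or
        // on the high half for the "2" variants: the high half is moved down with PSRLDQ
        // and the elements are sign- or zero-extended with PMOVSX/PMOVZX before operating
        // at twice the element size.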
        public static void Sabdl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovsxbw
                    : Intrinsic.X86Pmovsxwd;

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                EmitSse41Sabd(context, op, n, m, isLong: true);
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Sadalp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: true, accumulate: true);
        }

        public static void Saddl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Saddlp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: true, accumulate: false);
        }

        public static void Saddlv_V(ArmEmitterContext context)
        {
            EmitVectorLongAcrossVectorOpSx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Saddw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                m = context.AddIntrinsic(movInst, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
            }
        }
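
        // SHADD: signed halving add. The SSE2 path uses the overflow-free identity
        // (n & m) + ((n ^ m) >> 1) == (n + m) >> 1, with an arithmetic shift of the
        // XOR term for signed inputs.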
        public static void Shadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size > 0)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand res  = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);

                Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psraw : Intrinsic.X86Psrad;

                res2 = context.AddIntrinsic(shiftInst, res2, Const(1));

                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, res, res2);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return context.ShiftRightSI(context.Add(op1, op2), Const(1));
                });
            }
        }
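
        // SHSUB: signed halving subtract. Both inputs are biased into unsigned range
        // (+0x80 per lane, modulo the lane size), then PAVG gives (n' + m' + 1) >> 1
        // and n' - avg(n', m') == (n - m) >> 1 for the original signed values.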
        public static void Shsub_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));

                Intrinsic addInst = X86PaddInstruction[op.Size];

                Operand nPlusMask = context.AddIntrinsic(addInst, n, mask);
                Operand mPlusMask = context.AddIntrinsic(addInst, m, mask);

                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;

                Operand res = context.AddIntrinsic(avgInst, nPlusMask, mPlusMask);

                Intrinsic subInst = X86PsubInstruction[op.Size];

                res = context.AddIntrinsic(subInst, nPlusMask, res);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return context.ShiftRightSI(context.Subtract(op1, op2), Const(1));
                });
            }
        }

        public static void Smax_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic maxInst = X86PmaxsInstruction[op.Size];

                Operand res = context.AddIntrinsic(maxInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Delegate dlg = new _S64_S64_S64(Math.Max);

                EmitVectorBinaryOpSx(context, (op1, op2) => context.Call(dlg, op1, op2));
            }
        }

        public static void Smaxp_V(ArmEmitterContext context)
        {
            Delegate dlg = new _S64_S64_S64(Math.Max);

            EmitVectorPairwiseOpSx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Smaxv_V(ArmEmitterContext context)
        {
            Delegate dlg = new _S64_S64_S64(Math.Max);

            EmitVectorAcrossVectorOpSx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Smin_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic minInst = X86PminsInstruction[op.Size];

                Operand res = context.AddIntrinsic(minInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Delegate dlg = new _S64_S64_S64(Math.Min);

                EmitVectorBinaryOpSx(context, (op1, op2) => context.Call(dlg, op1, op2));
            }
        }

        public static void Sminp_V(ArmEmitterContext context)
        {
            Delegate dlg = new _S64_S64_S64(Math.Min);

            EmitVectorPairwiseOpSx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Sminv_V(ArmEmitterContext context)
        {
            Delegate dlg = new _S64_S64_S64(Math.Min);

            EmitVectorAcrossVectorOpSx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Smlal_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;

                Operand res = context.AddIntrinsic(mullInst, n, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(d, context.AddIntrinsic(addInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Smlal_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Smlsl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovsxbw
                    : Intrinsic.X86Pmovsxwd;

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;

                Operand res = context.AddIntrinsic(mullInst, n, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(d, context.AddIntrinsic(subInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Smlsl_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Smull_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Smull_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenBinaryOpByElemSx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Sqabs_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Sqabs_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Sqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Add);
        }

        public static void Sqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Add);
        }

        public static void Sqdmulh_S(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false), SaturatingFlags.ScalarSx);
        }

        public static void Sqdmulh_V(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false), SaturatingFlags.VectorSx);
        }

        public static void Sqneg_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
        }

        public static void Sqneg_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
        }

        public static void Sqrdmulh_S(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true), SaturatingFlags.ScalarSx);
        }

        public static void Sqrdmulh_V(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true), SaturatingFlags.VectorSx);
        }

        public static void Sqsub_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Sub);
        }

        public static void Sqsub_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Sub);
        }

        public static void Sqxtn_S(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxSx);
        }

        public static void Sqxtn_V(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxSx);
        }

        public static void Sqxtun_S(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxZx);
        }

        public static void Sqxtun_V(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxZx);
        }
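
        // SRHADD: signed rounding halving add. Lanes are biased into unsigned range
        // (subtracting 0x80 equals adding it, modulo the lane size) so that PAVG can
        // compute the rounded average; adding the bias back recovers the signed result.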
        public static void Srhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));

                Intrinsic subInst = X86PsubInstruction[op.Size];

                Operand nMinusMask = context.AddIntrinsic(subInst, n, mask);
                Operand mMinusMask = context.AddIntrinsic(subInst, m, mask);

                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;

                Operand res = context.AddIntrinsic(avgInst, nMinusMask, mMinusMask);

                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, mask, res);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    Operand res = context.Add(op1, op2);

                    res = context.Add(res, Const(1L));

                    return context.ShiftRightSI(res, Const(1));
                });
            }
        }

        public static void Ssubl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Ssubw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                m = context.AddIntrinsic(movInst, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Sub_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
        }

        public static void Sub_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic subInst = X86PsubInstruction[op.Size];

                Operand res = context.AddIntrinsic(subInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Subhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: false);
        }

        public static void Suqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Accumulate);
        }

        public static void Suqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Accumulate);
        }

        public static void Uaba_V(ArmEmitterContext context)
        {
            EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Uabal_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Uabd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                EmitSse41Uabd(context, op, n, m, isLong: false);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Uabdl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovzxbw
                    : Intrinsic.X86Pmovzxwd;

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                EmitSse41Uabd(context, op, n, m, isLong: true);
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Uadalp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: false, accumulate: true);
        }

        public static void Uaddl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Uaddlp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: false, accumulate: false);
        }

        public static void Uaddlv_V(ArmEmitterContext context)
        {
            EmitVectorLongAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Uaddw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                m = context.AddIntrinsic(movInst, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }
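
        // UHADD: unsigned halving add, using the same overflow-free identity as SHADD
        // but with a logical right shift of the XOR term.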
        public static void Uhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size > 0)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand res  = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);

                Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psrlw : Intrinsic.X86Psrld;

                res2 = context.AddIntrinsic(shiftInst, res2, Const(1));

                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, res, res2);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return context.ShiftRightUI(context.Add(op1, op2), Const(1));
                });
            }
        }
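
        // UHSUB: unsigned halving subtract: n - ((n + m + 1) >> 1) == (n - m) >> 1,
        // so a single PAVG and PSUB suffice.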
        public static void Uhsub_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;

                Operand res = context.AddIntrinsic(avgInst, n, m);

                Intrinsic subInst = X86PsubInstruction[op.Size];

                res = context.AddIntrinsic(subInst, n, res);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return context.ShiftRightUI(context.Subtract(op1, op2), Const(1));
                });
            }
        }

        public static void Umax_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic maxInst = X86PmaxuInstruction[op.Size];

                Operand res = context.AddIntrinsic(maxInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Delegate dlg = new _U64_U64_U64(Math.Max);

                EmitVectorBinaryOpZx(context, (op1, op2) => context.Call(dlg, op1, op2));
            }
        }

        public static void Umaxp_V(ArmEmitterContext context)
        {
            Delegate dlg = new _U64_U64_U64(Math.Max);

            EmitVectorPairwiseOpZx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Umaxv_V(ArmEmitterContext context)
        {
            Delegate dlg = new _U64_U64_U64(Math.Max);

            EmitVectorAcrossVectorOpZx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Umin_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic minInst = X86PminuInstruction[op.Size];

                Operand res = context.AddIntrinsic(minInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Delegate dlg = new _U64_U64_U64(Math.Min);

                EmitVectorBinaryOpZx(context, (op1, op2) => context.Call(dlg, op1, op2));
            }
        }

        public static void Uminp_V(ArmEmitterContext context)
        {
            Delegate dlg = new _U64_U64_U64(Math.Min);

            EmitVectorPairwiseOpZx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Uminv_V(ArmEmitterContext context)
        {
            Delegate dlg = new _U64_U64_U64(Math.Min);

            EmitVectorAcrossVectorOpZx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Umlal_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;

                Operand res = context.AddIntrinsic(mullInst, n, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(d, context.AddIntrinsic(addInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umlal_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Umlsl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovzxbw
                    : Intrinsic.X86Pmovzxwd;

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;

                Operand res = context.AddIntrinsic(mullInst, n, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(d, context.AddIntrinsic(subInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umlsl_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Umull_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Umull_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Uqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Add);
        }

        public static void Uqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Add);
        }

        public static void Uqsub_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
        }

        public static void Uqsub_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
        }

        public static void Uqxtn_S(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarZxZx);
        }

        public static void Uqxtn_V(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorZxZx);
        }
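
        // URHADD: unsigned rounding halving add; PAVG computes (n + m + 1) >> 1 directly.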
        public static void Urhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;

                Operand res = context.AddIntrinsic(avgInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    Operand res = context.Add(op1, op2);

                    res = context.Add(res, Const(1L));

                    return context.ShiftRightUI(res, Const(1));
                });
            }
        }

        public static void Usqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
        }

        public static void Usqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
        }

        public static void Usubl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Usubw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                m = context.AddIntrinsic(movInst, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }
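
        // Branchless absolute value: select between the value and its negation.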
        private static Operand EmitAbs(ArmEmitterContext context, Operand value)
        {
            Operand isPositive = context.ICompareGreaterOrEqual(value, Const(value.Type, 0));

            return context.ConditionalSelect(isPositive, value, context.Negate(value));
        }
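
        // Shared helper for S/UADDLP ("pairwise add long") and S/UADALP (the accumulating
        // form): adjacent element pairs are added at twice the element size, optionally
        // accumulating into the destination elements.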
        private static void EmitAddLongPairwise(ArmEmitterContext context, bool signed, bool accumulate)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand res = context.VectorZero();

            int pairs = op.GetPairsCount() >> op.Size;

            for (int index = 0; index < pairs; index++)
            {
                int pairIndex = index << 1;

                Operand ne0 = EmitVectorExtract(context, op.Rn, pairIndex,     op.Size, signed);
                Operand ne1 = EmitVectorExtract(context, op.Rn, pairIndex + 1, op.Size, signed);

                Operand e = context.Add(ne0, ne1);

                if (accumulate)
                {
                    Operand de = EmitVectorExtract(context, op.Rd, index, op.Size + 1, signed);

                    e = context.Add(e, de);
                }

                res = EmitVectorInsert(context, res, e, index, op.Size + 1);
            }

            context.Copy(GetVec(op.Rd), res);
        }
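
        // Shared helper for SQDMULH/SQRDMULH: returns the high half of 2 * n * m, with
        // optional rounding. In the rounding path the shift-left can overflow 64 bits
        // when both 32-bit operands are int.MinValue; the compare/negate fixup restores
        // the positive result so the caller's saturation logic can clamp it.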
        private static Operand EmitDoublingMultiplyHighHalf(
            ArmEmitterContext context,
            Operand n,
            Operand m,
            bool round)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            int eSize = 8 << op.Size;

            Operand res = context.Multiply(n, m);

            if (!round)
            {
                res = context.ShiftRightSI(res, Const(eSize - 1));
            }
            else
            {
                long roundConst = 1L << (eSize - 1);

                res = context.ShiftLeft(res, Const(1));

                res = context.Add(res, Const(roundConst));

                res = context.ShiftRightSI(res, Const(eSize));

                Operand isIntMin = context.ICompareEqual(res, Const((long)int.MinValue));

                res = context.ConditionalSelect(isIntMin, context.Negate(res), res);
            }

            return res;
        }
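
        // Shared helper for ADDHN/SUBHN and their rounding forms: performs the wide
        // operation, optionally adds the rounding constant, then keeps the high half of
        // each element, writing the low or high half of Rd depending on the register size.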
        private static void EmitHighNarrow(ArmEmitterContext context, Func2I emit, bool round)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            int elems = 8 >> op.Size;
            int eSize = 8 << op.Size;

            int part = op.RegisterSize == RegisterSize.Simd128 ? elems : 0;

            Operand res = part == 0 ? context.VectorZero() : context.Copy(GetVec(op.Rd));

            long roundConst = 1L << (eSize - 1);

            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size + 1);
                Operand me = EmitVectorExtractZx(context, op.Rm, index, op.Size + 1);

                Operand de = emit(ne, me);

                if (round)
                {
                    de = context.Add(de, Const(roundConst));
                }

                de = context.ShiftRightUI(de, Const(eSize));

                res = EmitVectorInsert(context, res, de, part + index, op.Size);
            }

            context.Copy(GetVec(op.Rd), res);
        }
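
        // Emits an SSE4.1 ROUNDSS/ROUNDSD (or ROUNDPS/ROUNDPD below) with the immediate
        // encoding of the requested ARM rounding mode.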
        public static void EmitScalarRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand n = GetVec(op.Rn);

            Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundsd : Intrinsic.X86Roundss;

            Operand res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));

            if ((op.Size & 1) != 0)
            {
                res = context.VectorZeroUpper64(res);
            }
            else
            {
                res = context.VectorZeroUpper96(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }

        public static void EmitVectorRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand n = GetVec(op.Rn);

            Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundpd : Intrinsic.X86Roundps;

            Operand res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }

        private enum AddSub
        {
            None,
            Add,
            Subtract
        }
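
        // Element-wise multiply with optional accumulate, backing MUL/MLA/MLS. SSE has no
        // packed 8-bit multiply, so the byte case does two 16-bit multiplies (odd bytes
        // brought down with a right shift) and merges the halves with PBLENDVB.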
        private static void EmitSse41Mul_AddSub(ArmEmitterContext context, AddSub addSub)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            Operand res = null;

            if (op.Size == 0)
            {
                // Shift the odd bytes down, multiply, then shift the products back up.
                Operand ns8 = context.AddIntrinsic(Intrinsic.X86Psrlw, n, Const(8));
                Operand ms8 = context.AddIntrinsic(Intrinsic.X86Psrlw, m, Const(8));

                res = context.AddIntrinsic(Intrinsic.X86Pmullw, ns8, ms8);
                res = context.AddIntrinsic(Intrinsic.X86Psllw, res, Const(8));

                // The even-byte products land in the low byte of each 16-bit lane.
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);

                Operand mask = X86GetAllElements(context, 0x00FF00FF);

                res = context.AddIntrinsic(Intrinsic.X86Pblendvb, res, res2, mask);
            }
            else if (op.Size == 1)
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmulld, n, m);
            }

            Operand d = GetVec(op.Rd);

            if (addSub == AddSub.Add)
            {
                switch (op.Size)
                {
                    case 0: res = context.AddIntrinsic(Intrinsic.X86Paddb, d, res); break;
                    case 1: res = context.AddIntrinsic(Intrinsic.X86Paddw, d, res); break;
                    case 2: res = context.AddIntrinsic(Intrinsic.X86Paddd, d, res); break;
                    case 3: res = context.AddIntrinsic(Intrinsic.X86Paddq, d, res); break;
                }
            }
            else if (addSub == AddSub.Subtract)
            {
                switch (op.Size)
                {
                    case 0: res = context.AddIntrinsic(Intrinsic.X86Psubb, d, res); break;
                    case 1: res = context.AddIntrinsic(Intrinsic.X86Psubw, d, res); break;
                    case 2: res = context.AddIntrinsic(Intrinsic.X86Psubd, d, res); break;
                    case 3: res = context.AddIntrinsic(Intrinsic.X86Psubq, d, res); break;
                }
            }

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(d, res);
        }
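
        // Signed absolute difference: computes both n - m and m - n and selects the
        // non-negative one with a PCMPGT mask.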
        private static void EmitSse41Sabd(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;

            Intrinsic cmpgtInst = X86PcmpgtInstruction[size];

            Operand cmpMask = context.AddIntrinsic(cmpgtInst, n, m);

            Intrinsic subInst = X86PsubInstruction[size];

            Operand res = context.AddIntrinsic(subInst, n, m);

            res = context.AddIntrinsic(Intrinsic.X86Pand, cmpMask, res);

            Operand res2 = context.AddIntrinsic(subInst, m, n);

            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);

            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);

            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
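
        // Unsigned absolute difference: SSE has no unsigned compare, so an "n > m" mask is
        // derived from PMAXU and PCMPEQ, and the mask selects between n - m and m - n.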
        private static void EmitSse41Uabd(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;

            Intrinsic maxInst = X86PmaxuInstruction[size];

            Operand max = context.AddIntrinsic(maxInst, m, n);

            Intrinsic cmpeqInst = X86PcmpeqInstruction[size];

            Operand cmpMask = context.AddIntrinsic(cmpeqInst, max, m);

            Operand onesMask = X86GetAllElements(context, -1L);

            cmpMask = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, onesMask);

            Intrinsic subInst = X86PsubInstruction[size];

            Operand res  = context.AddIntrinsic(subInst, n, m);
            Operand res2 = context.AddIntrinsic(subInst, m, n);

            res  = context.AddIntrinsic(Intrinsic.X86Pand,  cmpMask, res);
            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);

            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);

            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
    }
}