InstEmitSimdArithmetic32.cs

using ARMeilleure.Decoders;
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.Translation;
using System;

using static ARMeilleure.Instructions.InstEmitFlowHelper;
using static ARMeilleure.Instructions.InstEmitHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper32;
using static ARMeilleure.IntermediateRepresentation.Operand.Factory;

namespace ARMeilleure.Instructions
{
    static partial class InstEmit32
    {
        public static void Vabd_I(ArmEmitterContext context)
        {
            OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;

            EmitVectorBinaryOpI32(context, (op1, op2) => EmitAbs(context, context.Subtract(op1, op2)), !op.U);
        }

        public static void Vabdl_I(ArmEmitterContext context)
        {
            OpCode32SimdRegLong op = (OpCode32SimdRegLong)context.CurrOp;

            EmitVectorBinaryLongOpI32(context, (op1, op2) => EmitAbs(context, context.Subtract(op1, op2)), !op.U);
        }

        public static void Vabs_S(ArmEmitterContext context)
        {
            OpCode32SimdS op = (OpCode32SimdS)context.CurrOp;

            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitScalarUnaryOpF32(context, Intrinsic.Arm64FabsS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarUnaryOpSimd32(context, (m) =>
                {
                    return EmitFloatAbs(context, m, (op.Size & 1) == 0, false);
                });
            }
            else
            {
                EmitScalarUnaryOpF32(context, (op1) => EmitUnaryMathCall(context, nameof(Math.Abs), op1));
            }
        }

        public static void Vabs_V(ArmEmitterContext context)
        {
            OpCode32SimdCmpZ op = (OpCode32SimdCmpZ)context.CurrOp;

            if (op.F)
            {
                if (Optimizations.FastFP && Optimizations.UseAdvSimd)
                {
                    InstEmitSimdHelper32Arm64.EmitVectorUnaryOpF32(context, Intrinsic.Arm64FabsV);
                }
                else if (Optimizations.FastFP && Optimizations.UseSse2)
                {
                    EmitVectorUnaryOpSimd32(context, (m) =>
                    {
                        return EmitFloatAbs(context, m, (op.Size & 1) == 0, true);
                    });
                }
                else
                {
                    EmitVectorUnaryOpF32(context, (op1) => EmitUnaryMathCall(context, nameof(Math.Abs), op1));
                }
            }
            else
            {
                EmitVectorUnaryOpSx32(context, (op1) => EmitAbs(context, op1));
            }
        }
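
        // Integer absolute value: the IR has no abs primitive, so select between
        // the value and its negation based on the sign.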
        private static Operand EmitAbs(ArmEmitterContext context, Operand value)
        {
            Operand isPositive = context.ICompareGreaterOrEqual(value, Const(value.Type, 0));

            return context.ConditionalSelect(isPositive, value, context.Negate(value));
        }

        public static void Vadd_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitScalarBinaryOpF32(context, Intrinsic.Arm64FaddS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF32(context, Intrinsic.X86Addss, Intrinsic.X86Addsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF32(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF32(context, (op1, op2) => EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2));
            }
        }

        public static void Vadd_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitVectorBinaryOpF32(context, Intrinsic.Arm64FaddV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF32(context, Intrinsic.X86Addps, Intrinsic.X86Addpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF32(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF32(context, (op1, op2) => EmitSoftFloatCallDefaultFpscr(context, nameof(SoftFloat32.FPAddFpscr), op1, op2));
            }
        }

        public static void Vadd_I(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;

                EmitVectorBinaryOpSimd32(context, (op1, op2) => context.AddIntrinsic(X86PaddInstruction[op.Size], op1, op2));
            }
            else
            {
                EmitVectorBinaryOpZx32(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Vaddl_I(ArmEmitterContext context)
        {
            OpCode32SimdRegLong op = (OpCode32SimdRegLong)context.CurrOp;

            EmitVectorBinaryLongOpI32(context, (op1, op2) => context.Add(op1, op2), !op.U);
        }

        public static void Vaddw_I(ArmEmitterContext context)
        {
            OpCode32SimdRegWide op = (OpCode32SimdRegWide)context.CurrOp;

            EmitVectorBinaryWideOpI32(context, (op1, op2) => context.Add(op1, op2), !op.U);
        }
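
        // Population count per element: uses the host POPCNT instruction when
        // available, otherwise falls back to the bit-twiddling helper EmitCountSetBits8.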
        public static void Vcnt(ArmEmitterContext context)
        {
            OpCode32SimdCmpZ op = (OpCode32SimdCmpZ)context.CurrOp;

            Operand res = GetVecA32(op.Qd);

            int elems = op.GetBytesCount();

            for (int index = 0; index < elems; index++)
            {
                Operand de;
                Operand me = EmitVectorExtractZx32(context, op.Qm, op.Im + index, op.Size);

                if (Optimizations.UsePopCnt)
                {
                    de = context.AddIntrinsicInt(Intrinsic.X86Popcnt, me);
                }
                else
                {
                    de = EmitCountSetBits8(context, me);
                }

                res = EmitVectorInsert(context, res, de, op.Id + index, op.Size);
            }

            context.Copy(GetVecA32(op.Qd), res);
        }

        public static void Vdup(ArmEmitterContext context)
        {
            OpCode32SimdDupGP op = (OpCode32SimdDupGP)context.CurrOp;

            Operand insert = GetIntA32(context, op.Rt);

            // Zero extend into an I64, then replicate. Saves the most time over elementwise inserts.
            insert = op.Size switch
            {
                2 => context.Multiply(context.ZeroExtend32(OperandType.I64, insert), Const(0x0000000100000001u)),
                1 => context.Multiply(context.ZeroExtend16(OperandType.I64, insert), Const(0x0001000100010001u)),
                0 => context.Multiply(context.ZeroExtend8(OperandType.I64, insert), Const(0x0101010101010101u)),
                _ => throw new InvalidOperationException($"Invalid Vdup size \"{op.Size}\".")
            };

            InsertScalar(context, op.Vd, insert);

            if (op.Q)
            {
                InsertScalar(context, op.Vd + 1, insert);
            }
        }

        public static void Vdup_1(ArmEmitterContext context)
        {
            OpCode32SimdDupElem op = (OpCode32SimdDupElem)context.CurrOp;

            Operand insert = EmitVectorExtractZx32(context, op.Vm >> 1, ((op.Vm & 1) << (3 - op.Size)) + op.Index, op.Size);

            // Zero extend into an I64, then replicate. Saves the most time over elementwise inserts.
            insert = op.Size switch
            {
                2 => context.Multiply(context.ZeroExtend32(OperandType.I64, insert), Const(0x0000000100000001u)),
                1 => context.Multiply(context.ZeroExtend16(OperandType.I64, insert), Const(0x0001000100010001u)),
                0 => context.Multiply(context.ZeroExtend8(OperandType.I64, insert), Const(0x0101010101010101u)),
                _ => throw new InvalidOperationException($"Invalid Vdup size \"{op.Size}\".")
            };

            InsertScalar(context, op.Vd, insert);

            if (op.Q)
            {
                InsertScalar(context, op.Vd | 1, insert);
            }
        }
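
        // Builds a 16-byte PSHUFB control mask as a (high, low) pair of longs:
        // byte lanes in [start, start + length) receive ascending source indices
        // beginning at startByte; all other lanes get 0x80, which PSHUFB zeroes.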
        private static (long, long) MaskHelperByteSequence(int start, int length, int startByte)
        {
            int end = start + length;
            int b = startByte;

            long result = 0;
            long result2 = 0;

            for (int i = 0; i < 8; i++)
            {
                result |= (long)((i >= end || i < start) ? 0x80 : b++) << (i * 8);
            }

            for (int i = 8; i < 16; i++)
            {
                result2 |= (long)((i >= end || i < start) ? 0x80 : b++) << ((i - 8) * 8);
            }

            return (result2, result);
        }

        public static void Vext(ArmEmitterContext context)
        {
            OpCode32SimdExt op = (OpCode32SimdExt)context.CurrOp;

            int elems = op.GetBytesCount();
            int byteOff = op.Immediate;

            if (Optimizations.UseSsse3)
            {
                EmitVectorBinaryOpSimd32(context, (n, m) =>
                {
                    // Writing low to high of d: start <imm> into n, overlap into m.
                    // Then rotate n down by <imm>, m up by (elems)-imm.
                    // Then OR them together for the result.

                    (long nMaskHigh, long nMaskLow) = MaskHelperByteSequence(0, elems - byteOff, byteOff);
                    (long mMaskHigh, long mMaskLow) = MaskHelperByteSequence(elems - byteOff, byteOff, 0);

                    Operand nMask, mMask;

                    if (!op.Q)
                    {
                        // Do the same operation to the bytes in the top doubleword too, as our target could be in either.
                        nMaskHigh = nMaskLow + 0x0808080808080808L;
                        mMaskHigh = mMaskLow + 0x0808080808080808L;
                    }

                    nMask = X86GetElements(context, nMaskHigh, nMaskLow);
                    mMask = X86GetElements(context, mMaskHigh, mMaskLow);

                    Operand nPart = context.AddIntrinsic(Intrinsic.X86Pshufb, n, nMask);
                    Operand mPart = context.AddIntrinsic(Intrinsic.X86Pshufb, m, mMask);

                    return context.AddIntrinsic(Intrinsic.X86Por, nPart, mPart);
                });
            }
            else
            {
                Operand res = GetVecA32(op.Qd);

                for (int index = 0; index < elems; index++)
                {
                    Operand extract;

                    if (byteOff >= elems)
                    {
                        extract = EmitVectorExtractZx32(context, op.Qm, op.Im + (byteOff - elems), op.Size);
                    }
                    else
                    {
                        extract = EmitVectorExtractZx32(context, op.Qn, op.In + byteOff, op.Size);
                    }

                    byteOff++;

                    res = EmitVectorInsert(context, res, extract, op.Id + index, op.Size);
                }

                context.Copy(GetVecA32(op.Qd), res);
            }
        }

        public static void Vfma_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitScalarTernaryOpF32(context, Intrinsic.Arm64FmaddS);
            }
            else if (Optimizations.FastFP && Optimizations.UseFma)
            {
                EmitScalarTernaryOpF32(context, Intrinsic.X86Vfmadd231ss, Intrinsic.X86Vfmadd231sd);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarTernaryOpF32(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd, Intrinsic.X86Addss, Intrinsic.X86Addsd);
            }
            else
            {
                EmitScalarTernaryOpF32(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Vfma_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitVectorTernaryOpF32(context, Intrinsic.Arm64FmlaV);
            }
            else if (Optimizations.FastFP && Optimizations.UseFma)
            {
                EmitVectorTernaryOpF32(context, Intrinsic.X86Vfmadd231ps);
            }
            else
            {
                EmitVectorTernaryOpF32(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCallDefaultFpscr(context, nameof(SoftFloat32.FPMulAddFpscr), op1, op2, op3);
                });
            }
        }

        public static void Vfms_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitScalarTernaryOpF32(context, Intrinsic.Arm64FmsubS);
            }
            else if (Optimizations.FastFP && Optimizations.UseFma)
            {
                EmitScalarTernaryOpF32(context, Intrinsic.X86Vfnmadd231ss, Intrinsic.X86Vfnmadd231sd);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarTernaryOpF32(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd, Intrinsic.X86Subss, Intrinsic.X86Subsd);
            }
            else
            {
                EmitScalarTernaryOpF32(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }

        public static void Vfms_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitVectorTernaryOpF32(context, Intrinsic.Arm64FmlsV);
            }
            else if (Optimizations.FastFP && Optimizations.UseFma)
            {
                EmitVectorTernaryOpF32(context, Intrinsic.X86Vfnmadd231ps);
            }
            else
            {
                EmitVectorTernaryOpF32(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCallDefaultFpscr(context, nameof(SoftFloat32.FPMulSubFpscr), op1, op2, op3);
                });
            }
        }

        public static void Vfnma_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitScalarTernaryOpF32(context, Intrinsic.Arm64FnmaddS);
            }
            else if (Optimizations.FastFP && Optimizations.UseFma)
            {
                EmitScalarTernaryOpF32(context, Intrinsic.X86Vfnmsub231ss, Intrinsic.X86Vfnmsub231sd);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarTernaryOpF32(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd, Intrinsic.X86Subss, Intrinsic.X86Subsd, isNegD: true);
            }
            else
            {
                EmitScalarTernaryOpF32(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPNegMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Vfnms_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitScalarTernaryOpF32(context, Intrinsic.Arm64FnmsubS);
            }
            else if (Optimizations.FastFP && Optimizations.UseFma)
            {
                EmitScalarTernaryOpF32(context, Intrinsic.X86Vfmsub231ss, Intrinsic.X86Vfmsub231sd);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarTernaryOpF32(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd, Intrinsic.X86Addss, Intrinsic.X86Addsd, isNegD: true);
            }
            else
            {
                EmitScalarTernaryOpF32(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPNegMulSub), op1, op2, op3);
                });
            }
        }
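
        // Halving add: (op1 + op2) >> 1, with the shift matching the operand signedness.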
        public static void Vhadd(ArmEmitterContext context)
        {
            OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;

            if (op.U)
            {
                EmitVectorBinaryOpZx32(context, (op1, op2) => context.ShiftRightUI(context.Add(op1, op2), Const(1)));
            }
            else
            {
                EmitVectorBinaryOpSx32(context, (op1, op2) => context.ShiftRightSI(context.Add(op1, op2), Const(1)));
            }
        }

        public static void Vmov_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarUnaryOpF32(context, 0, 0);
            }
            else
            {
                EmitScalarUnaryOpF32(context, (op1) => op1);
            }
        }

        public static void Vmovn(ArmEmitterContext context)
        {
            EmitVectorUnaryNarrowOp32(context, (op1) => op1);
        }

        public static void Vneg_S(ArmEmitterContext context)
        {
            OpCode32SimdS op = (OpCode32SimdS)context.CurrOp;

            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitScalarUnaryOpF32(context, Intrinsic.Arm64FnegS);
            }
            else if (Optimizations.UseSse2)
            {
                EmitScalarUnaryOpSimd32(context, (m) =>
                {
                    if ((op.Size & 1) == 0)
                    {
                        Operand mask = X86GetScalar(context, -0f);

                        return context.AddIntrinsic(Intrinsic.X86Xorps, mask, m);
                    }
                    else
                    {
                        Operand mask = X86GetScalar(context, -0d);

                        return context.AddIntrinsic(Intrinsic.X86Xorpd, mask, m);
                    }
                });
            }
            else
            {
                EmitScalarUnaryOpF32(context, (op1) => context.Negate(op1));
            }
        }

        public static void Vnmul_S(ArmEmitterContext context)
        {
            OpCode32SimdRegS op = (OpCode32SimdRegS)context.CurrOp;

            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitScalarBinaryOpF32(context, Intrinsic.Arm64FnmulS);
            }
            else if (Optimizations.UseSse2)
            {
                EmitScalarBinaryOpSimd32(context, (n, m) =>
                {
                    if ((op.Size & 1) == 0)
                    {
                        Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                        Operand mask = X86GetScalar(context, -0f);

                        return context.AddIntrinsic(Intrinsic.X86Xorps, mask, res);
                    }
                    else
                    {
                        Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                        Operand mask = X86GetScalar(context, -0d);

                        return context.AddIntrinsic(Intrinsic.X86Xorpd, mask, res);
                    }
                });
            }
            else
            {
                EmitScalarBinaryOpF32(context, (op1, op2) => context.Negate(context.Multiply(op1, op2)));
            }
        }

        public static void Vnmla_S(ArmEmitterContext context)
        {
            OpCode32SimdRegS op = (OpCode32SimdRegS)context.CurrOp;

            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitScalarTernaryOpF32(context, Intrinsic.Arm64FnmaddS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarTernaryOpF32(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd, Intrinsic.X86Subss, Intrinsic.X86Subsd, isNegD: true);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarTernaryOpF32(context, (op1, op2, op3) =>
                {
                    return context.Subtract(context.Negate(op1), context.Multiply(op2, op3));
                });
            }
            else
            {
                EmitScalarTernaryOpF32(context, (op1, op2, op3) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op2, op3);

                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), context.Negate(op1), res);
                });
            }
        }

        public static void Vnmls_S(ArmEmitterContext context)
        {
            OpCode32SimdRegS op = (OpCode32SimdRegS)context.CurrOp;

            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitScalarTernaryOpF32(context, Intrinsic.Arm64FnmsubS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarTernaryOpF32(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd, Intrinsic.X86Addss, Intrinsic.X86Addsd, isNegD: true);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarTernaryOpF32(context, (op1, op2, op3) =>
                {
                    return context.Add(context.Negate(op1), context.Multiply(op2, op3));
                });
            }
            else
            {
                EmitScalarTernaryOpF32(context, (op1, op2, op3) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op2, op3);

                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), context.Negate(op1), res);
                });
            }
        }

        public static void Vneg_V(ArmEmitterContext context)
        {
            OpCode32SimdCmpZ op = (OpCode32SimdCmpZ)context.CurrOp;

            if (op.F)
            {
                if (Optimizations.FastFP && Optimizations.UseAdvSimd)
                {
                    InstEmitSimdHelper32Arm64.EmitVectorUnaryOpF32(context, Intrinsic.Arm64FnegV);
                }
                else if (Optimizations.FastFP && Optimizations.UseSse2)
                {
                    EmitVectorUnaryOpSimd32(context, (m) =>
                    {
                        if ((op.Size & 1) == 0)
                        {
                            Operand mask = X86GetAllElements(context, -0f);

                            return context.AddIntrinsic(Intrinsic.X86Xorps, mask, m);
                        }
                        else
                        {
                            Operand mask = X86GetAllElements(context, -0d);

                            return context.AddIntrinsic(Intrinsic.X86Xorpd, mask, m);
                        }
                    });
                }
                else
                {
                    EmitVectorUnaryOpF32(context, (op1) => context.Negate(op1));
                }
            }
            else
            {
                EmitVectorUnaryOpSx32(context, (op1) => context.Negate(op1));
            }
        }

        public static void Vdiv_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitScalarBinaryOpF32(context, Intrinsic.Arm64FdivS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF32(context, Intrinsic.X86Divss, Intrinsic.X86Divsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF32(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF32(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPDiv), op1, op2);
                });
            }
        }

        public static void Vmaxnm_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitScalarBinaryOpF32(context, Intrinsic.Arm64FmaxnmS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF32(context, true, true);
            }
            else
            {
                EmitScalarBinaryOpF32(context, (op1, op2) => EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2));
            }
        }

        public static void Vmaxnm_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitVectorBinaryOpF32(context, Intrinsic.Arm64FmaxnmV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF32(context, true, false);
            }
            else
            {
                EmitVectorBinaryOpSx32(context, (op1, op2) => EmitSoftFloatCallDefaultFpscr(context, nameof(SoftFloat32.FPMaxNumFpscr), op1, op2));
            }
        }

        public static void Vminnm_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitScalarBinaryOpF32(context, Intrinsic.Arm64FminnmS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF32(context, false, true);
            }
            else
            {
                EmitScalarBinaryOpF32(context, (op1, op2) => EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2));
            }
        }

        public static void Vminnm_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitVectorBinaryOpF32(context, Intrinsic.Arm64FminnmV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF32(context, false, false);
            }
            else
            {
                EmitVectorBinaryOpSx32(context, (op1, op2) => EmitSoftFloatCallDefaultFpscr(context, nameof(SoftFloat32.FPMinNumFpscr), op1, op2));
            }
        }

        public static void Vmax_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitVectorBinaryOpF32(context, Intrinsic.Arm64FmaxV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF32(context, Intrinsic.X86Maxps, Intrinsic.X86Maxpd);
            }
            else
            {
                EmitVectorBinaryOpF32(context, (op1, op2) =>
                {
                    return EmitSoftFloatCallDefaultFpscr(context, nameof(SoftFloat32.FPMaxFpscr), op1, op2);
                });
            }
        }

        public static void Vmax_I(ArmEmitterContext context)
        {
            OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;

            if (op.U)
            {
                if (Optimizations.UseSse2)
                {
                    EmitVectorBinaryOpSimd32(context, (op1, op2) => context.AddIntrinsic(X86PmaxuInstruction[op.Size], op1, op2));
                }
                else
                {
                    EmitVectorBinaryOpZx32(context, (op1, op2) => context.ConditionalSelect(context.ICompareGreaterUI(op1, op2), op1, op2));
                }
            }
            else
            {
                if (Optimizations.UseSse2)
                {
                    EmitVectorBinaryOpSimd32(context, (op1, op2) => context.AddIntrinsic(X86PmaxsInstruction[op.Size], op1, op2));
                }
                else
                {
                    EmitVectorBinaryOpSx32(context, (op1, op2) => context.ConditionalSelect(context.ICompareGreater(op1, op2), op1, op2));
                }
            }
        }

        public static void Vmin_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitVectorBinaryOpF32(context, Intrinsic.Arm64FminV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF32(context, Intrinsic.X86Minps, Intrinsic.X86Minpd);
            }
            else
            {
                EmitVectorBinaryOpF32(context, (op1, op2) =>
                {
                    return EmitSoftFloatCallDefaultFpscr(context, nameof(SoftFloat32.FPMinFpscr), op1, op2);
                });
            }
        }

        public static void Vmin_I(ArmEmitterContext context)
        {
            OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;

            if (op.U)
            {
                if (Optimizations.UseSse2)
                {
                    EmitVectorBinaryOpSimd32(context, (op1, op2) => context.AddIntrinsic(X86PminuInstruction[op.Size], op1, op2));
                }
                else
                {
                    EmitVectorBinaryOpZx32(context, (op1, op2) => context.ConditionalSelect(context.ICompareLessUI(op1, op2), op1, op2));
                }
            }
            else
            {
                if (Optimizations.UseSse2)
                {
                    EmitVectorBinaryOpSimd32(context, (op1, op2) => context.AddIntrinsic(X86PminsInstruction[op.Size], op1, op2));
                }
                else
                {
                    EmitVectorBinaryOpSx32(context, (op1, op2) => context.ConditionalSelect(context.ICompareLess(op1, op2), op1, op2));
                }
            }
        }

        public static void Vmla_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitScalarTernaryOpF32(context, Intrinsic.Arm64FmaddS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarTernaryOpF32(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd, Intrinsic.X86Addss, Intrinsic.X86Addsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarTernaryOpF32(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
            else
            {
                EmitScalarTernaryOpF32(context, (op1, op2, op3) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op2, op3);

                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, res);
                });
            }
        }

        public static void Vmla_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitVectorTernaryOpF32(context, Intrinsic.Arm64FmlaV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorTernaryOpF32(context, Intrinsic.X86Mulps, Intrinsic.X86Mulpd, Intrinsic.X86Addps, Intrinsic.X86Addpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorTernaryOpF32(context, (op1, op2, op3) => context.Add(op1, context.Multiply(op2, op3)));
            }
            else
            {
                EmitVectorTernaryOpF32(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCallDefaultFpscr(context, nameof(SoftFloat32.FPMulAddFpscr), op1, op2, op3);
                });
            }
        }

        public static void Vmla_I(ArmEmitterContext context)
        {
            EmitVectorTernaryOpZx32(context, (op1, op2, op3) => context.Add(op1, context.Multiply(op2, op3)));
        }

        public static void Vmla_1(ArmEmitterContext context)
        {
            OpCode32SimdRegElem op = (OpCode32SimdRegElem)context.CurrOp;

            if (op.F)
            {
                if (Optimizations.FastFP && Optimizations.UseSse2)
                {
                    EmitVectorsByScalarOpF32(context, Intrinsic.X86Mulps, Intrinsic.X86Mulpd, Intrinsic.X86Addps, Intrinsic.X86Addpd);
                }
                else if (Optimizations.FastFP)
                {
                    EmitVectorsByScalarOpF32(context, (op1, op2, op3) => context.Add(op1, context.Multiply(op2, op3)));
                }
                else
                {
                    EmitVectorsByScalarOpF32(context, (op1, op2, op3) => EmitSoftFloatCallDefaultFpscr(context, nameof(SoftFloat32.FPMulAddFpscr), op1, op2, op3));
                }
            }
            else
            {
                EmitVectorsByScalarOpI32(context, (op1, op2, op3) => context.Add(op1, context.Multiply(op2, op3)), false);
            }
        }

        public static void Vmlal_I(ArmEmitterContext context)
        {
            OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;

            EmitVectorTernaryLongOpI32(context, (d, n, m) => context.Add(d, context.Multiply(n, m)), !op.U);
        }

        public static void Vmls_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitScalarTernaryOpF32(context, Intrinsic.Arm64FmlsV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarTernaryOpF32(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd, Intrinsic.X86Subss, Intrinsic.X86Subsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarTernaryOpF32(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
            else
            {
                EmitScalarTernaryOpF32(context, (op1, op2, op3) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op2, op3);

                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, res);
                });
            }
        }

        public static void Vmls_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitVectorTernaryOpF32(context, Intrinsic.Arm64FmlsV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorTernaryOpF32(context, Intrinsic.X86Mulps, Intrinsic.X86Mulpd, Intrinsic.X86Subps, Intrinsic.X86Subpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorTernaryOpF32(context, (op1, op2, op3) => context.Subtract(op1, context.Multiply(op2, op3)));
            }
            else
            {
                EmitVectorTernaryOpF32(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCallDefaultFpscr(context, nameof(SoftFloat32.FPMulSubFpscr), op1, op2, op3);
                });
            }
        }

        public static void Vmls_I(ArmEmitterContext context)
        {
            EmitVectorTernaryOpZx32(context, (op1, op2, op3) => context.Subtract(op1, context.Multiply(op2, op3)));
        }

        public static void Vmls_1(ArmEmitterContext context)
        {
            OpCode32SimdRegElem op = (OpCode32SimdRegElem)context.CurrOp;

            if (op.F)
            {
                if (Optimizations.FastFP && Optimizations.UseSse2)
                {
                    EmitVectorsByScalarOpF32(context, Intrinsic.X86Mulps, Intrinsic.X86Mulpd, Intrinsic.X86Subps, Intrinsic.X86Subpd);
                }
                else if (Optimizations.FastFP)
                {
                    EmitVectorsByScalarOpF32(context, (op1, op2, op3) => context.Subtract(op1, context.Multiply(op2, op3)));
                }
                else
                {
                    EmitVectorsByScalarOpF32(context, (op1, op2, op3) => EmitSoftFloatCallDefaultFpscr(context, nameof(SoftFloat32.FPMulSubFpscr), op1, op2, op3));
                }
            }
            else
            {
                EmitVectorsByScalarOpI32(context, (op1, op2, op3) => context.Subtract(op1, context.Multiply(op2, op3)), false);
            }
        }

        public static void Vmlsl_I(ArmEmitterContext context)
        {
            OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;

            EmitVectorTernaryLongOpI32(context, (opD, op1, op2) => context.Subtract(opD, context.Multiply(op1, op2)), !op.U);
        }

        public static void Vmul_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitScalarBinaryOpF32(context, Intrinsic.Arm64FmulS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF32(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF32(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF32(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }

        public static void Vmul_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitVectorBinaryOpF32(context, Intrinsic.Arm64FmulV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF32(context, Intrinsic.X86Mulps, Intrinsic.X86Mulpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF32(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF32(context, (op1, op2) =>
                {
                    return EmitSoftFloatCallDefaultFpscr(context, nameof(SoftFloat32.FPMulFpscr), op1, op2);
                });
            }
        }

        public static void Vmul_I(ArmEmitterContext context)
        {
            OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;

            if (op.U) // This instruction is always signed, U indicates polynomial mode.
            {
                EmitVectorBinaryOpZx32(context, (op1, op2) => EmitPolynomialMultiply(context, op1, op2, 8 << op.Size));
            }
            else
            {
                EmitVectorBinaryOpSx32(context, (op1, op2) => context.Multiply(op1, op2));
            }
        }

        public static void Vmul_1(ArmEmitterContext context)
        {
            OpCode32SimdRegElem op = (OpCode32SimdRegElem)context.CurrOp;

            if (op.F)
            {
                if (Optimizations.FastFP && Optimizations.UseSse2)
                {
                    EmitVectorByScalarOpF32(context, Intrinsic.X86Mulps, Intrinsic.X86Mulpd);
                }
                else if (Optimizations.FastFP)
                {
                    EmitVectorByScalarOpF32(context, (op1, op2) => context.Multiply(op1, op2));
                }
                else
                {
                    EmitVectorByScalarOpF32(context, (op1, op2) => EmitSoftFloatCallDefaultFpscr(context, nameof(SoftFloat32.FPMulFpscr), op1, op2));
                }
            }
            else
            {
                EmitVectorByScalarOpI32(context, (op1, op2) => context.Multiply(op1, op2), false);
            }
        }

        public static void Vmull_1(ArmEmitterContext context)
        {
            OpCode32SimdRegElem op = (OpCode32SimdRegElem)context.CurrOp;

            EmitVectorByScalarLongOpI32(context, (op1, op2) => context.Multiply(op1, op2), !op.U);
        }

        public static void Vmull_I(ArmEmitterContext context)
        {
            OpCode32SimdRegLong op = (OpCode32SimdRegLong)context.CurrOp;

            if (op.Polynomial)
            {
                if (op.Size == 0) // P8
                {
                    EmitVectorBinaryLongOpI32(context, (op1, op2) => EmitPolynomialMultiply(context, op1, op2, 8 << op.Size), false);
                }
                else /* if (op.Size == 2) // P64 */
                {
                    Operand ne = context.VectorExtract(OperandType.I64, GetVec(op.Qn), op.Vn & 1);
                    Operand me = context.VectorExtract(OperandType.I64, GetVec(op.Qm), op.Vm & 1);

                    Operand res = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.PolynomialMult64_128)), ne, me);

                    context.Copy(GetVecA32(op.Qd), res);
                }
            }
            else
            {
                EmitVectorBinaryLongOpI32(context, (op1, op2) => context.Multiply(op1, op2), !op.U);
            }
        }

        public static void Vpadd_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitVectorPairwiseOpF32(context, Intrinsic.Arm64FaddpV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitSse2VectorPairwiseOpF32(context, Intrinsic.X86Addps);
            }
            else
            {
                EmitVectorPairwiseOpF32(context, (op1, op2) => EmitSoftFloatCallDefaultFpscr(context, nameof(SoftFloat32.FPAddFpscr), op1, op2));
            }
        }

        public static void Vpadd_I(ArmEmitterContext context)
        {
            OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;

            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp32(context, X86PaddInstruction);
            }
            else
            {
                EmitVectorPairwiseOpI32(context, (op1, op2) => context.Add(op1, op2), !op.U);
            }
        }
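
        // Pairwise add long: sums adjacent element pairs into double-width results;
        // the low bit of Opc selects the unsigned variant.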
        public static void Vpaddl(ArmEmitterContext context)
        {
            OpCode32Simd op = (OpCode32Simd)context.CurrOp;

            EmitVectorPairwiseLongOpI32(context, (op1, op2) => context.Add(op1, op2), (op.Opc & 1) == 0);
        }

        public static void Vpmax_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitVectorPairwiseOpF32(context, Intrinsic.Arm64FmaxpV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitSse2VectorPairwiseOpF32(context, Intrinsic.X86Maxps);
            }
            else
            {
                EmitVectorPairwiseOpF32(context, (op1, op2) => EmitSoftFloatCallDefaultFpscr(context, nameof(SoftFloat32.FPMaxFpscr), op1, op2));
            }
        }

        public static void Vpmax_I(ArmEmitterContext context)
        {
            OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;

            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp32(context, op.U ? X86PmaxuInstruction : X86PmaxsInstruction);
            }
            else
            {
                EmitVectorPairwiseOpI32(context, (op1, op2) =>
                {
                    Operand greater = op.U ? context.ICompareGreaterUI(op1, op2) : context.ICompareGreater(op1, op2);

                    return context.ConditionalSelect(greater, op1, op2);
                }, !op.U);
            }
        }

        public static void Vpmin_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitVectorPairwiseOpF32(context, Intrinsic.Arm64FminpV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitSse2VectorPairwiseOpF32(context, Intrinsic.X86Minps);
            }
            else
            {
                EmitVectorPairwiseOpF32(context, (op1, op2) => EmitSoftFloatCallDefaultFpscr(context, nameof(SoftFloat32.FPMinFpscr), op1, op2));
            }
        }

        public static void Vpmin_I(ArmEmitterContext context)
        {
            OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;

            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp32(context, op.U ? X86PminuInstruction : X86PminsInstruction);
            }
            else
            {
                EmitVectorPairwiseOpI32(context, (op1, op2) =>
                {
                    Operand less = op.U ? context.ICompareLessUI(op1, op2) : context.ICompareLess(op1, op2);

                    return context.ConditionalSelect(less, op1, op2);
                }, !op.U);
            }
        }

        public static void Vqadd(ArmEmitterContext context)
        {
            OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;

            EmitSaturatingAddSubBinaryOp(context, add: true, !op.U);
        }
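
        // Saturating doubling multiply returning high half: (2 * op1 * op2) >> eSize,
        // computed as (op1 * op2) >> (eSize - 1) with 32-bit elements widened to 64 bits
        // first, then saturated back to the element size.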
        public static void Vqdmulh(ArmEmitterContext context)
        {
            OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;
            int eSize = 8 << op.Size;

            EmitVectorBinaryOpI32(context, (op1, op2) =>
            {
                if (op.Size == 2)
                {
                    op1 = context.SignExtend32(OperandType.I64, op1);
                    op2 = context.SignExtend32(OperandType.I64, op2);
                }

                Operand res = context.Multiply(op1, op2);
                res = context.ShiftRightSI(res, Const(eSize - 1));
                res = EmitSatQ(context, res, eSize, signedSrc: true, signedDst: true);

                if (op.Size == 2)
                {
                    res = context.ConvertI64ToI32(res);
                }

                return res;
            }, signed: true);
        }

        public static void Vqmovn(ArmEmitterContext context)
        {
            OpCode32SimdMovn op = (OpCode32SimdMovn)context.CurrOp;

            bool signed = !op.Q;

            EmitVectorUnaryNarrowOp32(context, (op1) => EmitSatQ(context, op1, 8 << op.Size, signed, signed), signed);
        }

        public static void Vqmovun(ArmEmitterContext context)
        {
            OpCode32SimdMovn op = (OpCode32SimdMovn)context.CurrOp;

            EmitVectorUnaryNarrowOp32(context, (op1) => EmitSatQ(context, op1, 8 << op.Size, signedSrc: true, signedDst: false), signed: true);
        }

        public static void Vqsub(ArmEmitterContext context)
        {
            OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;

            EmitSaturatingAddSubBinaryOp(context, add: false, !op.U);
        }
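
        // Reverses elements within 16-, 32-, or 64-bit regions (Size selects the region,
        // Opc the element width). The SSSE3 path handles each combination with a single
        // byte shuffle (PSHUFB, or SHUFPS for 32-bit elements within 64-bit regions);
        // the fallback uses shifts and masks.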
        public static void Vrev(ArmEmitterContext context)
        {
            OpCode32SimdRev op = (OpCode32SimdRev)context.CurrOp;

            if (Optimizations.UseSsse3)
            {
                EmitVectorUnaryOpSimd32(context, (op1) =>
                {
                    Operand mask;

                    switch (op.Size)
                    {
                        case 3:
                            // Rev64
                            switch (op.Opc)
                            {
                                case 0:
                                    mask = X86GetElements(context, 0x08090a0b0c0d0e0fL, 0x0001020304050607L);

                                    return context.AddIntrinsic(Intrinsic.X86Pshufb, op1, mask);
                                case 1:
                                    mask = X86GetElements(context, 0x09080b0a0d0c0f0eL, 0x0100030205040706L);

                                    return context.AddIntrinsic(Intrinsic.X86Pshufb, op1, mask);
                                case 2:
                                    return context.AddIntrinsic(Intrinsic.X86Shufps, op1, op1, Const(1 | (0 << 2) | (3 << 4) | (2 << 6)));
                            }
                            break;
                        case 2:
                            // Rev32
                            switch (op.Opc)
                            {
                                case 0:
                                    mask = X86GetElements(context, 0x0c0d0e0f_08090a0bL, 0x04050607_00010203L);

                                    return context.AddIntrinsic(Intrinsic.X86Pshufb, op1, mask);
                                case 1:
                                    mask = X86GetElements(context, 0x0d0c0f0e_09080b0aL, 0x05040706_01000302L);

                                    return context.AddIntrinsic(Intrinsic.X86Pshufb, op1, mask);
                            }
                            break;
                        case 1:
                            // Rev16
                            mask = X86GetElements(context, 0x0e0f_0c0d_0a0b_0809L, 0x0607_0405_0203_0001L);

                            return context.AddIntrinsic(Intrinsic.X86Pshufb, op1, mask);
                    }

                    throw new InvalidOperationException("Invalid VREV Opcode + Size combo."); // Should be unreachable.
                });
            }
            else
            {
                EmitVectorUnaryOpZx32(context, (op1) =>
                {
                    switch (op.Opc)
                    {
                        case 0:
                            switch (op.Size) // Swap bytes.
                            {
                                case 1:
                                    return InstEmitAluHelper.EmitReverseBytes16_32Op(context, op1);
                                case 2:
                                case 3:
                                    return context.ByteSwap(op1);
                            }
                            break;
                        case 1:
                            switch (op.Size)
                            {
                                case 2:
                                    return context.BitwiseOr(context.ShiftRightUI(context.BitwiseAnd(op1, Const(0xffff0000)), Const(16)),
                                        context.ShiftLeft(context.BitwiseAnd(op1, Const(0x0000ffff)), Const(16)));
                                case 3:
                                    return context.BitwiseOr(
                                        context.BitwiseOr(context.ShiftRightUI(context.BitwiseAnd(op1, Const(0xffff000000000000ul)), Const(48)),
                                            context.ShiftLeft(context.BitwiseAnd(op1, Const(0x000000000000fffful)), Const(48))),
                                        context.BitwiseOr(context.ShiftRightUI(context.BitwiseAnd(op1, Const(0x0000ffff00000000ul)), Const(16)),
                                            context.ShiftLeft(context.BitwiseAnd(op1, Const(0x00000000ffff0000ul)), Const(16))));
                            }
                            break;
                        case 2:
                            // Swap upper and lower halves.
                            return context.BitwiseOr(context.ShiftRightUI(context.BitwiseAnd(op1, Const(0xffffffff00000000ul)), Const(32)),
                                context.ShiftLeft(context.BitwiseAnd(op1, Const(0x00000000fffffffful)), Const(32)));
                    }

                    throw new InvalidOperationException("Invalid VREV Opcode + Size combo."); // Should be unreachable.
                });
            }
        }

        public static void Vrecpe(ArmEmitterContext context)
        {
            OpCode32SimdSqrte op = (OpCode32SimdSqrte)context.CurrOp;

            if (op.F)
            {
                int sizeF = op.Size & 1;

                if (Optimizations.FastFP && Optimizations.UseAdvSimd)
                {
                    InstEmitSimdHelper32Arm64.EmitVectorUnaryOpF32(context, Intrinsic.Arm64FrecpeV);
                }
                else if (Optimizations.FastFP && Optimizations.UseSse2 && sizeF == 0)
                {
                    EmitVectorUnaryOpF32(context, Intrinsic.X86Rcpps, 0);
                }
                else
                {
                    EmitVectorUnaryOpF32(context, (op1) =>
                    {
                        return EmitSoftFloatCallDefaultFpscr(context, nameof(SoftFloat32.FPRecipEstimateFpscr), op1);
                    });
                }
            }
            else
            {
                throw new NotImplementedException("Integer Vrecpe not currently implemented.");
            }
        }

        public static void Vrecps(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitVectorBinaryOpF32(context, Intrinsic.Arm64FrecpsV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;

                bool single = (op.Size & 1) == 0;

                // (2 - (n*m))
                EmitVectorBinaryOpSimd32(context, (n, m) =>
                {
                    if (single)
                    {
                        Operand maskTwo = X86GetAllElements(context, 2f);

                        Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);

                        return context.AddIntrinsic(Intrinsic.X86Subps, maskTwo, res);
                    }
                    else
                    {
                        Operand maskTwo = X86GetAllElements(context, 2d);

                        Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);

                        return context.AddIntrinsic(Intrinsic.X86Subpd, maskTwo, res);
                    }
                });
            }
            else
            {
                EmitVectorBinaryOpF32(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipStep), op1, op2);
                });
            }
        }
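
        // Rounding halving add: (op1 + op2 + 1) >> 1, widening 32-bit elements
        // to 64 bits so the intermediate sum cannot overflow.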
        public static void Vrhadd(ArmEmitterContext context)
        {
            OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;

            EmitVectorBinaryOpI32(context, (op1, op2) =>
            {
                if (op.Size == 2)
                {
                    op1 = context.ZeroExtend32(OperandType.I64, op1);
                    op2 = context.ZeroExtend32(OperandType.I64, op2);
                }

                Operand res = context.Add(context.Add(op1, op2), Const(op1.Type, 1L));
                res = context.ShiftRightUI(res, Const(1));

                if (op.Size == 2)
                {
                    res = context.ConvertI64ToI32(res);
                }

                return res;
            }, !op.U);
        }

        public static void Vrsqrte(ArmEmitterContext context)
        {
            OpCode32SimdSqrte op = (OpCode32SimdSqrte)context.CurrOp;

            if (op.F)
            {
                int sizeF = op.Size & 1;

                if (Optimizations.FastFP && Optimizations.UseAdvSimd)
                {
                    InstEmitSimdHelper32Arm64.EmitVectorUnaryOpF32(context, Intrinsic.Arm64FrsqrteV);
                }
                else if (Optimizations.FastFP && Optimizations.UseSse2 && sizeF == 0)
                {
                    EmitVectorUnaryOpF32(context, Intrinsic.X86Rsqrtps, 0);
                }
                else
                {
                    EmitVectorUnaryOpF32(context, (op1) =>
                    {
                        return EmitSoftFloatCallDefaultFpscr(context, nameof(SoftFloat32.FPRSqrtEstimateFpscr), op1);
                    });
                }
            }
            else
            {
                throw new NotImplementedException("Integer Vrsqrte not currently implemented.");
            }
        }

        public static void Vrsqrts(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitVectorBinaryOpF32(context, Intrinsic.Arm64FrsqrtsV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;

                bool single = (op.Size & 1) == 0;

                // (3 - (n*m)) / 2
                EmitVectorBinaryOpSimd32(context, (n, m) =>
                {
                    if (single)
                    {
                        Operand maskHalf = X86GetAllElements(context, 0.5f);
                        Operand maskThree = X86GetAllElements(context, 3f);

                        Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);

                        res = context.AddIntrinsic(Intrinsic.X86Subps, maskThree, res);

                        return context.AddIntrinsic(Intrinsic.X86Mulps, maskHalf, res);
                    }
                    else
                    {
                        Operand maskHalf = X86GetAllElements(context, 0.5d);
                        Operand maskThree = X86GetAllElements(context, 3d);

                        Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);

                        res = context.AddIntrinsic(Intrinsic.X86Subpd, maskThree, res);

                        return context.AddIntrinsic(Intrinsic.X86Mulpd, maskHalf, res);
                    }
                });
            }
            else
            {
                EmitVectorBinaryOpF32(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtStep), op1, op2);
                });
            }
        }

        public static void Vsel(ArmEmitterContext context)
        {
            OpCode32SimdSel op = (OpCode32SimdSel)context.CurrOp;

            Operand condition = default;

            switch (op.Cc)
            {
                case OpCode32SimdSelMode.Eq:
                    condition = GetCondTrue(context, Condition.Eq);
                    break;
                case OpCode32SimdSelMode.Ge:
                    condition = GetCondTrue(context, Condition.Ge);
                    break;
                case OpCode32SimdSelMode.Gt:
                    condition = GetCondTrue(context, Condition.Gt);
                    break;
                case OpCode32SimdSelMode.Vs:
                    condition = GetCondTrue(context, Condition.Vs);
                    break;
            }

            EmitScalarBinaryOpI32(context, (op1, op2) =>
            {
                return context.ConditionalSelect(condition, op1, op2);
            });
        }

        public static void Vsqrt_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitScalarUnaryOpF32(context, Intrinsic.Arm64FsqrtS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarUnaryOpF32(context, Intrinsic.X86Sqrtss, Intrinsic.X86Sqrtsd);
            }
            else
            {
                EmitScalarUnaryOpF32(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSqrt), op1);
                });
            }
        }

        public static void Vsub_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitScalarBinaryOpF32(context, Intrinsic.Arm64FsubS);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF32(context, Intrinsic.X86Subss, Intrinsic.X86Subsd);
            }
            else
            {
                EmitScalarBinaryOpF32(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Vsub_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseAdvSimd)
            {
                InstEmitSimdHelper32Arm64.EmitVectorBinaryOpF32(context, Intrinsic.Arm64FsubV);
            }
            else if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF32(context, Intrinsic.X86Subps, Intrinsic.X86Subpd);
            }
            else
            {
                EmitVectorBinaryOpF32(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Vsub_I(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;

                EmitVectorBinaryOpSimd32(context, (op1, op2) => context.AddIntrinsic(X86PsubInstruction[op.Size], op1, op2));
            }
            else
            {
                EmitVectorBinaryOpZx32(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Vsubl_I(ArmEmitterContext context)
        {
            OpCode32SimdRegLong op = (OpCode32SimdRegLong)context.CurrOp;

            EmitVectorBinaryLongOpI32(context, (op1, op2) => context.Subtract(op1, op2), !op.U);
        }

        public static void Vsubw_I(ArmEmitterContext context)
        {
            OpCode32SimdRegWide op = (OpCode32SimdRegWide)context.CurrOp;

            EmitVectorBinaryWideOpI32(context, (op1, op2) => context.Subtract(op1, op2), !op.U);
        }
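
        // Common implementation for VQADD/VQSUB. Elements up to 32 bits are widened
        // to 64 bits, added or subtracted, then saturated back to the element size;
        // 64-bit elements use the dedicated saturating helpers.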
        private static void EmitSaturatingAddSubBinaryOp(ArmEmitterContext context, bool add, bool signed)
        {
            OpCode32Simd op = (OpCode32Simd)context.CurrOp;

            EmitVectorBinaryOpI32(context, (ne, me) =>
            {
                if (op.Size <= 2)
                {
                    if (op.Size == 2)
                    {
                        ne = signed ? context.SignExtend32(OperandType.I64, ne) : context.ZeroExtend32(OperandType.I64, ne);
                        me = signed ? context.SignExtend32(OperandType.I64, me) : context.ZeroExtend32(OperandType.I64, me);
                    }

                    Operand res = add ? context.Add(ne, me) : context.Subtract(ne, me);

                    res = EmitSatQ(context, res, 8 << op.Size, signedSrc: true, signed);

                    if (op.Size == 2)
                    {
                        res = context.ConvertI64ToI32(res);
                    }

                    return res;
                }
                else if (add) /* if (op.Size == 3) */
                {
                    return signed
                        ? EmitBinarySignedSatQAdd(context, ne, me)
                        : EmitBinaryUnsignedSatQAdd(context, ne, me);
                }
                else /* if (sub) */
                {
                    return signed
                        ? EmitBinarySignedSatQSub(context, ne, me)
                        : EmitBinaryUnsignedSatQSub(context, ne, me);
                }
            }, signed);
        }
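
        // FMAXNM/FMINNM on SSE4.1: when exactly one operand is a quiet NaN it is
        // replaced with -Inf (max) or +Inf (min) via BLENDV, so the following
        // MAXPS/MINPS picks the numeric operand, matching the ARM "number" semantics.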
        private static void EmitSse41MaxMinNumOpF32(ArmEmitterContext context, bool isMaxNum, bool scalar)
        {
            IOpCode32Simd op = (IOpCode32Simd)context.CurrOp;

            Func<Operand, Operand, Operand> genericEmit = (n, m) =>
            {
                Operand nNum = context.Copy(n);
                Operand mNum = context.Copy(m);

                InstEmit.EmitSse2VectorIsNaNOpF(context, nNum, out Operand nQNaNMask, out _, isQNaN: true);
                InstEmit.EmitSse2VectorIsNaNOpF(context, mNum, out Operand mQNaNMask, out _, isQNaN: true);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand negInfMask = X86GetAllElements(context, isMaxNum ? float.NegativeInfinity : float.PositiveInfinity);

                    Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnps, mQNaNMask, nQNaNMask);
                    Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnps, nQNaNMask, mQNaNMask);

                    nNum = context.AddIntrinsic(Intrinsic.X86Blendvps, nNum, negInfMask, nMask);
                    mNum = context.AddIntrinsic(Intrinsic.X86Blendvps, mNum, negInfMask, mMask);

                    return context.AddIntrinsic(isMaxNum ? Intrinsic.X86Maxps : Intrinsic.X86Minps, nNum, mNum);
                }
                else /* if (sizeF == 1) */
                {
                    Operand negInfMask = X86GetAllElements(context, isMaxNum ? double.NegativeInfinity : double.PositiveInfinity);

                    Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnpd, mQNaNMask, nQNaNMask);
                    Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnpd, nQNaNMask, mQNaNMask);

                    nNum = context.AddIntrinsic(Intrinsic.X86Blendvpd, nNum, negInfMask, nMask);
                    mNum = context.AddIntrinsic(Intrinsic.X86Blendvpd, mNum, negInfMask, mMask);

                    return context.AddIntrinsic(isMaxNum ? Intrinsic.X86Maxpd : Intrinsic.X86Minpd, nNum, mNum);
                }
            };

            if (scalar)
            {
                EmitScalarBinaryOpSimd32(context, genericEmit);
            }
            else
            {
                EmitVectorBinaryOpSimd32(context, genericEmit);
            }
        }
    }
}