// InstEmitSimdLogical32.cs — A32 SIMD bitwise/logical instruction emitters (VAND, VBIC, VBIF/VBIT/VBSL, VEOR, VORN, VORR, VTST).
  1. using ARMeilleure.Decoders;
  2. using ARMeilleure.IntermediateRepresentation;
  3. using ARMeilleure.Translation;
  4. using static ARMeilleure.Instructions.InstEmitHelper;
  5. using static ARMeilleure.Instructions.InstEmitSimdHelper;
  6. using static ARMeilleure.Instructions.InstEmitSimdHelper32;
  7. using static ARMeilleure.IntermediateRepresentation.Operand.Factory;
  8. namespace ARMeilleure.Instructions
  9. {
  10. static partial class InstEmit32
  11. {
  12. public static void Vand_I(ArmEmitterContext context)
  13. {
  14. if (Optimizations.UseAdvSimd)
  15. {
  16. InstEmitSimdHelper32Arm64.EmitVectorBinaryOpSimd32(context, (n, m) => context.AddIntrinsic(Intrinsic.Arm64AndV | Intrinsic.Arm64V128, n, m));
  17. }
  18. else if (Optimizations.UseSse2)
  19. {
  20. EmitVectorBinaryOpSimd32(context, (n, m) => context.AddIntrinsic(Intrinsic.X86Pand, n, m));
  21. }
  22. else
  23. {
  24. EmitVectorBinaryOpZx32(context, (op1, op2) => context.BitwiseAnd(op1, op2));
  25. }
  26. }
  27. public static void Vbic_I(ArmEmitterContext context)
  28. {
  29. if (Optimizations.UseAdvSimd)
  30. {
  31. InstEmitSimdHelper32Arm64.EmitVectorBinaryOpSimd32(context, (n, m) => context.AddIntrinsic(Intrinsic.Arm64BicV | Intrinsic.Arm64V128, n, m));
  32. }
  33. else if (Optimizations.UseSse2)
  34. {
  35. EmitVectorBinaryOpSimd32(context, (n, m) => context.AddIntrinsic(Intrinsic.X86Pandn, m, n));
  36. }
  37. else
  38. {
  39. EmitVectorBinaryOpZx32(context, (op1, op2) => context.BitwiseAnd(op1, context.BitwiseNot(op2)));
  40. }
  41. }
  42. public static void Vbic_II(ArmEmitterContext context)
  43. {
  44. OpCode32SimdImm op = (OpCode32SimdImm)context.CurrOp;
  45. long immediate = op.Immediate;
  46. // Replicate fields to fill the 64-bits, if size is < 64-bits.
  47. switch (op.Size)
  48. {
  49. case 0: immediate *= 0x0101010101010101L; break;
  50. case 1: immediate *= 0x0001000100010001L; break;
  51. case 2: immediate *= 0x0000000100000001L; break;
  52. }
  53. Operand imm = Const(immediate);
  54. Operand res = GetVecA32(op.Qd);
  55. if (op.Q)
  56. {
  57. for (int elem = 0; elem < 2; elem++)
  58. {
  59. Operand de = EmitVectorExtractZx(context, op.Qd, elem, 3);
  60. res = EmitVectorInsert(context, res, context.BitwiseAnd(de, context.BitwiseNot(imm)), elem, 3);
  61. }
  62. }
  63. else
  64. {
  65. Operand de = EmitVectorExtractZx(context, op.Qd, op.Vd & 1, 3);
  66. res = EmitVectorInsert(context, res, context.BitwiseAnd(de, context.BitwiseNot(imm)), op.Vd & 1, 3);
  67. }
  68. context.Copy(GetVecA32(op.Qd), res);
  69. }
  70. public static void Vbif(ArmEmitterContext context)
  71. {
  72. if (Optimizations.UseAdvSimd)
  73. {
  74. InstEmitSimdHelper32Arm64.EmitVectorTernaryOpSimd32(context, (d, n, m) => context.AddIntrinsic(Intrinsic.Arm64BifV | Intrinsic.Arm64V128, d, n, m));
  75. }
  76. else
  77. {
  78. EmitBifBit(context, true);
  79. }
  80. }
  81. public static void Vbit(ArmEmitterContext context)
  82. {
  83. if (Optimizations.UseAdvSimd)
  84. {
  85. InstEmitSimdHelper32Arm64.EmitVectorTernaryOpSimd32(context, (d, n, m) => context.AddIntrinsic(Intrinsic.Arm64BitV | Intrinsic.Arm64V128, d, n, m));
  86. }
  87. else
  88. {
  89. EmitBifBit(context, false);
  90. }
  91. }
  92. public static void Vbsl(ArmEmitterContext context)
  93. {
  94. if (Optimizations.UseAdvSimd)
  95. {
  96. InstEmitSimdHelper32Arm64.EmitVectorTernaryOpSimd32(context, (d, n, m) => context.AddIntrinsic(Intrinsic.Arm64BslV | Intrinsic.Arm64V128, d, n, m));
  97. }
  98. else if (Optimizations.UseSse2)
  99. {
  100. EmitVectorTernaryOpSimd32(context, (d, n, m) =>
  101. {
  102. Operand res = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);
  103. res = context.AddIntrinsic(Intrinsic.X86Pand, res, d);
  104. return context.AddIntrinsic(Intrinsic.X86Pxor, res, m);
  105. });
  106. }
  107. else
  108. {
  109. EmitVectorTernaryOpZx32(context, (op1, op2, op3) =>
  110. {
  111. return context.BitwiseExclusiveOr(
  112. context.BitwiseAnd(op1,
  113. context.BitwiseExclusiveOr(op2, op3)), op3);
  114. });
  115. }
  116. }
  117. public static void Veor_I(ArmEmitterContext context)
  118. {
  119. if (Optimizations.UseAdvSimd)
  120. {
  121. InstEmitSimdHelper32Arm64.EmitVectorBinaryOpSimd32(context, (n, m) => context.AddIntrinsic(Intrinsic.Arm64EorV | Intrinsic.Arm64V128, n, m));
  122. }
  123. else if (Optimizations.UseSse2)
  124. {
  125. EmitVectorBinaryOpSimd32(context, (n, m) => context.AddIntrinsic(Intrinsic.X86Pxor, n, m));
  126. }
  127. else
  128. {
  129. EmitVectorBinaryOpZx32(context, (op1, op2) => context.BitwiseExclusiveOr(op1, op2));
  130. }
  131. }
  132. public static void Vorn_I(ArmEmitterContext context)
  133. {
  134. if (Optimizations.UseAdvSimd)
  135. {
  136. InstEmitSimdHelper32Arm64.EmitVectorBinaryOpSimd32(context, (n, m) => context.AddIntrinsic(Intrinsic.Arm64OrnV | Intrinsic.Arm64V128, n, m));
  137. }
  138. else if (Optimizations.UseAvx512Ortho)
  139. {
  140. EmitVectorBinaryOpSimd32(context, (n, m) =>
  141. {
  142. return context.AddIntrinsic(Intrinsic.X86Vpternlogd, n, m, Const(0b11001100 | ~0b10101010));
  143. });
  144. }
  145. else if (Optimizations.UseSse2)
  146. {
  147. Operand mask = context.VectorOne();
  148. EmitVectorBinaryOpSimd32(context, (n, m) =>
  149. {
  150. m = context.AddIntrinsic(Intrinsic.X86Pandn, m, mask);
  151. return context.AddIntrinsic(Intrinsic.X86Por, n, m);
  152. });
  153. }
  154. else
  155. {
  156. EmitVectorBinaryOpZx32(context, (op1, op2) => context.BitwiseOr(op1, context.BitwiseNot(op2)));
  157. }
  158. }
  159. public static void Vorr_I(ArmEmitterContext context)
  160. {
  161. if (Optimizations.UseAdvSimd)
  162. {
  163. InstEmitSimdHelper32Arm64.EmitVectorBinaryOpSimd32(context, (n, m) => context.AddIntrinsic(Intrinsic.Arm64OrrV | Intrinsic.Arm64V128, n, m));
  164. }
  165. else if (Optimizations.UseSse2)
  166. {
  167. EmitVectorBinaryOpSimd32(context, (n, m) => context.AddIntrinsic(Intrinsic.X86Por, n, m));
  168. }
  169. else
  170. {
  171. EmitVectorBinaryOpZx32(context, (op1, op2) => context.BitwiseOr(op1, op2));
  172. }
  173. }
  174. public static void Vorr_II(ArmEmitterContext context)
  175. {
  176. OpCode32SimdImm op = (OpCode32SimdImm)context.CurrOp;
  177. long immediate = op.Immediate;
  178. // Replicate fields to fill the 64-bits, if size is < 64-bits.
  179. switch (op.Size)
  180. {
  181. case 0: immediate *= 0x0101010101010101L; break;
  182. case 1: immediate *= 0x0001000100010001L; break;
  183. case 2: immediate *= 0x0000000100000001L; break;
  184. }
  185. Operand imm = Const(immediate);
  186. Operand res = GetVecA32(op.Qd);
  187. if (op.Q)
  188. {
  189. for (int elem = 0; elem < 2; elem++)
  190. {
  191. Operand de = EmitVectorExtractZx(context, op.Qd, elem, 3);
  192. res = EmitVectorInsert(context, res, context.BitwiseOr(de, imm), elem, 3);
  193. }
  194. }
  195. else
  196. {
  197. Operand de = EmitVectorExtractZx(context, op.Qd, op.Vd & 1, 3);
  198. res = EmitVectorInsert(context, res, context.BitwiseOr(de, imm), op.Vd & 1, 3);
  199. }
  200. context.Copy(GetVecA32(op.Qd), res);
  201. }
  202. public static void Vtst(ArmEmitterContext context)
  203. {
  204. EmitVectorBinaryOpZx32(context, (op1, op2) =>
  205. {
  206. Operand isZero = context.ICompareEqual(context.BitwiseAnd(op1, op2), Const(0));
  207. return context.ConditionalSelect(isZero, Const(0), Const(-1));
  208. });
  209. }
  210. private static void EmitBifBit(ArmEmitterContext context, bool notRm)
  211. {
  212. OpCode32SimdReg op = (OpCode32SimdReg)context.CurrOp;
  213. if (Optimizations.UseSse2)
  214. {
  215. EmitVectorTernaryOpSimd32(context, (d, n, m) =>
  216. {
  217. Operand res = context.AddIntrinsic(Intrinsic.X86Pxor, n, d);
  218. res = context.AddIntrinsic((notRm) ? Intrinsic.X86Pandn : Intrinsic.X86Pand, m, res);
  219. return context.AddIntrinsic(Intrinsic.X86Pxor, d, res);
  220. });
  221. }
  222. else
  223. {
  224. EmitVectorTernaryOpZx32(context, (d, n, m) =>
  225. {
  226. if (notRm)
  227. {
  228. m = context.BitwiseNot(m);
  229. }
  230. return context.BitwiseExclusiveOr(
  231. context.BitwiseAnd(m,
  232. context.BitwiseExclusiveOr(d, n)), d);
  233. });
  234. }
  235. }
  236. }
  237. }