InstEmitSimdMove.cs

using ARMeilleure.Decoders;
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.Translation;
using System;
using System.Collections.Generic;

using static ARMeilleure.Instructions.InstEmitHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper;
using static ARMeilleure.IntermediateRepresentation.OperandHelper;

namespace ARMeilleure.Instructions
{
    static partial class InstEmit
    {
#region "Masks"
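        // PSHUFB index vectors used by the SSSE3 Trn/Uzp/Xtn paths below. Each long packs eight
        // byte indices: an E0 entry builds the low 64 bits of the shuffled result and the matching
        // E1 entry the high 64 bits, grouping even- and odd-indexed elements for element sizes 0-2.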
        private static readonly long[] _masksE0_TrnUzpXtn = new long[]
        {
            14L << 56 | 12L << 48 | 10L << 40 | 08L << 32 | 06L << 24 | 04L << 16 | 02L << 8 | 00L << 0,
            13L << 56 | 12L << 48 | 09L << 40 | 08L << 32 | 05L << 24 | 04L << 16 | 01L << 8 | 00L << 0,
            11L << 56 | 10L << 48 | 09L << 40 | 08L << 32 | 03L << 24 | 02L << 16 | 01L << 8 | 00L << 0
        };

        private static readonly long[] _masksE1_TrnUzp = new long[]
        {
            15L << 56 | 13L << 48 | 11L << 40 | 09L << 32 | 07L << 24 | 05L << 16 | 03L << 8 | 01L << 0,
            15L << 56 | 14L << 48 | 11L << 40 | 10L << 32 | 07L << 24 | 06L << 16 | 03L << 8 | 02L << 0,
            15L << 56 | 14L << 48 | 13L << 40 | 12L << 32 | 07L << 24 | 06L << 16 | 05L << 8 | 04L << 0
        };

        private static readonly long[] _masksE0_Uzp = new long[]
        {
            13L << 56 | 09L << 48 | 05L << 40 | 01L << 32 | 12L << 24 | 08L << 16 | 04L << 8 | 00L << 0,
            11L << 56 | 10L << 48 | 03L << 40 | 02L << 32 | 09L << 24 | 08L << 16 | 01L << 8 | 00L << 0
        };

        private static readonly long[] _masksE1_Uzp = new long[]
        {
            15L << 56 | 11L << 48 | 07L << 40 | 03L << 32 | 14L << 24 | 10L << 16 | 06L << 8 | 02L << 0,
            15L << 56 | 14L << 48 | 07L << 40 | 06L << 32 | 13L << 24 | 12L << 16 | 05L << 8 | 04L << 0
        };
#endregion

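        // DUP (general): replicate a general-purpose register into every element of Vd. On the
        // SSE2 path, byte and halfword values are first replicated into a 32-bit lane with an
        // integer multiply, then broadcast with SHUFPS (MOVLHPS for 64-bit elements).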
        public static void Dup_Gp(ArmEmitterContext context)
        {
            OpCodeSimdIns op = (OpCodeSimdIns)context.CurrOp;

            Operand n = GetIntOrZR(context, op.Rn);

            if (Optimizations.UseSse2)
            {
                switch (op.Size)
                {
                    case 0: n = context.ZeroExtend8 (n.Type, n); n = context.Multiply(n, Const(n.Type, 0x01010101)); break;
                    case 1: n = context.ZeroExtend16(n.Type, n); n = context.Multiply(n, Const(n.Type, 0x00010001)); break;
                    case 2: n = context.ZeroExtend32(n.Type, n); break;
                }

                Operand res = context.VectorInsert(context.VectorZero(), n, 0);

                if (op.Size < 3)
                {
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Shufps, res, res, Const(0xf0));
                    }
                    else
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Shufps, res, res, Const(0));
                    }
                }
                else
                {
                    res = context.AddIntrinsic(Intrinsic.X86Movlhps, res, res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Operand res = context.VectorZero();

                int elems = op.GetBytesCount() >> op.Size;

                for (int index = 0; index < elems; index++)
                {
                    res = EmitVectorInsert(context, res, n, index, op.Size);
                }

                context.Copy(GetVec(op.Rd), res);
            }
        }

        public static void Dup_S(ArmEmitterContext context)
        {
            OpCodeSimdIns op = (OpCodeSimdIns)context.CurrOp;

            Operand ne = EmitVectorExtractZx(context, op.Rn, op.DstIndex, op.Size);

            context.Copy(GetVec(op.Rd), EmitVectorInsert(context, context.VectorZero(), ne, 0, op.Size));
        }

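        // DUP (element): replicate vector element DstIndex into every element of Vd. On SSE2,
        // byte and halfword elements are shifted down to lane 0, widened with PUNPCKL*, and
        // broadcast with SHUFPS; word elements use a single SHUFPS; 64-bit elements use
        // MOVLHPS/MOVHLPS directly.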
        public static void Dup_V(ArmEmitterContext context)
        {
            OpCodeSimdIns op = (OpCodeSimdIns)context.CurrOp;

            if (Optimizations.UseSse2)
            {
                Operand res = GetVec(op.Rn);

                if (op.Size == 0)
                {
                    if (op.DstIndex != 0)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Psrldq, res, Const(op.DstIndex));
                    }

                    res = context.AddIntrinsic(Intrinsic.X86Punpcklbw, res, res);
                    res = context.AddIntrinsic(Intrinsic.X86Punpcklwd, res, res);
                    res = context.AddIntrinsic(Intrinsic.X86Shufps, res, res, Const(0));
                }
                else if (op.Size == 1)
                {
                    if (op.DstIndex != 0)
                    {
                        res = context.AddIntrinsic(Intrinsic.X86Psrldq, res, Const(op.DstIndex * 2));
                    }

                    res = context.AddIntrinsic(Intrinsic.X86Punpcklwd, res, res);
                    res = context.AddIntrinsic(Intrinsic.X86Shufps, res, res, Const(0));
                }
                else if (op.Size == 2)
                {
                    int mask = op.DstIndex * 0b01010101;

                    res = context.AddIntrinsic(Intrinsic.X86Shufps, res, res, Const(mask));
                }
                else if (op.DstIndex == 0 && op.RegisterSize != RegisterSize.Simd64)
                {
                    res = context.AddIntrinsic(Intrinsic.X86Movlhps, res, res);
                }
                else if (op.DstIndex == 1)
                {
                    res = context.AddIntrinsic(Intrinsic.X86Movhlps, res, res);
                }

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, op.DstIndex, op.Size);

                Operand res = context.VectorZero();

                int elems = op.GetBytesCount() >> op.Size;

                for (int index = 0; index < elems; index++)
                {
                    res = EmitVectorInsert(context, res, ne, index, op.Size);
                }

                context.Copy(GetVec(op.Rd), res);
            }
        }

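        // EXT: extract a contiguous byte window from the register pair Vn:Vm starting at byte
        // Imm4. The SSE2 path shifts Vn right and Vm left by complementary byte counts and ORs
        // the two halves together.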
        public static void Ext_V(ArmEmitterContext context)
        {
            OpCodeSimdExt op = (OpCodeSimdExt)context.CurrOp;

            if (Optimizations.UseSse2)
            {
                Operand nShifted = GetVec(op.Rn);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    nShifted = context.VectorZeroUpper64(nShifted);
                }

                nShifted = context.AddIntrinsic(Intrinsic.X86Psrldq, nShifted, Const(op.Imm4));

                Operand mShifted = GetVec(op.Rm);

                mShifted = context.AddIntrinsic(Intrinsic.X86Pslldq, mShifted, Const(op.GetBytesCount() - op.Imm4));

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    mShifted = context.VectorZeroUpper64(mShifted);
                }

                Operand res = context.AddIntrinsic(Intrinsic.X86Por, nShifted, mShifted);

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Operand res = context.VectorZero();

                int bytes = op.GetBytesCount();

                int position = op.Imm4 & (bytes - 1);

                for (int index = 0; index < bytes; index++)
                {
                    int reg = op.Imm4 + index < bytes ? op.Rn : op.Rm;

                    Operand e = EmitVectorExtractZx(context, reg, position, 0);

                    position = (position + 1) & (bytes - 1);

                    res = EmitVectorInsert(context, res, e, index, 0);
                }

                context.Copy(GetVec(op.Rd), res);
            }
        }

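        // FCSEL: scalar floating-point conditional select, emitted here as a branch that copies
        // either the Vn or the Vm scalar into Vd depending on the condition flags.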
        public static void Fcsel_S(ArmEmitterContext context)
        {
            OpCodeSimdFcond op = (OpCodeSimdFcond)context.CurrOp;

            Operand lblTrue = Label();
            Operand lblEnd  = Label();

            Operand isTrue = InstEmitFlowHelper.GetCondTrue(context, op.Cond);

            context.BranchIfTrue(lblTrue, isTrue);

            OperandType type = op.Size == 0 ? OperandType.FP32 : OperandType.FP64;

            Operand me = context.VectorExtract(type, GetVec(op.Rm), 0);

            context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), me, 0));

            context.Branch(lblEnd);

            context.MarkLabel(lblTrue);

            Operand ne = context.VectorExtract(type, GetVec(op.Rn), 0);

            context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), ne, 0));

            context.MarkLabel(lblEnd);
        }

        public static void Fmov_Ftoi(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand ne = EmitVectorExtractZx(context, op.Rn, 0, op.Size + 2);

            SetIntOrZR(context, op.Rd, ne);
        }

        public static void Fmov_Ftoi1(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand ne = EmitVectorExtractZx(context, op.Rn, 1, 3);

            SetIntOrZR(context, op.Rd, ne);
        }

        public static void Fmov_Itof(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand n = GetIntOrZR(context, op.Rn);

            context.Copy(GetVec(op.Rd), EmitVectorInsert(context, context.VectorZero(), n, 0, op.Size + 2));
        }

        public static void Fmov_Itof1(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand d = GetVec(op.Rd);
            Operand n = GetIntOrZR(context, op.Rn);

            context.Copy(d, EmitVectorInsert(context, d, n, 1, 3));
        }

        public static void Fmov_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            OperandType type = op.Size == 0 ? OperandType.FP32 : OperandType.FP64;

            Operand ne = context.VectorExtract(type, GetVec(op.Rn), 0);

            context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), ne, 0));
        }

        public static void Fmov_Si(ArmEmitterContext context)
        {
            OpCodeSimdFmov op = (OpCodeSimdFmov)context.CurrOp;

            if (op.Size == 0)
            {
                context.Copy(GetVec(op.Rd), X86GetScalar(context, (int)op.Immediate));
            }
            else
            {
                context.Copy(GetVec(op.Rd), X86GetScalar(context, op.Immediate));
            }
        }

        public static void Fmov_Vi(ArmEmitterContext context)
        {
            OpCodeSimdImm op = (OpCodeSimdImm)context.CurrOp;

            if (Optimizations.UseSse2)
            {
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    context.Copy(GetVec(op.Rd), X86GetAllElements(context, op.Immediate));
                }
                else
                {
                    context.Copy(GetVec(op.Rd), X86GetScalar(context, op.Immediate));
                }
            }
            else
            {
                Operand e = Const(op.Immediate);

                Operand res = context.VectorZero();

                int elems = op.RegisterSize == RegisterSize.Simd128 ? 2 : 1;

                for (int index = 0; index < elems; index++)
                {
                    res = EmitVectorInsert(context, res, e, index, 3);
                }

                context.Copy(GetVec(op.Rd), res);
            }
        }

        public static void Ins_Gp(ArmEmitterContext context)
        {
            OpCodeSimdIns op = (OpCodeSimdIns)context.CurrOp;

            Operand d = GetVec(op.Rd);
            Operand n = GetIntOrZR(context, op.Rn);

            context.Copy(d, EmitVectorInsert(context, d, n, op.DstIndex, op.Size));
        }

        public static void Ins_V(ArmEmitterContext context)
        {
            OpCodeSimdIns op = (OpCodeSimdIns)context.CurrOp;

            Operand d  = GetVec(op.Rd);
            Operand ne = EmitVectorExtractZx(context, op.Rn, op.SrcIndex, op.Size);

            context.Copy(d, EmitVectorInsert(context, d, ne, op.DstIndex, op.Size));
        }

        public static void Movi_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                EmitSse2MoviMvni(context, not: false);
            }
            else
            {
                EmitVectorImmUnaryOp(context, (op1) => op1);
            }
        }

        public static void Mvni_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                EmitSse2MoviMvni(context, not: true);
            }
            else
            {
                EmitVectorImmUnaryOp(context, (op1) => context.BitwiseNot(op1));
            }
        }

        public static void Smov_S(ArmEmitterContext context)
        {
            OpCodeSimdIns op = (OpCodeSimdIns)context.CurrOp;

            Operand ne = EmitVectorExtractSx(context, op.Rn, op.DstIndex, op.Size);

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                ne = context.ZeroExtend32(OperandType.I64, ne);
            }

            SetIntOrZR(context, op.Rd, ne);
        }

        public static void Tbl_V(ArmEmitterContext context)
        {
            EmitTableVectorLookup(context, isTbl: true);
        }

        public static void Tbx_V(ArmEmitterContext context)
        {
            EmitTableVectorLookup(context, isTbl: false);
        }

        public static void Trn1_V(ArmEmitterContext context)
        {
            EmitVectorTranspose(context, part: 0);
        }

        public static void Trn2_V(ArmEmitterContext context)
        {
            EmitVectorTranspose(context, part: 1);
        }

        public static void Umov_S(ArmEmitterContext context)
        {
            OpCodeSimdIns op = (OpCodeSimdIns)context.CurrOp;

            Operand ne = EmitVectorExtractZx(context, op.Rn, op.DstIndex, op.Size);

            SetIntOrZR(context, op.Rd, ne);
        }

        public static void Uzp1_V(ArmEmitterContext context)
        {
            EmitVectorUnzip(context, part: 0);
        }

        public static void Uzp2_V(ArmEmitterContext context)
        {
            EmitVectorUnzip(context, part: 1);
        }

        public static void Xtn_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            if (Optimizations.UseSsse3)
            {
                Operand d = GetVec(op.Rd);

                Operand res = context.VectorZeroUpper64(d);

                Operand mask = X86GetAllElements(context, _masksE0_TrnUzpXtn[op.Size]);

                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pshufb, GetVec(op.Rn), mask);

                Intrinsic movInst = op.RegisterSize == RegisterSize.Simd128
                    ? Intrinsic.X86Movlhps
                    : Intrinsic.X86Movhlps;

                res = context.AddIntrinsic(movInst, res, res2);

                context.Copy(d, res);
            }
            else
            {
                int elems = 8 >> op.Size;

                int part = op.RegisterSize == RegisterSize.Simd128 ? elems : 0;

                Operand d = GetVec(op.Rd);

                Operand res = part == 0 ? context.VectorZero() : context.Copy(d);

                for (int index = 0; index < elems; index++)
                {
                    Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size + 1);

                    res = EmitVectorInsert(context, res, ne, part + index, op.Size);
                }

                context.Copy(d, res);
            }
        }

        public static void Zip1_V(ArmEmitterContext context)
        {
            EmitVectorZip(context, part: 0);
        }

        public static void Zip2_V(ArmEmitterContext context)
        {
            EmitVectorZip(context, part: 1);
        }

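        // MOVI/MVNI (SSE2 path): replicate the decoded immediate across every lane of the
        // destination, inverting it first for MVNI, and zero the upper half for 64-bit vectors.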
        private static void EmitSse2MoviMvni(ArmEmitterContext context, bool not)
        {
            OpCodeSimdImm op = (OpCodeSimdImm)context.CurrOp;

            long imm = op.Immediate;

            switch (op.Size)
            {
                case 0: imm *= 0x01010101; break;
                case 1: imm *= 0x00010001; break;
            }

            if (not)
            {
                imm = ~imm;
            }

            Operand mask;

            if (op.Size < 3)
            {
                mask = X86GetAllElements(context, (int)imm);
            }
            else
            {
                mask = X86GetAllElements(context, imm);
            }

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                mask = context.VectorZeroUpper64(mask);
            }

            context.Copy(GetVec(op.Rd), mask);
        }

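        // TBL/TBX: byte table lookup across one to four consecutive source registers. The SSSE3
        // path relies on PSHUFB zeroing a lane whenever bit 7 of its index byte is set: indices
        // outside a given register's 16-byte window are forced negative with PCMPGTB/POR, so each
        // register contributes only its own slice and the slices are ORed together. For TBX the
        // original destination bytes are kept wherever the index is out of range for the whole table.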
        private static void EmitTableVectorLookup(ArmEmitterContext context, bool isTbl)
        {
            OpCodeSimdTbl op = (OpCodeSimdTbl)context.CurrOp;

            if (Optimizations.UseSsse3)
            {
                Operand d = GetVec(op.Rd);
                Operand m = GetVec(op.Rm);

                Operand res;

                Operand mask = X86GetAllElements(context, 0x0F0F0F0F0F0F0F0FL);

                // Fast path for single register table.
                {
                    Operand n = GetVec(op.Rn);

                    Operand mMask = context.AddIntrinsic(Intrinsic.X86Pcmpgtb, m, mask);
                    mMask = context.AddIntrinsic(Intrinsic.X86Por, mMask, m);

                    res = context.AddIntrinsic(Intrinsic.X86Pshufb, n, mMask);
                }

                for (int index = 1; index < op.Size; index++)
                {
                    Operand ni = GetVec((op.Rn + index) & 0x1F);

                    Operand idxMask = X86GetAllElements(context, 0x1010101010101010L * index);

                    Operand mSubMask = context.AddIntrinsic(Intrinsic.X86Psubb, m, idxMask);

                    Operand mMask = context.AddIntrinsic(Intrinsic.X86Pcmpgtb, mSubMask, mask);
                    mMask = context.AddIntrinsic(Intrinsic.X86Por, mMask, mSubMask);

                    Operand res2 = context.AddIntrinsic(Intrinsic.X86Pshufb, ni, mMask);

                    res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);
                }

                if (!isTbl)
                {
                    Operand idxMask  = X86GetAllElements(context, (0x1010101010101010L * op.Size) - 0x0101010101010101L);
                    Operand zeroMask = context.VectorZero();

                    Operand mPosMask = context.AddIntrinsic(Intrinsic.X86Pcmpgtb, m,        idxMask);
                    Operand mNegMask = context.AddIntrinsic(Intrinsic.X86Pcmpgtb, zeroMask, m);

                    Operand mMask = context.AddIntrinsic(Intrinsic.X86Por, mPosMask, mNegMask);

                    Operand dMask = context.AddIntrinsic(Intrinsic.X86Pand, d, mMask);

                    res = context.AddIntrinsic(Intrinsic.X86Por, res, dMask);
                }

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(d, res);
            }
            else
            {
                Operand d = GetVec(op.Rd);

                List<Operand> args = new List<Operand>();

                if (!isTbl)
                {
                    args.Add(d);
                }

                args.Add(GetVec(op.Rm));
                args.Add(Const(op.RegisterSize == RegisterSize.Simd64 ? 8 : 16));

                for (int index = 0; index < op.Size; index++)
                {
                    args.Add(GetVec((op.Rn + index) & 0x1F));
                }

                Delegate dlg = null;

                switch (op.Size)
                {
                    case 1: dlg = isTbl
                        ? (Delegate)new _V128_V128_S32_V128     (SoftFallback.Tbl1)
                        : (Delegate)new _V128_V128_V128_S32_V128(SoftFallback.Tbx1);
                        break;

                    case 2: dlg = isTbl
                        ? (Delegate)new _V128_V128_S32_V128_V128     (SoftFallback.Tbl2)
                        : (Delegate)new _V128_V128_V128_S32_V128_V128(SoftFallback.Tbx2);
                        break;

                    case 3: dlg = isTbl
                        ? (Delegate)new _V128_V128_S32_V128_V128_V128     (SoftFallback.Tbl3)
                        : (Delegate)new _V128_V128_V128_S32_V128_V128_V128(SoftFallback.Tbx3);
                        break;

                    case 4: dlg = isTbl
                        ? (Delegate)new _V128_V128_S32_V128_V128_V128_V128     (SoftFallback.Tbl4)
                        : (Delegate)new _V128_V128_V128_S32_V128_V128_V128_V128(SoftFallback.Tbx4);
                        break;
                }

                context.Copy(d, context.Call(dlg, args.ToArray()));
            }
        }

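        // TRN1/TRN2: interleave the even (part 0) or odd (part 1) indexed elements of Vn and Vm.
        // For element sizes below 64 bits, PSHUFB first packs each source's even elements into its
        // low half and odd elements into its high half, so PUNPCKL*/PUNPCKH* can then interleave
        // the selected halves.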
        private static void EmitVectorTranspose(ArmEmitterContext context, int part)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSsse3)
            {
                Operand mask = null;

                if (op.Size < 3)
                {
                    long maskE0 = _masksE0_TrnUzpXtn[op.Size];
                    long maskE1 = _masksE1_TrnUzp   [op.Size];

                    mask = X86GetScalar(context, maskE0);

                    mask = EmitVectorInsert(context, mask, Const(maskE1), 1, 3);
                }

                Operand n = GetVec(op.Rn);

                if (op.Size < 3)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Pshufb, n, mask);
                }

                Operand m = GetVec(op.Rm);

                if (op.Size < 3)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Pshufb, m, mask);
                }

                Intrinsic punpckInst = part == 0
                    ? X86PunpcklInstruction[op.Size]
                    : X86PunpckhInstruction[op.Size];

                Operand res = context.AddIntrinsic(punpckInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Operand res = context.VectorZero();

                int pairs = op.GetPairsCount() >> op.Size;

                for (int index = 0; index < pairs; index++)
                {
                    int pairIndex = index << 1;

                    Operand ne = EmitVectorExtractZx(context, op.Rn, pairIndex + part, op.Size);
                    Operand me = EmitVectorExtractZx(context, op.Rm, pairIndex + part, op.Size);

                    res = EmitVectorInsert(context, res, ne, pairIndex,     op.Size);
                    res = EmitVectorInsert(context, res, me, pairIndex + 1, op.Size);
                }

                context.Copy(GetVec(op.Rd), res);
            }
        }

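        // UZP1/UZP2: concatenate the even (part 0) or odd (part 1) indexed elements of Vn and Vm.
        // The 128-bit SSSE3 path reuses the transpose PSHUFB grouping and then picks the even or
        // odd halves with PUNPCKLQDQ/PUNPCKHQDQ; the 64-bit path interleaves the sources with
        // PUNPCKL*, regroups even/odd elements with PSHUFB, and keeps the wanted half.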
        private static void EmitVectorUnzip(ArmEmitterContext context, int part)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSsse3)
            {
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    Operand mask = null;

                    if (op.Size < 3)
                    {
                        long maskE0 = _masksE0_TrnUzpXtn[op.Size];
                        long maskE1 = _masksE1_TrnUzp   [op.Size];

                        mask = X86GetScalar(context, maskE0);

                        mask = EmitVectorInsert(context, mask, Const(maskE1), 1, 3);
                    }

                    Operand n = GetVec(op.Rn);

                    if (op.Size < 3)
                    {
                        n = context.AddIntrinsic(Intrinsic.X86Pshufb, n, mask);
                    }

                    Operand m = GetVec(op.Rm);

                    if (op.Size < 3)
                    {
                        m = context.AddIntrinsic(Intrinsic.X86Pshufb, m, mask);
                    }

                    Intrinsic punpckInst = part == 0
                        ? Intrinsic.X86Punpcklqdq
                        : Intrinsic.X86Punpckhqdq;

                    Operand res = context.AddIntrinsic(punpckInst, n, m);

                    context.Copy(GetVec(op.Rd), res);
                }
                else
                {
                    Operand n = GetVec(op.Rn);
                    Operand m = GetVec(op.Rm);

                    Intrinsic punpcklInst = X86PunpcklInstruction[op.Size];

                    Operand res = context.AddIntrinsic(punpcklInst, n, m);

                    if (op.Size < 2)
                    {
                        long maskE0 = _masksE0_Uzp[op.Size];
                        long maskE1 = _masksE1_Uzp[op.Size];

                        Operand mask = X86GetScalar(context, maskE0);

                        mask = EmitVectorInsert(context, mask, Const(maskE1), 1, 3);

                        res = context.AddIntrinsic(Intrinsic.X86Pshufb, res, mask);
                    }

                    Intrinsic punpckInst = part == 0
                        ? Intrinsic.X86Punpcklqdq
                        : Intrinsic.X86Punpckhqdq;

                    res = context.AddIntrinsic(punpckInst, res, context.VectorZero());

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                Operand res = context.VectorZero();

                int pairs = op.GetPairsCount() >> op.Size;

                for (int index = 0; index < pairs; index++)
                {
                    int idx = index << 1;

                    Operand ne = EmitVectorExtractZx(context, op.Rn, idx + part, op.Size);
                    Operand me = EmitVectorExtractZx(context, op.Rm, idx + part, op.Size);

                    res = EmitVectorInsert(context, res, ne, index,         op.Size);
                    res = EmitVectorInsert(context, res, me, pairs + index, op.Size);
                }

                context.Copy(GetVec(op.Rd), res);
            }
        }

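        // ZIP1/ZIP2: interleave the elements from the lower (part 0) or upper (part 1) halves of
        // Vn and Vm, which maps directly to PUNPCKL*/PUNPCKH* on 128-bit operands.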
        private static void EmitVectorZip(ArmEmitterContext context, int part)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    Intrinsic punpckInst = part == 0
                        ? X86PunpcklInstruction[op.Size]
                        : X86PunpckhInstruction[op.Size];

                    Operand res = context.AddIntrinsic(punpckInst, n, m);

                    context.Copy(GetVec(op.Rd), res);
                }
                else
                {
                    Operand res = context.AddIntrinsic(X86PunpcklInstruction[op.Size], n, m);

                    Intrinsic punpckInst = part == 0
                        ? Intrinsic.X86Punpcklqdq
                        : Intrinsic.X86Punpckhqdq;

                    res = context.AddIntrinsic(punpckInst, res, context.VectorZero());

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                Operand res = context.VectorZero();

                int pairs = op.GetPairsCount() >> op.Size;

                int baseIndex = part != 0 ? pairs : 0;

                for (int index = 0; index < pairs; index++)
                {
                    int pairIndex = index << 1;

                    Operand ne = EmitVectorExtractZx(context, op.Rn, baseIndex + index, op.Size);
                    Operand me = EmitVectorExtractZx(context, op.Rm, baseIndex + index, op.Size);

                    res = EmitVectorInsert(context, res, ne, pairIndex,     op.Size);
                    res = EmitVectorInsert(context, res, me, pairIndex + 1, op.Size);
                }

                context.Copy(GetVec(op.Rd), res);
            }
        }
    }
}