InstEmitSimdCvt.cs 53 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483
  1. using ARMeilleure.Decoders;
  2. using ARMeilleure.IntermediateRepresentation;
  3. using ARMeilleure.State;
  4. using ARMeilleure.Translation;
  5. using System;
  6. using System.Diagnostics;
  7. using static ARMeilleure.Instructions.InstEmitHelper;
  8. using static ARMeilleure.Instructions.InstEmitSimdHelper;
  9. using static ARMeilleure.IntermediateRepresentation.OperandHelper;
  10. namespace ARMeilleure.Instructions
  11. {
  12. using Func1I = Func<Operand, Operand>;
  13. static partial class InstEmit
  14. {
  15. public static void Fcvt_S(ArmEmitterContext context)
  16. {
  17. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  18. if (op.Size == 0 && op.Opc == 1) // Single -> Double.
  19. {
  20. if (Optimizations.UseSse2)
  21. {
  22. Operand n = GetVec(op.Rn);
  23. Operand res = context.AddIntrinsic(Intrinsic.X86Cvtss2sd, context.VectorZero(), n);
  24. context.Copy(GetVec(op.Rd), res);
  25. }
  26. else
  27. {
  28. Operand ne = context.VectorExtract(OperandType.FP32, GetVec(op.Rn), 0);
  29. Operand res = context.ConvertToFP(OperandType.FP64, ne);
  30. context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
  31. }
  32. }
  33. else if (op.Size == 1 && op.Opc == 0) // Double -> Single.
  34. {
  35. if (Optimizations.UseSse2)
  36. {
  37. Operand n = GetVec(op.Rn);
  38. Operand res = context.AddIntrinsic(Intrinsic.X86Cvtsd2ss, context.VectorZero(), n);
  39. context.Copy(GetVec(op.Rd), res);
  40. }
  41. else
  42. {
  43. Operand ne = context.VectorExtract(OperandType.FP64, GetVec(op.Rn), 0);
  44. Operand res = context.ConvertToFP(OperandType.FP32, ne);
  45. context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
  46. }
  47. }
  48. else if (op.Size == 0 && op.Opc == 3) // Single -> Half.
  49. {
  50. Operand ne = context.VectorExtract(OperandType.FP32, GetVec(op.Rn), 0);
  51. Delegate dlg = new _U16_F32(SoftFloat32_16.FPConvert);
  52. Operand res = context.Call(dlg, ne);
  53. res = context.ZeroExtend16(OperandType.I64, res);
  54. context.Copy(GetVec(op.Rd), EmitVectorInsert(context, context.VectorZero(), res, 0, 1));
  55. }
  56. else if (op.Size == 3 && op.Opc == 0) // Half -> Single.
  57. {
  58. Operand ne = EmitVectorExtractZx(context, op.Rn, 0, 1);
  59. Delegate dlg = new _F32_U16(SoftFloat16_32.FPConvert);
  60. Operand res = context.Call(dlg, ne);
  61. context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
  62. }
  63. else if (op.Size == 1 && op.Opc == 3) // Double -> Half.
  64. {
  65. throw new NotImplementedException("Double-precision to half-precision.");
  66. }
  67. else if (op.Size == 3 && op.Opc == 1) // Double -> Half.
  68. {
  69. throw new NotImplementedException("Half-precision to double-precision.");
  70. }
  71. else // Invalid encoding.
  72. {
  73. Debug.Assert(false, $"type == {op.Size} && opc == {op.Opc}");
  74. }
  75. }
  76. public static void Fcvtas_Gp(ArmEmitterContext context)
  77. {
  78. EmitFcvt_s_Gp(context, (op1) => EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1));
  79. }
  80. public static void Fcvtau_Gp(ArmEmitterContext context)
  81. {
  82. EmitFcvt_u_Gp(context, (op1) => EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1));
  83. }
// FCVTL{2} (vector): widens each FP element of the lower half of the source
// (upper half for the "2" form, i.e. Simd128 register size) to the next
// larger FP size: half -> single (sizeF == 0) or single -> double (sizeF == 1).
public static void Fcvtl_V(ArmEmitterContext context)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;

    int sizeF = op.Size & 1; // 0: half -> single, 1: single -> double.

    if (Optimizations.UseSse2 && sizeF == 1)
    {
        Operand n = GetVec(op.Rn);
        Operand res;

        if (op.RegisterSize == RegisterSize.Simd128)
        {
            // FCVTL2: source elements come from the upper 64 bits; move them down.
            res = context.AddIntrinsic(Intrinsic.X86Movhlps, n, n);
        }
        else
        {
            res = n;
        }

        res = context.AddIntrinsic(Intrinsic.X86Cvtps2pd, res);

        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        Operand res = context.VectorZero();

        int elems = 4 >> sizeF; // Destination element count (4 singles or 2 doubles).

        // FCVTL2 reads its inputs starting at the upper half of the source.
        int part = op.RegisterSize == RegisterSize.Simd128 ? elems : 0;

        for (int index = 0; index < elems; index++)
        {
            if (sizeF == 0)
            {
                // Half -> single via the software float library.
                Operand ne = EmitVectorExtractZx(context, op.Rn, part + index, 1);

                Delegate dlg = new _F32_U16(SoftFloat16_32.FPConvert);

                Operand e = context.Call(dlg, ne);

                res = context.VectorInsert(res, e, index);
            }
            else /* if (sizeF == 1) */
            {
                Operand ne = context.VectorExtract(OperandType.FP32, GetVec(op.Rn), part + index);

                Operand e = context.ConvertToFP(OperandType.FP64, ne);

                res = context.VectorInsert(res, e, index);
            }
        }

        context.Copy(GetVec(op.Rd), res);
    }
}
  127. public static void Fcvtms_Gp(ArmEmitterContext context)
  128. {
  129. if (Optimizations.UseSse41)
  130. {
  131. EmitSse41Fcvts_Gp(context, FPRoundingMode.TowardsMinusInfinity, isFixed: false);
  132. }
  133. else
  134. {
  135. EmitFcvt_s_Gp(context, (op1) => EmitUnaryMathCall(context, MathF.Floor, Math.Floor, op1));
  136. }
  137. }
  138. public static void Fcvtmu_Gp(ArmEmitterContext context)
  139. {
  140. if (Optimizations.UseSse41)
  141. {
  142. EmitSse41Fcvtu_Gp(context, FPRoundingMode.TowardsMinusInfinity, isFixed: false);
  143. }
  144. else
  145. {
  146. EmitFcvt_u_Gp(context, (op1) => EmitUnaryMathCall(context, MathF.Floor, Math.Floor, op1));
  147. }
  148. }
  149. public static void Fcvtn_V(ArmEmitterContext context)
  150. {
  151. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  152. int sizeF = op.Size & 1;
  153. if (Optimizations.UseSse2 && sizeF == 1)
  154. {
  155. Operand d = GetVec(op.Rd);
  156. Operand res = context.VectorZeroUpper64(d);
  157. Operand nInt = context.AddIntrinsic(Intrinsic.X86Cvtpd2ps, GetVec(op.Rn));
  158. nInt = context.AddIntrinsic(Intrinsic.X86Movlhps, nInt, nInt);
  159. Intrinsic movInst = op.RegisterSize == RegisterSize.Simd128
  160. ? Intrinsic.X86Movlhps
  161. : Intrinsic.X86Movhlps;
  162. res = context.AddIntrinsic(movInst, res, nInt);
  163. context.Copy(d, res);
  164. }
  165. else
  166. {
  167. OperandType type = sizeF == 0 ? OperandType.FP32 : OperandType.FP64;
  168. int elems = 4 >> sizeF;
  169. int part = op.RegisterSize == RegisterSize.Simd128 ? elems : 0;
  170. Operand d = GetVec(op.Rd);
  171. Operand res = part == 0 ? context.VectorZero() : context.Copy(d);
  172. for (int index = 0; index < elems; index++)
  173. {
  174. Operand ne = context.VectorExtract(type, GetVec(op.Rn), 0);
  175. if (sizeF == 0)
  176. {
  177. Delegate dlg = new _U16_F32(SoftFloat32_16.FPConvert);
  178. Operand e = context.Call(dlg, ne);
  179. e = context.ZeroExtend16(OperandType.I64, e);
  180. res = EmitVectorInsert(context, res, e, part + index, 1);
  181. }
  182. else /* if (sizeF == 1) */
  183. {
  184. Operand e = context.ConvertToFP(OperandType.FP32, ne);
  185. res = context.VectorInsert(res, e, part + index);
  186. }
  187. }
  188. context.Copy(d, res);
  189. }
  190. }
  191. public static void Fcvtns_S(ArmEmitterContext context)
  192. {
  193. if (Optimizations.UseSse41)
  194. {
  195. EmitSse41Fcvts(context, FPRoundingMode.ToNearest, scalar: true);
  196. }
  197. else
  198. {
  199. EmitFcvtn(context, signed: true, scalar: true);
  200. }
  201. }
  202. public static void Fcvtns_V(ArmEmitterContext context)
  203. {
  204. if (Optimizations.UseSse41)
  205. {
  206. EmitSse41Fcvts(context, FPRoundingMode.ToNearest, scalar: false);
  207. }
  208. else
  209. {
  210. EmitFcvtn(context, signed: true, scalar: false);
  211. }
  212. }
  213. public static void Fcvtnu_S(ArmEmitterContext context)
  214. {
  215. if (Optimizations.UseSse41)
  216. {
  217. EmitSse41Fcvtu(context, FPRoundingMode.ToNearest, scalar: true);
  218. }
  219. else
  220. {
  221. EmitFcvtn(context, signed: false, scalar: true);
  222. }
  223. }
  224. public static void Fcvtnu_V(ArmEmitterContext context)
  225. {
  226. if (Optimizations.UseSse41)
  227. {
  228. EmitSse41Fcvtu(context, FPRoundingMode.ToNearest, scalar: false);
  229. }
  230. else
  231. {
  232. EmitFcvtn(context, signed: false, scalar: false);
  233. }
  234. }
  235. public static void Fcvtps_Gp(ArmEmitterContext context)
  236. {
  237. if (Optimizations.UseSse41)
  238. {
  239. EmitSse41Fcvts_Gp(context, FPRoundingMode.TowardsPlusInfinity, isFixed: false);
  240. }
  241. else
  242. {
  243. EmitFcvt_s_Gp(context, (op1) => EmitUnaryMathCall(context, MathF.Ceiling, Math.Ceiling, op1));
  244. }
  245. }
  246. public static void Fcvtpu_Gp(ArmEmitterContext context)
  247. {
  248. if (Optimizations.UseSse41)
  249. {
  250. EmitSse41Fcvtu_Gp(context, FPRoundingMode.TowardsPlusInfinity, isFixed: false);
  251. }
  252. else
  253. {
  254. EmitFcvt_u_Gp(context, (op1) => EmitUnaryMathCall(context, MathF.Ceiling, Math.Ceiling, op1));
  255. }
  256. }
  257. public static void Fcvtzs_Gp(ArmEmitterContext context)
  258. {
  259. if (Optimizations.UseSse41)
  260. {
  261. EmitSse41Fcvts_Gp(context, FPRoundingMode.TowardsZero, isFixed: false);
  262. }
  263. else
  264. {
  265. EmitFcvt_s_Gp(context, (op1) => op1);
  266. }
  267. }
  268. public static void Fcvtzs_Gp_Fixed(ArmEmitterContext context)
  269. {
  270. if (Optimizations.UseSse41)
  271. {
  272. EmitSse41Fcvts_Gp(context, FPRoundingMode.TowardsZero, isFixed: true);
  273. }
  274. else
  275. {
  276. EmitFcvtzs_Gp_Fixed(context);
  277. }
  278. }
  279. public static void Fcvtzs_S(ArmEmitterContext context)
  280. {
  281. if (Optimizations.UseSse41)
  282. {
  283. EmitSse41Fcvts(context, FPRoundingMode.TowardsZero, scalar: true);
  284. }
  285. else
  286. {
  287. EmitFcvtz(context, signed: true, scalar: true);
  288. }
  289. }
  290. public static void Fcvtzs_V(ArmEmitterContext context)
  291. {
  292. if (Optimizations.UseSse41)
  293. {
  294. EmitSse41Fcvts(context, FPRoundingMode.TowardsZero, scalar: false);
  295. }
  296. else
  297. {
  298. EmitFcvtz(context, signed: true, scalar: false);
  299. }
  300. }
  301. public static void Fcvtzs_V_Fixed(ArmEmitterContext context)
  302. {
  303. if (Optimizations.UseSse41)
  304. {
  305. EmitSse41Fcvts(context, FPRoundingMode.TowardsZero, scalar: false);
  306. }
  307. else
  308. {
  309. EmitFcvtz(context, signed: true, scalar: false);
  310. }
  311. }
  312. public static void Fcvtzu_Gp(ArmEmitterContext context)
  313. {
  314. if (Optimizations.UseSse41)
  315. {
  316. EmitSse41Fcvtu_Gp(context, FPRoundingMode.TowardsZero, isFixed: false);
  317. }
  318. else
  319. {
  320. EmitFcvt_u_Gp(context, (op1) => op1);
  321. }
  322. }
  323. public static void Fcvtzu_Gp_Fixed(ArmEmitterContext context)
  324. {
  325. if (Optimizations.UseSse41)
  326. {
  327. EmitSse41Fcvtu_Gp(context, FPRoundingMode.TowardsZero, isFixed: true);
  328. }
  329. else
  330. {
  331. EmitFcvtzu_Gp_Fixed(context);
  332. }
  333. }
  334. public static void Fcvtzu_S(ArmEmitterContext context)
  335. {
  336. if (Optimizations.UseSse41)
  337. {
  338. EmitSse41Fcvtu(context, FPRoundingMode.TowardsZero, scalar: true);
  339. }
  340. else
  341. {
  342. EmitFcvtz(context, signed: false, scalar: true);
  343. }
  344. }
  345. public static void Fcvtzu_V(ArmEmitterContext context)
  346. {
  347. if (Optimizations.UseSse41)
  348. {
  349. EmitSse41Fcvtu(context, FPRoundingMode.TowardsZero, scalar: false);
  350. }
  351. else
  352. {
  353. EmitFcvtz(context, signed: false, scalar: false);
  354. }
  355. }
  356. public static void Fcvtzu_V_Fixed(ArmEmitterContext context)
  357. {
  358. if (Optimizations.UseSse41)
  359. {
  360. EmitSse41Fcvtu(context, FPRoundingMode.TowardsZero, scalar: false);
  361. }
  362. else
  363. {
  364. EmitFcvtz(context, signed: false, scalar: false);
  365. }
  366. }
  367. public static void Scvtf_Gp(ArmEmitterContext context)
  368. {
  369. OpCodeSimdCvt op = (OpCodeSimdCvt)context.CurrOp;
  370. Operand res = GetIntOrZR(context, op.Rn);
  371. if (op.RegisterSize == RegisterSize.Int32)
  372. {
  373. res = context.SignExtend32(OperandType.I64, res);
  374. }
  375. res = EmitFPConvert(context, res, op.Size, signed: true);
  376. context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
  377. }
  378. public static void Scvtf_Gp_Fixed(ArmEmitterContext context)
  379. {
  380. OpCodeSimdCvt op = (OpCodeSimdCvt)context.CurrOp;
  381. Operand res = GetIntOrZR(context, op.Rn);
  382. if (op.RegisterSize == RegisterSize.Int32)
  383. {
  384. res = context.SignExtend32(OperandType.I64, res);
  385. }
  386. res = EmitFPConvert(context, res, op.Size, signed: true);
  387. res = EmitI2fFBitsMul(context, res, op.FBits);
  388. context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
  389. }
  390. public static void Scvtf_S(ArmEmitterContext context)
  391. {
  392. if (Optimizations.UseSse2)
  393. {
  394. EmitSse2Scvtf(context, scalar: true);
  395. }
  396. else
  397. {
  398. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  399. int sizeF = op.Size & 1;
  400. Operand res = EmitVectorLongExtract(context, op.Rn, 0, sizeF + 2);
  401. res = EmitFPConvert(context, res, op.Size, signed: true);
  402. context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
  403. }
  404. }
  405. public static void Scvtf_V(ArmEmitterContext context)
  406. {
  407. if (Optimizations.UseSse2)
  408. {
  409. EmitSse2Scvtf(context, scalar: false);
  410. }
  411. else
  412. {
  413. EmitVectorCvtf(context, signed: true);
  414. }
  415. }
  416. public static void Scvtf_V_Fixed(ArmEmitterContext context)
  417. {
  418. if (Optimizations.UseSse2)
  419. {
  420. EmitSse2Scvtf(context, scalar: false);
  421. }
  422. else
  423. {
  424. EmitVectorCvtf(context, signed: true);
  425. }
  426. }
  427. public static void Ucvtf_Gp(ArmEmitterContext context)
  428. {
  429. OpCodeSimdCvt op = (OpCodeSimdCvt)context.CurrOp;
  430. Operand res = GetIntOrZR(context, op.Rn);
  431. res = EmitFPConvert(context, res, op.Size, signed: false);
  432. context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
  433. }
  434. public static void Ucvtf_Gp_Fixed(ArmEmitterContext context)
  435. {
  436. OpCodeSimdCvt op = (OpCodeSimdCvt)context.CurrOp;
  437. Operand res = GetIntOrZR(context, op.Rn);
  438. res = EmitFPConvert(context, res, op.Size, signed: false);
  439. res = EmitI2fFBitsMul(context, res, op.FBits);
  440. context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
  441. }
  442. public static void Ucvtf_S(ArmEmitterContext context)
  443. {
  444. if (Optimizations.UseSse2)
  445. {
  446. EmitSse2Ucvtf(context, scalar: true);
  447. }
  448. else
  449. {
  450. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  451. int sizeF = op.Size & 1;
  452. Operand ne = EmitVectorLongExtract(context, op.Rn, 0, sizeF + 2);
  453. Operand res = EmitFPConvert(context, ne, sizeF, signed: false);
  454. context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
  455. }
  456. }
  457. public static void Ucvtf_V(ArmEmitterContext context)
  458. {
  459. if (Optimizations.UseSse2)
  460. {
  461. EmitSse2Ucvtf(context, scalar: false);
  462. }
  463. else
  464. {
  465. EmitVectorCvtf(context, signed: false);
  466. }
  467. }
  468. public static void Ucvtf_V_Fixed(ArmEmitterContext context)
  469. {
  470. if (Optimizations.UseSse2)
  471. {
  472. EmitSse2Ucvtf(context, scalar: false);
  473. }
  474. else
  475. {
  476. EmitVectorCvtf(context, signed: false);
  477. }
  478. }
// Shared fallback for FCVTNS/FCVTNU: rounds each FP element to nearest
// (ties to even), then saturates it to a signed/unsigned integer of the
// same element width via the software-fallback helpers.
private static void EmitFcvtn(ArmEmitterContext context, bool signed, bool scalar)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;

    Operand res = context.VectorZero();

    Operand n = GetVec(op.Rn);

    int sizeF = op.Size & 1; // 0: FP32 elements, 1: FP64 elements.
    int sizeI = sizeF + 2;   // Integer element size log2 (2: 32-bit, 3: 64-bit).

    OperandType type = sizeF == 0 ? OperandType.FP32 : OperandType.FP64;

    int elems = !scalar ? op.GetBytesCount() >> sizeI : 1;

    for (int index = 0; index < elems; index++)
    {
        Operand ne = context.VectorExtract(type, n, index);

        // Round to nearest even before the saturating truncation.
        Operand e = EmitRoundMathCall(context, MidpointRounding.ToEven, ne);

        if (sizeF == 0)
        {
            Delegate dlg = signed
                ? (Delegate)new _S32_F32(SoftFallback.SatF32ToS32)
                : (Delegate)new _U32_F32(SoftFallback.SatF32ToU32);

            e = context.Call(dlg, e);

            e = context.ZeroExtend32(OperandType.I64, e);
        }
        else /* if (sizeF == 1) */
        {
            Delegate dlg = signed
                ? (Delegate)new _S64_F64(SoftFallback.SatF64ToS64)
                : (Delegate)new _U64_F64(SoftFallback.SatF64ToU64);

            e = context.Call(dlg, e);
        }

        res = EmitVectorInsert(context, res, e, index, sizeI);
    }

    context.Copy(GetVec(op.Rd), res);
}
// Shared fallback for FCVTZS/FCVTZU (including the fixed-point forms):
// optionally scales each FP element by 2^fBits, then saturates it to a
// signed/unsigned integer (truncation is the conversion's own rounding).
private static void EmitFcvtz(ArmEmitterContext context, bool signed, bool scalar)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;

    Operand res = context.VectorZero();

    Operand n = GetVec(op.Rn);

    int sizeF = op.Size & 1; // 0: FP32 elements, 1: FP64 elements.
    int sizeI = sizeF + 2;   // Integer element size log2 (2: 32-bit, 3: 64-bit).

    OperandType type = sizeF == 0 ? OperandType.FP32 : OperandType.FP64;

    // Non-zero only for the fixed-point encodings (OpCodeSimdShImm).
    int fBits = GetFBits(context);

    int elems = !scalar ? op.GetBytesCount() >> sizeI : 1;

    for (int index = 0; index < elems; index++)
    {
        Operand ne = context.VectorExtract(type, n, index);

        // Multiply by 2^fBits so the integer result carries the fraction bits.
        Operand e = EmitF2iFBitsMul(context, ne, fBits);

        if (sizeF == 0)
        {
            Delegate dlg = signed
                ? (Delegate)new _S32_F32(SoftFallback.SatF32ToS32)
                : (Delegate)new _U32_F32(SoftFallback.SatF32ToU32);

            e = context.Call(dlg, e);

            e = context.ZeroExtend32(OperandType.I64, e);
        }
        else /* if (sizeF == 1) */
        {
            Delegate dlg = signed
                ? (Delegate)new _S64_F64(SoftFallback.SatF64ToS64)
                : (Delegate)new _U64_F64(SoftFallback.SatF64ToU64);

            e = context.Call(dlg, e);
        }

        res = EmitVectorInsert(context, res, e, index, sizeI);
    }

    context.Copy(GetVec(op.Rd), res);
}
  544. private static void EmitFcvt_s_Gp(ArmEmitterContext context, Func1I emit)
  545. {
  546. EmitFcvt___Gp(context, emit, signed: true);
  547. }
  548. private static void EmitFcvt_u_Gp(ArmEmitterContext context, Func1I emit)
  549. {
  550. EmitFcvt___Gp(context, emit, signed: false);
  551. }
  552. private static void EmitFcvt___Gp(ArmEmitterContext context, Func1I emit, bool signed)
  553. {
  554. OpCodeSimdCvt op = (OpCodeSimdCvt)context.CurrOp;
  555. OperandType type = op.Size == 0 ? OperandType.FP32 : OperandType.FP64;
  556. Operand ne = context.VectorExtract(type, GetVec(op.Rn), 0);
  557. Operand res = signed
  558. ? EmitScalarFcvts(context, emit(ne), 0)
  559. : EmitScalarFcvtu(context, emit(ne), 0);
  560. SetIntOrZR(context, op.Rd, res);
  561. }
  562. private static void EmitFcvtzs_Gp_Fixed(ArmEmitterContext context)
  563. {
  564. EmitFcvtz__Gp_Fixed(context, signed: true);
  565. }
  566. private static void EmitFcvtzu_Gp_Fixed(ArmEmitterContext context)
  567. {
  568. EmitFcvtz__Gp_Fixed(context, signed: false);
  569. }
  570. private static void EmitFcvtz__Gp_Fixed(ArmEmitterContext context, bool signed)
  571. {
  572. OpCodeSimdCvt op = (OpCodeSimdCvt)context.CurrOp;
  573. OperandType type = op.Size == 0 ? OperandType.FP32 : OperandType.FP64;
  574. Operand ne = context.VectorExtract(type, GetVec(op.Rn), 0);
  575. Operand res = signed
  576. ? EmitScalarFcvts(context, ne, op.FBits)
  577. : EmitScalarFcvtu(context, ne, op.FBits);
  578. SetIntOrZR(context, op.Rd, res);
  579. }
// Shared fallback for SCVTF/UCVTF (vector, including fixed-point forms):
// converts each integer element to FP, then divides by 2^fBits when the
// encoding carries fraction bits.
private static void EmitVectorCvtf(ArmEmitterContext context, bool signed)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;

    Operand res = context.VectorZero();

    int sizeF = op.Size & 1; // 0: FP32 elements, 1: FP64 elements.
    int sizeI = sizeF + 2;   // Integer element size log2 (2: 32-bit, 3: 64-bit).

    // Non-zero only for the fixed-point encodings (OpCodeSimdShImm).
    int fBits = GetFBits(context);

    int elems = op.GetBytesCount() >> sizeI;

    for (int index = 0; index < elems; index++)
    {
        Operand ne = EmitVectorLongExtract(context, op.Rn, index, sizeI);

        Operand e = EmitFPConvert(context, ne, sizeF, signed);

        // Divide by 2^fBits to place the binary point.
        e = EmitI2fFBitsMul(context, e, fBits);

        res = context.VectorInsert(res, e, index);
    }

    context.Copy(GetVec(op.Rd), res);
}
  597. private static int GetFBits(ArmEmitterContext context)
  598. {
  599. if (context.CurrOp is OpCodeSimdShImm op)
  600. {
  601. return GetImmShr(op);
  602. }
  603. return 0;
  604. }
  605. private static Operand EmitFPConvert(ArmEmitterContext context, Operand value, int size, bool signed)
  606. {
  607. Debug.Assert(value.Type == OperandType.I32 || value.Type == OperandType.I64);
  608. Debug.Assert((uint)size < 2);
  609. OperandType type = size == 0 ? OperandType.FP32 : OperandType.FP64;
  610. if (signed)
  611. {
  612. return context.ConvertToFP(type, value);
  613. }
  614. else
  615. {
  616. return context.ConvertToFPUI(type, value);
  617. }
  618. }
  619. private static Operand EmitScalarFcvts(ArmEmitterContext context, Operand value, int fBits)
  620. {
  621. Debug.Assert(value.Type == OperandType.FP32 || value.Type == OperandType.FP64);
  622. value = EmitF2iFBitsMul(context, value, fBits);
  623. if (context.CurrOp.RegisterSize == RegisterSize.Int32)
  624. {
  625. Delegate dlg = value.Type == OperandType.FP32
  626. ? (Delegate)new _S32_F32(SoftFallback.SatF32ToS32)
  627. : (Delegate)new _S32_F64(SoftFallback.SatF64ToS32);
  628. return context.Call(dlg, value);
  629. }
  630. else
  631. {
  632. Delegate dlg = value.Type == OperandType.FP32
  633. ? (Delegate)new _S64_F32(SoftFallback.SatF32ToS64)
  634. : (Delegate)new _S64_F64(SoftFallback.SatF64ToS64);
  635. return context.Call(dlg, value);
  636. }
  637. }
  638. private static Operand EmitScalarFcvtu(ArmEmitterContext context, Operand value, int fBits)
  639. {
  640. Debug.Assert(value.Type == OperandType.FP32 || value.Type == OperandType.FP64);
  641. value = EmitF2iFBitsMul(context, value, fBits);
  642. if (context.CurrOp.RegisterSize == RegisterSize.Int32)
  643. {
  644. Delegate dlg = value.Type == OperandType.FP32
  645. ? (Delegate)new _U32_F32(SoftFallback.SatF32ToU32)
  646. : (Delegate)new _U32_F64(SoftFallback.SatF64ToU32);
  647. return context.Call(dlg, value);
  648. }
  649. else
  650. {
  651. Delegate dlg = value.Type == OperandType.FP32
  652. ? (Delegate)new _U64_F32(SoftFallback.SatF32ToU64)
  653. : (Delegate)new _U64_F64(SoftFallback.SatF64ToU64);
  654. return context.Call(dlg, value);
  655. }
  656. }
  657. private static Operand EmitF2iFBitsMul(ArmEmitterContext context, Operand value, int fBits)
  658. {
  659. Debug.Assert(value.Type == OperandType.FP32 || value.Type == OperandType.FP64);
  660. if (fBits == 0)
  661. {
  662. return value;
  663. }
  664. if (value.Type == OperandType.FP32)
  665. {
  666. return context.Multiply(value, ConstF(MathF.Pow(2f, fBits)));
  667. }
  668. else /* if (value.Type == OperandType.FP64) */
  669. {
  670. return context.Multiply(value, ConstF(Math.Pow(2d, fBits)));
  671. }
  672. }
  673. private static Operand EmitI2fFBitsMul(ArmEmitterContext context, Operand value, int fBits)
  674. {
  675. Debug.Assert(value.Type == OperandType.FP32 || value.Type == OperandType.FP64);
  676. if (fBits == 0)
  677. {
  678. return value;
  679. }
  680. if (value.Type == OperandType.FP32)
  681. {
  682. return context.Multiply(value, ConstF(1f / MathF.Pow(2f, fBits)));
  683. }
  684. else /* if (value.Type == OperandType.FP64) */
  685. {
  686. return context.Multiply(value, ConstF(1d / Math.Pow(2d, fBits)));
  687. }
  688. }
// Converts the double lane(s) of a 128-bit vector to 64-bit integers using
// CVTSD2SI (lower lane; both lanes when !scalar, with MOVHLPS used to reach
// the upper lane). Rounding follows CVTSD2SI's behavior.
private static Operand EmitSse2CvtDoubleToInt64OpF(ArmEmitterContext context, Operand opF, bool scalar)
{
    Debug.Assert(opF.Type == OperandType.V128);

    Operand longL = context.AddIntrinsicLong (Intrinsic.X86Cvtsd2si, opF); // opFL
    Operand res   = context.VectorCreateScalar(longL);

    if (!scalar)
    {
        // Bring the upper double down, convert it, then recombine both results.
        Operand opFH  = context.AddIntrinsic     (Intrinsic.X86Movhlps,  res, opF); // res doesn't matter.
        Operand longH = context.AddIntrinsicLong (Intrinsic.X86Cvtsd2si, opFH);
        Operand resH  = context.VectorCreateScalar(longH);
                res   = context.AddIntrinsic     (Intrinsic.X86Movlhps,  res, resH);
    }

    return res;
}
// Converts the 64-bit integer lane(s) of a 128-bit vector to doubles using
// CVTSI2SD (lower lane; both lanes when !scalar, with MOVHLPS used to reach
// the upper lane).
private static Operand EmitSse2CvtInt64ToDoubleOp(ArmEmitterContext context, Operand op, bool scalar)
{
    Debug.Assert(op.Type == OperandType.V128);

    // Extract the low 64-bit lane to a GP value, then convert it to double.
    Operand longL = context.AddIntrinsicLong(Intrinsic.X86Cvtsi2si, op); // opL
    Operand res   = context.AddIntrinsic    (Intrinsic.X86Cvtsi2sd, context.VectorZero(), longL);

    if (!scalar)
    {
        // Same dance for the upper lane, then recombine both doubles.
        Operand opH   = context.AddIntrinsic    (Intrinsic.X86Movhlps,  res, op); // res doesn't matter.
        Operand longH = context.AddIntrinsicLong(Intrinsic.X86Cvtsi2si, opH);
        Operand resH  = context.AddIntrinsic    (Intrinsic.X86Cvtsi2sd, res, longH); // res doesn't matter.
                res   = context.AddIntrinsic    (Intrinsic.X86Movlhps,  res, resH);
    }

    return res;
}
// SSE2 path for SCVTF (scalar and vector, plain and fixed-point): converts
// signed integer lanes to FP, then applies the fixed-point 1/2^fBits scale
// as a multiply by a bit-pattern-constructed FP constant.
private static void EmitSse2Scvtf(ArmEmitterContext context, bool scalar)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;

    Operand n = GetVec(op.Rn);

    // sizeF == ((OpCodeSimdShImm)op).Size - 2
    int sizeF = op.Size & 1;

    if (sizeF == 0)
    {
        // 32-bit signed integers -> singles.
        Operand res = context.AddIntrinsic(Intrinsic.X86Cvtdq2ps, n);

        if (op is OpCodeSimdShImm fixedOp)
        {
            int fBits = GetImmShr(fixedOp);

            // Build 1/2^fBits directly as an IEEE-754 single bit pattern by
            // subtracting fBits from the exponent field of 1.0f.
            // BitConverter.Int32BitsToSingle(fpScaled) == 1f / MathF.Pow(2f, fBits)
            int fpScaled = 0x3F800000 - fBits * 0x800000;

            Operand fpScaledMask = scalar
                ? X86GetScalar     (context, fpScaled)
                : X86GetAllElements(context, fpScaled);

            res = context.AddIntrinsic(Intrinsic.X86Mulps, res, fpScaledMask);
        }

        if (scalar)
        {
            res = context.VectorZeroUpper96(res);
        }
        else if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }

        context.Copy(GetVec(op.Rd), res);
    }
    else /* if (sizeF == 1) */
    {
        // 64-bit signed integers -> doubles.
        Operand res = EmitSse2CvtInt64ToDoubleOp(context, n, scalar);

        if (op is OpCodeSimdShImm fixedOp)
        {
            int fBits = GetImmShr(fixedOp);

            // Same exponent-field trick for the double bit pattern.
            // BitConverter.Int64BitsToDouble(fpScaled) == 1d / Math.Pow(2d, fBits)
            long fpScaled = 0x3FF0000000000000L - fBits * 0x10000000000000L;

            Operand fpScaledMask = scalar
                ? X86GetScalar     (context, fpScaled)
                : X86GetAllElements(context, fpScaled);

            res = context.AddIntrinsic(Intrinsic.X86Mulpd, res, fpScaledMask);
        }

        if (scalar)
        {
            res = context.VectorZeroUpper64(res);
        }

        context.Copy(GetVec(op.Rd), res);
    }
}
  766. private static void EmitSse2Ucvtf(ArmEmitterContext context, bool scalar)
  767. {
  768. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  769. Operand n = GetVec(op.Rn);
  770. // sizeF == ((OpCodeSimdShImm)op).Size - 2
  771. int sizeF = op.Size & 1;
  772. if (sizeF == 0)
  773. {
  774. Operand mask = scalar // 65536.000f (1 << 16)
  775. ? X86GetScalar (context, 0x47800000)
  776. : X86GetAllElements(context, 0x47800000);
  777. Operand res = context.AddIntrinsic(Intrinsic.X86Psrld, n, Const(16));
  778. res = context.AddIntrinsic(Intrinsic.X86Cvtdq2ps, res);
  779. res = context.AddIntrinsic(Intrinsic.X86Mulps, res, mask);
  780. Operand res2 = context.AddIntrinsic(Intrinsic.X86Pslld, n, Const(16));
  781. res2 = context.AddIntrinsic(Intrinsic.X86Psrld, res2, Const(16));
  782. res2 = context.AddIntrinsic(Intrinsic.X86Cvtdq2ps, res2);
  783. res = context.AddIntrinsic(Intrinsic.X86Addps, res, res2);
  784. if (op is OpCodeSimdShImm fixedOp)
  785. {
  786. int fBits = GetImmShr(fixedOp);
  787. // BitConverter.Int32BitsToSingle(fpScaled) == 1f / MathF.Pow(2f, fBits)
  788. int fpScaled = 0x3F800000 - fBits * 0x800000;
  789. Operand fpScaledMask = scalar
  790. ? X86GetScalar (context, fpScaled)
  791. : X86GetAllElements(context, fpScaled);
  792. res = context.AddIntrinsic(Intrinsic.X86Mulps, res, fpScaledMask);
  793. }
  794. if (scalar)
  795. {
  796. res = context.VectorZeroUpper96(res);
  797. }
  798. else if (op.RegisterSize == RegisterSize.Simd64)
  799. {
  800. res = context.VectorZeroUpper64(res);
  801. }
  802. context.Copy(GetVec(op.Rd), res);
  803. }
  804. else /* if (sizeF == 1) */
  805. {
  806. Operand mask = scalar // 4294967296.0000000d (1L << 32)
  807. ? X86GetScalar (context, 0x41F0000000000000L)
  808. : X86GetAllElements(context, 0x41F0000000000000L);
  809. Operand res = context.AddIntrinsic (Intrinsic.X86Psrlq, n, Const(32));
  810. res = EmitSse2CvtInt64ToDoubleOp(context, res, scalar);
  811. res = context.AddIntrinsic (Intrinsic.X86Mulpd, res, mask);
  812. Operand res2 = context.AddIntrinsic (Intrinsic.X86Psllq, n, Const(32));
  813. res2 = context.AddIntrinsic (Intrinsic.X86Psrlq, res2, Const(32));
  814. res2 = EmitSse2CvtInt64ToDoubleOp(context, res2, scalar);
  815. res = context.AddIntrinsic(Intrinsic.X86Addpd, res, res2);
  816. if (op is OpCodeSimdShImm fixedOp)
  817. {
  818. int fBits = GetImmShr(fixedOp);
  819. // BitConverter.Int64BitsToDouble(fpScaled) == 1d / Math.Pow(2d, fBits)
  820. long fpScaled = 0x3FF0000000000000L - fBits * 0x10000000000000L;
  821. Operand fpScaledMask = scalar
  822. ? X86GetScalar (context, fpScaled)
  823. : X86GetAllElements(context, fpScaled);
  824. res = context.AddIntrinsic(Intrinsic.X86Mulpd, res, fpScaledMask);
  825. }
  826. if (scalar)
  827. {
  828. res = context.VectorZeroUpper64(res);
  829. }
  830. context.Copy(GetVec(op.Rd), res);
  831. }
  832. }
  833. private static void EmitSse41Fcvts(ArmEmitterContext context, FPRoundingMode roundMode, bool scalar)
  834. {
  835. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  836. Operand n = GetVec(op.Rn);
  837. // sizeF == ((OpCodeSimdShImm)op).Size - 2
  838. int sizeF = op.Size & 1;
  839. if (sizeF == 0)
  840. {
  841. Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmpps, n, n, Const((int)CmpCondition.OrderedQ));
  842. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
  843. if (op is OpCodeSimdShImm fixedOp)
  844. {
  845. int fBits = GetImmShr(fixedOp);
  846. // BitConverter.Int32BitsToSingle(fpScaled) == MathF.Pow(2f, fBits)
  847. int fpScaled = 0x3F800000 + fBits * 0x800000;
  848. Operand fpScaledMask = scalar
  849. ? X86GetScalar (context, fpScaled)
  850. : X86GetAllElements(context, fpScaled);
  851. nRes = context.AddIntrinsic(Intrinsic.X86Mulps, nRes, fpScaledMask);
  852. }
  853. nRes = context.AddIntrinsic(Intrinsic.X86Roundps, nRes, Const(X86GetRoundControl(roundMode)));
  854. Operand nInt = context.AddIntrinsic(Intrinsic.X86Cvtps2dq, nRes);
  855. Operand fpMaxValMask = scalar // 2.14748365E9f (2147483648)
  856. ? X86GetScalar (context, 0x4F000000)
  857. : X86GetAllElements(context, 0x4F000000);
  858. nRes = context.AddIntrinsic(Intrinsic.X86Cmpps, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));
  859. Operand dRes = context.AddIntrinsic(Intrinsic.X86Pxor, nInt, nRes);
  860. if (scalar)
  861. {
  862. dRes = context.VectorZeroUpper96(dRes);
  863. }
  864. else if (op.RegisterSize == RegisterSize.Simd64)
  865. {
  866. dRes = context.VectorZeroUpper64(dRes);
  867. }
  868. context.Copy(GetVec(op.Rd), dRes);
  869. }
  870. else /* if (sizeF == 1) */
  871. {
  872. Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmppd, n, n, Const((int)CmpCondition.OrderedQ));
  873. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
  874. if (op is OpCodeSimdShImm fixedOp)
  875. {
  876. int fBits = GetImmShr(fixedOp);
  877. // BitConverter.Int64BitsToDouble(fpScaled) == Math.Pow(2d, fBits)
  878. long fpScaled = 0x3FF0000000000000L + fBits * 0x10000000000000L;
  879. Operand fpScaledMask = scalar
  880. ? X86GetScalar (context, fpScaled)
  881. : X86GetAllElements(context, fpScaled);
  882. nRes = context.AddIntrinsic(Intrinsic.X86Mulpd, nRes, fpScaledMask);
  883. }
  884. nRes = context.AddIntrinsic(Intrinsic.X86Roundpd, nRes, Const(X86GetRoundControl(roundMode)));
  885. Operand nLong = EmitSse2CvtDoubleToInt64OpF(context, nRes, scalar);
  886. Operand fpMaxValMask = scalar // 9.2233720368547760E18d (9223372036854775808)
  887. ? X86GetScalar (context, 0x43E0000000000000L)
  888. : X86GetAllElements(context, 0x43E0000000000000L);
  889. nRes = context.AddIntrinsic(Intrinsic.X86Cmppd, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));
  890. Operand dRes = context.AddIntrinsic(Intrinsic.X86Pxor, nLong, nRes);
  891. if (scalar)
  892. {
  893. dRes = context.VectorZeroUpper64(dRes);
  894. }
  895. context.Copy(GetVec(op.Rd), dRes);
  896. }
  897. }
// Emits a vector FP -> unsigned integer conversion with saturation, built
// from signed SSE conversions (x86 has no packed float-to-uint instruction
// before AVX-512).
private static void EmitSse41Fcvtu(ArmEmitterContext context, FPRoundingMode roundMode, bool scalar)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;

    Operand n = GetVec(op.Rn);

    // sizeF == ((OpCodeSimdShImm)op).Size - 2
    int sizeF = op.Size & 1;

    if (sizeF == 0)
    {
        // (n cmp_ordered n) is all-ones for non-NaN lanes; the AND squashes
        // NaN lanes to +0 so they convert to 0 as the architecture requires.
        Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmpps, n, n, Const((int)CmpCondition.OrderedQ));
        nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);

        if (op is OpCodeSimdShImm fixedOp)
        {
            // Fixed-point destination: multiply by 2^fBits before rounding.
            int fBits = GetImmShr(fixedOp);

            // BitConverter.Int32BitsToSingle(fpScaled) == MathF.Pow(2f, fBits)
            int fpScaled = 0x3F800000 + fBits * 0x800000;

            Operand fpScaledMask = scalar
                ? X86GetScalar (context, fpScaled)
                : X86GetAllElements(context, fpScaled);

            nRes = context.AddIntrinsic(Intrinsic.X86Mulps, nRes, fpScaledMask);
        }

        nRes = context.AddIntrinsic(Intrinsic.X86Roundps, nRes, Const(X86GetRoundControl(roundMode)));

        // Clamp negative lanes to zero (unsigned results saturate at 0).
        Operand zero = context.VectorZero();

        Operand nCmp = context.AddIntrinsic(Intrinsic.X86Cmpps, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
        nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);

        Operand fpMaxValMask = scalar // 2.14748365E9f (2147483648)
            ? X86GetScalar (context, 0x4F000000)
            : X86GetAllElements(context, 0x4F000000);

        // First signed conversion is exact for inputs < 2^31; inputs >= 2^31
        // yield the x86 "integer indefinite" value 0x80000000.
        Operand nInt = context.AddIntrinsic(Intrinsic.X86Cvtps2dq, nRes);

        // Second pass converts (input - 2^31), clamped to zero, to cover the
        // [2^31, 2^32) range; adding nInt back reconstructs the full value.
        nRes = context.AddIntrinsic(Intrinsic.X86Subps, nRes, fpMaxValMask);

        nCmp = context.AddIntrinsic(Intrinsic.X86Cmpps, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
        nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);

        Operand nInt2 = context.AddIntrinsic(Intrinsic.X86Cvtps2dq, nRes);

        // Lanes still >= 2^31 after the subtraction exceed the unsigned range:
        // the >= mask XORs 0x80000000 into 0x7FFFFFFF so the final add
        // saturates those lanes to 0xFFFFFFFF.
        nRes = context.AddIntrinsic(Intrinsic.X86Cmpps, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));

        Operand dRes = context.AddIntrinsic(Intrinsic.X86Pxor, nInt2, nRes);
        dRes = context.AddIntrinsic(Intrinsic.X86Paddd, dRes, nInt);

        if (scalar)
        {
            dRes = context.VectorZeroUpper96(dRes);
        }
        else if (op.RegisterSize == RegisterSize.Simd64)
        {
            dRes = context.VectorZeroUpper64(dRes);
        }

        context.Copy(GetVec(op.Rd), dRes);
    }
    else /* if (sizeF == 1) */
    {
        // Double precision variant: identical two-pass algorithm with 2^63 as
        // the pivot value.
        Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmppd, n, n, Const((int)CmpCondition.OrderedQ));
        nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);

        if (op is OpCodeSimdShImm fixedOp)
        {
            // Fixed-point destination: multiply by 2^fBits before rounding.
            int fBits = GetImmShr(fixedOp);

            // BitConverter.Int64BitsToDouble(fpScaled) == Math.Pow(2d, fBits)
            long fpScaled = 0x3FF0000000000000L + fBits * 0x10000000000000L;

            Operand fpScaledMask = scalar
                ? X86GetScalar (context, fpScaled)
                : X86GetAllElements(context, fpScaled);

            nRes = context.AddIntrinsic(Intrinsic.X86Mulpd, nRes, fpScaledMask);
        }

        nRes = context.AddIntrinsic(Intrinsic.X86Roundpd, nRes, Const(X86GetRoundControl(roundMode)));

        // Clamp negative lanes to zero.
        Operand zero = context.VectorZero();

        Operand nCmp = context.AddIntrinsic(Intrinsic.X86Cmppd, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
        nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);

        Operand fpMaxValMask = scalar // 9.2233720368547760E18d (9223372036854775808)
            ? X86GetScalar (context, 0x43E0000000000000L)
            : X86GetAllElements(context, 0x43E0000000000000L);

        // First pass: exact for inputs < 2^63.
        Operand nLong = EmitSse2CvtDoubleToInt64OpF(context, nRes, scalar);

        // Second pass on (input - 2^63), clamped to zero.
        nRes = context.AddIntrinsic(Intrinsic.X86Subpd, nRes, fpMaxValMask);

        nCmp = context.AddIntrinsic(Intrinsic.X86Cmppd, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
        nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);

        Operand nLong2 = EmitSse2CvtDoubleToInt64OpF(context, nRes, scalar);

        // Saturate lanes that still overflow, as in the single case.
        nRes = context.AddIntrinsic(Intrinsic.X86Cmppd, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));

        Operand dRes = context.AddIntrinsic(Intrinsic.X86Pxor, nLong2, nRes);
        dRes = context.AddIntrinsic(Intrinsic.X86Paddq, dRes, nLong);

        if (scalar)
        {
            dRes = context.VectorZeroUpper64(dRes);
        }

        context.Copy(GetVec(op.Rd), dRes);
    }
}
  979. private static void EmitSse41Fcvts_Gp(ArmEmitterContext context, FPRoundingMode roundMode, bool isFixed)
  980. {
  981. OpCodeSimdCvt op = (OpCodeSimdCvt)context.CurrOp;
  982. Operand n = GetVec(op.Rn);
  983. if (op.Size == 0)
  984. {
  985. Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmpss, n, n, Const((int)CmpCondition.OrderedQ));
  986. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
  987. if (isFixed)
  988. {
  989. // BitConverter.Int32BitsToSingle(fpScaled) == MathF.Pow(2f, op.FBits)
  990. int fpScaled = 0x3F800000 + op.FBits * 0x800000;
  991. Operand fpScaledMask = X86GetScalar(context, fpScaled);
  992. nRes = context.AddIntrinsic(Intrinsic.X86Mulss, nRes, fpScaledMask);
  993. }
  994. nRes = context.AddIntrinsic(Intrinsic.X86Roundss, nRes, Const(X86GetRoundControl(roundMode)));
  995. Operand nIntOrLong = op.RegisterSize == RegisterSize.Int32
  996. ? context.AddIntrinsicInt (Intrinsic.X86Cvtss2si, nRes)
  997. : context.AddIntrinsicLong(Intrinsic.X86Cvtss2si, nRes);
  998. int fpMaxVal = op.RegisterSize == RegisterSize.Int32
  999. ? 0x4F000000 // 2.14748365E9f (2147483648)
  1000. : 0x5F000000; // 9.223372E18f (9223372036854775808)
  1001. Operand fpMaxValMask = X86GetScalar(context, fpMaxVal);
  1002. nRes = context.AddIntrinsic(Intrinsic.X86Cmpss, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));
  1003. Operand nInt = context.AddIntrinsicInt(Intrinsic.X86Cvtsi2si, nRes);
  1004. if (op.RegisterSize == RegisterSize.Int64)
  1005. {
  1006. nInt = context.SignExtend32(OperandType.I64, nInt);
  1007. }
  1008. Operand dRes = context.BitwiseExclusiveOr(nIntOrLong, nInt);
  1009. SetIntOrZR(context, op.Rd, dRes);
  1010. }
  1011. else /* if (op.Size == 1) */
  1012. {
  1013. Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmpsd, n, n, Const((int)CmpCondition.OrderedQ));
  1014. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
  1015. if (isFixed)
  1016. {
  1017. // BitConverter.Int64BitsToDouble(fpScaled) == Math.Pow(2d, op.FBits)
  1018. long fpScaled = 0x3FF0000000000000L + op.FBits * 0x10000000000000L;
  1019. Operand fpScaledMask = X86GetScalar(context, fpScaled);
  1020. nRes = context.AddIntrinsic(Intrinsic.X86Mulsd, nRes, fpScaledMask);
  1021. }
  1022. nRes = context.AddIntrinsic(Intrinsic.X86Roundsd, nRes, Const(X86GetRoundControl(roundMode)));
  1023. Operand nIntOrLong = op.RegisterSize == RegisterSize.Int32
  1024. ? context.AddIntrinsicInt (Intrinsic.X86Cvtsd2si, nRes)
  1025. : context.AddIntrinsicLong(Intrinsic.X86Cvtsd2si, nRes);
  1026. long fpMaxVal = op.RegisterSize == RegisterSize.Int32
  1027. ? 0x41E0000000000000L // 2147483648.0000000d (2147483648)
  1028. : 0x43E0000000000000L; // 9.2233720368547760E18d (9223372036854775808)
  1029. Operand fpMaxValMask = X86GetScalar(context, fpMaxVal);
  1030. nRes = context.AddIntrinsic(Intrinsic.X86Cmpsd, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));
  1031. Operand nLong = context.AddIntrinsicLong(Intrinsic.X86Cvtsi2si, nRes);
  1032. if (op.RegisterSize == RegisterSize.Int32)
  1033. {
  1034. nLong = context.ConvertI64ToI32(nLong);
  1035. }
  1036. Operand dRes = context.BitwiseExclusiveOr(nIntOrLong, nLong);
  1037. SetIntOrZR(context, op.Rd, dRes);
  1038. }
  1039. }
// Emits a scalar FP -> unsigned general-purpose register conversion with
// saturation, using only signed cvt instructions (two-pass pivot algorithm,
// same scheme as EmitSse41Fcvtu but writing an integer register).
private static void EmitSse41Fcvtu_Gp(ArmEmitterContext context, FPRoundingMode roundMode, bool isFixed)
{
    OpCodeSimdCvt op = (OpCodeSimdCvt)context.CurrOp;

    Operand n = GetVec(op.Rn);

    if (op.Size == 0)
    {
        // (n cmp_ordered n) is all-ones unless n is NaN; the AND squashes a
        // NaN input to +0 so it converts to 0.
        Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmpss, n, n, Const((int)CmpCondition.OrderedQ));
        nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);

        if (isFixed)
        {
            // Fixed-point destination: multiply by 2^FBits before rounding.
            // BitConverter.Int32BitsToSingle(fpScaled) == MathF.Pow(2f, op.FBits)
            int fpScaled = 0x3F800000 + op.FBits * 0x800000;

            Operand fpScaledMask = X86GetScalar(context, fpScaled);

            nRes = context.AddIntrinsic(Intrinsic.X86Mulss, nRes, fpScaledMask);
        }

        nRes = context.AddIntrinsic(Intrinsic.X86Roundss, nRes, Const(X86GetRoundControl(roundMode)));

        // Clamp a negative input to zero (unsigned result saturates at 0).
        Operand zero = context.VectorZero();

        Operand nCmp = context.AddIntrinsic(Intrinsic.X86Cmpss, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
        nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);

        // Pivot value: 2^31 for a 32-bit destination, 2^63 for 64-bit.
        int fpMaxVal = op.RegisterSize == RegisterSize.Int32
            ? 0x4F000000 // 2.14748365E9f (2147483648)
            : 0x5F000000; // 9.223372E18f (9223372036854775808)

        Operand fpMaxValMask = X86GetScalar(context, fpMaxVal);

        // First signed conversion: exact below the pivot; at or above it the
        // x86 cvt yields the "integer indefinite" value (sign bit only).
        Operand nIntOrLong = op.RegisterSize == RegisterSize.Int32
            ? context.AddIntrinsicInt (Intrinsic.X86Cvtss2si, nRes)
            : context.AddIntrinsicLong(Intrinsic.X86Cvtss2si, nRes);

        // Second pass converts (input - pivot), clamped to zero, to cover the
        // [pivot, 2*pivot) range; the final add reconstructs the full value.
        nRes = context.AddIntrinsic(Intrinsic.X86Subss, nRes, fpMaxValMask);

        nCmp = context.AddIntrinsic(Intrinsic.X86Cmpss, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
        nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);

        Operand nIntOrLong2 = op.RegisterSize == RegisterSize.Int32
            ? context.AddIntrinsicInt (Intrinsic.X86Cvtss2si, nRes)
            : context.AddIntrinsicLong(Intrinsic.X86Cvtss2si, nRes);

        // If the input still reaches the pivot after the subtraction it
        // exceeds the unsigned range: XOR the >= mask in so the add below
        // saturates to all-ones.
        nRes = context.AddIntrinsic(Intrinsic.X86Cmpss, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));

        Operand nInt = context.AddIntrinsicInt(Intrinsic.X86Cvtsi2si, nRes);

        if (op.RegisterSize == RegisterSize.Int64)
        {
            nInt = context.SignExtend32(OperandType.I64, nInt);
        }

        Operand dRes = context.BitwiseExclusiveOr(nIntOrLong2, nInt);
        dRes = context.Add(dRes, nIntOrLong);

        SetIntOrZR(context, op.Rd, dRes);
    }
    else /* if (op.Size == 1) */
    {
        // Double precision source: identical algorithm.
        Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmpsd, n, n, Const((int)CmpCondition.OrderedQ));
        nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);

        if (isFixed)
        {
            // Fixed-point destination: multiply by 2^FBits before rounding.
            // BitConverter.Int64BitsToDouble(fpScaled) == Math.Pow(2d, op.FBits)
            long fpScaled = 0x3FF0000000000000L + op.FBits * 0x10000000000000L;

            Operand fpScaledMask = X86GetScalar(context, fpScaled);

            nRes = context.AddIntrinsic(Intrinsic.X86Mulsd, nRes, fpScaledMask);
        }

        nRes = context.AddIntrinsic(Intrinsic.X86Roundsd, nRes, Const(X86GetRoundControl(roundMode)));

        // Clamp a negative input to zero.
        Operand zero = context.VectorZero();

        Operand nCmp = context.AddIntrinsic(Intrinsic.X86Cmpsd, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
        nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);

        long fpMaxVal = op.RegisterSize == RegisterSize.Int32
            ? 0x41E0000000000000L // 2147483648.0000000d (2147483648)
            : 0x43E0000000000000L; // 9.2233720368547760E18d (9223372036854775808)

        Operand fpMaxValMask = X86GetScalar(context, fpMaxVal);

        // First pass: exact below the pivot.
        Operand nIntOrLong = op.RegisterSize == RegisterSize.Int32
            ? context.AddIntrinsicInt (Intrinsic.X86Cvtsd2si, nRes)
            : context.AddIntrinsicLong(Intrinsic.X86Cvtsd2si, nRes);

        // Second pass on (input - pivot), clamped to zero.
        nRes = context.AddIntrinsic(Intrinsic.X86Subsd, nRes, fpMaxValMask);

        nCmp = context.AddIntrinsic(Intrinsic.X86Cmpsd, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
        nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);

        Operand nIntOrLong2 = op.RegisterSize == RegisterSize.Int32
            ? context.AddIntrinsicInt (Intrinsic.X86Cvtsd2si, nRes)
            : context.AddIntrinsicLong(Intrinsic.X86Cvtsd2si, nRes);

        // Saturate inputs that still overflow, as in the single case.
        nRes = context.AddIntrinsic(Intrinsic.X86Cmpsd, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));

        Operand nLong = context.AddIntrinsicLong(Intrinsic.X86Cvtsi2si, nRes);

        if (op.RegisterSize == RegisterSize.Int32)
        {
            nLong = context.ConvertI64ToI32(nLong);
        }

        Operand dRes = context.BitwiseExclusiveOr(nIntOrLong2, nLong);
        dRes = context.Add(dRes, nIntOrLong);

        SetIntOrZR(context, op.Rd, dRes);
    }
}
  1121. private static Operand EmitVectorLongExtract(ArmEmitterContext context, int reg, int index, int size)
  1122. {
  1123. OperandType type = size == 3 ? OperandType.I64 : OperandType.I32;
  1124. return context.VectorExtract(type, GetVec(reg), index);
  1125. }
  1126. }
  1127. }