InstEmitSimdCvt.cs 54 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503
  1. using ARMeilleure.Decoders;
  2. using ARMeilleure.IntermediateRepresentation;
  3. using ARMeilleure.State;
  4. using ARMeilleure.Translation;
  5. using System;
  6. using System.Diagnostics;
  7. using static ARMeilleure.Instructions.InstEmitHelper;
  8. using static ARMeilleure.Instructions.InstEmitSimdHelper;
  9. using static ARMeilleure.IntermediateRepresentation.OperandHelper;
  10. namespace ARMeilleure.Instructions
  11. {
  12. using Func1I = Func<Operand, Operand>;
  13. static partial class InstEmit
  14. {
// FCVT (scalar): converts the FP scalar in Vn between precisions (single, double,
// half) as selected by the type (Size) and opc fields, writing the result to Vd
// with the remaining vector bits zeroed.
public static void Fcvt_S(ArmEmitterContext context)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;

    if (op.Size == 0 && op.Opc == 1) // Single -> Double.
    {
        if (Optimizations.UseSse2)
        {
            Operand n = GetVec(op.Rn);

            // Zero first operand so the upper bits of the result are cleared.
            Operand res = context.AddIntrinsic(Intrinsic.X86Cvtss2sd, context.VectorZero(), n);

            context.Copy(GetVec(op.Rd), res);
        }
        else
        {
            Operand ne = context.VectorExtract(OperandType.FP32, GetVec(op.Rn), 0);

            Operand res = context.ConvertToFP(OperandType.FP64, ne);

            context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
        }
    }
    else if (op.Size == 1 && op.Opc == 0) // Double -> Single.
    {
        if (Optimizations.UseSse2)
        {
            Operand n = GetVec(op.Rn);

            Operand res = context.AddIntrinsic(Intrinsic.X86Cvtsd2ss, context.VectorZero(), n);

            context.Copy(GetVec(op.Rd), res);
        }
        else
        {
            Operand ne = context.VectorExtract(OperandType.FP64, GetVec(op.Rn), 0);

            Operand res = context.ConvertToFP(OperandType.FP32, ne);

            context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
        }
    }
    else if (op.Size == 0 && op.Opc == 3) // Single -> Half.
    {
        Operand ne = context.VectorExtract(OperandType.FP32, GetVec(op.Rn), 0);

        // Half precision has no IR operand type; go through the soft-float helper
        // and store the raw 16-bit result.
        Delegate dlg = new _U16_F32(SoftFloat32_16.FPConvert);

        Operand res = context.Call(dlg, ne);

        res = context.ZeroExtend16(OperandType.I64, res);

        context.Copy(GetVec(op.Rd), EmitVectorInsert(context, context.VectorZero(), res, 0, 1));
    }
    else if (op.Size == 3 && op.Opc == 0) // Half -> Single.
    {
        Operand ne = EmitVectorExtractZx(context, op.Rn, 0, 1);

        Delegate dlg = new _F32_U16(SoftFloat16_32.FPConvert);

        Operand res = context.Call(dlg, ne);

        context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
    }
    else if (op.Size == 1 && op.Opc == 3) // Double -> Half.
    {
        throw new NotImplementedException("Double-precision to half-precision.");
    }
    else if (op.Size == 3 && op.Opc == 1) // Half -> Double.
    {
        throw new NotImplementedException("Half-precision to double-precision.");
    }
    else // Invalid encoding.
    {
        Debug.Assert(false, $"type == {op.Size} && opc == {op.Opc}");
    }
}
  76. public static void Fcvtas_Gp(ArmEmitterContext context)
  77. {
  78. EmitFcvt_s_Gp(context, (op1) => EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1));
  79. }
  80. public static void Fcvtas_S(ArmEmitterContext context)
  81. {
  82. EmitFcvt(context, (op1) => EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1), signed: true, scalar: true);
  83. }
  84. public static void Fcvtas_V(ArmEmitterContext context)
  85. {
  86. EmitFcvt(context, (op1) => EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1), signed: true, scalar: false);
  87. }
  88. public static void Fcvtau_Gp(ArmEmitterContext context)
  89. {
  90. EmitFcvt_u_Gp(context, (op1) => EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1));
  91. }
  92. public static void Fcvtau_S(ArmEmitterContext context)
  93. {
  94. EmitFcvt(context, (op1) => EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1), signed: false, scalar: true);
  95. }
  96. public static void Fcvtau_V(ArmEmitterContext context)
  97. {
  98. EmitFcvt(context, (op1) => EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1), signed: false, scalar: false);
  99. }
// FCVTL/FCVTL2: lengthens each element of the lower (or, for the "2" form, the
// upper) half of Vn — half -> single or single -> double — into full-width
// elements of Vd.
public static void Fcvtl_V(ArmEmitterContext context)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;

    int sizeF = op.Size & 1; // 0: half -> single, 1: single -> double.

    if (Optimizations.UseSse2 && sizeF == 1)
    {
        Operand n = GetVec(op.Rn);

        Operand res;

        if (op.RegisterSize == RegisterSize.Simd128)
        {
            // FCVTL2: the source elements come from the upper 64 bits.
            res = context.AddIntrinsic(Intrinsic.X86Movhlps, n, n);
        }
        else
        {
            res = n;
        }

        res = context.AddIntrinsic(Intrinsic.X86Cvtps2pd, res);

        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        Operand res = context.VectorZero();

        int elems = 4 >> sizeF;

        // Element offset into the source: the "2" form reads the high half.
        int part = op.RegisterSize == RegisterSize.Simd128 ? elems : 0;

        for (int index = 0; index < elems; index++)
        {
            if (sizeF == 0)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, part + index, 1);

                // Half precision is widened through the soft-float helper.
                Delegate dlg = new _F32_U16(SoftFloat16_32.FPConvert);

                Operand e = context.Call(dlg, ne);

                res = context.VectorInsert(res, e, index);
            }
            else /* if (sizeF == 1) */
            {
                Operand ne = context.VectorExtract(OperandType.FP32, GetVec(op.Rn), part + index);

                Operand e = context.ConvertToFP(OperandType.FP64, ne);

                res = context.VectorInsert(res, e, index);
            }
        }

        context.Copy(GetVec(op.Rd), res);
    }
}
  143. public static void Fcvtms_Gp(ArmEmitterContext context)
  144. {
  145. if (Optimizations.UseSse41)
  146. {
  147. EmitSse41Fcvts_Gp(context, FPRoundingMode.TowardsMinusInfinity, isFixed: false);
  148. }
  149. else
  150. {
  151. EmitFcvt_s_Gp(context, (op1) => EmitUnaryMathCall(context, MathF.Floor, Math.Floor, op1));
  152. }
  153. }
  154. public static void Fcvtmu_Gp(ArmEmitterContext context)
  155. {
  156. if (Optimizations.UseSse41)
  157. {
  158. EmitSse41Fcvtu_Gp(context, FPRoundingMode.TowardsMinusInfinity, isFixed: false);
  159. }
  160. else
  161. {
  162. EmitFcvt_u_Gp(context, (op1) => EmitUnaryMathCall(context, MathF.Floor, Math.Floor, op1));
  163. }
  164. }
  165. public static void Fcvtn_V(ArmEmitterContext context)
  166. {
  167. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  168. int sizeF = op.Size & 1;
  169. if (Optimizations.UseSse2 && sizeF == 1)
  170. {
  171. Operand d = GetVec(op.Rd);
  172. Operand res = context.VectorZeroUpper64(d);
  173. Operand nInt = context.AddIntrinsic(Intrinsic.X86Cvtpd2ps, GetVec(op.Rn));
  174. nInt = context.AddIntrinsic(Intrinsic.X86Movlhps, nInt, nInt);
  175. Intrinsic movInst = op.RegisterSize == RegisterSize.Simd128
  176. ? Intrinsic.X86Movlhps
  177. : Intrinsic.X86Movhlps;
  178. res = context.AddIntrinsic(movInst, res, nInt);
  179. context.Copy(d, res);
  180. }
  181. else
  182. {
  183. OperandType type = sizeF == 0 ? OperandType.FP32 : OperandType.FP64;
  184. int elems = 4 >> sizeF;
  185. int part = op.RegisterSize == RegisterSize.Simd128 ? elems : 0;
  186. Operand d = GetVec(op.Rd);
  187. Operand res = part == 0 ? context.VectorZero() : context.Copy(d);
  188. for (int index = 0; index < elems; index++)
  189. {
  190. Operand ne = context.VectorExtract(type, GetVec(op.Rn), 0);
  191. if (sizeF == 0)
  192. {
  193. Delegate dlg = new _U16_F32(SoftFloat32_16.FPConvert);
  194. Operand e = context.Call(dlg, ne);
  195. e = context.ZeroExtend16(OperandType.I64, e);
  196. res = EmitVectorInsert(context, res, e, part + index, 1);
  197. }
  198. else /* if (sizeF == 1) */
  199. {
  200. Operand e = context.ConvertToFP(OperandType.FP32, ne);
  201. res = context.VectorInsert(res, e, part + index);
  202. }
  203. }
  204. context.Copy(d, res);
  205. }
  206. }
  207. public static void Fcvtns_S(ArmEmitterContext context)
  208. {
  209. if (Optimizations.UseSse41)
  210. {
  211. EmitSse41Fcvts(context, FPRoundingMode.ToNearest, scalar: true);
  212. }
  213. else
  214. {
  215. EmitFcvt(context, (op1) => EmitRoundMathCall(context, MidpointRounding.ToEven, op1), signed: true, scalar: true);
  216. }
  217. }
  218. public static void Fcvtns_V(ArmEmitterContext context)
  219. {
  220. if (Optimizations.UseSse41)
  221. {
  222. EmitSse41Fcvts(context, FPRoundingMode.ToNearest, scalar: false);
  223. }
  224. else
  225. {
  226. EmitFcvt(context, (op1) => EmitRoundMathCall(context, MidpointRounding.ToEven, op1), signed: true, scalar: false);
  227. }
  228. }
  229. public static void Fcvtnu_S(ArmEmitterContext context)
  230. {
  231. if (Optimizations.UseSse41)
  232. {
  233. EmitSse41Fcvtu(context, FPRoundingMode.ToNearest, scalar: true);
  234. }
  235. else
  236. {
  237. EmitFcvt(context, (op1) => EmitRoundMathCall(context, MidpointRounding.ToEven, op1), signed: false, scalar: true);
  238. }
  239. }
  240. public static void Fcvtnu_V(ArmEmitterContext context)
  241. {
  242. if (Optimizations.UseSse41)
  243. {
  244. EmitSse41Fcvtu(context, FPRoundingMode.ToNearest, scalar: false);
  245. }
  246. else
  247. {
  248. EmitFcvt(context, (op1) => EmitRoundMathCall(context, MidpointRounding.ToEven, op1), signed: false, scalar: false);
  249. }
  250. }
  251. public static void Fcvtps_Gp(ArmEmitterContext context)
  252. {
  253. if (Optimizations.UseSse41)
  254. {
  255. EmitSse41Fcvts_Gp(context, FPRoundingMode.TowardsPlusInfinity, isFixed: false);
  256. }
  257. else
  258. {
  259. EmitFcvt_s_Gp(context, (op1) => EmitUnaryMathCall(context, MathF.Ceiling, Math.Ceiling, op1));
  260. }
  261. }
  262. public static void Fcvtpu_Gp(ArmEmitterContext context)
  263. {
  264. if (Optimizations.UseSse41)
  265. {
  266. EmitSse41Fcvtu_Gp(context, FPRoundingMode.TowardsPlusInfinity, isFixed: false);
  267. }
  268. else
  269. {
  270. EmitFcvt_u_Gp(context, (op1) => EmitUnaryMathCall(context, MathF.Ceiling, Math.Ceiling, op1));
  271. }
  272. }
  273. public static void Fcvtzs_Gp(ArmEmitterContext context)
  274. {
  275. if (Optimizations.UseSse41)
  276. {
  277. EmitSse41Fcvts_Gp(context, FPRoundingMode.TowardsZero, isFixed: false);
  278. }
  279. else
  280. {
  281. EmitFcvt_s_Gp(context, (op1) => op1);
  282. }
  283. }
  284. public static void Fcvtzs_Gp_Fixed(ArmEmitterContext context)
  285. {
  286. if (Optimizations.UseSse41)
  287. {
  288. EmitSse41Fcvts_Gp(context, FPRoundingMode.TowardsZero, isFixed: true);
  289. }
  290. else
  291. {
  292. EmitFcvtzs_Gp_Fixed(context);
  293. }
  294. }
  295. public static void Fcvtzs_S(ArmEmitterContext context)
  296. {
  297. if (Optimizations.UseSse41)
  298. {
  299. EmitSse41Fcvts(context, FPRoundingMode.TowardsZero, scalar: true);
  300. }
  301. else
  302. {
  303. EmitFcvtz(context, signed: true, scalar: true);
  304. }
  305. }
  306. public static void Fcvtzs_V(ArmEmitterContext context)
  307. {
  308. if (Optimizations.UseSse41)
  309. {
  310. EmitSse41Fcvts(context, FPRoundingMode.TowardsZero, scalar: false);
  311. }
  312. else
  313. {
  314. EmitFcvtz(context, signed: true, scalar: false);
  315. }
  316. }
  317. public static void Fcvtzs_V_Fixed(ArmEmitterContext context)
  318. {
  319. if (Optimizations.UseSse41)
  320. {
  321. EmitSse41Fcvts(context, FPRoundingMode.TowardsZero, scalar: false);
  322. }
  323. else
  324. {
  325. EmitFcvtz(context, signed: true, scalar: false);
  326. }
  327. }
  328. public static void Fcvtzu_Gp(ArmEmitterContext context)
  329. {
  330. if (Optimizations.UseSse41)
  331. {
  332. EmitSse41Fcvtu_Gp(context, FPRoundingMode.TowardsZero, isFixed: false);
  333. }
  334. else
  335. {
  336. EmitFcvt_u_Gp(context, (op1) => op1);
  337. }
  338. }
  339. public static void Fcvtzu_Gp_Fixed(ArmEmitterContext context)
  340. {
  341. if (Optimizations.UseSse41)
  342. {
  343. EmitSse41Fcvtu_Gp(context, FPRoundingMode.TowardsZero, isFixed: true);
  344. }
  345. else
  346. {
  347. EmitFcvtzu_Gp_Fixed(context);
  348. }
  349. }
  350. public static void Fcvtzu_S(ArmEmitterContext context)
  351. {
  352. if (Optimizations.UseSse41)
  353. {
  354. EmitSse41Fcvtu(context, FPRoundingMode.TowardsZero, scalar: true);
  355. }
  356. else
  357. {
  358. EmitFcvtz(context, signed: false, scalar: true);
  359. }
  360. }
  361. public static void Fcvtzu_V(ArmEmitterContext context)
  362. {
  363. if (Optimizations.UseSse41)
  364. {
  365. EmitSse41Fcvtu(context, FPRoundingMode.TowardsZero, scalar: false);
  366. }
  367. else
  368. {
  369. EmitFcvtz(context, signed: false, scalar: false);
  370. }
  371. }
  372. public static void Fcvtzu_V_Fixed(ArmEmitterContext context)
  373. {
  374. if (Optimizations.UseSse41)
  375. {
  376. EmitSse41Fcvtu(context, FPRoundingMode.TowardsZero, scalar: false);
  377. }
  378. else
  379. {
  380. EmitFcvtz(context, signed: false, scalar: false);
  381. }
  382. }
  383. public static void Scvtf_Gp(ArmEmitterContext context)
  384. {
  385. OpCodeSimdCvt op = (OpCodeSimdCvt)context.CurrOp;
  386. Operand res = GetIntOrZR(context, op.Rn);
  387. if (op.RegisterSize == RegisterSize.Int32)
  388. {
  389. res = context.SignExtend32(OperandType.I64, res);
  390. }
  391. res = EmitFPConvert(context, res, op.Size, signed: true);
  392. context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
  393. }
  394. public static void Scvtf_Gp_Fixed(ArmEmitterContext context)
  395. {
  396. OpCodeSimdCvt op = (OpCodeSimdCvt)context.CurrOp;
  397. Operand res = GetIntOrZR(context, op.Rn);
  398. if (op.RegisterSize == RegisterSize.Int32)
  399. {
  400. res = context.SignExtend32(OperandType.I64, res);
  401. }
  402. res = EmitFPConvert(context, res, op.Size, signed: true);
  403. res = EmitI2fFBitsMul(context, res, op.FBits);
  404. context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
  405. }
  406. public static void Scvtf_S(ArmEmitterContext context)
  407. {
  408. if (Optimizations.UseSse2)
  409. {
  410. EmitSse2Scvtf(context, scalar: true);
  411. }
  412. else
  413. {
  414. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  415. int sizeF = op.Size & 1;
  416. Operand res = EmitVectorLongExtract(context, op.Rn, 0, sizeF + 2);
  417. res = EmitFPConvert(context, res, op.Size, signed: true);
  418. context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
  419. }
  420. }
  421. public static void Scvtf_V(ArmEmitterContext context)
  422. {
  423. if (Optimizations.UseSse2)
  424. {
  425. EmitSse2Scvtf(context, scalar: false);
  426. }
  427. else
  428. {
  429. EmitVectorCvtf(context, signed: true);
  430. }
  431. }
  432. public static void Scvtf_V_Fixed(ArmEmitterContext context)
  433. {
  434. if (Optimizations.UseSse2)
  435. {
  436. EmitSse2Scvtf(context, scalar: false);
  437. }
  438. else
  439. {
  440. EmitVectorCvtf(context, signed: true);
  441. }
  442. }
  443. public static void Ucvtf_Gp(ArmEmitterContext context)
  444. {
  445. OpCodeSimdCvt op = (OpCodeSimdCvt)context.CurrOp;
  446. Operand res = GetIntOrZR(context, op.Rn);
  447. res = EmitFPConvert(context, res, op.Size, signed: false);
  448. context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
  449. }
  450. public static void Ucvtf_Gp_Fixed(ArmEmitterContext context)
  451. {
  452. OpCodeSimdCvt op = (OpCodeSimdCvt)context.CurrOp;
  453. Operand res = GetIntOrZR(context, op.Rn);
  454. res = EmitFPConvert(context, res, op.Size, signed: false);
  455. res = EmitI2fFBitsMul(context, res, op.FBits);
  456. context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
  457. }
  458. public static void Ucvtf_S(ArmEmitterContext context)
  459. {
  460. if (Optimizations.UseSse2)
  461. {
  462. EmitSse2Ucvtf(context, scalar: true);
  463. }
  464. else
  465. {
  466. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  467. int sizeF = op.Size & 1;
  468. Operand ne = EmitVectorLongExtract(context, op.Rn, 0, sizeF + 2);
  469. Operand res = EmitFPConvert(context, ne, sizeF, signed: false);
  470. context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
  471. }
  472. }
  473. public static void Ucvtf_V(ArmEmitterContext context)
  474. {
  475. if (Optimizations.UseSse2)
  476. {
  477. EmitSse2Ucvtf(context, scalar: false);
  478. }
  479. else
  480. {
  481. EmitVectorCvtf(context, signed: false);
  482. }
  483. }
  484. public static void Ucvtf_V_Fixed(ArmEmitterContext context)
  485. {
  486. if (Optimizations.UseSse2)
  487. {
  488. EmitSse2Ucvtf(context, scalar: false);
  489. }
  490. else
  491. {
  492. EmitVectorCvtf(context, signed: false);
  493. }
  494. }
// Shared helper for FP -> integer conversions that first apply a per-element
// rounding transform ("emit"), then saturate to a 32/64-bit (un)signed integer
// via the soft-fallback helpers.
private static void EmitFcvt(ArmEmitterContext context, Func1I emit, bool signed, bool scalar)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;

    Operand res = context.VectorZero();

    Operand n = GetVec(op.Rn);

    int sizeF = op.Size & 1; // 0: FP32 elements, 1: FP64 elements.
    int sizeI = sizeF + 2;   // log2 of the integer element size in bytes (4 or 8).

    OperandType type = sizeF == 0 ? OperandType.FP32 : OperandType.FP64;

    int elems = !scalar ? op.GetBytesCount() >> sizeI : 1;

    for (int index = 0; index < elems; index++)
    {
        Operand ne = context.VectorExtract(type, n, index);

        // Caller-provided rounding step (identity for the truncating forms).
        Operand e = emit(ne);

        if (sizeF == 0)
        {
            Delegate dlg = signed
                ? (Delegate)new _S32_F32(SoftFallback.SatF32ToS32)
                : (Delegate)new _U32_F32(SoftFallback.SatF32ToU32);

            e = context.Call(dlg, e);

            e = context.ZeroExtend32(OperandType.I64, e);
        }
        else /* if (sizeF == 1) */
        {
            Delegate dlg = signed
                ? (Delegate)new _S64_F64(SoftFallback.SatF64ToS64)
                : (Delegate)new _U64_F64(SoftFallback.SatF64ToU64);

            e = context.Call(dlg, e);
        }

        res = EmitVectorInsert(context, res, e, index, sizeI);
    }

    context.Copy(GetVec(op.Rd), res);
}
// Shared helper for FCVTZ* (optionally fixed-point) FP -> integer conversions:
// scales each element by 2^fBits, then saturates to a 32/64-bit (un)signed
// integer via the soft-fallback helpers.
private static void EmitFcvtz(ArmEmitterContext context, bool signed, bool scalar)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;

    Operand res = context.VectorZero();

    Operand n = GetVec(op.Rn);

    int sizeF = op.Size & 1; // 0: FP32 elements, 1: FP64 elements.
    int sizeI = sizeF + 2;   // log2 of the integer element size in bytes (4 or 8).

    OperandType type = sizeF == 0 ? OperandType.FP32 : OperandType.FP64;

    // Fixed-point forms carry a fractional-bit count in the opcode; 0 otherwise.
    int fBits = GetFBits(context);

    int elems = !scalar ? op.GetBytesCount() >> sizeI : 1;

    for (int index = 0; index < elems; index++)
    {
        Operand ne = context.VectorExtract(type, n, index);

        Operand e = EmitF2iFBitsMul(context, ne, fBits);

        if (sizeF == 0)
        {
            Delegate dlg = signed
                ? (Delegate)new _S32_F32(SoftFallback.SatF32ToS32)
                : (Delegate)new _U32_F32(SoftFallback.SatF32ToU32);

            e = context.Call(dlg, e);

            e = context.ZeroExtend32(OperandType.I64, e);
        }
        else /* if (sizeF == 1) */
        {
            Delegate dlg = signed
                ? (Delegate)new _S64_F64(SoftFallback.SatF64ToS64)
                : (Delegate)new _U64_F64(SoftFallback.SatF64ToU64);

            e = context.Call(dlg, e);
        }

        res = EmitVectorInsert(context, res, e, index, sizeI);
    }

    context.Copy(GetVec(op.Rd), res);
}
  560. private static void EmitFcvt_s_Gp(ArmEmitterContext context, Func1I emit)
  561. {
  562. EmitFcvt___Gp(context, emit, signed: true);
  563. }
  564. private static void EmitFcvt_u_Gp(ArmEmitterContext context, Func1I emit)
  565. {
  566. EmitFcvt___Gp(context, emit, signed: false);
  567. }
  568. private static void EmitFcvt___Gp(ArmEmitterContext context, Func1I emit, bool signed)
  569. {
  570. OpCodeSimdCvt op = (OpCodeSimdCvt)context.CurrOp;
  571. OperandType type = op.Size == 0 ? OperandType.FP32 : OperandType.FP64;
  572. Operand ne = context.VectorExtract(type, GetVec(op.Rn), 0);
  573. Operand res = signed
  574. ? EmitScalarFcvts(context, emit(ne), 0)
  575. : EmitScalarFcvtu(context, emit(ne), 0);
  576. SetIntOrZR(context, op.Rd, res);
  577. }
  578. private static void EmitFcvtzs_Gp_Fixed(ArmEmitterContext context)
  579. {
  580. EmitFcvtz__Gp_Fixed(context, signed: true);
  581. }
  582. private static void EmitFcvtzu_Gp_Fixed(ArmEmitterContext context)
  583. {
  584. EmitFcvtz__Gp_Fixed(context, signed: false);
  585. }
  586. private static void EmitFcvtz__Gp_Fixed(ArmEmitterContext context, bool signed)
  587. {
  588. OpCodeSimdCvt op = (OpCodeSimdCvt)context.CurrOp;
  589. OperandType type = op.Size == 0 ? OperandType.FP32 : OperandType.FP64;
  590. Operand ne = context.VectorExtract(type, GetVec(op.Rn), 0);
  591. Operand res = signed
  592. ? EmitScalarFcvts(context, ne, op.FBits)
  593. : EmitScalarFcvtu(context, ne, op.FBits);
  594. SetIntOrZR(context, op.Rd, res);
  595. }
  596. private static void EmitVectorCvtf(ArmEmitterContext context, bool signed)
  597. {
  598. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  599. Operand res = context.VectorZero();
  600. int sizeF = op.Size & 1;
  601. int sizeI = sizeF + 2;
  602. int fBits = GetFBits(context);
  603. int elems = op.GetBytesCount() >> sizeI;
  604. for (int index = 0; index < elems; index++)
  605. {
  606. Operand ne = EmitVectorLongExtract(context, op.Rn, index, sizeI);
  607. Operand e = EmitFPConvert(context, ne, sizeF, signed);
  608. e = EmitI2fFBitsMul(context, e, fBits);
  609. res = context.VectorInsert(res, e, index);
  610. }
  611. context.Copy(GetVec(op.Rd), res);
  612. }
  613. private static int GetFBits(ArmEmitterContext context)
  614. {
  615. if (context.CurrOp is OpCodeSimdShImm op)
  616. {
  617. return GetImmShr(op);
  618. }
  619. return 0;
  620. }
  621. private static Operand EmitFPConvert(ArmEmitterContext context, Operand value, int size, bool signed)
  622. {
  623. Debug.Assert(value.Type == OperandType.I32 || value.Type == OperandType.I64);
  624. Debug.Assert((uint)size < 2);
  625. OperandType type = size == 0 ? OperandType.FP32 : OperandType.FP64;
  626. if (signed)
  627. {
  628. return context.ConvertToFP(type, value);
  629. }
  630. else
  631. {
  632. return context.ConvertToFPUI(type, value);
  633. }
  634. }
  635. private static Operand EmitScalarFcvts(ArmEmitterContext context, Operand value, int fBits)
  636. {
  637. Debug.Assert(value.Type == OperandType.FP32 || value.Type == OperandType.FP64);
  638. value = EmitF2iFBitsMul(context, value, fBits);
  639. if (context.CurrOp.RegisterSize == RegisterSize.Int32)
  640. {
  641. Delegate dlg = value.Type == OperandType.FP32
  642. ? (Delegate)new _S32_F32(SoftFallback.SatF32ToS32)
  643. : (Delegate)new _S32_F64(SoftFallback.SatF64ToS32);
  644. return context.Call(dlg, value);
  645. }
  646. else
  647. {
  648. Delegate dlg = value.Type == OperandType.FP32
  649. ? (Delegate)new _S64_F32(SoftFallback.SatF32ToS64)
  650. : (Delegate)new _S64_F64(SoftFallback.SatF64ToS64);
  651. return context.Call(dlg, value);
  652. }
  653. }
  654. private static Operand EmitScalarFcvtu(ArmEmitterContext context, Operand value, int fBits)
  655. {
  656. Debug.Assert(value.Type == OperandType.FP32 || value.Type == OperandType.FP64);
  657. value = EmitF2iFBitsMul(context, value, fBits);
  658. if (context.CurrOp.RegisterSize == RegisterSize.Int32)
  659. {
  660. Delegate dlg = value.Type == OperandType.FP32
  661. ? (Delegate)new _U32_F32(SoftFallback.SatF32ToU32)
  662. : (Delegate)new _U32_F64(SoftFallback.SatF64ToU32);
  663. return context.Call(dlg, value);
  664. }
  665. else
  666. {
  667. Delegate dlg = value.Type == OperandType.FP32
  668. ? (Delegate)new _U64_F32(SoftFallback.SatF32ToU64)
  669. : (Delegate)new _U64_F64(SoftFallback.SatF64ToU64);
  670. return context.Call(dlg, value);
  671. }
  672. }
  673. private static Operand EmitF2iFBitsMul(ArmEmitterContext context, Operand value, int fBits)
  674. {
  675. Debug.Assert(value.Type == OperandType.FP32 || value.Type == OperandType.FP64);
  676. if (fBits == 0)
  677. {
  678. return value;
  679. }
  680. if (value.Type == OperandType.FP32)
  681. {
  682. return context.Multiply(value, ConstF(MathF.Pow(2f, fBits)));
  683. }
  684. else /* if (value.Type == OperandType.FP64) */
  685. {
  686. return context.Multiply(value, ConstF(Math.Pow(2d, fBits)));
  687. }
  688. }
  689. private static Operand EmitI2fFBitsMul(ArmEmitterContext context, Operand value, int fBits)
  690. {
  691. Debug.Assert(value.Type == OperandType.FP32 || value.Type == OperandType.FP64);
  692. if (fBits == 0)
  693. {
  694. return value;
  695. }
  696. if (value.Type == OperandType.FP32)
  697. {
  698. return context.Multiply(value, ConstF(1f / MathF.Pow(2f, fBits)));
  699. }
  700. else /* if (value.Type == OperandType.FP64) */
  701. {
  702. return context.Multiply(value, ConstF(1d / Math.Pow(2d, fBits)));
  703. }
  704. }
// Converts the double element(s) of opF to 64-bit signed integers with CVTSD2SI;
// for the vector form the high element is converted separately and recombined
// into the result vector with MOVLHPS.
public static Operand EmitSse2CvtDoubleToInt64OpF(ArmEmitterContext context, Operand opF, bool scalar)
{
    Debug.Assert(opF.Type == OperandType.V128);

    Operand longL = context.AddIntrinsicLong (Intrinsic.X86Cvtsd2si, opF); // opFL

    Operand res = context.VectorCreateScalar(longL);

    if (!scalar)
    {
        Operand opFH  = context.AddIntrinsic     (Intrinsic.X86Movhlps,  res, opF); // res doesn't matter.
        Operand longH = context.AddIntrinsicLong (Intrinsic.X86Cvtsd2si, opFH);
        Operand resH  = context.VectorCreateScalar(longH);
                res   = context.AddIntrinsic     (Intrinsic.X86Movlhps,  res, resH);
    }

    return res;
}
// Converts the 64-bit integer element(s) of op to doubles with CVTSI2SD; for the
// vector form the high element is converted separately and recombined into the
// result vector with MOVLHPS.
private static Operand EmitSse2CvtInt64ToDoubleOp(ArmEmitterContext context, Operand op, bool scalar)
{
    Debug.Assert(op.Type == OperandType.V128);

    // Move the low 64 bits out as an integer first, then convert.
    Operand longL = context.AddIntrinsicLong(Intrinsic.X86Cvtsi2si, op); // opL
    Operand res   = context.AddIntrinsic    (Intrinsic.X86Cvtsi2sd, context.VectorZero(), longL);

    if (!scalar)
    {
        Operand opH   = context.AddIntrinsic    (Intrinsic.X86Movhlps,  res, op); // res doesn't matter.
        Operand longH = context.AddIntrinsicLong(Intrinsic.X86Cvtsi2si, opH);
        Operand resH  = context.AddIntrinsic    (Intrinsic.X86Cvtsi2sd, res, longH); // res doesn't matter.
                res   = context.AddIntrinsic    (Intrinsic.X86Movlhps,  res, resH);
    }

    return res;
}
// SSE2 path for SCVTF (scalar/vector, optionally fixed-point): converts signed
// integer elements to FP, then applies the 1 / 2^fBits scale as a multiply by a
// bit-pattern-precomputed FP constant when the opcode carries fractional bits.
private static void EmitSse2Scvtf(ArmEmitterContext context, bool scalar)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;

    Operand n = GetVec(op.Rn);

    // sizeF == ((OpCodeSimdShImm)op).Size - 2
    int sizeF = op.Size & 1;

    if (sizeF == 0)
    {
        // 32-bit lanes: CVTDQ2PS converts all four at once.
        Operand res = context.AddIntrinsic(Intrinsic.X86Cvtdq2ps, n);

        if (op is OpCodeSimdShImm fixedOp)
        {
            int fBits = GetImmShr(fixedOp);

            // BitConverter.Int32BitsToSingle(fpScaled) == 1f / MathF.Pow(2f, fBits)
            int fpScaled = 0x3F800000 - fBits * 0x800000;

            Operand fpScaledMask = scalar
                ? X86GetScalar     (context, fpScaled)
                : X86GetAllElements(context, fpScaled);

            res = context.AddIntrinsic(Intrinsic.X86Mulps, res, fpScaledMask);
        }

        if (scalar)
        {
            // Scalar form: only the low 32 bits of the result are significant.
            res = context.VectorZeroUpper96(res);
        }
        else if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }

        context.Copy(GetVec(op.Rd), res);
    }
    else /* if (sizeF == 1) */
    {
        // 64-bit lanes: converted element by element (no packed int64 -> double on SSE2).
        Operand res = EmitSse2CvtInt64ToDoubleOp(context, n, scalar);

        if (op is OpCodeSimdShImm fixedOp)
        {
            int fBits = GetImmShr(fixedOp);

            // BitConverter.Int64BitsToDouble(fpScaled) == 1d / Math.Pow(2d, fBits)
            long fpScaled = 0x3FF0000000000000L - fBits * 0x10000000000000L;

            Operand fpScaledMask = scalar
                ? X86GetScalar     (context, fpScaled)
                : X86GetAllElements(context, fpScaled);

            res = context.AddIntrinsic(Intrinsic.X86Mulpd, res, fpScaledMask);
        }

        if (scalar)
        {
            res = context.VectorZeroUpper64(res);
        }

        context.Copy(GetVec(op.Rd), res);
    }
}
  782. private static void EmitSse2Ucvtf(ArmEmitterContext context, bool scalar)
  783. {
  784. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  785. Operand n = GetVec(op.Rn);
  786. // sizeF == ((OpCodeSimdShImm)op).Size - 2
  787. int sizeF = op.Size & 1;
  788. if (sizeF == 0)
  789. {
  790. Operand mask = scalar // 65536.000f (1 << 16)
  791. ? X86GetScalar (context, 0x47800000)
  792. : X86GetAllElements(context, 0x47800000);
  793. Operand res = context.AddIntrinsic(Intrinsic.X86Psrld, n, Const(16));
  794. res = context.AddIntrinsic(Intrinsic.X86Cvtdq2ps, res);
  795. res = context.AddIntrinsic(Intrinsic.X86Mulps, res, mask);
  796. Operand res2 = context.AddIntrinsic(Intrinsic.X86Pslld, n, Const(16));
  797. res2 = context.AddIntrinsic(Intrinsic.X86Psrld, res2, Const(16));
  798. res2 = context.AddIntrinsic(Intrinsic.X86Cvtdq2ps, res2);
  799. res = context.AddIntrinsic(Intrinsic.X86Addps, res, res2);
  800. if (op is OpCodeSimdShImm fixedOp)
  801. {
  802. int fBits = GetImmShr(fixedOp);
  803. // BitConverter.Int32BitsToSingle(fpScaled) == 1f / MathF.Pow(2f, fBits)
  804. int fpScaled = 0x3F800000 - fBits * 0x800000;
  805. Operand fpScaledMask = scalar
  806. ? X86GetScalar (context, fpScaled)
  807. : X86GetAllElements(context, fpScaled);
  808. res = context.AddIntrinsic(Intrinsic.X86Mulps, res, fpScaledMask);
  809. }
  810. if (scalar)
  811. {
  812. res = context.VectorZeroUpper96(res);
  813. }
  814. else if (op.RegisterSize == RegisterSize.Simd64)
  815. {
  816. res = context.VectorZeroUpper64(res);
  817. }
  818. context.Copy(GetVec(op.Rd), res);
  819. }
  820. else /* if (sizeF == 1) */
  821. {
  822. Operand mask = scalar // 4294967296.0000000d (1L << 32)
  823. ? X86GetScalar (context, 0x41F0000000000000L)
  824. : X86GetAllElements(context, 0x41F0000000000000L);
  825. Operand res = context.AddIntrinsic (Intrinsic.X86Psrlq, n, Const(32));
  826. res = EmitSse2CvtInt64ToDoubleOp(context, res, scalar);
  827. res = context.AddIntrinsic (Intrinsic.X86Mulpd, res, mask);
  828. Operand res2 = context.AddIntrinsic (Intrinsic.X86Psllq, n, Const(32));
  829. res2 = context.AddIntrinsic (Intrinsic.X86Psrlq, res2, Const(32));
  830. res2 = EmitSse2CvtInt64ToDoubleOp(context, res2, scalar);
  831. res = context.AddIntrinsic(Intrinsic.X86Addpd, res, res2);
  832. if (op is OpCodeSimdShImm fixedOp)
  833. {
  834. int fBits = GetImmShr(fixedOp);
  835. // BitConverter.Int64BitsToDouble(fpScaled) == 1d / Math.Pow(2d, fBits)
  836. long fpScaled = 0x3FF0000000000000L - fBits * 0x10000000000000L;
  837. Operand fpScaledMask = scalar
  838. ? X86GetScalar (context, fpScaled)
  839. : X86GetAllElements(context, fpScaled);
  840. res = context.AddIntrinsic(Intrinsic.X86Mulpd, res, fpScaledMask);
  841. }
  842. if (scalar)
  843. {
  844. res = context.VectorZeroUpper64(res);
  845. }
  846. context.Copy(GetVec(op.Rd), res);
  847. }
  848. }
  849. private static void EmitSse41Fcvts(ArmEmitterContext context, FPRoundingMode roundMode, bool scalar)
  850. {
  851. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  852. Operand n = GetVec(op.Rn);
  853. // sizeF == ((OpCodeSimdShImm)op).Size - 2
  854. int sizeF = op.Size & 1;
  855. if (sizeF == 0)
  856. {
  857. Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmpps, n, n, Const((int)CmpCondition.OrderedQ));
  858. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
  859. if (op is OpCodeSimdShImm fixedOp)
  860. {
  861. int fBits = GetImmShr(fixedOp);
  862. // BitConverter.Int32BitsToSingle(fpScaled) == MathF.Pow(2f, fBits)
  863. int fpScaled = 0x3F800000 + fBits * 0x800000;
  864. Operand fpScaledMask = scalar
  865. ? X86GetScalar (context, fpScaled)
  866. : X86GetAllElements(context, fpScaled);
  867. nRes = context.AddIntrinsic(Intrinsic.X86Mulps, nRes, fpScaledMask);
  868. }
  869. nRes = context.AddIntrinsic(Intrinsic.X86Roundps, nRes, Const(X86GetRoundControl(roundMode)));
  870. Operand nInt = context.AddIntrinsic(Intrinsic.X86Cvtps2dq, nRes);
  871. Operand fpMaxValMask = scalar // 2.14748365E9f (2147483648)
  872. ? X86GetScalar (context, 0x4F000000)
  873. : X86GetAllElements(context, 0x4F000000);
  874. nRes = context.AddIntrinsic(Intrinsic.X86Cmpps, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));
  875. Operand dRes = context.AddIntrinsic(Intrinsic.X86Pxor, nInt, nRes);
  876. if (scalar)
  877. {
  878. dRes = context.VectorZeroUpper96(dRes);
  879. }
  880. else if (op.RegisterSize == RegisterSize.Simd64)
  881. {
  882. dRes = context.VectorZeroUpper64(dRes);
  883. }
  884. context.Copy(GetVec(op.Rd), dRes);
  885. }
  886. else /* if (sizeF == 1) */
  887. {
  888. Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmppd, n, n, Const((int)CmpCondition.OrderedQ));
  889. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
  890. if (op is OpCodeSimdShImm fixedOp)
  891. {
  892. int fBits = GetImmShr(fixedOp);
  893. // BitConverter.Int64BitsToDouble(fpScaled) == Math.Pow(2d, fBits)
  894. long fpScaled = 0x3FF0000000000000L + fBits * 0x10000000000000L;
  895. Operand fpScaledMask = scalar
  896. ? X86GetScalar (context, fpScaled)
  897. : X86GetAllElements(context, fpScaled);
  898. nRes = context.AddIntrinsic(Intrinsic.X86Mulpd, nRes, fpScaledMask);
  899. }
  900. nRes = context.AddIntrinsic(Intrinsic.X86Roundpd, nRes, Const(X86GetRoundControl(roundMode)));
  901. Operand nLong = EmitSse2CvtDoubleToInt64OpF(context, nRes, scalar);
  902. Operand fpMaxValMask = scalar // 9.2233720368547760E18d (9223372036854775808)
  903. ? X86GetScalar (context, 0x43E0000000000000L)
  904. : X86GetAllElements(context, 0x43E0000000000000L);
  905. nRes = context.AddIntrinsic(Intrinsic.X86Cmppd, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));
  906. Operand dRes = context.AddIntrinsic(Intrinsic.X86Pxor, nLong, nRes);
  907. if (scalar)
  908. {
  909. dRes = context.VectorZeroUpper64(dRes);
  910. }
  911. context.Copy(GetVec(op.Rd), dRes);
  912. }
  913. }
  914. private static void EmitSse41Fcvtu(ArmEmitterContext context, FPRoundingMode roundMode, bool scalar)
  915. {
  916. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  917. Operand n = GetVec(op.Rn);
  918. // sizeF == ((OpCodeSimdShImm)op).Size - 2
  919. int sizeF = op.Size & 1;
  920. if (sizeF == 0)
  921. {
  922. Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmpps, n, n, Const((int)CmpCondition.OrderedQ));
  923. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
  924. if (op is OpCodeSimdShImm fixedOp)
  925. {
  926. int fBits = GetImmShr(fixedOp);
  927. // BitConverter.Int32BitsToSingle(fpScaled) == MathF.Pow(2f, fBits)
  928. int fpScaled = 0x3F800000 + fBits * 0x800000;
  929. Operand fpScaledMask = scalar
  930. ? X86GetScalar (context, fpScaled)
  931. : X86GetAllElements(context, fpScaled);
  932. nRes = context.AddIntrinsic(Intrinsic.X86Mulps, nRes, fpScaledMask);
  933. }
  934. nRes = context.AddIntrinsic(Intrinsic.X86Roundps, nRes, Const(X86GetRoundControl(roundMode)));
  935. Operand zero = context.VectorZero();
  936. Operand nCmp = context.AddIntrinsic(Intrinsic.X86Cmpps, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
  937. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
  938. Operand fpMaxValMask = scalar // 2.14748365E9f (2147483648)
  939. ? X86GetScalar (context, 0x4F000000)
  940. : X86GetAllElements(context, 0x4F000000);
  941. Operand nInt = context.AddIntrinsic(Intrinsic.X86Cvtps2dq, nRes);
  942. nRes = context.AddIntrinsic(Intrinsic.X86Subps, nRes, fpMaxValMask);
  943. nCmp = context.AddIntrinsic(Intrinsic.X86Cmpps, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
  944. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
  945. Operand nInt2 = context.AddIntrinsic(Intrinsic.X86Cvtps2dq, nRes);
  946. nRes = context.AddIntrinsic(Intrinsic.X86Cmpps, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));
  947. Operand dRes = context.AddIntrinsic(Intrinsic.X86Pxor, nInt2, nRes);
  948. dRes = context.AddIntrinsic(Intrinsic.X86Paddd, dRes, nInt);
  949. if (scalar)
  950. {
  951. dRes = context.VectorZeroUpper96(dRes);
  952. }
  953. else if (op.RegisterSize == RegisterSize.Simd64)
  954. {
  955. dRes = context.VectorZeroUpper64(dRes);
  956. }
  957. context.Copy(GetVec(op.Rd), dRes);
  958. }
  959. else /* if (sizeF == 1) */
  960. {
  961. Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmppd, n, n, Const((int)CmpCondition.OrderedQ));
  962. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
  963. if (op is OpCodeSimdShImm fixedOp)
  964. {
  965. int fBits = GetImmShr(fixedOp);
  966. // BitConverter.Int64BitsToDouble(fpScaled) == Math.Pow(2d, fBits)
  967. long fpScaled = 0x3FF0000000000000L + fBits * 0x10000000000000L;
  968. Operand fpScaledMask = scalar
  969. ? X86GetScalar (context, fpScaled)
  970. : X86GetAllElements(context, fpScaled);
  971. nRes = context.AddIntrinsic(Intrinsic.X86Mulpd, nRes, fpScaledMask);
  972. }
  973. nRes = context.AddIntrinsic(Intrinsic.X86Roundpd, nRes, Const(X86GetRoundControl(roundMode)));
  974. Operand zero = context.VectorZero();
  975. Operand nCmp = context.AddIntrinsic(Intrinsic.X86Cmppd, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
  976. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
  977. Operand fpMaxValMask = scalar // 9.2233720368547760E18d (9223372036854775808)
  978. ? X86GetScalar (context, 0x43E0000000000000L)
  979. : X86GetAllElements(context, 0x43E0000000000000L);
  980. Operand nLong = EmitSse2CvtDoubleToInt64OpF(context, nRes, scalar);
  981. nRes = context.AddIntrinsic(Intrinsic.X86Subpd, nRes, fpMaxValMask);
  982. nCmp = context.AddIntrinsic(Intrinsic.X86Cmppd, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
  983. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
  984. Operand nLong2 = EmitSse2CvtDoubleToInt64OpF(context, nRes, scalar);
  985. nRes = context.AddIntrinsic(Intrinsic.X86Cmppd, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));
  986. Operand dRes = context.AddIntrinsic(Intrinsic.X86Pxor, nLong2, nRes);
  987. dRes = context.AddIntrinsic(Intrinsic.X86Paddq, dRes, nLong);
  988. if (scalar)
  989. {
  990. dRes = context.VectorZeroUpper64(dRes);
  991. }
  992. context.Copy(GetVec(op.Rd), dRes);
  993. }
  994. }
  995. private static void EmitSse41Fcvts_Gp(ArmEmitterContext context, FPRoundingMode roundMode, bool isFixed)
  996. {
  997. OpCodeSimdCvt op = (OpCodeSimdCvt)context.CurrOp;
  998. Operand n = GetVec(op.Rn);
  999. if (op.Size == 0)
  1000. {
  1001. Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmpss, n, n, Const((int)CmpCondition.OrderedQ));
  1002. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
  1003. if (isFixed)
  1004. {
  1005. // BitConverter.Int32BitsToSingle(fpScaled) == MathF.Pow(2f, op.FBits)
  1006. int fpScaled = 0x3F800000 + op.FBits * 0x800000;
  1007. Operand fpScaledMask = X86GetScalar(context, fpScaled);
  1008. nRes = context.AddIntrinsic(Intrinsic.X86Mulss, nRes, fpScaledMask);
  1009. }
  1010. nRes = context.AddIntrinsic(Intrinsic.X86Roundss, nRes, Const(X86GetRoundControl(roundMode)));
  1011. Operand nIntOrLong = op.RegisterSize == RegisterSize.Int32
  1012. ? context.AddIntrinsicInt (Intrinsic.X86Cvtss2si, nRes)
  1013. : context.AddIntrinsicLong(Intrinsic.X86Cvtss2si, nRes);
  1014. int fpMaxVal = op.RegisterSize == RegisterSize.Int32
  1015. ? 0x4F000000 // 2.14748365E9f (2147483648)
  1016. : 0x5F000000; // 9.223372E18f (9223372036854775808)
  1017. Operand fpMaxValMask = X86GetScalar(context, fpMaxVal);
  1018. nRes = context.AddIntrinsic(Intrinsic.X86Cmpss, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));
  1019. Operand nInt = context.AddIntrinsicInt(Intrinsic.X86Cvtsi2si, nRes);
  1020. if (op.RegisterSize == RegisterSize.Int64)
  1021. {
  1022. nInt = context.SignExtend32(OperandType.I64, nInt);
  1023. }
  1024. Operand dRes = context.BitwiseExclusiveOr(nIntOrLong, nInt);
  1025. SetIntOrZR(context, op.Rd, dRes);
  1026. }
  1027. else /* if (op.Size == 1) */
  1028. {
  1029. Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmpsd, n, n, Const((int)CmpCondition.OrderedQ));
  1030. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
  1031. if (isFixed)
  1032. {
  1033. // BitConverter.Int64BitsToDouble(fpScaled) == Math.Pow(2d, op.FBits)
  1034. long fpScaled = 0x3FF0000000000000L + op.FBits * 0x10000000000000L;
  1035. Operand fpScaledMask = X86GetScalar(context, fpScaled);
  1036. nRes = context.AddIntrinsic(Intrinsic.X86Mulsd, nRes, fpScaledMask);
  1037. }
  1038. nRes = context.AddIntrinsic(Intrinsic.X86Roundsd, nRes, Const(X86GetRoundControl(roundMode)));
  1039. Operand nIntOrLong = op.RegisterSize == RegisterSize.Int32
  1040. ? context.AddIntrinsicInt (Intrinsic.X86Cvtsd2si, nRes)
  1041. : context.AddIntrinsicLong(Intrinsic.X86Cvtsd2si, nRes);
  1042. long fpMaxVal = op.RegisterSize == RegisterSize.Int32
  1043. ? 0x41E0000000000000L // 2147483648.0000000d (2147483648)
  1044. : 0x43E0000000000000L; // 9.2233720368547760E18d (9223372036854775808)
  1045. Operand fpMaxValMask = X86GetScalar(context, fpMaxVal);
  1046. nRes = context.AddIntrinsic(Intrinsic.X86Cmpsd, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));
  1047. Operand nLong = context.AddIntrinsicLong(Intrinsic.X86Cvtsi2si, nRes);
  1048. if (op.RegisterSize == RegisterSize.Int32)
  1049. {
  1050. nLong = context.ConvertI64ToI32(nLong);
  1051. }
  1052. Operand dRes = context.BitwiseExclusiveOr(nIntOrLong, nLong);
  1053. SetIntOrZR(context, op.Rd, dRes);
  1054. }
  1055. }
  1056. private static void EmitSse41Fcvtu_Gp(ArmEmitterContext context, FPRoundingMode roundMode, bool isFixed)
  1057. {
  1058. OpCodeSimdCvt op = (OpCodeSimdCvt)context.CurrOp;
  1059. Operand n = GetVec(op.Rn);
  1060. if (op.Size == 0)
  1061. {
  1062. Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmpss, n, n, Const((int)CmpCondition.OrderedQ));
  1063. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
  1064. if (isFixed)
  1065. {
  1066. // BitConverter.Int32BitsToSingle(fpScaled) == MathF.Pow(2f, op.FBits)
  1067. int fpScaled = 0x3F800000 + op.FBits * 0x800000;
  1068. Operand fpScaledMask = X86GetScalar(context, fpScaled);
  1069. nRes = context.AddIntrinsic(Intrinsic.X86Mulss, nRes, fpScaledMask);
  1070. }
  1071. nRes = context.AddIntrinsic(Intrinsic.X86Roundss, nRes, Const(X86GetRoundControl(roundMode)));
  1072. Operand zero = context.VectorZero();
  1073. Operand nCmp = context.AddIntrinsic(Intrinsic.X86Cmpss, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
  1074. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
  1075. int fpMaxVal = op.RegisterSize == RegisterSize.Int32
  1076. ? 0x4F000000 // 2.14748365E9f (2147483648)
  1077. : 0x5F000000; // 9.223372E18f (9223372036854775808)
  1078. Operand fpMaxValMask = X86GetScalar(context, fpMaxVal);
  1079. Operand nIntOrLong = op.RegisterSize == RegisterSize.Int32
  1080. ? context.AddIntrinsicInt (Intrinsic.X86Cvtss2si, nRes)
  1081. : context.AddIntrinsicLong(Intrinsic.X86Cvtss2si, nRes);
  1082. nRes = context.AddIntrinsic(Intrinsic.X86Subss, nRes, fpMaxValMask);
  1083. nCmp = context.AddIntrinsic(Intrinsic.X86Cmpss, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
  1084. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
  1085. Operand nIntOrLong2 = op.RegisterSize == RegisterSize.Int32
  1086. ? context.AddIntrinsicInt (Intrinsic.X86Cvtss2si, nRes)
  1087. : context.AddIntrinsicLong(Intrinsic.X86Cvtss2si, nRes);
  1088. nRes = context.AddIntrinsic(Intrinsic.X86Cmpss, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));
  1089. Operand nInt = context.AddIntrinsicInt(Intrinsic.X86Cvtsi2si, nRes);
  1090. if (op.RegisterSize == RegisterSize.Int64)
  1091. {
  1092. nInt = context.SignExtend32(OperandType.I64, nInt);
  1093. }
  1094. Operand dRes = context.BitwiseExclusiveOr(nIntOrLong2, nInt);
  1095. dRes = context.Add(dRes, nIntOrLong);
  1096. SetIntOrZR(context, op.Rd, dRes);
  1097. }
  1098. else /* if (op.Size == 1) */
  1099. {
  1100. Operand nRes = context.AddIntrinsic(Intrinsic.X86Cmpsd, n, n, Const((int)CmpCondition.OrderedQ));
  1101. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, n);
  1102. if (isFixed)
  1103. {
  1104. // BitConverter.Int64BitsToDouble(fpScaled) == Math.Pow(2d, op.FBits)
  1105. long fpScaled = 0x3FF0000000000000L + op.FBits * 0x10000000000000L;
  1106. Operand fpScaledMask = X86GetScalar(context, fpScaled);
  1107. nRes = context.AddIntrinsic(Intrinsic.X86Mulsd, nRes, fpScaledMask);
  1108. }
  1109. nRes = context.AddIntrinsic(Intrinsic.X86Roundsd, nRes, Const(X86GetRoundControl(roundMode)));
  1110. Operand zero = context.VectorZero();
  1111. Operand nCmp = context.AddIntrinsic(Intrinsic.X86Cmpsd, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
  1112. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
  1113. long fpMaxVal = op.RegisterSize == RegisterSize.Int32
  1114. ? 0x41E0000000000000L // 2147483648.0000000d (2147483648)
  1115. : 0x43E0000000000000L; // 9.2233720368547760E18d (9223372036854775808)
  1116. Operand fpMaxValMask = X86GetScalar(context, fpMaxVal);
  1117. Operand nIntOrLong = op.RegisterSize == RegisterSize.Int32
  1118. ? context.AddIntrinsicInt (Intrinsic.X86Cvtsd2si, nRes)
  1119. : context.AddIntrinsicLong(Intrinsic.X86Cvtsd2si, nRes);
  1120. nRes = context.AddIntrinsic(Intrinsic.X86Subsd, nRes, fpMaxValMask);
  1121. nCmp = context.AddIntrinsic(Intrinsic.X86Cmpsd, nRes, zero, Const((int)CmpCondition.NotLessThanOrEqual));
  1122. nRes = context.AddIntrinsic(Intrinsic.X86Pand, nRes, nCmp);
  1123. Operand nIntOrLong2 = op.RegisterSize == RegisterSize.Int32
  1124. ? context.AddIntrinsicInt (Intrinsic.X86Cvtsd2si, nRes)
  1125. : context.AddIntrinsicLong(Intrinsic.X86Cvtsd2si, nRes);
  1126. nRes = context.AddIntrinsic(Intrinsic.X86Cmpsd, nRes, fpMaxValMask, Const((int)CmpCondition.NotLessThan));
  1127. Operand nLong = context.AddIntrinsicLong(Intrinsic.X86Cvtsi2si, nRes);
  1128. if (op.RegisterSize == RegisterSize.Int32)
  1129. {
  1130. nLong = context.ConvertI64ToI32(nLong);
  1131. }
  1132. Operand dRes = context.BitwiseExclusiveOr(nIntOrLong2, nLong);
  1133. dRes = context.Add(dRes, nIntOrLong);
  1134. SetIntOrZR(context, op.Rd, dRes);
  1135. }
  1136. }
  1137. private static Operand EmitVectorLongExtract(ArmEmitterContext context, int reg, int index, int size)
  1138. {
  1139. OperandType type = size == 3 ? OperandType.I64 : OperandType.I32;
  1140. return context.VectorExtract(type, GetVec(reg), index);
  1141. }
  1142. }
  1143. }