InstEmitSystem.cs 23 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660
  1. using Ryujinx.Cpu.LightningJit.CodeGen;
  2. using Ryujinx.Cpu.LightningJit.CodeGen.Arm64;
  3. using System;
  4. using System.Diagnostics;
  5. using System.Numerics;
  6. using System.Runtime.InteropServices;
  7. namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
  8. {
  9. static class InstEmitSystem
  10. {
// Signature of the native handler invoked for software interrupts (BKPT, SVC, UDF);
// receives the guest address and the instruction immediate/encoding.
private delegate void SoftwareInterruptHandler(ulong address, int imm);
// Signature of a native helper returning a 64-bit value (used for the CNTPCT read).
private delegate ulong Get64();
// Signature of a native helper returning a boolean (used for the synchronization check).
private delegate bool GetBool();
// AArch64 register index that encodes the stack pointer in load/store instructions.
private const int SpIndex = 31;
// BKPT: records a pending breakpoint for this instruction and emits a
// placeholder branch that is patched later to jump to the handling code.
public static void Bkpt(CodeGenContext context, uint imm)
{
    context.AddPendingBkpt(imm);

    context.Arm64Assembler.B(0); // Placeholder, offset patched when the pending BKPT is resolved.
}
// CPS (Change Processor State): changes interrupt masks and processor mode,
// none of which are accessible from user mode, so no code is emitted.
public static void Cps(CodeGenContext context, uint imod, uint m, uint a, uint i, uint f, uint mode)
{
    // NOP in user mode.
}
// DBG: debug hint, defined as a NOP in ARMv8, so no code is emitted.
public static void Dbg(CodeGenContext context, uint option)
{
    // NOP in ARMv8.
}
// HLT: intentionally emits nothing.
// NOTE(review): treated as a no-op rather than an undefined instruction here — confirm this is intended.
public static void Hlt(CodeGenContext context, uint imm)
{
}
  31. public static void Mcr(CodeGenContext context, uint encoding, uint coproc, uint opc1, uint rt, uint crn, uint crm, uint opc2)
  32. {
  33. if (coproc != 15 || opc1 != 0)
  34. {
  35. Udf(context, encoding, 0);
  36. return;
  37. }
  38. Operand ctx = Register(context.RegisterAllocator.FixedContextRegister);
  39. Operand rtOperand = InstEmitCommon.GetInputGpr(context, rt);
  40. switch (crn)
  41. {
  42. case 13: // Process and Thread Info.
  43. if (crm == 0)
  44. {
  45. switch (opc2)
  46. {
  47. case 2:
  48. context.Arm64Assembler.StrRiUn(rtOperand, ctx, NativeContextOffsets.TpidrEl0Offset);
  49. return;
  50. }
  51. }
  52. break;
  53. }
  54. }
// MCRR: move two GPRs to a 64-bit coprocessor register. Only CP15 with
// opc1 == 0 is accepted; other encodings raise an undefined instruction.
// No 64-bit system register is writable from user mode, so accepted writes
// emit nothing.
public static void Mcrr(CodeGenContext context, uint encoding, uint coproc, uint opc1, uint rt, uint crm)
{
    if (coproc != 15 || opc1 != 0)
    {
        Udf(context, encoding, 0);

        return;
    }

    // We don't have any system register that needs to be modified using a 64-bit value.
}
  64. public static void Mrc(CodeGenContext context, uint encoding, uint coproc, uint opc1, uint rt, uint crn, uint crm, uint opc2)
  65. {
  66. if (coproc != 15 || opc1 != 0)
  67. {
  68. Udf(context, encoding, 0);
  69. return;
  70. }
  71. Operand ctx = Register(context.RegisterAllocator.FixedContextRegister);
  72. Operand rtOperand = InstEmitCommon.GetInputGpr(context, rt);
  73. bool hasValue = false;
  74. using ScopedRegister tempRegister = context.RegisterAllocator.AllocateTempGprRegisterScoped();
  75. Operand dest = rt == RegisterUtils.PcRegister ? tempRegister.Operand : rtOperand;
  76. switch (crn)
  77. {
  78. case 13: // Process and Thread Info.
  79. if (crm == 0)
  80. {
  81. switch (opc2)
  82. {
  83. case 2:
  84. context.Arm64Assembler.LdrRiUn(dest, ctx, NativeContextOffsets.TpidrEl0Offset);
  85. hasValue = true;
  86. break;
  87. case 3:
  88. context.Arm64Assembler.LdrRiUn(dest, ctx, NativeContextOffsets.TpidrroEl0Offset);
  89. hasValue = true;
  90. break;
  91. }
  92. }
  93. break;
  94. }
  95. if (rt == RegisterUtils.PcRegister)
  96. {
  97. context.Arm64Assembler.MsrNzcv(dest);
  98. context.SetNzcvModified();
  99. }
  100. else if (!hasValue)
  101. {
  102. context.Arm64Assembler.Mov(dest, 0u);
  103. }
  104. }
  105. public static void Mrrc(CodeGenContext context, uint encoding, uint coproc, uint opc1, uint rt, uint rt2, uint crm)
  106. {
  107. if (coproc != 15)
  108. {
  109. Udf(context, encoding, 0);
  110. return;
  111. }
  112. switch (crm)
  113. {
  114. case 14:
  115. switch (opc1)
  116. {
  117. case 0:
  118. context.AddPendingReadCntpct(rt, rt2);
  119. context.Arm64Assembler.B(0);
  120. return;
  121. }
  122. break;
  123. }
  124. // Unsupported system register.
  125. context.Arm64Assembler.Mov(InstEmitCommon.GetOutputGpr(context, rt), 0u);
  126. context.Arm64Assembler.Mov(InstEmitCommon.GetOutputGpr(context, rt2), 0u);
  127. }
  128. public static void Mrs(CodeGenContext context, uint rd, bool r)
  129. {
  130. Operand rdOperand = InstEmitCommon.GetOutputGpr(context, rd);
  131. if (r)
  132. {
  133. // Reads SPSR, unpredictable in user mode.
  134. context.Arm64Assembler.Mov(rdOperand, 0u);
  135. }
  136. else
  137. {
  138. Operand ctx = Register(context.RegisterAllocator.FixedContextRegister);
  139. using ScopedRegister tempRegister = context.RegisterAllocator.AllocateTempGprRegisterScoped();
  140. context.Arm64Assembler.LdrRiUn(tempRegister.Operand, ctx, NativeContextOffsets.FlagsBaseOffset);
  141. // Copy GE flags to destination register.
  142. context.Arm64Assembler.Ubfx(rdOperand, tempRegister.Operand, 16, 4);
  143. // Insert Q flag.
  144. context.Arm64Assembler.And(tempRegister.Operand, tempRegister.Operand, InstEmitCommon.Const(1 << 27));
  145. context.Arm64Assembler.Orr(rdOperand, rdOperand, tempRegister.Operand);
  146. // Insert NZCV flags.
  147. context.Arm64Assembler.MrsNzcv(tempRegister.Operand);
  148. context.Arm64Assembler.Orr(rdOperand, rdOperand, tempRegister.Operand);
  149. // All other flags can't be accessed in user mode or have "unknown" values.
  150. }
  151. }
// MRS (banked register): reading a banked register is unpredictable in user
// mode, so the destination is simply set to zero.
public static void MrsBr(CodeGenContext context, uint rd, uint m1, bool r)
{
    Operand rdOperand = InstEmitCommon.GetOutputGpr(context, rd);

    // Reads banked register, unpredictable in user mode.
    context.Arm64Assembler.Mov(rdOperand, 0u);
}
// MSR (banked register): writing a banked register is unpredictable in user
// mode, so no code is emitted.
public static void MsrBr(CodeGenContext context, uint rn, uint m1, bool r)
{
    // Writes banked register, unpredictable in user mode.
}
  162. public static void MsrI(CodeGenContext context, uint imm, uint mask, bool r)
  163. {
  164. if (r)
  165. {
  166. // Writes SPSR, unpredictable in user mode.
  167. }
  168. else
  169. {
  170. Operand ctx = Register(context.RegisterAllocator.FixedContextRegister);
  171. using ScopedRegister tempRegister = context.RegisterAllocator.AllocateTempGprRegisterScoped();
  172. using ScopedRegister tempRegister2 = context.RegisterAllocator.AllocateTempGprRegisterScoped();
  173. context.Arm64Assembler.LdrRiUn(tempRegister.Operand, ctx, NativeContextOffsets.FlagsBaseOffset);
  174. if ((mask & 2) != 0)
  175. {
  176. // Endian flag.
  177. context.Arm64Assembler.Mov(tempRegister2.Operand, (imm >> 9) & 1);
  178. context.Arm64Assembler.Bfi(tempRegister.Operand, tempRegister2.Operand, 9, 1);
  179. }
  180. if ((mask & 4) != 0)
  181. {
  182. // GE flags.
  183. context.Arm64Assembler.Mov(tempRegister2.Operand, (imm >> 16) & 0xf);
  184. context.Arm64Assembler.Bfi(tempRegister.Operand, tempRegister2.Operand, 16, 4);
  185. }
  186. if ((mask & 8) != 0)
  187. {
  188. // NZCVQ flags.
  189. context.Arm64Assembler.Mov(tempRegister2.Operand, (imm >> 27) & 0x1f);
  190. context.Arm64Assembler.Bfi(tempRegister.Operand, tempRegister2.Operand, 27, 5);
  191. context.Arm64Assembler.Mov(tempRegister2.Operand, (imm >> 28) & 0xf);
  192. InstEmitCommon.RestoreNzcvFlags(context, tempRegister2.Operand);
  193. context.SetNzcvModified();
  194. }
  195. }
  196. }
  197. public static void MsrR(CodeGenContext context, uint rn, uint mask, bool r)
  198. {
  199. Operand rnOperand = InstEmitCommon.GetInputGpr(context, rn);
  200. if (r)
  201. {
  202. // Writes SPSR, unpredictable in user mode.
  203. }
  204. else
  205. {
  206. Operand ctx = Register(context.RegisterAllocator.FixedContextRegister);
  207. using ScopedRegister tempRegister = context.RegisterAllocator.AllocateTempGprRegisterScoped();
  208. using ScopedRegister tempRegister2 = context.RegisterAllocator.AllocateTempGprRegisterScoped();
  209. context.Arm64Assembler.LdrRiUn(tempRegister.Operand, ctx, NativeContextOffsets.FlagsBaseOffset);
  210. if ((mask & 2) != 0)
  211. {
  212. // Endian flag.
  213. context.Arm64Assembler.Lsr(tempRegister2.Operand, rnOperand, InstEmitCommon.Const(9));
  214. context.Arm64Assembler.Bfi(tempRegister.Operand, tempRegister2.Operand, 9, 1);
  215. }
  216. if ((mask & 4) != 0)
  217. {
  218. // GE flags.
  219. context.Arm64Assembler.Lsr(tempRegister2.Operand, rnOperand, InstEmitCommon.Const(16));
  220. context.Arm64Assembler.Bfi(tempRegister.Operand, tempRegister2.Operand, 16, 4);
  221. }
  222. if ((mask & 8) != 0)
  223. {
  224. // NZCVQ flags.
  225. context.Arm64Assembler.Lsr(tempRegister2.Operand, rnOperand, InstEmitCommon.Const(27));
  226. context.Arm64Assembler.Bfi(tempRegister.Operand, tempRegister2.Operand, 27, 5);
  227. context.Arm64Assembler.Lsr(tempRegister2.Operand, rnOperand, InstEmitCommon.Const(28));
  228. InstEmitCommon.RestoreNzcvFlags(context, tempRegister2.Operand);
  229. context.SetNzcvModified();
  230. }
  231. }
  232. }
  233. public static void Setend(CodeGenContext context, bool e)
  234. {
  235. Operand ctx = Register(context.RegisterAllocator.FixedContextRegister);
  236. using ScopedRegister tempRegister = context.RegisterAllocator.AllocateTempGprRegisterScoped();
  237. context.Arm64Assembler.LdrRiUn(tempRegister.Operand, ctx, NativeContextOffsets.FlagsBaseOffset);
  238. if (e)
  239. {
  240. context.Arm64Assembler.Orr(tempRegister.Operand, tempRegister.Operand, InstEmitCommon.Const(1 << 9));
  241. }
  242. else
  243. {
  244. context.Arm64Assembler.Bfc(tempRegister.Operand, 9, 1);
  245. }
  246. context.Arm64Assembler.StrRiUn(tempRegister.Operand, ctx, NativeContextOffsets.FlagsBaseOffset);
  247. }
// SVC: records a pending supervisor call and emits a placeholder branch that
// is patched later to jump to the handling code.
public static void Svc(CodeGenContext context, uint imm)
{
    context.AddPendingSvc(imm);

    context.Arm64Assembler.B(0); // Placeholder, offset patched when the pending SVC is resolved.
}
// UDF: records a pending undefined instruction (the full encoding is what the
// handler needs; imm is unused here) and emits a placeholder branch patched
// later to jump to the handling code.
public static void Udf(CodeGenContext context, uint encoding, uint imm)
{
    context.AddPendingUdf(encoding);

    context.Arm64Assembler.B(0); // Placeholder, offset patched when the pending UDF is resolved.
}
// Privileged instructions are not available in user mode, treat as undefined.
public static void PrivilegedInstruction(CodeGenContext context, uint encoding)
{
    Udf(context, encoding, 0);
}
  262. private static nint GetBkptHandlerPtr()
  263. {
  264. return Marshal.GetFunctionPointerForDelegate<SoftwareInterruptHandler>(NativeInterface.Break);
  265. }
  266. private static nint GetSvcHandlerPtr()
  267. {
  268. return Marshal.GetFunctionPointerForDelegate<SoftwareInterruptHandler>(NativeInterface.SupervisorCall);
  269. }
  270. private static nint GetUdfHandlerPtr()
  271. {
  272. return Marshal.GetFunctionPointerForDelegate<SoftwareInterruptHandler>(NativeInterface.Undefined);
  273. }
  274. private static nint GetCntpctEl0Ptr()
  275. {
  276. return Marshal.GetFunctionPointerForDelegate<Get64>(NativeInterface.GetCntpctEl0);
  277. }
  278. private static nint CheckSynchronizationPtr()
  279. {
  280. return Marshal.GetFunctionPointerForDelegate<GetBool>(NativeInterface.CheckSynchronization);
  281. }
  282. public static bool NeedsCall(InstName name)
  283. {
  284. // All instructions that might do a host call should be included here.
  285. // That is required to reserve space on the stack for caller saved registers.
  286. return name == InstName.Mrrc;
  287. }
  288. public static bool NeedsCallSkipContext(InstName name)
  289. {
  290. // All instructions that might do a host call should be included here.
  291. // That is required to reserve space on the stack for caller saved registers.
  292. switch (name)
  293. {
  294. case InstName.Mcr:
  295. case InstName.Mrc:
  296. case InstName.Svc:
  297. case InstName.Udf:
  298. return true;
  299. }
  300. return false;
  301. }
  302. public static void WriteBkpt(CodeWriter writer, RegisterAllocator regAlloc, TailMerger tailMerger, int spillBaseOffset, uint pc, uint imm)
  303. {
  304. Assembler asm = new(writer);
  305. WriteCall(ref asm, regAlloc, GetBkptHandlerPtr(), skipContext: true, spillBaseOffset, null, pc, imm);
  306. WriteSyncPoint(writer, ref asm, regAlloc, tailMerger, spillBaseOffset);
  307. }
  308. public static void WriteSvc(CodeWriter writer, RegisterAllocator regAlloc, TailMerger tailMerger, int spillBaseOffset, uint pc, uint svcId)
  309. {
  310. Assembler asm = new(writer);
  311. WriteCall(ref asm, regAlloc, GetSvcHandlerPtr(), skipContext: true, spillBaseOffset, null, pc, svcId);
  312. WriteSyncPoint(writer, ref asm, regAlloc, tailMerger, spillBaseOffset);
  313. }
  314. public static void WriteUdf(CodeWriter writer, RegisterAllocator regAlloc, TailMerger tailMerger, int spillBaseOffset, uint pc, uint imm)
  315. {
  316. Assembler asm = new(writer);
  317. WriteCall(ref asm, regAlloc, GetUdfHandlerPtr(), skipContext: true, spillBaseOffset, null, pc, imm);
  318. WriteSyncPoint(writer, ref asm, regAlloc, tailMerger, spillBaseOffset);
  319. }
  320. public static void WriteReadCntpct(CodeWriter writer, RegisterAllocator regAlloc, int spillBaseOffset, int rt, int rt2)
  321. {
  322. Assembler asm = new(writer);
  323. uint resultMask = (1u << rt) | (1u << rt2);
  324. int tempRegister = 0;
  325. while ((resultMask & (1u << tempRegister)) != 0 && tempRegister < 32)
  326. {
  327. tempRegister++;
  328. }
  329. Debug.Assert(tempRegister < 32);
  330. WriteSpill(ref asm, regAlloc, resultMask, skipContext: false, spillBaseOffset, tempRegister);
  331. Operand rn = Register(tempRegister);
  332. asm.Mov(rn, (ulong)GetCntpctEl0Ptr());
  333. asm.Blr(rn);
  334. if (rt != rt2)
  335. {
  336. asm.Lsr(Register(rt2), Register(0), InstEmitCommon.Const(32));
  337. }
  338. asm.Mov(Register(rt, OperandType.I32), Register(0, OperandType.I32)); // Zero-extend.
  339. WriteFill(ref asm, regAlloc, resultMask, skipContext: false, spillBaseOffset, tempRegister);
  340. }
// Emits a synchronization point: when the context counter has reached zero,
// calls the native CheckSynchronization helper (exiting the function through
// the tail merger if it returns false/zero), then decrements the counter.
// storeToContext/loadFromContext let the caller flush and reload guest state
// around the host call.
public static void WriteSyncPoint(
    CodeWriter writer,
    ref Assembler asm,
    RegisterAllocator regAlloc,
    TailMerger tailMerger,
    int spillBaseOffset,
    Action storeToContext = null,
    Action loadFromContext = null)
{
    int tempRegister = regAlloc.AllocateTempGprRegister();

    Operand rt = Register(tempRegister, OperandType.I32);

    asm.LdrRiUn(rt, Register(regAlloc.FixedContextRegister), NativeContextOffsets.CounterOffset);

    // Skip the host call while the counter is still non-zero; the branch
    // target is patched below once the end of the call sequence is known.
    int branchIndex = writer.InstructionPointer;
    asm.Cbnz(rt, 0);

    storeToContext?.Invoke();

    WriteSpill(ref asm, regAlloc, 1u << tempRegister, skipContext: true, spillBaseOffset, tempRegister);

    // The call result arrives in X0, so put the function address elsewhere.
    Operand rn = Register(tempRegister == 0 ? 1 : 0);

    asm.Mov(rn, (ulong)CheckSynchronizationPtr());
    asm.Blr(rn);

    // A zero result requests an early exit from the translated function.
    tailMerger.AddConditionalZeroReturn(writer, asm, Register(0, OperandType.I32));

    WriteFill(ref asm, regAlloc, 1u << tempRegister, skipContext: true, spillBaseOffset, tempRegister);

    loadFromContext?.Invoke();

    // Reload the counter, since the host call may have changed it.
    asm.LdrRiUn(rt, Register(regAlloc.FixedContextRegister), NativeContextOffsets.CounterOffset);

    // Patch the CBNZ above to branch here (imm19 offset field starts at bit 5).
    uint branchInst = writer.ReadInstructionAt(branchIndex);
    writer.WriteInstructionAt(branchIndex, branchInst | (((uint)(writer.InstructionPointer - branchIndex) & 0x7ffff) << 5));

    // Decrement and store the counter back.
    asm.Sub(rt, rt, new Operand(OperandKind.Constant, OperandType.I32, 1));
    asm.StrRiUn(rt, Register(regAlloc.FixedContextRegister), NativeContextOffsets.CounterOffset);

    regAlloc.FreeTempGprRegister(tempRegister);
}
  370. private static void WriteCall(
  371. ref Assembler asm,
  372. RegisterAllocator regAlloc,
  373. nint funcPtr,
  374. bool skipContext,
  375. int spillBaseOffset,
  376. int? resultRegister,
  377. params ReadOnlySpan<ulong> callArgs)
  378. {
  379. uint resultMask = 0u;
  380. if (resultRegister.HasValue)
  381. {
  382. resultMask = 1u << resultRegister.Value;
  383. }
  384. int tempRegister = callArgs.Length;
  385. if (resultRegister.HasValue && tempRegister == resultRegister.Value)
  386. {
  387. tempRegister++;
  388. }
  389. WriteSpill(ref asm, regAlloc, resultMask, skipContext, spillBaseOffset, tempRegister);
  390. // We only support up to 7 arguments right now.
  391. // ABI defines the first 8 integer arguments to be passed on registers X0-X7.
  392. // We need at least one register to put the function address on, so that reduces the number of
  393. // registers we can use for that by one.
  394. Debug.Assert(callArgs.Length < 8);
  395. for (int index = 0; index < callArgs.Length; index++)
  396. {
  397. asm.Mov(Register(index), callArgs[index]);
  398. }
  399. Operand rn = Register(tempRegister);
  400. asm.Mov(rn, (ulong)funcPtr);
  401. asm.Blr(rn);
  402. if (resultRegister.HasValue && resultRegister.Value != 0)
  403. {
  404. asm.Mov(Register(resultRegister.Value), Register(0));
  405. }
  406. WriteFill(ref asm, regAlloc, resultMask, skipContext, spillBaseOffset, tempRegister);
  407. }
  408. private static void WriteSpill(ref Assembler asm, RegisterAllocator regAlloc, uint exceptMask, bool skipContext, int spillOffset, int tempRegister)
  409. {
  410. if (skipContext)
  411. {
  412. InstEmitFlow.WriteSpillSkipContext(ref asm, regAlloc, spillOffset);
  413. }
  414. else
  415. {
  416. WriteSpillOrFill(ref asm, regAlloc, exceptMask, spillOffset, tempRegister, spill: true);
  417. }
  418. }
  419. private static void WriteFill(ref Assembler asm, RegisterAllocator regAlloc, uint exceptMask, bool skipContext, int spillOffset, int tempRegister)
  420. {
  421. if (skipContext)
  422. {
  423. InstEmitFlow.WriteFillSkipContext(ref asm, regAlloc, spillOffset);
  424. }
  425. else
  426. {
  427. WriteSpillOrFill(ref asm, regAlloc, exceptMask, spillOffset, tempRegister, spill: false);
  428. }
  429. }
// Stores (spill) or reloads (fill) all used caller saved GPRs, the NZCV flags
// and all used FP/SIMD registers to/from the stack starting at spillOffset.
// Callee saved registers and registers in exceptMask are skipped; tempRegister
// is used to stage the NZCV value through a GPR.
private static void WriteSpillOrFill(
    ref Assembler asm,
    RegisterAllocator regAlloc,
    uint exceptMask,
    int spillOffset,
    int tempRegister,
    bool spill)
{
    uint gprMask = regAlloc.UsedGprsMask & ~(AbiConstants.GprCalleeSavedRegsMask | exceptMask);

    if (!spill)
    {
        // We must reload the status register before reloading the GPRs,
        // since we might otherwise trash one of them by using it as temp register.
        // The flags were stored right after the spilled GPRs (8 bytes each).
        Operand rt = Register(tempRegister, OperandType.I32);

        asm.LdrRiUn(rt, Register(SpIndex), spillOffset + BitOperations.PopCount(gprMask) * 8);
        asm.MsrNzcv(rt);
    }

    while (gprMask != 0)
    {
        int reg = BitOperations.TrailingZeroCount(gprMask);

        // Pair up consecutive registers into STP/LDP while the offset is
        // still encodable; otherwise fall back to a single STR/LDR.
        if (reg < 31 && (gprMask & (2u << reg)) != 0 && spillOffset < RegisterSaveRestore.Encodable9BitsOffsetLimit)
        {
            if (spill)
            {
                asm.StpRiUn(Register(reg), Register(reg + 1), Register(SpIndex), spillOffset);
            }
            else
            {
                asm.LdpRiUn(Register(reg), Register(reg + 1), Register(SpIndex), spillOffset);
            }

            gprMask &= ~(3u << reg);
            spillOffset += 16;
        }
        else
        {
            if (spill)
            {
                asm.StrRiUn(Register(reg), Register(SpIndex), spillOffset);
            }
            else
            {
                asm.LdrRiUn(Register(reg), Register(SpIndex), spillOffset);
            }

            gprMask &= ~(1u << reg);
            spillOffset += 8;
        }
    }

    if (spill)
    {
        // Save the NZCV flags right after the GPRs; the fill path above reads
        // them back from this same slot.
        Operand rt = Register(tempRegister, OperandType.I32);

        asm.MrsNzcv(rt);
        asm.StrRiUn(rt, Register(SpIndex), spillOffset);
    }

    spillOffset += 8;

    // Keep the offset 16-byte aligned for the 128-bit FP/SIMD accesses below.
    if ((spillOffset & 8) != 0)
    {
        spillOffset += 8;
    }

    uint fpSimdMask = regAlloc.UsedFpSimdMask;

    while (fpSimdMask != 0)
    {
        int reg = BitOperations.TrailingZeroCount(fpSimdMask);

        // Same pairing strategy as the GPRs, with 128-bit vector registers.
        if (reg < 31 && (fpSimdMask & (2u << reg)) != 0 && spillOffset < RegisterSaveRestore.Encodable9BitsOffsetLimit)
        {
            if (spill)
            {
                asm.StpRiUn(Register(reg, OperandType.V128), Register(reg + 1, OperandType.V128), Register(SpIndex), spillOffset);
            }
            else
            {
                asm.LdpRiUn(Register(reg, OperandType.V128), Register(reg + 1, OperandType.V128), Register(SpIndex), spillOffset);
            }

            fpSimdMask &= ~(3u << reg);
            spillOffset += 32;
        }
        else
        {
            if (spill)
            {
                asm.StrRiUn(Register(reg, OperandType.V128), Register(SpIndex), spillOffset);
            }
            else
            {
                asm.LdrRiUn(Register(reg, OperandType.V128), Register(SpIndex), spillOffset);
            }

            fpSimdMask &= ~(1u << reg);
            spillOffset += 16;
        }
    }
}
  520. public static Operand Register(int register, OperandType type = OperandType.I64)
  521. {
  522. return new Operand(register, RegisterType.Integer, type);
  523. }
  524. }
  525. }