// ManagedPageFlags.cs
using Ryujinx.Memory;
using Ryujinx.Memory.Tracking;
using System;
using System.Runtime.CompilerServices;
using System.Threading;
  6. namespace Ryujinx.Cpu
  7. {
  8. /// <summary>
  9. /// A page bitmap that keeps track of mapped state and tracking protection
  10. /// for managed memory accesses (not using host page protection).
  11. /// </summary>
  12. internal readonly struct ManagedPageFlags
  13. {
  14. public const int PageBits = 12;
  15. public const int PageSize = 1 << PageBits;
  16. public const int PageMask = PageSize - 1;
  17. private readonly ulong[] _pageBitmap;
  18. public const int PageToPteShift = 5; // 32 pages (2 bits each) in one ulong page table entry.
  19. public const ulong BlockMappedMask = 0x5555555555555555; // First bit of each table entry set.
  20. private enum ManagedPtBits : ulong
  21. {
  22. Unmapped = 0,
  23. Mapped,
  24. WriteTracked,
  25. ReadWriteTracked,
  26. MappedReplicated = 0x5555555555555555,
  27. WriteTrackedReplicated = 0xaaaaaaaaaaaaaaaa,
  28. ReadWriteTrackedReplicated = ulong.MaxValue,
  29. }
  30. public ManagedPageFlags(int addressSpaceBits)
  31. {
  32. int bits = Math.Max(0, addressSpaceBits - (PageBits + PageToPteShift));
  33. _pageBitmap = new ulong[1 << bits];
  34. }
  35. /// <summary>
  36. /// Computes the number of pages in a virtual address range.
  37. /// </summary>
  38. /// <param name="va">Virtual address of the range</param>
  39. /// <param name="size">Size of the range</param>
  40. /// <param name="startVa">The virtual address of the beginning of the first page</param>
  41. /// <remarks>This function does not differentiate between allocated and unallocated pages.</remarks>
  42. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  43. private static int GetPagesCount(ulong va, ulong size, out ulong startVa)
  44. {
  45. // WARNING: Always check if ulong does not overflow during the operations.
  46. startVa = va & ~(ulong)PageMask;
  47. ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;
  48. return (int)(vaSpan / PageSize);
  49. }
  50. /// <summary>
  51. /// Checks if the page at a given CPU virtual address is mapped.
  52. /// </summary>
  53. /// <param name="va">Virtual address to check</param>
  54. /// <returns>True if the address is mapped, false otherwise</returns>
  55. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  56. public readonly bool IsMapped(ulong va)
  57. {
  58. ulong page = va >> PageBits;
  59. int bit = (int)((page & 31) << 1);
  60. int pageIndex = (int)(page >> PageToPteShift);
  61. ref ulong pageRef = ref _pageBitmap[pageIndex];
  62. ulong pte = Volatile.Read(ref pageRef);
  63. return ((pte >> bit) & 3) != 0;
  64. }
  65. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  66. private static void GetPageBlockRange(ulong pageStart, ulong pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex)
  67. {
  68. startMask = ulong.MaxValue << ((int)(pageStart & 31) << 1);
  69. endMask = ulong.MaxValue >> (64 - ((int)(pageEnd & 31) << 1));
  70. pageIndex = (int)(pageStart >> PageToPteShift);
  71. pageEndIndex = (int)((pageEnd - 1) >> PageToPteShift);
  72. }
  73. /// <summary>
  74. /// Checks if a memory range is mapped.
  75. /// </summary>
  76. /// <param name="va">Virtual address of the range</param>
  77. /// <param name="size">Size of the range in bytes</param>
  78. /// <returns>True if the entire range is mapped, false otherwise</returns>
  79. public readonly bool IsRangeMapped(ulong va, ulong size)
  80. {
  81. int pages = GetPagesCount(va, size, out _);
  82. if (pages == 1)
  83. {
  84. return IsMapped(va);
  85. }
  86. ulong pageStart = va >> PageBits;
  87. ulong pageEnd = pageStart + (ulong)pages;
  88. GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
  89. // Check if either bit in each 2 bit page entry is set.
  90. // OR the block with itself shifted down by 1, and check the first bit of each entry.
  91. ulong mask = BlockMappedMask & startMask;
  92. while (pageIndex <= pageEndIndex)
  93. {
  94. if (pageIndex == pageEndIndex)
  95. {
  96. mask &= endMask;
  97. }
  98. ref ulong pageRef = ref _pageBitmap[pageIndex++];
  99. ulong pte = Volatile.Read(ref pageRef);
  100. pte |= pte >> 1;
  101. if ((pte & mask) != mask)
  102. {
  103. return false;
  104. }
  105. mask = BlockMappedMask;
  106. }
  107. return true;
  108. }
  109. /// <summary>
  110. /// Reprotect a region of virtual memory for tracking.
  111. /// </summary>
  112. /// <param name="va">Virtual address base</param>
  113. /// <param name="size">Size of the region to protect</param>
  114. /// <param name="protection">Memory protection to set</param>
  115. public readonly void TrackingReprotect(ulong va, ulong size, MemoryPermission protection)
  116. {
  117. // Protection is inverted on software pages, since the default value is 0.
  118. protection = (~protection) & MemoryPermission.ReadAndWrite;
  119. int pages = GetPagesCount(va, size, out va);
  120. ulong pageStart = va >> PageBits;
  121. if (pages == 1)
  122. {
  123. ulong protTag = protection switch
  124. {
  125. MemoryPermission.None => (ulong)ManagedPtBits.Mapped,
  126. MemoryPermission.Write => (ulong)ManagedPtBits.WriteTracked,
  127. _ => (ulong)ManagedPtBits.ReadWriteTracked,
  128. };
  129. int bit = (int)((pageStart & 31) << 1);
  130. ulong tagMask = 3UL << bit;
  131. ulong invTagMask = ~tagMask;
  132. ulong tag = protTag << bit;
  133. int pageIndex = (int)(pageStart >> PageToPteShift);
  134. ref ulong pageRef = ref _pageBitmap[pageIndex];
  135. ulong pte;
  136. do
  137. {
  138. pte = Volatile.Read(ref pageRef);
  139. }
  140. while ((pte & tagMask) != 0 && Interlocked.CompareExchange(ref pageRef, (pte & invTagMask) | tag, pte) != pte);
  141. }
  142. else
  143. {
  144. ulong pageEnd = pageStart + (ulong)pages;
  145. GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
  146. ulong mask = startMask;
  147. ulong protTag = protection switch
  148. {
  149. MemoryPermission.None => (ulong)ManagedPtBits.MappedReplicated,
  150. MemoryPermission.Write => (ulong)ManagedPtBits.WriteTrackedReplicated,
  151. _ => (ulong)ManagedPtBits.ReadWriteTrackedReplicated,
  152. };
  153. while (pageIndex <= pageEndIndex)
  154. {
  155. if (pageIndex == pageEndIndex)
  156. {
  157. mask &= endMask;
  158. }
  159. ref ulong pageRef = ref _pageBitmap[pageIndex++];
  160. ulong pte;
  161. ulong mappedMask;
  162. // Change the protection of all 2 bit entries that are mapped.
  163. do
  164. {
  165. pte = Volatile.Read(ref pageRef);
  166. mappedMask = pte | (pte >> 1);
  167. mappedMask |= (mappedMask & BlockMappedMask) << 1;
  168. mappedMask &= mask; // Only update mapped pages within the given range.
  169. }
  170. while (Interlocked.CompareExchange(ref pageRef, (pte & (~mappedMask)) | (protTag & mappedMask), pte) != pte);
  171. mask = ulong.MaxValue;
  172. }
  173. }
  174. }
  175. /// <summary>
  176. /// Alerts the memory tracking that a given region has been read from or written to.
  177. /// This should be called before read/write is performed.
  178. /// </summary>
  179. /// <param name="tracking">Memory tracking structure to call when pages are protected</param>
  180. /// <param name="va">Virtual address of the region</param>
  181. /// <param name="size">Size of the region</param>
  182. /// <param name="write">True if the region was written, false if read</param>
  183. /// <param name="exemptId">Optional ID of the handles that should not be signalled</param>
  184. /// <remarks>
  185. /// This function also validates that the given range is both valid and mapped, and will throw if it is not.
  186. /// </remarks>
  187. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  188. public readonly void SignalMemoryTracking(MemoryTracking tracking, ulong va, ulong size, bool write, int? exemptId = null)
  189. {
  190. // Software table, used for managed memory tracking.
  191. int pages = GetPagesCount(va, size, out _);
  192. ulong pageStart = va >> PageBits;
  193. if (pages == 1)
  194. {
  195. ulong tag = (ulong)(write ? ManagedPtBits.WriteTracked : ManagedPtBits.ReadWriteTracked);
  196. int bit = (int)((pageStart & 31) << 1);
  197. int pageIndex = (int)(pageStart >> PageToPteShift);
  198. ref ulong pageRef = ref _pageBitmap[pageIndex];
  199. ulong pte = Volatile.Read(ref pageRef);
  200. ulong state = ((pte >> bit) & 3);
  201. if (state >= tag)
  202. {
  203. tracking.VirtualMemoryEvent(va, size, write, precise: false, exemptId);
  204. return;
  205. }
  206. else if (state == 0)
  207. {
  208. ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
  209. }
  210. }
  211. else
  212. {
  213. ulong pageEnd = pageStart + (ulong)pages;
  214. GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
  215. ulong mask = startMask;
  216. ulong anyTrackingTag = (ulong)ManagedPtBits.WriteTrackedReplicated;
  217. while (pageIndex <= pageEndIndex)
  218. {
  219. if (pageIndex == pageEndIndex)
  220. {
  221. mask &= endMask;
  222. }
  223. ref ulong pageRef = ref _pageBitmap[pageIndex++];
  224. ulong pte = Volatile.Read(ref pageRef);
  225. ulong mappedMask = mask & BlockMappedMask;
  226. ulong mappedPte = pte | (pte >> 1);
  227. if ((mappedPte & mappedMask) != mappedMask)
  228. {
  229. ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
  230. }
  231. pte &= mask;
  232. if ((pte & anyTrackingTag) != 0) // Search for any tracking.
  233. {
  234. // Writes trigger any tracking.
  235. // Only trigger tracking from reads if both bits are set on any page.
  236. if (write || (pte & (pte >> 1) & BlockMappedMask) != 0)
  237. {
  238. tracking.VirtualMemoryEvent(va, size, write, precise: false, exemptId);
  239. break;
  240. }
  241. }
  242. mask = ulong.MaxValue;
  243. }
  244. }
  245. }
  246. /// <summary>
  247. /// Adds the given address mapping to the page table.
  248. /// </summary>
  249. /// <param name="va">Virtual memory address</param>
  250. /// <param name="size">Size to be mapped</param>
  251. public readonly void AddMapping(ulong va, ulong size)
  252. {
  253. int pages = GetPagesCount(va, size, out _);
  254. ulong pageStart = va >> PageBits;
  255. ulong pageEnd = pageStart + (ulong)pages;
  256. GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
  257. ulong mask = startMask;
  258. while (pageIndex <= pageEndIndex)
  259. {
  260. if (pageIndex == pageEndIndex)
  261. {
  262. mask &= endMask;
  263. }
  264. ref ulong pageRef = ref _pageBitmap[pageIndex++];
  265. ulong pte;
  266. ulong mappedMask;
  267. // Map all 2-bit entries that are unmapped.
  268. do
  269. {
  270. pte = Volatile.Read(ref pageRef);
  271. mappedMask = pte | (pte >> 1);
  272. mappedMask |= (mappedMask & BlockMappedMask) << 1;
  273. mappedMask |= ~mask; // Treat everything outside the range as mapped, thus unchanged.
  274. }
  275. while (Interlocked.CompareExchange(ref pageRef, (pte & mappedMask) | (BlockMappedMask & (~mappedMask)), pte) != pte);
  276. mask = ulong.MaxValue;
  277. }
  278. }
  279. /// <summary>
  280. /// Removes the given address mapping from the page table.
  281. /// </summary>
  282. /// <param name="va">Virtual memory address</param>
  283. /// <param name="size">Size to be unmapped</param>
  284. public readonly void RemoveMapping(ulong va, ulong size)
  285. {
  286. int pages = GetPagesCount(va, size, out _);
  287. ulong pageStart = va >> PageBits;
  288. ulong pageEnd = pageStart + (ulong)pages;
  289. GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
  290. startMask = ~startMask;
  291. endMask = ~endMask;
  292. ulong mask = startMask;
  293. while (pageIndex <= pageEndIndex)
  294. {
  295. if (pageIndex == pageEndIndex)
  296. {
  297. mask |= endMask;
  298. }
  299. ref ulong pageRef = ref _pageBitmap[pageIndex++];
  300. ulong pte;
  301. do
  302. {
  303. pte = Volatile.Read(ref pageRef);
  304. }
  305. while (Interlocked.CompareExchange(ref pageRef, pte & mask, pte) != pte);
  306. mask = 0;
  307. }
  308. }
  309. private static void ThrowInvalidMemoryRegionException(string message) => throw new InvalidMemoryRegionException(message);
  310. }
  311. }