// MemoryManagerHostMapped.cs
  1. using ARMeilleure.Memory;
  2. using Ryujinx.Cpu.Tracking;
  3. using Ryujinx.Memory;
  4. using Ryujinx.Memory.Range;
  5. using Ryujinx.Memory.Tracking;
  6. using System;
  7. using System.Collections.Generic;
  8. using System.Linq;
  9. using System.Runtime.CompilerServices;
  10. using System.Threading;
namespace Ryujinx.Cpu
{
    /// <summary>
    /// Represents a CPU memory manager which maps guest virtual memory directly onto a host virtual region.
    /// </summary>
    public class MemoryManagerHostMapped : MemoryManagerBase, IMemoryManager, IVirtualMemoryManagerTracked
    {
        public const int PageBits = 12;
        public const int PageSize = 1 << PageBits; // 4 KiB pages.
        public const int PageMask = PageSize - 1;

        public const int PageToPteShift = 5; // 32 pages (2 bits each) in one ulong page table entry.
        public const ulong BlockMappedMask = 0x5555555555555555; // First bit of each 2-bit entry set.
        /// <summary>
        /// Page states stored in the software page table, 2 bits per page.
        /// The "Replicated" values are the single-page tags copied across all 32 slots of a PTE.
        /// </summary>
        private enum HostMappedPtBits : ulong
        {
            Unmapped = 0,
            Mapped,
            WriteTracked,
            ReadWriteTracked,

            MappedReplicated = 0x5555555555555555,       // Mapped in every 2-bit slot.
            WriteTrackedReplicated = 0xaaaaaaaaaaaaaaaa, // WriteTracked in every 2-bit slot.
            ReadWriteTrackedReplicated = ulong.MaxValue  // ReadWriteTracked in every 2-bit slot.
        }
        private readonly InvalidAccessHandler _invalidAccessHandler;
        private readonly bool _unsafeMode;

        private readonly MemoryBlock _addressSpace;       // Reserved guest address space; receives protection changes (see TrackingReprotect).
        private readonly MemoryBlock _addressSpaceMirror; // Mirror of _addressSpace used for host-side reads and writes.
        private readonly ulong _addressSpaceSize;

        private readonly MemoryEhMeilleure _memoryEh;

        // Software page table: 2 state bits per page, 32 pages per ulong entry (see HostMappedPtBits).
        private ulong[] _pageTable;

        public int AddressSpaceBits { get; }

        public IntPtr PageTablePointer => _addressSpace.Pointer;

        public MemoryManagerType Type => _unsafeMode ? MemoryManagerType.HostMappedUnsafe : MemoryManagerType.HostMapped;

        public MemoryTracking Tracking { get; }

        // Raised from Unmap before the range is removed from tracking and decommitted.
        public event Action<ulong, ulong> UnmapEvent;
        /// <summary>
        /// Creates a new instance of the host mapped memory manager.
        /// </summary>
        /// <param name="addressSpaceSize">Size of the address space</param>
        /// <param name="unsafeMode">True if unmanaged access should not be masked (unsafe), false otherwise.</param>
        /// <param name="invalidAccessHandler">Optional function to handle invalid memory accesses</param>
        public MemoryManagerHostMapped(ulong addressSpaceSize, bool unsafeMode, InvalidAccessHandler invalidAccessHandler = null)
        {
            _invalidAccessHandler = invalidAccessHandler;
            _unsafeMode = unsafeMode;
            _addressSpaceSize = addressSpaceSize;

            // Round the requested size up to the next power of two (minimum one page),
            // tracking the number of address bits as we go.
            ulong asSize = PageSize;
            int asBits = PageBits;

            while (asSize < addressSpaceSize)
            {
                asSize <<= 1;
                asBits++;
            }

            AddressSpaceBits = asBits;

            // One ulong entry covers 2^PageToPteShift pages, hence the extra shift.
            _pageTable = new ulong[1 << (AddressSpaceBits - (PageBits + PageToPteShift))];

            // Reserve (not commit) the whole space; the mirror is used by the accessors
            // while protection changes are applied to _addressSpace only.
            _addressSpace = new MemoryBlock(asSize, MemoryAllocationFlags.Reserve | MemoryAllocationFlags.Mirrorable);
            _addressSpaceMirror = _addressSpace.CreateMirror();

            Tracking = new MemoryTracking(this, PageSize, invalidAccessHandler);
            _memoryEh = new MemoryEhMeilleure(_addressSpace, Tracking);
        }
  70. /// <summary>
  71. /// Checks if the virtual address is part of the addressable space.
  72. /// </summary>
  73. /// <param name="va">Virtual address</param>
  74. /// <returns>True if the virtual address is part of the addressable space</returns>
  75. private bool ValidateAddress(ulong va)
  76. {
  77. return va < _addressSpaceSize;
  78. }
  79. /// <summary>
  80. /// Checks if the combination of virtual address and size is part of the addressable space.
  81. /// </summary>
  82. /// <param name="va">Virtual address of the range</param>
  83. /// <param name="size">Size of the range in bytes</param>
  84. /// <returns>True if the combination of virtual address and size is part of the addressable space</returns>
  85. private bool ValidateAddressAndSize(ulong va, ulong size)
  86. {
  87. ulong endVa = va + size;
  88. return endVa >= va && endVa >= size && endVa <= _addressSpaceSize;
  89. }
  90. /// <summary>
  91. /// Ensures the combination of virtual address and size is part of the addressable space.
  92. /// </summary>
  93. /// <param name="va">Virtual address of the range</param>
  94. /// <param name="size">Size of the range in bytes</param>
  95. /// <exception cref="InvalidMemoryRegionException">Throw when the memory region specified outside the addressable space</exception>
  96. private void AssertValidAddressAndSize(ulong va, ulong size)
  97. {
  98. if (!ValidateAddressAndSize(va, size))
  99. {
  100. throw new InvalidMemoryRegionException($"va=0x{va:X16}, size=0x{size:X16}");
  101. }
  102. }
  103. /// <summary>
  104. /// Ensures the combination of virtual address and size is part of the addressable space and fully mapped.
  105. /// </summary>
  106. /// <param name="va">Virtual address of the range</param>
  107. /// <param name="size">Size of the range in bytes</param>
  108. private void AssertMapped(ulong va, ulong size)
  109. {
  110. if (!ValidateAddressAndSize(va, size) || !IsRangeMappedImpl(va, size))
  111. {
  112. throw new InvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
  113. }
  114. }
        /// <inheritdoc/>
        public void Map(ulong va, nuint hostAddress, ulong size)
        {
            AssertValidAddressAndSize(va, size);

            // Commit backing memory first, then publish the pages in the software
            // table, then make tracking aware of the new range. hostAddress is
            // unused: guest VA maps directly onto the host region at the same offset.
            _addressSpace.Commit(va, size);
            AddMapping(va, size);

            Tracking.Map(va, size);
        }
        /// <inheritdoc/>
        public void Unmap(ulong va, ulong size)
        {
            AssertValidAddressAndSize(va, size);

            // Notify listeners before tearing anything down, then reverse the Map
            // order: stop tracking, clear the software table, release the memory.
            UnmapEvent?.Invoke(va, size);
            Tracking.Unmap(va, size);

            RemoveMapping(va, size);
            _addressSpace.Decommit(va, size);
        }
  132. /// <inheritdoc/>
  133. public T Read<T>(ulong va) where T : unmanaged
  134. {
  135. try
  136. {
  137. AssertMapped(va, (ulong)Unsafe.SizeOf<T>());
  138. return _addressSpaceMirror.Read<T>(va);
  139. }
  140. catch (InvalidMemoryRegionException)
  141. {
  142. if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
  143. {
  144. throw;
  145. }
  146. return default;
  147. }
  148. }
  149. /// <inheritdoc/>
  150. public T ReadTracked<T>(ulong va) where T : unmanaged
  151. {
  152. try
  153. {
  154. SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), false);
  155. return Read<T>(va);
  156. }
  157. catch (InvalidMemoryRegionException)
  158. {
  159. if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
  160. {
  161. throw;
  162. }
  163. return default;
  164. }
  165. }
  166. /// <inheritdoc/>
  167. public void Read(ulong va, Span<byte> data)
  168. {
  169. try
  170. {
  171. AssertMapped(va, (ulong)data.Length);
  172. _addressSpaceMirror.Read(va, data);
  173. }
  174. catch (InvalidMemoryRegionException)
  175. {
  176. if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
  177. {
  178. throw;
  179. }
  180. }
  181. }
  182. /// <inheritdoc/>
  183. public void Write<T>(ulong va, T value) where T : unmanaged
  184. {
  185. try
  186. {
  187. SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), write: true);
  188. _addressSpaceMirror.Write(va, value);
  189. }
  190. catch (InvalidMemoryRegionException)
  191. {
  192. if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
  193. {
  194. throw;
  195. }
  196. }
  197. }
  198. /// <inheritdoc/>
  199. public void Write(ulong va, ReadOnlySpan<byte> data)
  200. {
  201. try {
  202. SignalMemoryTracking(va, (ulong)data.Length, write: true);
  203. _addressSpaceMirror.Write(va, data);
  204. }
  205. catch (InvalidMemoryRegionException)
  206. {
  207. if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
  208. {
  209. throw;
  210. }
  211. }
  212. }
  213. /// <inheritdoc/>
  214. public void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
  215. {
  216. try
  217. {
  218. AssertMapped(va, (ulong)data.Length);
  219. _addressSpaceMirror.Write(va, data);
  220. }
  221. catch (InvalidMemoryRegionException)
  222. {
  223. if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
  224. {
  225. throw;
  226. }
  227. }
  228. }
  229. /// <inheritdoc/>
  230. public ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
  231. {
  232. if (tracked)
  233. {
  234. SignalMemoryTracking(va, (ulong)size, write: false);
  235. }
  236. else
  237. {
  238. AssertMapped(va, (ulong)size);
  239. }
  240. return _addressSpaceMirror.GetSpan(va, size);
  241. }
  242. /// <inheritdoc/>
  243. public WritableRegion GetWritableRegion(ulong va, int size)
  244. {
  245. AssertMapped(va, (ulong)size);
  246. return _addressSpaceMirror.GetWritableRegion(va, size);
  247. }
  248. /// <inheritdoc/>
  249. public ref T GetRef<T>(ulong va) where T : unmanaged
  250. {
  251. SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), true);
  252. return ref _addressSpaceMirror.GetRef<T>(va);
  253. }
  254. /// <inheritdoc/>
  255. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  256. public bool IsMapped(ulong va)
  257. {
  258. return ValidateAddress(va) && IsMappedImpl(va);
  259. }
  260. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  261. private bool IsMappedImpl(ulong va)
  262. {
  263. ulong page = va >> PageBits;
  264. int bit = (int)((page & 31) << 1);
  265. int pageIndex = (int)(page >> PageToPteShift);
  266. ref ulong pageRef = ref _pageTable[pageIndex];
  267. ulong pte = Volatile.Read(ref pageRef);
  268. return ((pte >> bit) & 3) != 0;
  269. }
  270. /// <inheritdoc/>
  271. public bool IsRangeMapped(ulong va, ulong size)
  272. {
  273. AssertValidAddressAndSize(va, size);
  274. return IsRangeMappedImpl(va, size);
  275. }
        /// <summary>
        /// Computes the page table entry index range and the edge masks that select
        /// the 2-bit slots belonging to a span of pages.
        /// </summary>
        /// <param name="pageStart">First page number of the range</param>
        /// <param name="pageEnd">Page number one past the end of the range</param>
        /// <param name="startMask">Selects the slots at/after pageStart within the first entry</param>
        /// <param name="endMask">Selects the slots before pageEnd within the last entry</param>
        /// <param name="pageIndex">Index of the first page table entry</param>
        /// <param name="pageEndIndex">Index of the last page table entry (inclusive)</param>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private void GetPageBlockRange(ulong pageStart, ulong pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex)
        {
            startMask = ulong.MaxValue << ((int)(pageStart & 31) << 1);
            // When pageEnd is entry-aligned the computed shift count is 64, which C#
            // masks to 0 for 64-bit operands, yielding ulong.MaxValue: the entire
            // final entry is in range, as intended.
            endMask = ulong.MaxValue >> (64 - ((int)(pageEnd & 31) << 1));

            pageIndex = (int)(pageStart >> PageToPteShift);
            pageEndIndex = (int)((pageEnd - 1) >> PageToPteShift);
        }
        /// <summary>
        /// Checks that every page in the given range is mapped in the software page table.
        /// </summary>
        /// <param name="va">Virtual address of the range</param>
        /// <param name="size">Size of the range in bytes</param>
        /// <returns>True if every page in the range is mapped</returns>
        private bool IsRangeMappedImpl(ulong va, ulong size)
        {
            int pages = GetPagesCount(va, size, out _);

            if (pages == 1)
            {
                return IsMappedImpl(va);
            }

            ulong pageStart = va >> PageBits;
            ulong pageEnd = pageStart + (ulong)pages;

            GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);

            // Check if either bit in each 2 bit page entry is set.
            // OR the block with itself shifted down by 1, and check the first bit of each entry.
            ulong mask = BlockMappedMask & startMask;

            while (pageIndex <= pageEndIndex)
            {
                // The final entry is additionally clipped by endMask.
                if (pageIndex == pageEndIndex)
                {
                    mask &= endMask;
                }

                ref ulong pageRef = ref _pageTable[pageIndex++];
                ulong pte = Volatile.Read(ref pageRef);

                pte |= pte >> 1;
                if ((pte & mask) != mask)
                {
                    return false; // At least one page in this entry is unmapped.
                }

                // Middle entries must be fully mapped.
                mask = BlockMappedMask;
            }

            return true;
        }
  314. /// <inheritdoc/>
  315. public IEnumerable<HostMemoryRange> GetPhysicalRegions(ulong va, ulong size)
  316. {
  317. if (size == 0)
  318. {
  319. return Enumerable.Empty<HostMemoryRange>();
  320. }
  321. AssertMapped(va, size);
  322. return new HostMemoryRange[] { new HostMemoryRange(_addressSpaceMirror.GetPointer(va, size), size) };
  323. }
        /// <inheritdoc/>
        /// <remarks>
        /// This function also validates that the given range is both valid and mapped, and will throw if it is not.
        /// </remarks>
        public void SignalMemoryTracking(ulong va, ulong size, bool write)
        {
            AssertValidAddressAndSize(va, size);

            // Software table, used for managed memory tracking.

            int pages = GetPagesCount(va, size, out _);
            ulong pageStart = va >> PageBits;

            if (pages == 1)
            {
                // Single page fast path: compare the page's 2-bit state directly
                // against the minimum tag that should trigger for this access kind.
                ulong tag = (ulong)(write ? HostMappedPtBits.WriteTracked : HostMappedPtBits.ReadWriteTracked);

                int bit = (int)((pageStart & 31) << 1);

                int pageIndex = (int)(pageStart >> PageToPteShift);
                ref ulong pageRef = ref _pageTable[pageIndex];

                ulong pte = Volatile.Read(ref pageRef);
                ulong state = ((pte >> bit) & 3);

                if (state >= tag)
                {
                    // Tracked strongly enough for this access: fire the event.
                    Tracking.VirtualMemoryEvent(va, size, write);
                    return;
                }
                else if (state == 0)
                {
                    ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
                }
            }
            else
            {
                ulong pageEnd = pageStart + (ulong)pages;

                GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);

                ulong mask = startMask;

                // WriteTrackedReplicated has the high bit of every 2-bit slot set, so
                // any tracked page (WriteTracked or ReadWriteTracked) matches it.
                ulong anyTrackingTag = (ulong)HostMappedPtBits.WriteTrackedReplicated;

                while (pageIndex <= pageEndIndex)
                {
                    if (pageIndex == pageEndIndex)
                    {
                        mask &= endMask;
                    }

                    ref ulong pageRef = ref _pageTable[pageIndex++];

                    ulong pte = Volatile.Read(ref pageRef);
                    ulong mappedMask = mask & BlockMappedMask;

                    // Every page in range must be mapped (either bit of each slot set).
                    ulong mappedPte = pte | (pte >> 1);
                    if ((mappedPte & mappedMask) != mappedMask)
                    {
                        ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
                    }

                    pte &= mask;
                    if ((pte & anyTrackingTag) != 0) // Search for any tracking.
                    {
                        // Writes trigger any tracking.
                        // Only trigger tracking from reads if both bits are set on any page.
                        if (write || (pte & (pte >> 1) & BlockMappedMask) != 0)
                        {
                            Tracking.VirtualMemoryEvent(va, size, write);
                            break;
                        }
                    }

                    mask = ulong.MaxValue;
                }
            }
        }
  387. /// <summary>
  388. /// Computes the number of pages in a virtual address range.
  389. /// </summary>
  390. /// <param name="va">Virtual address of the range</param>
  391. /// <param name="size">Size of the range</param>
  392. /// <param name="startVa">The virtual address of the beginning of the first page</param>
  393. /// <remarks>This function does not differentiate between allocated and unallocated pages.</remarks>
  394. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  395. private int GetPagesCount(ulong va, ulong size, out ulong startVa)
  396. {
  397. // WARNING: Always check if ulong does not overflow during the operations.
  398. startVa = va & ~(ulong)PageMask;
  399. ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;
  400. return (int)(vaSpan / PageSize);
  401. }
        /// <inheritdoc/>
        public void TrackingReprotect(ulong va, ulong size, MemoryPermission protection)
        {
            // Protection is inverted on software pages, since the default value is 0.
            protection = (~protection) & MemoryPermission.ReadAndWrite;

            int pages = GetPagesCount(va, size, out va);
            ulong pageStart = va >> PageBits;

            if (pages == 1)
            {
                // Single page: CAS the page's 2-bit tag, retrying while another
                // thread races us, and giving up if the page becomes unmapped
                // (tag 0) in the meantime.
                ulong protTag = protection switch
                {
                    MemoryPermission.None => (ulong)HostMappedPtBits.Mapped,
                    MemoryPermission.Write => (ulong)HostMappedPtBits.WriteTracked,
                    _ => (ulong)HostMappedPtBits.ReadWriteTracked,
                };

                int bit = (int)((pageStart & 31) << 1);

                ulong tagMask = 3UL << bit;
                ulong invTagMask = ~tagMask;

                ulong tag = protTag << bit;

                int pageIndex = (int)(pageStart >> PageToPteShift);
                ref ulong pageRef = ref _pageTable[pageIndex];

                ulong pte;

                do
                {
                    pte = Volatile.Read(ref pageRef);
                }
                while ((pte & tagMask) != 0 && Interlocked.CompareExchange(ref pageRef, (pte & invTagMask) | tag, pte) != pte);
            }
            else
            {
                ulong pageEnd = pageStart + (ulong)pages;

                GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);

                ulong mask = startMask;

                // Replicated tag applied to every mapped slot in each entry.
                ulong protTag = protection switch
                {
                    MemoryPermission.None => (ulong)HostMappedPtBits.MappedReplicated,
                    MemoryPermission.Write => (ulong)HostMappedPtBits.WriteTrackedReplicated,
                    _ => (ulong)HostMappedPtBits.ReadWriteTrackedReplicated,
                };

                while (pageIndex <= pageEndIndex)
                {
                    if (pageIndex == pageEndIndex)
                    {
                        mask &= endMask;
                    }

                    ref ulong pageRef = ref _pageTable[pageIndex++];

                    ulong pte;
                    ulong mappedMask;

                    // Change the protection of all 2 bit entries that are mapped.
                    do
                    {
                        pte = Volatile.Read(ref pageRef);

                        // Expand "either bit set" into a full 2-bit mask per mapped slot.
                        mappedMask = pte | (pte >> 1);
                        mappedMask |= (mappedMask & BlockMappedMask) << 1;
                        mappedMask &= mask; // Only update mapped pages within the given range.
                    }
                    while (Interlocked.CompareExchange(ref pageRef, (pte & (~mappedMask)) | (protTag & mappedMask), pte) != pte);

                    mask = ulong.MaxValue;
                }
            }

            // Map the software tag back to a host protection (inverted again) and
            // apply it to the real address space, where the EH handler will catch
            // violating accesses.
            protection = protection switch
            {
                MemoryPermission.None => MemoryPermission.ReadAndWrite,
                MemoryPermission.Write => MemoryPermission.Read,
                _ => MemoryPermission.None
            };

            _addressSpace.Reprotect(va, size, protection, false);
        }
  470. /// <inheritdoc/>
  471. public CpuRegionHandle BeginTracking(ulong address, ulong size)
  472. {
  473. return new CpuRegionHandle(Tracking.BeginTracking(address, size));
  474. }
  475. /// <inheritdoc/>
  476. public CpuMultiRegionHandle BeginGranularTracking(ulong address, ulong size, IEnumerable<IRegionHandle> handles, ulong granularity)
  477. {
  478. return new CpuMultiRegionHandle(Tracking.BeginGranularTracking(address, size, handles, granularity));
  479. }
  480. /// <inheritdoc/>
  481. public CpuSmartMultiRegionHandle BeginSmartGranularTracking(ulong address, ulong size, ulong granularity)
  482. {
  483. return new CpuSmartMultiRegionHandle(Tracking.BeginSmartGranularTracking(address, size, granularity));
  484. }
        /// <summary>
        /// Adds the given address mapping to the page table.
        /// </summary>
        /// <param name="va">Virtual memory address</param>
        /// <param name="size">Size to be mapped</param>
        private void AddMapping(ulong va, ulong size)
        {
            int pages = GetPagesCount(va, size, out _);
            ulong pageStart = va >> PageBits;
            ulong pageEnd = pageStart + (ulong)pages;

            GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);

            ulong mask = startMask;

            while (pageIndex <= pageEndIndex)
            {
                if (pageIndex == pageEndIndex)
                {
                    mask &= endMask;
                }

                ref ulong pageRef = ref _pageTable[pageIndex++];

                ulong pte;
                ulong mappedMask;

                // Map all 2-bit entries that are unmapped. Already-mapped pages keep
                // their existing state (which may carry tracking bits).
                do
                {
                    pte = Volatile.Read(ref pageRef);

                    mappedMask = pte | (pte >> 1);
                    mappedMask |= (mappedMask & BlockMappedMask) << 1;
                    mappedMask |= ~mask; // Treat everything outside the range as mapped, thus unchanged.
                }
                while (Interlocked.CompareExchange(ref pageRef, (pte & mappedMask) | (BlockMappedMask & (~mappedMask)), pte) != pte);

                mask = ulong.MaxValue;
            }
        }
        /// <summary>
        /// Removes the given address mapping from the page table.
        /// </summary>
        /// <param name="va">Virtual memory address</param>
        /// <param name="size">Size to be unmapped</param>
        private void RemoveMapping(ulong va, ulong size)
        {
            int pages = GetPagesCount(va, size, out _);
            ulong pageStart = va >> PageBits;
            ulong pageEnd = pageStart + (ulong)pages;

            GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);

            // Invert the masks: here we clear bits inside the range instead of
            // selecting them, so keep-masks are the complement of the edge masks.
            startMask = ~startMask;
            endMask = ~endMask;

            ulong mask = startMask;

            while (pageIndex <= pageEndIndex)
            {
                if (pageIndex == pageEndIndex)
                {
                    mask |= endMask;
                }

                ref ulong pageRef = ref _pageTable[pageIndex++];
                ulong pte;

                // Atomically clear every 2-bit slot inside the range.
                do
                {
                    pte = Volatile.Read(ref pageRef);
                }
                while (Interlocked.CompareExchange(ref pageRef, pte & mask, pte) != pte);

                // Middle entries are fully cleared.
                mask = 0;
            }
        }
        /// <summary>
        /// Disposes of resources used by the memory manager.
        /// </summary>
        protected override void Destroy()
        {
            // Dispose the memory blocks before the exception handler that guards them.
            _addressSpaceMirror.Dispose();
            _addressSpace.Dispose();
            _memoryEh.Dispose();
        }
  557. private void ThrowInvalidMemoryRegionException(string message) => throw new InvalidMemoryRegionException(message);
  558. }
  559. }