// HvMemoryManager.cs
  1. using ARMeilleure.Memory;
  2. using Ryujinx.Cpu.Tracking;
  3. using Ryujinx.Memory;
  4. using Ryujinx.Memory.Range;
  5. using Ryujinx.Memory.Tracking;
  6. using System;
  7. using System.Collections.Generic;
  8. using System.Linq;
  9. using System.Runtime.CompilerServices;
  10. using System.Runtime.InteropServices;
  11. using System.Threading;
namespace Ryujinx.Cpu.AppleHv
{
    /// <summary>
    /// Represents a CPU memory manager which maps guest virtual memory directly onto the Hypervisor page table.
    /// </summary>
    public class HvMemoryManager : MemoryManagerBase, IMemoryManager, IVirtualMemoryManagerTracked, IWritableBlock
    {
        public const int PageBits = 12;
        public const int PageSize = 1 << PageBits; // 4 KB pages.
        public const int PageMask = PageSize - 1;

        public const int PageToPteShift = 5; // 32 pages (2 bits each) in one ulong page table entry.
        public const ulong BlockMappedMask = 0x5555555555555555; // First bit of each table entry set.

        // 2-bit state stored per page in the software bitmap (_pageBitmap).
        // The "Replicated" values repeat one state across all 32 slots of a ulong entry.
        private enum HostMappedPtBits : ulong
        {
            Unmapped = 0,
            Mapped,
            WriteTracked,
            ReadWriteTracked,

            MappedReplicated = 0x5555555555555555,
            WriteTrackedReplicated = 0xaaaaaaaaaaaaaaaa,
            ReadWriteTrackedReplicated = ulong.MaxValue
        }

        private readonly InvalidAccessHandler _invalidAccessHandler;

        private readonly ulong _addressSpaceSize;

        private readonly HvAddressSpace _addressSpace;
        internal HvAddressSpace AddressSpace => _addressSpace;

        private readonly MemoryBlock _backingMemory;
        private readonly PageTable<ulong> _pageTable;

        // Software page-state bitmap: 2 bits per page, 32 pages per ulong entry.
        private readonly ulong[] _pageBitmap;

        public bool Supports4KBPages => true;

        public int AddressSpaceBits { get; }

        public IntPtr PageTablePointer => IntPtr.Zero;

        public MemoryManagerType Type => MemoryManagerType.SoftwarePageTable;

        public MemoryTracking Tracking { get; }

        public event Action<ulong, ulong> UnmapEvent;
        /// <summary>
        /// Creates a new instance of the Hypervisor memory manager.
        /// </summary>
        /// <param name="backingMemory">Physical backing memory where virtual memory will be mapped to</param>
        /// <param name="addressSpaceSize">Size of the address space</param>
        /// <param name="invalidAccessHandler">Optional function to handle invalid memory accesses</param>
        public HvMemoryManager(MemoryBlock backingMemory, ulong addressSpaceSize, InvalidAccessHandler invalidAccessHandler = null)
        {
            _backingMemory = backingMemory;
            _pageTable = new PageTable<ulong>();
            _invalidAccessHandler = invalidAccessHandler;
            _addressSpaceSize = addressSpaceSize;

            // Round the address space size up to the next power of two.
            ulong asSize = PageSize;
            int asBits = PageBits;

            while (asSize < addressSpaceSize)
            {
                asSize <<= 1;
                asBits++;
            }

            _addressSpace = new HvAddressSpace(backingMemory, asSize);

            AddressSpaceBits = asBits;

            // One 2-bit state slot per page, 32 slots packed into each ulong entry.
            _pageBitmap = new ulong[1 << (AddressSpaceBits - (PageBits + PageToPteShift))];
            Tracking = new MemoryTracking(this, PageSize, invalidAccessHandler);
        }
  71. /// <summary>
  72. /// Checks if the virtual address is part of the addressable space.
  73. /// </summary>
  74. /// <param name="va">Virtual address</param>
  75. /// <returns>True if the virtual address is part of the addressable space</returns>
  76. private bool ValidateAddress(ulong va)
  77. {
  78. return va < _addressSpaceSize;
  79. }
  80. /// <summary>
  81. /// Checks if the combination of virtual address and size is part of the addressable space.
  82. /// </summary>
  83. /// <param name="va">Virtual address of the range</param>
  84. /// <param name="size">Size of the range in bytes</param>
  85. /// <returns>True if the combination of virtual address and size is part of the addressable space</returns>
  86. private bool ValidateAddressAndSize(ulong va, ulong size)
  87. {
  88. ulong endVa = va + size;
  89. return endVa >= va && endVa >= size && endVa <= _addressSpaceSize;
  90. }
  91. /// <summary>
  92. /// Ensures the combination of virtual address and size is part of the addressable space.
  93. /// </summary>
  94. /// <param name="va">Virtual address of the range</param>
  95. /// <param name="size">Size of the range in bytes</param>
  96. /// <exception cref="InvalidMemoryRegionException">Throw when the memory region specified outside the addressable space</exception>
  97. private void AssertValidAddressAndSize(ulong va, ulong size)
  98. {
  99. if (!ValidateAddressAndSize(va, size))
  100. {
  101. throw new InvalidMemoryRegionException($"va=0x{va:X16}, size=0x{size:X16}");
  102. }
  103. }
  104. /// <summary>
  105. /// Ensures the combination of virtual address and size is part of the addressable space and fully mapped.
  106. /// </summary>
  107. /// <param name="va">Virtual address of the range</param>
  108. /// <param name="size">Size of the range in bytes</param>
  109. private void AssertMapped(ulong va, ulong size)
  110. {
  111. if (!ValidateAddressAndSize(va, size) || !IsRangeMappedImpl(va, size))
  112. {
  113. throw new InvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
  114. }
  115. }
  116. /// <inheritdoc/>
  117. public void Map(ulong va, ulong pa, ulong size, MemoryMapFlags flags)
  118. {
  119. AssertValidAddressAndSize(va, size);
  120. PtMap(va, pa, size);
  121. _addressSpace.MapUser(va, pa, size, MemoryPermission.ReadWriteExecute);
  122. AddMapping(va, size);
  123. Tracking.Map(va, size);
  124. }
  125. private void PtMap(ulong va, ulong pa, ulong size)
  126. {
  127. while (size != 0)
  128. {
  129. _pageTable.Map(va, pa);
  130. va += PageSize;
  131. pa += PageSize;
  132. size -= PageSize;
  133. }
  134. }
  135. /// <inheritdoc/>
  136. public void MapForeign(ulong va, nuint hostPointer, ulong size)
  137. {
  138. throw new NotSupportedException();
  139. }
  140. /// <inheritdoc/>
  141. public void Unmap(ulong va, ulong size)
  142. {
  143. AssertValidAddressAndSize(va, size);
  144. UnmapEvent?.Invoke(va, size);
  145. Tracking.Unmap(va, size);
  146. RemoveMapping(va, size);
  147. _addressSpace.UnmapUser(va, size);
  148. PtUnmap(va, size);
  149. }
  150. private void PtUnmap(ulong va, ulong size)
  151. {
  152. while (size != 0)
  153. {
  154. _pageTable.Unmap(va);
  155. va += PageSize;
  156. size -= PageSize;
  157. }
  158. }
  159. /// <inheritdoc/>
  160. public T Read<T>(ulong va) where T : unmanaged
  161. {
  162. return MemoryMarshal.Cast<byte, T>(GetSpan(va, Unsafe.SizeOf<T>()))[0];
  163. }
  164. /// <inheritdoc/>
  165. public T ReadTracked<T>(ulong va) where T : unmanaged
  166. {
  167. try
  168. {
  169. SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), false);
  170. return Read<T>(va);
  171. }
  172. catch (InvalidMemoryRegionException)
  173. {
  174. if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
  175. {
  176. throw;
  177. }
  178. return default;
  179. }
  180. }
  181. /// <inheritdoc/>
  182. public void Read(ulong va, Span<byte> data)
  183. {
  184. ReadImpl(va, data);
  185. }
  186. /// <inheritdoc/>
  187. public void Write<T>(ulong va, T value) where T : unmanaged
  188. {
  189. Write(va, MemoryMarshal.Cast<T, byte>(MemoryMarshal.CreateSpan(ref value, 1)));
  190. }
  191. /// <inheritdoc/>
  192. public void Write(ulong va, ReadOnlySpan<byte> data)
  193. {
  194. if (data.Length == 0)
  195. {
  196. return;
  197. }
  198. SignalMemoryTracking(va, (ulong)data.Length, true);
  199. WriteImpl(va, data);
  200. }
  201. /// <inheritdoc/>
  202. public void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
  203. {
  204. if (data.Length == 0)
  205. {
  206. return;
  207. }
  208. WriteImpl(va, data);
  209. }
  210. /// <inheritdoc/>
  211. public bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
  212. {
  213. if (data.Length == 0)
  214. {
  215. return false;
  216. }
  217. SignalMemoryTracking(va, (ulong)data.Length, false);
  218. if (IsContiguousAndMapped(va, data.Length))
  219. {
  220. var target = _backingMemory.GetSpan(GetPhysicalAddressInternal(va), data.Length);
  221. bool changed = !data.SequenceEqual(target);
  222. if (changed)
  223. {
  224. data.CopyTo(target);
  225. }
  226. return changed;
  227. }
  228. else
  229. {
  230. WriteImpl(va, data);
  231. return true;
  232. }
  233. }
        // Writes data to backing memory, splitting the copy per page when the
        // destination is not physically contiguous.
        private void WriteImpl(ulong va, ReadOnlySpan<byte> data)
        {
            try
            {
                AssertValidAddressAndSize(va, (ulong)data.Length);

                if (IsContiguousAndMapped(va, data.Length))
                {
                    // Fast path: the whole destination is one contiguous physical range.
                    data.CopyTo(_backingMemory.GetSpan(GetPhysicalAddressInternal(va), data.Length));
                }
                else
                {
                    int offset = 0, size;

                    // Copy the leading bytes up to the first page boundary.
                    if ((va & PageMask) != 0)
                    {
                        ulong pa = GetPhysicalAddressChecked(va);

                        size = Math.Min(data.Length, PageSize - (int)(va & PageMask));

                        data.Slice(0, size).CopyTo(_backingMemory.GetSpan(pa, size));

                        offset += size;
                    }

                    // Copy the rest one page (or tail fragment) at a time.
                    for (; offset < data.Length; offset += size)
                    {
                        ulong pa = GetPhysicalAddressChecked(va + (ulong)offset);

                        size = Math.Min(data.Length - offset, PageSize);

                        data.Slice(offset, size).CopyTo(_backingMemory.GetSpan(pa, size));
                    }
                }
            }
            catch (InvalidMemoryRegionException)
            {
                // The handler may swallow invalid accesses; rethrow when it doesn't.
                if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
                {
                    throw;
                }
            }
        }
  269. /// <inheritdoc/>
  270. public ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
  271. {
  272. if (size == 0)
  273. {
  274. return ReadOnlySpan<byte>.Empty;
  275. }
  276. if (tracked)
  277. {
  278. SignalMemoryTracking(va, (ulong)size, false);
  279. }
  280. if (IsContiguousAndMapped(va, size))
  281. {
  282. return _backingMemory.GetSpan(GetPhysicalAddressInternal(va), size);
  283. }
  284. else
  285. {
  286. Span<byte> data = new byte[size];
  287. ReadImpl(va, data);
  288. return data;
  289. }
  290. }
  291. /// <inheritdoc/>
  292. public WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
  293. {
  294. if (size == 0)
  295. {
  296. return new WritableRegion(null, va, Memory<byte>.Empty);
  297. }
  298. if (tracked)
  299. {
  300. SignalMemoryTracking(va, (ulong)size, true);
  301. }
  302. if (IsContiguousAndMapped(va, size))
  303. {
  304. return new WritableRegion(null, va, _backingMemory.GetMemory(GetPhysicalAddressInternal(va), size));
  305. }
  306. else
  307. {
  308. Memory<byte> memory = new byte[size];
  309. ReadImpl(va, memory.Span);
  310. return new WritableRegion(this, va, memory);
  311. }
  312. }
        /// <inheritdoc/>
        public ref T GetRef<T>(ulong va) where T : unmanaged
        {
            // A ref can only alias backing memory if the value does not straddle
            // non-adjacent physical pages.
            if (!IsContiguous(va, Unsafe.SizeOf<T>()))
            {
                ThrowMemoryNotContiguous();
            }

            // A returned ref may be written through, so signal as a write.
            SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), true);

            return ref _backingMemory.GetRef<T>(GetPhysicalAddressChecked(va));
        }

        /// <inheritdoc/>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public bool IsMapped(ulong va)
        {
            return ValidateAddress(va) && IsMappedImpl(va);
        }
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private bool IsMappedImpl(ulong va)
        {
            ulong page = va >> PageBits;

            // Each page owns a 2-bit state slot; 32 slots per bitmap entry.
            int bit = (int)((page & 31) << 1);

            int pageIndex = (int)(page >> PageToPteShift);
            ref ulong pageRef = ref _pageBitmap[pageIndex];

            ulong pte = Volatile.Read(ref pageRef);

            // Any non-zero state (Mapped/WriteTracked/ReadWriteTracked) counts as mapped.
            return ((pte >> bit) & 3) != 0;
        }

        /// <inheritdoc/>
        public bool IsRangeMapped(ulong va, ulong size)
        {
            AssertValidAddressAndSize(va, size);

            return IsRangeMappedImpl(va, size);
        }

        // Computes the bitmap entry index range and first/last-entry bit masks for
        // pages [pageStart, pageEnd). When (pageEnd & 31) == 0 the end shift count is
        // 64, which C# masks to 0 for 64-bit shifts, leaving endMask all ones (the
        // entire last entry) — that is the intended result for an aligned end.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private void GetPageBlockRange(ulong pageStart, ulong pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex)
        {
            startMask = ulong.MaxValue << ((int)(pageStart & 31) << 1);
            endMask = ulong.MaxValue >> (64 - ((int)(pageEnd & 31) << 1));

            pageIndex = (int)(pageStart >> PageToPteShift);
            pageEndIndex = (int)((pageEnd - 1) >> PageToPteShift);
        }
        // Bitmap walk that returns true only if every page in the range has a
        // non-zero 2-bit state.
        private bool IsRangeMappedImpl(ulong va, ulong size)
        {
            int pages = GetPagesCount(va, size, out _);

            if (pages == 1)
            {
                return IsMappedImpl(va);
            }

            ulong pageStart = va >> PageBits;
            ulong pageEnd = pageStart + (ulong)pages;

            GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);

            // Check if either bit in each 2 bit page entry is set.
            // OR the block with itself shifted down by 1, and check the first bit of each entry.

            ulong mask = BlockMappedMask & startMask;

            while (pageIndex <= pageEndIndex)
            {
                if (pageIndex == pageEndIndex)
                {
                    // Last entry: only test the slots inside the range.
                    mask &= endMask;
                }

                ref ulong pageRef = ref _pageBitmap[pageIndex++];
                ulong pte = Volatile.Read(ref pageRef);

                pte |= pte >> 1;
                if ((pte & mask) != mask)
                {
                    return false;
                }

                // Middle entries are tested in full.
                mask = BlockMappedMask;
            }

            return true;
        }
        // Throw helper kept out of line from the hot callers.
        private static void ThrowMemoryNotContiguous() => throw new MemoryNotContiguousException();

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private bool IsContiguousAndMapped(ulong va, int size) => IsContiguous(va, size) && IsMapped(va);

        // True when the pages covering [va, va + size) translate to consecutive
        // physical addresses.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private bool IsContiguous(ulong va, int size)
        {
            if (!ValidateAddress(va) || !ValidateAddressAndSize(va, (ulong)size))
            {
                return false;
            }

            // va is page-aligned from here on (out parameter of GetPagesCount).
            int pages = GetPagesCount(va, (uint)size, out va);

            for (int page = 0; page < pages - 1; page++)
            {
                if (!ValidateAddress(va + PageSize))
                {
                    return false;
                }

                // Adjacent virtual pages must map to adjacent physical pages.
                if (GetPhysicalAddressInternal(va) + PageSize != GetPhysicalAddressInternal(va + PageSize))
                {
                    return false;
                }

                va += PageSize;
            }

            return true;
        }
  408. /// <inheritdoc/>
  409. public IEnumerable<HostMemoryRange> GetHostRegions(ulong va, ulong size)
  410. {
  411. if (size == 0)
  412. {
  413. return Enumerable.Empty<HostMemoryRange>();
  414. }
  415. var guestRegions = GetPhysicalRegionsImpl(va, size);
  416. if (guestRegions == null)
  417. {
  418. return null;
  419. }
  420. var regions = new HostMemoryRange[guestRegions.Count];
  421. for (int i = 0; i < regions.Length; i++)
  422. {
  423. var guestRegion = guestRegions[i];
  424. IntPtr pointer = _backingMemory.GetPointer(guestRegion.Address, guestRegion.Size);
  425. regions[i] = new HostMemoryRange((nuint)(ulong)pointer, guestRegion.Size);
  426. }
  427. return regions;
  428. }
  429. /// <inheritdoc/>
  430. public IEnumerable<MemoryRange> GetPhysicalRegions(ulong va, ulong size)
  431. {
  432. if (size == 0)
  433. {
  434. return Enumerable.Empty<MemoryRange>();
  435. }
  436. return GetPhysicalRegionsImpl(va, size);
  437. }
        // Collects the physical regions backing [va, va + size), merging physically
        // adjacent pages into a single MemoryRange. Returns null if any part of the
        // range is invalid.
        private List<MemoryRange> GetPhysicalRegionsImpl(ulong va, ulong size)
        {
            if (!ValidateAddress(va) || !ValidateAddressAndSize(va, size))
            {
                return null;
            }

            int pages = GetPagesCount(va, (uint)size, out va);

            var regions = new List<MemoryRange>();

            ulong regionStart = GetPhysicalAddressInternal(va);
            ulong regionSize = PageSize;

            for (int page = 0; page < pages - 1; page++)
            {
                if (!ValidateAddress(va + PageSize))
                {
                    return null;
                }

                ulong newPa = GetPhysicalAddressInternal(va + PageSize);

                // Physical discontinuity: close the current region and start a new one.
                // regionSize is reset to 0 here because the shared += below counts the
                // new region's first page.
                if (GetPhysicalAddressInternal(va) + PageSize != newPa)
                {
                    regions.Add(new MemoryRange(regionStart, regionSize));
                    regionStart = newPa;
                    regionSize = 0;
                }

                va += PageSize;
                regionSize += PageSize;
            }

            regions.Add(new MemoryRange(regionStart, regionSize));

            return regions;
        }
        // Reads backing memory into data, splitting the copy per page; mirrors the
        // slow path of WriteImpl.
        private void ReadImpl(ulong va, Span<byte> data)
        {
            if (data.Length == 0)
            {
                return;
            }

            try
            {
                AssertValidAddressAndSize(va, (ulong)data.Length);

                int offset = 0, size;

                // Copy the leading bytes up to the first page boundary.
                if ((va & PageMask) != 0)
                {
                    ulong pa = GetPhysicalAddressChecked(va);

                    size = Math.Min(data.Length, PageSize - (int)(va & PageMask));

                    _backingMemory.GetSpan(pa, size).CopyTo(data.Slice(0, size));

                    offset += size;
                }

                // Copy the rest one page (or tail fragment) at a time.
                for (; offset < data.Length; offset += size)
                {
                    ulong pa = GetPhysicalAddressChecked(va + (ulong)offset);

                    size = Math.Min(data.Length - offset, PageSize);

                    _backingMemory.GetSpan(pa, size).CopyTo(data.Slice(offset, size));
                }
            }
            catch (InvalidMemoryRegionException)
            {
                // The handler may swallow invalid accesses; rethrow when it doesn't.
                if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
                {
                    throw;
                }
            }
        }
        /// <inheritdoc/>
        /// <remarks>
        /// This function also validates that the given range is both valid and mapped, and will throw if it is not.
        /// </remarks>
        public void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null)
        {
            AssertValidAddressAndSize(va, size);

            if (precise)
            {
                // Precise events bypass the bitmap filter and always reach tracking.
                Tracking.VirtualMemoryEvent(va, size, write, precise: true, exemptId);
                return;
            }

            // Software table, used for managed memory tracking.

            int pages = GetPagesCount(va, size, out _);
            ulong pageStart = va >> PageBits;

            if (pages == 1)
            {
                // Minimum state that triggers an event for this access kind:
                // writes fire at WriteTracked and above, reads only at ReadWriteTracked.
                ulong tag = (ulong)(write ? HostMappedPtBits.WriteTracked : HostMappedPtBits.ReadWriteTracked);

                int bit = (int)((pageStart & 31) << 1);

                int pageIndex = (int)(pageStart >> PageToPteShift);
                ref ulong pageRef = ref _pageBitmap[pageIndex];

                ulong pte = Volatile.Read(ref pageRef);
                ulong state = ((pte >> bit) & 3);

                if (state >= tag)
                {
                    Tracking.VirtualMemoryEvent(va, size, write, precise: false, exemptId);
                    return;
                }
                else if (state == 0)
                {
                    ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
                }
            }
            else
            {
                ulong pageEnd = pageStart + (ulong)pages;

                GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);

                ulong mask = startMask;

                // High bit of each 2-bit slot set: matches WriteTracked and ReadWriteTracked.
                ulong anyTrackingTag = (ulong)HostMappedPtBits.WriteTrackedReplicated;

                while (pageIndex <= pageEndIndex)
                {
                    if (pageIndex == pageEndIndex)
                    {
                        mask &= endMask;
                    }

                    ref ulong pageRef = ref _pageBitmap[pageIndex++];

                    ulong pte = Volatile.Read(ref pageRef);
                    ulong mappedMask = mask & BlockMappedMask;

                    // Every page in the range must be mapped (either state bit set).
                    ulong mappedPte = pte | (pte >> 1);
                    if ((mappedPte & mappedMask) != mappedMask)
                    {
                        ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
                    }

                    pte &= mask;
                    if ((pte & anyTrackingTag) != 0) // Search for any tracking.
                    {
                        // Writes trigger any tracking.
                        // Only trigger tracking from reads if both bits are set on any page.
                        if (write || (pte & (pte >> 1) & BlockMappedMask) != 0)
                        {
                            Tracking.VirtualMemoryEvent(va, size, write, precise: false, exemptId);
                            break;
                        }
                    }

                    mask = ulong.MaxValue;
                }
            }
        }
        /// <summary>
        /// Computes the number of pages in a virtual address range.
        /// </summary>
        /// <param name="va">Virtual address of the range</param>
        /// <param name="size">Size of the range</param>
        /// <param name="startVa">The virtual address of the beginning of the first page</param>
        /// <remarks>This function does not differentiate between allocated and unallocated pages.</remarks>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int GetPagesCount(ulong va, ulong size, out ulong startVa)
        {
            // WARNING: Always check if ulong does not overflow during the operations.
            startVa = va & ~(ulong)PageMask;

            // Page-align the span covering [va, va + size) before dividing by the page size.
            ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;

            return (int)(vaSpan / PageSize);
        }
        /// <inheritdoc/>
        public void TrackingReprotect(ulong va, ulong size, MemoryPermission protection)
        {
            // Protection is inverted on software pages, since the default value is 0.
            protection = (~protection) & MemoryPermission.ReadAndWrite;

            int pages = GetPagesCount(va, size, out va);
            ulong pageStart = va >> PageBits;

            if (pages == 1)
            {
                ulong protTag = protection switch
                {
                    MemoryPermission.None => (ulong)HostMappedPtBits.Mapped,
                    MemoryPermission.Write => (ulong)HostMappedPtBits.WriteTracked,
                    _ => (ulong)HostMappedPtBits.ReadWriteTracked,
                };

                int bit = (int)((pageStart & 31) << 1);

                ulong tagMask = 3UL << bit;
                ulong invTagMask = ~tagMask;

                ulong tag = protTag << bit;

                int pageIndex = (int)(pageStart >> PageToPteShift);
                ref ulong pageRef = ref _pageBitmap[pageIndex];

                ulong pte;

                // CAS retry loop; only write while the page is mapped (tag bits non-zero),
                // so an unmapped page's state is left untouched.
                do
                {
                    pte = Volatile.Read(ref pageRef);
                }
                while ((pte & tagMask) != 0 && Interlocked.CompareExchange(ref pageRef, (pte & invTagMask) | tag, pte) != pte);
            }
            else
            {
                ulong pageEnd = pageStart + (ulong)pages;

                GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);

                ulong mask = startMask;

                ulong protTag = protection switch
                {
                    MemoryPermission.None => (ulong)HostMappedPtBits.MappedReplicated,
                    MemoryPermission.Write => (ulong)HostMappedPtBits.WriteTrackedReplicated,
                    _ => (ulong)HostMappedPtBits.ReadWriteTrackedReplicated,
                };

                while (pageIndex <= pageEndIndex)
                {
                    if (pageIndex == pageEndIndex)
                    {
                        mask &= endMask;
                    }

                    ref ulong pageRef = ref _pageBitmap[pageIndex++];

                    ulong pte;
                    ulong mappedMask;

                    // Change the protection of all 2 bit entries that are mapped.
                    do
                    {
                        pte = Volatile.Read(ref pageRef);

                        mappedMask = pte | (pte >> 1);
                        mappedMask |= (mappedMask & BlockMappedMask) << 1;
                        mappedMask &= mask; // Only update mapped pages within the given range.
                    }
                    while (Interlocked.CompareExchange(ref pageRef, (pte & (~mappedMask)) | (protTag & mappedMask), pte) != pte);

                    mask = ulong.MaxValue;
                }
            }

            // Translate the inverted software state back into the hardware permission.
            protection = protection switch
            {
                MemoryPermission.None => MemoryPermission.ReadAndWrite,
                MemoryPermission.Write => MemoryPermission.Read,
                _ => MemoryPermission.None
            };

            _addressSpace.ReprotectUser(va, size, protection);
        }
  650. /// <inheritdoc/>
  651. public CpuRegionHandle BeginTracking(ulong address, ulong size, int id)
  652. {
  653. return new CpuRegionHandle(Tracking.BeginTracking(address, size, id));
  654. }
  655. /// <inheritdoc/>
  656. public CpuMultiRegionHandle BeginGranularTracking(ulong address, ulong size, IEnumerable<IRegionHandle> handles, ulong granularity, int id)
  657. {
  658. return new CpuMultiRegionHandle(Tracking.BeginGranularTracking(address, size, handles, granularity, id));
  659. }
  660. /// <inheritdoc/>
  661. public CpuSmartMultiRegionHandle BeginSmartGranularTracking(ulong address, ulong size, ulong granularity, int id)
  662. {
  663. return new CpuSmartMultiRegionHandle(Tracking.BeginSmartGranularTracking(address, size, granularity, id));
  664. }
        /// <summary>
        /// Adds the given address mapping to the page table.
        /// </summary>
        /// <param name="va">Virtual memory address</param>
        /// <param name="size">Size to be mapped</param>
        private void AddMapping(ulong va, ulong size)
        {
            int pages = GetPagesCount(va, size, out _);
            ulong pageStart = va >> PageBits;
            ulong pageEnd = pageStart + (ulong)pages;

            GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);

            ulong mask = startMask;

            while (pageIndex <= pageEndIndex)
            {
                if (pageIndex == pageEndIndex)
                {
                    mask &= endMask;
                }

                ref ulong pageRef = ref _pageBitmap[pageIndex++];

                ulong pte;
                ulong mappedMask;

                // Map all 2-bit entries that are unmapped.
                do
                {
                    pte = Volatile.Read(ref pageRef);

                    mappedMask = pte | (pte >> 1);
                    mappedMask |= (mappedMask & BlockMappedMask) << 1;
                    mappedMask |= ~mask; // Treat everything outside the range as mapped, thus unchanged.
                }
                while (Interlocked.CompareExchange(ref pageRef, (pte & mappedMask) | (BlockMappedMask & (~mappedMask)), pte) != pte);

                mask = ulong.MaxValue;
            }
        }
        /// <summary>
        /// Removes the given address mapping from the page table.
        /// </summary>
        /// <param name="va">Virtual memory address</param>
        /// <param name="size">Size to be unmapped</param>
        private void RemoveMapping(ulong va, ulong size)
        {
            int pages = GetPagesCount(va, size, out _);
            ulong pageStart = va >> PageBits;
            ulong pageEnd = pageStart + (ulong)pages;

            GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);

            // Inverted masks: bits inside the range are 0 so ANDing clears them.
            startMask = ~startMask;
            endMask = ~endMask;

            ulong mask = startMask;

            while (pageIndex <= pageEndIndex)
            {
                if (pageIndex == pageEndIndex)
                {
                    mask |= endMask;
                }

                ref ulong pageRef = ref _pageBitmap[pageIndex++];
                ulong pte;

                // CAS retry loop clearing the in-range state bits.
                do
                {
                    pte = Volatile.Read(ref pageRef);
                }
                while (Interlocked.CompareExchange(ref pageRef, pte & mask, pte) != pte);

                // Middle entries are cleared entirely.
                mask = 0;
            }
        }
  728. private ulong GetPhysicalAddressChecked(ulong va)
  729. {
  730. if (!IsMapped(va))
  731. {
  732. ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}");
  733. }
  734. return GetPhysicalAddressInternal(va);
  735. }
  736. private ulong GetPhysicalAddressInternal(ulong va)
  737. {
  738. return _pageTable.Read(va) + (va & PageMask);
  739. }
  740. /// <summary>
  741. /// Disposes of resources used by the memory manager.
  742. /// </summary>
  743. protected override void Destroy()
  744. {
  745. _addressSpace.Dispose();
  746. }
  747. private static void ThrowInvalidMemoryRegionException(string message) => throw new InvalidMemoryRegionException(message);
  748. }
  749. }