// MemoryManager.cs
using Ryujinx.Memory;
using Ryujinx.Memory.Range;
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
  7. namespace Ryujinx.Graphics.Gpu.Memory
  8. {
  9. /// <summary>
  10. /// GPU memory manager.
  11. /// </summary>
  12. public class MemoryManager : IWritableBlock
  13. {
  14. private const int PtLvl0Bits = 14;
  15. private const int PtLvl1Bits = 14;
  16. public const int PtPageBits = 12;
  17. private const ulong PtLvl0Size = 1UL << PtLvl0Bits;
  18. private const ulong PtLvl1Size = 1UL << PtLvl1Bits;
  19. public const ulong PageSize = 1UL << PtPageBits;
  20. private const ulong PtLvl0Mask = PtLvl0Size - 1;
  21. private const ulong PtLvl1Mask = PtLvl1Size - 1;
  22. public const ulong PageMask = PageSize - 1;
  23. private const int PtLvl0Bit = PtPageBits + PtLvl1Bits;
  24. private const int PtLvl1Bit = PtPageBits;
  25. private const int AddressSpaceBits = PtPageBits + PtLvl1Bits + PtLvl0Bits;
  26. public const ulong PteUnmapped = ulong.MaxValue;
  27. private readonly ulong[][] _pageTable;
  28. public event EventHandler<UnmapEventArgs> MemoryUnmapped;
  29. /// <summary>
  30. /// Physical memory where the virtual memory is mapped into.
  31. /// </summary>
  32. internal PhysicalMemory Physical { get; }
  33. /// <summary>
  34. /// Cache of GPU counters.
  35. /// </summary>
  36. internal CounterCache CounterCache { get; }
  37. /// <summary>
  38. /// Creates a new instance of the GPU memory manager.
  39. /// </summary>
  40. /// <param name="physicalMemory">Physical memory that this memory manager will map into</param>
  41. internal MemoryManager(PhysicalMemory physicalMemory)
  42. {
  43. Physical = physicalMemory;
  44. CounterCache = new CounterCache();
  45. _pageTable = new ulong[PtLvl0Size][];
  46. MemoryUnmapped += Physical.TextureCache.MemoryUnmappedHandler;
  47. MemoryUnmapped += Physical.BufferCache.MemoryUnmappedHandler;
  48. MemoryUnmapped += CounterCache.MemoryUnmappedHandler;
  49. }
  50. /// <summary>
  51. /// Reads data from GPU mapped memory.
  52. /// </summary>
  53. /// <typeparam name="T">Type of the data</typeparam>
  54. /// <param name="va">GPU virtual address where the data is located</param>
  55. /// <param name="tracked">True if read tracking is triggered on the memory region</param>
  56. /// <returns>The data at the specified memory location</returns>
  57. public T Read<T>(ulong va, bool tracked = false) where T : unmanaged
  58. {
  59. int size = Unsafe.SizeOf<T>();
  60. if (IsContiguous(va, size))
  61. {
  62. ulong address = Translate(va);
  63. if (tracked)
  64. {
  65. return Physical.ReadTracked<T>(address);
  66. }
  67. else
  68. {
  69. return Physical.Read<T>(address);
  70. }
  71. }
  72. else
  73. {
  74. Span<byte> data = new byte[size];
  75. ReadImpl(va, data, tracked);
  76. return MemoryMarshal.Cast<byte, T>(data)[0];
  77. }
  78. }
  79. /// <summary>
  80. /// Gets a read-only span of data from GPU mapped memory.
  81. /// </summary>
  82. /// <param name="va">GPU virtual address where the data is located</param>
  83. /// <param name="size">Size of the data</param>
  84. /// <param name="tracked">True if read tracking is triggered on the span</param>
  85. /// <returns>The span of the data at the specified memory location</returns>
  86. public ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
  87. {
  88. if (IsContiguous(va, size))
  89. {
  90. return Physical.GetSpan(Translate(va), size, tracked);
  91. }
  92. else
  93. {
  94. Span<byte> data = new byte[size];
  95. ReadImpl(va, data, tracked);
  96. return data;
  97. }
  98. }
  99. /// <summary>
  100. /// Reads data from a possibly non-contiguous region of GPU mapped memory.
  101. /// </summary>
  102. /// <param name="va">GPU virtual address of the data</param>
  103. /// <param name="data">Span to write the read data into</param>
  104. /// <param name="tracked">True to enable write tracking on read, false otherwise</param>
  105. private void ReadImpl(ulong va, Span<byte> data, bool tracked)
  106. {
  107. if (data.Length == 0)
  108. {
  109. return;
  110. }
  111. int offset = 0, size;
  112. if ((va & PageMask) != 0)
  113. {
  114. ulong pa = Translate(va);
  115. size = Math.Min(data.Length, (int)PageSize - (int)(va & PageMask));
  116. Physical.GetSpan(pa, size, tracked).CopyTo(data.Slice(0, size));
  117. offset += size;
  118. }
  119. for (; offset < data.Length; offset += size)
  120. {
  121. ulong pa = Translate(va + (ulong)offset);
  122. size = Math.Min(data.Length - offset, (int)PageSize);
  123. Physical.GetSpan(pa, size, tracked).CopyTo(data.Slice(offset, size));
  124. }
  125. }
  126. /// <summary>
  127. /// Gets a writable region from GPU mapped memory.
  128. /// </summary>
  129. /// <param name="va">Start address of the range</param>
  130. /// <param name="size">Size in bytes to be range</param>
  131. /// <param name="tracked">True if write tracking is triggered on the span</param>
  132. /// <returns>A writable region with the data at the specified memory location</returns>
  133. public WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
  134. {
  135. if (IsContiguous(va, size))
  136. {
  137. return Physical.GetWritableRegion(Translate(va), size, tracked);
  138. }
  139. else
  140. {
  141. Memory<byte> memory = new byte[size];
  142. GetSpan(va, size).CopyTo(memory.Span);
  143. return new WritableRegion(this, va, memory, tracked);
  144. }
  145. }
  146. /// <summary>
  147. /// Writes data to GPU mapped memory.
  148. /// </summary>
  149. /// <typeparam name="T">Type of the data</typeparam>
  150. /// <param name="va">GPU virtual address to write the value into</param>
  151. /// <param name="value">The value to be written</param>
  152. public void Write<T>(ulong va, T value) where T : unmanaged
  153. {
  154. Write(va, MemoryMarshal.Cast<T, byte>(MemoryMarshal.CreateSpan(ref value, 1)));
  155. }
  156. /// <summary>
  157. /// Writes data to GPU mapped memory.
  158. /// </summary>
  159. /// <param name="va">GPU virtual address to write the data into</param>
  160. /// <param name="data">The data to be written</param>
  161. public void Write(ulong va, ReadOnlySpan<byte> data)
  162. {
  163. WriteImpl(va, data, Physical.Write);
  164. }
  165. /// <summary>
  166. /// Writes data to GPU mapped memory, destined for a tracked resource.
  167. /// </summary>
  168. /// <param name="va">GPU virtual address to write the data into</param>
  169. /// <param name="data">The data to be written</param>
  170. public void WriteTrackedResource(ulong va, ReadOnlySpan<byte> data)
  171. {
  172. WriteImpl(va, data, Physical.WriteTrackedResource);
  173. }
  174. /// <summary>
  175. /// Writes data to GPU mapped memory without write tracking.
  176. /// </summary>
  177. /// <param name="va">GPU virtual address to write the data into</param>
  178. /// <param name="data">The data to be written</param>
  179. public void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
  180. {
  181. WriteImpl(va, data, Physical.WriteUntracked);
  182. }
  183. private delegate void WriteCallback(ulong address, ReadOnlySpan<byte> data);
  184. /// <summary>
  185. /// Writes data to possibly non-contiguous GPU mapped memory.
  186. /// </summary>
  187. /// <param name="va">GPU virtual address of the region to write into</param>
  188. /// <param name="data">Data to be written</param>
  189. /// <param name="writeCallback">Write callback</param>
  190. private void WriteImpl(ulong va, ReadOnlySpan<byte> data, WriteCallback writeCallback)
  191. {
  192. if (IsContiguous(va, data.Length))
  193. {
  194. writeCallback(Translate(va), data);
  195. }
  196. else
  197. {
  198. int offset = 0, size;
  199. if ((va & PageMask) != 0)
  200. {
  201. ulong pa = Translate(va);
  202. size = Math.Min(data.Length, (int)PageSize - (int)(va & PageMask));
  203. writeCallback(pa, data.Slice(0, size));
  204. offset += size;
  205. }
  206. for (; offset < data.Length; offset += size)
  207. {
  208. ulong pa = Translate(va + (ulong)offset);
  209. size = Math.Min(data.Length - offset, (int)PageSize);
  210. writeCallback(pa, data.Slice(offset, size));
  211. }
  212. }
  213. }
  214. /// <summary>
  215. /// Writes data to GPU mapped memory, stopping at the first unmapped page at the memory region, if any.
  216. /// </summary>
  217. /// <param name="va">GPU virtual address to write the data into</param>
  218. /// <param name="data">The data to be written</param>
  219. public void WriteMapped(ulong va, ReadOnlySpan<byte> data)
  220. {
  221. if (IsContiguous(va, data.Length))
  222. {
  223. Physical.Write(Translate(va), data);
  224. }
  225. else
  226. {
  227. int offset = 0, size;
  228. if ((va & PageMask) != 0)
  229. {
  230. ulong pa = Translate(va);
  231. size = Math.Min(data.Length, (int)PageSize - (int)(va & PageMask));
  232. if (pa != PteUnmapped && Physical.IsMapped(pa))
  233. {
  234. Physical.Write(pa, data.Slice(0, size));
  235. }
  236. offset += size;
  237. }
  238. for (; offset < data.Length; offset += size)
  239. {
  240. ulong pa = Translate(va + (ulong)offset);
  241. size = Math.Min(data.Length - offset, (int)PageSize);
  242. if (pa != PteUnmapped && Physical.IsMapped(pa))
  243. {
  244. Physical.Write(pa, data.Slice(offset, size));
  245. }
  246. }
  247. }
  248. }
  249. /// <summary>
  250. /// Maps a given range of pages to the specified CPU virtual address.
  251. /// </summary>
  252. /// <remarks>
  253. /// All addresses and sizes must be page aligned.
  254. /// </remarks>
  255. /// <param name="pa">CPU virtual address to map into</param>
  256. /// <param name="va">GPU virtual address to be mapped</param>
  257. /// <param name="size">Size in bytes of the mapping</param>
  258. /// <param name="kind">Kind of the resource located at the mapping</param>
  259. public void Map(ulong pa, ulong va, ulong size, PteKind kind)
  260. {
  261. lock (_pageTable)
  262. {
  263. MemoryUnmapped?.Invoke(this, new UnmapEventArgs(va, size));
  264. for (ulong offset = 0; offset < size; offset += PageSize)
  265. {
  266. SetPte(va + offset, PackPte(pa + offset, kind));
  267. }
  268. }
  269. }
  270. /// <summary>
  271. /// Unmaps a given range of pages at the specified GPU virtual memory region.
  272. /// </summary>
  273. /// <param name="va">GPU virtual address to unmap</param>
  274. /// <param name="size">Size in bytes of the region being unmapped</param>
  275. public void Unmap(ulong va, ulong size)
  276. {
  277. lock (_pageTable)
  278. {
  279. // Event handlers are not expected to be thread safe.
  280. MemoryUnmapped?.Invoke(this, new UnmapEventArgs(va, size));
  281. for (ulong offset = 0; offset < size; offset += PageSize)
  282. {
  283. SetPte(va + offset, PteUnmapped);
  284. }
  285. }
  286. }
  287. /// <summary>
  288. /// Checks if a region of GPU mapped memory is contiguous.
  289. /// </summary>
  290. /// <param name="va">GPU virtual address of the region</param>
  291. /// <param name="size">Size of the region</param>
  292. /// <returns>True if the region is contiguous, false otherwise</returns>
  293. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  294. private bool IsContiguous(ulong va, int size)
  295. {
  296. if (!ValidateAddress(va) || GetPte(va) == PteUnmapped)
  297. {
  298. return false;
  299. }
  300. ulong endVa = (va + (ulong)size + PageMask) & ~PageMask;
  301. va &= ~PageMask;
  302. int pages = (int)((endVa - va) / PageSize);
  303. for (int page = 0; page < pages - 1; page++)
  304. {
  305. if (!ValidateAddress(va + PageSize) || GetPte(va + PageSize) == PteUnmapped)
  306. {
  307. return false;
  308. }
  309. if (Translate(va) + PageSize != Translate(va + PageSize))
  310. {
  311. return false;
  312. }
  313. va += PageSize;
  314. }
  315. return true;
  316. }
  317. /// <summary>
  318. /// Gets the physical regions that make up the given virtual address region.
  319. /// </summary>
  320. /// <param name="va">Virtual address of the range</param>
  321. /// <param name="size">Size of the range</param>
  322. /// <returns>Multi-range with the physical regions</returns>
  323. public MultiRange GetPhysicalRegions(ulong va, ulong size)
  324. {
  325. if (IsContiguous(va, (int)size))
  326. {
  327. return new MultiRange(Translate(va), size);
  328. }
  329. ulong regionStart = Translate(va);
  330. ulong regionSize = Math.Min(size, PageSize - (va & PageMask));
  331. ulong endVa = va + size;
  332. ulong endVaRounded = (endVa + PageMask) & ~PageMask;
  333. va &= ~PageMask;
  334. int pages = (int)((endVaRounded - va) / PageSize);
  335. var regions = new List<MemoryRange>();
  336. for (int page = 0; page < pages - 1; page++)
  337. {
  338. ulong currPa = Translate(va);
  339. ulong newPa = Translate(va + PageSize);
  340. if ((currPa != PteUnmapped || newPa != PteUnmapped) && currPa + PageSize != newPa)
  341. {
  342. regions.Add(new MemoryRange(regionStart, regionSize));
  343. regionStart = newPa;
  344. regionSize = 0;
  345. }
  346. va += PageSize;
  347. regionSize += Math.Min(endVa - va, PageSize);
  348. }
  349. regions.Add(new MemoryRange(regionStart, regionSize));
  350. return new MultiRange(regions.ToArray());
  351. }
  352. /// <summary>
  353. /// Checks if a given GPU virtual memory range is mapped to the same physical regions
  354. /// as the specified physical memory multi-range.
  355. /// </summary>
  356. /// <param name="range">Physical memory multi-range</param>
  357. /// <param name="va">GPU virtual memory address</param>
  358. /// <returns>True if the virtual memory region is mapped into the specified physical one, false otherwise</returns>
  359. public bool CompareRange(MultiRange range, ulong va)
  360. {
  361. va &= ~PageMask;
  362. for (int i = 0; i < range.Count; i++)
  363. {
  364. MemoryRange currentRange = range.GetSubRange(i);
  365. if (currentRange.Address != PteUnmapped)
  366. {
  367. ulong address = currentRange.Address & ~PageMask;
  368. ulong endAddress = (currentRange.EndAddress + PageMask) & ~PageMask;
  369. while (address < endAddress)
  370. {
  371. if (Translate(va) != address)
  372. {
  373. return false;
  374. }
  375. va += PageSize;
  376. address += PageSize;
  377. }
  378. }
  379. else
  380. {
  381. ulong endVa = va + (((currentRange.Size) + PageMask) & ~PageMask);
  382. while (va < endVa)
  383. {
  384. if (Translate(va) != PteUnmapped)
  385. {
  386. return false;
  387. }
  388. va += PageSize;
  389. }
  390. }
  391. }
  392. return true;
  393. }
  394. /// <summary>
  395. /// Validates a GPU virtual address.
  396. /// </summary>
  397. /// <param name="va">Address to validate</param>
  398. /// <returns>True if the address is valid, false otherwise</returns>
  399. private static bool ValidateAddress(ulong va)
  400. {
  401. return va < (1UL << AddressSpaceBits);
  402. }
  403. /// <summary>
  404. /// Checks if a given page is mapped.
  405. /// </summary>
  406. /// <param name="va">GPU virtual address of the page to check</param>
  407. /// <returns>True if the page is mapped, false otherwise</returns>
  408. public bool IsMapped(ulong va)
  409. {
  410. return Translate(va) != PteUnmapped;
  411. }
  412. /// <summary>
  413. /// Translates a GPU virtual address to a CPU virtual address.
  414. /// </summary>
  415. /// <param name="va">GPU virtual address to be translated</param>
  416. /// <returns>CPU virtual address, or <see cref="PteUnmapped"/> if unmapped</returns>
  417. public ulong Translate(ulong va)
  418. {
  419. if (!ValidateAddress(va))
  420. {
  421. return PteUnmapped;
  422. }
  423. ulong pte = GetPte(va);
  424. if (pte == PteUnmapped)
  425. {
  426. return PteUnmapped;
  427. }
  428. return UnpackPaFromPte(pte) + (va & PageMask);
  429. }
  430. /// <summary>
  431. /// Gets the kind of a given memory page.
  432. /// This might indicate the type of resource that can be allocated on the page, and also texture tiling.
  433. /// </summary>
  434. /// <param name="va">GPU virtual address</param>
  435. /// <returns>Kind of the memory page</returns>
  436. public PteKind GetKind(ulong va)
  437. {
  438. if (!ValidateAddress(va))
  439. {
  440. return PteKind.Invalid;
  441. }
  442. ulong pte = GetPte(va);
  443. if (pte == PteUnmapped)
  444. {
  445. return PteKind.Invalid;
  446. }
  447. return UnpackKindFromPte(pte);
  448. }
  449. /// <summary>
  450. /// Gets the Page Table entry for a given GPU virtual address.
  451. /// </summary>
  452. /// <param name="va">GPU virtual address</param>
  453. /// <returns>Page table entry (CPU virtual address)</returns>
  454. private ulong GetPte(ulong va)
  455. {
  456. ulong l0 = (va >> PtLvl0Bit) & PtLvl0Mask;
  457. ulong l1 = (va >> PtLvl1Bit) & PtLvl1Mask;
  458. if (_pageTable[l0] == null)
  459. {
  460. return PteUnmapped;
  461. }
  462. return _pageTable[l0][l1];
  463. }
  464. /// <summary>
  465. /// Sets a Page Table entry at a given GPU virtual address.
  466. /// </summary>
  467. /// <param name="va">GPU virtual address</param>
  468. /// <param name="pte">Page table entry (CPU virtual address)</param>
  469. private void SetPte(ulong va, ulong pte)
  470. {
  471. ulong l0 = (va >> PtLvl0Bit) & PtLvl0Mask;
  472. ulong l1 = (va >> PtLvl1Bit) & PtLvl1Mask;
  473. if (_pageTable[l0] == null)
  474. {
  475. _pageTable[l0] = new ulong[PtLvl1Size];
  476. for (ulong index = 0; index < PtLvl1Size; index++)
  477. {
  478. _pageTable[l0][index] = PteUnmapped;
  479. }
  480. }
  481. _pageTable[l0][l1] = pte;
  482. }
  483. /// <summary>
  484. /// Creates a page table entry from a physical address and kind.
  485. /// </summary>
  486. /// <param name="pa">Physical address</param>
  487. /// <param name="kind">Kind</param>
  488. /// <returns>Page table entry</returns>
  489. private static ulong PackPte(ulong pa, PteKind kind)
  490. {
  491. return pa | ((ulong)kind << 56);
  492. }
  493. /// <summary>
  494. /// Unpacks kind from a page table entry.
  495. /// </summary>
  496. /// <param name="pte">Page table entry</param>
  497. /// <returns>Kind</returns>
  498. private static PteKind UnpackKindFromPte(ulong pte)
  499. {
  500. return (PteKind)(pte >> 56);
  501. }
  502. /// <summary>
  503. /// Unpacks physical address from a page table entry.
  504. /// </summary>
  505. /// <param name="pte">Page table entry</param>
  506. /// <returns>Physical address</returns>
  507. private static ulong UnpackPaFromPte(ulong pte)
  508. {
  509. return pte & 0xffffffffffffffUL;
  510. }
  511. }
  512. }