// GPFifoDevice.cs
using Ryujinx.Graphics.Gpu.Memory;
using System;
using System.Collections.Concurrent;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Threading;
  7. namespace Ryujinx.Graphics.Gpu.Engine.GPFifo
  8. {
  9. /// <summary>
  10. /// Represents a GPU General Purpose FIFO device.
  11. /// </summary>
  12. public sealed class GPFifoDevice : IDisposable
  13. {
  14. /// <summary>
  15. /// Indicates if the command buffer has pre-fetch enabled.
  16. /// </summary>
  17. private enum CommandBufferType
  18. {
  19. Prefetch,
  20. NoPrefetch
  21. }
  22. /// <summary>
  23. /// Command buffer data.
  24. /// </summary>
  25. private struct CommandBuffer
  26. {
  27. /// <summary>
  28. /// Processor used to process the command buffer. Contains channel state.
  29. /// </summary>
  30. public GPFifoProcessor Processor;
  31. /// <summary>
  32. /// The type of the command buffer.
  33. /// </summary>
  34. public CommandBufferType Type;
  35. /// <summary>
  36. /// Fetched data.
  37. /// </summary>
  38. public int[] Words;
  39. /// <summary>
  40. /// The GPFIFO entry address (used in <see cref="CommandBufferType.NoPrefetch"/> mode).
  41. /// </summary>
  42. public ulong EntryAddress;
  43. /// <summary>
  44. /// The count of entries inside this GPFIFO entry.
  45. /// </summary>
  46. public uint EntryCount;
  47. /// <summary>
  48. /// Get the entries for the command buffer from memory.
  49. /// </summary>
  50. /// <param name="memoryManager">The memory manager used to fetch the data</param>
  51. /// <param name="flush">If true, flushes potential GPU written data before reading the command buffer</param>
  52. /// <returns>The fetched data</returns>
  53. private ReadOnlySpan<int> GetWords(MemoryManager memoryManager, bool flush)
  54. {
  55. return MemoryMarshal.Cast<byte, int>(memoryManager.GetSpan(EntryAddress, (int)EntryCount * 4, flush));
  56. }
  57. /// <summary>
  58. /// Prefetch the command buffer.
  59. /// </summary>
  60. /// <param name="memoryManager">The memory manager used to fetch the data</param>
  61. public void Prefetch(MemoryManager memoryManager)
  62. {
  63. Words = GetWords(memoryManager, true).ToArray();
  64. }
  65. /// <summary>
  66. /// Fetch the command buffer.
  67. /// </summary>
  68. /// <param name="memoryManager">The memory manager used to fetch the data</param>
  69. /// <param name="flush">If true, flushes potential GPU written data before reading the command buffer</param>
  70. /// <returns>The command buffer words</returns>
  71. public ReadOnlySpan<int> Fetch(MemoryManager memoryManager, bool flush)
  72. {
  73. return Words ?? GetWords(memoryManager, flush);
  74. }
  75. }
  76. private readonly ConcurrentQueue<CommandBuffer> _commandBufferQueue;
  77. private CommandBuffer _currentCommandBuffer;
  78. private GPFifoProcessor _prevChannelProcessor;
  79. private readonly bool _ibEnable;
  80. private readonly GpuContext _context;
  81. private readonly AutoResetEvent _event;
  82. private bool _interrupt;
  83. private int _flushSkips;
  84. /// <summary>
  85. /// Creates a new instance of the GPU General Purpose FIFO device.
  86. /// </summary>
  87. /// <param name="context">GPU context that the GPFIFO belongs to</param>
  88. internal GPFifoDevice(GpuContext context)
  89. {
  90. _commandBufferQueue = new ConcurrentQueue<CommandBuffer>();
  91. _ibEnable = true;
  92. _context = context;
  93. _event = new AutoResetEvent(false);
  94. }
  95. /// <summary>
  96. /// Signal the FIFO that there are new entries to process.
  97. /// </summary>
  98. public void SignalNewEntries()
  99. {
  100. _event.Set();
  101. }
  102. /// <summary>
  103. /// Push a GPFIFO entry in the form of a prefetched command buffer.
  104. /// It is intended to be used by nvservices to handle special cases.
  105. /// </summary>
  106. /// <param name="processor">Processor used to process <paramref name="commandBuffer"/></param>
  107. /// <param name="commandBuffer">The command buffer containing the prefetched commands</param>
  108. internal void PushHostCommandBuffer(GPFifoProcessor processor, int[] commandBuffer)
  109. {
  110. _commandBufferQueue.Enqueue(new CommandBuffer
  111. {
  112. Processor = processor,
  113. Type = CommandBufferType.Prefetch,
  114. Words = commandBuffer,
  115. EntryAddress = ulong.MaxValue,
  116. EntryCount = (uint)commandBuffer.Length
  117. });
  118. }
  119. /// <summary>
  120. /// Create a CommandBuffer from a GPFIFO entry.
  121. /// </summary>
  122. /// <param name="processor">Processor used to process the command buffer pointed to by <paramref name="entry"/></param>
  123. /// <param name="entry">The GPFIFO entry</param>
  124. /// <returns>A new CommandBuffer based on the GPFIFO entry</returns>
  125. private static CommandBuffer CreateCommandBuffer(GPFifoProcessor processor, GPEntry entry)
  126. {
  127. CommandBufferType type = CommandBufferType.Prefetch;
  128. if (entry.Entry1Sync == Entry1Sync.Wait)
  129. {
  130. type = CommandBufferType.NoPrefetch;
  131. }
  132. ulong startAddress = ((ulong)entry.Entry0Get << 2) | ((ulong)entry.Entry1GetHi << 32);
  133. return new CommandBuffer
  134. {
  135. Processor = processor,
  136. Type = type,
  137. Words = null,
  138. EntryAddress = startAddress,
  139. EntryCount = (uint)entry.Entry1Length
  140. };
  141. }
  142. /// <summary>
  143. /// Pushes GPFIFO entries.
  144. /// </summary>
  145. /// <param name="processor">Processor used to process the command buffers pointed to by <paramref name="entries"/></param>
  146. /// <param name="entries">GPFIFO entries</param>
  147. internal void PushEntries(GPFifoProcessor processor, ReadOnlySpan<ulong> entries)
  148. {
  149. bool beforeBarrier = true;
  150. for (int index = 0; index < entries.Length; index++)
  151. {
  152. ulong entry = entries[index];
  153. CommandBuffer commandBuffer = CreateCommandBuffer(processor, Unsafe.As<ulong, GPEntry>(ref entry));
  154. if (beforeBarrier && commandBuffer.Type == CommandBufferType.Prefetch)
  155. {
  156. commandBuffer.Prefetch(processor.MemoryManager);
  157. }
  158. if (commandBuffer.Type == CommandBufferType.NoPrefetch)
  159. {
  160. beforeBarrier = false;
  161. }
  162. _commandBufferQueue.Enqueue(commandBuffer);
  163. }
  164. }
  165. /// <summary>
  166. /// Waits until commands are pushed to the FIFO.
  167. /// </summary>
  168. /// <returns>True if commands were received, false if wait timed out</returns>
  169. public bool WaitForCommands()
  170. {
  171. return !_commandBufferQueue.IsEmpty || (_event.WaitOne(8) && !_commandBufferQueue.IsEmpty);
  172. }
  173. /// <summary>
  174. /// Processes commands pushed to the FIFO.
  175. /// </summary>
  176. public void DispatchCalls()
  177. {
  178. // Use this opportunity to also dispose any pending channels that were closed.
  179. _context.RunDeferredActions();
  180. // Process command buffers.
  181. while (_ibEnable && !_interrupt && _commandBufferQueue.TryDequeue(out CommandBuffer entry))
  182. {
  183. bool flushCommandBuffer = true;
  184. if (_flushSkips != 0)
  185. {
  186. _flushSkips--;
  187. flushCommandBuffer = false;
  188. }
  189. _currentCommandBuffer = entry;
  190. ReadOnlySpan<int> words = entry.Fetch(entry.Processor.MemoryManager, flushCommandBuffer);
  191. // If we are changing the current channel,
  192. // we need to force all the host state to be updated.
  193. if (_prevChannelProcessor != entry.Processor)
  194. {
  195. _prevChannelProcessor = entry.Processor;
  196. entry.Processor.ForceAllDirty();
  197. }
  198. entry.Processor.Process(entry.EntryAddress, words);
  199. }
  200. _interrupt = false;
  201. }
  202. /// <summary>
  203. /// Sets the number of flushes that should be skipped for subsequent command buffers.
  204. /// </summary>
  205. /// <remarks>
  206. /// This can improve performance when command buffer data only needs to be consumed by the GPU.
  207. /// </remarks>
  208. /// <param name="count">The amount of flushes that should be skipped</param>
  209. internal void SetFlushSkips(int count)
  210. {
  211. _flushSkips = count;
  212. }
  213. /// <summary>
  214. /// Interrupts command processing. This will break out of the DispatchCalls loop.
  215. /// </summary>
  216. public void Interrupt()
  217. {
  218. _interrupt = true;
  219. }
  220. /// <summary>
  221. /// Disposes of resources used for GPFifo command processing.
  222. /// </summary>
  223. public void Dispose() => _event.Dispose();
  224. }
  225. }