GPFifoDevice.cs 7.5 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221
  1. using Ryujinx.Graphics.Gpu.Memory;
  2. using System;
  3. using System.Collections.Concurrent;
  4. using System.Runtime.CompilerServices;
  5. using System.Runtime.InteropServices;
  6. using System.Threading;
  7. namespace Ryujinx.Graphics.Gpu.Engine.GPFifo
  8. {
  9. /// <summary>
  10. /// Represents a GPU General Purpose FIFO device.
  11. /// </summary>
  12. public sealed class GPFifoDevice : IDisposable
  13. {
/// <summary>
/// Indicates if the command buffer has pre-fetch enabled.
/// </summary>
private enum CommandBufferType
{
    // Command words are copied from guest memory eagerly, at push time
    // (see PushEntries / PushHostCommandBuffer).
    Prefetch,
    // Command words are read from guest memory lazily, at dispatch time
    // (used for entries at or after a sync-wait barrier).
    NoPrefetch
}
  22. /// <summary>
  23. /// Command buffer data.
  24. /// </summary>
  25. private struct CommandBuffer
  26. {
  27. /// <summary>
  28. /// Processor used to process the command buffer. Contains channel state.
  29. /// </summary>
  30. public GPFifoProcessor Processor;
  31. /// <summary>
  32. /// The type of the command buffer.
  33. /// </summary>
  34. public CommandBufferType Type;
  35. /// <summary>
  36. /// Fetched data.
  37. /// </summary>
  38. public int[] Words;
  39. /// <summary>
  40. /// The GPFIFO entry address (used in <see cref="CommandBufferType.NoPrefetch"/> mode).
  41. /// </summary>
  42. public ulong EntryAddress;
  43. /// <summary>
  44. /// The count of entries inside this GPFIFO entry.
  45. /// </summary>
  46. public uint EntryCount;
  47. /// <summary>
  48. /// Fetch the command buffer.
  49. /// </summary>
  50. public void Fetch(MemoryManager memoryManager)
  51. {
  52. if (Words == null)
  53. {
  54. Words = MemoryMarshal.Cast<byte, int>(memoryManager.GetSpan(EntryAddress, (int)EntryCount * 4, true)).ToArray();
  55. }
  56. }
  57. }
  58. private readonly ConcurrentQueue<CommandBuffer> _commandBufferQueue;
  59. private CommandBuffer _currentCommandBuffer;
  60. private GPFifoProcessor _prevChannelProcessor;
  61. private readonly bool _ibEnable;
  62. private readonly GpuContext _context;
  63. private readonly AutoResetEvent _event;
  64. private bool _interrupt;
  65. /// <summary>
  66. /// Creates a new instance of the GPU General Purpose FIFO device.
  67. /// </summary>
  68. /// <param name="context">GPU context that the GPFIFO belongs to</param>
  69. internal GPFifoDevice(GpuContext context)
  70. {
  71. _commandBufferQueue = new ConcurrentQueue<CommandBuffer>();
  72. _ibEnable = true;
  73. _context = context;
  74. _event = new AutoResetEvent(false);
  75. }
  76. /// <summary>
  77. /// Signal the FIFO that there are new entries to process.
  78. /// </summary>
  79. public void SignalNewEntries()
  80. {
  81. _event.Set();
  82. }
  83. /// <summary>
  84. /// Push a GPFIFO entry in the form of a prefetched command buffer.
  85. /// It is intended to be used by nvservices to handle special cases.
  86. /// </summary>
  87. /// <param name="processor">Processor used to process <paramref name="commandBuffer"/></param>
  88. /// <param name="commandBuffer">The command buffer containing the prefetched commands</param>
  89. internal void PushHostCommandBuffer(GPFifoProcessor processor, int[] commandBuffer)
  90. {
  91. _commandBufferQueue.Enqueue(new CommandBuffer
  92. {
  93. Processor = processor,
  94. Type = CommandBufferType.Prefetch,
  95. Words = commandBuffer,
  96. EntryAddress = ulong.MaxValue,
  97. EntryCount = (uint)commandBuffer.Length
  98. });
  99. }
  100. /// <summary>
  101. /// Create a CommandBuffer from a GPFIFO entry.
  102. /// </summary>
  103. /// <param name="processor">Processor used to process the command buffer pointed to by <paramref name="entry"/></param>
  104. /// <param name="entry">The GPFIFO entry</param>
  105. /// <returns>A new CommandBuffer based on the GPFIFO entry</returns>
  106. private static CommandBuffer CreateCommandBuffer(GPFifoProcessor processor, GPEntry entry)
  107. {
  108. CommandBufferType type = CommandBufferType.Prefetch;
  109. if (entry.Entry1Sync == Entry1Sync.Wait)
  110. {
  111. type = CommandBufferType.NoPrefetch;
  112. }
  113. ulong startAddress = ((ulong)entry.Entry0Get << 2) | ((ulong)entry.Entry1GetHi << 32);
  114. return new CommandBuffer
  115. {
  116. Processor = processor,
  117. Type = type,
  118. Words = null,
  119. EntryAddress = startAddress,
  120. EntryCount = (uint)entry.Entry1Length
  121. };
  122. }
  123. /// <summary>
  124. /// Pushes GPFIFO entries.
  125. /// </summary>
  126. /// <param name="processor">Processor used to process the command buffers pointed to by <paramref name="entries"/></param>
  127. /// <param name="entries">GPFIFO entries</param>
  128. internal void PushEntries(GPFifoProcessor processor, ReadOnlySpan<ulong> entries)
  129. {
  130. bool beforeBarrier = true;
  131. for (int index = 0; index < entries.Length; index++)
  132. {
  133. ulong entry = entries[index];
  134. CommandBuffer commandBuffer = CreateCommandBuffer(processor, Unsafe.As<ulong, GPEntry>(ref entry));
  135. if (beforeBarrier && commandBuffer.Type == CommandBufferType.Prefetch)
  136. {
  137. commandBuffer.Fetch(processor.MemoryManager);
  138. }
  139. if (commandBuffer.Type == CommandBufferType.NoPrefetch)
  140. {
  141. beforeBarrier = false;
  142. }
  143. _commandBufferQueue.Enqueue(commandBuffer);
  144. }
  145. }
  146. /// <summary>
  147. /// Waits until commands are pushed to the FIFO.
  148. /// </summary>
  149. /// <returns>True if commands were received, false if wait timed out</returns>
  150. public bool WaitForCommands()
  151. {
  152. return !_commandBufferQueue.IsEmpty || (_event.WaitOne(8) && !_commandBufferQueue.IsEmpty);
  153. }
/// <summary>
/// Processes commands pushed to the FIFO.
/// </summary>
public void DispatchCalls()
{
    // Use this opportunity to also dispose any pending channels that were closed.
    _context.RunDeferredActions();

    // Process command buffers until the queue is drained, processing is
    // disabled, or an interrupt is requested.
    while (_ibEnable && !_interrupt && _commandBufferQueue.TryDequeue(out CommandBuffer entry))
    {
        _currentCommandBuffer = entry;
        // CommandBuffer is a struct, so "entry" and "_currentCommandBuffer"
        // are independent copies. Fetch is deliberately invoked on the field
        // so that the Words array read below gets populated.
        _currentCommandBuffer.Fetch(entry.Processor.MemoryManager);

        // If we are changing the current channel,
        // we need to force all the host state to be updated.
        if (_prevChannelProcessor != entry.Processor)
        {
            _prevChannelProcessor = entry.Processor;
            entry.Processor.ForceAllDirty();
        }

        entry.Processor.Process(_currentCommandBuffer.Words);
    }

    // Clear any pending interrupt so the next dispatch starts fresh.
    _interrupt = false;
}
  177. /// <summary>
  178. /// Interrupts command processing. This will break out of the DispatchCalls loop.
  179. /// </summary>
  180. public void Interrupt()
  181. {
  182. _interrupt = true;
  183. }
  184. /// <summary>
  185. /// Disposes of resources used for GPFifo command processing.
  186. /// </summary>
  187. public void Dispose() => _event.Dispose();
  188. }
  189. }