// NvGpuFifo.cs
  1. using Ryujinx.Graphics.Gpu.State;
  2. namespace Ryujinx.Graphics.Gpu
  3. {
  4. /// <summary>
  5. /// GPU commands FIFO.
  6. /// </summary>
  7. class NvGpuFifo
  8. {
  9. private const int MacrosCount = 0x80;
  10. private const int MacroIndexMask = MacrosCount - 1;
  11. // Note: The size of the macro memory is unknown, we just make
  12. // a guess here and use 256kb as the size. Increase if needed.
  13. private const int MmeWords = 256 * 256;
  14. private GpuContext _context;
  15. /// <summary>
  16. /// Cached GPU macro program.
  17. /// </summary>
  18. private struct CachedMacro
  19. {
  20. /// <summary>
  21. /// Word offset of the code on the code memory.
  22. /// </summary>
  23. public int Position { get; }
  24. private bool _executionPending;
  25. private int _argument;
  26. private MacroInterpreter _interpreter;
  27. /// <summary>
  28. /// Creates a new instance of the GPU cached macro program.
  29. /// </summary>
  30. /// <param name="position">Macro code start position</param>
  31. public CachedMacro(int position)
  32. {
  33. Position = position;
  34. _executionPending = false;
  35. _argument = 0;
  36. _interpreter = new MacroInterpreter();
  37. }
  38. /// <summary>
  39. /// Sets the first argument for the macro call.
  40. /// </summary>
  41. /// <param name="argument">First argument</param>
  42. public void StartExecution(int argument)
  43. {
  44. _argument = argument;
  45. _executionPending = true;
  46. }
  47. /// <summary>
  48. /// Starts executing the macro program code.
  49. /// </summary>
  50. /// <param name="mme">Program code</param>
  51. /// <param name="state">Current GPU state</param>
  52. public void Execute(int[] mme, GpuState state)
  53. {
  54. if (_executionPending)
  55. {
  56. _executionPending = false;
  57. _interpreter?.Execute(mme, Position, _argument, state);
  58. }
  59. }
  60. /// <summary>
  61. /// Pushes an argument to the macro call argument FIFO.
  62. /// </summary>
  63. /// <param name="argument">Argument to be pushed</param>
  64. public void PushArgument(int argument)
  65. {
  66. _interpreter?.Fifo.Enqueue(argument);
  67. }
  68. }
  69. private int _currMacroPosition;
  70. private int _currMacroBindIndex;
  71. private CachedMacro[] _macros;
  72. private int[] _mme;
  73. /// <summary>
  74. /// GPU sub-channel information.
  75. /// </summary>
  76. private class SubChannel
  77. {
  78. /// <summary>
  79. /// Sub-channel GPU state.
  80. /// </summary>
  81. public GpuState State { get; }
  82. /// <summary>
  83. /// Engine bound to the sub-channel.
  84. /// </summary>
  85. public ClassId Class { get; set; }
  86. /// <summary>
  87. /// Creates a new instance of the GPU sub-channel.
  88. /// </summary>
  89. public SubChannel()
  90. {
  91. State = new GpuState();
  92. }
  93. }
  94. private SubChannel[] _subChannels;
  95. /// <summary>
  96. /// Creates a new instance of the GPU commands FIFO.
  97. /// </summary>
  98. /// <param name="context">GPU emulation context</param>
  99. public NvGpuFifo(GpuContext context)
  100. {
  101. _context = context;
  102. _macros = new CachedMacro[MacrosCount];
  103. _mme = new int[MmeWords];
  104. _subChannels = new SubChannel[8];
  105. for (int index = 0; index < _subChannels.Length; index++)
  106. {
  107. _subChannels[index] = new SubChannel();
  108. context.Methods.RegisterCallbacks(_subChannels[index].State);
  109. }
  110. }
  111. /// <summary>
  112. /// Calls a GPU method.
  113. /// </summary>
  114. /// <param name="meth">GPU method call parameters</param>
  115. public void CallMethod(MethodParams meth)
  116. {
  117. if ((NvGpuFifoMeth)meth.Method == NvGpuFifoMeth.BindChannel)
  118. {
  119. _subChannels[meth.SubChannel].Class = (ClassId)meth.Argument;
  120. }
  121. else if (meth.Method < 0x60)
  122. {
  123. switch ((NvGpuFifoMeth)meth.Method)
  124. {
  125. case NvGpuFifoMeth.WaitForIdle:
  126. {
  127. _context.Methods.PerformDeferredDraws();
  128. _context.Renderer.Pipeline.Barrier();
  129. break;
  130. }
  131. case NvGpuFifoMeth.SetMacroUploadAddress:
  132. {
  133. _currMacroPosition = meth.Argument;
  134. break;
  135. }
  136. case NvGpuFifoMeth.SendMacroCodeData:
  137. {
  138. _mme[_currMacroPosition++] = meth.Argument;
  139. break;
  140. }
  141. case NvGpuFifoMeth.SetMacroBindingIndex:
  142. {
  143. _currMacroBindIndex = meth.Argument;
  144. break;
  145. }
  146. case NvGpuFifoMeth.BindMacro:
  147. {
  148. int position = meth.Argument;
  149. _macros[_currMacroBindIndex++] = new CachedMacro(position);
  150. break;
  151. }
  152. }
  153. }
  154. else if (meth.Method < 0xe00)
  155. {
  156. _subChannels[meth.SubChannel].State.CallMethod(meth);
  157. }
  158. else
  159. {
  160. int macroIndex = (meth.Method >> 1) & MacroIndexMask;
  161. if ((meth.Method & 1) != 0)
  162. {
  163. _macros[macroIndex].PushArgument(meth.Argument);
  164. }
  165. else
  166. {
  167. _macros[macroIndex].StartExecution(meth.Argument);
  168. }
  169. if (meth.IsLastCall)
  170. {
  171. _macros[macroIndex].Execute(_mme, _subChannels[meth.SubChannel].State);
  172. _context.Methods.PerformDeferredDraws();
  173. }
  174. }
  175. }
  176. }
  177. }