// NvGpuFifo.cs
  1. using Ryujinx.Graphics.Gpu.State;
  2. namespace Ryujinx.Graphics.Gpu
  3. {
  4. class NvGpuFifo
  5. {
  6. private const int MacrosCount = 0x80;
  7. private const int MacroIndexMask = MacrosCount - 1;
  8. // Note: The size of the macro memory is unknown, we just make
  9. // a guess here and use 256kb as the size. Increase if needed.
  10. private const int MmeWords = 256 * 256;
  11. private GpuContext _context;
  12. private struct CachedMacro
  13. {
  14. public int Position { get; private set; }
  15. private bool _executionPending;
  16. private int _argument;
  17. private MacroInterpreter _interpreter;
  18. public CachedMacro(GpuContext context, NvGpuFifo fifo, int position)
  19. {
  20. Position = position;
  21. _executionPending = false;
  22. _argument = 0;
  23. _interpreter = new MacroInterpreter(context, fifo);
  24. }
  25. public void StartExecution(int argument)
  26. {
  27. _argument = argument;
  28. _executionPending = true;
  29. }
  30. public void Execute(int[] mme, GpuState state)
  31. {
  32. if (_executionPending)
  33. {
  34. _executionPending = false;
  35. _interpreter?.Execute(mme, Position, _argument, state);
  36. }
  37. }
  38. public void PushArgument(int argument)
  39. {
  40. _interpreter?.Fifo.Enqueue(argument);
  41. }
  42. }
  43. private int _currMacroPosition;
  44. private int _currMacroBindIndex;
  45. private CachedMacro[] _macros;
  46. private int[] _mme;
  47. private class SubChannel
  48. {
  49. public GpuState State { get; }
  50. public ClassId Class { get; set; }
  51. public SubChannel()
  52. {
  53. State = new GpuState();
  54. }
  55. }
  56. private SubChannel[] _subChannels;
  57. public NvGpuFifo(GpuContext context)
  58. {
  59. _context = context;
  60. _macros = new CachedMacro[MacrosCount];
  61. _mme = new int[MmeWords];
  62. _subChannels = new SubChannel[8];
  63. for (int index = 0; index < _subChannels.Length; index++)
  64. {
  65. _subChannels[index] = new SubChannel();
  66. context.Methods.RegisterCallbacks(_subChannels[index].State);
  67. }
  68. }
  69. public void CallMethod(MethodParams meth)
  70. {
  71. if ((NvGpuFifoMeth)meth.Method == NvGpuFifoMeth.BindChannel)
  72. {
  73. _subChannels[meth.SubChannel].Class = (ClassId)meth.Argument;
  74. }
  75. else if (meth.Method < 0x60)
  76. {
  77. switch ((NvGpuFifoMeth)meth.Method)
  78. {
  79. case NvGpuFifoMeth.WaitForIdle:
  80. {
  81. _context.Methods.PerformDeferredDraws();
  82. _context.Renderer.FlushPipelines();
  83. break;
  84. }
  85. case NvGpuFifoMeth.SetMacroUploadAddress:
  86. {
  87. _currMacroPosition = meth.Argument;
  88. break;
  89. }
  90. case NvGpuFifoMeth.SendMacroCodeData:
  91. {
  92. _mme[_currMacroPosition++] = meth.Argument;
  93. break;
  94. }
  95. case NvGpuFifoMeth.SetMacroBindingIndex:
  96. {
  97. _currMacroBindIndex = meth.Argument;
  98. break;
  99. }
  100. case NvGpuFifoMeth.BindMacro:
  101. {
  102. int position = meth.Argument;
  103. _macros[_currMacroBindIndex++] = new CachedMacro(_context, this, position);
  104. break;
  105. }
  106. }
  107. }
  108. else if (meth.Method < 0xe00)
  109. {
  110. _subChannels[meth.SubChannel].State.CallMethod(meth);
  111. }
  112. else
  113. {
  114. int macroIndex = (meth.Method >> 1) & MacroIndexMask;
  115. if ((meth.Method & 1) != 0)
  116. {
  117. _macros[macroIndex].PushArgument(meth.Argument);
  118. }
  119. else
  120. {
  121. _macros[macroIndex].StartExecution(meth.Argument);
  122. }
  123. if (meth.IsLastCall)
  124. {
  125. _macros[macroIndex].Execute(_mme, _subChannels[meth.SubChannel].State);
  126. _context.Methods.PerformDeferredDraws();
  127. }
  128. }
  129. }
  130. }
  131. }