// NvGpuFifo.cs
  1. namespace Ryujinx.Graphics.Gpu
  2. {
  3. class NvGpuFifo
  4. {
  5. private const int MacrosCount = 0x80;
  6. private const int MacroIndexMask = MacrosCount - 1;
  7. // Note: The size of the macro memory is unknown, we just make
  8. // a guess here and use 256kb as the size. Increase if needed.
  9. private const int MmeWords = 256 * 256;
  10. private GpuContext _context;
  11. private struct CachedMacro
  12. {
  13. public int Position { get; private set; }
  14. private bool _executionPending;
  15. private int _argument;
  16. private MacroInterpreter _interpreter;
  17. public CachedMacro(GpuContext context, NvGpuFifo fifo, int position)
  18. {
  19. Position = position;
  20. _executionPending = false;
  21. _argument = 0;
  22. _interpreter = new MacroInterpreter(context, fifo);
  23. }
  24. public void StartExecution(int argument)
  25. {
  26. _argument = argument;
  27. _executionPending = true;
  28. }
  29. public void Execute(int[] mme)
  30. {
  31. if (_executionPending)
  32. {
  33. _executionPending = false;
  34. _interpreter?.Execute(mme, Position, _argument);
  35. }
  36. }
  37. public void PushArgument(int argument)
  38. {
  39. _interpreter?.Fifo.Enqueue(argument);
  40. }
  41. }
  42. private int _currMacroPosition;
  43. private int _currMacroBindIndex;
  44. private CachedMacro[] _macros;
  45. private int[] _mme;
  46. private ClassId[] _subChannels;
  47. public NvGpuFifo(GpuContext context)
  48. {
  49. _context = context;
  50. _macros = new CachedMacro[MacrosCount];
  51. _mme = new int[MmeWords];
  52. _subChannels = new ClassId[8];
  53. }
  54. public void CallMethod(MethodParams meth)
  55. {
  56. if ((NvGpuFifoMeth)meth.Method == NvGpuFifoMeth.BindChannel)
  57. {
  58. _subChannels[meth.SubChannel] = (ClassId)meth.Argument;
  59. }
  60. else if (meth.Method < 0x60)
  61. {
  62. switch ((NvGpuFifoMeth)meth.Method)
  63. {
  64. case NvGpuFifoMeth.WaitForIdle:
  65. {
  66. _context.Renderer.FlushPipelines();
  67. break;
  68. }
  69. case NvGpuFifoMeth.SetMacroUploadAddress:
  70. {
  71. _currMacroPosition = meth.Argument;
  72. break;
  73. }
  74. case NvGpuFifoMeth.SendMacroCodeData:
  75. {
  76. _mme[_currMacroPosition++] = meth.Argument;
  77. break;
  78. }
  79. case NvGpuFifoMeth.SetMacroBindingIndex:
  80. {
  81. _currMacroBindIndex = meth.Argument;
  82. break;
  83. }
  84. case NvGpuFifoMeth.BindMacro:
  85. {
  86. int position = meth.Argument;
  87. _macros[_currMacroBindIndex++] = new CachedMacro(_context, this, position);
  88. break;
  89. }
  90. }
  91. }
  92. else if (meth.Method < 0xe00)
  93. {
  94. _context.State.CallMethod(meth);
  95. }
  96. else
  97. {
  98. int macroIndex = (meth.Method >> 1) & MacroIndexMask;
  99. if ((meth.Method & 1) != 0)
  100. {
  101. _macros[macroIndex].PushArgument(meth.Argument);
  102. }
  103. else
  104. {
  105. _macros[macroIndex].StartExecution(meth.Argument);
  106. }
  107. if (meth.IsLastCall)
  108. {
  109. _macros[macroIndex].Execute(_mme);
  110. _context.Methods.PerformDeferredDraws();
  111. }
  112. }
  113. }
  114. }
  115. }