author     riperiperi <rhy3756547@hotmail.com>  2022-11-24 01:56:55 +0000
committer  GitHub <noreply@github.com>          2022-11-24 01:56:55 +0000
commit     f3cc2e5703e5df5c359ce1789a4fb0d73fb9a637 (patch)
tree       e9dae9468417e5b264d967168290220fa786cf20
parent     5a39d3c4a13d9de67aa324a487af3756e4ce4930 (diff)
GPU: Access non-prefetch command buffers directly (#3882) [tag: 1.1.376]
* GPU: Access non-prefetch command buffers directly

  Saves allocating new arrays for them constantly - they can be quite small, so it can be very wasteful. About 0.4% of GPU thread time in SMO, but it was a bit higher in S/V when I checked.

  Assumes that non-prefetch command buffers won't be randomly clobbered before they finish executing, though that's probably a safe bet.

* Small change while I'm here

* Address feedback
-rw-r--r--  Ryujinx.Graphics.Gpu/Engine/GPFifo/GPFifoDevice.cs  |  35
1 file changed, 27 insertions(+), 8 deletions(-)
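
To make the motivation concrete before the diff itself, here is a simplified, self-contained sketch of the pattern the change moves to: the old Fetch copied the command buffer words into a fresh int[] on every call, while the new path hands back a ReadOnlySpan<int> over guest memory and only prefetched buffers keep a cached array. SimpleMemory, CommandBufferSketch, and the sizes below are invented stand-ins for illustration; they are not the Ryujinx MemoryManager or GPFifo types.

```csharp
// Standalone sketch of the allocation the change avoids. SimpleMemory is an
// invented stand-in; Ryujinx's real MemoryManager.GetSpan has more parameters
// and different semantics.
using System;
using System.Runtime.InteropServices;

class SimpleMemory
{
    private readonly byte[] _backing = new byte[4096];

    // Stand-in for MemoryManager.GetSpan: a view of guest memory, no copy.
    public ReadOnlySpan<byte> GetSpan(ulong address, int size)
    {
        return new ReadOnlySpan<byte>(_backing, (int)address, size);
    }
}

struct CommandBufferSketch
{
    public ulong EntryAddress;
    public uint EntryCount;
    public int[] Words; // Only populated for prefetched buffers.

    // Old behaviour: every fetch copied the words into a new array.
    public int[] FetchCopy(SimpleMemory memory)
    {
        return MemoryMarshal.Cast<byte, int>(
            memory.GetSpan(EntryAddress, (int)EntryCount * 4)).ToArray();
    }

    // New behaviour: non-prefetch buffers are read through a span over guest
    // memory, so no per-fetch array allocation happens; prefetched buffers
    // still return their cached Words array.
    public ReadOnlySpan<int> Fetch(SimpleMemory memory)
    {
        return Words ?? MemoryMarshal.Cast<byte, int>(
            memory.GetSpan(EntryAddress, (int)EntryCount * 4));
    }
}

static class Demo
{
    static void Main()
    {
        var memory = new SimpleMemory();
        var cb = new CommandBufferSketch { EntryAddress = 0, EntryCount = 8 };

        // Non-prefetch path: words are read directly from guest memory,
        // no intermediate int[] is allocated.
        ReadOnlySpan<int> words = cb.Fetch(memory);
        Console.WriteLine(words.Length); // 8
    }
}
```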
diff --git a/Ryujinx.Graphics.Gpu/Engine/GPFifo/GPFifoDevice.cs b/Ryujinx.Graphics.Gpu/Engine/GPFifo/GPFifoDevice.cs
index b3de738d..cd29a9da 100644
--- a/Ryujinx.Graphics.Gpu/Engine/GPFifo/GPFifoDevice.cs
+++ b/Ryujinx.Graphics.Gpu/Engine/GPFifo/GPFifoDevice.cs
@@ -52,15 +52,34 @@ namespace Ryujinx.Graphics.Gpu.Engine.GPFifo
public uint EntryCount;
/// <summary>
+ /// Get the entries for the command buffer from memory.
+ /// </summary>
+ /// <param name="memoryManager">The memory manager used to fetch the data</param>
+ /// <param name="flush">If true, flushes potential GPU written data before reading the command buffer</param>
+ /// <returns>The fetched data</returns>
+ private ReadOnlySpan<int> GetWords(MemoryManager memoryManager, bool flush)
+ {
+ return MemoryMarshal.Cast<byte, int>(memoryManager.GetSpan(EntryAddress, (int)EntryCount * 4, flush));
+ }
+
+ /// <summary>
+ /// Prefetch the command buffer.
+ /// </summary>
+ /// <param name="memoryManager">The memory manager used to fetch the data</param>
+ public void Prefetch(MemoryManager memoryManager)
+ {
+ Words = GetWords(memoryManager, true).ToArray();
+ }
+
+ /// <summary>
/// Fetch the command buffer.
/// </summary>
+ /// <param name="memoryManager">The memory manager used to fetch the data</param>
/// <param name="flush">If true, flushes potential GPU written data before reading the command buffer</param>
- public void Fetch(MemoryManager memoryManager, bool flush = true)
+ /// <returns>The command buffer words</returns>
+ public ReadOnlySpan<int> Fetch(MemoryManager memoryManager, bool flush)
{
- if (Words == null)
- {
- Words = MemoryMarshal.Cast<byte, int>(memoryManager.GetSpan(EntryAddress, (int)EntryCount * 4, flush)).ToArray();
- }
+ return Words ?? GetWords(memoryManager, flush);
}
}
@@ -158,7 +177,7 @@ namespace Ryujinx.Graphics.Gpu.Engine.GPFifo
if (beforeBarrier && commandBuffer.Type == CommandBufferType.Prefetch)
{
- commandBuffer.Fetch(processor.MemoryManager);
+ commandBuffer.Prefetch(processor.MemoryManager);
}
if (commandBuffer.Type == CommandBufferType.NoPrefetch)
@@ -199,7 +218,7 @@ namespace Ryujinx.Graphics.Gpu.Engine.GPFifo
}
_currentCommandBuffer = entry;
- _currentCommandBuffer.Fetch(entry.Processor.MemoryManager, flushCommandBuffer);
+ ReadOnlySpan<int> words = entry.Fetch(entry.Processor.MemoryManager, flushCommandBuffer);
// If we are changing the current channel,
// we need to force all the host state to be updated.
@@ -209,7 +228,7 @@ namespace Ryujinx.Graphics.Gpu.Engine.GPFifo
entry.Processor.ForceAllDirty();
}
- entry.Processor.Process(entry.EntryAddress, _currentCommandBuffer.Words);
+ entry.Processor.Process(entry.EntryAddress, words);
}
_interrupt = false;
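
For completeness, a tiny standalone illustration (not Ryujinx code) of the trade-off the commit message calls out: the span returned by the new Fetch is a view over guest memory, so it stays valid only as long as the non-prefetch command buffer is not overwritten before processing finishes, whereas the old ToArray() copy was immune to such clobbering. The array contents below are invented for the example.

```csharp
// Copy vs. view: a later write to the underlying memory shows through a span
// but not through a snapshot copy.
using System;

static class AliasingDemo
{
    static void Main()
    {
        byte[] guestMemory = { 1, 2, 3, 4 };

        // Copy (old behaviour): a snapshot, unaffected by later writes.
        byte[] copy = (byte[])guestMemory.Clone();

        // Span (new behaviour): a view, so later writes show through.
        ReadOnlySpan<byte> view = guestMemory;

        guestMemory[0] = 99; // a "clobber" after the fetch

        Console.WriteLine(copy[0]); // 1  (snapshot unchanged)
        Console.WriteLine(view[0]); // 99 (view sees the change)
    }
}
```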