Mirror of https://github.com/ryujinx-mirror/ryujinx.git, synced 2024-11-29 21:43:01 +00:00
Commit 4b60371e64: Return mapped buffer pointer directly for flush, WritableRegion for textures

A few changes here to generally improve performance, even for platforms not using the persistent buffer flush.

- Texture and buffer flush now return a ReadOnlySpan<byte>. It's guaranteed that this span is pinned in memory, but it will be overwritten on the next flush from that thread, so it is expected that the data is used before calling again.
- As a result, persistent mappings no longer copy to a new array - rather, the persistent map is returned directly as a Span<>. A similar host array is used for the glGet flushes instead of allocating new arrays each time.
- Texture flushes now do their layout conversion into a WritableRegion when the texture is not MultiRange, which allows the flush to happen directly into guest memory rather than into a temporary span that is then copied over. This avoids another copy when doing layout conversion.

Overall, this saves 1 data copy for buffer flush, 1 copy for linear textures with matching source/target stride, and 2 copies for block textures or linear textures with mismatching strides.

* Fix tests
* Fix array pointer for Mesa/Intel path
* Address some feedback
* Update method for getting array pointer.
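A minimal sketch of the flush pattern described above (editor's addition, not code from the commit; memoryManager, ConvertLayout, source, textureVa and sizeInBytes are hypothetical placeholders, and WritableRegion is assumed to expose its buffer through a Memory property). Layout conversion writes directly into a WritableRegion over guest memory, and disposing the region writes the data back only when a temporary copy was needed:

    using (WritableRegion region = memoryManager.GetWritableRegion(textureVa, sizeInBytes))
    {
        ConvertLayout(source, region.Memory.Span); // convert straight into guest memory
    } // Dispose() flushes the copy back if the region was not contiguous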
489 lines · 17 KiB · C#
using Ryujinx.Memory.Range;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

namespace Ryujinx.Memory
{
    /// <summary>
    /// Represents an address space manager.
    /// Supports virtual memory region mapping, address translation and read/write access to mapped regions.
    /// </summary>
    public sealed class AddressSpaceManager : IVirtualMemoryManager, IWritableBlock
    {
        public const int PageBits = PageTable<nuint>.PageBits;
        public const int PageSize = PageTable<nuint>.PageSize;
        public const int PageMask = PageTable<nuint>.PageMask;

        /// <summary>
        /// Address space width in bits.
        /// </summary>
        public int AddressSpaceBits { get; }

        private readonly ulong _addressSpaceSize;

        private readonly PageTable<nuint> _pageTable;

        /// <summary>
        /// Creates a new instance of the memory manager.
        /// </summary>
        /// <param name="addressSpaceSize">Size of the address space</param>
        public AddressSpaceManager(ulong addressSpaceSize)
        {
            ulong asSize = PageSize;
            int asBits = PageBits;

            while (asSize < addressSpaceSize)
            {
                asSize <<= 1;
                asBits++;
            }

            AddressSpaceBits = asBits;
            _addressSpaceSize = asSize;
            _pageTable = new PageTable<nuint>();
        }
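        // Example (editor's sketch, not from the original file): the constructor rounds
        // the requested size up to the next power of two. With the usual 4 KiB pages,
        // asking for 48 MiB yields a 64 MiB address space and AddressSpaceBits == 26.
        //
        //     var mm = new AddressSpaceManager(48UL * 1024 * 1024);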

        /// <summary>
        /// Maps a virtual memory range into a physical memory range.
        /// </summary>
        /// <remarks>
        /// Addresses and size must be page aligned.
        /// </remarks>
        /// <param name="va">Virtual memory address</param>
        /// <param name="hostAddress">Physical memory address</param>
        /// <param name="size">Size to be mapped</param>
        public void Map(ulong va, nuint hostAddress, ulong size)
        {
            AssertValidAddressAndSize(va, size);

            while (size != 0)
            {
                _pageTable.Map(va, hostAddress);

                va += PageSize;
                hostAddress += PageSize;
                size -= PageSize;
            }
        }
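        // Usage sketch (editor's addition, continuing the mm instance from the sketch
        // above; hostPtr is a hypothetical nuint pointing at page-aligned host memory,
        // e.g. obtained from a MemoryBlock): both the virtual address and the size must
        // be multiples of PageSize.
        //
        //     mm.Map(va: 0x10000, hostAddress: hostPtr, size: 0x4000); // maps four pages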

        /// <summary>
        /// Unmaps a previously mapped range of virtual memory.
        /// </summary>
        /// <param name="va">Virtual address of the range to be unmapped</param>
        /// <param name="size">Size of the range to be unmapped</param>
        public void Unmap(ulong va, ulong size)
        {
            AssertValidAddressAndSize(va, size);

            while (size != 0)
            {
                _pageTable.Unmap(va);

                va += PageSize;
                size -= PageSize;
            }
        }

        /// <summary>
        /// Reads data from mapped memory.
        /// </summary>
        /// <typeparam name="T">Type of the data being read</typeparam>
        /// <param name="va">Virtual address of the data in memory</param>
        /// <returns>The data</returns>
        /// <exception cref="InvalidMemoryRegionException">Thrown for unhandled invalid or unmapped memory accesses</exception>
        public T Read<T>(ulong va) where T : unmanaged
        {
            return MemoryMarshal.Cast<byte, T>(GetSpan(va, Unsafe.SizeOf<T>()))[0];
        }

        /// <summary>
        /// Reads data from mapped memory.
        /// </summary>
        /// <param name="va">Virtual address of the data in memory</param>
        /// <param name="data">Span to store the data being read into</param>
        /// <exception cref="InvalidMemoryRegionException">Thrown for unhandled invalid or unmapped memory accesses</exception>
        public void Read(ulong va, Span<byte> data)
        {
            ReadImpl(va, data);
        }

        /// <summary>
        /// Writes data to mapped memory.
        /// </summary>
        /// <typeparam name="T">Type of the data being written</typeparam>
        /// <param name="va">Virtual address to write the data into</param>
        /// <param name="value">Data to be written</param>
        /// <exception cref="InvalidMemoryRegionException">Thrown for unhandled invalid or unmapped memory accesses</exception>
        public void Write<T>(ulong va, T value) where T : unmanaged
        {
            Write(va, MemoryMarshal.Cast<T, byte>(MemoryMarshal.CreateSpan(ref value, 1)));
        }
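        // Round-trip sketch (editor's addition): Write<T> reinterprets the value as raw
        // bytes and Read<T> reinterprets them back, so for a mapped address the two
        // calls compose to the identity.
        //
        //     mm.Write<uint>(0x10000, 0xCAFEBABEu);
        //     uint readBack = mm.Read<uint>(0x10000); // 0xCAFEBABE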

        /// <summary>
        /// Writes data to mapped memory.
        /// </summary>
        /// <param name="va">Virtual address to write the data into</param>
        /// <param name="data">Data to be written</param>
        /// <exception cref="InvalidMemoryRegionException">Thrown for unhandled invalid or unmapped memory accesses</exception>
        public void Write(ulong va, ReadOnlySpan<byte> data)
        {
            if (data.Length == 0)
            {
                return;
            }

            AssertValidAddressAndSize(va, (ulong)data.Length);

            if (IsContiguousAndMapped(va, data.Length))
            {
                data.CopyTo(GetHostSpanContiguous(va, data.Length));
            }
            else
            {
                int offset = 0, size;

                // Write the leading misaligned part, up to the next page boundary.
                if ((va & PageMask) != 0)
                {
                    size = Math.Min(data.Length, PageSize - (int)(va & PageMask));

                    data.Slice(0, size).CopyTo(GetHostSpanContiguous(va, size));

                    offset += size;
                }

                // Copy the rest one page at a time.
                for (; offset < data.Length; offset += size)
                {
                    size = Math.Min(data.Length - offset, PageSize);

                    data.Slice(offset, size).CopyTo(GetHostSpanContiguous(va + (ulong)offset, size));
                }
            }
        }

        /// <summary>
        /// Gets a read-only span of data from mapped memory.
        /// </summary>
        /// <remarks>
        /// This may perform an allocation if the data is not contiguous in memory.
        /// For this reason the span is read-only; you can't modify the data.
        /// </remarks>
        /// <param name="va">Virtual address of the data</param>
        /// <param name="size">Size of the data</param>
        /// <param name="tracked">True if read tracking is triggered on the span</param>
        /// <returns>A read-only span of the data</returns>
        /// <exception cref="InvalidMemoryRegionException">Thrown for unhandled invalid or unmapped memory accesses</exception>
        public ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
        {
            if (size == 0)
            {
                return ReadOnlySpan<byte>.Empty;
            }

            if (IsContiguousAndMapped(va, size))
            {
                return GetHostSpanContiguous(va, size);
            }
            else
            {
                Span<byte> data = new byte[size];

                ReadImpl(va, data);

                return data;
            }
        }
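        // Behaviour note (editor's addition): when the backing pages are contiguous in
        // host memory the returned span aliases guest memory directly; otherwise the
        // data is copied into a fresh array, so later guest writes are not visible
        // through the span.
        //
        //     ReadOnlySpan<byte> header = mm.GetSpan(0x10000, 0x100);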

        /// <summary>
        /// Gets a region of memory that can be written to.
        /// </summary>
        /// <remarks>
        /// If the requested region is not contiguous in physical memory,
        /// this will perform an allocation, and flush the data (writing it
        /// back to the backing memory) on disposal.
        /// </remarks>
        /// <param name="va">Virtual address of the data</param>
        /// <param name="size">Size of the data</param>
        /// <param name="tracked">True if write tracking is triggered on the span</param>
        /// <returns>A writable region of memory containing the data</returns>
        /// <exception cref="InvalidMemoryRegionException">Thrown for unhandled invalid or unmapped memory accesses</exception>
        public unsafe WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
        {
            if (size == 0)
            {
                return new WritableRegion(null, va, Memory<byte>.Empty);
            }

            if (IsContiguousAndMapped(va, size))
            {
                return new WritableRegion(null, va, new NativeMemoryManager<byte>((byte*)GetHostAddress(va), size).Memory);
            }
            else
            {
                Memory<byte> memory = new byte[size];

                GetSpan(va, size).CopyTo(memory.Span);

                return new WritableRegion(this, va, memory);
            }
        }
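        // Usage sketch (editor's addition, assuming WritableRegion exposes its buffer
        // through a Memory property): dispose the region so a non-contiguous (copied)
        // buffer is written back to guest memory.
        //
        //     using (WritableRegion region = mm.GetWritableRegion(0x10000, 0x1000))
        //     {
        //         region.Memory.Span.Fill(0);
        //     }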

        /// <summary>
        /// Gets a reference for the given type at the specified virtual memory address.
        /// </summary>
        /// <remarks>
        /// The data must be located at a contiguous memory region.
        /// </remarks>
        /// <typeparam name="T">Type of the data to get the reference</typeparam>
        /// <param name="va">Virtual address of the data</param>
        /// <returns>A reference to the data in memory</returns>
        /// <exception cref="MemoryNotContiguousException">Thrown if the specified memory region is not contiguous in physical memory</exception>
        public unsafe ref T GetRef<T>(ulong va) where T : unmanaged
        {
            if (!IsContiguous(va, Unsafe.SizeOf<T>()))
            {
                ThrowMemoryNotContiguous();
            }

            return ref *(T*)GetHostAddress(va);
        }
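        // Usage sketch (editor's addition): the reference points straight into host
        // memory, so in-place updates need no explicit write-back. The value must not
        // straddle pages that are not contiguous on the host.
        //
        //     ref uint counter = ref mm.GetRef<uint>(0x10000);
        //     counter++;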

        /// <summary>
        /// Computes the number of pages in a virtual address range.
        /// </summary>
        /// <param name="va">Virtual address of the range</param>
        /// <param name="size">Size of the range</param>
        /// <param name="startVa">The virtual address of the beginning of the first page</param>
        /// <remarks>This function does not differentiate between allocated and unallocated pages.</remarks>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int GetPagesCount(ulong va, uint size, out ulong startVa)
        {
            // WARNING: Always check if ulong does not overflow during the operations.
            startVa = va & ~(ulong)PageMask;
            ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;

            return (int)(vaSpan / PageSize);
        }
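        // Worked example (editor's addition): with 4 KiB pages, va = 0x1234 and
        // size = 0x2000 give startVa = 0x1000 and vaSpan = (0x234 + 0x2000 + 0xFFF)
        // & ~0xFFFul = 0x3000, i.e. 3 pages; the unaligned start pulls in an extra page.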

        private void ThrowMemoryNotContiguous() => throw new MemoryNotContiguousException();

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private bool IsContiguousAndMapped(ulong va, int size) => IsContiguous(va, size) && IsMapped(va);

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private bool IsContiguous(ulong va, int size)
        {
            if (!ValidateAddress(va) || !ValidateAddressAndSize(va, (ulong)size))
            {
                return false;
            }

            int pages = GetPagesCount(va, (uint)size, out va);

            for (int page = 0; page < pages - 1; page++)
            {
                if (!ValidateAddress(va + PageSize))
                {
                    return false;
                }

                if (GetHostAddress(va) + PageSize != GetHostAddress(va + PageSize))
                {
                    return false;
                }

                va += PageSize;
            }

            return true;
        }

        /// <summary>
        /// Gets the physical regions that make up the given virtual address region.
        /// If any part of the virtual region is unmapped, null is returned.
        /// </summary>
        /// <param name="va">Virtual address of the range</param>
        /// <param name="size">Size of the range</param>
        /// <returns>Array of physical regions</returns>
        public IEnumerable<HostMemoryRange> GetPhysicalRegions(ulong va, ulong size)
        {
            if (size == 0)
            {
                return Enumerable.Empty<HostMemoryRange>();
            }

            if (!ValidateAddress(va) || !ValidateAddressAndSize(va, size))
            {
                return null;
            }

            int pages = GetPagesCount(va, (uint)size, out va);

            var regions = new List<HostMemoryRange>();

            nuint regionStart = GetHostAddress(va);
            ulong regionSize = PageSize;

            for (int page = 0; page < pages - 1; page++)
            {
                if (!ValidateAddress(va + PageSize))
                {
                    return null;
                }

                nuint newHostAddress = GetHostAddress(va + PageSize);

                if (GetHostAddress(va) + PageSize != newHostAddress)
                {
                    regions.Add(new HostMemoryRange(regionStart, regionSize));
                    regionStart = newHostAddress;
                    regionSize = 0;
                }

                va += PageSize;
                regionSize += PageSize;
            }

            regions.Add(new HostMemoryRange(regionStart, regionSize));

            return regions;
        }
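        // Usage sketch (editor's addition, assuming HostMemoryRange exposes Address and
        // Size properties): pages that are adjacent in both guest and host memory are
        // coalesced into a single range.
        //
        //     foreach (HostMemoryRange range in mm.GetPhysicalRegions(0x10000, 0x4000))
        //     {
        //         Console.WriteLine($"0x{range.Address:X} (+0x{range.Size:X} bytes)");
        //     }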

        private void ReadImpl(ulong va, Span<byte> data)
        {
            if (data.Length == 0)
            {
                return;
            }

            AssertValidAddressAndSize(va, (ulong)data.Length);

            int offset = 0, size;

            // Read the leading misaligned part, up to the next page boundary.
            if ((va & PageMask) != 0)
            {
                size = Math.Min(data.Length, PageSize - (int)(va & PageMask));

                GetHostSpanContiguous(va, size).CopyTo(data.Slice(0, size));

                offset += size;
            }

            // Copy the rest one page at a time.
            for (; offset < data.Length; offset += size)
            {
                size = Math.Min(data.Length - offset, PageSize);

                GetHostSpanContiguous(va + (ulong)offset, size).CopyTo(data.Slice(offset, size));
            }
        }

        /// <summary>
        /// Checks if the page at a given virtual address is mapped.
        /// </summary>
        /// <param name="va">Virtual address to check</param>
        /// <returns>True if the address is mapped, false otherwise</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public bool IsMapped(ulong va)
        {
            if (!ValidateAddress(va))
            {
                return false;
            }

            return _pageTable.Read(va) != 0;
        }

        /// <summary>
        /// Checks if a memory range is mapped.
        /// </summary>
        /// <param name="va">Virtual address of the range</param>
        /// <param name="size">Size of the range in bytes</param>
        /// <returns>True if the entire range is mapped, false otherwise</returns>
        public bool IsRangeMapped(ulong va, ulong size)
        {
            if (size == 0UL)
            {
                return true;
            }

            if (!ValidateAddressAndSize(va, size))
            {
                return false;
            }

            int pages = GetPagesCount(va, (uint)size, out va);

            for (int page = 0; page < pages; page++)
            {
                if (!IsMapped(va))
                {
                    return false;
                }

                va += PageSize;
            }

            return true;
        }

        private bool ValidateAddress(ulong va)
        {
            return va < _addressSpaceSize;
        }

        /// <summary>
        /// Checks if the combination of virtual address and size is part of the addressable space.
        /// </summary>
        /// <param name="va">Virtual address of the range</param>
        /// <param name="size">Size of the range in bytes</param>
        /// <returns>True if the combination of virtual address and size is part of the addressable space</returns>
        private bool ValidateAddressAndSize(ulong va, ulong size)
        {
            ulong endVa = va + size;
            return endVa >= va && endVa >= size && endVa <= _addressSpaceSize;
        }
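        // Worked example (editor's addition): the endVa >= va && endVa >= size checks
        // catch unsigned wrap-around. For va = 0xFFFF_FFFF_FFFF_F000 and size = 0x2000,
        // endVa wraps to 0x1000, which is smaller than both inputs, so the range is
        // rejected even though endVa <= _addressSpaceSize would hold on its own.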

        /// <summary>
        /// Ensures the combination of virtual address and size is part of the addressable space.
        /// </summary>
        /// <param name="va">Virtual address of the range</param>
        /// <param name="size">Size of the range in bytes</param>
        /// <exception cref="InvalidMemoryRegionException">Thrown when the specified memory region is outside the addressable space</exception>
        private void AssertValidAddressAndSize(ulong va, ulong size)
        {
            if (!ValidateAddressAndSize(va, size))
            {
                throw new InvalidMemoryRegionException($"va=0x{va:X16}, size=0x{size:X16}");
            }
        }

        private unsafe Span<byte> GetHostSpanContiguous(ulong va, int size)
        {
            return new Span<byte>((void*)GetHostAddress(va), size);
        }

        private nuint GetHostAddress(ulong va)
        {
            return _pageTable.Read(va) + (nuint)(va & PageMask);
        }

        /// <summary>
        /// Reprotects a region of virtual memory for tracking. Sets software protection bits.
        /// </summary>
        /// <param name="va">Virtual address base</param>
        /// <param name="size">Size of the region to protect</param>
        /// <param name="protection">Memory protection to set</param>
        public void TrackingReprotect(ulong va, ulong size, MemoryPermission protection)
        {
            throw new NotImplementedException();
        }

        public void SignalMemoryTracking(ulong va, ulong size, bool write)
        {
            // Only the ARM Memory Manager has tracking for now.
        }
    }
}