using ARMeilleure.Memory;
using Ryujinx.Memory;
using Ryujinx.Memory.Range;
using Ryujinx.Memory.Tracking;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;

namespace Ryujinx.Cpu.Jit
{
    /// <summary>
    /// Represents a CPU memory manager which maps guest virtual memory directly onto a host virtual region.
    /// </summary>
    public sealed class MemoryManagerHostMapped : MemoryManagerBase, IMemoryManager, IVirtualMemoryManagerTracked, IWritableBlock
    {
        public const int PageBits = 12;
        public const int PageSize = 1 << PageBits;
        public const int PageMask = PageSize - 1;

        public const int PageToPteShift = 5; // 32 pages (2 bits each) in one ulong page table entry.
        public const ulong BlockMappedMask = 0x5555555555555555; // First bit of each table entry set.

        private enum HostMappedPtBits : ulong
        {
            Unmapped = 0,
            Mapped,
            WriteTracked,
            ReadWriteTracked,

            MappedReplicated = 0x5555555555555555,
            WriteTrackedReplicated = 0xaaaaaaaaaaaaaaaa,
            ReadWriteTrackedReplicated = ulong.MaxValue
        }

        private readonly InvalidAccessHandler _invalidAccessHandler;
        private readonly bool _unsafeMode;

        private readonly AddressSpace _addressSpace;

        public ulong AddressSpaceSize { get; }

        private readonly PageTable<ulong> _pageTable;

        private readonly MemoryEhMeilleure _memoryEh;

        private readonly ulong[] _pageBitmap;

        /// <inheritdoc/>
        public bool Supports4KBPages => MemoryBlock.GetPageSize() == PageSize;

        public int AddressSpaceBits { get; }

        public IntPtr PageTablePointer => _addressSpace.Base.Pointer;

        public MemoryManagerType Type => _unsafeMode ? MemoryManagerType.HostMappedUnsafe : MemoryManagerType.HostMapped;

        public MemoryTracking Tracking { get; }

        public event Action<ulong, ulong> UnmapEvent;

        /// <summary>
        /// Creates a new instance of the host mapped memory manager.
        /// </summary>
        /// <param name="addressSpace">Address space instance to use</param>
        /// <param name="unsafeMode">True if unmanaged access should not be masked (unsafe), false otherwise</param>
        /// <param name="invalidAccessHandler">Optional function to handle invalid memory accesses</param>
        public MemoryManagerHostMapped(AddressSpace addressSpace, bool unsafeMode, InvalidAccessHandler invalidAccessHandler)
        {
            _addressSpace = addressSpace;
            _pageTable = new PageTable<ulong>();
            _invalidAccessHandler = invalidAccessHandler;
            _unsafeMode = unsafeMode;
            AddressSpaceSize = addressSpace.AddressSpaceSize;

            ulong asSize = PageSize;
            int asBits = PageBits;

            while (asSize < AddressSpaceSize)
            {
                asSize <<= 1;
                asBits++;
            }

            AddressSpaceBits = asBits;

            _pageBitmap = new ulong[1 << (AddressSpaceBits - (PageBits + PageToPteShift))];

            Tracking = new MemoryTracking(this, (int)MemoryBlock.GetPageSize(), invalidAccessHandler);
            _memoryEh = new MemoryEhMeilleure(_addressSpace.Base, _addressSpace.Mirror, Tracking);
        }
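        // Sizing example, assuming a 39-bit guest address space: AddressSpaceBits = 39, so the bitmap has
        // 1 << (39 - (PageBits + PageToPteShift)) = 1 << 22 ulong entries (32 MiB). Each entry packs the
        // 2-bit state of 32 consecutive 4 KiB pages, so one entry covers 128 KiB of guest virtual memory.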
        /// <summary>
        /// Checks if the virtual address is part of the addressable space.
        /// </summary>
        /// <param name="va">Virtual address</param>
        /// <returns>True if the virtual address is part of the addressable space</returns>
        private bool ValidateAddress(ulong va)
        {
            return va < AddressSpaceSize;
        }

        /// <summary>
        /// Checks if the combination of virtual address and size is part of the addressable space.
        /// </summary>
        /// <param name="va">Virtual address of the range</param>
        /// <param name="size">Size of the range in bytes</param>
        /// <returns>True if the combination of virtual address and size is part of the addressable space</returns>
        private bool ValidateAddressAndSize(ulong va, ulong size)
        {
            ulong endVa = va + size;
            return endVa >= va && endVa >= size && endVa <= AddressSpaceSize;
        }

        /// <summary>
        /// Ensures the combination of virtual address and size is part of the addressable space.
        /// </summary>
        /// <param name="va">Virtual address of the range</param>
        /// <param name="size">Size of the range in bytes</param>
        /// <exception cref="InvalidMemoryRegionException">Thrown when the specified memory region is outside the addressable space</exception>
        private void AssertValidAddressAndSize(ulong va, ulong size)
        {
            if (!ValidateAddressAndSize(va, size))
            {
                throw new InvalidMemoryRegionException($"va=0x{va:X16}, size=0x{size:X16}");
            }
        }

        /// <summary>
        /// Ensures the combination of virtual address and size is part of the addressable space and fully mapped.
        /// </summary>
        /// <param name="va">Virtual address of the range</param>
        /// <param name="size">Size of the range in bytes</param>
        private void AssertMapped(ulong va, ulong size)
        {
            if (!ValidateAddressAndSize(va, size) || !IsRangeMappedImpl(va, size))
            {
                throw new InvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
            }
        }

        /// <inheritdoc/>
        public void Map(ulong va, ulong pa, ulong size, MemoryMapFlags flags)
        {
            AssertValidAddressAndSize(va, size);

            _addressSpace.Map(va, pa, size, flags);
            AddMapping(va, size);
            PtMap(va, pa, size);

            Tracking.Map(va, size);
        }

        /// <inheritdoc/>
        public void MapForeign(ulong va, nuint hostPointer, ulong size)
        {
            throw new NotSupportedException();
        }

        /// <inheritdoc/>
        public void Unmap(ulong va, ulong size)
        {
            AssertValidAddressAndSize(va, size);

            UnmapEvent?.Invoke(va, size);

            Tracking.Unmap(va, size);

            RemoveMapping(va, size);
            PtUnmap(va, size);
            _addressSpace.Unmap(va, size);
        }

        private void PtMap(ulong va, ulong pa, ulong size)
        {
            while (size != 0)
            {
                _pageTable.Map(va, pa);

                va += PageSize;
                pa += PageSize;
                size -= PageSize;
            }
        }

        private void PtUnmap(ulong va, ulong size)
        {
            while (size != 0)
            {
                _pageTable.Unmap(va);

                va += PageSize;
                size -= PageSize;
            }
        }

        /// <inheritdoc/>
        public T Read<T>(ulong va) where T : unmanaged
        {
            try
            {
                AssertMapped(va, (ulong)Unsafe.SizeOf<T>());

                return _addressSpace.Mirror.Read<T>(va);
            }
            catch (InvalidMemoryRegionException)
            {
                if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
                {
                    throw;
                }

                return default;
            }
        }

        /// <inheritdoc/>
        public T ReadTracked<T>(ulong va) where T : unmanaged
        {
            try
            {
                SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), false);

                return Read<T>(va);
            }
            catch (InvalidMemoryRegionException)
            {
                if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
                {
                    throw;
                }

                return default;
            }
        }

        /// <inheritdoc/>
        public void Read(ulong va, Span<byte> data)
        {
            try
            {
                AssertMapped(va, (ulong)data.Length);

                _addressSpace.Mirror.Read(va, data);
            }
            catch (InvalidMemoryRegionException)
            {
                if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
                {
                    throw;
                }
            }
        }

        /// <inheritdoc/>
        public void Write<T>(ulong va, T value) where T : unmanaged
        {
            try
            {
                SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), write: true);

                _addressSpace.Mirror.Write(va, value);
            }
            catch (InvalidMemoryRegionException)
            {
                if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
                {
                    throw;
                }
            }
        }

        /// <inheritdoc/>
        public void Write(ulong va, ReadOnlySpan<byte> data)
        {
            try
            {
                SignalMemoryTracking(va, (ulong)data.Length, write: true);

                _addressSpace.Mirror.Write(va, data);
            }
            catch (InvalidMemoryRegionException)
            {
                if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
                {
                    throw;
                }
            }
        }

        /// <inheritdoc/>
        public void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
        {
            try
            {
                AssertMapped(va, (ulong)data.Length);

                _addressSpace.Mirror.Write(va, data);
            }
            catch (InvalidMemoryRegionException)
            {
                if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
                {
                    throw;
                }
            }
        }
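        // Typical caller usage (the addresses, buffer, and memoryManager instance below are illustrative only):
        //   uint word = memoryManager.ReadTracked<uint>(0x8000_0000);  // signals a read tracking event first
        //   memoryManager.Write(0x8000_0000, word + 1);                // signals a write tracking event first
        //   memoryManager.WriteUntracked(0x8000_0000, buffer);         // writes without signalling tracking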
        /// <inheritdoc/>
        public bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
        {
            try
            {
                SignalMemoryTracking(va, (ulong)data.Length, false);

                Span<byte> target = _addressSpace.Mirror.GetSpan(va, data.Length);
                bool changed = !data.SequenceEqual(target);

                if (changed)
                {
                    data.CopyTo(target);
                }

                return changed;
            }
            catch (InvalidMemoryRegionException)
            {
                if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
                {
                    throw;
                }

                return true;
            }
        }

        /// <inheritdoc/>
        public ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
        {
            if (tracked)
            {
                SignalMemoryTracking(va, (ulong)size, write: false);
            }
            else
            {
                AssertMapped(va, (ulong)size);
            }

            return _addressSpace.Mirror.GetSpan(va, size);
        }

        /// <inheritdoc/>
        public WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
        {
            if (tracked)
            {
                SignalMemoryTracking(va, (ulong)size, true);
            }
            else
            {
                AssertMapped(va, (ulong)size);
            }

            return _addressSpace.Mirror.GetWritableRegion(va, size);
        }

        /// <inheritdoc/>
        public ref T GetRef<T>(ulong va) where T : unmanaged
        {
            SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), true);

            return ref _addressSpace.Mirror.GetRef<T>(va);
        }

        /// <inheritdoc/>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public bool IsMapped(ulong va)
        {
            return ValidateAddress(va) && IsMappedImpl(va);
        }

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private bool IsMappedImpl(ulong va)
        {
            ulong page = va >> PageBits;

            int bit = (int)((page & 31) << 1);

            int pageIndex = (int)(page >> PageToPteShift);
            ref ulong pageRef = ref _pageBitmap[pageIndex];

            ulong pte = Volatile.Read(ref pageRef);

            return ((pte >> bit) & 3) != 0;
        }

        /// <inheritdoc/>
        public bool IsRangeMapped(ulong va, ulong size)
        {
            AssertValidAddressAndSize(va, size);

            return IsRangeMappedImpl(va, size);
        }

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private void GetPageBlockRange(ulong pageStart, ulong pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex)
        {
            startMask = ulong.MaxValue << ((int)(pageStart & 31) << 1);
            endMask = ulong.MaxValue >> (64 - ((int)(pageEnd & 31) << 1));

            pageIndex = (int)(pageStart >> PageToPteShift);
            pageEndIndex = (int)((pageEnd - 1) >> PageToPteShift);
        }
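        // Worked example for the masks above: a range covering pages 3..9 has pageStart = 3 and pageEnd = 10,
        // so startMask = ~0UL << 6 and endMask = ~0UL >> 44 (the low 20 bits). Their intersection selects
        // bits 6..19, which are exactly the 2-bit entries for pages 3 through 9 of a single bitmap word.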
        private bool IsRangeMappedImpl(ulong va, ulong size)
        {
            int pages = GetPagesCount(va, size, out _);

            if (pages == 1)
            {
                return IsMappedImpl(va);
            }

            ulong pageStart = va >> PageBits;
            ulong pageEnd = pageStart + (ulong)pages;

            GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);

            // Check if either bit in each 2 bit page entry is set.
            // OR the block with itself shifted down by 1, and check the first bit of each entry.

            ulong mask = BlockMappedMask & startMask;

            while (pageIndex <= pageEndIndex)
            {
                if (pageIndex == pageEndIndex)
                {
                    mask &= endMask;
                }

                ref ulong pageRef = ref _pageBitmap[pageIndex++];
                ulong pte = Volatile.Read(ref pageRef);

                pte |= pte >> 1;
                if ((pte & mask) != mask)
                {
                    return false;
                }

                mask = BlockMappedMask;
            }

            return true;
        }

        /// <inheritdoc/>
        public IEnumerable<HostMemoryRange> GetHostRegions(ulong va, ulong size)
        {
            AssertValidAddressAndSize(va, size);

            return Enumerable.Repeat(new HostMemoryRange((nuint)(ulong)_addressSpace.Mirror.GetPointer(va, size), size), 1);
        }

        /// <inheritdoc/>
        public IEnumerable<MemoryRange> GetPhysicalRegions(ulong va, ulong size)
        {
            int pages = GetPagesCount(va, (uint)size, out va);

            var regions = new List<MemoryRange>();

            ulong regionStart = GetPhysicalAddressChecked(va);
            ulong regionSize = PageSize;

            for (int page = 0; page < pages - 1; page++)
            {
                if (!ValidateAddress(va + PageSize))
                {
                    return null;
                }

                ulong newPa = GetPhysicalAddressChecked(va + PageSize);

                if (GetPhysicalAddressChecked(va) + PageSize != newPa)
                {
                    regions.Add(new MemoryRange(regionStart, regionSize));
                    regionStart = newPa;
                    regionSize = 0;
                }

                va += PageSize;
                regionSize += PageSize;
            }

            regions.Add(new MemoryRange(regionStart, regionSize));

            return regions;
        }

        private ulong GetPhysicalAddressChecked(ulong va)
        {
            if (!IsMapped(va))
            {
                ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}");
            }

            return GetPhysicalAddressInternal(va);
        }

        private ulong GetPhysicalAddressInternal(ulong va)
        {
            return _pageTable.Read(va) + (va & PageMask);
        }
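        // The 2-bit page states order from least to most restrictive: Unmapped (0), Mapped (1),
        // WriteTracked (2), ReadWriteTracked (3). A write must be reported once the state reaches
        // WriteTracked, while a read is only reported at ReadWriteTracked, which is why
        // SignalMemoryTracking below can compare the state numerically against the tag of the access type.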
        /// <inheritdoc/>
        /// <remarks>
        /// This function also validates that the given range is both valid and mapped, and will throw if it is not.
        /// </remarks>
        public void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null)
        {
            AssertValidAddressAndSize(va, size);

            if (precise)
            {
                Tracking.VirtualMemoryEvent(va, size, write, precise: true, exemptId);
                return;
            }

            // Software table, used for managed memory tracking.

            int pages = GetPagesCount(va, size, out _);
            ulong pageStart = va >> PageBits;

            if (pages == 1)
            {
                ulong tag = (ulong)(write ? HostMappedPtBits.WriteTracked : HostMappedPtBits.ReadWriteTracked);

                int bit = (int)((pageStart & 31) << 1);

                int pageIndex = (int)(pageStart >> PageToPteShift);
                ref ulong pageRef = ref _pageBitmap[pageIndex];

                ulong pte = Volatile.Read(ref pageRef);
                ulong state = ((pte >> bit) & 3);

                if (state >= tag)
                {
                    Tracking.VirtualMemoryEvent(va, size, write, precise: false, exemptId);
                    return;
                }
                else if (state == 0)
                {
                    ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
                }
            }
            else
            {
                ulong pageEnd = pageStart + (ulong)pages;

                GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);

                ulong mask = startMask;

                ulong anyTrackingTag = (ulong)HostMappedPtBits.WriteTrackedReplicated;

                while (pageIndex <= pageEndIndex)
                {
                    if (pageIndex == pageEndIndex)
                    {
                        mask &= endMask;
                    }

                    ref ulong pageRef = ref _pageBitmap[pageIndex++];

                    ulong pte = Volatile.Read(ref pageRef);
                    ulong mappedMask = mask & BlockMappedMask;

                    ulong mappedPte = pte | (pte >> 1);
                    if ((mappedPte & mappedMask) != mappedMask)
                    {
                        ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
                    }

                    pte &= mask;
                    if ((pte & anyTrackingTag) != 0) // Search for any tracking.
                    {
                        // Writes trigger any tracking.
                        // Only trigger tracking from reads if both bits are set on any page.
                        if (write || (pte & (pte >> 1) & BlockMappedMask) != 0)
                        {
                            Tracking.VirtualMemoryEvent(va, size, write, precise: false, exemptId);
                            break;
                        }
                    }

                    mask = ulong.MaxValue;
                }
            }
        }

        /// <summary>
        /// Computes the number of pages in a virtual address range.
        /// </summary>
        /// <param name="va">Virtual address of the range</param>
        /// <param name="size">Size of the range</param>
        /// <param name="startVa">The virtual address of the beginning of the first page</param>
        /// <remarks>This function does not differentiate between allocated and unallocated pages.</remarks>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int GetPagesCount(ulong va, ulong size, out ulong startVa)
        {
            // WARNING: Always check if ulong does not overflow during the operations.
            startVa = va & ~(ulong)PageMask;
            ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;

            return (int)(vaSpan / PageSize);
        }

        /// <inheritdoc/>
        public void TrackingReprotect(ulong va, ulong size, MemoryPermission protection)
        {
            // Protection is inverted on software pages, since the default value is 0.
            protection = (~protection) & MemoryPermission.ReadAndWrite;

            int pages = GetPagesCount(va, size, out va);
            ulong pageStart = va >> PageBits;

            if (pages == 1)
            {
                ulong protTag = protection switch
                {
                    MemoryPermission.None => (ulong)HostMappedPtBits.Mapped,
                    MemoryPermission.Write => (ulong)HostMappedPtBits.WriteTracked,
                    _ => (ulong)HostMappedPtBits.ReadWriteTracked,
                };

                int bit = (int)((pageStart & 31) << 1);

                ulong tagMask = 3UL << bit;
                ulong invTagMask = ~tagMask;

                ulong tag = protTag << bit;

                int pageIndex = (int)(pageStart >> PageToPteShift);
                ref ulong pageRef = ref _pageBitmap[pageIndex];

                ulong pte;

                do
                {
                    pte = Volatile.Read(ref pageRef);
                }
                while ((pte & tagMask) != 0 && Interlocked.CompareExchange(ref pageRef, (pte & invTagMask) | tag, pte) != pte);
            }
            else
            {
                ulong pageEnd = pageStart + (ulong)pages;

                GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);

                ulong mask = startMask;

                ulong protTag = protection switch
                {
                    MemoryPermission.None => (ulong)HostMappedPtBits.MappedReplicated,
                    MemoryPermission.Write => (ulong)HostMappedPtBits.WriteTrackedReplicated,
                    _ => (ulong)HostMappedPtBits.ReadWriteTrackedReplicated,
                };

                while (pageIndex <= pageEndIndex)
                {
                    if (pageIndex == pageEndIndex)
                    {
                        mask &= endMask;
                    }

                    ref ulong pageRef = ref _pageBitmap[pageIndex++];

                    ulong pte;
                    ulong mappedMask;

                    // Change the protection of all 2 bit entries that are mapped.
                    do
                    {
                        pte = Volatile.Read(ref pageRef);

                        mappedMask = pte | (pte >> 1);
                        mappedMask |= (mappedMask & BlockMappedMask) << 1;
                        mappedMask &= mask; // Only update mapped pages within the given range.
                    }
                    while (Interlocked.CompareExchange(ref pageRef, (pte & (~mappedMask)) | (protTag & mappedMask), pte) != pte);

                    mask = ulong.MaxValue;
                }
            }

            protection = protection switch
            {
                MemoryPermission.None => MemoryPermission.ReadAndWrite,
                MemoryPermission.Write => MemoryPermission.Read,
                _ => MemoryPermission.None
            };

            _addressSpace.Base.Reprotect(va, size, protection, false);
        }

        /// <inheritdoc/>
        public RegionHandle BeginTracking(ulong address, ulong size, int id)
        {
            return Tracking.BeginTracking(address, size, id);
        }

        /// <inheritdoc/>
        public MultiRegionHandle BeginGranularTracking(ulong address, ulong size, IEnumerable<IRegionHandle> handles, ulong granularity, int id)
        {
            return Tracking.BeginGranularTracking(address, size, handles, granularity, id);
        }

        /// <inheritdoc/>
        public SmartMultiRegionHandle BeginSmartGranularTracking(ulong address, ulong size, ulong granularity, int id)
        {
            return Tracking.BeginSmartGranularTracking(address, size, granularity, id);
        }
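        // Bitmap update example, assuming pages 0..3 of one word are being mapped: mask selects bits 0..7,
        // mappedMask marks entries that are already mapped (or outside the range) as "keep", and the CAS in
        // AddMapping below sets the Mapped bit pattern (binary 01) only for entries that are still unmapped.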
        /// <summary>
        /// Adds the given address mapping to the page table.
        /// </summary>
        /// <param name="va">Virtual memory address</param>
        /// <param name="size">Size to be mapped</param>
        private void AddMapping(ulong va, ulong size)
        {
            int pages = GetPagesCount(va, size, out _);

            ulong pageStart = va >> PageBits;
            ulong pageEnd = pageStart + (ulong)pages;

            GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);

            ulong mask = startMask;

            while (pageIndex <= pageEndIndex)
            {
                if (pageIndex == pageEndIndex)
                {
                    mask &= endMask;
                }

                ref ulong pageRef = ref _pageBitmap[pageIndex++];

                ulong pte;
                ulong mappedMask;

                // Map all 2-bit entries that are unmapped.
                do
                {
                    pte = Volatile.Read(ref pageRef);

                    mappedMask = pte | (pte >> 1);
                    mappedMask |= (mappedMask & BlockMappedMask) << 1;
                    mappedMask |= ~mask; // Treat everything outside the range as mapped, thus unchanged.
                }
                while (Interlocked.CompareExchange(ref pageRef, (pte & mappedMask) | (BlockMappedMask & (~mappedMask)), pte) != pte);

                mask = ulong.MaxValue;
            }
        }

        /// <summary>
        /// Removes the given address mapping from the page table.
        /// </summary>
        /// <param name="va">Virtual memory address</param>
        /// <param name="size">Size to be unmapped</param>
        private void RemoveMapping(ulong va, ulong size)
        {
            int pages = GetPagesCount(va, size, out _);

            ulong pageStart = va >> PageBits;
            ulong pageEnd = pageStart + (ulong)pages;

            GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);

            startMask = ~startMask;
            endMask = ~endMask;

            ulong mask = startMask;

            while (pageIndex <= pageEndIndex)
            {
                if (pageIndex == pageEndIndex)
                {
                    mask |= endMask;
                }

                ref ulong pageRef = ref _pageBitmap[pageIndex++];
                ulong pte;

                do
                {
                    pte = Volatile.Read(ref pageRef);
                }
                while (Interlocked.CompareExchange(ref pageRef, pte & mask, pte) != pte);

                mask = 0;
            }
        }

        /// <summary>
        /// Disposes of resources used by the memory manager.
        /// </summary>
        protected override void Destroy()
        {
            _addressSpace.Dispose();
            _memoryEh.Dispose();
        }

        private static void ThrowInvalidMemoryRegionException(string message) => throw new InvalidMemoryRegionException(message);
    }
}