From 13c60ebcdee05feb8bd99f4d0dc3279aa0e2c3ac Mon Sep 17 00:00:00 2001
From: collecting
Date: Wed, 15 Oct 2025 04:24:03 +0000
Subject: [PATCH] fix: Prevent race condition on unmapped memory reads

---
 src/video_core/buffer_cache/buffer_cache.h | 15 ++++++++++++++-
 1 file changed, 14 insertions(+), 1 deletion(-)

diff --git a/src/video_core/buffer_cache/buffer_cache.h b/src/video_core/buffer_cache/buffer_cache.h
index 0c230f1fc..4fdd8e3b5 100644
--- a/src/video_core/buffer_cache/buffer_cache.h
+++ b/src/video_core/buffer_cache/buffer_cache.h
@@ -8,6 +8,7 @@
 #include
 #include
 
+#include "common/logging/log.h"
 #include "common/range_sets.inc"
 #include "video_core/buffer_cache/buffer_cache_base.h"
 #include "video_core/guest_memory.h"
@@ -1509,13 +1510,25 @@ template <class P>
 void BufferCache<P>::MappedUploadMemory([[maybe_unused]] Buffer& buffer,
                                         [[maybe_unused]] u64 total_size_bytes,
                                         [[maybe_unused]] std::span<BufferCopy> copies) {
+
     if constexpr (USE_MEMORY_MAPS) {
         auto upload_staging = runtime.UploadStagingBuffer(total_size_bytes);
         const std::span<u8> staging_pointer = upload_staging.mapped_span;
         for (BufferCopy& copy : copies) {
             u8* const src_pointer = staging_pointer.data() + copy.src_offset;
             const DAddr device_addr = buffer.CpuAddr() + copy.dst_offset;
-            device_memory.ReadBlockUnsafe(device_addr, src_pointer, copy.size);
+
+            // GetSpan is a fast way to check for contiguous, mapped memory.
+            // If it returns nullptr, the memory is unmapped or fragmented.
+            if (device_memory.GetSpan(device_addr, copy.size) == nullptr) {
+                // The memory is no longer valid. Log a warning and fill this chunk with zeros
+                // to prevent the GPU from rendering garbage.
+                LOG_WARNING(Render_Vulkan, "MappedUploadMemory: Aborting copy from now-unmapped guest address 0x{:08X}", device_addr);
+                std::memset(src_pointer, 0, copy.size);
+            } else {
+                // Memory is valid, proceed with the copy.
+                device_memory.ReadBlockUnsafe(device_addr, src_pointer, copy.size);
+            }
 
             // Apply the staging offset
             copy.src_offset += upload_staging.offset;
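
Note (illustrative only, not part of the patch): the change above follows a "probe, then read or zero-fill" pattern. It probes the guest range with DeviceMemory::GetSpan and only calls ReadBlockUnsafe when the range is still mapped; otherwise it zeroes the staging chunk so the GPU never samples stale or unmapped data. The self-contained sketch below mirrors that pattern outside the emulator. FakeDeviceMemory, GuardedRead, and their layout are hypothetical stand-ins, not the real video_core types or API.

    // guarded_read_sketch.cpp: minimal illustration of the guarded-read pattern.
    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <cstring>
    #include <map>
    #include <vector>

    using DAddr = std::uint64_t;

    // Stand-in for guest device memory in which only some ranges are mapped.
    struct FakeDeviceMemory {
        std::map<DAddr, std::vector<std::uint8_t>> mapped_blocks;

        // Returns a pointer when [addr, addr + size) lies entirely inside one
        // mapped block, and nullptr otherwise (the contract the patch relies on).
        const std::uint8_t* GetSpan(DAddr addr, std::size_t size) const {
            for (const auto& [base, block] : mapped_blocks) {
                if (addr >= base && addr + size <= base + block.size()) {
                    return block.data() + (addr - base);
                }
            }
            return nullptr;
        }
    };

    // Copy guest memory into a staging buffer, or zero-fill the destination when
    // the source range is no longer mapped, instead of reading freed memory.
    void GuardedRead(const FakeDeviceMemory& memory, DAddr addr, std::uint8_t* dst,
                     std::size_t size) {
        if (const std::uint8_t* src = memory.GetSpan(addr, size)) {
            std::memcpy(dst, src, size); // range is still mapped, normal copy
        } else {
            std::printf("warning: guest range 0x%llx (+%zu bytes) is unmapped, zero-filling\n",
                        static_cast<unsigned long long>(addr), size);
            std::memset(dst, 0, size); // keep the staging chunk defined but blank
        }
    }

    int main() {
        FakeDeviceMemory memory;
        memory.mapped_blocks[0x1000] = std::vector<std::uint8_t>(0x100, 0xAB);

        std::uint8_t staging[16] = {};
        GuardedRead(memory, 0x1000, staging, sizeof(staging)); // mapped: copies 0xAB bytes
        GuardedRead(memory, 0x9000, staging, sizeof(staging)); // unmapped: zero-fills
        return 0;
    }

Zero-filling, as opposed to skipping the chunk entirely, keeps the rest of the loop unchanged: every copy still gets its staging offset applied and later copies in the same staging buffer are unaffected, at the cost of uploading zeros for the invalid range.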