early-access version 2974

pineappleEA 2022-09-26 08:52:18 +02:00
parent ba3af01e76
commit aff4265364
6 changed files with 65 additions and 66 deletions

View File

@@ -7,11 +7,6 @@ function(get_timestamp _var)
     set(${_var} "${timestamp}" PARENT_SCOPE)
 endfunction()
-list(APPEND CMAKE_MODULE_PATH "${SRC_DIR}/externals/cmake-modules")
-# Find the package here with the known path so that the GetGit commands can find it as well
-find_package(Git QUIET PATHS "${GIT_EXECUTABLE}")
 # generate git/build information
 include(GetGitRevisionDescription)
 if(NOT GIT_REF_SPEC)
@@ -29,6 +24,7 @@ get_timestamp(BUILD_DATE)
 # Also if this is a CI build, add the build name (ie: Nightly, Canary) to the scm_rev file as well
 set(REPO_NAME "")
 set(BUILD_VERSION "0")
+set(BUILD_ID ${DISPLAY_VERSION})
 if (BUILD_REPOSITORY)
     # regex capture the string nightly or canary into CMAKE_MATCH_1
     string(REGEX MATCH "yuzu-emu/yuzu-?(.*)" OUTVAR ${BUILD_REPOSITORY})
@@ -57,6 +53,4 @@ if (BUILD_REPOSITORY)
     endif()
 endif()
-# The variable SRC_DIR must be passed into the script
-# (since it uses the current build directory for all values of CMAKE_*_DIR)
-configure_file("${SRC_DIR}/src/common/scm_rev.cpp.in" "scm_rev.cpp" @ONLY)
+configure_file(scm_rev.cpp.in scm_rev.cpp @ONLY)

View File

@@ -1,7 +1,7 @@
 yuzu emulator early access
 =============
-This is the source code for early-access 2970.
+This is the source code for early-access 2974.
 ## Legal Notice

View File

@@ -14,32 +14,7 @@ if (DEFINED ENV{DISPLAYVERSION})
     set(DISPLAY_VERSION $ENV{DISPLAYVERSION})
 endif ()
-# Pass the path to git to the GenerateSCMRev.cmake as well
-find_package(Git QUIET)
-add_custom_command(OUTPUT scm_rev.cpp
-    COMMAND ${CMAKE_COMMAND}
-      -DSRC_DIR=${PROJECT_SOURCE_DIR}
-      -DBUILD_REPOSITORY=${BUILD_REPOSITORY}
-      -DTITLE_BAR_FORMAT_IDLE=${TITLE_BAR_FORMAT_IDLE}
-      -DTITLE_BAR_FORMAT_RUNNING=${TITLE_BAR_FORMAT_RUNNING}
-      -DBUILD_TAG=${BUILD_TAG}
-      -DBUILD_ID=${DISPLAY_VERSION}
-      -DGIT_REF_SPEC=${GIT_REF_SPEC}
-      -DGIT_REV=${GIT_REV}
-      -DGIT_DESC=${GIT_DESC}
-      -DGIT_BRANCH=${GIT_BRANCH}
-      -DBUILD_FULLNAME=${BUILD_FULLNAME}
-      -DGIT_EXECUTABLE=${GIT_EXECUTABLE}
-      -P ${PROJECT_SOURCE_DIR}/CMakeModules/GenerateSCMRev.cmake
-    DEPENDS
-        # Check that the scm_rev files haven't changed
-        "${CMAKE_CURRENT_SOURCE_DIR}/scm_rev.cpp.in"
-        "${CMAKE_CURRENT_SOURCE_DIR}/scm_rev.h"
-        # technically we should regenerate if the git version changed, but its not worth the effort imo
-        "${PROJECT_SOURCE_DIR}/CMakeModules/GenerateSCMRev.cmake"
-    VERBATIM
-)
+include(GenerateSCMRev)
 add_library(common STATIC
     address_space.cpp
@@ -121,7 +96,7 @@ add_library(common STATIC
     quaternion.h
     reader_writer_queue.h
     ring_buffer.h
-    scm_rev.cpp
+    ${CMAKE_CURRENT_BINARY_DIR}/scm_rev.cpp
     scm_rev.h
     scope_exit.h
     settings.cpp

View File

@@ -4,13 +4,10 @@
 #pragma once
 #include <array>
-#include "common/bit_field.h"
 #include "common/common_funcs.h"
 #include "common/common_types.h"
-#include "common/swap.h"
 #include "core/hle/service/hid/controllers/controller_base.h"
 #include "core/hle/service/hid/errors.h"
-#include "core/hle/service/hid/ring_lifo.h"
 namespace Kernel {
 class KEvent;

View File

@@ -26,20 +26,39 @@ using namespace Common::Literals;
 constexpr VkDeviceSize MAX_ALIGNMENT = 256;
 // Maximum size to put elements in the stream buffer
 constexpr VkDeviceSize MAX_STREAM_BUFFER_REQUEST_SIZE = 8_MiB;
-// Stream buffer size in bytes
-constexpr VkDeviceSize STREAM_BUFFER_SIZE = 128_MiB;
-constexpr VkDeviceSize REGION_SIZE = STREAM_BUFFER_SIZE / StagingBufferPool::NUM_SYNCS;
 constexpr VkMemoryPropertyFlags HOST_FLAGS =
     VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
 constexpr VkMemoryPropertyFlags STREAM_FLAGS = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | HOST_FLAGS;
-bool IsStreamHeap(VkMemoryHeap heap) noexcept {
-    return STREAM_BUFFER_SIZE < (heap.size * 2) / 3;
+static bool IsStreamHeap(VkMemoryHeap heap, size_t staging_buffer_size) noexcept {
+    return staging_buffer_size < (heap.size * 2) / 3;
+}
+
+static bool HasLargeDeviceLocalHostVisibleMemory(const VkPhysicalDeviceMemoryProperties& props) {
+    const auto flags{VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT};
+    for (u32 type_index = 0; type_index < props.memoryTypeCount; ++type_index) {
+        const auto& memory_type{props.memoryTypes[type_index]};
+        if ((memory_type.propertyFlags & flags) != flags) {
+            // Memory must be device local and host visible
+            continue;
+        }
+        const auto& heap{props.memoryHeaps[memory_type.heapIndex]};
+        if (heap.size >= 7168_MiB) {
+            // This is the right type of memory
+            return true;
+        }
+    }
+    return false;
 }
 std::optional<u32> FindMemoryTypeIndex(const VkPhysicalDeviceMemoryProperties& props, u32 type_mask,
-                                       VkMemoryPropertyFlags flags) noexcept {
+                                       VkMemoryPropertyFlags flags,
+                                       size_t staging_buffer_size) noexcept {
     for (u32 type_index = 0; type_index < props.memoryTypeCount; ++type_index) {
         if (((type_mask >> type_index) & 1) == 0) {
             // Memory type is incompatible
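The block above replaces the fixed 128_MiB STREAM_BUFFER_SIZE with a per-device choice: if any heap is both device-local and host-visible and at least 7168_MiB large, a 1_GiB stream buffer is used, otherwise 256_MiB. A self-contained sketch of that arithmetic using plain integers (the 8 GiB heap below is illustrative, not taken from a real device):

#include <cstdint>
#include <cstdio>

constexpr std::uint64_t MiB = 1ull << 20;
constexpr std::uint64_t GiB = 1ull << 30;

int main() {
    // Illustrative heap: 8 GiB that is both DEVICE_LOCAL and HOST_VISIBLE.
    const std::uint64_t heap_size = 8 * GiB;
    // HasLargeDeviceLocalHostVisibleMemory: any such heap of at least 7168 MiB
    // selects the large stream buffer, otherwise the 256 MiB default is used.
    const std::uint64_t staging_buffer_size = (heap_size >= 7168 * MiB) ? 1 * GiB : 256 * MiB;
    // IsStreamHeap: the chosen size must still fit in under two thirds of the heap.
    const bool stream_heap = staging_buffer_size < (heap_size * 2) / 3;
    std::printf("staging_buffer_size = %llu MiB, usable as stream heap: %s\n",
                static_cast<unsigned long long>(staging_buffer_size / MiB),
                stream_heap ? "yes" : "no");
    return 0;
}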
@@ -50,7 +69,7 @@ std::optional<u32> FindMemoryTypeIndex(const VkPhysicalDeviceMemoryProperties& p
             // Memory type doesn't have the flags we want
             continue;
         }
-        if (!IsStreamHeap(props.memoryHeaps[memory_type.heapIndex])) {
+        if (!IsStreamHeap(props.memoryHeaps[memory_type.heapIndex], staging_buffer_size)) {
             // Memory heap is not suitable for streaming
             continue;
         }
@@ -61,17 +80,17 @@ std::optional<u32> FindMemoryTypeIndex(const VkPhysicalDeviceMemoryProperties& p
 }
 u32 FindMemoryTypeIndex(const VkPhysicalDeviceMemoryProperties& props, u32 type_mask,
-                        bool try_device_local) {
+                        bool try_device_local, size_t staging_buffer_size) {
     std::optional<u32> type;
     if (try_device_local) {
         // Try to find a DEVICE_LOCAL_BIT type, Nvidia and AMD have a dedicated heap for this
-        type = FindMemoryTypeIndex(props, type_mask, STREAM_FLAGS);
+        type = FindMemoryTypeIndex(props, type_mask, STREAM_FLAGS, staging_buffer_size);
         if (type) {
             return *type;
         }
     }
     // Otherwise try without the DEVICE_LOCAL_BIT
-    type = FindMemoryTypeIndex(props, type_mask, HOST_FLAGS);
+    type = FindMemoryTypeIndex(props, type_mask, HOST_FLAGS, staging_buffer_size);
     if (type) {
         return *type;
     }
@@ -79,20 +98,32 @@ u32 FindMemoryTypeIndex(const VkPhysicalDeviceMemoryProperties& props, u32 type_
     throw vk::Exception(VK_ERROR_OUT_OF_DEVICE_MEMORY);
 }
-size_t Region(size_t iterator) noexcept {
-    return iterator / REGION_SIZE;
+size_t Region(size_t iterator, size_t region_size) noexcept {
+    return iterator / region_size;
 }
 } // Anonymous namespace
 StagingBufferPool::StagingBufferPool(const Device& device_, MemoryAllocator& memory_allocator_,
                                      Scheduler& scheduler_)
     : device{device_}, memory_allocator{memory_allocator_}, scheduler{scheduler_} {
+    const auto memory_properties{device.GetPhysical().GetMemoryProperties().memoryProperties};
+    if (HasLargeDeviceLocalHostVisibleMemory(memory_properties)) {
+        // Possible on many integrated and newer discrete cards
+        staging_buffer_size = 1_GiB;
+    } else {
+        // Well-supported default size used by most Vulkan PC games
+        staging_buffer_size = 256_MiB;
+    }
+    region_size = staging_buffer_size / StagingBufferPool::NUM_SYNCS;
     const vk::Device& dev = device.GetLogical();
     stream_buffer = dev.CreateBuffer(VkBufferCreateInfo{
         .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
         .pNext = nullptr,
         .flags = 0,
-        .size = STREAM_BUFFER_SIZE,
+        .size = staging_buffer_size,
         .usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
                  VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
         .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
@@ -117,19 +148,18 @@ StagingBufferPool::StagingBufferPool(const Device& device_, MemoryAllocator& mem
         .image = nullptr,
         .buffer = *stream_buffer,
     };
-    const auto memory_properties = device.GetPhysical().GetMemoryProperties().memoryProperties;
     VkMemoryAllocateInfo stream_memory_info{
         .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
         .pNext = make_dedicated ? &dedicated_info : nullptr,
         .allocationSize = requirements.size,
-        .memoryTypeIndex =
-            FindMemoryTypeIndex(memory_properties, requirements.memoryTypeBits, true),
+        .memoryTypeIndex = FindMemoryTypeIndex(memory_properties, requirements.memoryTypeBits, true,
+                                               staging_buffer_size),
     };
     stream_memory = dev.TryAllocateMemory(stream_memory_info);
     if (!stream_memory) {
         LOG_INFO(Render_Vulkan, "Dynamic memory allocation failed, trying with system memory");
-        stream_memory_info.memoryTypeIndex =
-            FindMemoryTypeIndex(memory_properties, requirements.memoryTypeBits, false);
+        stream_memory_info.memoryTypeIndex = FindMemoryTypeIndex(
+            memory_properties, requirements.memoryTypeBits, false, staging_buffer_size);
         stream_memory = dev.AllocateMemory(stream_memory_info);
     }
@@ -137,7 +167,7 @@ StagingBufferPool::StagingBufferPool(const Device& device_, MemoryAllocator& mem
         stream_memory.SetObjectNameEXT("Stream Buffer Memory");
     }
     stream_buffer.BindMemory(*stream_memory, 0);
-    stream_pointer = stream_memory.Map(0, STREAM_BUFFER_SIZE);
+    stream_pointer = stream_memory.Map(0, staging_buffer_size);
 }
 StagingBufferPool::~StagingBufferPool() = default;
@@ -158,25 +188,25 @@ void StagingBufferPool::TickFrame() {
 }
 StagingBufferRef StagingBufferPool::GetStreamBuffer(size_t size) {
-    if (AreRegionsActive(Region(free_iterator) + 1,
-                         std::min(Region(iterator + size) + 1, NUM_SYNCS))) {
+    if (AreRegionsActive(Region(free_iterator, region_size) + 1,
+                         std::min(Region(iterator + size, region_size) + 1, NUM_SYNCS))) {
         // Avoid waiting for the previous usages to be free
         return GetStagingBuffer(size, MemoryUsage::Upload);
     }
     const u64 current_tick = scheduler.CurrentTick();
-    std::fill(sync_ticks.begin() + Region(used_iterator), sync_ticks.begin() + Region(iterator),
-              current_tick);
+    std::fill(sync_ticks.begin() + Region(used_iterator, region_size),
+              sync_ticks.begin() + Region(iterator, region_size), current_tick);
     used_iterator = iterator;
     free_iterator = std::max(free_iterator, iterator + size);
-    if (iterator + size >= STREAM_BUFFER_SIZE) {
-        std::fill(sync_ticks.begin() + Region(used_iterator), sync_ticks.begin() + NUM_SYNCS,
-                  current_tick);
+    if (iterator + size >= staging_buffer_size) {
+        std::fill(sync_ticks.begin() + Region(used_iterator, region_size),
+                  sync_ticks.begin() + NUM_SYNCS, current_tick);
         used_iterator = 0;
         iterator = 0;
         free_iterator = size;
-        if (AreRegionsActive(0, Region(size) + 1)) {
+        if (AreRegionsActive(0, Region(size, region_size) + 1)) {
             // Avoid waiting for the previous usages to be free
             return GetStagingBuffer(size, MemoryUsage::Upload);
         }
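GetStreamBuffer treats the stream buffer as a ring divided into NUM_SYNCS regions, and Region(offset, region_size) maps a byte offset to the sync-tick slot that guards it. A standalone sketch of that bookkeeping follows; NUM_SYNCS = 16 is an assumption standing in for StagingBufferPool::NUM_SYNCS, and the offsets are illustrative:

#include <cstddef>
#include <cstdio>

constexpr std::size_t NUM_SYNCS = 16;                     // assumed stand-in for StagingBufferPool::NUM_SYNCS
constexpr std::size_t staging_buffer_size = 256ull << 20; // 256 MiB fallback size from the constructor
constexpr std::size_t region_size = staging_buffer_size / NUM_SYNCS;

// Same mapping as Region(iterator, region_size) above.
constexpr std::size_t Region(std::size_t offset) {
    return offset / region_size;
}

int main() {
    const std::size_t iterator = 3 * region_size + 4096; // current write offset into the ring
    const std::size_t size = 2 * region_size;            // size of the incoming request
    // Before reusing this span, the pool checks the sync ticks of every region
    // the request touches; if any is still pending on the GPU it falls back to
    // a dedicated staging buffer instead of waiting.
    std::printf("request [%zu, %zu) touches regions %zu..%zu of 0..%zu\n",
                iterator, iterator + size, Region(iterator), Region(iterator + size), NUM_SYNCS - 1);
    return 0;
}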

View File

@@ -93,6 +93,9 @@ private:
     size_t free_iterator = 0;
     std::array<u64, NUM_SYNCS> sync_ticks{};
+    size_t staging_buffer_size = 0;
+    size_t region_size = 0;
     StagingBuffersCache device_local_cache;
     StagingBuffersCache upload_cache;
     StagingBuffersCache download_cache;