From 4ea58b64d43273806a29de28d1635d1b323469ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Henrik=20Rydg=C3=A5rd?= Date: Thu, 4 Apr 2024 15:32:48 +0200 Subject: [PATCH] Merge our two different Vulkan barrier batch implementations --- Common/GPU/Vulkan/VulkanBarrier.cpp | 6 ++--- Common/GPU/Vulkan/VulkanBarrier.h | 32 +++---------------------- Common/GPU/Vulkan/VulkanQueueRunner.cpp | 8 +++---- Common/GPU/Vulkan/VulkanQueueRunner.h | 8 +++---- 4 files changed, 14 insertions(+), 40 deletions(-) diff --git a/Common/GPU/Vulkan/VulkanBarrier.cpp b/Common/GPU/Vulkan/VulkanBarrier.cpp index 130787b8bb..585fe98c04 100644 --- a/Common/GPU/Vulkan/VulkanBarrier.cpp +++ b/Common/GPU/Vulkan/VulkanBarrier.cpp @@ -11,7 +11,7 @@ VulkanBarrierBatch::~VulkanBarrierBatch() { } } -void VulkanBarrier::Flush(VkCommandBuffer cmd) { +void VulkanBarrierBatch::Flush(VkCommandBuffer cmd) { if (!imageBarriers_.empty()) { vkCmdPipelineBarrier(cmd, srcStageMask_, dstStageMask_, dependencyFlags_, 0, nullptr, 0, nullptr, (uint32_t)imageBarriers_.size(), imageBarriers_.data()); } @@ -21,7 +21,7 @@ void VulkanBarrier::Flush(VkCommandBuffer cmd) { dependencyFlags_ = 0; } -void VulkanBarrier::TransitionImage( +void VulkanBarrierBatch::TransitionImage( VkImage image, int baseMip, int numMipLevels, int numLayers, VkImageAspectFlags aspectMask, VkImageLayout oldImageLayout, VkImageLayout newImageLayout, VkAccessFlags srcAccessMask, VkAccessFlags dstAccessMask, @@ -50,7 +50,7 @@ void VulkanBarrier::TransitionImage( imageBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; } -void VulkanBarrier::TransitionImageAuto( +void VulkanBarrierBatch::TransitionImageAuto( VkImage image, int baseMip, int numMipLevels, int numLayers, VkImageAspectFlags aspectMask, VkImageLayout oldImageLayout, VkImageLayout newImageLayout) { _dbg_assert_(image != VK_NULL_HANDLE); diff --git a/Common/GPU/Vulkan/VulkanBarrier.h b/Common/GPU/Vulkan/VulkanBarrier.h index 556f8bb074..60b1ea3fc0 100644 --- a/Common/GPU/Vulkan/VulkanBarrier.h 
+++ b/Common/GPU/Vulkan/VulkanBarrier.h @@ -14,6 +14,8 @@ public: VulkanBarrierBatch() : imageBarriers_(4) {} ~VulkanBarrierBatch(); + bool empty() const { return imageBarriers_.empty(); } + VkImageMemoryBarrier *Add(VkImage image, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags) { srcStageMask_ |= srcStageMask; dstStageMask_ |= dstStageMask; @@ -33,34 +35,6 @@ public: return &barrier; } - void Flush(VkCommandBuffer cmd) { - if (!imageBarriers_.empty()) { - vkCmdPipelineBarrier(cmd, srcStageMask_, dstStageMask_, dependencyFlags_, 0, nullptr, 0, nullptr, (uint32_t)imageBarriers_.size(), imageBarriers_.data()); - imageBarriers_.clear(); - srcStageMask_ = 0; - dstStageMask_ = 0; - dependencyFlags_ = 0; - } - } - - bool empty() const { return imageBarriers_.empty(); } - -private: - FastVec<VkImageMemoryBarrier> imageBarriers_; - VkPipelineStageFlags srcStageMask_ = 0; - VkPipelineStageFlags dstStageMask_ = 0; - VkDependencyFlags dependencyFlags_ = 0; -}; - -// Collects multiple barriers into one, then flushes it. -// Reusable after a flush, in case you want to reuse the allocation made by the vector. -// However, not thread safe in any way! 
-class VulkanBarrier {
-public: - VulkanBarrier() : imageBarriers_(4) {} - - bool empty() const { return imageBarriers_.empty(); } - void TransitionImage( VkImage image, int baseMip, int numMipLevels, int numLayers, VkImageAspectFlags aspectMask, VkImageLayout oldImageLayout, VkImageLayout newImageLayout, @@ -76,9 +50,9 @@ public: void Flush(VkCommandBuffer cmd); private: + FastVec<VkImageMemoryBarrier> imageBarriers_; VkPipelineStageFlags srcStageMask_ = 0; VkPipelineStageFlags dstStageMask_ = 0; - FastVec<VkImageMemoryBarrier> imageBarriers_; VkDependencyFlags dependencyFlags_ = 0; }; diff --git a/Common/GPU/Vulkan/VulkanQueueRunner.cpp b/Common/GPU/Vulkan/VulkanQueueRunner.cpp index 07a2f99486..949355e81d 100644 --- a/Common/GPU/Vulkan/VulkanQueueRunner.cpp +++ b/Common/GPU/Vulkan/VulkanQueueRunner.cpp @@ -931,7 +931,7 @@ void VulkanQueueRunner::LogReadbackImage(const VKRStep &step) { INFO_LOG(G3D, "%s", StepToString(vulkan_, step).c_str()); } -void TransitionToOptimal(VkCommandBuffer cmd, VkImage colorImage, VkImageLayout colorLayout, VkImage depthStencilImage, VkImageLayout depthStencilLayout, int numLayers, VulkanBarrier *recordBarrier) { +void TransitionToOptimal(VkCommandBuffer cmd, VkImage colorImage, VkImageLayout colorLayout, VkImage depthStencilImage, VkImageLayout depthStencilLayout, int numLayers, VulkanBarrierBatch *recordBarrier) { if (colorLayout != VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) { VkPipelineStageFlags srcStageMask = 0; VkAccessFlags srcAccessMask = 0; @@ -1754,7 +1754,7 @@ void VulkanQueueRunner::PerformBlit(const VKRStep &step, VkCommandBuffer cmd) { } } -void VulkanQueueRunner::SetupTransitionToTransferSrc(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrier *recordBarrier) { +void VulkanQueueRunner::SetupTransitionToTransferSrc(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrierBatch *recordBarrier) { if (img.layout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL) { return; } @@ -1806,7 +1806,7 @@ void VulkanQueueRunner::SetupTransitionToTransferSrc(VKRImage &img, VkImageAspec 
img.layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL; } -void VulkanQueueRunner::SetupTransitionToTransferDst(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrier *recordBarrier) { +void VulkanQueueRunner::SetupTransitionToTransferDst(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrierBatch *recordBarrier) { if (img.layout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) { return; } @@ -1859,7 +1859,7 @@ void VulkanQueueRunner::SetupTransitionToTransferDst(VKRImage &img, VkImageAspec img.layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL; } -void VulkanQueueRunner::SetupTransferDstWriteAfterWrite(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrier *recordBarrier) { +void VulkanQueueRunner::SetupTransferDstWriteAfterWrite(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrierBatch *recordBarrier) { VkImageAspectFlags imageAspect = aspect; VkAccessFlags srcAccessMask = 0; VkPipelineStageFlags srcStageMask = 0; diff --git a/Common/GPU/Vulkan/VulkanQueueRunner.h b/Common/GPU/Vulkan/VulkanQueueRunner.h index b6c1094065..ab39c7b06f 100644 --- a/Common/GPU/Vulkan/VulkanQueueRunner.h +++ b/Common/GPU/Vulkan/VulkanQueueRunner.h @@ -289,9 +289,9 @@ private: void ApplySonicHack(std::vector<VKRStep *> &steps); void ApplyRenderPassMerge(std::vector<VKRStep *> &steps); - static void SetupTransitionToTransferSrc(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrier *recordBarrier); - static void SetupTransitionToTransferDst(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrier *recordBarrier); - static void SetupTransferDstWriteAfterWrite(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrier *recordBarrier); + static void SetupTransitionToTransferSrc(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrierBatch *recordBarrier); + static void SetupTransitionToTransferDst(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrierBatch *recordBarrier); + static void SetupTransferDstWriteAfterWrite(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrierBatch *recordBarrier); VulkanContext *vulkan_; @@ 
-315,7 +315,7 @@ private: // Image barrier helper used during command buffer record (PerformRenderPass etc). // Stored here to help reuse the allocation. - VulkanBarrier recordBarrier_; + VulkanBarrierBatch recordBarrier_; // Swap chain management struct SwapchainImageData {