Merge our two different Vulkan barrier batch implementations
Commit 4ea58b64d4 (parent 9f9881dfe3)
4 changed files with 14 additions and 40 deletions
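The merge keeps the VulkanBarrierBatch name and gives it the API that was previously split across the two classes: TransitionImage/TransitionImageAuto and Flush from VulkanBarrier, plus the inline Add. A minimal usage sketch of the post-merge class, assuming the PPSSPP header (include path assumed here) and valid VkCommandBuffer/VkImage handles; the images and layouts are illustrative, not taken from this commit:

#include "Common/GPU/Vulkan/VulkanBarrier.h"  // include path assumed

// Queue two layout transitions, then emit them as a single pipeline barrier.
void TransitionTwoImages(VkCommandBuffer cmd, VkImage imageA, VkImage imageB) {
	VulkanBarrierBatch batch;
	batch.TransitionImageAuto(imageA, 0, 1, 1, VK_IMAGE_ASPECT_COLOR_BIT,
		VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
	batch.TransitionImageAuto(imageB, 0, 1, 1, VK_IMAGE_ASPECT_COLOR_BIT,
		VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
	batch.Flush(cmd);  // one vkCmdPipelineBarrier covering both images
}

Flush() resets the batch, so the same object (and the allocation behind its barrier vector) can be reused for the next group of transitions.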
@@ -11,7 +11,7 @@ VulkanBarrierBatch::~VulkanBarrierBatch() {
 	}
 }
 
-void VulkanBarrier::Flush(VkCommandBuffer cmd) {
+void VulkanBarrierBatch::Flush(VkCommandBuffer cmd) {
 	if (!imageBarriers_.empty()) {
 		vkCmdPipelineBarrier(cmd, srcStageMask_, dstStageMask_, dependencyFlags_, 0, nullptr, 0, nullptr, (uint32_t)imageBarriers_.size(), imageBarriers_.data());
 	}
@@ -21,7 +21,7 @@ void VulkanBarrier::Flush(VkCommandBuffer cmd) {
 	dependencyFlags_ = 0;
 }
 
-void VulkanBarrier::TransitionImage(
+void VulkanBarrierBatch::TransitionImage(
 	VkImage image, int baseMip, int numMipLevels, int numLayers, VkImageAspectFlags aspectMask,
 	VkImageLayout oldImageLayout, VkImageLayout newImageLayout,
 	VkAccessFlags srcAccessMask, VkAccessFlags dstAccessMask,
@@ -50,7 +50,7 @@ void VulkanBarrier::TransitionImage(
 	imageBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
 }
 
-void VulkanBarrier::TransitionImageAuto(
+void VulkanBarrierBatch::TransitionImageAuto(
 	VkImage image, int baseMip, int numMipLevels, int numLayers, VkImageAspectFlags aspectMask,
 	VkImageLayout oldImageLayout, VkImageLayout newImageLayout) {
 	_dbg_assert_(image != VK_NULL_HANDLE);
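The three hunks above are the implementation side of the merge (presumably VulkanBarrier.cpp): only the defining class name changes, so Flush() still submits the accumulated stage masks and queued VkImageMemoryBarrier structs in one vkCmdPipelineBarrier and then resets its state. As a rough hand-written expansion of what a flush of two queued transitions amounts to at the Vulkan level (parameter names are illustrative):

#include <vulkan/vulkan.h>

// Hand-written equivalent of flushing a batch that holds two image barriers.
void FlushTwoBarriersByHand(VkCommandBuffer cmd,
                            const VkImageMemoryBarrier barriers[2],
                            VkPipelineStageFlags srcA, VkPipelineStageFlags dstA,
                            VkPipelineStageFlags srcB, VkPipelineStageFlags dstB) {
	vkCmdPipelineBarrier(cmd,
		srcA | srcB,   // srcStageMask_ accumulates across queued transitions
		dstA | dstB,   // dstStageMask_ likewise
		0,             // dependencyFlags_
		0, nullptr,    // no global memory barriers
		0, nullptr,    // no buffer memory barriers
		2, barriers);  // every queued image barrier in a single call
}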
@@ -14,6 +14,8 @@ public:
 	VulkanBarrierBatch() : imageBarriers_(4) {}
 	~VulkanBarrierBatch();
 
+	bool empty() const { return imageBarriers_.empty(); }
+
 	VkImageMemoryBarrier *Add(VkImage image, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags) {
 		srcStageMask_ |= srcStageMask;
 		dstStageMask_ |= dstStageMask;
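On the header side (presumably VulkanBarrier.h), the surviving class gains empty() next to the constructor, and the inline Add() path is kept: it ORs the caller's stage masks into the batch and returns a pointer to a VkImageMemoryBarrier for the caller to finish filling in. A sketch of that usage, assuming the same header include as the first sketch; which fields Add() pre-fills is not visible in this hunk, so everything is set explicitly here:

// Queue a color-attachment-write -> shader-read barrier via the raw Add() API.
void QueueColorWriteToRead(VulkanBarrierBatch &batch, VkImage image) {
	VkImageMemoryBarrier *b = batch.Add(image,
		VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,  // ORed into srcStageMask_
		VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,          // ORed into dstStageMask_
		0);                                             // dependency flags
	b->srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
	b->dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
	b->oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
	b->newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
	b->subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
	b->subresourceRange.baseMipLevel = 0;
	b->subresourceRange.levelCount = 1;
	b->subresourceRange.baseArrayLayer = 0;
	b->subresourceRange.layerCount = 1;
}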
@@ -33,34 +35,6 @@ public:
 		return &barrier;
 	}
 
-	void Flush(VkCommandBuffer cmd) {
-		if (!imageBarriers_.empty()) {
-			vkCmdPipelineBarrier(cmd, srcStageMask_, dstStageMask_, dependencyFlags_, 0, nullptr, 0, nullptr, (uint32_t)imageBarriers_.size(), imageBarriers_.data());
-			imageBarriers_.clear();
-			srcStageMask_ = 0;
-			dstStageMask_ = 0;
-			dependencyFlags_ = 0;
-		}
-	}
-
-	bool empty() const { return imageBarriers_.empty(); }
-
-private:
-	FastVec<VkImageMemoryBarrier> imageBarriers_;
-	VkPipelineStageFlags srcStageMask_ = 0;
-	VkPipelineStageFlags dstStageMask_ = 0;
-	VkDependencyFlags dependencyFlags_ = 0;
-};
-
-// Collects multiple barriers into one, then flushes it.
-// Reusable after a flush, in case you want to reuse the allocation made by the vector.
-// However, not thread safe in any way!
-class VulkanBarrier {
-public:
-	VulkanBarrier() : imageBarriers_(4) {}
-
-	bool empty() const { return imageBarriers_.empty(); }
-
 	void TransitionImage(
 		VkImage image, int baseMip, int numMipLevels, int numLayers, VkImageAspectFlags aspectMask,
 		VkImageLayout oldImageLayout, VkImageLayout newImageLayout,
@@ -76,9 +50,9 @@ public:
 	void Flush(VkCommandBuffer cmd);
 
 private:
+	FastVec<VkImageMemoryBarrier> imageBarriers_;
 	VkPipelineStageFlags srcStageMask_ = 0;
 	VkPipelineStageFlags dstStageMask_ = 0;
-	FastVec<VkImageMemoryBarrier> imageBarriers_;
 	VkDependencyFlags dependencyFlags_ = 0;
 };
 
@@ -931,7 +931,7 @@ void VulkanQueueRunner::LogReadbackImage(const VKRStep &step) {
 	INFO_LOG(G3D, "%s", StepToString(vulkan_, step).c_str());
 }
 
-void TransitionToOptimal(VkCommandBuffer cmd, VkImage colorImage, VkImageLayout colorLayout, VkImage depthStencilImage, VkImageLayout depthStencilLayout, int numLayers, VulkanBarrier *recordBarrier) {
+void TransitionToOptimal(VkCommandBuffer cmd, VkImage colorImage, VkImageLayout colorLayout, VkImage depthStencilImage, VkImageLayout depthStencilLayout, int numLayers, VulkanBarrierBatch *recordBarrier) {
 	if (colorLayout != VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
 		VkPipelineStageFlags srcStageMask = 0;
 		VkAccessFlags srcAccessMask = 0;
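In the queue runner (presumably VulkanQueueRunner.cpp), TransitionToOptimal and the Setup* helpers below only change the type of the recordBarrier parameter: layout transitions are queued on the shared batch rather than issued immediately. A hedged sketch of that shape; the real code computes explicit access and stage masks as the hunk context shows, while TransitionImageAuto is used here only because its full signature is visible in the hunks above:

// Illustrative only: queue a color image transition on the shared batch.
void QueueColorToOptimal(VulkanBarrierBatch *recordBarrier, VkImage colorImage,
                         VkImageLayout colorLayout, int numLayers) {
	if (colorLayout != VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
		recordBarrier->TransitionImageAuto(colorImage, 0, 1, numLayers, VK_IMAGE_ASPECT_COLOR_BIT,
			colorLayout, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
	}
}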
@@ -1754,7 +1754,7 @@ void VulkanQueueRunner::PerformBlit(const VKRStep &step, VkCommandBuffer cmd) {
 	}
 }
 
-void VulkanQueueRunner::SetupTransitionToTransferSrc(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrier *recordBarrier) {
+void VulkanQueueRunner::SetupTransitionToTransferSrc(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrierBatch *recordBarrier) {
 	if (img.layout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL) {
 		return;
 	}
@@ -1806,7 +1806,7 @@ void VulkanQueueRunner::SetupTransitionToTransferSrc(VKRImage &img, VkImageAspec
 	img.layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
 }
 
-void VulkanQueueRunner::SetupTransitionToTransferDst(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrier *recordBarrier) {
+void VulkanQueueRunner::SetupTransitionToTransferDst(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrierBatch *recordBarrier) {
 	if (img.layout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
 		return;
 	}
@@ -1859,7 +1859,7 @@ void VulkanQueueRunner::SetupTransitionToTransferDst(VKRImage &img, VkImageAspec
 	img.layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
 }
 
-void VulkanQueueRunner::SetupTransferDstWriteAfterWrite(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrier *recordBarrier) {
+void VulkanQueueRunner::SetupTransferDstWriteAfterWrite(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrierBatch *recordBarrier) {
 	VkImageAspectFlags imageAspect = aspect;
 	VkAccessFlags srcAccessMask = 0;
 	VkPipelineStageFlags srcStageMask = 0;
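The SetupTransition* helpers share one shape, visible across the hunks above: early-out if the image is already in the target layout, queue the transition on the batch, then record the new layout on the VKRImage. A sketch under the assumption that the image record exposes image, layout and numLayers fields (only layout appears in the hunks), written against a stand-in struct rather than the real VKRImage:

// Stand-in for VKRImage; field names beyond 'layout' are assumptions.
struct ImageRecord {
	VkImage image;
	VkImageLayout layout;
	int numLayers;
};

void SetupTransferSrcSketch(ImageRecord &img, VkImageAspectFlags aspect, VulkanBarrierBatch *recordBarrier) {
	if (img.layout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL) {
		return;  // already in the target layout, nothing to queue
	}
	recordBarrier->TransitionImageAuto(img.image, 0, 1, img.numLayers, aspect,
		img.layout, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
	img.layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
}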
@@ -289,9 +289,9 @@ private:
 	void ApplySonicHack(std::vector<VKRStep *> &steps);
 	void ApplyRenderPassMerge(std::vector<VKRStep *> &steps);
 
-	static void SetupTransitionToTransferSrc(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrier *recordBarrier);
-	static void SetupTransitionToTransferDst(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrier *recordBarrier);
-	static void SetupTransferDstWriteAfterWrite(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrier *recordBarrier);
+	static void SetupTransitionToTransferSrc(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrierBatch *recordBarrier);
+	static void SetupTransitionToTransferDst(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrierBatch *recordBarrier);
+	static void SetupTransferDstWriteAfterWrite(VKRImage &img, VkImageAspectFlags aspect, VulkanBarrierBatch *recordBarrier);
 
 	VulkanContext *vulkan_;
 
@@ -315,7 +315,7 @@ private:
 	// Image barrier helper used during command buffer record (PerformRenderPass etc).
 	// Stored here to help reuse the allocation.
 
-	VulkanBarrier recordBarrier_;
+	VulkanBarrierBatch recordBarrier_;
 
 	// Swap chain management
 	struct SwapchainImageData {