
[dxvk] Use new barrier batch and barrier tracker for main command buffer

Philip Rebohle, 2024-10-12 11:05:06 +02:00 (committed by Philip Rebohle)
parent 4d2799f0d4
commit 708d23ca4b
5 changed files with 184 additions and 61 deletions

View File

@@ -474,6 +474,7 @@ namespace dxvk {
       return;
 
     list->cmdPipelineBarrier(m_cmdBuffer, &depInfo);
+    list->addStatCtr(DxvkStatCounter::CmdBarrierCount, 1u);
 
     m_memoryBarrier.srcStageMask = 0u;
     m_memoryBarrier.srcAccessMask = 0u;

View File

@@ -351,6 +351,15 @@ namespace dxvk {
       return m_storage;
     }
 
+    /**
+     * \brief Retrieves resource ID for barrier tracking
+     * \returns Unique resource ID
+     */
+    uint64_t getResourceId() const {
+      constexpr static size_t Align = alignof(DxvkResourceAllocation);
+      return reinterpret_cast<uintptr_t>(m_storage.ptr()) / (Align & -Align);
+    }
+
     /**
      * \brief Checks whether the buffer can be relocated
      *
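Note: the getResourceId() helper added above (and the identical one added to the image header later in this commit) derives a tracker key from the backing allocation's address. A minimal, self-contained sketch of the idea, using stand-in types rather than DXVK's own: since every allocation is at least Align-aligned, dividing the address by the alignment compresses the key range without ever mapping two live allocations to the same ID, and Align & -Align isolates the lowest set bit, which for a power-of-two alignment is the alignment itself.

    #include <cstddef>
    #include <cstdint>

    // Stand-in for a resource allocation; only the alignment matters here.
    struct Allocation {
      alignas(32) unsigned char storage[64];
    };

    // Sketch of the ID derivation: divide the address by the power-of-two
    // alignment so IDs remain unique per live allocation but span a smaller range.
    uint64_t resourceIdFrom(const Allocation* ptr) {
      constexpr size_t Align = alignof(Allocation);
      return reinterpret_cast<uintptr_t>(ptr) / (Align & -Align);
    }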

View File

@@ -4699,8 +4699,8 @@ namespace dxvk {
       // If there are pending layout transitions, execute them immediately
       // since the backend expects images to be in the store layout after
       // a render pass instance. This is expected to be rare.
-      if (m_execBarriers.hasResourceBarriers())
-        m_execBarriers.recordCommands(m_cmd);
+      if (m_execBarriers.hasLayoutTransitions())
+        flushBarriers();
     }
@@ -5861,7 +5861,7 @@ namespace dxvk {
     // Exit early if we're only checking for hazards and
     // if the barrier set is empty, to avoid some overhead.
-    if (!DoEmit && !m_execBarriers.hasResourceBarriers())
+    if (!DoEmit && m_barrierTracker.empty())
       return;
 
     for (uint32_t i = 0; i < DxvkDescriptorSets::CsSetCount; i++) {
@@ -6107,8 +6107,9 @@ namespace dxvk {
       return false;
 
     if (stages & VK_SHADER_STAGE_COMPUTE_BIT) {
-      VkPipelineStageFlags stageMask = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
-      return !(m_execBarriers.getSrcStages() & ~stageMask);
+      VkPipelineStageFlags2 stageMask = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT
+                                      | VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT;
+      return !m_execBarriers.hasPendingStages(~stageMask);
     }
 
     return true;
@@ -6222,7 +6223,7 @@ namespace dxvk {
     for (size_t i = 0; i < imageCount; i++)
       prepareImage(imageInfos[i].image, imageInfos[i].image->getAvailableSubresources());
 
-    m_execBarriers.recordCommands(m_cmd);
+    flushBarriers();
 
     small_vector<VkImageMemoryBarrier2, 16> imageBarriers;
@@ -6539,6 +6540,8 @@ namespace dxvk {
     m_sdmaBarriers.finalize(m_cmd);
     m_initBarriers.finalize(m_cmd);
     m_execBarriers.finalize(m_cmd);
+
+    m_barrierTracker.clear();
   }
@@ -6564,6 +6567,7 @@ namespace dxvk {
       depInfo.pImageMemoryBarriers = m_imageLayoutTransitions.data();
 
       m_cmd->cmdPipelineBarrier(cmdBuffer, &depInfo);
+      m_cmd->addStatCtr(DxvkStatCounter::CmdBarrierCount, 1u);
 
       m_imageLayoutTransitions.clear();
     }
@ -6635,21 +6639,15 @@ namespace dxvk {
VkAccessFlags2 srcAccess, VkAccessFlags2 srcAccess,
VkPipelineStageFlags2 dstStages, VkPipelineStageFlags2 dstStages,
VkAccessFlags2 dstAccess) { VkAccessFlags2 dstAccess) {
if (likely(cmdBuffer == DxvkCmdBuffer::ExecBuffer)) { auto& batch = getBarrierBatch(cmdBuffer);
m_execBarriers.accessMemory(srcStages, srcAccess, dstStages, dstAccess);
} else {
auto& batch = cmdBuffer == DxvkCmdBuffer::InitBuffer
? m_initBarriers
: m_sdmaBarriers;
VkMemoryBarrier2 barrier = { VK_STRUCTURE_TYPE_MEMORY_BARRIER_2 }; VkMemoryBarrier2 barrier = { VK_STRUCTURE_TYPE_MEMORY_BARRIER_2 };
barrier.srcStageMask = srcStages; barrier.srcStageMask = srcStages;
barrier.srcAccessMask = srcAccess; barrier.srcAccessMask = srcAccess;
barrier.dstStageMask = dstStages; barrier.dstStageMask = dstStages;
barrier.dstAccessMask = dstAccess; barrier.dstAccessMask = dstAccess;
batch.addMemoryBarrier(barrier); batch.addMemoryBarrier(barrier);
}
} }
@@ -6678,28 +6676,53 @@ namespace dxvk {
           VkImageLayout dstLayout,
           VkPipelineStageFlags2 dstStages,
           VkAccessFlags2 dstAccess) {
-    if (likely(cmdBuffer == DxvkCmdBuffer::ExecBuffer)) {
-      m_execBarriers.accessImage(&image, subresources,
-        srcLayout, srcStages, srcAccess,
-        dstLayout, dstStages, dstAccess);
-    } else {
-      auto& batch = cmdBuffer == DxvkCmdBuffer::InitBuffer
-        ? m_initBarriers
-        : m_sdmaBarriers;
+    auto& batch = getBarrierBatch(cmdBuffer);
 
     VkImageMemoryBarrier2 barrier = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2 };
     barrier.srcStageMask = srcStages;
     barrier.srcAccessMask = srcAccess;
     barrier.dstStageMask = dstStages;
     barrier.dstAccessMask = dstAccess;
     barrier.oldLayout = srcLayout;
     barrier.newLayout = dstLayout;
     barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
     barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
     barrier.image = image.handle();
     barrier.subresourceRange = subresources;
 
     batch.addImageBarrier(barrier);
-    }
+
+    if (cmdBuffer == DxvkCmdBuffer::ExecBuffer) {
+      bool hasWrite = (srcAccess & vk::AccessWriteMask) || (srcLayout != dstLayout);
+      bool hasRead = (srcAccess & vk::AccessReadMask);
+
+      uint32_t layerCount = image.info().numLayers;
+
+      if (subresources.levelCount == 1u || subresources.layerCount == layerCount) {
+        DxvkAddressRange range;
+        range.resource = image.getResourceId();
+        range.rangeStart = subresources.baseMipLevel * layerCount + subresources.baseArrayLayer;
+        range.rangeEnd = (subresources.baseMipLevel + subresources.levelCount - 1u) * layerCount
+                       + (subresources.baseArrayLayer + subresources.layerCount - 1u);
+
+        if (hasWrite)
+          m_barrierTracker.insertRange(range, DxvkAccess::Write);
+        if (hasRead)
+          m_barrierTracker.insertRange(range, DxvkAccess::Read);
+      } else {
+        DxvkAddressRange range;
+        range.resource = image.getResourceId();
+
+        for (uint32_t i = subresources.baseMipLevel; i < subresources.baseMipLevel + subresources.levelCount; i++) {
+          range.rangeStart = i * layerCount + subresources.baseArrayLayer;
+          range.rangeEnd = range.rangeStart + subresources.layerCount - 1u;
+
+          if (hasWrite)
+            m_barrierTracker.insertRange(range, DxvkAccess::Write);
+          if (hasRead)
+            m_barrierTracker.insertRange(range, DxvkAccess::Read);
+        }
+      }
+    }
   }
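Note: in the hasWrite / hasRead classification above, a layout transition is recorded as a write even when the source access mask carries no write bits, because the transition itself rewrites or invalidates the image contents. A minimal sketch of that classification, with stand-in flag values rather than Vulkan's real access bits:

    #include <cstdint>

    // Stand-ins for vk::AccessReadMask / vk::AccessWriteMask; the real masks
    // collect all VK_ACCESS_2_*_READ_BIT / *_WRITE_BIT flags respectively.
    constexpr uint64_t AccessReadBits  = 0x1;
    constexpr uint64_t AccessWriteBits = 0x2;

    struct TrackedAccess {
      bool read;
      bool write;
    };

    // Classify the source side of an image barrier for hazard tracking.
    // A layout transition counts as a write regardless of the access mask.
    TrackedAccess classifySrcAccess(uint64_t srcAccess, bool layoutChanged) {
      TrackedAccess result = { };
      result.write = (srcAccess & AccessWriteBits) != 0 || layoutChanged;
      result.read  = (srcAccess & AccessReadBits) != 0;
      return result;
    }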
@@ -6727,21 +6750,29 @@ namespace dxvk {
           VkAccessFlags2 srcAccess,
           VkPipelineStageFlags2 dstStages,
           VkAccessFlags2 dstAccess) {
-    if (likely(cmdBuffer == DxvkCmdBuffer::ExecBuffer)) {
-      DxvkBufferSliceHandle slice = buffer.getSliceHandle(offset, size);
-      m_execBarriers.accessBuffer(slice, srcStages, srcAccess, dstStages, dstAccess);
-    } else {
-      auto& batch = cmdBuffer == DxvkCmdBuffer::InitBuffer
-        ? m_initBarriers
-        : m_sdmaBarriers;
+    if (unlikely(!size))
+      return;
 
-      VkMemoryBarrier2 barrier = { VK_STRUCTURE_TYPE_MEMORY_BARRIER_2 };
-      barrier.srcStageMask = srcStages;
-      barrier.srcAccessMask = srcAccess;
-      barrier.dstStageMask = dstStages;
-      barrier.dstAccessMask = dstAccess;
+    auto& batch = getBarrierBatch(cmdBuffer);
 
-      batch.addMemoryBarrier(barrier);
-    }
+    VkMemoryBarrier2 barrier = { VK_STRUCTURE_TYPE_MEMORY_BARRIER_2 };
+    barrier.srcStageMask = srcStages;
+    barrier.srcAccessMask = srcAccess;
+    barrier.dstStageMask = dstStages;
+    barrier.dstAccessMask = dstAccess;
+
+    batch.addMemoryBarrier(barrier);
+
+    if (cmdBuffer == DxvkCmdBuffer::ExecBuffer) {
+      DxvkAddressRange range;
+      range.resource = buffer.getResourceId();
+      range.rangeStart = offset;
+      range.rangeEnd = offset + size - 1;
+
+      if (srcAccess & vk::AccessWriteMask)
+        m_barrierTracker.insertRange(range, DxvkAccess::Write);
+      if (srcAccess & vk::AccessReadMask)
+        m_barrierTracker.insertRange(range, DxvkAccess::Read);
+    }
   }
@@ -6780,8 +6811,13 @@ namespace dxvk {
           VkDeviceSize offset,
           VkDeviceSize size,
           DxvkAccess access) {
-    if (m_execBarriers.isBufferDirty(buffer.getSliceHandle(offset, size), access))
-      m_execBarriers.recordCommands(m_cmd);
+    bool flush = resourceHasAccess(buffer, offset, size, DxvkAccess::Write);
+
+    if (access == DxvkAccess::Write && !flush)
+      flush = resourceHasAccess(buffer, offset, size, DxvkAccess::Read);
+
+    if (flush)
+      flushBarriers();
   }
@@ -6799,8 +6835,13 @@ namespace dxvk {
           DxvkImage& image,
     const VkImageSubresourceRange& subresources,
           DxvkAccess access) {
-    if (m_execBarriers.isImageDirty(&image, subresources, access))
-      m_execBarriers.recordCommands(m_cmd);
+    bool flush = resourceHasAccess(image, subresources, DxvkAccess::Write);
+
+    if (access == DxvkAccess::Write && !flush)
+      flush = resourceHasAccess(image, subresources, DxvkAccess::Read);
+
+    if (flush)
+      flushBarriers();
   }
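Note: the two flush checks above implement the same hazard rule. A new read only conflicts with a previously tracked write, while a new write conflicts with both tracked writes (write-after-write) and tracked reads (write-after-read). A minimal sketch of that decision, with illustrative names rather than DXVK's API:

    enum class Access { Read, Write };

    // Decide whether pending barriers must be flushed before the new access.
    // trackedWrite / trackedRead describe what the barrier tracker has
    // recorded for the overlapping resource range.
    bool needsFlush(bool trackedWrite, bool trackedRead, Access newAccess) {
      if (trackedWrite)
        return true;                                     // RAW or WAW hazard
      return newAccess == Access::Write && trackedRead;  // WAR hazard
    }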
@@ -6813,7 +6854,8 @@ namespace dxvk {
   void DxvkContext::flushBarriers() {
-    m_execBarriers.recordCommands(m_cmd);
+    m_execBarriers.flush(m_cmd);
+    m_barrierTracker.clear();
   }
@@ -6822,7 +6864,15 @@ namespace dxvk {
           VkDeviceSize offset,
           VkDeviceSize size,
           DxvkAccess access) {
-    return m_execBarriers.getBufferAccess(buffer.getSliceHandle(offset, size)).test(access);
+    if (unlikely(!size))
+      return false;
+
+    DxvkAddressRange range;
+    range.resource = buffer.getResourceId();
+    range.rangeStart = offset;
+    range.rangeEnd = offset + size - 1;
+
+    return m_barrierTracker.findRange(range, access);
   }
@@ -6835,11 +6885,56 @@ namespace dxvk {
   }
 
 
+  bool DxvkContext::resourceHasAccess(
+          DxvkImage& image,
+    const VkImageSubresourceRange& subresources,
+          DxvkAccess access) {
+    uint32_t layerCount = image.info().numLayers;
+
+    // Subresources are enumerated in such a way that array layers of
+    // one mip form a consecutive address range, and we do not track
+    // individual image aspects. This is useful since image views for
+    // rendering and compute can only access one mip level.
+    DxvkAddressRange range;
+    range.resource = image.getResourceId();
+    range.rangeStart = subresources.baseMipLevel * layerCount + subresources.baseArrayLayer;
+    range.rangeEnd = (subresources.baseMipLevel + subresources.levelCount - 1u) * layerCount
+                   + (subresources.baseArrayLayer + subresources.layerCount - 1u);
+
+    // Probe all subresources first, only check individual mip levels
+    // if there are overlaps and if we are checking a subset of array
+    // layers of multiple mips.
+    bool dirty = m_barrierTracker.findRange(range, access);
+
+    if (!dirty || subresources.levelCount == 1u || subresources.layerCount == layerCount)
+      return dirty;
+
+    for (uint32_t i = subresources.baseMipLevel; i < subresources.baseMipLevel + subresources.levelCount && !dirty; i++) {
+      range.rangeStart = i * layerCount + subresources.baseArrayLayer;
+      range.rangeEnd = range.rangeStart + subresources.layerCount - 1u;
+
+      dirty = m_barrierTracker.findRange(range, access);
+    }
+
+    return dirty;
+  }
+
   bool DxvkContext::resourceHasAccess(
           DxvkImageView& imageView,
           DxvkAccess access) {
-    return m_execBarriers.getImageAccess(imageView.image(),
-      imageView.imageSubresources()).test(access);
+    return resourceHasAccess(*imageView.image(), imageView.imageSubresources(), access);
+  }
+
+
+  DxvkBarrierBatch& DxvkContext::getBarrierBatch(
+          DxvkCmdBuffer cmdBuffer) {
+    if (cmdBuffer == DxvkCmdBuffer::ExecBuffer)
+      return m_execBarriers;
+
+    return cmdBuffer == DxvkCmdBuffer::InitBuffer
+      ? m_initBarriers
+      : m_sdmaBarriers;
   }
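Note: to make the addressing scheme used by the new resourceHasAccess overload for images concrete, here is a small, self-contained sketch (stand-in types, not DXVK's) of how a subresource range maps to the linear interval the tracker works with, and why the coarse whole-range probe is only exact when a single mip level or all array layers are selected:

    #include <cstdint>
    #include <cstdio>

    // Illustrative stand-ins for the subresource range and tracker range.
    struct SubresourceRange {
      uint32_t baseMipLevel;
      uint32_t levelCount;
      uint32_t baseArrayLayer;
      uint32_t layerCount;
    };

    struct AddressRange {
      uint64_t rangeStart;
      uint64_t rangeEnd;
    };

    // Subresource index = mip * layerCount + layer, so all layers of one mip
    // are consecutive. The coarse range below is exact only for a single mip
    // or for all array layers; otherwise it also covers unrelated subresources.
    AddressRange coarseRange(const SubresourceRange& r, uint32_t imageLayers) {
      AddressRange out;
      out.rangeStart = uint64_t(r.baseMipLevel) * imageLayers + r.baseArrayLayer;
      out.rangeEnd   = uint64_t(r.baseMipLevel + r.levelCount - 1u) * imageLayers
                     + (r.baseArrayLayer + r.layerCount - 1u);
      return out;
    }

    int main() {
      // Image with 4 array layers; query mips 1..2, layers 1..2.
      SubresourceRange r = { 1u, 2u, 1u, 2u };
      AddressRange c = coarseRange(r, 4u);

      // Prints [5, 10]: the interval also spans subresources 7 and 8
      // (mip 1 / layer 3 and mip 2 / layer 0), which is why the lookup
      // falls back to per-mip ranges when only some layers of several
      // mips are selected.
      std::printf("coarse range: [%llu, %llu]\n",
        (unsigned long long) c.rangeStart,
        (unsigned long long) c.rangeEnd);
      return 0;
    }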

View File

@@ -1408,7 +1408,8 @@ namespace dxvk {
     DxvkBarrierBatch m_sdmaBarriers;
     DxvkBarrierBatch m_initBarriers;
-    DxvkBarrierSet m_execBarriers;
+    DxvkBarrierBatch m_execBarriers;
+    DxvkBarrierTracker m_barrierTracker;
     DxvkBarrierControlFlags m_barrierControl;
 
     DxvkGpuQueryManager m_queryManager;
@@ -1895,10 +1896,18 @@ namespace dxvk {
           DxvkBufferView& bufferView,
           DxvkAccess access);
 
+    bool resourceHasAccess(
+          DxvkImage& image,
+    const VkImageSubresourceRange& subresources,
+          DxvkAccess access);
+
     bool resourceHasAccess(
           DxvkImageView& imageView,
           DxvkAccess access);
 
+    DxvkBarrierBatch& getBarrierBatch(
+          DxvkCmdBuffer cmdBuffer);
+
     template<typename Pred>
     bool checkResourceBarrier(
       const Pred& pred,

View File

@@ -565,6 +565,15 @@ namespace dxvk {
       return m_storage;
     }
 
+    /**
+     * \brief Retrieves resource ID for barrier tracking
+     * \returns Unique resource ID
+     */
+    uint64_t getResourceId() const {
+      constexpr static size_t Align = alignof(DxvkResourceAllocation);
+      return reinterpret_cast<uintptr_t>(m_storage.ptr()) / (Align & -Align);
+    }
+
     /**
      * \brief Creates or retrieves an image view
      *