#include <cstring>

#include "d3d11_context_imm.h"
#include "d3d11_device.h"
#include "d3d11_initializer.h"

namespace dxvk {

  D3D11Initializer::D3D11Initializer(
          D3D11Device*                pParent)
  : m_parent(pParent),
    m_device(pParent->GetDXVKDevice()),
    m_stagingBuffer(m_device, StagingBufferSize),
    m_stagingSignal(new sync::Fence(0)),
    m_csChunk(m_parent->AllocCsChunk(DxvkCsChunkFlag::SingleUse)) {

  }


  D3D11Initializer::~D3D11Initializer() {

  }


  void D3D11Initializer::NotifyContextFlush() {
    std::lock_guard<dxvk::mutex> lock(m_mutex);
    NotifyContextFlushLocked();
  }


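  // Picks the initialization path for a newly created buffer: buffers backed
  // by host-visible memory are written directly on the CPU, everything else
  // goes through a staging upload. Tiled (sparse) buffers are skipped here,
  // as no memory pages are bound to them at creation time.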
  void D3D11Initializer::InitBuffer(
          D3D11Buffer*                pBuffer,
    const D3D11_SUBRESOURCE_DATA*     pInitialData) {
    if (!(pBuffer->Desc()->MiscFlags & D3D11_RESOURCE_MISC_TILED)) {
      VkMemoryPropertyFlags memFlags = pBuffer->GetBuffer()->memFlags();

      (memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
        ? InitHostVisibleBuffer(pBuffer, pInitialData)
        : InitDeviceLocalBuffer(pBuffer, pInitialData);
    }
  }


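  // Dispatches texture initialization: tiled textures go through the sparse
  // path, directly mapped textures are written on the CPU, and everything
  // else takes the device-local upload path. Shared textures are synchronized
  // afterwards so that other devices only see fully initialized contents.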
  void D3D11Initializer::InitTexture(
          D3D11CommonTexture*         pTexture,
    const D3D11_SUBRESOURCE_DATA*     pInitialData) {
    if (pTexture->Desc()->MiscFlags & D3D11_RESOURCE_MISC_TILED)
      InitTiledTexture(pTexture);
    else if (pTexture->GetMapMode() == D3D11_COMMON_TEXTURE_MAP_MODE_DIRECT)
      InitHostVisibleTexture(pTexture, pInitialData);
    else
      InitDeviceLocalTexture(pTexture, pInitialData);

    SyncSharedTexture(pTexture);
  }


  void D3D11Initializer::InitUavCounter(
          D3D11UnorderedAccessView*   pUav) {
    auto counterView = pUav->GetCounterView();

    if (counterView == nullptr)
      return;

    std::lock_guard<dxvk::mutex> lock(m_mutex);
    m_transferCommands += 1;

    EmitCs([
      cCounterSlice = DxvkBufferSlice(counterView)
    ] (DxvkContext* ctx) {
      const uint32_t zero = 0;
      ctx->updateBuffer(
        cCounterSlice.buffer(),
        cCounterSlice.offset(),
        sizeof(zero), &zero);
    });
  }


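  // Uploads the provided initial data through the staging buffer, or clears
  // the buffer to zero on the GPU if the application did not provide any data.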
  void D3D11Initializer::InitDeviceLocalBuffer(
          D3D11Buffer*                pBuffer,
    const D3D11_SUBRESOURCE_DATA*     pInitialData) {
    std::lock_guard<dxvk::mutex> lock(m_mutex);

    Rc<DxvkBuffer> buffer = pBuffer->GetBuffer();

    if (pInitialData != nullptr && pInitialData->pSysMem != nullptr) {
      auto stagingSlice = m_stagingBuffer.alloc(buffer->info().size);
      std::memcpy(stagingSlice.mapPtr(0), pInitialData->pSysMem, stagingSlice.length());

      m_transferCommands += 1;

      EmitCs([
        cBuffer       = buffer,
        cStagingSlice = std::move(stagingSlice)
      ] (DxvkContext* ctx) {
        ctx->uploadBuffer(cBuffer,
          cStagingSlice.buffer(),
          cStagingSlice.offset());
      });
    } else {
      m_transferCommands += 1;

      EmitCs([
        cBuffer = buffer
      ] (DxvkContext* ctx) {
        ctx->initBuffer(cBuffer);
      });
    }

    ThrottleAllocationLocked();
  }


  void D3D11Initializer::InitHostVisibleBuffer(
          D3D11Buffer*                pBuffer,
    const D3D11_SUBRESOURCE_DATA*     pInitialData) {
    // If the buffer is mapped, we can write data directly
    // to the mapped memory region instead of doing it on
    // the GPU. Same goes for zero-initialization.
    if (pInitialData && pInitialData->pSysMem)
      std::memcpy(pBuffer->GetMapPtr(), pInitialData->pSysMem, pBuffer->Desc()->ByteWidth);
    else
      std::memset(pBuffer->GetMapPtr(), 0, pBuffer->Desc()->ByteWidth);
  }


  void D3D11Initializer::InitDeviceLocalTexture(
          D3D11CommonTexture*         pTexture,
    const D3D11_SUBRESOURCE_DATA*     pInitialData) {
    std::lock_guard<dxvk::mutex> lock(m_mutex);

    // Image might be null if this is a staging resource
    Rc<DxvkImage> image = pTexture->GetImage();

    auto mapMode = pTexture->GetMapMode();
    auto desc = pTexture->Desc();

    VkFormat packedFormat = m_parent->LookupPackedFormat(desc->Format, pTexture->GetFormatMode()).Format;
    auto formatInfo = lookupFormatInfo(packedFormat);

    if (pInitialData != nullptr && pInitialData->pSysMem != nullptr) {
      // Compute data size for all subresources and allocate staging buffer memory
      DxvkBufferSlice stagingSlice;
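      // Each array layer of every mip level gets its own packed region in the
      // staging buffer, aligned to CACHE_LINE_SIZE. The same alignment value is
      // passed to uploadImage below, which presumably consumes the subresources
      // at matching offsets.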
      if (mapMode != D3D11_COMMON_TEXTURE_MAP_MODE_STAGING) {
        VkDeviceSize dataSize = 0u;

        for (uint32_t mip = 0; mip < image->info().mipLevels; mip++) {
          dataSize += image->info().numLayers * align(util::computeImageDataSize(
            packedFormat, image->mipLevelExtent(mip), formatInfo->aspectMask), CACHE_LINE_SIZE);
        }

        stagingSlice = m_stagingBuffer.alloc(dataSize);
      }

      // Copy initial data for each subresource into the staging buffer,
      // as well as the mapped per-subresource buffers if available.
      VkDeviceSize dataOffset = 0u;

      for (uint32_t mip = 0; mip < desc->MipLevels; mip++) {
        for (uint32_t layer = 0; layer < desc->ArraySize; layer++) {
          uint32_t index = D3D11CalcSubresource(mip, layer, desc->MipLevels);
          VkExtent3D mipLevelExtent = pTexture->MipLevelExtent(mip);

          if (mapMode != D3D11_COMMON_TEXTURE_MAP_MODE_STAGING) {
            VkDeviceSize mipSizePerLayer = util::computeImageDataSize(
              packedFormat, image->mipLevelExtent(mip), formatInfo->aspectMask);

            m_transferCommands += 1;

            util::packImageData(stagingSlice.mapPtr(dataOffset),
              pInitialData[index].pSysMem, pInitialData[index].SysMemPitch, pInitialData[index].SysMemSlicePitch,
              0, 0, pTexture->GetVkImageType(), mipLevelExtent, 1, formatInfo, formatInfo->aspectMask);

            dataOffset += align(mipSizePerLayer, CACHE_LINE_SIZE);
          }

          if (mapMode != D3D11_COMMON_TEXTURE_MAP_MODE_NONE) {
            util::packImageData(pTexture->GetMapPtr(index, 0),
              pInitialData[index].pSysMem, pInitialData[index].SysMemPitch, pInitialData[index].SysMemSlicePitch,
              0, 0, pTexture->GetVkImageType(), mipLevelExtent, 1, formatInfo, formatInfo->aspectMask);
          }
        }
      }

      // Upload all subresources of the image in one go
      if (mapMode != D3D11_COMMON_TEXTURE_MAP_MODE_STAGING) {
        EmitCs([
          cImage        = std::move(image),
          cStagingSlice = std::move(stagingSlice),
          cFormat       = packedFormat
        ] (DxvkContext* ctx) {
          ctx->uploadImage(cImage,
            cStagingSlice.buffer(),
            cStagingSlice.offset(),
            CACHE_LINE_SIZE, cFormat);
        });
      }
    } else {
      if (mapMode != D3D11_COMMON_TEXTURE_MAP_MODE_STAGING) {
        m_transferCommands += 1;

        // While the Microsoft docs state that resource contents are
        // undefined if no initial data is provided, some applications
        // expect a resource to be pre-cleared.
        EmitCs([
          cImage = std::move(image)
        ] (DxvkContext* ctx) {
          ctx->initImage(cImage,
            cImage->getAvailableSubresources(),
            VK_IMAGE_LAYOUT_UNDEFINED);
        });
      }

      if (mapMode != D3D11_COMMON_TEXTURE_MAP_MODE_NONE) {
        for (uint32_t i = 0; i < pTexture->CountSubresources(); i++) {
          auto layout = pTexture->GetSubresourceLayout(formatInfo->aspectMask, i);
          std::memset(pTexture->GetMapPtr(i, layout.Offset), 0, layout.Size);
        }
      }
    }

    ThrottleAllocationLocked();
  }


  void D3D11Initializer::InitHostVisibleTexture(
          D3D11CommonTexture*         pTexture,
    const D3D11_SUBRESOURCE_DATA*     pInitialData) {
    Rc<DxvkImage> image = pTexture->GetImage();
    auto formatInfo = image->formatInfo();
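    // Initial data is copied one row of blocks at a time since the row pitch
    // of the mapped image may differ from the application's SysMemPitch; any
    // trailing bytes in a row are cleared to zero.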

    for (uint32_t layer = 0; layer < pTexture->Desc()->ArraySize; layer++) {
      for (uint32_t level = 0; level < pTexture->Desc()->MipLevels; level++) {
        uint32_t subresourceIndex = D3D11CalcSubresource(level, layer, pTexture->Desc()->MipLevels);

        VkImageSubresource subresource;
        subresource.aspectMask = formatInfo->aspectMask;
        subresource.mipLevel   = level;
        subresource.arrayLayer = layer;

        VkExtent3D blockCount = util::computeBlockCount(
          image->mipLevelExtent(level), formatInfo->blockSize);

        auto layout = pTexture->GetSubresourceLayout(
          subresource.aspectMask, subresourceIndex);

        if (pInitialData && pInitialData[subresourceIndex].pSysMem) {
          const auto& initialData = pInitialData[subresourceIndex];

          for (uint32_t z = 0; z < blockCount.depth; z++) {
            for (uint32_t y = 0; y < blockCount.height; y++) {
              auto size = blockCount.width * formatInfo->elementSize;

              auto dst = pTexture->GetMapPtr(subresourceIndex, layout.Offset
                + y * layout.RowPitch
                + z * layout.DepthPitch);

              auto src = reinterpret_cast<const char*>(initialData.pSysMem)
                + y * initialData.SysMemPitch
                + z * initialData.SysMemSlicePitch;

              std::memcpy(dst, src, size);

              if (size < layout.RowPitch)
                std::memset(reinterpret_cast<char*>(dst) + size, 0, layout.RowPitch - size);
            }
          }
        } else {
          void* dst = pTexture->GetMapPtr(subresourceIndex, layout.Offset);
          std::memset(dst, 0, layout.Size);
        }
      }
    }

    // Initialize the image on the GPU
    std::lock_guard<dxvk::mutex> lock(m_mutex);

    EmitCs([
      cImage = std::move(image)
    ] (DxvkContext* ctx) {
      ctx->initImage(cImage,
        cImage->getAvailableSubresources(),
        VK_IMAGE_LAYOUT_PREINITIALIZED);
    });

    m_transferCommands += 1;
    ThrottleAllocationLocked();
  }


  void D3D11Initializer::InitTiledTexture(
          D3D11CommonTexture*         pTexture) {
    std::lock_guard<dxvk::mutex> lock(m_mutex);

    EmitCs([
      cImage = pTexture->GetImage()
    ] (DxvkContext* ctx) {
      ctx->initSparseImage(cImage);
    });

    m_transferCommands += 1;
    ThrottleAllocationLocked();
  }


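  // Keeps initialization from queueing up an unbounded amount of work: flushes
  // once enough commands or staging memory have accumulated, and stalls the
  // calling thread if too much staging memory is still in flight on the GPU.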
  void D3D11Initializer::ThrottleAllocationLocked() {
    DxvkStagingBufferStats stats = m_stagingBuffer.getStatistics();

    // If the amount of memory in flight exceeds the limit, stall the
    // calling thread and wait for some memory to actually get released.
    VkDeviceSize stagingMemoryInFlight = stats.allocatedTotal - m_stagingSignal->value();

    if (stagingMemoryInFlight > MaxMemoryInFlight) {
      ExecuteFlushLocked();

      m_stagingSignal->wait(stats.allocatedTotal - MaxMemoryInFlight);
    } else if (m_transferCommands >= MaxCommandsPerSubmission || stats.allocatedSinceLastReset >= MaxMemoryPerSubmission) {
      // Flush pending commands if there are a lot of updates in flight
      // to keep both execution time and staging memory in check.
      ExecuteFlushLocked();
    }
  }


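  // ExecuteFlush and ExecuteFlushLocked submit all pending initialization
  // commands and signal m_stagingSignal with the total amount of staging
  // memory allocated so far, which is the value ThrottleAllocationLocked
  // waits on.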
  void D3D11Initializer::ExecuteFlush() {
    std::lock_guard lock(m_mutex);

    ExecuteFlushLocked();
  }


  void D3D11Initializer::ExecuteFlushLocked() {
    DxvkStagingBufferStats stats = m_stagingBuffer.getStatistics();

    EmitCs([
      cSignal      = m_stagingSignal,
      cSignalValue = stats.allocatedTotal
    ] (DxvkContext* ctx) {
      ctx->signal(cSignal, cSignalValue);
      ctx->flushCommandList(nullptr);
    });

    FlushCsChunk();

    NotifyContextFlushLocked();
  }


  void D3D11Initializer::SyncSharedTexture(D3D11CommonTexture* pResource) {
    if (!(pResource->Desc()->MiscFlags & (D3D11_RESOURCE_MISC_SHARED | D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX | D3D11_RESOURCE_MISC_SHARED_NTHANDLE)))
      return;

    // Ensure that initialization commands are submitted and waited on before
    // returning control to the application in order to avoid race conditions
    // in case the texture is used immediately on a secondary device.
    auto mapMode = pResource->GetMapMode();

    if (mapMode == D3D11_COMMON_TEXTURE_MAP_MODE_NONE
     || mapMode == D3D11_COMMON_TEXTURE_MAP_MODE_BUFFER) {
      ExecuteFlush();

      m_device->waitForResource(*pResource->GetImage(), DxvkAccess::Write);
    }

    // If a keyed mutex is used, initialize that to the correct state as well.
    Com<IDXGIKeyedMutex> keyedMutex;

    if (SUCCEEDED(pResource->GetInterface()->QueryInterface(
        __uuidof(IDXGIKeyedMutex), reinterpret_cast<void**>(&keyedMutex)))) {
      keyedMutex->AcquireSync(0, 0);
      keyedMutex->ReleaseSync(0);
    }
  }


  void D3D11Initializer::FlushCsChunkLocked() {
    m_parent->GetContext()->InjectCsChunk(std::move(m_csChunk), false);
    m_csChunk = m_parent->AllocCsChunk(DxvkCsChunkFlag::SingleUse);
  }


  void D3D11Initializer::NotifyContextFlushLocked() {
    m_stagingBuffer.reset();
    m_transferCommands = 0;
  }

}