/*
 * Mirror of https://github.com/Yours3lf/rpi-vk-driver.git (synced 2024-12-13)
 * File: rpi-vk-driver/driver/descriptorSet.c
 */
#include "common.h"
#include "declarations.h"
/// Creates a descriptor pool: one pool allocator for the set objects plus one
/// consecutive pool allocator per descriptor backing class (image / buffer /
/// texel buffer) and one for the binding maps.
/// Returns VK_ERROR_OUT_OF_HOST_MEMORY on allocation failure, releasing
/// everything allocated so far (the previous version leaked on every error path).
VKAPI_ATTR VkResult VKAPI_CALL RPIFUNC(vkCreateDescriptorPool)(
	VkDevice device,
	const VkDescriptorPoolCreateInfo* pCreateInfo,
	const VkAllocationCallbacks* pAllocator,
	VkDescriptorPool* pDescriptorPool)
{
	PROFILESTART(RPIFUNC(vkCreateDescriptorPool));

	assert(device);
	assert(pCreateInfo);

	_descriptorPool* dp = ALLOCATE(sizeof(_descriptorPool), 1, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);

	if(!dp)
	{
		PROFILEEND(RPIFUNC(vkCreateDescriptorPool));
		return VK_ERROR_OUT_OF_HOST_MEMORY;
	}

	//Zero the pool unconditionally (was DEBUG_BUILD only): vkDestroyDescriptorPool
	//FREEs every CPA buf pointer, and a pool that never allocates a given
	//descriptor class would otherwise hand an uninitialized pointer to FREE.
	//Zeroing also lets the error path below FREE not-yet-allocated members safely.
	memset(dp, 0, sizeof(_descriptorPool));

	//Sum how many descriptors of each backing class this pool must be able to hold.
	uint32_t imageDescriptorCount = 0, bufferDescriptorCount = 0, texelBufferDescriptorCount = 0;
	for(uint32_t c = 0; c < pCreateInfo->poolSizeCount; ++c)
	{
		switch(pCreateInfo->pPoolSizes[c].type)
		{
		case VK_DESCRIPTOR_TYPE_SAMPLER:
		case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
		case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
		case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
		case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
			imageDescriptorCount += pCreateInfo->pPoolSizes[c].descriptorCount;
			break;
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
			bufferDescriptorCount += pCreateInfo->pPoolSizes[c].descriptorCount;
			break;
		case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
			texelBufferDescriptorCount += pCreateInfo->pPoolSizes[c].descriptorCount;
			break;
		default:
			assert(0);
			break;
		}
	}

	dp->freeAble = pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;

	//Backing storage for the descriptor set objects themselves.
	void* dsmem = ALLOCATE(sizeof(_descriptorSet)*pCreateInfo->maxSets, 1, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if(!dsmem)
	{
		goto err_out_of_memory;
	}
	dp->descriptorSetPA = createPoolAllocator(dsmem, sizeof(_descriptorSet), sizeof(_descriptorSet) * pCreateInfo->maxSets);

	//Backing storage for the per-set binding->descriptor maps.
	//NOTE(review): if all counts are 0, ALLOCATE is called with size 0 - confirm
	//ALLOCATE(0, ...) does not return NULL, or this reports a spurious OOM.
	uint32_t mapElemBlockSize = sizeof(mapElem);
	uint32_t mapBufSize = mapElemBlockSize * (imageDescriptorCount + bufferDescriptorCount + texelBufferDescriptorCount);
	void* memem = ALLOCATE(mapBufSize, 1, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if(!memem)
	{
		goto err_out_of_memory;
	}
	dp->mapElementCPA = createConsecutivePoolAllocator(memem, mapElemBlockSize, mapBufSize);

	if(imageDescriptorCount > 0)
	{
		uint32_t blockSize = sizeof(_descriptorImage);
		void* mem = ALLOCATE(blockSize*imageDescriptorCount, 1, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
		if(!mem)
		{
			goto err_out_of_memory;
		}
		dp->imageDescriptorCPA = createConsecutivePoolAllocator(mem, blockSize, blockSize * imageDescriptorCount);
	}

	if(bufferDescriptorCount > 0)
	{
		uint32_t blockSize = sizeof(_descriptorBuffer);
		void* mem = ALLOCATE(blockSize*bufferDescriptorCount, 1, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
		if(!mem)
		{
			goto err_out_of_memory;
		}
		dp->bufferDescriptorCPA = createConsecutivePoolAllocator(mem, blockSize, blockSize * bufferDescriptorCount);
	}

	if(texelBufferDescriptorCount > 0)
	{
		uint32_t blockSize = sizeof(_descriptorBuffer);
		void* mem = ALLOCATE(blockSize*texelBufferDescriptorCount, 1, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
		if(!mem)
		{
			goto err_out_of_memory;
		}
		dp->texelBufferDescriptorCPA = createConsecutivePoolAllocator(mem, blockSize, blockSize * texelBufferDescriptorCount);
	}

	*pDescriptorPool = dp;

	PROFILEEND(RPIFUNC(vkCreateDescriptorPool));
	return VK_SUCCESS;

err_out_of_memory:
	//dp was zeroed above, so members not yet allocated are NULL here; the Vulkan
	//spec requires pfnFree to handle NULL safely, matching free(NULL) semantics.
	FREE(dp->texelBufferDescriptorCPA.buf);
	FREE(dp->bufferDescriptorCPA.buf);
	FREE(dp->imageDescriptorCPA.buf);
	FREE(dp->mapElementCPA.buf);
	FREE(dp->descriptorSetPA.buf);
	FREE(dp);
	PROFILEEND(RPIFUNC(vkCreateDescriptorPool));
	return VK_ERROR_OUT_OF_HOST_MEMORY;
}
/// Allocates descriptorSetCount sets from the pool, one per entry of
/// pAllocateInfo->pSetLayouts. For each set: pass 1 counts the descriptors of
/// each backing class the layout needs, then storage and binding maps are
/// carved out of the pool's allocators, and pass 2 wires every binding number
/// to its slice of that storage.
VKAPI_ATTR VkResult VKAPI_CALL RPIFUNC(vkAllocateDescriptorSets)(
	VkDevice device,
	const VkDescriptorSetAllocateInfo* pAllocateInfo,
	VkDescriptorSet* pDescriptorSets)
{
	PROFILESTART(RPIFUNC(vkAllocateDescriptorSets));

	assert(device);

	_descriptorPool* pool = pAllocateInfo->descriptorPool;

	for(uint32_t setIdx = 0; setIdx < pAllocateInfo->descriptorSetCount; ++setIdx)
	{
		//NOTE(review): poolAllocate / consecutivePoolAllocate results are not
		//checked, so pool exhaustion is not reported - confirm this is intended.
		_descriptorSet* set = poolAllocate(&pool->descriptorSetPA);
		pDescriptorSets[setIdx] = set;

		_descriptorSetLayout* layout = pAllocateInfo->pSetLayouts[setIdx];

		//Pass 1: tally descriptors per backing class for this layout.
		uint32_t numImages = 0, numBuffers = 0, numTexelBuffers = 0;
		for(uint32_t b = 0; b < layout->bindingsCount; ++b)
		{
			switch(layout->bindings[b].descriptorType)
			{
			case VK_DESCRIPTOR_TYPE_SAMPLER:
			case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
			case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
			case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
			case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
				numImages += layout->bindings[b].descriptorCount;
				break;
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
				numBuffers += layout->bindings[b].descriptorCount;
				break;
			case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
				numTexelBuffers += layout->bindings[b].descriptorCount;
				break;
			default:
				assert(0);
				break;
			}
		}

		set->imageDescriptorsCount = numImages;
		set->bufferDescriptorsCount = numBuffers;
		set->texelBufferDescriptorsCount = numTexelBuffers;

		set->imageDescriptors = 0;
		set->bufferDescriptors = 0;
		set->texelBufferDescriptors = 0;

		//Carve descriptor storage and a binding map for each non-empty class.
		if(numImages > 0)
		{
			set->imageDescriptors = getCPAptrFromOffset(&pool->imageDescriptorCPA, consecutivePoolAllocate(&pool->imageDescriptorCPA, numImages));
			set->imageBindingMap = createMap(getCPAptrFromOffset(&pool->mapElementCPA, consecutivePoolAllocate(&pool->mapElementCPA, numImages)), numImages);
		}

		if(numBuffers > 0)
		{
			set->bufferDescriptors = getCPAptrFromOffset(&pool->bufferDescriptorCPA, consecutivePoolAllocate(&pool->bufferDescriptorCPA, numBuffers));
			set->bufferBindingMap = createMap(getCPAptrFromOffset(&pool->mapElementCPA, consecutivePoolAllocate(&pool->mapElementCPA, numBuffers)), numBuffers);
		}

		if(numTexelBuffers > 0)
		{
			set->texelBufferDescriptors = getCPAptrFromOffset(&pool->texelBufferDescriptorCPA, consecutivePoolAllocate(&pool->texelBufferDescriptorCPA, numTexelBuffers));
			set->texelBufferBindingMap = createMap(getCPAptrFromOffset(&pool->mapElementCPA, consecutivePoolAllocate(&pool->mapElementCPA, numTexelBuffers)), numTexelBuffers);
		}

		//TODO immutable samplers

		//Pass 2: map each binding number to its slice of descriptor storage.
		uint32_t imageSlot = 0, bufferSlot = 0, texelBufferSlot = 0;
		for(uint32_t b = 0; b < layout->bindingsCount; ++b)
		{
			const VkDescriptorSetLayoutBinding* binding = &layout->bindings[b];

			switch(binding->descriptorType)
			{
			case VK_DESCRIPTOR_TYPE_SAMPLER:
			case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
			case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
			case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
			case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
				setMapElement(&set->imageBindingMap, binding->binding, &set->imageDescriptors[imageSlot]);
				set->imageDescriptors[imageSlot].count = binding->descriptorCount;
				set->imageDescriptors[imageSlot].type = binding->descriptorType;
				set->imageDescriptors[imageSlot].stageFlags = binding->stageFlags;
				imageSlot += binding->descriptorCount;
				break;
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
				setMapElement(&set->bufferBindingMap, binding->binding, &set->bufferDescriptors[bufferSlot]);
				set->bufferDescriptors[bufferSlot].count = binding->descriptorCount;
				set->bufferDescriptors[bufferSlot].type = binding->descriptorType;
				set->bufferDescriptors[bufferSlot].stageFlags = binding->stageFlags;
				bufferSlot += binding->descriptorCount;
				break;
			case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
				setMapElement(&set->texelBufferBindingMap, binding->binding, &set->texelBufferDescriptors[texelBufferSlot]);
				set->texelBufferDescriptors[texelBufferSlot].count = binding->descriptorCount;
				set->texelBufferDescriptors[texelBufferSlot].type = binding->descriptorType;
				set->texelBufferDescriptors[texelBufferSlot].stageFlags = binding->stageFlags;
				texelBufferSlot += binding->descriptorCount;
				break;
			default:
				assert(0);
				break;
			}
		}
	}

	PROFILEEND(RPIFUNC(vkAllocateDescriptorSets));

	return VK_SUCCESS;
}
/// Creates a descriptor set layout by deep-copying the caller's binding array.
/// Returns VK_ERROR_OUT_OF_HOST_MEMORY on allocation failure; the previous
/// version leaked the layout object when the bindings allocation failed.
VKAPI_ATTR VkResult VKAPI_CALL RPIFUNC(vkCreateDescriptorSetLayout)(
	VkDevice device,
	const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
	const VkAllocationCallbacks* pAllocator,
	VkDescriptorSetLayout* pSetLayout)
{
	PROFILESTART(RPIFUNC(vkCreateDescriptorSetLayout));

	assert(device);
	assert(pCreateInfo);

	_descriptorSetLayout* dsl = ALLOCATE(sizeof(_descriptorSetLayout), 1, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);

	if(!dsl)
	{
		PROFILEEND(RPIFUNC(vkCreateDescriptorSetLayout));
		return VK_ERROR_OUT_OF_HOST_MEMORY;
	}

	//Copy the bindings so the layout stays valid after pCreateInfo goes away.
	//NOTE(review): bindingCount may legally be 0, making this an ALLOCATE(0, ...)
	//call - confirm ALLOCATE(0, ...) does not return NULL.
	dsl->bindings = ALLOCATE(sizeof(VkDescriptorSetLayoutBinding)*pCreateInfo->bindingCount, 1, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if(!dsl->bindings)
	{
		FREE(dsl); //don't leak the layout object on OOM
		PROFILEEND(RPIFUNC(vkCreateDescriptorSetLayout));
		return VK_ERROR_OUT_OF_HOST_MEMORY;
	}

	memcpy(dsl->bindings, pCreateInfo->pBindings, sizeof(VkDescriptorSetLayoutBinding)*pCreateInfo->bindingCount);

	//TODO immutable samplers

	dsl->flags = pCreateInfo->flags;
	dsl->bindingsCount = pCreateInfo->bindingCount;

	*pSetLayout = dsl;

	PROFILEEND(RPIFUNC(vkCreateDescriptorSetLayout));

	return VK_SUCCESS;
}
/// Applies descriptor writes (copy user payloads into set storage) and
/// descriptor copies (memcpy between sets).
/// Fix: the write path dereferenced getMapElement's result without checking it,
/// so a write to a binding the set's layout never declared crashed silently;
/// assert now catches that case (matching the file's assert(0) style).
VKAPI_ATTR void VKAPI_CALL RPIFUNC(vkUpdateDescriptorSets)(
	VkDevice device,
	uint32_t descriptorWriteCount,
	const VkWriteDescriptorSet* pDescriptorWrites,
	uint32_t descriptorCopyCount,
	const VkCopyDescriptorSet* pDescriptorCopies)
{
	PROFILESTART(RPIFUNC(vkUpdateDescriptorSets));

	assert(device);

	//Writes: route each write to the storage class its descriptor type lives in.
	for(uint32_t c = 0; c < descriptorWriteCount; ++c)
	{
		_descriptorSet* ds = pDescriptorWrites[c].dstSet;

		switch(pDescriptorWrites[c].descriptorType)
		{
		case VK_DESCRIPTOR_TYPE_SAMPLER:
		case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
		case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
		case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
		case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
		{
			_descriptorImage* di = getMapElement(ds->imageBindingMap, pDescriptorWrites[c].dstBinding);
			assert(di); //binding must exist in the set's layout
			di += pDescriptorWrites[c].dstArrayElement;
			for(uint32_t d = 0; d < pDescriptorWrites[c].descriptorCount; ++d, di++)
			{
				di->imageLayout = pDescriptorWrites[c].pImageInfo[d].imageLayout;
				di->imageView = pDescriptorWrites[c].pImageInfo[d].imageView;
				di->sampler = pDescriptorWrites[c].pImageInfo[d].sampler;
			}
			break;
		}
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
		{
			_descriptorBuffer* di = getMapElement(ds->bufferBindingMap, pDescriptorWrites[c].dstBinding);
			assert(di); //binding must exist in the set's layout
			di += pDescriptorWrites[c].dstArrayElement;
			for(uint32_t d = 0; d < pDescriptorWrites[c].descriptorCount; ++d, di++)
			{
				di->buffer = pDescriptorWrites[c].pBufferInfo[d].buffer;
				di->offset = pDescriptorWrites[c].pBufferInfo[d].offset;
				di->range = pDescriptorWrites[c].pBufferInfo[d].range;
			}
			break;
		}
		case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
		{
			_descriptorTexelBuffer* di = getMapElement(ds->texelBufferBindingMap, pDescriptorWrites[c].dstBinding);
			assert(di); //binding must exist in the set's layout
			di += pDescriptorWrites[c].dstArrayElement;
			for(uint32_t d = 0; d < pDescriptorWrites[c].descriptorCount; ++d, di++)
			{
				di->bufferView = pDescriptorWrites[c].pTexelBufferView[d];
			}
			break;
		}
		default:
		{
			assert(0);
			break;
		}
		}
	}

	//Copies: raw memcpy per class; the NULL guards skip classes the source
	//binding does not contain.
	for(uint32_t c = 0; c < descriptorCopyCount; ++c)
	{
		_descriptorSet* sds = pDescriptorCopies[c].srcSet;
		_descriptorSet* dds = pDescriptorCopies[c].dstSet;

		_descriptorImage* sdi = getMapElement(sds->imageBindingMap, pDescriptorCopies[c].srcBinding);
		if(sdi)
		{
			_descriptorImage* ddi = getMapElement(dds->imageBindingMap, pDescriptorCopies[c].dstBinding);
			sdi += pDescriptorCopies[c].srcArrayElement;
			ddi += pDescriptorCopies[c].dstArrayElement;
			memcpy(ddi, sdi, sizeof(_descriptorImage) * pDescriptorCopies[c].descriptorCount);
		}

		_descriptorBuffer* sdb = getMapElement(sds->bufferBindingMap, pDescriptorCopies[c].srcBinding);
		if(sdb)
		{
			_descriptorBuffer* ddb = getMapElement(dds->bufferBindingMap, pDescriptorCopies[c].dstBinding);
			sdb += pDescriptorCopies[c].srcArrayElement;
			ddb += pDescriptorCopies[c].dstArrayElement;
			memcpy(ddb, sdb, sizeof(_descriptorBuffer) * pDescriptorCopies[c].descriptorCount);
		}

		_descriptorTexelBuffer* sdtb = getMapElement(sds->texelBufferBindingMap, pDescriptorCopies[c].srcBinding);
		if(sdtb)
		{
			_descriptorTexelBuffer* ddtb = getMapElement(dds->texelBufferBindingMap, pDescriptorCopies[c].dstBinding);
			sdtb += pDescriptorCopies[c].srcArrayElement;
			ddtb += pDescriptorCopies[c].dstArrayElement;
			memcpy(ddtb, sdtb, sizeof(_descriptorTexelBuffer) * pDescriptorCopies[c].descriptorCount);
		}
	}

	PROFILEEND(RPIFUNC(vkUpdateDescriptorSets));
}
/// Stub: should return all descriptor sets allocated from the pool back to its
/// allocators. Currently does nothing and always reports success.
VKAPI_ATTR VkResult VKAPI_CALL RPIFUNC(vkResetDescriptorPool)(
	VkDevice device,
	VkDescriptorPool descriptorPool,
	VkDescriptorPoolResetFlags flags)
{
	PROFILESTART(RPIFUNC(vkResetDescriptorPool));

	//TODO
	PROFILEEND(RPIFUNC(vkResetDescriptorPool));

	return VK_SUCCESS;
}
/// Destroys a descriptor pool and all backing buffers of its sub-allocators.
/// Fix: the Vulkan spec allows descriptorPool to be VK_NULL_HANDLE (the call
/// must then be a no-op); the old assert rejected that legal input.
VKAPI_ATTR void VKAPI_CALL RPIFUNC(vkDestroyDescriptorPool)(
	VkDevice device,
	VkDescriptorPool descriptorPool,
	const VkAllocationCallbacks* pAllocator)
{
	PROFILESTART(RPIFUNC(vkDestroyDescriptorPool));

	assert(device);

	_descriptorPool* dp = descriptorPool;

	if(dp)
	{
		//NOTE(review): the CPA buf pointers are only zeroed in DEBUG_BUILD at
		//create time; in release builds a pool that never allocated a class may
		//pass an uninitialized pointer to FREE - confirm create zeroes dp.
		FREE(dp->descriptorSetPA.buf);
		FREE(dp->mapElementCPA.buf);
		FREE(dp->imageDescriptorCPA.buf);
		FREE(dp->texelBufferDescriptorCPA.buf);
		FREE(dp->bufferDescriptorCPA.buf);

		FREE(dp);
	}

	PROFILEEND(RPIFUNC(vkDestroyDescriptorPool));
}
/// Records the sets to bind by storing them in the pipeline layout's binding
/// map and marking the command buffer's descriptor state dirty.
/// Fix: pDescriptorSets holds descriptorSetCount entries indexed from 0, while
/// the target binding slot is offset by firstSet. The old loop indexed
/// pDescriptorSets[firstSet + c], reading out of bounds (and binding the wrong
/// sets) whenever firstSet > 0.
VKAPI_ATTR void VKAPI_CALL RPIFUNC(vkCmdBindDescriptorSets)(
	VkCommandBuffer commandBuffer,
	VkPipelineBindPoint pipelineBindPoint,
	VkPipelineLayout layout,
	uint32_t firstSet,
	uint32_t descriptorSetCount,
	const VkDescriptorSet* pDescriptorSets,
	uint32_t dynamicOffsetCount,
	const uint32_t* pDynamicOffsets)
{
	PROFILESTART(RPIFUNC(vkCmdBindDescriptorSets));

	//TODO dynamic offsets

	assert(commandBuffer);
	assert(layout);
	assert(pDescriptorSets);

	_commandBuffer* cb = commandBuffer;

	//use pipeline layout's memory to store what is bound...
	_pipelineLayout* pl = layout;//pipelineBindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS ? cb->graphicsPipeline->layout : cb->computePipeline->layout;

	assert(firstSet + descriptorSetCount <= pl->setLayoutCount);

	for(uint32_t c = 0; c < descriptorSetCount; ++c)
	{
		//set c of the caller's array occupies binding slot firstSet + c
		setMapElement(&pl->descriptorSetBindingMap, firstSet + c, pDescriptorSets[c]);
	}

	cb->descriptorSetDirty = 1;

	PROFILEEND(RPIFUNC(vkCmdBindDescriptorSets));
}
/// Destroys a descriptor set layout and its copied binding array.
/// Fix: the Vulkan spec allows descriptorSetLayout to be VK_NULL_HANDLE (then
/// the call must be a no-op); the old assert rejected that legal input.
VKAPI_ATTR void VKAPI_CALL RPIFUNC(vkDestroyDescriptorSetLayout)(
	VkDevice device,
	VkDescriptorSetLayout descriptorSetLayout,
	const VkAllocationCallbacks* pAllocator)
{
	PROFILESTART(RPIFUNC(vkDestroyDescriptorSetLayout));

	assert(device);

	_descriptorSetLayout* dsl = descriptorSetLayout;

	if(dsl)
	{
		FREE(dsl->bindings);
		FREE(dsl);
	}

	PROFILEEND(RPIFUNC(vkDestroyDescriptorSetLayout));
}
/// Returns the given descriptor sets (and their per-class storage and binding
/// maps) to the pool's allocators. Only legal on pools created with
/// VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT.
/// Fix: per the Vulkan spec, elements of pDescriptorSets may be
/// VK_NULL_HANDLE; the old code dereferenced them unconditionally.
VKAPI_ATTR VkResult VKAPI_CALL RPIFUNC(vkFreeDescriptorSets)(
	VkDevice device,
	VkDescriptorPool descriptorPool,
	uint32_t descriptorSetCount,
	const VkDescriptorSet* pDescriptorSets)
{
	PROFILESTART(RPIFUNC(vkFreeDescriptorSets));

	assert(device);
	assert(descriptorPool);

	_descriptorPool* dp = descriptorPool;

	assert(dp->freeAble);

	for(uint32_t c = 0; c < descriptorSetCount; ++c)
	{
		_descriptorSet* ds = pDescriptorSets[c];

		//spec allows VK_NULL_HANDLE entries; skip them
		if(!ds)
		{
			continue;
		}

		//Release each non-empty descriptor class' storage and binding map.
		if(ds->imageDescriptorsCount > 0)
		{
			consecutivePoolFree(&dp->mapElementCPA, ds->imageBindingMap.elements, ds->imageDescriptorsCount);
			consecutivePoolFree(&dp->imageDescriptorCPA, ds->imageDescriptors, ds->imageDescriptorsCount);
		}

		if(ds->bufferDescriptorsCount > 0)
		{
			consecutivePoolFree(&dp->mapElementCPA, ds->bufferBindingMap.elements, ds->bufferDescriptorsCount);
			consecutivePoolFree(&dp->bufferDescriptorCPA, ds->bufferDescriptors, ds->bufferDescriptorsCount);
		}

		if(ds->texelBufferDescriptorsCount > 0)
		{
			consecutivePoolFree(&dp->mapElementCPA, ds->texelBufferBindingMap.elements, ds->texelBufferDescriptorsCount);
			consecutivePoolFree(&dp->texelBufferDescriptorCPA, ds->texelBufferDescriptors, ds->texelBufferDescriptorsCount);
		}

		poolFree(&dp->descriptorSetPA, ds);
	}

	PROFILEEND(RPIFUNC(vkFreeDescriptorSets));

	return VK_SUCCESS;
}
/// Stub: descriptor update templates are not implemented; no template object is
/// created, yet success is reported and *pDescriptorUpdateTemplate is left
/// unwritten.
VKAPI_ATTR VkResult VKAPI_CALL RPIFUNC(vkCreateDescriptorUpdateTemplate)(
	VkDevice device,
	const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
	const VkAllocationCallbacks* pAllocator,
	VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate)
{
	PROFILESTART(RPIFUNC(vkCreateDescriptorUpdateTemplate));

	//TODO
	PROFILEEND(RPIFUNC(vkCreateDescriptorUpdateTemplate));

	return VK_SUCCESS;
}
/// Stub: no-op, matching the unimplemented vkCreateDescriptorUpdateTemplate.
VKAPI_ATTR void VKAPI_CALL RPIFUNC(vkDestroyDescriptorUpdateTemplate)(
	VkDevice device,
	VkDescriptorUpdateTemplate descriptorUpdateTemplate,
	const VkAllocationCallbacks* pAllocator)
{
	PROFILESTART(RPIFUNC(vkDestroyDescriptorUpdateTemplate));

	//TODO
	PROFILEEND(RPIFUNC(vkDestroyDescriptorUpdateTemplate));
}
/// Stub: template-based descriptor updates are not implemented; pData is
/// ignored and the set is left unchanged.
VKAPI_ATTR void VKAPI_CALL RPIFUNC(vkUpdateDescriptorSetWithTemplate)(
	VkDevice device,
	VkDescriptorSet descriptorSet,
	VkDescriptorUpdateTemplate descriptorUpdateTemplate,
	const void* pData)
{
	PROFILESTART(RPIFUNC(vkUpdateDescriptorSetWithTemplate));

	//TODO
	PROFILEEND(RPIFUNC(vkUpdateDescriptorSetWithTemplate));
}
/// Stub: layout support queries are not implemented; pSupport is never written,
/// so callers currently receive whatever they passed in.
VKAPI_ATTR void VKAPI_CALL RPIFUNC(vkGetDescriptorSetLayoutSupport)(
	VkDevice device,
	const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
	VkDescriptorSetLayoutSupport* pSupport)
{
	PROFILESTART(RPIFUNC(vkGetDescriptorSetLayoutSupport));

	//TODO
	PROFILEEND(RPIFUNC(vkGetDescriptorSetLayoutSupport));
}