#include "common.h"
#include "declarations.h"

#include "kernel/vc4_packet.h"

//returns max index
static uint32_t drawCommon(VkCommandBuffer commandBuffer, int32_t vertexOffset)
{
	assert(commandBuffer);

	_commandBuffer* cb = commandBuffer;

	assert(((CLMarker*)getCPAptrFromOffset(cb->binCl.CPA, cb->binCl.currMarkerOffset))->memGuard == 0xDDDDDDDD);

	//TODO handle cases when submitting >65k vertices in a VBO
	//TODO HW-2116 workaround
	//TODO GFXH-515 / SW-5891 workaround

	//TODO make this as lightweight as possible to make sure
	//as many drawcalls can be submitted as possible

	//uint32_t vertexBufferDirty;
	//uint32_t indexBufferDirty;
	///uint32_t viewportDirty;
	///uint32_t lineWidthDirty;
	///uint32_t depthBiasDirty;
	///uint32_t depthBoundsDirty;
	//uint32_t graphicsPipelineDirty;
	//uint32_t computePipelineDirty;
	//uint32_t subpassDirty;
	//uint32_t blendConstantsDirty;
	//uint32_t scissorDirty;
	//uint32_t stencilCompareMaskDirty;
	//uint32_t stencilWriteMaskDirty;
	//uint32_t stencilReferenceDirty;
	//uint32_t descriptorSetDirty;
	//uint32_t pushConstantDirty;

	static uint32_t drawCommon1;
	PROFILESTART(&drawCommon1);

	//TODO multiple viewports
	VkViewport vp;
	vp = cb->graphicsPipeline->viewports[0];
	for(uint32_t c = 0; c < cb->graphicsPipeline->dynamicStateCount; ++c)
	{
		if(cb->graphicsPipeline->dynamicStates[c] == VK_DYNAMIC_STATE_VIEWPORT)
		{
			vp = cb->viewport;
		}
	}

	//if(cb->lineWidthDirty)
	{
		//Line width
		clFit(&commandBuffer->binCl, V3D21_LINE_WIDTH_length);
		clInsertLineWidth(&commandBuffer->binCl, cb->graphicsPipeline->lineWidth);

		cb->lineWidthDirty = 0;
	}

	//if(cb->viewportDirty)
	{
		//Clip Window
		clFit(&commandBuffer->binCl, V3D21_CLIP_WINDOW_length);
		clInsertClipWindow(&commandBuffer->binCl,
						   vp.width,
						   vp.height,
						   vp.y,  //bottom pixel coord
						   vp.x); //left pixel coord

		//Vulkan conventions, Y flipped [1...-1] bottom->top
		//Clipper XY Scaling
		clFit(&commandBuffer->binCl, V3D21_CLIPPER_XY_SCALING_length);
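		//the clipper works in 1/16th-of-a-pixel units, hence the *16.0f below;
		//the half-width/half-height factors map the [-1...1] clip space range onto the viewport extent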
		clInsertClipperXYScaling(&commandBuffer->binCl, (float)(vp.width) * 0.5f * 16.0f, 1.0f * (float)(vp.height) * 0.5f * 16.0f);

		//Viewport Offset
		clFit(&commandBuffer->binCl, V3D21_VIEWPORT_OFFSET_length);
		clInsertViewPortOffset(&commandBuffer->binCl, vp.width * 0.5f + vp.x, vp.height * 0.5f + vp.y);
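		//the viewport offset is the pixel space centre of the viewport
		//(vp.x/vp.y is the upper-left corner in Vulkan, hence corner + half extent)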

		cb->viewportDirty = 0;
	}
	//if(cb->depthBiasDirty || cb->depthBoundsDirty)
	{
		//Configuration Bits
		clFit(&commandBuffer->binCl, V3D21_CONFIGURATION_BITS_length);
		clInsertConfigurationBits(&commandBuffer->binCl,
								  1, //earlyz updates enable
								  cb->graphicsPipeline->depthTestEnable, //earlyz enable
								  cb->graphicsPipeline->depthWriteEnable && cb->graphicsPipeline->depthTestEnable, //z updates enable
								  cb->graphicsPipeline->depthTestEnable ? getCompareOp(cb->graphicsPipeline->depthCompareOp) : V3D_COMPARE_FUNC_ALWAYS, //depth compare func
								  0, //coverage read mode
								  0, //coverage pipe select
								  0, //coverage update mode
								  0, //coverage read type
								  cb->graphicsPipeline->rasterizationSamples > 1, //rasterizer oversample mode
								  cb->graphicsPipeline->depthBiasEnable, //depth offset enable
								  cb->graphicsPipeline->frontFace == VK_FRONT_FACE_CLOCKWISE, //clockwise
								  !(cb->graphicsPipeline->cullMode & VK_CULL_MODE_BACK_BIT), //enable back facing primitives
								  !(cb->graphicsPipeline->cullMode & VK_CULL_MODE_FRONT_BIT)); //enable front facing primitives

		clFit(&commandBuffer->binCl, V3D21_DEPTH_OFFSET_length);
		float depthBiasConstant = cb->graphicsPipeline->depthBiasConstantFactor;
		float depthBiasSlope = cb->graphicsPipeline->depthBiasSlopeFactor;
		for(uint32_t c = 0; c < cb->graphicsPipeline->dynamicStateCount; ++c)
		{
			if(cb->graphicsPipeline->dynamicStates[c] == VK_DYNAMIC_STATE_DEPTH_BIAS)
			{
				depthBiasConstant = cb->depthBiasConstantFactor;
				depthBiasSlope = cb->depthBiasSlopeFactor;
				break;
			}
		}
		clInsertDepthOffset(&commandBuffer->binCl, depthBiasConstant, depthBiasSlope);

		//Vulkan conventions, we expect the resulting NDC space Z axis to be in range [0...1] close->far
		//cb->graphicsPipeline->minDepthBounds;
		//Clipper Z Scale and Offset
		clFit(&commandBuffer->binCl, V3D21_CLIPPER_Z_SCALE_AND_OFFSET_length);
		//offset, scale
		float scale = vp.maxDepth - vp.minDepth;
		float offset = vp.minDepth;
		clInsertClipperZScaleOffset(&commandBuffer->binCl, offset, scale);
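		//this is the standard Vulkan viewport depth transform:
		//Zwindow = Zndc * (maxDepth - minDepth) + minDepth, with Zndc already in [0...1]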

		cb->depthBiasDirty = 0;
		cb->depthBoundsDirty = 0;
	}

	//Point size
	clFit(&commandBuffer->binCl, V3D21_POINT_SIZE_length);
	clInsertPointSize(&commandBuffer->binCl, 1.0f);

	//TODO?
	//Flat Shade Flags
	clFit(&commandBuffer->binCl, V3D21_FLAT_SHADE_FLAGS_length);
	clInsertFlatShadeFlags(&commandBuffer->binCl, 0);

	//GL Shader State
	clFit(&commandBuffer->binCl, V3D21_GL_SHADER_STATE_length);
	clInsertShaderState(&commandBuffer->binCl,
						0, //shader state record address
						0, //extended shader state record
						cb->graphicsPipeline->vertexAttributeDescriptionCount & 0x7); //number of attribute arrays, 0 -> 8

	_shaderModule* vertModule = 0, *fragModule = 0;
	//it could be that all stages are contained in a single module, or have separate modules
	if(cb->graphicsPipeline->modules[ulog2(VK_SHADER_STAGE_FRAGMENT_BIT)])
	{
		fragModule = cb->graphicsPipeline->modules[ulog2(VK_SHADER_STAGE_FRAGMENT_BIT)];
	}
	if(cb->graphicsPipeline->modules[ulog2(VK_SHADER_STAGE_VERTEX_BIT)])
	{
		vertModule = cb->graphicsPipeline->modules[ulog2(VK_SHADER_STAGE_VERTEX_BIT)];
	}
	if(!vertModule)
	{
		vertModule = fragModule;
	}
	if(!fragModule)
	{
		fragModule = vertModule;
	}

	assert(fragModule);
	assert(vertModule);
	assert(fragModule->bos[VK_RPI_ASSEMBLY_TYPE_FRAGMENT]);
	assert(vertModule->bos[VK_RPI_ASSEMBLY_TYPE_VERTEX]);
	assert(vertModule->bos[VK_RPI_ASSEMBLY_TYPE_COORDINATE]);

	PROFILEEND(&drawCommon1);
	static uint32_t drawCommon2;
	PROFILESTART(&drawCommon2);

	//emit shader record
	ControlListAddress fragCode = {
		.handle = fragModule->bos[VK_RPI_ASSEMBLY_TYPE_FRAGMENT],
		.offset = 0,
	};
	ControlListAddress vertCode = {
		.handle = vertModule->bos[VK_RPI_ASSEMBLY_TYPE_VERTEX],
		.offset = 0,
	};
	ControlListAddress coordCode = {
		.handle = vertModule->bos[VK_RPI_ASSEMBLY_TYPE_COORDINATE],
		.offset = 0,
	};
	commandBuffer->shaderRecCount++;
	clFit(&commandBuffer->shaderRecCl, 12 * sizeof(uint32_t) + 104 + 8 * 32);
	ControlList relocCl = commandBuffer->shaderRecCl;

	uint32_t attribCount = 0;
	uint32_t attribSelectBits = 0;
	for(uint32_t c = 0; c < cb->graphicsPipeline->vertexAttributeDescriptionCount; ++c)
	{
		if(cb->vertexBuffers[cb->graphicsPipeline->vertexAttributeDescriptions[c].binding])
		{
			attribCount++;
			attribSelectBits |= 1 << cb->graphicsPipeline->vertexAttributeDescriptions[c].location;
		}
	}

	//attrib size is simply how many times we read VPM (x4 bytes) in VS and CS
	//attrib records:
	//base address, num bytes, stride are for the kernel side to assemble our vpm
	//VPM offsets: these would be how many vpm reads were before a specific attrib (x4 bytes)
	//we don't really have that info, so we have to play with strides/formats
	uint32_t vertexAttribSize = 0, coordAttribSize = 0;
	for(uint32_t c = 0; c < cb->graphicsPipeline->vertexAttributeDescriptionCount; ++c)
	{
		vertexAttribSize += getFormatBpp(cb->graphicsPipeline->vertexAttributeDescriptions[c].format) >> 3;
		if(cb->graphicsPipeline->vertexAttributeDescriptions[c].location == 0)
		{
			//this should be the vertex coordinates location
			coordAttribSize = getFormatBpp(cb->graphicsPipeline->vertexAttributeDescriptions[c].format) >> 3;
		}
	}
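	//eg. with a hypothetical layout of location 0 = vec3 position (12 bytes) and location 1 = vec2 uv (8 bytes),
	//vertexAttribSize would be 20 bytes (5 VPM reads) and coordAttribSize 12 bytes (3 VPM reads)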

	assert(vertModule->numVertVPMreads == vertexAttribSize >> 2);
	assert(vertModule->numCoordVPMreads == coordAttribSize >> 2);

	//number of attribs
	//3 is the number of type of possible shaders
	for(uint32_t c = 0; c < (3 + attribCount) * 4; ++c)
	{
		clInsertNop(&commandBuffer->shaderRecCl);
	}

	clFit(&commandBuffer->handlesCl, (3 + 8) * 4);
	clInsertShaderRecord(&commandBuffer->shaderRecCl,
						 &relocCl,
						 &commandBuffer->handlesCl,
						 ((CLMarker*)getCPAptrFromOffset(cb->binCl.CPA, cb->binCl.currMarkerOffset))->handlesBufOffset + cb->handlesCl.offset,
						 ((CLMarker*)getCPAptrFromOffset(cb->binCl.CPA, cb->binCl.currMarkerOffset))->handlesSize,
						 !fragModule->hasThreadSwitch,
						 0, //TODO point size included in shaded vertex data?
						 1, //enable clipping
						 0, //TODO fragment number of used uniforms?
						 fragModule->numVaryings, //fragment number of varyings
						 0, //fragment uniform address?
						 fragCode, //fragment code address
						 0, //TODO vertex number of used uniforms?
						 attribSelectBits, //vertex attribute array select bits
						 vertexAttribSize, //vertex total attribute size
						 0, //vertex uniform address
						 vertCode, //vertex shader code address
						 0, //TODO coordinate number of used uniforms?
						 //TODO how do we know which attribute contains the vertices?
						 //for now the first one will be hardcoded to have the vertices...
						 1 << 0, //coordinate attribute array select bits
						 coordAttribSize, //coordinate total attribute size
						 0, //coordinate uniform address
						 coordCode //coordinate shader code address
						 );

	uint32_t vertexAttribOffsets[8] = {};
	uint32_t coordAttribOffsets[8] = {};
	for(uint32_t c = 1; c < 8; ++c)
	{
		for(uint32_t d = 0; d < cb->graphicsPipeline->vertexAttributeDescriptionCount; ++d)
		{
			if(cb->graphicsPipeline->vertexAttributeDescriptions[d].location < c)
			{
				vertexAttribOffsets[c] += getFormatBpp(cb->graphicsPipeline->vertexAttributeDescriptions[d].format) >> 3;
			}
		}
	}

	for(uint32_t c = 1; c < 8; ++c)
	{
		coordAttribOffsets[c] = vertexAttribOffsets[1];
	}
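	//vertexAttribOffsets[c] is the VPM byte offset of attribute location c, ie. the summed size of all lower locations
	//(for the hypothetical vec3 + vec2 layout above that would be {0, 12, 20, 20, ...});
	//the coordinate shader only reads location 0 (the position), so every other attribute is simply placed right after it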

	uint32_t maxIndex = 0xffff;
	for(uint32_t c = 0; c < cb->graphicsPipeline->vertexAttributeDescriptionCount; ++c)
	{
		if(cb->vertexBuffers[cb->graphicsPipeline->vertexAttributeDescriptions[c].binding])
		{
			uint32_t formatByteSize = getFormatBpp(cb->graphicsPipeline->vertexAttributeDescriptions[c].format) >> 3;
			uint32_t stride = cb->graphicsPipeline->vertexBindingDescriptions[cb->graphicsPipeline->vertexAttributeDescriptions[c].binding].stride;
			if(stride > 0)
			{
				uint32_t usedIndices = (cb->vertexBuffers[cb->graphicsPipeline->vertexAttributeDescriptions[c].binding]->boundMem->size
									   - cb->graphicsPipeline->vertexAttributeDescriptions[c].offset
									   - vertexOffset * stride
									   - cb->vertexBufferOffsets[cb->graphicsPipeline->vertexAttributeDescriptions[c].binding]
									   - cb->vertexBuffers[cb->graphicsPipeline->vertexAttributeDescriptions[c].binding]->boundOffset
									   - formatByteSize) / stride;
				if(usedIndices < maxIndex)
				{
					maxIndex = usedIndices;
				}
			}
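			//usedIndices is the largest index i for which attribOffset + i * stride + formatByteSize
			//still fits inside the bound memory, so maxIndex ends up as the highest index
			//that can be fetched safely across all enabled attributes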

			ControlListAddress vertexBuffer = {
				.handle = cb->vertexBuffers[cb->graphicsPipeline->vertexAttributeDescriptions[c].binding]->boundMem->bo,
				.offset = cb->graphicsPipeline->vertexAttributeDescriptions[c].offset
						+ vertexOffset * stride
						+ cb->vertexBufferOffsets[cb->graphicsPipeline->vertexAttributeDescriptions[c].binding]
						+ cb->vertexBuffers[cb->graphicsPipeline->vertexAttributeDescriptions[c].binding]->boundOffset,
			};
			clInsertAttributeRecord(&commandBuffer->shaderRecCl,
									&relocCl,
									&commandBuffer->handlesCl,
									((CLMarker*)getCPAptrFromOffset(cb->binCl.CPA, cb->binCl.currMarkerOffset))->handlesBufOffset + cb->handlesCl.offset,
									((CLMarker*)getCPAptrFromOffset(cb->binCl.CPA, cb->binCl.currMarkerOffset))->handlesSize,
									vertexBuffer, //reloc address
									formatByteSize,
									stride,
									vertexAttribOffsets[cb->graphicsPipeline->vertexAttributeDescriptions[c].location], //vertex vpm offset
									coordAttribOffsets[cb->graphicsPipeline->vertexAttributeDescriptions[c].location] //coordinate vpm offset
									);
		}
	}

	PROFILEEND(&drawCommon2);
	static uint32_t drawCommon3;
	PROFILESTART(&drawCommon3);

	//write uniforms
	_pipelineLayout* pl = cb->graphicsPipeline->layout;

	assert(vertModule->numVertVPMwrites - 3 == fragModule->numVaryings);
	assert(vertModule->numCoordVPMwrites == 7);
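	//the vertex shader's shaded-vertex VPM output is the packed Xs/Ys word, Zs and 1/Wc (3 words) plus one word per varying,
	//while the coordinate shader writes the full Xc, Yc, Zc, Wc clip coords plus Xs/Ys, Zs and 1/Wc (7 words)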
	uint32_t numTextureSamples = 0;
	uint32_t numFragUniformReads = 0;

	//kernel side expects relocations first!
	for(uint32_t c = 0; c < fragModule->numMappings[VK_RPI_ASSEMBLY_TYPE_FRAGMENT]; ++c)
	{
		VkRpiAssemblyMappingEXT mapping = fragModule->mappings[VK_RPI_ASSEMBLY_TYPE_FRAGMENT][c];

		if(mapping.mappingType == VK_RPI_ASSEMBLY_MAPPING_TYPE_DESCRIPTOR)
		{
			numTextureSamples++;

			if(mapping.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
			   mapping.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
			   mapping.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
			{
				_descriptorSet* ds = getMapElement(pl->descriptorSetBindingMap, mapping.descriptorSet);
				_descriptorImage* di = getMapElement(ds->imageBindingMap, mapping.descriptorBinding);
				di += mapping.descriptorArrayElement;

				//emit reloc for texture BO
				clFit(&commandBuffer->handlesCl, 4);
				uint32_t idx = clGetHandleIndex(&commandBuffer->handlesCl, ((CLMarker*)getCPAptrFromOffset(cb->binCl.CPA, cb->binCl.currMarkerOffset))->handlesBufOffset + cb->handlesCl.offset, ((CLMarker*)getCPAptrFromOffset(cb->binCl.CPA, cb->binCl.currMarkerOffset))->handlesSize, di->imageView->image->boundMem->bo);

				//emit tex bo reloc index
				clFit(&commandBuffer->uniformsCl, 4);
				clInsertData(&commandBuffer->uniformsCl, 4, &idx);
				numFragUniformReads++;
			}
			else if(mapping.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
					mapping.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ||
					mapping.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
					mapping.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)
			{
				_descriptorSet* ds = getMapElement(pl->descriptorSetBindingMap, mapping.descriptorSet);
				_descriptorBuffer* db = getMapElement(ds->bufferBindingMap, mapping.descriptorBinding);
				db += mapping.descriptorArrayElement;

				//emit reloc for BO
				clFit(&commandBuffer->handlesCl, 4);
				uint32_t idx = clGetHandleIndex(&commandBuffer->handlesCl, ((CLMarker*)getCPAptrFromOffset(cb->binCl.CPA, cb->binCl.currMarkerOffset))->handlesBufOffset + cb->handlesCl.offset, ((CLMarker*)getCPAptrFromOffset(cb->binCl.CPA, cb->binCl.currMarkerOffset))->handlesSize, db->buffer->boundMem->bo);

				//emit bo reloc index
				clFit(&commandBuffer->uniformsCl, 4);
				clInsertData(&commandBuffer->uniformsCl, 4, &idx);
				numFragUniformReads++;
			}
			else if(mapping.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER ||
					mapping.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)
			{
				_descriptorSet* ds = getMapElement(pl->descriptorSetBindingMap, mapping.descriptorSet);
				_descriptorTexelBuffer* dtb = getMapElement(ds->texelBufferBindingMap, mapping.descriptorBinding);
				dtb += mapping.descriptorArrayElement;

				//emit reloc for BO
				clFit(&commandBuffer->handlesCl, 4);
				uint32_t idx = clGetHandleIndex(&commandBuffer->handlesCl, ((CLMarker*)getCPAptrFromOffset(cb->binCl.CPA, cb->binCl.currMarkerOffset))->handlesBufOffset + cb->handlesCl.offset, ((CLMarker*)getCPAptrFromOffset(cb->binCl.CPA, cb->binCl.currMarkerOffset))->handlesSize, dtb->bufferView->buffer->boundMem->bo);

				//emit bo reloc index
				clFit(&commandBuffer->uniformsCl, 4);
				clInsertData(&commandBuffer->uniformsCl, 4, &idx);
				numFragUniformReads++;
			}
			else
			{
				assert(0); //shouldn't happen
			}
		}
	}

	assert(numTextureSamples == fragModule->numTextureSamples);

	//after relocs we can proceed with the usual uniforms
	for(uint32_t c = 0; c < fragModule->numMappings[VK_RPI_ASSEMBLY_TYPE_FRAGMENT]; ++c)
	{
		VkRpiAssemblyMappingEXT mapping = fragModule->mappings[VK_RPI_ASSEMBLY_TYPE_FRAGMENT][c];

		if(mapping.mappingType == VK_RPI_ASSEMBLY_MAPPING_TYPE_PUSH_CONSTANT)
		{
			numFragUniformReads++;
			clFit(&commandBuffer->uniformsCl, 4);
			clInsertData(&commandBuffer->uniformsCl, 4, cb->pushConstantBufferPixel + mapping.resourceOffset);
		}
		else if(mapping.mappingType == VK_RPI_ASSEMBLY_MAPPING_TYPE_DESCRIPTOR)
		{
			if(mapping.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
			   mapping.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
			   mapping.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
			{
				_descriptorSet* ds = getMapElement(pl->descriptorSetBindingMap, mapping.descriptorSet);
				_descriptorImage* di = getMapElement(ds->imageBindingMap, mapping.descriptorBinding);
				di += mapping.descriptorArrayElement;

				uint32_t cubemapStride = di->imageView->image->size / 6;

				//fprintf(stderr, "cubemap stride %i\n", cubemapStride);
				uint32_t numLevels = 0;
				numLevels = di->imageView->subresourceRange.levelCount < di->imageView->image->miplevels ? di->imageView->subresourceRange.levelCount : di->imageView->image->miplevels;

				uint32_t params[4];
				encodeTextureUniform(params,
									 numLevels - 1,
									 getTextureDataType(di->imageView->interpretedFormat),
									 di->imageView->viewType == VK_IMAGE_VIEW_TYPE_CUBE,
									 cubemapStride >> 12, //cubemap stride in multiples of 4KB
									 (di->imageView->subresourceRange.baseArrayLayer * cubemapStride + di->imageView->image->levelOffsets[0] + di->imageView->image->boundOffset) >> 12, //Image level 0 offset in multiples of 4KB
									 di->imageView->image->height & 2047,
									 di->imageView->image->width & 2047,
									 getMinFilterType(di->sampler->minFilter, di->sampler->mipmapMode),
									 di->sampler->magFilter == VK_FILTER_NEAREST,
									 getWrapMode(di->sampler->addressModeU),
									 getWrapMode(di->sampler->addressModeV),
									 di->sampler->disableAutoLod
									 );
				uint32_t size = 0;
				if(di->imageView->viewType == VK_IMAGE_VIEW_TYPE_1D)
				{
					size = 4;
				}
				else if(di->imageView->viewType == VK_IMAGE_VIEW_TYPE_2D)
				{
					size = 8;
				}
				else if(di->imageView->viewType == VK_IMAGE_VIEW_TYPE_CUBE)
				{
					size = 12;
				}
				else
				{
					assert(0); //unsupported
				}
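				//size is the number of bytes of texture parameter uniforms the shader will read for this sampler:
				//presumably one 32 bit parameter word for 1D, two for 2D, and three (including the cubemap stride) for cube views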

				//TMU0_B requires an extra uniform written
				//we need to signal that somehow from API side
				//if mode is cubemap we don't need an extra uniform, it's included!
				if(di->imageView->viewType != VK_IMAGE_VIEW_TYPE_CUBE && di->sampler->disableAutoLod)
				{
					size += 4;
				}
				numFragUniformReads += size >> 2;

				//emit tex parameters
				clFit(&commandBuffer->uniformsCl, size);
				clInsertData(&commandBuffer->uniformsCl, size, params);
			}
		}
	}

	//assert(numFragUniformReads == fragModule->numFragUniformReads);

	PROFILEEND(&drawCommon3);
	static uint32_t drawCommon4;
	PROFILESTART(&drawCommon4);

	uint32_t numVertUniformReads = 0;
	//vertex and then coordinate
	for(uint32_t c = 0; c < vertModule->numMappings[VK_RPI_ASSEMBLY_TYPE_VERTEX]; ++c)
	{
		VkRpiAssemblyMappingEXT mapping = vertModule->mappings[VK_RPI_ASSEMBLY_TYPE_VERTEX][c];
		if(mapping.mappingType == VK_RPI_ASSEMBLY_MAPPING_TYPE_PUSH_CONSTANT)
		{
			numVertUniformReads++;
			clFit(&commandBuffer->uniformsCl, 4);
			clInsertData(&commandBuffer->uniformsCl, 4, cb->pushConstantBufferVertex + mapping.resourceOffset);
		}
		else if(mapping.mappingType == VK_RPI_ASSEMBLY_MAPPING_TYPE_DESCRIPTOR)
		{
		}
		else
		{
			assert(0); //shouldn't happen
		}
	}

	assert(numVertUniformReads == vertModule->numVertUniformReads);

	uint32_t numCoordUniformReads = 0;
	//if there are no coordinate mappings, just use the vertex ones
	VkRpiAssemblyTypeEXT coordMappingType = VK_RPI_ASSEMBLY_TYPE_COORDINATE;
	if(vertModule->numMappings[VK_RPI_ASSEMBLY_TYPE_COORDINATE] < 1)
	{
		coordMappingType = VK_RPI_ASSEMBLY_TYPE_VERTEX;
	}
	for(uint32_t c = 0; c < vertModule->numMappings[coordMappingType]; ++c)
	{
		VkRpiAssemblyMappingEXT mapping = vertModule->mappings[coordMappingType][c];
		if(mapping.mappingType == VK_RPI_ASSEMBLY_MAPPING_TYPE_PUSH_CONSTANT)
		{
			numCoordUniformReads++;
			clFit(&commandBuffer->uniformsCl, 4);
			clInsertData(&commandBuffer->uniformsCl, 4, cb->pushConstantBufferVertex + mapping.resourceOffset);
		}
		else if(mapping.mappingType == VK_RPI_ASSEMBLY_MAPPING_TYPE_DESCRIPTOR)
		{
		}
		else
		{
			assert(0); //shouldn't happen
		}
	}

	assert(numCoordUniformReads == vertModule->numCoordUniformReads);

	PROFILEEND(&drawCommon4);

	return maxIndex;
}

/*
 * https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#vkCmdDraw
 */
void RPIFUNC(vkCmdDraw)(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance)
{
	PROFILESTART(RPIFUNC(vkCmdDraw));

	assert(commandBuffer);

	if(instanceCount != 1 || firstInstance != 0)
	{
		unsigned instancing;
		UNSUPPORTED(instancing);
	}

	assert((firstVertex + vertexCount) <= ((1 << 16) - 1));
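	//only 16 bit indices are supported (see the >65k vertices TODO in drawCommon),
	//so a single draw can reference at most 65535 vertices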

	drawCommon(commandBuffer, 0);

	_commandBuffer* cb = commandBuffer;

	//Submit draw call: vertex Array Primitives
	clFit(&commandBuffer->binCl, V3D21_VERTEX_ARRAY_PRIMITIVES_length);
	clInsertVertexArrayPrimitives(&commandBuffer->binCl, firstVertex, vertexCount, getPrimitiveMode(cb->graphicsPipeline->topology));

	((CLMarker*)getCPAptrFromOffset(cb->binCl.CPA, cb->binCl.currMarkerOffset))->numDrawCallsSubmitted++;

	PROFILEEND(RPIFUNC(vkCmdDraw));
}

VKAPI_ATTR void VKAPI_CALL RPIFUNC(vkCmdDrawIndexed)(
	VkCommandBuffer commandBuffer,
	uint32_t indexCount,
	uint32_t instanceCount,
	uint32_t firstIndex,
	int32_t vertexOffset,
	uint32_t firstInstance)
{
	PROFILESTART(RPIFUNC(vkCmdDrawIndexed));

	assert(commandBuffer);

	if(instanceCount != 1 || firstInstance != 0)
	{
		unsigned instancing;
		UNSUPPORTED(instancing);
	}

	assert((firstIndex + indexCount) <= ((1 << 16) - 1));

	uint32_t maxIndex = drawCommon(commandBuffer, vertexOffset);

	_commandBuffer* cb = commandBuffer;

	clFit(&commandBuffer->handlesCl, 4);
	uint32_t idx = clGetHandleIndex(&commandBuffer->handlesCl, ((CLMarker*)getCPAptrFromOffset(cb->binCl.CPA, cb->binCl.currMarkerOffset))->handlesBufOffset + cb->handlesCl.offset, ((CLMarker*)getCPAptrFromOffset(cb->binCl.CPA, cb->binCl.currMarkerOffset))->handlesSize, cb->indexBuffer->boundMem->bo);

	clInsertGEMRelocations(&commandBuffer->binCl, idx, 0);

	//Submit draw call: vertex Array Primitives
	clFit(&commandBuffer->binCl, V3D21_VERTEX_ARRAY_PRIMITIVES_length);
	clInsertIndexedPrimitiveList(&commandBuffer->binCl,
								 maxIndex, //max index
								 cb->indexBuffer->boundOffset + cb->indexBufferOffset + firstIndex * 2,
								 indexCount,
								 1, //we only support 16 bit indices
								 getPrimitiveMode(cb->graphicsPipeline->topology));

	((CLMarker*)getCPAptrFromOffset(cb->binCl.CPA, cb->binCl.currMarkerOffset))->numDrawCallsSubmitted++;

	PROFILEEND(RPIFUNC(vkCmdDrawIndexed));
}

VKAPI_ATTR void VKAPI_CALL RPIFUNC(vkCmdDrawIndexedIndirect)(
	VkCommandBuffer commandBuffer,
	VkBuffer buffer,
	VkDeviceSize offset,
	uint32_t drawCount,
	uint32_t stride)
{
	UNSUPPORTED(vkCmdDrawIndexedIndirect);
}

VKAPI_ATTR void VKAPI_CALL RPIFUNC(vkCmdDrawIndirect)(
	VkCommandBuffer commandBuffer,
	VkBuffer buffer,
	VkDeviceSize offset,
	uint32_t drawCount,
	uint32_t stride)
{
	UNSUPPORTED(vkCmdDrawIndirect);
}