nvk: Rework descriptor set binding
This prepares us for VK_EXT_graphics_pipeline_library by allowing null descriptor sets to be bound and handling holes in pipeline layouts. We also add a set_dynamic_buffer_start map to the root descriptor table which says where in dynamic_buffers each set starts. This can be used by the pipeline layout lowering in the case where we can't statically determine the dynamic buffer index for a binding. Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/27048>
This commit is contained in:

committed by
Marge Bot

parent
a11adbe408
commit
e0d907f56f
@@ -572,12 +572,29 @@ nvk_CmdBindDescriptorSets(VkCommandBuffer commandBuffer,
|
|||||||
struct nvk_descriptor_state *desc =
|
struct nvk_descriptor_state *desc =
|
||||||
nvk_get_descriptors_state(cmd, pipelineBindPoint);
|
nvk_get_descriptors_state(cmd, pipelineBindPoint);
|
||||||
|
|
||||||
|
/* From the Vulkan 1.3.275 spec:
|
||||||
|
*
|
||||||
|
* "When binding a descriptor set (see Descriptor Set Binding) to
|
||||||
|
* set number N...
|
||||||
|
*
|
||||||
|
* If, additionally, the previously bound descriptor set for set
|
||||||
|
* N was bound using a pipeline layout not compatible for set N,
|
||||||
|
* then all bindings in sets numbered greater than N are
|
||||||
|
* disturbed."
|
||||||
|
*
|
||||||
|
* This means that, if some earlier set gets bound in such a way that
|
||||||
|
* it changes set_dynamic_buffer_start[s], this binding is implicitly
|
||||||
|
* invalidated. Therefore, we can always look at the current value
|
||||||
|
* of set_dynamic_buffer_start[s] as the base of our dynamic buffer
|
||||||
|
* range and it's only our responsibility to adjust all
|
||||||
|
* set_dynamic_buffer_start[p] for p > s as needed.
|
||||||
|
*/
|
||||||
|
uint8_t dyn_buffer_start = desc->root.set_dynamic_buffer_start[firstSet];
|
||||||
|
|
||||||
uint32_t next_dyn_offset = 0;
|
uint32_t next_dyn_offset = 0;
|
||||||
for (uint32_t i = 0; i < descriptorSetCount; ++i) {
|
for (uint32_t i = 0; i < descriptorSetCount; ++i) {
|
||||||
unsigned s = i + firstSet;
|
unsigned s = i + firstSet;
|
||||||
VK_FROM_HANDLE(nvk_descriptor_set, set, pDescriptorSets[i]);
|
VK_FROM_HANDLE(nvk_descriptor_set, set, pDescriptorSets[i]);
|
||||||
const struct nvk_descriptor_set_layout *set_layout =
|
|
||||||
vk_to_nvk_descriptor_set_layout(pipeline_layout->set_layouts[s]);
|
|
||||||
|
|
||||||
if (desc->sets[s] != set) {
|
if (desc->sets[s] != set) {
|
||||||
desc->root.sets[s] = nvk_descriptor_set_addr(set);
|
desc->root.sets[s] = nvk_descriptor_set_addr(set);
|
||||||
@@ -589,19 +606,31 @@ nvk_CmdBindDescriptorSets(VkCommandBuffer commandBuffer,
|
|||||||
desc->push_dirty &= ~BITFIELD_BIT(s);
|
desc->push_dirty &= ~BITFIELD_BIT(s);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (set_layout->dynamic_buffer_count > 0) {
|
desc->root.set_dynamic_buffer_start[s] = dyn_buffer_start;
|
||||||
const uint32_t dynamic_buffer_start =
|
|
||||||
nvk_descriptor_set_layout_dynbuf_start(pipeline_layout, s);
|
|
||||||
|
|
||||||
for (uint32_t j = 0; j < set_layout->dynamic_buffer_count; j++) {
|
if (pipeline_layout->set_layouts[s] != NULL) {
|
||||||
struct nvk_buffer_address addr = set->dynamic_buffers[j];
|
const struct nvk_descriptor_set_layout *set_layout =
|
||||||
addr.base_addr += pDynamicOffsets[next_dyn_offset + j];
|
vk_to_nvk_descriptor_set_layout(pipeline_layout->set_layouts[s]);
|
||||||
desc->root.dynamic_buffers[dynamic_buffer_start + j] = addr;
|
|
||||||
|
if (set != NULL && set_layout->dynamic_buffer_count > 0) {
|
||||||
|
for (uint32_t j = 0; j < set_layout->dynamic_buffer_count; j++) {
|
||||||
|
struct nvk_buffer_address addr = set->dynamic_buffers[j];
|
||||||
|
addr.base_addr += pDynamicOffsets[next_dyn_offset + j];
|
||||||
|
desc->root.dynamic_buffers[dyn_buffer_start + j] = addr;
|
||||||
|
}
|
||||||
|
next_dyn_offset += set->layout->dynamic_buffer_count;
|
||||||
}
|
}
|
||||||
next_dyn_offset += set->layout->dynamic_buffer_count;
|
|
||||||
|
dyn_buffer_start += set_layout->dynamic_buffer_count;
|
||||||
|
} else {
|
||||||
|
assert(set == NULL);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
assert(dyn_buffer_start <= NVK_MAX_DYNAMIC_BUFFERS);
|
||||||
assert(next_dyn_offset <= dynamicOffsetCount);
|
assert(next_dyn_offset <= dynamicOffsetCount);
|
||||||
|
|
||||||
|
for (uint32_t s = firstSet + descriptorSetCount; s < NVK_MAX_SETS; s++)
|
||||||
|
desc->root.set_dynamic_buffer_start[s] = dyn_buffer_start;
|
||||||
}
|
}
|
||||||
|
|
||||||
VKAPI_ATTR void VKAPI_CALL
|
VKAPI_ATTR void VKAPI_CALL
|
||||||
|
@@ -56,8 +56,11 @@ struct nvk_root_descriptor_table {
|
|||||||
/* Dynamic buffer bindings */
|
/* Dynamic buffer bindings */
|
||||||
struct nvk_buffer_address dynamic_buffers[NVK_MAX_DYNAMIC_BUFFERS];
|
struct nvk_buffer_address dynamic_buffers[NVK_MAX_DYNAMIC_BUFFERS];
|
||||||
|
|
||||||
|
/* Start index in dynamic_buffers where each set starts */
|
||||||
|
uint8_t set_dynamic_buffer_start[NVK_MAX_SETS];
|
||||||
|
|
||||||
/* enforce alignment to 0x100 as needed pre-Pascal */
|
/* enforce alignment to 0x100 as needed pre-Pascal */
|
||||||
uint8_t __padding[0x20];
|
uint8_t __padding[0x18];
|
||||||
};
|
};
|
||||||
|
|
||||||
/* helper macro for computing root descriptor byte offsets */
|
/* helper macro for computing root descriptor byte offsets */
|
||||||
|
Reference in New Issue
Block a user