anv: handle push descriptor sets when they are sent with push constants

When vkCmdPushDescriptorSetKHR is used, the descriptor set is allocated
internally and does not belong to any pool. Such a descriptor set is
visible on the GPU side because it is part of the dynamic state stream,
but we still have to store its address in the push-constant array of
descriptor set addresses.

Complements: 379b9bb7b0 ("anv: Support fetching descriptor addresses from push constants")

Signed-off-by: Marcin Ślusarz <marcin.slusarz@intel.com>
Reviewed-by: Jason Ekstrand <jason@jlekstrand.net>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/11577>
Author:    Marcin Ślusarz
Date:      2021-06-28 13:06:07 +02:00
Committed: Marge Bot
Parent:    79a50c6f54
Commit:    904bd8f358
3 changed files with 35 additions and 28 deletions

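For context, the application-side pattern that reaches the changed path looks roughly like the sketch below: a descriptor pushed with vkCmdPushDescriptorSetKHR at the ray-tracing bind point, whose set address the driver must then hand to the shaders through push constants. This is a minimal illustrative sketch, not code from the commit; cmd_buf, rt_layout, params_buf and record_push_descriptor are hypothetical, and pipeline/SBT setup is elided.

#include <vulkan/vulkan.h>

/* Sketch of the client-side sequence this commit cares about: the pushed
 * descriptor set has no VkDescriptorPool behind it (set->pool == NULL in
 * anv), yet its GPU address still has to land in the push-constant
 * desc_sets[] array consumed by ray-tracing shaders. */
static void
record_push_descriptor(VkCommandBuffer cmd_buf,                 /* hypothetical */
                       PFN_vkCmdPushDescriptorSetKHR push_desc, /* from vkGetDeviceProcAddr */
                       VkPipelineLayout rt_layout,              /* hypothetical */
                       VkBuffer params_buf)                     /* hypothetical */
{
   VkDescriptorBufferInfo buf_info = {
      .buffer = params_buf,
      .offset = 0,
      .range  = VK_WHOLE_SIZE,
   };

   VkWriteDescriptorSet write = {
      .sType           = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
      .dstSet          = VK_NULL_HANDLE, /* ignored for push descriptors */
      .dstBinding      = 0,
      .dstArrayElement = 0,
      .descriptorCount = 1,
      .descriptorType  = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
      .pBufferInfo     = &buf_info,
   };

   /* The descriptor set is allocated internally by the driver; no pool. */
   push_desc(cmd_buf, VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, rt_layout,
             0 /* set index */, 1, &write);

   /* A later vkCmdTraceRaysKHR() reads the set's address out of the
    * push constants that this commit now fills correctly. */
}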

@@ -931,7 +931,12 @@ anv_cmd_buffer_bind_descriptor_set(struct anv_cmd_buffer *cmd_buffer,
    }
 
    VkShaderStageFlags dirty_stages = 0;
-   if (pipe_state->descriptors[set_index] != set) {
+   /* If it's a push descriptor set, we have to flag things as dirty
+    * regardless of whether or not the CPU-side data structure changed as we
+    * may have edited in-place.
+    */
+   if (pipe_state->descriptors[set_index] != set ||
+       anv_descriptor_set_is_push(set)) {
       pipe_state->descriptors[set_index] = set;
 
       /* Ray-tracing shaders are entirely bindless and so they don't have
@@ -941,23 +946,19 @@ anv_cmd_buffer_bind_descriptor_set(struct anv_cmd_buffer *cmd_buffer,
       if (bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR) {
          struct anv_push_constants *push = &pipe_state->push_constants;
 
-         push->desc_sets[set_index] = anv_address_physical(set->desc_addr);
-
-         anv_reloc_list_add_bo(cmd_buffer->batch.relocs,
-                               cmd_buffer->batch.alloc,
-                               set->pool->bo);
+         struct anv_address addr = anv_descriptor_set_address(cmd_buffer, set);
+         push->desc_sets[set_index] = anv_address_physical(addr);
+
+         if (addr.bo) {
+            anv_reloc_list_add_bo(cmd_buffer->batch.relocs,
+                                  cmd_buffer->batch.alloc,
+                                  addr.bo);
+         }
       }
 
       dirty_stages |= stages;
    }
 
-   /* If it's a push descriptor set, we have to flag things as dirty
-    * regardless of whether or not the CPU-side data structure changed as we
-    * may have edited in-place.
-    */
-   if (set->pool == NULL)
-      dirty_stages |= stages;
-
    if (dynamic_offsets) {
       if (set_layout->dynamic_offset_count > 0) {
          struct anv_push_constants *push = &pipe_state->push_constants;


@@ -2060,6 +2060,12 @@ struct anv_descriptor_set {
    struct anv_descriptor descriptors[0];
 };
 
+static inline bool
+anv_descriptor_set_is_push(struct anv_descriptor_set *set)
+{
+   return set->pool == NULL;
+}
+
 struct anv_buffer_view {
    struct vk_object_base base;
@@ -2090,6 +2096,22 @@ struct anv_push_descriptor_set {
    struct anv_buffer_view buffer_views[MAX_PUSH_DESCRIPTORS];
 };
 
+static inline struct anv_address
+anv_descriptor_set_address(struct anv_cmd_buffer *cmd_buffer,
+                           struct anv_descriptor_set *set)
+{
+   if (anv_descriptor_set_is_push(set)) {
+      /* We have to flag push descriptor set as used on the GPU
+       * so that the next time we push descriptors, we grab a new memory.
+       */
+      struct anv_push_descriptor_set *push_set =
+         (struct anv_push_descriptor_set *)set;
+      push_set->set_used_on_gpu = true;
+   }
+
+   return set->desc_addr;
+}
+
 struct anv_descriptor_pool {
    struct vk_object_base base;

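With the helper now in the header, both the binding-table emission path and anv_cmd_buffer_bind_descriptor_set can resolve a set's address through the same function, which also records the used-on-GPU side effect for push sets. Below is a rough sketch of how a caller is meant to use it, assuming the definitions above; descriptor_set_gpu_address is a made-up name, not driver code.

/* Hypothetical consumer: resolve the physical address a shader will see.
 * anv_descriptor_set_address() marks a push descriptor set as used on the
 * GPU as a side effect, so the next vkCmdPushDescriptorSetKHR allocates
 * fresh memory instead of overwriting a set that may still be in flight. */
static uint64_t
descriptor_set_gpu_address(struct anv_cmd_buffer *cmd_buffer,
                           struct anv_descriptor_set *set)
{
   struct anv_address addr = anv_descriptor_set_address(cmd_buffer, set);
   return anv_address_physical(addr);
}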

@@ -2547,22 +2547,6 @@ cmd_buffer_alloc_push_constants(struct anv_cmd_buffer *cmd_buffer)
cmd_buffer->state.push_constants_dirty |= VK_SHADER_STAGE_ALL_GRAPHICS; cmd_buffer->state.push_constants_dirty |= VK_SHADER_STAGE_ALL_GRAPHICS;
} }
static struct anv_address
anv_descriptor_set_address(struct anv_cmd_buffer *cmd_buffer,
struct anv_descriptor_set *set)
{
if (set->pool == NULL) {
/* This is a push descriptor set. We have to flag it as used on the GPU
* so that the next time we push descriptors, we grab a new memory.
*/
struct anv_push_descriptor_set *push_set =
(struct anv_push_descriptor_set *)set;
push_set->set_used_on_gpu = true;
}
return set->desc_addr;
}
static VkResult static VkResult
emit_binding_table(struct anv_cmd_buffer *cmd_buffer, emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
struct anv_cmd_pipeline_state *pipe_state, struct anv_cmd_pipeline_state *pipe_state,