spirv: Use the same types for resource indices as pointers

We need more space than just a 32-bit scalar, and we have to burn all
that space anyway, so we may as well expose it to the driver.  This also
fixes a subtle bug when UBOs and SSBOs have different pointer types.

Reviewed-by: Lionel Landwerlin <lionel.g.landwerlin@intel.com>
Author: Jason Ekstrand
Date:   2019-01-12 10:58:33 -06:00
Parent: 9f7ee4f8e5
Commit: 61e009d2c4

5 changed files with 79 additions and 32 deletions
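The idea, in isolation: instead of always giving the vulkan_resource_index, vulkan_resource_reindex, and load_vulkan_descriptor destinations a fixed 1x32-bit SSA value, the destination is now sized like the pointer type the driver advertises for the variable's mode (UBO vs. SSBO). Below is a minimal standalone C sketch of that sizing decision; it is not Mesa code, and the struct vec_type, ptr_type_for_mode(), and resource_index_type() helpers, along with the concrete uvec2 / 64-bit sizes, are illustrative assumptions only.

#include <stdio.h>

enum mode { MODE_UBO, MODE_SSBO };

struct vec_type {
   unsigned num_components;
   unsigned bit_size;
};

/* Stand-in for vtn_ptr_type_for_mode(): each mode may advertise a different
 * pointer layout.  The uvec2 / 64-bit choices here are made up for
 * illustration, not what any particular driver uses. */
static struct vec_type
ptr_type_for_mode(enum mode m)
{
   switch (m) {
   case MODE_UBO:
      return (struct vec_type){ .num_components = 2, .bit_size = 32 };
   case MODE_SSBO:
   default:
      return (struct vec_type){ .num_components = 1, .bit_size = 64 };
   }
}

/* Stand-in for sizing the resource-index destination: when UBO/SSBO access
 * is lowered to offsets the old 1x32 index is kept, otherwise the index
 * takes the dimensionality of the pointer type for the mode. */
static struct vec_type
resource_index_type(enum mode m, int lower_ubo_ssbo_access_to_offsets)
{
   if (lower_ubo_ssbo_access_to_offsets)
      return (struct vec_type){ .num_components = 1, .bit_size = 32 };

   return ptr_type_for_mode(m);
}

int
main(void)
{
   static const char *names[] = { "ubo", "ssbo" };

   for (int m = MODE_UBO; m <= MODE_SSBO; m++) {
      struct vec_type t = resource_index_type((enum mode)m, 0);
      printf("%s resource index: %u x %u bits\n",
             names[m], t.num_components, t.bit_size);
   }
   return 0;
}

Running the sketch prints one line per mode, which is enough to show why a UBO index and an SSBO index can no longer share a single hard-coded 1x32 layout once their pointer types differ.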


@@ -98,6 +98,19 @@ vk_desc_type_for_mode(struct vtn_builder *b, enum vtn_variable_mode mode)
    }
 }
 
+static const struct glsl_type *
+vtn_ptr_type_for_mode(struct vtn_builder *b, enum vtn_variable_mode mode)
+{
+   switch (mode) {
+   case vtn_variable_mode_ubo:
+      return b->options->ubo_ptr_type;
+   case vtn_variable_mode_ssbo:
+      return b->options->ssbo_ptr_type;
+   default:
+      vtn_fail("Invalid mode for vulkan_resource_index");
+   }
+}
+
 static nir_ssa_def *
 vtn_variable_resource_index(struct vtn_builder *b, struct vtn_variable *var,
                             nir_ssa_def *desc_array_index)
@@ -115,7 +128,13 @@ vtn_variable_resource_index(struct vtn_builder *b, struct vtn_variable *var,
    nir_intrinsic_set_binding(instr, var->binding);
    nir_intrinsic_set_desc_type(instr, vk_desc_type_for_mode(b, var->mode));
-   nir_ssa_dest_init(&instr->instr, &instr->dest, 1, 32, NULL);
+   const struct glsl_type *index_type =
+      b->options->lower_ubo_ssbo_access_to_offsets ?
+      glsl_uint_type() : vtn_ptr_type_for_mode(b, var->mode);
+
+   instr->num_components = glsl_get_vector_elements(index_type);
+   nir_ssa_dest_init(&instr->instr, &instr->dest, instr->num_components,
+                     glsl_get_bit_size(index_type), NULL);
    nir_builder_instr_insert(&b->nb, &instr->instr);
 
    return &instr->dest.ssa;
@@ -132,7 +151,13 @@ vtn_resource_reindex(struct vtn_builder *b, enum vtn_variable_mode mode,
    instr->src[1] = nir_src_for_ssa(offset_index);
    nir_intrinsic_set_desc_type(instr, vk_desc_type_for_mode(b, mode));
-   nir_ssa_dest_init(&instr->instr, &instr->dest, 1, 32, NULL);
+   const struct glsl_type *index_type =
+      b->options->lower_ubo_ssbo_access_to_offsets ?
+      glsl_uint_type() : vtn_ptr_type_for_mode(b, mode);
+
+   instr->num_components = glsl_get_vector_elements(index_type);
+   nir_ssa_dest_init(&instr->instr, &instr->dest, instr->num_components,
+                     glsl_get_bit_size(index_type), NULL);
    nir_builder_instr_insert(&b->nb, &instr->instr);
 
    return &instr->dest.ssa;
@@ -140,17 +165,20 @@ vtn_resource_reindex(struct vtn_builder *b, enum vtn_variable_mode mode,
 
 static nir_ssa_def *
 vtn_descriptor_load(struct vtn_builder *b, enum vtn_variable_mode mode,
-                    const struct glsl_type *desc_type, nir_ssa_def *desc_index)
+                    nir_ssa_def *desc_index)
 {
    nir_intrinsic_instr *desc_load =
       nir_intrinsic_instr_create(b->nb.shader,
                                  nir_intrinsic_load_vulkan_descriptor);
    desc_load->src[0] = nir_src_for_ssa(desc_index);
-   desc_load->num_components = glsl_get_vector_elements(desc_type);
    nir_intrinsic_set_desc_type(desc_load, vk_desc_type_for_mode(b, mode));
+
+   const struct glsl_type *ptr_type = vtn_ptr_type_for_mode(b, mode);
+
+   desc_load->num_components = glsl_get_vector_elements(ptr_type);
    nir_ssa_dest_init(&desc_load->instr, &desc_load->dest,
                      desc_load->num_components,
-                     glsl_get_bit_size(desc_type), NULL);
+                     glsl_get_bit_size(ptr_type), NULL);
    nir_builder_instr_insert(&b->nb, &desc_load->instr);
 
    return &desc_load->dest.ssa;
@@ -254,8 +282,7 @@ vtn_nir_deref_pointer_dereference(struct vtn_builder *b,
        * final block index. Insert a descriptor load and cast to a deref to
        * start the deref chain.
        */
-      nir_ssa_def *desc =
-         vtn_descriptor_load(b, base->mode, base->ptr_type->type, block_index);
+      nir_ssa_def *desc = vtn_descriptor_load(b, base->mode, block_index);
 
       assert(base->mode == vtn_variable_mode_ssbo ||
              base->mode == vtn_variable_mode_ubo);
@@ -1765,10 +1792,6 @@ vtn_pointer_to_ssa(struct vtn_builder *b, struct vtn_pointer *ptr)
    if (vtn_pointer_is_external_block(b, ptr) &&
        vtn_type_contains_block(b, ptr->type) &&
        ptr->mode != vtn_variable_mode_phys_ssbo) {
-      const unsigned bit_size = glsl_get_bit_size(ptr->ptr_type->type);
-      const unsigned num_components =
-         glsl_get_vector_elements(ptr->ptr_type->type);
-
       /* In this case, we're looking for a block index and not an actual
        * deref.
        *
@@ -1793,13 +1816,7 @@ vtn_pointer_to_ssa(struct vtn_builder *b, struct vtn_pointer *ptr)
          ptr = vtn_nir_deref_pointer_dereference(b, ptr, &chain);
       }
 
-      /* A block index is just a 32-bit value but the pointer has some
-       * other dimensionality. Cram it in there and we'll unpack it later
-       * in vtn_pointer_from_ssa.
-       */
-      const unsigned swiz[4] = { 0, };
-      return nir_swizzle(&b->nb, nir_u2u(&b->nb, ptr->block_index, bit_size),
-                         swiz, num_components, false);
+      return ptr->block_index;
    } else {
       return &vtn_pointer_to_deref(b, ptr)->dest.ssa;
    }
@@ -1859,11 +1876,10 @@ vtn_pointer_from_ssa(struct vtn_builder *b, nir_ssa_def *ssa,
    } else if (vtn_type_contains_block(b, ptr->type) &&
               ptr->mode != vtn_variable_mode_phys_ssbo) {
       /* This is a pointer to somewhere in an array of blocks, not a
-       * pointer to somewhere inside the block. We squashed it into a
-       * random vector type before so just pick off the first channel and
-       * cast it back to 32 bits.
+       * pointer to somewhere inside the block. Set the block index
+       * instead of making a cast.
        */
-      ptr->block_index = nir_u2u32(&b->nb, nir_channel(&b->nb, ssa, 0));
+      ptr->block_index = ssa;
    } else {
       /* This is a pointer to something internal or a pointer inside a
        * block. It's just a regular cast.