nir: Add and use some deref mode helpers
NIR derefs currently have exactly one variable mode. This is about to change so we can handle OpenCL generic pointers. In order to transition safely, we need to audit every deref->mode check. This commit adds a set of helpers that provide more nuanced mode checks and converts most of NIR to use them. For simple cases, we add nir_deref_mode_is and nir_deref_mode_is_one_of helpers. These can be used in passes which don't have to bother with generic pointers and just want to know what mode a thing is. If the pass ever encounters generic pointers in a way that this check would be unsafe, it will assert-fail to alert developers that they need to think harder about things and fix the pass. For more complex passes which require a more nuanced understanding of modes, we add nir_deref_mode_may_be and nir_deref_mode_must_be helpers which accurately describe the compiler's best knowledge about the given deref. Unfortunately, we may not be able to exactly identify the mode in a generic pointers scenario so we have to be very careful when we use these. Conversion of these passes is left to later commits. For the case of mass lowering of a particular mode (nir_lower_explicit_io is one good example), we add nir_deref_mode_is_in_set. This is also pretty assert-happy like nir_deref_mode_is but is for a set containment comparison on deref modes where you expect the deref to either be all-in or all-out. Reviewed-by: Jesse Natalie <jenatali@microsoft.com> Reviewed-by: Caio Marcelo de Oliveira Filho <caio.oliveira@intel.com> Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/6332>
This commit is contained in:

committed by
Marge Bot

parent
74886cabaa
commit
3cc58e6470
@@ -4502,7 +4502,7 @@ static LLVMTypeRef glsl_to_llvm_type(struct ac_llvm_context *ac, const struct gl
|
||||
|
||||
static void visit_deref(struct ac_nir_context *ctx, nir_deref_instr *instr)
|
||||
{
|
||||
if (instr->mode != nir_var_mem_shared && instr->mode != nir_var_mem_global)
|
||||
if (!nir_deref_mode_is_one_of(instr, nir_var_mem_shared | nir_var_mem_global))
|
||||
return;
|
||||
|
||||
LLVMValueRef result = NULL;
|
||||
@@ -4513,7 +4513,7 @@ static void visit_deref(struct ac_nir_context *ctx, nir_deref_instr *instr)
|
||||
break;
|
||||
}
|
||||
case nir_deref_type_struct:
|
||||
if (instr->mode == nir_var_mem_global) {
|
||||
if (nir_deref_mode_is(instr, nir_var_mem_global)) {
|
||||
nir_deref_instr *parent = nir_deref_instr_parent(instr);
|
||||
uint64_t offset = glsl_get_struct_field_offset(parent->type, instr->strct.index);
|
||||
result = ac_build_gep_ptr(&ctx->ac, get_src(ctx, instr->parent),
|
||||
@@ -4524,7 +4524,7 @@ static void visit_deref(struct ac_nir_context *ctx, nir_deref_instr *instr)
|
||||
}
|
||||
break;
|
||||
case nir_deref_type_array:
|
||||
if (instr->mode == nir_var_mem_global) {
|
||||
if (nir_deref_mode_is(instr, nir_var_mem_global)) {
|
||||
nir_deref_instr *parent = nir_deref_instr_parent(instr);
|
||||
unsigned stride = glsl_get_explicit_stride(parent->type);
|
||||
|
||||
@@ -4547,7 +4547,7 @@ static void visit_deref(struct ac_nir_context *ctx, nir_deref_instr *instr)
|
||||
}
|
||||
break;
|
||||
case nir_deref_type_ptr_as_array:
|
||||
if (instr->mode == nir_var_mem_global) {
|
||||
if (nir_deref_mode_is(instr, nir_var_mem_global)) {
|
||||
unsigned stride = nir_deref_instr_array_stride(instr);
|
||||
|
||||
LLVMValueRef index = get_src(ctx, instr->arr.index);
|
||||
@@ -4569,7 +4569,7 @@ static void visit_deref(struct ac_nir_context *ctx, nir_deref_instr *instr)
|
||||
/* We can't use the structs from LLVM because the shader
|
||||
* specifies its own offsets. */
|
||||
LLVMTypeRef pointee_type = ctx->ac.i8;
|
||||
if (instr->mode == nir_var_mem_shared)
|
||||
if (nir_deref_mode_is(instr, nir_var_mem_shared))
|
||||
pointee_type = glsl_to_llvm_type(&ctx->ac, instr->type);
|
||||
|
||||
unsigned address_space;
|
||||
|
@@ -2932,7 +2932,8 @@ mem_vectorize_callback(unsigned align_mul, unsigned align_offset,
|
||||
return align % (bit_size == 8 ? 2 : 4) == 0;
|
||||
case nir_intrinsic_load_deref:
|
||||
case nir_intrinsic_store_deref:
|
||||
assert(nir_src_as_deref(low->src[0])->mode == nir_var_mem_shared);
|
||||
assert(nir_deref_mode_is(nir_src_as_deref(low->src[0]),
|
||||
nir_var_mem_shared));
|
||||
/* fallthrough */
|
||||
case nir_intrinsic_load_shared:
|
||||
case nir_intrinsic_store_shared:
|
||||
|
@@ -392,7 +392,9 @@ add_var_use_deref(nir_deref_instr *deref, struct hash_table *live,
|
||||
|
||||
deref = path.path[0];
|
||||
if (deref->deref_type != nir_deref_type_var ||
|
||||
deref->mode & ~(nir_var_uniform | nir_var_mem_ubo | nir_var_mem_ssbo)) {
|
||||
!nir_deref_mode_is_one_of(deref, nir_var_uniform |
|
||||
nir_var_mem_ubo |
|
||||
nir_var_mem_ssbo)) {
|
||||
nir_deref_path_finish(&path);
|
||||
return;
|
||||
}
|
||||
|
@@ -83,11 +83,11 @@ get_block_array_index(nir_builder *b, nir_deref_instr *deref,
|
||||
|
||||
unsigned num_blocks;
|
||||
struct gl_uniform_block **blocks;
|
||||
if (deref->mode == nir_var_mem_ubo) {
|
||||
if (nir_deref_mode_is(deref, nir_var_mem_ubo)) {
|
||||
num_blocks = linked_shader->Program->info.num_ubos;
|
||||
blocks = linked_shader->Program->sh.UniformBlocks;
|
||||
} else {
|
||||
assert(deref->mode == nir_var_mem_ssbo);
|
||||
assert(nir_deref_mode_is(deref, nir_var_mem_ssbo));
|
||||
num_blocks = linked_shader->Program->info.num_ssbos;
|
||||
blocks = linked_shader->Program->sh.ShaderStorageBlocks;
|
||||
}
|
||||
@@ -170,7 +170,8 @@ lower_buffer_interface_derefs_impl(nir_function_impl *impl,
|
||||
switch (instr->type) {
|
||||
case nir_instr_type_deref: {
|
||||
nir_deref_instr *deref = nir_instr_as_deref(instr);
|
||||
if (!(deref->mode & (nir_var_mem_ubo | nir_var_mem_ssbo)))
|
||||
if (!nir_deref_mode_is_one_of(deref, nir_var_mem_ubo |
|
||||
nir_var_mem_ssbo))
|
||||
break;
|
||||
|
||||
/* We use nir_address_format_32bit_index_offset */
|
||||
@@ -229,7 +230,8 @@ lower_buffer_interface_derefs_impl(nir_function_impl *impl,
|
||||
switch (intrin->intrinsic) {
|
||||
case nir_intrinsic_load_deref: {
|
||||
nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
|
||||
if (!(deref->mode & (nir_var_mem_ubo | nir_var_mem_ssbo)))
|
||||
if (!nir_deref_mode_is_one_of(deref, nir_var_mem_ubo |
|
||||
nir_var_mem_ssbo))
|
||||
break;
|
||||
|
||||
/* UBO and SSBO Booleans are 32-bit integers where any non-zero
|
||||
@@ -254,7 +256,8 @@ lower_buffer_interface_derefs_impl(nir_function_impl *impl,
|
||||
|
||||
case nir_intrinsic_store_deref: {
|
||||
nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
|
||||
if (!(deref->mode & (nir_var_mem_ubo | nir_var_mem_ssbo)))
|
||||
if (!nir_deref_mode_is_one_of(deref, nir_var_mem_ubo |
|
||||
nir_var_mem_ssbo))
|
||||
break;
|
||||
|
||||
/* SSBO Booleans are 32-bit integers where any non-zero value
|
||||
|
@@ -1830,7 +1830,7 @@ nir_visitor::visit(ir_expression *ir)
|
||||
deref->accept(this);
|
||||
|
||||
nir_intrinsic_op op;
|
||||
if (this->deref->mode == nir_var_shader_in) {
|
||||
if (nir_deref_mode_is(this->deref, nir_var_shader_in)) {
|
||||
switch (ir->operation) {
|
||||
case ir_unop_interpolate_at_centroid:
|
||||
op = nir_intrinsic_interp_deref_at_centroid;
|
||||
@@ -1851,7 +1851,7 @@ nir_visitor::visit(ir_expression *ir)
|
||||
* sense, we'll just turn it into a load which will probably
|
||||
* eventually end up as an SSA definition.
|
||||
*/
|
||||
assert(this->deref->mode == nir_var_shader_temp);
|
||||
assert(nir_deref_mode_is(this->deref, nir_var_shader_temp));
|
||||
op = nir_intrinsic_load_deref;
|
||||
}
|
||||
|
||||
@@ -2468,7 +2468,7 @@ nir_visitor::visit(ir_texture *ir)
|
||||
nir_deref_instr *sampler_deref = evaluate_deref(ir->sampler);
|
||||
|
||||
/* check for bindless handles */
|
||||
if (sampler_deref->mode != nir_var_uniform ||
|
||||
if (!nir_deref_mode_is(sampler_deref, nir_var_uniform) ||
|
||||
nir_deref_instr_get_variable(sampler_deref)->data.bindless) {
|
||||
nir_ssa_def *load = nir_load_deref(&b, sampler_deref);
|
||||
instr->src[0].src = nir_src_for_ssa(load);
|
||||
|
@@ -1443,7 +1443,11 @@ typedef struct {
|
||||
/** The type of this deref instruction */
|
||||
nir_deref_type deref_type;
|
||||
|
||||
/** The mode of the underlying variable */
|
||||
/** The mode of the underlying variable
|
||||
*
|
||||
* Generally, this field should not be accessed directly. Use one of the
|
||||
* nir_deref_mode_ helpers instead.
|
||||
*/
|
||||
nir_variable_mode mode;
|
||||
|
||||
/** The dereferenced type of the resulting pointer value */
|
||||
@@ -1478,6 +1482,103 @@ typedef struct {
|
||||
nir_dest dest;
|
||||
} nir_deref_instr;
|
||||
|
||||
/** Returns true if deref might have one of the given modes
|
||||
*
|
||||
* For multi-mode derefs, this returns true if any of the possible modes of
|
||||
* the deref match any of the specified modes. This function returning true
|
||||
* does NOT mean that the deref definitely has one of those modes. It simply
|
||||
* means that, with the best information we have at the time, it might.
|
||||
*/
|
||||
static inline bool
|
||||
nir_deref_mode_may_be(const nir_deref_instr *deref, nir_variable_mode modes)
|
||||
{
|
||||
assert(!(modes & ~nir_var_all));
|
||||
assert(deref->mode != 0);
|
||||
return deref->mode & modes;
|
||||
}
|
||||
|
||||
/** Returns true if deref must have one of the given modes
|
||||
*
|
||||
* For multi-mode derefs, this returns true if NIR can prove that the given
|
||||
* deref has one of the specified modes. This function returning false does
|
||||
* NOT mean that the deref doesn't have one of the given modes. It very well may
|
||||
* have one of those modes, we just don't have enough information to prove
|
||||
* that it does for sure.
|
||||
*/
|
||||
static inline bool
|
||||
nir_deref_mode_must_be(const nir_deref_instr *deref, nir_variable_mode modes)
|
||||
{
|
||||
assert(!(modes & ~nir_var_all));
|
||||
assert(deref->mode != 0);
|
||||
return !(deref->mode & ~modes);
|
||||
}
|
||||
|
||||
/** Returns true if deref has the given mode
|
||||
*
|
||||
* This returns true if the deref has exactly the mode specified. If the
|
||||
* deref may have that mode but may also have a different mode (i.e. modes has
|
||||
* multiple bits set), this will assert-fail.
|
||||
*
|
||||
* If you're confused about which nir_deref_mode_ helper to use, use this one
|
||||
* or nir_deref_mode_is_one_of below.
|
||||
*/
|
||||
static inline bool
|
||||
nir_deref_mode_is(const nir_deref_instr *deref, nir_variable_mode mode)
|
||||
{
|
||||
assert(util_bitcount(mode) == 1 && (mode & nir_var_all));
|
||||
|
||||
/* This is only for "simple" cases so, if modes might interact with this
|
||||
* deref then the deref has to have a single mode.
|
||||
*/
|
||||
if (nir_deref_mode_may_be(deref, mode)) {
|
||||
assert(util_bitcount(deref->mode) == 1);
|
||||
assert(deref->mode == mode);
|
||||
}
|
||||
|
||||
return deref->mode == mode;
|
||||
}
|
||||
|
||||
/** Returns true if deref has one of the given modes
|
||||
*
|
||||
* This returns true if the deref has exactly one possible mode and that mode
|
||||
* is one of the modes specified. If the deref may have one of those modes
|
||||
* but may also have a different mode (i.e. modes has multiple bits set), this
|
||||
* will assert-fail.
|
||||
*/
|
||||
static inline bool
|
||||
nir_deref_mode_is_one_of(const nir_deref_instr *deref, nir_variable_mode modes)
|
||||
{
|
||||
/* This is only for "simple" cases so, if modes might interact with this
|
||||
* deref then the deref has to have a single mode.
|
||||
*/
|
||||
if (nir_deref_mode_may_be(deref, modes)) {
|
||||
assert(util_bitcount(deref->mode) == 1);
|
||||
assert(nir_deref_mode_must_be(deref, modes));
|
||||
}
|
||||
|
||||
return nir_deref_mode_may_be(deref, modes);
|
||||
}
|
||||
|
||||
/** Returns true if deref's possible modes lie in the given set of modes
|
||||
*
|
||||
* This returns true if the deref's modes lie in the given set of modes. If
|
||||
* the deref's modes overlap with the specified modes but aren't entirely
|
||||
* contained in the specified set of modes, this will assert-fail. In
|
||||
* particular, if this is used in a generic pointers scenario, the specified
|
||||
* modes has to contain all or none of the possible generic pointer modes.
|
||||
*
|
||||
* This is intended mostly for mass-lowering of derefs which might have
|
||||
* generic pointers.
|
||||
*/
|
||||
static inline bool
|
||||
nir_deref_mode_is_in_set(const nir_deref_instr *deref, nir_variable_mode modes)
|
||||
{
|
||||
if (nir_deref_mode_may_be(deref, modes))
|
||||
assert(nir_deref_mode_must_be(deref, modes));
|
||||
|
||||
return nir_deref_mode_may_be(deref, modes);
|
||||
}
|
||||
|
||||
static inline nir_deref_instr *nir_src_as_deref(nir_src src);
|
||||
|
||||
static inline nir_deref_instr *
|
||||
|
@@ -300,15 +300,8 @@ try_mask_partial_io(nir_shader *shader, nir_variable *var,
|
||||
static void
|
||||
update_memory_written_for_deref(nir_shader *shader, nir_deref_instr *deref)
|
||||
{
|
||||
switch (deref->mode) {
|
||||
case nir_var_mem_ssbo:
|
||||
case nir_var_mem_global:
|
||||
if (nir_deref_mode_may_be(deref, (nir_var_mem_ssbo | nir_var_mem_global)))
|
||||
shader->info.writes_memory = true;
|
||||
break;
|
||||
default:
|
||||
/* Nothing to do. */
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
static void
|
||||
@@ -350,8 +343,8 @@ gather_intrinsic_info(nir_intrinsic_instr *instr, nir_shader *shader,
|
||||
case nir_intrinsic_load_deref:
|
||||
case nir_intrinsic_store_deref:{
|
||||
nir_deref_instr *deref = nir_src_as_deref(instr->src[0]);
|
||||
if (deref->mode == nir_var_shader_in ||
|
||||
deref->mode == nir_var_shader_out) {
|
||||
if (nir_deref_mode_is_one_of(deref, nir_var_shader_in |
|
||||
nir_var_shader_out)) {
|
||||
nir_variable *var = nir_deref_instr_get_variable(deref);
|
||||
bool is_output_read = false;
|
||||
if (var->data.mode == nir_var_shader_out &&
|
||||
|
@@ -85,7 +85,7 @@ tcs_add_output_reads(nir_shader *shader, uint64_t *read, uint64_t *patches_read)
|
||||
continue;
|
||||
|
||||
nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
|
||||
if (deref->mode != nir_var_shader_out)
|
||||
if (!nir_deref_mode_is(deref, nir_var_shader_out))
|
||||
continue;
|
||||
|
||||
nir_variable *var = nir_deref_instr_get_variable(deref);
|
||||
@@ -540,7 +540,7 @@ gather_varying_component_info(nir_shader *producer, nir_shader *consumer,
|
||||
continue;
|
||||
|
||||
nir_deref_instr *deref = nir_src_as_deref(intr->src[0]);
|
||||
if (deref->mode != nir_var_shader_in)
|
||||
if (!nir_deref_mode_is(deref, nir_var_shader_in))
|
||||
continue;
|
||||
|
||||
/* We only remap things that aren't builtins. */
|
||||
@@ -597,7 +597,7 @@ gather_varying_component_info(nir_shader *producer, nir_shader *consumer,
|
||||
continue;
|
||||
|
||||
nir_deref_instr *deref = nir_src_as_deref(intr->src[0]);
|
||||
if (deref->mode != nir_var_shader_out)
|
||||
if (!nir_deref_mode_is(deref, nir_var_shader_out))
|
||||
continue;
|
||||
|
||||
/* We only remap things that aren't builtins. */
|
||||
@@ -928,7 +928,7 @@ replace_constant_input(nir_shader *shader, nir_intrinsic_instr *store_intr)
|
||||
continue;
|
||||
|
||||
nir_deref_instr *in_deref = nir_src_as_deref(intr->src[0]);
|
||||
if (in_deref->mode != nir_var_shader_in)
|
||||
if (!nir_deref_mode_is(in_deref, nir_var_shader_in))
|
||||
continue;
|
||||
|
||||
nir_variable *in_var = nir_deref_instr_get_variable(in_deref);
|
||||
@@ -980,7 +980,7 @@ replace_duplicate_input(nir_shader *shader, nir_variable *input_var,
|
||||
continue;
|
||||
|
||||
nir_deref_instr *in_deref = nir_src_as_deref(intr->src[0]);
|
||||
if (in_deref->mode != nir_var_shader_in)
|
||||
if (!nir_deref_mode_is(in_deref, nir_var_shader_in))
|
||||
continue;
|
||||
|
||||
nir_variable *in_var = nir_deref_instr_get_variable(in_deref);
|
||||
@@ -1031,7 +1031,7 @@ nir_link_opt_varyings(nir_shader *producer, nir_shader *consumer)
|
||||
continue;
|
||||
|
||||
nir_deref_instr *out_deref = nir_src_as_deref(intr->src[0]);
|
||||
if (out_deref->mode != nir_var_shader_out)
|
||||
if (!nir_deref_mode_is(out_deref, nir_var_shader_out))
|
||||
continue;
|
||||
|
||||
nir_variable *out_var = nir_deref_instr_get_variable(out_deref);
|
||||
|
@@ -1676,7 +1676,7 @@ lower_explicit_io_array_length(nir_builder *b, nir_intrinsic_instr *intrin,
|
||||
|
||||
assert(glsl_type_is_array(deref->type));
|
||||
assert(glsl_get_length(deref->type) == 0);
|
||||
assert(deref->mode == nir_var_mem_ssbo);
|
||||
assert(nir_deref_mode_is(deref, nir_var_mem_ssbo));
|
||||
unsigned stride = glsl_get_explicit_stride(deref->type);
|
||||
assert(stride > 0);
|
||||
|
||||
|
@@ -250,7 +250,7 @@ create_indirects_mask(nir_shader *shader,
|
||||
continue;
|
||||
|
||||
nir_deref_instr *deref = nir_src_as_deref(intr->src[0]);
|
||||
if (deref->mode != mode)
|
||||
if (!nir_deref_mode_is(deref, mode))
|
||||
continue;
|
||||
|
||||
nir_variable *var = nir_deref_instr_get_variable(deref);
|
||||
@@ -296,7 +296,7 @@ lower_io_arrays_to_elements(nir_shader *shader, nir_variable_mode mask,
|
||||
continue;
|
||||
|
||||
nir_deref_instr *deref = nir_src_as_deref(intr->src[0]);
|
||||
if (!(deref->mode & mask))
|
||||
if (!nir_deref_mode_is_one_of(deref, mask))
|
||||
continue;
|
||||
|
||||
nir_variable *var = nir_deref_instr_get_variable(deref);
|
||||
|
@@ -314,7 +314,7 @@ nir_lower_io_to_scalar_early_instr(nir_builder *b, nir_instr *instr, void *data)
|
||||
return false;
|
||||
|
||||
nir_deref_instr *deref = nir_src_as_deref(intr->src[0]);
|
||||
if (!(deref->mode & state->mask))
|
||||
if (!nir_deref_mode_is_one_of(deref, state->mask))
|
||||
return false;
|
||||
|
||||
nir_variable *var = nir_deref_instr_get_variable(deref);
|
||||
|
@@ -431,10 +431,10 @@ nir_lower_io_to_vector_impl(nir_function_impl *impl, nir_variable_mode modes)
|
||||
case nir_intrinsic_interp_deref_at_offset:
|
||||
case nir_intrinsic_interp_deref_at_vertex: {
|
||||
nir_deref_instr *old_deref = nir_src_as_deref(intrin->src[0]);
|
||||
if (!(old_deref->mode & modes))
|
||||
if (!nir_deref_mode_is_one_of(old_deref, modes))
|
||||
break;
|
||||
|
||||
if (old_deref->mode == nir_var_shader_out)
|
||||
if (nir_deref_mode_is(old_deref, nir_var_shader_out))
|
||||
assert(b.shader->info.stage == MESA_SHADER_TESS_CTRL ||
|
||||
b.shader->info.stage == MESA_SHADER_FRAGMENT);
|
||||
|
||||
@@ -490,7 +490,7 @@ nir_lower_io_to_vector_impl(nir_function_impl *impl, nir_variable_mode modes)
|
||||
|
||||
case nir_intrinsic_store_deref: {
|
||||
nir_deref_instr *old_deref = nir_src_as_deref(intrin->src[0]);
|
||||
if (old_deref->mode != nir_var_shader_out)
|
||||
if (!nir_deref_mode_is(old_deref, nir_var_shader_out))
|
||||
break;
|
||||
|
||||
nir_variable *old_var = nir_deref_instr_get_variable(old_deref);
|
||||
|
@@ -193,7 +193,7 @@ lower_locals_to_regs_block(nir_block *block,
|
||||
switch (intrin->intrinsic) {
|
||||
case nir_intrinsic_load_deref: {
|
||||
nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
|
||||
if (deref->mode != nir_var_function_temp)
|
||||
if (!nir_deref_mode_is(deref, nir_var_function_temp))
|
||||
continue;
|
||||
|
||||
b->cursor = nir_before_instr(&intrin->instr);
|
||||
@@ -219,7 +219,7 @@ lower_locals_to_regs_block(nir_block *block,
|
||||
|
||||
case nir_intrinsic_store_deref: {
|
||||
nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
|
||||
if (deref->mode != nir_var_function_temp)
|
||||
if (!nir_deref_mode_is(deref, nir_var_function_temp))
|
||||
continue;
|
||||
|
||||
b->cursor = nir_before_instr(&intrin->instr);
|
||||
|
@@ -112,7 +112,7 @@ nir_lower_vars_to_scratch(nir_shader *shader,
|
||||
continue;
|
||||
|
||||
nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
|
||||
if (!(deref->mode & modes))
|
||||
if (!nir_deref_mode_is_one_of(deref, modes))
|
||||
continue;
|
||||
|
||||
if (!nir_deref_instr_has_indirect(nir_src_as_deref(intrin->src[0])))
|
||||
|
@@ -110,7 +110,7 @@ lower_system_value_instr(nir_builder *b, nir_instr *instr, void *_state)
|
||||
|
||||
case nir_intrinsic_load_deref: {
|
||||
nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
|
||||
if (deref->mode != nir_var_system_value)
|
||||
if (!nir_deref_mode_is(deref, nir_var_system_value))
|
||||
return NULL;
|
||||
|
||||
if (deref->deref_type != nir_deref_type_var) {
|
||||
|
@@ -114,7 +114,7 @@ try_fold_alu(nir_builder *b, nir_alu_instr *instr)
|
||||
static nir_const_value *
|
||||
const_value_for_deref(nir_deref_instr *deref)
|
||||
{
|
||||
if (deref->mode != nir_var_mem_constant)
|
||||
if (!nir_deref_mode_is(deref, nir_var_mem_constant))
|
||||
return NULL;
|
||||
|
||||
nir_deref_path path;
|
||||
|
@@ -241,7 +241,7 @@ nir_opt_large_constants(nir_shader *shader,
|
||||
continue;
|
||||
}
|
||||
|
||||
if (dst_deref && dst_deref->mode == nir_var_function_temp) {
|
||||
if (dst_deref && nir_deref_mode_is(dst_deref, nir_var_function_temp)) {
|
||||
nir_variable *var = nir_deref_instr_get_variable(dst_deref);
|
||||
if (var == NULL)
|
||||
continue;
|
||||
@@ -269,7 +269,7 @@ nir_opt_large_constants(nir_shader *shader,
|
||||
}
|
||||
}
|
||||
|
||||
if (src_deref && src_deref->mode == nir_var_function_temp) {
|
||||
if (src_deref && nir_deref_mode_is(src_deref, nir_var_function_temp)) {
|
||||
nir_variable *var = nir_deref_instr_get_variable(src_deref);
|
||||
if (var == NULL)
|
||||
continue;
|
||||
@@ -354,7 +354,7 @@ nir_opt_large_constants(nir_shader *shader,
|
||||
switch (intrin->intrinsic) {
|
||||
case nir_intrinsic_load_deref: {
|
||||
nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
|
||||
if (deref->mode != nir_var_function_temp)
|
||||
if (!nir_deref_mode_is(deref, nir_var_function_temp))
|
||||
continue;
|
||||
|
||||
nir_variable *var = nir_deref_instr_get_variable(deref);
|
||||
@@ -375,7 +375,7 @@ nir_opt_large_constants(nir_shader *shader,
|
||||
|
||||
case nir_intrinsic_store_deref: {
|
||||
nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
|
||||
if (deref->mode != nir_var_function_temp)
|
||||
if (!nir_deref_mode_is(deref, nir_var_function_temp))
|
||||
continue;
|
||||
|
||||
nir_variable *var = nir_deref_instr_get_variable(deref);
|
||||
|
@@ -591,7 +591,7 @@ validate_intrinsic_instr(nir_intrinsic_instr *instr, validate_state *state)
|
||||
/* Also allow 32-bit boolean store operations */
|
||||
if (glsl_type_is_boolean(dst->type))
|
||||
src_bit_sizes[1] |= 32;
|
||||
validate_assert(state, (dst->mode & nir_var_read_only_modes) == 0);
|
||||
validate_assert(state, !nir_deref_mode_may_be(dst, nir_var_read_only_modes));
|
||||
validate_assert(state, (nir_intrinsic_write_mask(instr) & ~((1 << instr->num_components) - 1)) == 0);
|
||||
break;
|
||||
}
|
||||
@@ -601,7 +601,7 @@ validate_intrinsic_instr(nir_intrinsic_instr *instr, validate_state *state)
|
||||
nir_deref_instr *src = nir_src_as_deref(instr->src[1]);
|
||||
validate_assert(state, glsl_get_bare_type(dst->type) ==
|
||||
glsl_get_bare_type(src->type));
|
||||
validate_assert(state, (dst->mode & nir_var_read_only_modes) == 0);
|
||||
validate_assert(state, !nir_deref_mode_may_be(dst, nir_var_read_only_modes));
|
||||
break;
|
||||
}
|
||||
|
||||
|
@@ -40,7 +40,7 @@ lower_multiview_mask(nir_shader *nir, uint32_t *mask)
|
||||
continue;
|
||||
|
||||
nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
|
||||
if (deref->mode != nir_var_shader_out)
|
||||
if (!nir_deref_mode_is(deref, nir_var_shader_out))
|
||||
continue;
|
||||
|
||||
nir_variable *var = nir_deref_instr_get_variable(deref);
|
||||
|
@@ -2054,8 +2054,8 @@ static void visit_jump(struct lp_build_nir_context *bld_base,
|
||||
static void visit_deref(struct lp_build_nir_context *bld_base,
|
||||
nir_deref_instr *instr)
|
||||
{
|
||||
if (instr->mode != nir_var_mem_shared &&
|
||||
instr->mode != nir_var_mem_global)
|
||||
if (!nir_deref_mode_is_one_of(instr, nir_var_mem_shared |
|
||||
nir_var_mem_global))
|
||||
return;
|
||||
LLVMValueRef result = NULL;
|
||||
switch(instr->deref_type) {
|
||||
|
@@ -421,7 +421,7 @@ bool NirLowerFSOutToVector::instr_can_rewrite_type(nir_intrinsic_instr *intr) co
|
||||
return false;
|
||||
|
||||
nir_deref_instr *deref = nir_src_as_deref(intr->src[0]);
|
||||
if (deref->mode != nir_var_shader_out)
|
||||
if (!nir_deref_mode_is(deref, nir_var_shader_out))
|
||||
return false;
|
||||
|
||||
return var_can_rewrite(nir_deref_instr_get_variable(deref));
|
||||
|
@@ -87,7 +87,7 @@ r600_instr_can_rewrite(nir_instr *instr)
|
||||
return false;
|
||||
|
||||
nir_deref_instr *deref = nir_src_as_deref(intr->src[0]);
|
||||
if (deref->mode != nir_var_shader_in)
|
||||
if (!nir_deref_mode_is(deref, nir_var_shader_in))
|
||||
return false;
|
||||
|
||||
return r600_variable_can_rewrite(nir_deref_instr_get_variable(deref));
|
||||
|
@@ -299,7 +299,7 @@ void ShaderFromNirProcessor::add_array_deref(nir_deref_instr *instr)
|
||||
{
|
||||
nir_variable *var = nir_deref_instr_get_variable(instr);
|
||||
|
||||
assert(instr->mode == nir_var_function_temp);
|
||||
assert(nir_deref_mode_is(instr, nir_var_function_temp));
|
||||
assert(glsl_type_is_array(var->type));
|
||||
|
||||
// add an alias for the index to the register(s);
|
||||
|
@@ -525,7 +525,7 @@ static void si_nir_lower_color(nir_shader *nir)
|
||||
continue;
|
||||
|
||||
nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
|
||||
if (deref->mode != nir_var_shader_in)
|
||||
if (!nir_deref_mode_is(deref, nir_var_shader_in))
|
||||
continue;
|
||||
|
||||
b.cursor = nir_before_instr(instr);
|
||||
|
@@ -262,7 +262,7 @@ try_lower_direct_buffer_intrinsic(nir_intrinsic_instr *intrin, bool is_atomic,
|
||||
nir_builder *b = &state->builder;
|
||||
|
||||
nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
|
||||
if (deref->mode != nir_var_mem_ssbo)
|
||||
if (!nir_deref_mode_is(deref, nir_var_mem_ssbo))
|
||||
return false;
|
||||
|
||||
/* 64-bit atomics only support A64 messages so we can't lower them to the
|
||||
|
Reference in New Issue
Block a user