nir: Switch the arguments to nir_foreach_instr
This matches the "foreach x in container" pattern found in many other
programming languages.  Generated by the following regular expression:

s/nir_foreach_instr(\([^,]*\),\s*\([^,]*\))/nir_foreach_instr(\2, \1)/

and similar expressions for nir_foreach_instr_safe etc.

Reviewed-by: Ian Romanick <ian.d.romanick@intel.com>
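For illustration only (not part of the commit): a minimal, standalone C sketch of a list-iteration macro written with the same "foreach (element, container)" argument order that the nir_foreach_instr macros adopt below. The struct and macro names here are hypothetical, not Mesa code.

#include <stdio.h>

struct node {
   int value;
   struct node *next;
};

/* Iterator variable first, container second, mirroring "for x in list". */
#define foreach_node(elem, list) \
   for (struct node *elem = (list); elem != NULL; elem = elem->next)

int
main(void)
{
   struct node c = { 3, NULL };
   struct node b = { 2, &c };
   struct node a = { 1, &b };

   /* Reads as "for each n in &a", matching the new macro argument order. */
   foreach_node(n, &a)
      printf("%d\n", n->value);

   return 0;
}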
@@ -1708,7 +1708,7 @@ nir_index_ssa_defs(nir_function_impl *impl)
    unsigned index = 0;

    nir_foreach_block(block, impl) {
-      nir_foreach_instr(block, instr)
+      nir_foreach_instr(instr, block)
          nir_foreach_ssa_def(instr, index_ssa_def_cb, &index);
    }

@@ -1725,7 +1725,7 @@ nir_index_instrs(nir_function_impl *impl)
    unsigned index = 0;

    nir_foreach_block(block, impl) {
-      nir_foreach_instr(block, instr)
+      nir_foreach_instr(instr, block)
          instr->index = index++;
    }

@@ -1434,13 +1434,13 @@ nir_block_last_instr(nir_block *block)
    return exec_node_data(nir_instr, tail, node);
 }

-#define nir_foreach_instr(block, instr) \
+#define nir_foreach_instr(instr, block) \
    foreach_list_typed(nir_instr, instr, node, &(block)->instr_list)
-#define nir_foreach_instr_reverse(block, instr) \
+#define nir_foreach_instr_reverse(instr, block) \
    foreach_list_typed_reverse(nir_instr, instr, node, &(block)->instr_list)
-#define nir_foreach_instr_safe(block, instr) \
+#define nir_foreach_instr_safe(instr, block) \
    foreach_list_typed_safe(nir_instr, instr, node, &(block)->instr_list)
-#define nir_foreach_instr_reverse_safe(block, instr) \
+#define nir_foreach_instr_reverse_safe(instr, block) \
    foreach_list_typed_reverse_safe(nir_instr, instr, node, &(block)->instr_list)

 typedef struct nir_if {
@@ -2018,7 +2018,7 @@ nir_after_cf_node_and_phis(nir_cf_node *node)
    nir_block *block = nir_cf_node_as_block(nir_cf_node_next(node));
    assert(block->cf_node.type == nir_cf_node_block);

-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       if (instr->type != nir_instr_type_phi)
          return nir_before_instr(instr);
    }
@@ -524,7 +524,7 @@ ${pass_name}_block(nir_block *block, const bool *condition_flags,
 {
    bool progress = false;

-   nir_foreach_instr_reverse_safe(block, instr) {
+   nir_foreach_instr_reverse_safe(instr, block) {
       if (instr->type != nir_instr_type_alu)
          continue;

@@ -523,7 +523,7 @@ clone_block(clone_state *state, struct exec_list *cf_list, const nir_block *blk)
    /* We need this for phi sources */
    add_remap(state, nblk, blk);

-   nir_foreach_instr(blk, instr) {
+   nir_foreach_instr(instr, blk) {
       if (instr->type == nir_instr_type_phi) {
          /* Phi instructions are a bit of a special case when cloning because
           * we don't want inserting the instruction to automatically handle
@@ -241,7 +241,7 @@ split_block_beginning(nir_block *block)
     * sourcse will be messed up. This will reverse the order of the phi's, but
     * order shouldn't matter.
     */
-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_phi)
          break;

@@ -256,7 +256,7 @@ split_block_beginning(nir_block *block)
 static void
 rewrite_phi_preds(nir_block *block, nir_block *old_pred, nir_block *new_pred)
 {
-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_phi)
          break;

@@ -274,7 +274,7 @@ static void
 insert_phi_undef(nir_block *block, nir_block *pred)
 {
    nir_function_impl *impl = nir_cf_node_get_function(&block->cf_node);
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       if (instr->type != nir_instr_type_phi)
          break;

@@ -404,7 +404,7 @@ split_block_before_instr(nir_instr *instr)
    assert(instr->type != nir_instr_type_phi);
    nir_block *new_block = split_block_beginning(instr->block);

-   nir_foreach_instr_safe(instr->block, cur_instr) {
+   nir_foreach_instr_safe(cur_instr, instr->block) {
       if (cur_instr == instr)
          break;

@@ -537,7 +537,7 @@ nir_handle_add_jump(nir_block *block)
 static void
 remove_phi_src(nir_block *block, nir_block *pred)
 {
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       if (instr->type != nir_instr_type_phi)
          break;

@@ -706,7 +706,7 @@ cleanup_cf_node(nir_cf_node *node, nir_function_impl *impl)
    case nir_cf_node_block: {
       nir_block *block = nir_cf_node_as_block(node);
       /* We need to walk the instructions and clean up defs/uses */
-      nir_foreach_instr_safe(block, instr) {
+      nir_foreach_instr_safe(instr, block) {
         if (instr->type == nir_instr_type_jump) {
            nir_jump_type jump_type = nir_instr_as_jump(instr)->type;
            unlink_jump(block, jump_type, false);

@@ -305,7 +305,7 @@ static bool
 isolate_phi_nodes_block(nir_block *block, void *dead_ctx)
 {
    nir_instr *last_phi_instr = NULL;
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       /* Phi nodes only ever come at the start of a block */
       if (instr->type != nir_instr_type_phi)
          break;
@@ -324,7 +324,7 @@ isolate_phi_nodes_block(nir_block *block, void *dead_ctx)
       nir_parallel_copy_instr_create(dead_ctx);
    nir_instr_insert_after(last_phi_instr, &block_pcopy->instr);

-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       /* Phi nodes only ever come at the start of a block */
       if (instr->type != nir_instr_type_phi)
          break;
@@ -370,7 +370,7 @@ isolate_phi_nodes_block(nir_block *block, void *dead_ctx)
 static bool
 coalesce_phi_nodes_block(nir_block *block, struct from_ssa_state *state)
 {
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       /* Phi nodes only ever come at the start of a block */
       if (instr->type != nir_instr_type_phi)
          break;
@@ -424,7 +424,7 @@ static bool
 aggressive_coalesce_block(nir_block *block, struct from_ssa_state *state)
 {
    nir_parallel_copy_instr *start_pcopy = NULL;
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       /* Phi nodes only ever come at the start of a block */
       if (instr->type != nir_instr_type_phi) {
         if (instr->type != nir_instr_type_parallel_copy)
@@ -520,7 +520,7 @@ rewrite_ssa_def(nir_ssa_def *def, void *void_state)
 static bool
 resolve_registers_block(nir_block *block, struct from_ssa_state *state)
 {
-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       state->instr = instr;
       nir_foreach_ssa_def(instr, rewrite_ssa_def, state);

@@ -71,7 +71,7 @@ gather_tex_info(nir_tex_instr *instr, nir_shader *shader)
 static bool
 gather_info_block(nir_block *block, void *shader)
 {
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       switch (instr->type) {
       case nir_instr_type_intrinsic:
          gather_intrinsic_info(nir_instr_as_intrinsic(instr), shader);

@@ -66,7 +66,7 @@ nir_gs_count_vertices(const nir_shader *shader)
       set_foreach(function->impl->end_block->predecessors, entry) {
          nir_block *block = (nir_block *) entry->key;

-         nir_foreach_instr_reverse(block, instr) {
+         nir_foreach_instr_reverse(instr, block) {
            nir_intrinsic_instr *intrin = as_set_vertex_count(instr);
            if (!intrin)
               continue;

@@ -30,7 +30,7 @@ static bool inline_function_impl(nir_function_impl *impl, struct set *inlined);
 static bool
 rewrite_param_derefs_block(nir_block *block, nir_call_instr *call)
 {
-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
         continue;

@@ -89,7 +89,7 @@ lower_param_to_local(nir_variable *param, nir_function_impl *impl, bool write)
 static bool
 lower_params_to_locals_block(nir_block *block, nir_function_impl *impl)
 {
-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
         continue;

@@ -135,7 +135,7 @@ inline_functions_block(nir_block *block, nir_builder *b,
     * properly get moved to the next block when it gets split, and we
     * continue iterating there.
     */
-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_call)
         continue;

@@ -124,7 +124,7 @@ propagate_across_edge(nir_block *pred, nir_block *succ,
    NIR_VLA(BITSET_WORD, live, state->bitset_words);
    memcpy(live, succ->live_in, state->bitset_words * sizeof *live);

-   nir_foreach_instr(succ, instr) {
+   nir_foreach_instr(instr, succ) {
       if (instr->type != nir_instr_type_phi)
          break;
       nir_phi_instr *phi = nir_instr_as_phi(instr);
@@ -133,7 +133,7 @@ propagate_across_edge(nir_block *pred, nir_block *succ,
       set_ssa_def_dead(&phi->dest.ssa, live);
    }

-   nir_foreach_instr(succ, instr) {
+   nir_foreach_instr(instr, succ) {
       if (instr->type != nir_instr_type_phi)
          break;
       nir_phi_instr *phi = nir_instr_as_phi(instr);
@@ -165,7 +165,7 @@ nir_live_ssa_defs_impl(nir_function_impl *impl)
    */
   state.num_ssa_defs = 1;
   nir_foreach_block(block, impl) {
-      nir_foreach_instr(block, instr)
+      nir_foreach_instr(instr, block)
         nir_foreach_ssa_def(instr, index_ssa_def, &state);
   }

@@ -201,7 +201,7 @@ nir_live_ssa_defs_impl(nir_function_impl *impl)
      if (following_if)
         set_src_live(&following_if->condition, block->live_in);

-      nir_foreach_instr_reverse(block, instr) {
+      nir_foreach_instr_reverse(instr, block) {
        /* Phi nodes are handled seperately so we want to skip them. Since
         * we are going backwards and they are at the beginning, we can just
         * break as soon as we see one.

@@ -247,7 +247,7 @@ nir_lower_alu_to_scalar_impl(nir_function_impl *impl)
    nir_builder_init(&builder, impl);

    nir_foreach_block(block, impl) {
-      nir_foreach_instr_safe(block, instr) {
+      nir_foreach_instr_safe(instr, block) {
         if (instr->type == nir_instr_type_alu)
            lower_alu_instr_scalar(nir_instr_as_alu(instr), &builder);
      }

@@ -140,7 +140,7 @@ nir_lower_atomics(nir_shader *shader,
    nir_foreach_function(shader, function) {
       if (function->impl) {
          nir_foreach_block(block, function->impl) {
-            nir_foreach_instr_safe(block, instr) {
+            nir_foreach_instr_safe(instr, block) {
               if (instr->type == nir_instr_type_intrinsic)
                  lower_instr(nir_instr_as_intrinsic(instr),
                              shader_program, shader);

@@ -100,7 +100,7 @@ load_clipdist_input(nir_builder *b, nir_variable *in, nir_ssa_def **val)
 static nir_ssa_def *
 find_output_in_block(nir_block *block, unsigned drvloc)
 {
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {

       if (instr->type == nir_instr_type_intrinsic) {
          nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);

@@ -538,7 +538,7 @@ lower_doubles_block(nir_block *block, void *ctx)
 {
    nir_lower_doubles_options options = *((nir_lower_doubles_options *) ctx);

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_alu)
          continue;

@@ -51,7 +51,7 @@ lower_double_pack_block(nir_block *block, void *ctx)
 {
    nir_builder *b = (nir_builder *) ctx;

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_alu)
          continue;

@@ -36,7 +36,7 @@ static bool
 mark_global_var_uses_block(nir_block *block, nir_function_impl *impl,
                            struct hash_table *var_func_table)
 {
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;

@@ -133,7 +133,7 @@ rewrite_end_primitive(nir_intrinsic_instr *intrin, struct state *state)
 static bool
 rewrite_intrinsics(nir_block *block, struct state *state)
 {
-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;

@@ -124,7 +124,7 @@ convert_impl(nir_function_impl *impl)
    nir_builder_init(&b, impl);

    nir_foreach_block(block, impl) {
-      nir_foreach_instr_safe(block, instr) {
+      nir_foreach_instr_safe(instr, block) {
         if (instr->type == nir_instr_type_alu)
            convert_instr(&b, nir_instr_as_alu(instr));
      }

@@ -165,7 +165,7 @@ lower_indirect_block(nir_block *block, nir_builder *b,
 {
    bool progress = false;

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;

@@ -216,7 +216,7 @@ nir_lower_io_block(nir_block *block,
 {
    nir_builder *b = &state->builder;

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;

@@ -71,7 +71,7 @@ static void
 nir_lower_load_const_to_scalar_impl(nir_function_impl *impl)
 {
    nir_foreach_block(block, impl) {
-      nir_foreach_instr_safe(block, instr) {
+      nir_foreach_instr_safe(instr, block) {
         if (instr->type == nir_instr_type_load_const)
            lower_load_const_instr_scalar(nir_instr_as_load_const(instr));
      }

@@ -204,7 +204,7 @@ static bool
 lower_locals_to_regs_block(nir_block *block,
                            struct locals_to_regs_state *state)
 {
-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;

@@ -61,7 +61,7 @@ emit_output_copies(nir_cursor cursor, struct lower_outputs_state *state)
 static bool
 emit_output_copies_block(nir_block *block, void *state)
 {
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;

@@ -168,7 +168,7 @@ lower_phis_to_scalar_block(nir_block *block,
 {
    /* Find the last phi node in the block */
    nir_phi_instr *last_phi = NULL;
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       if (instr->type != nir_instr_type_phi)
          break;

@@ -178,7 +178,7 @@ lower_phis_to_scalar_block(nir_block *block,
    /* We have to handle the phi nodes in their own pass due to the way
     * we're modifying the linked list of instructions.
     */
-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_phi)
         break;

@@ -160,7 +160,7 @@ lower_impl(nir_function_impl *impl, const struct gl_shader_program *shader_progr
    nir_builder_init(&b, impl);

    nir_foreach_block(block, impl) {
-      nir_foreach_instr(block, instr) {
+      nir_foreach_instr(instr, block) {
         if (instr->type == nir_instr_type_tex)
            lower_sampler(nir_instr_as_tex(instr), shader_program, stage, &b);
      }

@@ -33,7 +33,7 @@ convert_block(nir_block *block, nir_builder *b)
 {
    bool progress = false;

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;

@@ -321,7 +321,7 @@ nir_lower_tex_block(nir_block *block, void *void_state)
    const nir_lower_tex_options *options = state->options;
    nir_builder *b = &state->b;

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_tex)
          continue;

@@ -36,7 +36,7 @@
 static bool
 nir_lower_to_source_mods_block(nir_block *block)
 {
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       if (instr->type != nir_instr_type_alu)
          continue;

@@ -138,7 +138,7 @@ nir_lower_two_sided_color_block(nir_block *block, void *void_state)
    lower_2side_state *state = void_state;
    nir_builder *b = &state->b;

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;

@@ -160,7 +160,7 @@ lower_var_copies_impl(nir_function_impl *impl)
    void *mem_ctx = ralloc_parent(impl);

    nir_foreach_block(block, impl) {
-      nir_foreach_instr_safe(block, instr) {
+      nir_foreach_instr_safe(instr, block) {
         if (instr->type != nir_instr_type_intrinsic)
            continue;

@@ -407,7 +407,7 @@ static bool
 register_variable_uses_block(nir_block *block,
                              struct lower_variables_state *state)
 {
-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
         continue;
@@ -484,7 +484,7 @@ rename_variables_block(nir_block *block, struct lower_variables_state *state)
    nir_builder b;
    nir_builder_init(&b, state->impl);

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;

@@ -215,7 +215,7 @@ lower_vec_to_movs_block(nir_block *block, nir_function_impl *impl)
    bool progress = false;
    nir_shader *shader = impl->function->shader;

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_alu)
          continue;

@@ -64,7 +64,7 @@ ssa_def_dominates_instr(nir_ssa_def *def, nir_instr *instr)
 static bool
 move_vec_src_uses_to_dest_block(nir_block *block)
 {
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       if (instr->type != nir_instr_type_alu)
          continue;

@@ -38,7 +38,7 @@ normalize_cubemap_coords_block(nir_block *block, nir_builder *b)
 {
    bool progress = false;

-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       if (instr->type != nir_instr_type_tex)
          continue;

@@ -177,7 +177,7 @@ constant_fold_block(nir_block *block, void *mem_ctx)
 {
    bool progress = false;

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       switch (instr->type) {
       case nir_instr_type_alu:
          progress |= constant_fold_alu_instr(nir_instr_as_alu(instr), mem_ctx);

@@ -246,7 +246,7 @@ nir_copy_prop_impl(nir_function_impl *impl)
    bool progress = false;

    nir_foreach_block(block, impl) {
-      nir_foreach_instr(block, instr) {
+      nir_foreach_instr(instr, block) {
         if (copy_prop_instr(instr))
            progress = true;
      }

@@ -43,7 +43,7 @@ cse_block(nir_block *block, struct set *instr_set)
 {
    bool progress = false;

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (nir_instr_set_add_or_rewrite(instr_set, instr)) {
          progress = true;
          nir_instr_remove(instr);
@@ -55,7 +55,7 @@ cse_block(nir_block *block, struct set *instr_set)
       progress |= cse_block(child, instr_set);
    }

-   nir_foreach_instr(block, instr)
+   nir_foreach_instr(instr, block)
       nir_instr_set_remove(instr_set, instr);

    return progress;

@@ -115,7 +115,7 @@ init_instr(nir_instr *instr, struct exec_list *worklist)
 static bool
 init_block(nir_block *block, struct exec_list *worklist)
 {
-   nir_foreach_instr(block, instr)
+   nir_foreach_instr(instr, block)
       init_instr(instr, worklist);

    nir_if *following_if = nir_block_get_following_if(block);
@@ -148,7 +148,7 @@ nir_opt_dce_impl(nir_function_impl *impl)
    bool progress = false;

    nir_foreach_block(block, impl) {
-      nir_foreach_instr_safe(block, instr) {
+      nir_foreach_instr_safe(instr, block) {
         if (!instr->pass_flags) {
            nir_instr_remove(instr);
            progress = true;

@@ -91,7 +91,7 @@ opt_constant_if(nir_if *if_stmt, bool condition)
       nir_cf_node_as_block(condition ? nir_if_last_then_node(if_stmt)
                                      : nir_if_last_else_node(if_stmt));

-   nir_foreach_instr_safe(after, instr) {
+   nir_foreach_instr_safe(instr, after) {
       if (instr->type != nir_instr_type_phi)
          break;

@@ -138,7 +138,7 @@ static bool
 cf_node_has_side_effects(nir_cf_node *node)
 {
    nir_foreach_block_in_cf_node(block, node) {
-      nir_foreach_instr(block, instr) {
+      nir_foreach_instr(instr, block) {
         if (instr->type == nir_instr_type_call)
            return true;

@@ -207,7 +207,7 @@ loop_is_dead(nir_loop *loop)
                            nir_metadata_dominance);

    for (nir_block *cur = after->imm_dom; cur != before; cur = cur->imm_dom) {
-      nir_foreach_instr(cur, instr) {
+      nir_foreach_instr(instr, cur) {
         if (!nir_foreach_ssa_def(instr, def_not_live_out, after))
            return false;
      }

@@ -107,7 +107,7 @@ gcm_build_block_info(struct exec_list *cf_list, struct gcm_state *state,
 static bool
 gcm_pin_instructions_block(nir_block *block, struct gcm_state *state)
 {
-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       switch (instr->type) {
       case nir_instr_type_alu:
         switch (nir_instr_as_alu(instr)->op) {

@@ -50,7 +50,7 @@
 static bool
 block_check_for_allowed_instrs(nir_block *block)
 {
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       switch (instr->type) {
       case nir_instr_type_intrinsic: {
          nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
@@ -171,19 +171,19 @@ nir_opt_peephole_select_block(nir_block *block, void *mem_ctx)
    * block before. We have already guaranteed that this is safe by
    * calling block_check_for_allowed_instrs()
    */
-   nir_foreach_instr_safe(then_block, instr) {
+   nir_foreach_instr_safe(instr, then_block) {
      exec_node_remove(&instr->node);
      instr->block = prev_block;
      exec_list_push_tail(&prev_block->instr_list, &instr->node);
   }

-   nir_foreach_instr_safe(else_block, instr) {
+   nir_foreach_instr_safe(instr, else_block) {
      exec_node_remove(&instr->node);
      instr->block = prev_block;
      exec_list_push_tail(&prev_block->instr_list, &instr->node);
   }

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
      if (instr->type != nir_instr_type_phi)
         break;

@@ -47,7 +47,7 @@ remove_phis_block(nir_block *block)
 {
    bool progress = false;

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_phi)
          break;

@@ -79,7 +79,7 @@ nir_opt_undef(nir_shader *shader)
    nir_foreach_function(shader, function) {
       if (function->impl) {
          nir_foreach_block(block, function->impl) {
-            nir_foreach_instr_safe(block, instr) {
+            nir_foreach_instr_safe(instr, block) {
               if (instr->type == nir_instr_type_alu)
                  if (opt_undef_alu(nir_instr_as_alu(instr)))
                     progress = true;

@@ -908,7 +908,7 @@ print_block(nir_block *block, print_state *state, unsigned tabs)

    free(preds);

-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       print_instr(instr, state, tabs);
       fprintf(fp, "\n");
    }

@@ -71,7 +71,7 @@ add_var_use_shader(nir_shader *shader, struct set *live)
    nir_foreach_function(shader, function) {
       if (function->impl) {
          nir_foreach_block(block, function->impl) {
-            nir_foreach_instr(block, instr) {
+            nir_foreach_instr(instr, block) {
               switch(instr->type) {
               case nir_instr_type_intrinsic:
                  add_var_use_intrinsic(nir_instr_as_intrinsic(instr), live);

@@ -114,7 +114,7 @@ nir_repair_ssa_impl(nir_function_impl *impl)
                                nir_metadata_dominance);

    nir_foreach_block(block, impl) {
-      nir_foreach_instr_safe(block, instr) {
+      nir_foreach_instr_safe(instr, block) {
         nir_foreach_ssa_def(instr, repair_ssa_def, &state);
      }
   }

@@ -208,7 +208,7 @@ split_var_copy_instr(nir_intrinsic_instr *old_copy,
 static bool
 split_var_copies_block(nir_block *block, struct split_var_copies_state *state)
 {
-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;

@@ -63,7 +63,7 @@ sweep_block(nir_shader *nir, nir_block *block)
 {
    ralloc_steal(nir, block);

-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       ralloc_steal(nir, instr);

       nir_foreach_src(instr, sweep_src_indirect, nir);

@@ -381,7 +381,7 @@ rewrite_instr_forward(nir_instr *instr, rewrite_state *state)
 static void
 rewrite_phi_sources(nir_block *block, nir_block *pred, rewrite_state *state)
 {
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       if (instr->type != nir_instr_type_phi)
          break;

@@ -434,7 +434,7 @@ rewrite_block(nir_block *block, rewrite_state *state)
    * what we want because those instructions (vector gather, conditional
    * select) will already be in SSA form.
    */
-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
      rewrite_instr_forward(instr, state);
   }

@@ -455,7 +455,7 @@ rewrite_block(nir_block *block, rewrite_state *state)
    for (unsigned i = 0; i < block->num_dom_children; i++)
       rewrite_block(block->dom_children[i], state);

-   nir_foreach_instr_reverse(block, instr) {
+   nir_foreach_instr_reverse(instr, block) {
      rewrite_instr_backwards(instr, state);
   }
 }

@@ -606,7 +606,7 @@ validate_phi_src(nir_phi_instr *instr, nir_block *pred, validate_state *state)
 static void
 validate_phi_srcs(nir_block *block, nir_block *succ, validate_state *state)
 {
-   nir_foreach_instr(succ, instr) {
+   nir_foreach_instr(instr, succ) {
       if (instr->type != nir_instr_type_phi)
          break;

@@ -624,7 +624,7 @@ validate_block(nir_block *block, validate_state *state)
    state->block = block;

    exec_list_validate(&block->instr_list);
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       if (instr->type == nir_instr_type_phi) {
         assert(instr == nir_block_first_instr(block) ||
                nir_instr_prev(instr)->type == nir_instr_type_phi);
@@ -1017,7 +1017,7 @@ validate_function_impl(nir_function_impl *impl, validate_state *state)
    }

    nir_foreach_block(block, impl) {
-      nir_foreach_instr(block, instr)
+      nir_foreach_instr(instr, block)
         nir_foreach_ssa_def(instr, postvalidate_ssa_def, state);
   }
 }

@@ -1811,7 +1811,7 @@ emit_block(struct ir3_compile *ctx, nir_block *nblock)
    _mesa_hash_table_destroy(ctx->addr_ht, NULL);
    ctx->addr_ht = NULL;

-   nir_foreach_instr(nblock, instr) {
+   nir_foreach_instr(instr, nblock) {
       emit_instr(ctx, instr);
       if (ctx->error)
          return;

@@ -96,7 +96,7 @@ valid_dest(nir_block *block, nir_dest *dest)
 static bool
 block_check_for_allowed_instrs(nir_block *block)
 {
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       switch (instr->type) {
       case nir_instr_type_intrinsic: {
          nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
@@ -165,7 +165,7 @@ static void
 flatten_block(nir_builder *bld, nir_block *if_block, nir_block *prev_block,
               nir_ssa_def *condition, bool invert)
 {
-   nir_foreach_instr_safe(if_block, instr) {
+   nir_foreach_instr_safe(instr, if_block) {
       if (instr->type == nir_instr_type_intrinsic) {
          nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
          if ((intr->intrinsic == nir_intrinsic_discard) ||
@@ -270,7 +270,7 @@ lower_if_else_block(nir_block *block, void *void_state)
    flatten_block(&state->b, else_block, prev_block,
                  if_stmt->condition.ssa, true);

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_phi)
          break;

@@ -678,7 +678,7 @@ vc4_nir_lower_blend_block(nir_block *block, void *state)
 {
    struct vc4_compile *c = state;

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;
       nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);

@@ -437,7 +437,7 @@ vc4_nir_lower_io_block(nir_block *block, void *arg)
    nir_builder b;
    nir_builder_init(&b, impl);

-   nir_foreach_instr_safe(block, instr)
+   nir_foreach_instr_safe(instr, block)
       vc4_nir_lower_io_instr(c, &b, instr);

    return true;

@@ -139,7 +139,7 @@ vc4_nir_lower_txf_ms_block(nir_block *block, void *arg)
    nir_builder b;
    nir_builder_init(&b, impl);

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type == nir_instr_type_tex) {
          vc4_nir_lower_txf_ms_instr(c, &b,
                                     nir_instr_as_tex(instr));

@@ -1694,7 +1694,7 @@ ntq_emit_instr(struct vc4_compile *c, nir_instr *instr)
 static void
 ntq_emit_block(struct vc4_compile *c, nir_block *block)
 {
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       ntq_emit_instr(c, instr);
    }
 }
@@ -1781,7 +1781,7 @@ static bool
 count_nir_instrs_in_block(nir_block *block, void *state)
 {
    int *count = (int *) state;
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       *count = *count + 1;
    }
    return true;

@@ -31,7 +31,7 @@ apply_dynamic_offsets_block(nir_block *block, nir_builder *b,
 {
    struct anv_descriptor_set_layout *set_layout;

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;

@@ -54,7 +54,7 @@ static void
 get_used_bindings_block(nir_block *block,
                         struct apply_pipeline_layout_state *state)
 {
-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       switch (instr->type) {
       case nir_instr_type_intrinsic: {
          nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
@@ -212,7 +212,7 @@ static void
 apply_pipeline_layout_block(nir_block *block,
                             struct apply_pipeline_layout_state *state)
 {
-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       switch (instr->type) {
       case nir_instr_type_intrinsic: {
          nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);

@@ -31,7 +31,7 @@ anv_nir_lower_push_constants(nir_shader *shader)
         continue;

      nir_foreach_block(block, function->impl) {
-         nir_foreach_instr(block, instr) {
+         nir_foreach_instr(instr, block) {
            if (instr->type != nir_instr_type_intrinsic)
               continue;

@@ -185,7 +185,7 @@ emit_system_values_block(nir_block *block, fs_visitor *v)
 {
    fs_reg *reg;

-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;
@@ -432,7 +432,7 @@ fs_visitor::nir_emit_loop(nir_loop *loop)
 void
 fs_visitor::nir_emit_block(nir_block *block)
 {
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       nir_emit_instr(instr);
    }
 }

@@ -59,7 +59,7 @@ static bool
 add_const_offset_to_base_block(nir_block *block, nir_builder *b,
                                nir_variable_mode mode)
 {
-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;
@@ -98,7 +98,7 @@ add_const_offset_to_base(nir_shader *nir, nir_variable_mode mode)
 static bool
 remap_vs_attrs(nir_block *block, GLbitfield64 inputs_read)
 {
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;
@@ -122,7 +122,7 @@ remap_vs_attrs(nir_block *block, GLbitfield64 inputs_read)
 static bool
 remap_inputs_with_vue_map(nir_block *block, const struct brw_vue_map *vue_map)
 {
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;
@@ -142,7 +142,7 @@ static bool
 remap_patch_urb_offsets(nir_block *block, nir_builder *b,
                         const struct brw_vue_map *vue_map)
 {
-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;

@@ -86,7 +86,7 @@ src_mark_needs_resolve(nir_src *src, void *void_state)
 static bool
 analyze_boolean_resolves_block(nir_block *block)
 {
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       switch (instr->type) {
       case nir_instr_type_alu: {
          /* For ALU instructions, the resolve status is handled in a

@@ -43,7 +43,7 @@ apply_attr_wa_block(nir_block *block, struct attr_wa_state *state)
 {
    nir_builder *b = &state->builder;

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;

@@ -171,7 +171,7 @@ brw_nir_opt_peephole_ffma_block(nir_block *block, void *mem_ctx)
 {
    bool progress = false;

-   nir_foreach_instr_safe(block, instr) {
+   nir_foreach_instr_safe(instr, block) {
       if (instr->type != nir_instr_type_alu)
          continue;

@@ -100,7 +100,7 @@ vec4_visitor::nir_setup_system_value_intrinsic(nir_intrinsic_instr *instr)
 static bool
 setup_system_values_block(nir_block *block, vec4_visitor *v)
 {
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       if (instr->type != nir_instr_type_intrinsic)
          continue;
@@ -213,7 +213,7 @@ vec4_visitor::nir_emit_loop(nir_loop *loop)
 void
 vec4_visitor::nir_emit_block(nir_block *block)
 {
-   nir_foreach_instr(block, instr) {
+   nir_foreach_instr(instr, block) {
       nir_emit_instr(instr);
    }
 }