agx: Introduce agx_foreach_ssa_{src,dest} macros

These are convenient iterators especially in the register allocator.

Signed-off-by: Alyssa Rosenzweig <alyssa@rosenzweig.io>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/19590>
This commit is contained in:
Alyssa Rosenzweig
2022-10-08 21:02:03 -04:00
committed by Marge Bot
parent 4971870441
commit 4cc2427ad6
4 changed files with 39 additions and 44 deletions

View File

@@ -549,6 +549,14 @@ agx_start_block(agx_context *ctx)
/* Iterate v over every destination slot of the instruction (0..nr_dests-1). */
#define agx_foreach_dest(ins, v) \
for (unsigned v = 0; v < ins->nr_dests; ++v)
/* Like agx_foreach_src, but visits only SSA sources (AGX_INDEX_NORMAL),
 * skipping immediates, registers, and null operands. Convenient for passes
 * (liveness, RA) that only care about SSA values.
 */
#define agx_foreach_ssa_src(ins, v) \
agx_foreach_src(ins, v) \
if (ins->src[v].type == AGX_INDEX_NORMAL)
/* Like agx_foreach_dest, but visits only SSA destinations (AGX_INDEX_NORMAL). */
#define agx_foreach_ssa_dest(ins, v) \
agx_foreach_dest(ins, v) \
if (ins->dest[v].type == AGX_INDEX_NORMAL)
/* Phis only come at the start so we stop as soon as we hit a non-phi */
#define agx_foreach_phi_in_block(block, v) \
agx_foreach_instr_in_block(block, v) \

View File

@@ -36,19 +36,16 @@
/* Update the live-out set `live` backwards across instruction I: SSA
 * destinations are killed (cleared), then SSA sources are made live.
 * As a side effect, each SSA source's `kill` flag is set when this
 * instruction is the (last) use that kills the value.
 *
 * NOTE(review): the scraped diff had lost its +/- markers, interleaving the
 * pre- and post-commit lines; this is the coherent post-commit version
 * implied by the hunk (using the new agx_foreach_ssa_{src,dest} iterators).
 */
void
agx_liveness_ins_update(BITSET_WORD *live, agx_instr *I)
{
   agx_foreach_ssa_dest(I, d)
      BITSET_CLEAR(live, I->dest[d].value);

   agx_foreach_ssa_src(I, s) {
      /* If the source is not live after this instruction, but becomes live
       * at this instruction, this is the use that kills the source
       */
      I->src[s].kill = !BITSET_TEST(live, I->src[s].value);
      BITSET_SET(live, I->src[s].value);
   }
}
/* Globally, liveness analysis uses a fixed-point algorithm based on a

View File

@@ -94,11 +94,10 @@ agx_compose_float_src(agx_index to, agx_index from)
static void
agx_optimizer_fmov(agx_instr **defs, agx_instr *ins)
{
agx_foreach_src(ins, s) {
agx_foreach_ssa_src(ins, s) {
agx_index src = ins->src[s];
if (src.type != AGX_INDEX_NORMAL) continue;
agx_instr *def = defs[src.value];
if (def == NULL) continue; /* happens for phis in loops */
if (!agx_is_fmov(def)) continue;
if (def->saturate) continue;
@@ -153,11 +152,10 @@ agx_optimizer_fmov_rev(agx_instr *I, agx_instr *use)
static void
agx_optimizer_copyprop(agx_instr **defs, agx_instr *I)
{
agx_foreach_src(I, s) {
agx_foreach_ssa_src(I, s) {
agx_index src = I->src[s];
if (src.type != AGX_INDEX_NORMAL) continue;
agx_instr *def = defs[src.value];
if (def == NULL) continue; /* happens for phis in loops */
if (def->op != AGX_OPCODE_MOV) continue;
@@ -204,8 +202,7 @@ agx_optimizer_forward(agx_context *ctx)
agx_foreach_instr_global(ctx, I) {
struct agx_opcode_info info = agx_opcodes_info[I->op];
agx_foreach_dest(I, d) {
if (I->dest[d].type == AGX_INDEX_NORMAL)
agx_foreach_ssa_dest(I, d) {
defs[I->dest[d].value] = I;
}

View File

@@ -70,7 +70,7 @@ agx_split_width(const agx_instr *I)
enum agx_size width = ~0;
agx_foreach_dest(I, d) {
if (agx_is_null(I->dest[d]))
if (I->dest[d].type == AGX_INDEX_NULL)
continue;
else if (width != ~0)
assert(width == I->dest[d].size);
@@ -188,8 +188,8 @@ agx_ra_assign_local(struct ra_ctx *rctx)
}
/* First, free killed sources */
agx_foreach_src(I, s) {
if (I->src[s].type == AGX_INDEX_NORMAL && I->src[s].kill) {
agx_foreach_ssa_src(I, s) {
if (I->src[s].kill) {
unsigned reg = ssa_to_reg[I->src[s].value];
unsigned count = ncomps[I->src[s].value];
@@ -200,8 +200,7 @@ agx_ra_assign_local(struct ra_ctx *rctx)
/* Next, assign destinations one at a time. This is always legal
* because of the SSA form.
*/
agx_foreach_dest(I, d) {
if (I->dest[d].type == AGX_INDEX_NORMAL) {
agx_foreach_ssa_dest(I, d) {
unsigned count = agx_write_registers(I, d);
unsigned align = agx_size_align_16(I->dest[d].size);
@@ -209,7 +208,6 @@ agx_ra_assign_local(struct ra_ctx *rctx)
find_regs(used_regs, count, align, rctx->bound));
}
}
}
STATIC_ASSERT(sizeof(block->regs_out) == sizeof(used_regs));
memcpy(block->regs_out, used_regs, sizeof(used_regs));
@@ -302,9 +300,7 @@ agx_ra(agx_context *ctx)
BITSET_WORD *visited = calloc(BITSET_WORDS(ctx->alloc), sizeof(BITSET_WORD));
agx_foreach_instr_global(ctx, I) {
agx_foreach_dest(I, d) {
if (I->dest[d].type != AGX_INDEX_NORMAL) continue;
agx_foreach_ssa_dest(I, d) {
unsigned v = I->dest[d].value;
assert(ncomps[v] == 0 && "broken SSA");
ncomps[v] = agx_write_registers(I, d);
@@ -331,20 +327,16 @@ agx_ra(agx_context *ctx)
}
agx_foreach_instr_global(ctx, ins) {
agx_foreach_src(ins, s) {
if (ins->src[s].type == AGX_INDEX_NORMAL) {
agx_foreach_ssa_src(ins, s) {
unsigned v = ssa_to_reg[ins->src[s].value];
ins->src[s] = agx_replace_index(ins->src[s], agx_register(v, ins->src[s].size));
}
}
agx_foreach_dest(ins, d) {
if (ins->dest[d].type == AGX_INDEX_NORMAL) {
agx_foreach_ssa_dest(ins, d) {
unsigned v = ssa_to_reg[ins->dest[d].value];
ins->dest[d] = agx_replace_index(ins->dest[d], agx_register(v, ins->dest[d].size));
}
}
}
agx_foreach_instr_global_safe(ctx, ins) {
/* Lower away RA pseudo-instructions */
@@ -386,7 +378,8 @@ agx_ra(agx_context *ctx)
/* Move the sources */
agx_foreach_dest(ins, i) {
if (agx_is_null(ins->dest[i])) continue;
if (ins->dest[i].type != AGX_INDEX_REGISTER)
continue;
copies[n++] = (struct agx_copy) {
.dest = agx_index_to_reg(ssa_to_reg, ins->dest[i]),