From 819bd2ea6223efe0128ae7ebeb71d1e8e9ce3973 Mon Sep 17 00:00:00 2001
From: Alyssa Rosenzweig
Date: Thu, 26 Sep 2024 08:27:03 -0400
Subject: [PATCH] agx: factor out insert_copy

commonize the scalarization logic.

Signed-off-by: Alyssa Rosenzweig
Part-of:
---
 src/asahi/compiler/agx_register_allocate.c | 50 +++++++++++-----------
 1 file changed, 25 insertions(+), 25 deletions(-)

diff --git a/src/asahi/compiler/agx_register_allocate.c b/src/asahi/compiler/agx_register_allocate.c
index d348bbbe208..51a927414cd 100644
--- a/src/asahi/compiler/agx_register_allocate.c
+++ b/src/asahi/compiler/agx_register_allocate.c
@@ -448,6 +448,29 @@ set_ssa_to_reg(struct ra_ctx *rctx, unsigned ssa, unsigned reg)
    }
 }
 
+/*
+ * Insert parallel copies to move an SSA variable `var` to a new register
+ * `new_reg`. This may require scalarizing.
+ */
+static void
+insert_copy(struct ra_ctx *rctx, struct util_dynarray *copies, unsigned new_reg,
+            unsigned var)
+{
+   enum agx_size size = rctx->sizes[var];
+   unsigned align = agx_size_align_16(size);
+
+   for (unsigned i = 0; i < rctx->ncomps[var]; i += align) {
+      struct agx_copy copy = {
+         .dest = new_reg + i,
+         .src = agx_register(rctx->ssa_to_reg[var] + i, size),
+      };
+
+      assert((copy.dest % align) == 0 && "new dest must be aligned");
+      assert((copy.src.value % align) == 0 && "src must be aligned");
+      util_dynarray_append(copies, struct agx_copy, copy);
+   }
+}
+
 static unsigned
 assign_regs_by_copying(struct ra_ctx *rctx, agx_index dest, const agx_instr *I,
                        struct util_dynarray *copies, BITSET_WORD *clobbered,
@@ -479,9 +502,6 @@ assign_regs_by_copying(struct ra_ctx *rctx, agx_index dest, const agx_instr *I,
       /* Pop it from the work list by swapping in the last element */
       blocked_vars[chosen_idx] = blocked_vars[--nr_blocked];
 
-      enum agx_size size = rctx->sizes[ssa];
-      unsigned align = agx_size_align_16(size);
-
       /* We need to shuffle some variables to make room. Look for a range of
        * the register file that is partially blocked.
        */
@@ -524,18 +544,7 @@ assign_regs_by_copying(struct ra_ctx *rctx, agx_index dest, const agx_instr *I,
        * variable. For those, copy the blocked variable to its new register.
        */
       if (ssa != dest.value) {
-         unsigned old_reg = rctx->ssa_to_reg[ssa];
-
-         for (unsigned i = 0; i < nr; i += align) {
-            struct agx_copy copy = {
-               .dest = new_reg + i,
-               .src = agx_register(old_reg + i, size),
-            };
-
-            assert((copy.dest % align) == 0 && "new dest must be aligned");
-            assert((copy.src.value % align) == 0 && "src must be aligned");
-            util_dynarray_append(copies, struct agx_copy, copy);
-         }
+         insert_copy(rctx, copies, new_reg, ssa);
       }
 
       /* Mark down the set of clobbered registers, so that killed sources may be
@@ -627,7 +636,6 @@ insert_copies_for_clobbered_killed(struct ra_ctx *rctx, unsigned reg,
 
    for (unsigned i = 0; i < nr_vars; ++i) {
       unsigned var = vars[i];
-      unsigned var_base = rctx->ssa_to_reg[var];
       unsigned var_count = rctx->ncomps[var];
       unsigned var_align = agx_size_align_16(rctx->sizes[var]);
 
@@ -635,15 +643,7 @@ insert_copies_for_clobbered_killed(struct ra_ctx *rctx, unsigned reg,
       assert((base % var_align) == 0 && "induction");
       assert((var_count % var_align) == 0 && "no partial variables");
 
-      for (unsigned j = 0; j < var_count; j += var_align) {
-         struct agx_copy copy = {
-            .dest = base + j,
-            .src = agx_register(var_base + j, rctx->sizes[var]),
-         };
-
-         util_dynarray_append(copies, struct agx_copy, copy);
-      }
-
+      insert_copy(rctx, copies, base, var);
       set_ssa_to_reg(rctx, var, base);
       base += var_count;
    }
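
For illustration, below is a minimal self-contained sketch of the scalarization
pattern the new insert_copy helper commonizes: a vector variable is moved one
element at a time, stepping by the element's alignment in 16-bit register
units. The enum values, the agx_size_align_16 behavior, and the example
registers are simplified stand-ins assumed for this sketch, not the real Mesa
definitions from agx_compiler.h.

#include <assert.h>
#include <stdio.h>

/* Simplified stand-in for the AGX element-size enum */
enum agx_size { AGX_SIZE_16 = 0, AGX_SIZE_32 = 1, AGX_SIZE_64 = 2 };

/* Alignment of one element in 16-bit register units: 1 for 16-bit,
 * 2 for 32-bit, 4 for 64-bit values. */
static unsigned
agx_size_align_16(enum agx_size size)
{
   return 1u << (unsigned)size;
}

int
main(void)
{
   /* Hypothetical example: a variable of 32-bit elements occupying 8
    * consecutive 16-bit units (a 4-component vector), moving from
    * register 16 to register 32. */
   enum agx_size size = AGX_SIZE_32;
   unsigned align = agx_size_align_16(size); /* 2 units per element */
   unsigned ncomps = 8;                      /* total size in 16-bit units */
   unsigned old_reg = 16, new_reg = 32;

   /* One parallel copy per element, mirroring insert_copy's loop */
   for (unsigned i = 0; i < ncomps; i += align) {
      assert(((new_reg + i) % align) == 0 && "dest must be aligned");
      assert(((old_reg + i) % align) == 0 && "src must be aligned");
      printf("copy r%u -> r%u (32-bit element)\n", old_reg + i, new_reg + i);
   }

   return 0;
}

Compiled and run, this prints four element copies (r16->r32, r18->r34,
r20->r36, r22->r38), showing how one vector move decomposes into aligned,
element-sized parallel copies that a later pass can sequence safely.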