From ca2a1340a2ef8bfc919579bfec48d1280f5adb71 Mon Sep 17 00:00:00 2001
From: Lionel Landwerlin
Date: Mon, 16 May 2022 16:23:02 +0300
Subject: [PATCH] nir/lower_shader_calls: avoid respilling values

Currently we do something like this:

   ssa_0 = ...
   ssa_1 = ...
   * spill ssa_0, ssa_1
   call1()
   * fill ssa_0, ssa_1
   ssa_2 = ...
   ssa_3 = ...
   * spill ssa_0, ssa_1, ssa_2, ssa_3
   call2()
   * fill ssa_0, ssa_1, ssa_2, ssa_3

If we assign the same position to ssa_0 & ssa_1 in the spilling stack,
then at call2() we know that those values are already present in memory
at the right location and we can avoid respilling them.

The result would be something like this:

   ssa_0 = ...
   ssa_1 = ...
   * spill ssa_0, ssa_1
   call1()
   * fill ssa_0, ssa_1
   ssa_2 = ...
   ssa_3 = ...
   * spill ssa_2, ssa_3
   call2()
   * fill ssa_0, ssa_1, ssa_2, ssa_3

Signed-off-by: Lionel Landwerlin
Reviewed-by: Konstantin Seurer
Part-of:
---
 src/compiler/nir/nir_lower_shader_calls.c | 42 +++++++++++++++++++++++
 1 file changed, 42 insertions(+)

diff --git a/src/compiler/nir/nir_lower_shader_calls.c b/src/compiler/nir/nir_lower_shader_calls.c
index 520b63e6986..582a6d4061c 100644
--- a/src/compiler/nir/nir_lower_shader_calls.c
+++ b/src/compiler/nir/nir_lower_shader_calls.c
@@ -1209,6 +1209,45 @@ nir_lower_stack_to_scratch(nir_shader *shader,
                                        &state);
 }
 
+static bool
+opt_remove_respills_instr(struct nir_builder *b, nir_instr *instr, void *data)
+{
+   if (instr->type != nir_instr_type_intrinsic)
+      return false;
+
+   nir_intrinsic_instr *store_intrin = nir_instr_as_intrinsic(instr);
+   if (store_intrin->intrinsic != nir_intrinsic_store_stack)
+      return false;
+
+   nir_instr *value_instr = store_intrin->src[0].ssa->parent_instr;
+   if (value_instr->type != nir_instr_type_intrinsic)
+      return false;
+
+   nir_intrinsic_instr *load_intrin = nir_instr_as_intrinsic(value_instr);
+   if (load_intrin->intrinsic != nir_intrinsic_load_stack)
+      return false;
+
+   if (nir_intrinsic_base(load_intrin) != nir_intrinsic_base(store_intrin))
+      return false;
+
+   nir_instr_remove(&store_intrin->instr);
+   return true;
+}
+
+/* After shader split, look at stack load/store operations. If we're loading
+ * and storing the same value at the same location, we can drop the store
+ * instruction.
+ */
+static bool
+nir_opt_remove_respills(nir_shader *shader)
+{
+   return nir_shader_instructions_pass(shader,
+                                       opt_remove_respills_instr,
+                                       nir_metadata_block_index |
+                                       nir_metadata_dominance,
+                                       NULL);
+}
+
 /** Lower shader call instructions to split shaders.
  *
  * Shader calls can be split into an initial shader and a series of "resume"
@@ -1296,6 +1335,9 @@ nir_lower_shader_calls(nir_shader *shader,
       nir_opt_if(resume_shaders[i], nir_opt_if_optimize_phi_true_false);
    }
 
+   for (unsigned i = 0; i < num_calls; i++)
+      NIR_PASS_V(resume_shaders[i], nir_opt_remove_respills);
+
    NIR_PASS_V(shader, nir_lower_stack_to_scratch, address_format);
    for (unsigned i = 0; i < num_calls; i++)
       NIR_PASS_V(resume_shaders[i], nir_lower_stack_to_scratch, address_format);
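
Note (illustration only, not part of the patch): the sketch below is a
self-contained toy model of the pattern the new pass removes, written in plain
C rather than NIR. The names `toy_instr`, `is_respill` and `remove_respills`
are hypothetical; they only mirror the shape of `opt_remove_respills_instr`
above, where a `store_stack` whose value comes straight from a `load_stack` of
the same base offset is redundant and can be dropped.

/* Toy model: an "instruction" either loads from a stack slot, stores a
 * previously produced value to a stack slot, or does something else.
 * A store whose value was just loaded from the same slot is a respill. */
#include <stdbool.h>
#include <stdio.h>

enum toy_op { TOY_LOAD_STACK, TOY_STORE_STACK, TOY_OTHER };

struct toy_instr {
   enum toy_op op;
   unsigned base;      /* stack slot offset */
   int value_src;      /* for stores: index of the value-producing instruction, or -1 */
   bool removed;
};

/* Mirrors the checks in opt_remove_respills_instr(): the store's source must
 * be a load from the same base offset for the store to be redundant. */
static bool
is_respill(const struct toy_instr *instrs, unsigned i)
{
   const struct toy_instr *store = &instrs[i];
   if (store->op != TOY_STORE_STACK || store->value_src < 0)
      return false;

   const struct toy_instr *value = &instrs[store->value_src];
   if (value->op != TOY_LOAD_STACK)
      return false;

   return value->base == store->base;
}

static unsigned
remove_respills(struct toy_instr *instrs, unsigned count)
{
   unsigned removed = 0;
   for (unsigned i = 0; i < count; i++) {
      if (is_respill(instrs, i)) {
         instrs[i].removed = true;
         removed++;
      }
   }
   return removed;
}

int
main(void)
{
   /* Fill ssa_0 from slot 16, then respill it to slot 16 before the next
    * call: that store is redundant.  Spilling the fresh value to slot 32
    * is kept. */
   struct toy_instr instrs[] = {
      { TOY_LOAD_STACK,  16, -1, false },  /* 0: fill ssa_0                */
      { TOY_OTHER,        0, -1, false },  /* 1: ssa_1 = ...               */
      { TOY_STORE_STACK, 16,  0, false },  /* 2: respill ssa_0 -> slot 16  */
      { TOY_STORE_STACK, 32,  1, false },  /* 3: spill ssa_1 -> slot 32    */
   };

   unsigned removed = remove_respills(instrs, 4);
   printf("removed %u redundant store(s)\n", removed);  /* prints 1 */
   return 0;
}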