nir: Remove handling for non-scoped barriers

Nothing generates them, so this is all dead code.

Signed-off-by: Alyssa Rosenzweig <alyssa@rosenzweig.io>
Reviewed-by: Jesse Natalie <jenatali@microsoft.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/23191>
This commit is contained in:
Alyssa Rosenzweig 2023-06-06 16:51:54 -04:00 committed by Marge Bot
parent c7232be537
commit df51464cac
7 changed files with 0 additions and 132 deletions

View file

@@ -763,10 +763,6 @@ gather_intrinsic_info(nir_intrinsic_instr *instr, nir_shader *shader,
break;
case nir_intrinsic_control_barrier:
shader->info.uses_control_barrier = true;
break;
case nir_intrinsic_scoped_barrier:
shader->info.uses_control_barrier |=
nir_intrinsic_execution_scope(instr) != NIR_SCOPE_NONE;
@@ -775,16 +771,6 @@ gather_intrinsic_info(nir_intrinsic_instr *instr, nir_shader *shader,
nir_intrinsic_memory_scope(instr) != NIR_SCOPE_NONE;
break;
case nir_intrinsic_memory_barrier:
case nir_intrinsic_group_memory_barrier:
case nir_intrinsic_memory_barrier_atomic_counter:
case nir_intrinsic_memory_barrier_buffer:
case nir_intrinsic_memory_barrier_image:
case nir_intrinsic_memory_barrier_shared:
case nir_intrinsic_memory_barrier_tcs_patch:
shader->info.uses_memory_barrier = true;
break;
case nir_intrinsic_store_zs_agx:
shader->info.outputs_written |= BITFIELD64_BIT(FRAG_RESULT_DEPTH) |
BITFIELD64_BIT(FRAG_RESULT_STENCIL);

View file

@@ -55,17 +55,6 @@ lower_instr(nir_intrinsic_instr *instr, unsigned ssbo_offset, nir_builder *b, un
b->cursor = nir_before_instr(&instr->instr);
switch (instr->intrinsic) {
case nir_intrinsic_memory_barrier_atomic_counter:
/* Atomic counters are now SSBOs so memoryBarrierAtomicCounter() is now
* memoryBarrierBuffer().
*/
instr->intrinsic = nir_intrinsic_scoped_barrier;
nir_intrinsic_set_execution_scope(instr, NIR_SCOPE_NONE);
nir_intrinsic_set_memory_scope(instr, NIR_SCOPE_DEVICE);
nir_intrinsic_set_memory_semantics(instr, NIR_MEMORY_ACQ_REL);
nir_intrinsic_set_memory_modes(instr, nir_var_mem_ssbo);
return true;
case nir_intrinsic_atomic_counter_inc:
case nir_intrinsic_atomic_counter_add:
case nir_intrinsic_atomic_counter_pre_dec:

View file

@@ -314,28 +314,6 @@ combine_stores_block(struct combine_stores_state *state, nir_block *block)
}
break;
case nir_intrinsic_control_barrier:
case nir_intrinsic_group_memory_barrier:
case nir_intrinsic_memory_barrier:
combine_stores_with_modes(state, nir_var_shader_out |
nir_var_mem_ssbo |
nir_var_mem_shared |
nir_var_mem_global);
break;
case nir_intrinsic_memory_barrier_buffer:
combine_stores_with_modes(state, nir_var_mem_ssbo |
nir_var_mem_global);
break;
case nir_intrinsic_memory_barrier_shared:
combine_stores_with_modes(state, nir_var_mem_shared);
break;
case nir_intrinsic_memory_barrier_tcs_patch:
combine_stores_with_modes(state, nir_var_shader_out);
break;
case nir_intrinsic_scoped_barrier:
if (nir_intrinsic_memory_semantics(intrin) & NIR_MEMORY_RELEASE) {
combine_stores_with_modes(state,

View file

@@ -186,15 +186,6 @@ gather_vars_written(struct copy_prop_var_state *state,
nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
switch (intrin->intrinsic) {
case nir_intrinsic_control_barrier:
case nir_intrinsic_group_memory_barrier:
case nir_intrinsic_memory_barrier:
written->modes |= nir_var_shader_out |
nir_var_mem_ssbo |
nir_var_mem_shared |
nir_var_mem_global;
break;
case nir_intrinsic_scoped_barrier:
if (nir_intrinsic_memory_semantics(intrin) & NIR_MEMORY_ACQUIRE)
written->modes |= nir_intrinsic_memory_modes(intrin);
@@ -998,35 +989,6 @@ copy_prop_vars_block(struct copy_prop_var_state *state,
nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
switch (intrin->intrinsic) {
case nir_intrinsic_control_barrier:
case nir_intrinsic_memory_barrier:
if (debug) dump_instr(instr);
apply_barrier_for_modes(copies, nir_var_shader_out |
nir_var_mem_ssbo |
nir_var_mem_shared |
nir_var_mem_global);
break;
case nir_intrinsic_memory_barrier_buffer:
if (debug) dump_instr(instr);
apply_barrier_for_modes(copies, nir_var_mem_ssbo |
nir_var_mem_global);
break;
case nir_intrinsic_memory_barrier_shared:
if (debug) dump_instr(instr);
apply_barrier_for_modes(copies, nir_var_mem_shared);
break;
case nir_intrinsic_memory_barrier_tcs_patch:
if (debug) dump_instr(instr);
apply_barrier_for_modes(copies, nir_var_shader_out);
break;
case nir_intrinsic_scoped_barrier:
if (debug) dump_instr(instr);

View file

@@ -132,29 +132,6 @@ remove_dead_write_vars_local(void *mem_ctx, nir_shader *shader, nir_block *block
nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
switch (intrin->intrinsic) {
case nir_intrinsic_control_barrier:
case nir_intrinsic_group_memory_barrier:
case nir_intrinsic_memory_barrier: {
clear_unused_for_modes(&unused_writes, nir_var_shader_out |
nir_var_mem_ssbo |
nir_var_mem_shared |
nir_var_mem_global);
break;
}
case nir_intrinsic_memory_barrier_buffer:
clear_unused_for_modes(&unused_writes, nir_var_mem_ssbo |
nir_var_mem_global);
break;
case nir_intrinsic_memory_barrier_shared:
clear_unused_for_modes(&unused_writes, nir_var_mem_shared);
break;
case nir_intrinsic_memory_barrier_tcs_patch:
clear_unused_for_modes(&unused_writes, nir_var_shader_out);
break;
case nir_intrinsic_scoped_barrier: {
if (nir_intrinsic_memory_semantics(intrin) & NIR_MEMORY_RELEASE) {
clear_unused_for_modes(&unused_writes,

View file

@@ -1306,11 +1306,6 @@ handle_barrier(struct vectorize_ctx *ctx, bool *progress, nir_function_impl *imp
if (instr->type == nir_instr_type_intrinsic) {
nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
switch (intrin->intrinsic) {
case nir_intrinsic_group_memory_barrier:
case nir_intrinsic_memory_barrier:
modes = nir_var_mem_ssbo | nir_var_mem_shared | nir_var_mem_global |
nir_var_mem_task_payload;
break;
/* prevent speculative loads/stores */
case nir_intrinsic_discard_if:
case nir_intrinsic_discard:
@@ -1324,12 +1319,6 @@ handle_barrier(struct vectorize_ctx *ctx, bool *progress, nir_function_impl *imp
acquire = false;
modes = nir_var_all;
break;
case nir_intrinsic_memory_barrier_buffer:
modes = nir_var_mem_ssbo | nir_var_mem_global;
break;
case nir_intrinsic_memory_barrier_shared:
modes = nir_var_mem_shared | nir_var_mem_task_payload;
break;
case nir_intrinsic_scoped_barrier:
if (nir_intrinsic_memory_scope(intrin) == NIR_SCOPE_NONE)
break;

View file

@@ -403,19 +403,6 @@ nir_schedule_intrinsic_deps(nir_deps_state *state,
add_write_dep(state, &state->store_shared, n);
break;
case nir_intrinsic_control_barrier:
case nir_intrinsic_memory_barrier_shared:
case nir_intrinsic_group_memory_barrier:
/* A generic memory barrier can be emitted when multiple synchronization
* semantics are involved, including shared memory.
*/
case nir_intrinsic_memory_barrier:
add_write_dep(state, &state->store_shared, n);
/* Serialize against ssbos/atomics/etc. */
add_write_dep(state, &state->unknown_intrinsic, n);
break;
case nir_intrinsic_scoped_barrier: {
const nir_variable_mode modes = nir_intrinsic_memory_modes(instr);