anv/nir: Work with the new vulkan_resource_index intrinsic

This commit is contained in:
Jason Ekstrand 2015-10-27 13:42:51 -07:00
parent 3d44b3aaa6
commit a6be53223e
2 changed files with 39 additions and 72 deletions

View file

@@ -50,22 +50,27 @@ apply_dynamic_offsets_block(nir_block *block, void *void_state)
unsigned block_idx_src;
switch (intrin->intrinsic) {
case nir_intrinsic_load_ubo_vk:
case nir_intrinsic_load_ubo_vk_indirect:
case nir_intrinsic_load_ssbo_vk:
case nir_intrinsic_load_ssbo_vk_indirect:
case nir_intrinsic_load_ubo:
case nir_intrinsic_load_ubo_indirect:
case nir_intrinsic_load_ssbo:
case nir_intrinsic_load_ssbo_indirect:
block_idx_src = 0;
break;
case nir_intrinsic_store_ssbo_vk:
case nir_intrinsic_store_ssbo_vk_indirect:
case nir_intrinsic_store_ssbo:
case nir_intrinsic_store_ssbo_indirect:
block_idx_src = 1;
break;
default:
continue; /* the loop */
}
unsigned set = intrin->const_index[0];
unsigned binding = intrin->const_index[1];
nir_instr *res_instr = intrin->src[block_idx_src].ssa->parent_instr;
assert(res_instr->type == nir_instr_type_intrinsic);
nir_intrinsic_instr *res_intrin = nir_instr_as_intrinsic(res_instr);
assert(res_intrin->intrinsic == nir_intrinsic_vulkan_resource_index);
unsigned set = res_intrin->const_index[0];
unsigned binding = res_intrin->const_index[1];
set_layout = state->layout->set[set].layout;
if (set_layout->binding[binding].dynamic_offset_index < 0)
@@ -75,11 +80,11 @@ apply_dynamic_offsets_block(nir_block *block, void *void_state)
int indirect_src;
switch (intrin->intrinsic) {
case nir_intrinsic_load_ubo_vk_indirect:
case nir_intrinsic_load_ssbo_vk_indirect:
case nir_intrinsic_load_ubo_indirect:
case nir_intrinsic_load_ssbo_indirect:
indirect_src = 1;
break;
case nir_intrinsic_store_ssbo_vk_indirect:
case nir_intrinsic_store_ssbo_indirect:
indirect_src = 2;
break;
default:
@@ -92,7 +97,7 @@ apply_dynamic_offsets_block(nir_block *block, void *void_state)
set_layout->binding[binding].dynamic_offset_index;
nir_const_value *const_arr_idx =
nir_src_as_const_value(intrin->src[block_idx_src]);
nir_src_as_const_value(res_intrin->src[0]);
nir_intrinsic_op offset_load_op;
if (const_arr_idx)
@@ -109,7 +114,8 @@ apply_dynamic_offsets_block(nir_block *block, void *void_state)
offset_load->const_index[1] = const_arr_idx->u[0];
} else {
offset_load->const_index[1] = 0;
nir_src_copy(&offset_load->src[0], &intrin->src[0], &intrin->instr);
nir_src_copy(&offset_load->src[0], &res_intrin->src[0],
&intrin->instr);
}
nir_ssa_dest_init(&offset_load->instr, &offset_load->dest, 1, NULL);
@@ -130,14 +136,14 @@ apply_dynamic_offsets_block(nir_block *block, void *void_state)
nir_intrinsic_op indirect_op;
switch (intrin->intrinsic) {
case nir_intrinsic_load_ubo_vk:
indirect_op = nir_intrinsic_load_ubo_vk_indirect;
case nir_intrinsic_load_ubo:
indirect_op = nir_intrinsic_load_ubo_indirect;
break;
case nir_intrinsic_load_ssbo_vk:
indirect_op = nir_intrinsic_load_ssbo_vk_indirect;
case nir_intrinsic_load_ssbo:
indirect_op = nir_intrinsic_load_ssbo_indirect;
break;
case nir_intrinsic_store_ssbo_vk:
indirect_op = nir_intrinsic_store_ssbo_vk_indirect;
case nir_intrinsic_store_ssbo:
indirect_op = nir_intrinsic_store_ssbo_indirect;
break;
default:
unreachable("Invalid direct load/store intrinsic");

View file

@@ -34,27 +34,6 @@ struct apply_pipeline_layout_state {
bool progress;
};
/* Translate a Vulkan-specific *_vk load/store intrinsic to its core NIR
 * counterpart.  Only the six UBO/SSBO access intrinsics (direct and
 * indirect forms) are valid inputs; any other opcode is a caller bug.
 */
static nir_intrinsic_op
lowered_op(nir_intrinsic_op op)
{
   if (op == nir_intrinsic_load_ubo_vk)
      return nir_intrinsic_load_ubo;
   if (op == nir_intrinsic_load_ubo_vk_indirect)
      return nir_intrinsic_load_ubo_indirect;
   if (op == nir_intrinsic_load_ssbo_vk)
      return nir_intrinsic_load_ssbo;
   if (op == nir_intrinsic_load_ssbo_vk_indirect)
      return nir_intrinsic_load_ssbo_indirect;
   if (op == nir_intrinsic_store_ssbo_vk)
      return nir_intrinsic_store_ssbo;
   if (op == nir_intrinsic_store_ssbo_vk_indirect)
      return nir_intrinsic_store_ssbo_indirect;

   unreachable("Invalid intrinsic for lowering");
}
static uint32_t
get_surface_index(unsigned set, unsigned binding,
struct apply_pipeline_layout_state *state)
@@ -76,28 +55,12 @@ get_surface_index(unsigned set, unsigned binding,
return surface_index;
}
static bool
try_lower_intrinsic(nir_intrinsic_instr *intrin,
struct apply_pipeline_layout_state *state)
static void
lower_res_index_intrinsic(nir_intrinsic_instr *intrin,
struct apply_pipeline_layout_state *state)
{
nir_builder *b = &state->builder;
int block_idx_src;
switch (intrin->intrinsic) {
case nir_intrinsic_load_ubo_vk:
case nir_intrinsic_load_ubo_vk_indirect:
case nir_intrinsic_load_ssbo_vk:
case nir_intrinsic_load_ssbo_vk_indirect:
block_idx_src = 0;
break;
case nir_intrinsic_store_ssbo_vk:
case nir_intrinsic_store_ssbo_vk_indirect:
block_idx_src = 1;
break;
default:
return false;
}
b->cursor = nir_before_instr(&intrin->instr);
uint32_t set = intrin->const_index[0];
@@ -106,25 +69,19 @@ try_lower_intrinsic(nir_intrinsic_instr *intrin,
uint32_t surface_index = get_surface_index(set, binding, state);
nir_const_value *const_block_idx =
nir_src_as_const_value(intrin->src[block_idx_src]);
nir_src_as_const_value(intrin->src[0]);
nir_ssa_def *block_index;
if (const_block_idx) {
block_index = nir_imm_int(b, surface_index + const_block_idx->u[0]);
} else {
block_index = nir_iadd(b, nir_imm_int(b, surface_index),
nir_ssa_for_src(b, intrin->src[block_idx_src], 1));
nir_ssa_for_src(b, intrin->src[0], 1));
}
nir_instr_rewrite_src(&intrin->instr, &intrin->src[block_idx_src],
nir_src_for_ssa(block_index));
intrin->intrinsic = lowered_op(intrin->intrinsic);
/* Shift the offset indices down */
intrin->const_index[0] = intrin->const_index[2];
intrin->const_index[1] = intrin->const_index[3];
return true;
assert(intrin->dest.is_ssa);
nir_ssa_def_rewrite_uses(&intrin->dest.ssa, nir_src_for_ssa(block_index));
nir_instr_remove(&intrin->instr);
}
static void
@@ -177,10 +134,14 @@ apply_pipeline_layout_block(nir_block *block, void *void_state)
nir_foreach_instr_safe(block, instr) {
switch (instr->type) {
case nir_instr_type_intrinsic:
if (try_lower_intrinsic(nir_instr_as_intrinsic(instr), state))
case nir_instr_type_intrinsic: {
nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
if (intrin->intrinsic == nir_intrinsic_vulkan_resource_index) {
lower_res_index_intrinsic(intrin, state);
state->progress = true;
}
break;
}
case nir_instr_type_tex:
lower_tex(nir_instr_as_tex(instr), state);
/* All texture instructions need lowering */