panvk: Let the compiler handle texture queries on v9+
Some checks are pending
macOS-CI / macOS-CI (dri) (push) Waiting to run
macOS-CI / macOS-CI (xlib) (push) Waiting to run

The panfrost compiler is now able to handle texture queries on v9+, so we
no longer need to lower them ourselves. The lowering is only needed on
Bifrost, where we don't have the magic LD_PKA instruction.

Reviewed-by: Lorenzo Rossi <lorenzo.rossi@collabora.com>
Reviewed-by: Lars-Ivar Hesselberg Simonsen <lars-ivar.simonsen@arm.com>
Acked-by: Erik Faye-Lund <erik.faye-lund@collabora.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/41352>
This commit is contained in:
Faith Ekstrand 2026-04-17 19:24:22 -04:00 committed by Marge Bot
parent 8e6adcad7d
commit 593e3b3916

View file

@ -553,36 +553,18 @@ load_resource_deref_desc(nir_builder *b, nir_deref_instr *deref,
#endif
}
/* Build a boolean SSA value that is true when the descriptor referenced by
 * @deref is a null descriptor.
 *
 * NOTE(review): this relies on the first 16 bits of the hardware descriptor
 * encoding the descriptor type, with an all-zero type meaning "null" —
 * confirm against the descriptor packing used by load_resource_deref_desc.
 */
static nir_def *
is_nulldesc(nir_builder *b, nir_deref_instr *deref,
enum VkDescriptorType desc_type, const struct lower_desc_ctx *ctx)
{
/* Load one 16-bit component at byte offset 0: the descriptor header. */
nir_def *desc_header =
load_resource_deref_desc(b, deref, desc_type, 0, 1, 16, ctx);
/* If the first 16 bits are all zero (specifically the descriptor type),
 * this is a nulldescriptor, in which case we need to avoid the "add 1"
 * when loading the size from the descriptor. */
return nir_ieq_imm(b, desc_header, 0);
}
#if PAN_ARCH < 9
static nir_def *
load_tex_size(nir_builder *b, nir_deref_instr *deref, enum glsl_sampler_dim dim,
bool is_array, const struct lower_desc_ctx *ctx)
{
nir_def *loaded_size;
if (dim == GLSL_SAMPLER_DIM_BUF) {
#if PAN_ARCH >= 9
nir_def *size = load_resource_deref_desc(
b, deref, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 4, 1, 32, ctx);
nir_def *stride = load_resource_deref_desc(
b, deref, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 16, 1, 32, ctx);
loaded_size = nir_idiv(b, size, stride);
#else
assert(PAN_ARCH < 9);
nir_def *stride_size = load_resource_deref_desc(
b, deref, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 8, 2, 32, ctx);
loaded_size = nir_idiv(b, nir_channel(b, stride_size, 1),
nir_channel(b, stride_size, 0));
#endif
} else {
nir_def *tex_w_h = load_resource_deref_desc(
b, deref, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 4, 2, 16, ctx);
@ -604,12 +586,8 @@ load_tex_size(nir_builder *b, nir_deref_instr *deref, enum glsl_sampler_dim dim,
loaded_size = nir_iadd_imm(b, nir_u2u32(b, tex_sz), 1);
}
if (PAN_ARCH >= 9 && ctx->null_descriptor_support) {
nir_def *nulldesc =
is_nulldesc(b, deref, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, ctx);
return nir_bcsel(b, nulldesc, nir_u2u32(b, nir_imm_int(b, 0)),
loaded_size);
}
/* TODO/Bifrost: Null descriptors */
assert(PAN_ARCH < 9 && !ctx->null_descriptor_support);
return loaded_size;
}
@ -618,9 +596,6 @@ static nir_def *
load_img_size(nir_builder *b, nir_deref_instr *deref, enum glsl_sampler_dim dim,
bool is_array, const struct lower_desc_ctx *ctx)
{
if (PAN_ARCH >= 9)
return load_tex_size(b, deref, dim, is_array, ctx);
if (dim == GLSL_SAMPLER_DIM_BUF) {
nir_def *stride_size = load_resource_deref_desc(
b, deref, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 8, 2, 32, ctx);
@ -681,11 +656,8 @@ load_tex_levels(nir_builder *b, nir_deref_instr *deref,
nir_def *lod_count = nir_iand_imm(b, nir_ushr_imm(b, tex_word2, 16), 0x1f);
nir_def *loaded_levels = nir_iadd_imm(b, lod_count, 1);
if (PAN_ARCH >= 9 && ctx->null_descriptor_support) {
nir_def *nulldesc =
is_nulldesc(b, deref, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, ctx);
return nir_bcsel(b, nulldesc, nir_imm_int(b, 0), loaded_levels);
}
/* TODO/Bifrost: Null descriptors */
assert(PAN_ARCH < 9 && !ctx->null_descriptor_support);
return loaded_levels;
}
@ -702,11 +674,8 @@ load_tex_samples(nir_builder *b, nir_deref_instr *deref,
nir_def *sample_count = nir_iand_imm(b, nir_ushr_imm(b, tex_word3, 13), 0x7);
nir_def *loaded_samples = nir_ishl(b, nir_imm_int(b, 1), sample_count);
if (PAN_ARCH >= 9 && ctx->null_descriptor_support) {
nir_def *nulldesc =
is_nulldesc(b, deref, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, ctx);
return nir_bcsel(b, nulldesc, nir_imm_int(b, 0), loaded_samples);
}
/* TODO/Bifrost: Null descriptors */
assert(PAN_ARCH < 9 && !ctx->null_descriptor_support);
return loaded_samples;
}
@ -715,8 +684,7 @@ static nir_def *
load_img_samples(nir_builder *b, nir_deref_instr *deref,
enum glsl_sampler_dim dim, const struct lower_desc_ctx *ctx)
{
if (PAN_ARCH >= 9)
return load_tex_samples(b, deref, dim, ctx);
assert(PAN_ARCH < 9);
assert(dim != GLSL_SAMPLER_DIM_BUF);
@ -732,6 +700,7 @@ load_img_samples(nir_builder *b, nir_deref_instr *deref,
BITFIELD_MASK(3));
return nir_ishl(b, one, sample_count);
}
#endif /* PAN_ARCH < 9 */
static uint32_t
get_desc_array_stride(const struct panvk_descriptor_set_binding_layout *layout,
@ -764,6 +733,7 @@ lower_tex(nir_builder *b, nir_tex_instr *tex, const struct lower_desc_ctx *ctx)
b->cursor = nir_before_instr(&tex->instr);
#if PAN_ARCH < 9
if (tex->op == nir_texop_txs || tex->op == nir_texop_query_levels ||
tex->op == nir_texop_texture_samples) {
int tex_src_idx = nir_tex_instr_src_index(tex, nir_tex_src_texture_deref);
@ -793,6 +763,7 @@ lower_tex(nir_builder *b, nir_tex_instr *tex, const struct lower_desc_ctx *ctx)
nir_def_replace(&tex->def, res);
return true;
}
#endif
uint32_t plane = 0;
int sampler_src_idx =
@ -897,6 +868,7 @@ lower_img_intrinsic(nir_builder *b, nir_intrinsic_instr *intr,
b->cursor = nir_before_instr(&intr->instr);
nir_deref_instr *deref = nir_src_as_deref(intr->src[0]);
#if PAN_ARCH < 9
if (intr->intrinsic == nir_intrinsic_image_deref_size ||
intr->intrinsic == nir_intrinsic_image_deref_samples) {
const enum glsl_sampler_dim dim = nir_intrinsic_image_dim(intr);
@ -916,10 +888,12 @@ lower_img_intrinsic(nir_builder *b, nir_intrinsic_instr *intr,
}
nir_def_replace(&intr->def, res);
} else {
nir_rewrite_image_intrinsic(intr, get_img_index(b, deref, ctx),
nir_image_intrinsic_type_default);
return true;
}
#endif
nir_rewrite_image_intrinsic(intr, get_img_index(b, deref, ctx),
nir_image_intrinsic_type_default);
return true;
}