zink: require nullDescriptor feature

this simplifies a huge amount of code and enables a lot of refactoring

Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/35722>
This commit is contained in:
Mike Blumenkrantz 2025-06-23 11:08:10 -04:00
parent bac51d2931
commit 0a2c395394
7 changed files with 30 additions and 178 deletions

View file

@@ -164,13 +164,8 @@ zink_context_destroy(struct pipe_context *pctx)
util_unreference_framebuffer_state(&ctx->fb_state);
util_framebuffer_init(pctx, NULL, ctx->fb_cbufs, &ctx->fb_zsbuf);
pipe_resource_reference(&ctx->dummy_vertex_buffer, NULL);
pipe_resource_reference(&ctx->dummy_xfb_buffer, NULL);
for (unsigned i = 0; i < ARRAY_SIZE(ctx->dummy_surface); i++)
pipe_surface_unref(&ctx->base, &ctx->dummy_surface[i]);
zink_buffer_view_reference(screen, &ctx->dummy_bufferview, NULL);
zink_descriptors_deinit_bindless(ctx);
struct zink_batch_state *bs = ctx->batch_states;
@@ -720,9 +715,7 @@ update_descriptor_state_ubo_lazy(struct zink_context *ctx, gl_shader_stage shade
}
#endif
} else {
bool have_null_descriptors = screen->info.rb2_feats.nullDescriptor;
VkBuffer null_buffer = zink_resource(ctx->dummy_vertex_buffer)->obj->buffer;
ctx->di.t.ubos[shader][slot].buffer = have_null_descriptors ? VK_NULL_HANDLE : null_buffer;
ctx->di.t.ubos[shader][slot].buffer = VK_NULL_HANDLE;
ctx->di.t.ubos[shader][slot].range = VK_WHOLE_SIZE;
}
return res;
@@ -745,16 +738,13 @@ update_descriptor_state_ssbo_db(struct zink_context *ctx, gl_shader_stage shader
ALWAYS_INLINE static struct zink_resource *
update_descriptor_state_ssbo_lazy(struct zink_context *ctx, gl_shader_stage shader, unsigned slot, struct zink_resource *res)
{
struct zink_screen *screen = zink_screen(ctx->base.screen);
ctx->di.t.ssbos[shader][slot].offset = ctx->ssbos[shader][slot].buffer_offset;
ctx->di.descriptor_res[ZINK_DESCRIPTOR_TYPE_SSBO][shader][slot] = res;
if (res) {
ctx->di.t.ssbos[shader][slot].buffer = res->obj->buffer;
ctx->di.t.ssbos[shader][slot].range = ctx->ssbos[shader][slot].buffer_size;
} else {
bool have_null_descriptors = screen->info.rb2_feats.nullDescriptor;
VkBuffer null_buffer = zink_resource(ctx->dummy_vertex_buffer)->obj->buffer;
ctx->di.t.ssbos[shader][slot].buffer = have_null_descriptors ? VK_NULL_HANDLE : null_buffer;
ctx->di.t.ssbos[shader][slot].buffer = VK_NULL_HANDLE;
ctx->di.t.ssbos[shader][slot].range = VK_WHOLE_SIZE;
}
return res;
@@ -764,7 +754,6 @@ ALWAYS_INLINE static struct zink_resource *
update_descriptor_state_sampler(struct zink_context *ctx, gl_shader_stage shader, unsigned slot, struct zink_resource *res)
{
struct zink_screen *screen = zink_screen(ctx->base.screen);
bool have_null_descriptors = screen->info.rb2_feats.nullDescriptor;
const enum zink_descriptor_type type = ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW;
ctx->di.descriptor_res[type][shader][slot] = res;
if (res) {
@@ -795,22 +784,13 @@ update_descriptor_state_sampler(struct zink_context *ctx, gl_shader_stage shader
}
}
} else {
if (likely(have_null_descriptors)) {
ctx->di.textures[shader][slot].imageView = VK_NULL_HANDLE;
ctx->di.textures[shader][slot].imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
if (zink_descriptor_mode == ZINK_DESCRIPTOR_MODE_DB) {
ctx->di.db.tbos[shader][slot].address = 0;
ctx->di.db.tbos[shader][slot].range = VK_WHOLE_SIZE;
} else {
ctx->di.t.tbos[shader][slot] = VK_NULL_HANDLE;
}
ctx->di.textures[shader][slot].imageView = VK_NULL_HANDLE;
ctx->di.textures[shader][slot].imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
if (zink_descriptor_mode == ZINK_DESCRIPTOR_MODE_DB) {
ctx->di.db.tbos[shader][slot].address = 0;
ctx->di.db.tbos[shader][slot].range = VK_WHOLE_SIZE;
} else {
assert(zink_descriptor_mode != ZINK_DESCRIPTOR_MODE_DB);
struct zink_surface *null_surface = zink_get_dummy_surface(ctx, 0);
struct zink_buffer_view *null_bufferview = ctx->dummy_bufferview;
ctx->di.textures[shader][slot].imageView = null_surface->image_view;
ctx->di.textures[shader][slot].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
ctx->di.t.tbos[shader][slot] = null_bufferview->buffer_view;
ctx->di.t.tbos[shader][slot] = VK_NULL_HANDLE;
}
}
return res;
@@ -827,7 +807,6 @@ ALWAYS_INLINE static struct zink_resource *
update_descriptor_state_image(struct zink_context *ctx, gl_shader_stage shader, unsigned slot, struct zink_resource *res)
{
struct zink_screen *screen = zink_screen(ctx->base.screen);
bool have_null_descriptors = screen->info.rb2_feats.nullDescriptor;
const enum zink_descriptor_type type = ZINK_DESCRIPTOR_TYPE_IMAGE;
ctx->di.descriptor_res[type][shader][slot] = res;
if (res) {
@@ -846,21 +825,12 @@ update_descriptor_state_image(struct zink_context *ctx, gl_shader_stage shader,
ctx->di.images[shader][slot].imageView = surface->image_view;
}
} else {
if (likely(have_null_descriptors)) {
memset(&ctx->di.images[shader][slot], 0, sizeof(ctx->di.images[shader][slot]));
if (zink_descriptor_mode == ZINK_DESCRIPTOR_MODE_DB) {
ctx->di.db.texel_images[shader][slot].address = 0;
ctx->di.db.texel_images[shader][slot].range = VK_WHOLE_SIZE;
} else {
ctx->di.t.texel_images[shader][slot] = VK_NULL_HANDLE;
}
memset(&ctx->di.images[shader][slot], 0, sizeof(ctx->di.images[shader][slot]));
if (zink_descriptor_mode == ZINK_DESCRIPTOR_MODE_DB) {
ctx->di.db.texel_images[shader][slot].address = 0;
ctx->di.db.texel_images[shader][slot].range = VK_WHOLE_SIZE;
} else {
assert(zink_descriptor_mode != ZINK_DESCRIPTOR_MODE_DB);
struct zink_surface *null_surface = zink_get_dummy_surface(ctx, 0);
struct zink_buffer_view *null_bufferview = ctx->dummy_bufferview;
ctx->di.images[shader][slot].imageView = null_surface->image_view;
ctx->di.images[shader][slot].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
ctx->di.t.texel_images[shader][slot] = null_bufferview->buffer_view;
ctx->di.t.texel_images[shader][slot] = VK_NULL_HANDLE;
}
}
return res;
@@ -2482,36 +2452,17 @@ rebind_bindless_bufferview(struct zink_context *ctx, struct zink_resource *res,
static void
zero_bindless_descriptor(struct zink_context *ctx, uint32_t handle, bool is_buffer, bool is_image)
{
if (likely(zink_screen(ctx->base.screen)->info.rb2_feats.nullDescriptor)) {
if (is_buffer) {
if (zink_descriptor_mode == ZINK_DESCRIPTOR_MODE_DB) {
ctx->di.bindless[is_image].db.buffer_infos[handle].address = 0;
ctx->di.bindless[is_image].db.buffer_infos[handle].range = 0;
} else {
VkBufferView *bv = &ctx->di.bindless[is_image].t.buffer_infos[handle];
*bv = VK_NULL_HANDLE;
}
if (is_buffer) {
if (zink_descriptor_mode == ZINK_DESCRIPTOR_MODE_DB) {
ctx->di.bindless[is_image].db.buffer_infos[handle].address = 0;
ctx->di.bindless[is_image].db.buffer_infos[handle].range = 0;
} else {
VkDescriptorImageInfo *ii = &ctx->di.bindless[is_image].img_infos[handle];
memset(ii, 0, sizeof(*ii));
VkBufferView *bv = &ctx->di.bindless[is_image].t.buffer_infos[handle];
*bv = VK_NULL_HANDLE;
}
} else {
if (is_buffer) {
if (zink_descriptor_mode == ZINK_DESCRIPTOR_MODE_DB) {
ctx->di.bindless[is_image].db.buffer_infos[handle].address = zink_resource(ctx->dummy_bufferview->pres)->obj->bda;
ctx->di.bindless[is_image].db.buffer_infos[handle].range = 1;
} else {
VkBufferView *bv = &ctx->di.bindless[is_image].t.buffer_infos[handle];
struct zink_buffer_view *null_bufferview = ctx->dummy_bufferview;
*bv = null_bufferview->buffer_view;
}
} else {
struct zink_surface *null_surface = zink_get_dummy_surface(ctx, 0);
VkDescriptorImageInfo *ii = &ctx->di.bindless[is_image].img_infos[handle];
ii->sampler = VK_NULL_HANDLE;
ii->imageView = null_surface->image_view;
ii->imageLayout = VK_IMAGE_LAYOUT_GENERAL;
}
VkDescriptorImageInfo *ii = &ctx->di.bindless[is_image].img_infos[handle];
memset(ii, 0, sizeof(*ii));
}
}
@@ -2845,9 +2796,7 @@ zink_update_fbfetch(struct zink_context *ctx)
return false;
zink_batch_no_rp(ctx);
ctx->di.fbfetch.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
ctx->di.fbfetch.imageView = zink_screen(ctx->base.screen)->info.rb2_feats.nullDescriptor ?
VK_NULL_HANDLE :
zink_get_dummy_surface(ctx, 0)->image_view;
ctx->di.fbfetch.imageView = VK_NULL_HANDLE;
ctx->invalidate_descriptor_state(ctx, MESA_SHADER_FRAGMENT, ZINK_DESCRIPTOR_TYPE_UBO, 0, 1);
return true;
}
@@ -2865,9 +2814,7 @@ zink_update_fbfetch(struct zink_context *ctx)
if (zink_get_fs_base_key(ctx)->fbfetch_ms != fbfetch_ms)
zink_set_fs_base_key(ctx)->fbfetch_ms = fbfetch_ms;
} else {
ctx->di.fbfetch.imageView = zink_screen(ctx->base.screen)->info.rb2_feats.nullDescriptor ?
VK_NULL_HANDLE :
zink_get_dummy_surface(ctx, 0)->image_view;
ctx->di.fbfetch.imageView = VK_NULL_HANDLE;
}
bool ret = false;
ctx->di.fbfetch.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
@@ -3409,8 +3356,7 @@ zink_prep_fb_attachment(struct zink_context *ctx, struct zink_surface *surf, uns
{
struct zink_resource *res;
if (!surf) {
surf = zink_get_dummy_surface(ctx, util_logbase2_ceil(ctx->fb_state.samples));
res = zink_resource(surf->base.texture);
return VK_NULL_HANDLE;
} else {
res = zink_resource(surf->base.texture);
zink_batch_resource_usage_set(ctx->bs, res, true, false);
@@ -5357,44 +5303,6 @@ zink_emit_string_marker(struct pipe_context *pctx,
free(temp);
}
struct pipe_surface *
zink_get_dummy_pipe_surface(struct zink_context *ctx, int samples_index)
{
unsigned size = calc_max_dummy_fbo_size(ctx);
bool needs_null_init = false;
if (ctx->dummy_surface[samples_index]) {
/* delete old surface if ETOOSMALL */
struct zink_resource *res = zink_resource(ctx->dummy_surface[samples_index]->texture);
if (res->base.b.width0 < size || res->base.b.height0 < size) {
pipe_surface_unref(&ctx->base, &ctx->dummy_surface[samples_index]);
needs_null_init = !samples_index && ctx->di.null_fbfetch_init;
if (!samples_index)
ctx->di.null_fbfetch_init = false;
}
}
if (!ctx->dummy_surface[samples_index]) {
ctx->dummy_surface[samples_index] = zink_surface_create_null(ctx, PIPE_TEXTURE_2D, size, size, BITFIELD_BIT(samples_index));
assert(ctx->dummy_surface[samples_index]);
/* This is possibly used with imageLoad which according to GL spec must return 0 */
if (!samples_index) {
union pipe_color_union color = {0};
struct pipe_box box;
u_box_2d(0, 0, size, size, &box);
ctx->base.clear_texture(&ctx->base, ctx->dummy_surface[samples_index]->texture, 0, &box, &color);
}
}
if (needs_null_init)
init_null_fbfetch(ctx);
return ctx->dummy_surface[samples_index];
}
struct zink_surface *
zink_get_dummy_surface(struct zink_context *ctx, int samples_index)
{
return zink_surface(zink_get_dummy_pipe_surface(ctx, samples_index));
}
static void
zink_tc_parse_dsa(void *state, struct tc_renderpass_info *info)
{
@@ -5611,21 +5519,12 @@ zink_context_create(struct pipe_screen *pscreen, void *priv, unsigned flags)
const uint32_t data[] = {0};
if (!is_copy_only) {
ctx->dummy_vertex_buffer = pipe_buffer_create(&screen->base,
PIPE_BIND_VERTEX_BUFFER | PIPE_BIND_SHADER_IMAGE, PIPE_USAGE_IMMUTABLE, sizeof(data));
if (!ctx->dummy_vertex_buffer)
goto fail;
ctx->dummy_xfb_buffer = pipe_buffer_create(&screen->base,
PIPE_BIND_STREAM_OUTPUT, PIPE_USAGE_IMMUTABLE, sizeof(data));
if (!ctx->dummy_xfb_buffer)
goto fail;
}
if (!is_copy_only) {
VkBufferViewCreateInfo bvci = create_bvci(ctx, zink_resource(ctx->dummy_vertex_buffer), PIPE_FORMAT_R8G8B8A8_UNORM, 0, sizeof(data));
ctx->dummy_bufferview = get_buffer_view(ctx, zink_resource(ctx->dummy_vertex_buffer), &bvci);
if (!ctx->dummy_bufferview)
goto fail;
if (!zink_descriptors_init(ctx))
goto fail;
}
@@ -5682,7 +5581,6 @@ zink_context_create(struct pipe_screen *pscreen, void *priv, unsigned flags)
else
ctx->invalidate_descriptor_state = zink_context_invalidate_descriptor_state;
if (!is_copy_only && !is_compute_only) {
pipe_buffer_write_nooverlap(&ctx->base, ctx->dummy_vertex_buffer, 0, sizeof(data), data);
pipe_buffer_write_nooverlap(&ctx->base, ctx->dummy_xfb_buffer, 0, sizeof(data), data);
if (screen->info.have_EXT_color_write_enable)
reapply_color_write(ctx);
@@ -5742,8 +5640,6 @@ zink_context_create(struct pipe_screen *pscreen, void *priv, unsigned flags)
}
ctx->gfx_pipeline_state.sample_mask = BITFIELD_MASK(32);
struct pipe_framebuffer_state fb = {0};
struct pipe_surface *psurf = zink_get_dummy_pipe_surface(ctx, 0);
fb.cbufs[0] = *psurf;
fb.nr_cbufs = 1;
fb.width = fb.height = 256;
ctx->base.set_framebuffer_state(&ctx->base, &fb);
@@ -5760,7 +5656,6 @@ zink_context_create(struct pipe_screen *pscreen, void *priv, unsigned flags)
ctx->base.bind_blend_state(&ctx->base, blend_state);
zink_batch_rp(ctx);
ctx->base.surface_destroy(&ctx->base, psurf);
}
if (is_compute_only || zink_debug & ZINK_DEBUG_NOREORDER)

View file

@@ -281,11 +281,6 @@ zink_copy_image_buffer(struct zink_context *ctx, struct zink_resource *dst, stru
void
zink_destroy_buffer_view(struct zink_screen *screen, struct zink_buffer_view *buffer_view);
struct pipe_surface *
zink_get_dummy_pipe_surface(struct zink_context *ctx, int samples_index);
struct zink_surface *
zink_get_dummy_surface(struct zink_context *ctx, int samples_index);
void
debug_describe_zink_buffer_view(char *buf, const struct zink_buffer_view *ptr);

View file

@@ -129,7 +129,7 @@ zink_bind_vertex_buffers(struct zink_context *ctx)
buffers[i] = res->obj->buffer;
buffer_offsets[i] = vb->buffer_offset;
} else {
buffers[i] = zink_resource(ctx->dummy_vertex_buffer)->obj->buffer;
buffers[i] = VK_NULL_HANDLE;
buffer_offsets[i] = 0;
}
}

View file

@@ -3344,6 +3344,11 @@ zink_internal_create_screen(const struct pipe_screen_config *config, int64_t dev
goto fail;
}
if (!screen->info.rb2_feats.nullDescriptor) {
mesa_loge("Zink requires the nullDescriptor feature of KHR/EXT robustness2.");
goto fail;
}
if (zink_set_driver_strings(screen)) {
mesa_loge("ZINK: failed to set driver strings\n");
goto fail;
@@ -3591,14 +3596,6 @@ zink_internal_create_screen(const struct pipe_screen_config *config, int64_t dev
}
can_db = false;
}
if (!screen->info.rb2_feats.nullDescriptor) {
if (zink_descriptor_mode == ZINK_DESCRIPTOR_MODE_DB) {
if (!screen->driver_name_is_inferred)
mesa_loge("Cannot use db descriptor mode without robustness2.nullDescriptor");
goto fail;
}
can_db = false;
}
if (ZINK_FBFETCH_DESCRIPTOR_SIZE < screen->info.db_props.inputAttachmentDescriptorSize) {
if (zink_descriptor_mode == ZINK_DESCRIPTOR_MODE_DB) {
if (!screen->driver_name_is_inferred)

View file

@@ -405,35 +405,6 @@ zink_rebind_surface(struct zink_context *ctx, struct pipe_surface **psurface)
return true;
}
/* dummy surfaces are used for null framebuffer/descriptors */
struct pipe_surface *
zink_surface_create_null(struct zink_context *ctx, enum pipe_texture_target target, unsigned width, unsigned height, unsigned samples)
{
struct pipe_surface surf_templ = {0};
struct pipe_resource *pres;
struct pipe_resource templ = {0};
templ.width0 = width;
templ.height0 = height;
templ.depth0 = 1;
templ.format = PIPE_FORMAT_R8G8B8A8_UNORM;
templ.target = target;
templ.bind = PIPE_BIND_RENDER_TARGET | PIPE_BIND_SAMPLER_VIEW;
if (samples < 2)
templ.bind |= PIPE_BIND_SHADER_IMAGE;
templ.nr_samples = samples;
pres = ctx->base.screen->resource_create(ctx->base.screen, &templ);
if (!pres)
return NULL;
surf_templ.format = PIPE_FORMAT_R8G8B8A8_UNORM;
surf_templ.nr_samples = 0;
struct pipe_surface *psurf = ctx->base.create_surface(&ctx->base, pres, &surf_templ);
pipe_resource_reference(&pres, NULL);
return psurf;
}
void
zink_context_surface_init(struct pipe_context *context)
{

View file

@@ -77,9 +77,6 @@ zink_surface_clamp_viewtype(VkImageViewType viewType, unsigned first_layer, unsi
bool
zink_rebind_surface(struct zink_context *ctx, struct pipe_surface **psurface);
struct pipe_surface *
zink_surface_create_null(struct zink_context *ctx, enum pipe_texture_target target, unsigned width, unsigned height, unsigned samples);
void
zink_surface_swapchain_update(struct zink_context *ctx, struct zink_surface *surface);
void

View file

@@ -1838,10 +1838,7 @@ struct zink_context {
uint64_t render_passes;
} hud;
struct pipe_resource *dummy_vertex_buffer;
struct pipe_resource *dummy_xfb_buffer;
struct pipe_surface *dummy_surface[7];
struct zink_buffer_view *dummy_bufferview;
unsigned buffer_rebind_counter;
unsigned image_rebind_counter;