zink: allow null descriptor set layouts

I somehow got confused while writing this because of the null descriptor
feature, which enables drivers to consume a null descriptor; that feature has
no relation to a descriptor set layout containing no descriptors.

failing to accurately use zero descriptors can put layouts over the maximum
per-stage limits, which causes tests to crash

fixes (lavapipe):
KHR-GL46.shading_language_420pack.binding_uniform_block_array
KHR-GL46.multi_bind.dispatch_bind_buffers_base

Reviewed-by: Dave Airlie <airlied@redhat.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/15067>
This commit is contained in:
Mike Blumenkrantz 2022-01-31 12:23:22 -05:00 committed by Marge Bot
parent 3759a16d8a
commit 7266182be0
2 changed files with 33 additions and 54 deletions

View file

@@ -402,20 +402,22 @@ create_layout(struct zink_context *ctx, enum zink_descriptor_type type,
struct zink_descriptor_layout_key **layout_key)
{
struct zink_screen *screen = zink_screen(ctx->base.screen);
VkDescriptorSetLayout dsl = descriptor_layout_create(screen, type, bindings, MAX2(num_bindings, 1));
VkDescriptorSetLayout dsl = descriptor_layout_create(screen, type, bindings, num_bindings);
if (!dsl)
return NULL;
struct zink_descriptor_layout_key *k = ralloc(ctx, struct zink_descriptor_layout_key);
k->num_bindings = num_bindings;
size_t bindings_size = MAX2(num_bindings, 1) * sizeof(VkDescriptorSetLayoutBinding);
k->bindings = ralloc_size(k, bindings_size);
if (!k->bindings) {
ralloc_free(k);
VKSCR(DestroyDescriptorSetLayout)(screen->dev, dsl, NULL);
return NULL;
if (num_bindings) {
size_t bindings_size = num_bindings * sizeof(VkDescriptorSetLayoutBinding);
k->bindings = ralloc_size(k, bindings_size);
if (!k->bindings) {
ralloc_free(k);
VKSCR(DestroyDescriptorSetLayout)(screen->dev, dsl, NULL);
return NULL;
}
memcpy(k->bindings, bindings, bindings_size);
}
memcpy(k->bindings, bindings, bindings_size);
struct zink_descriptor_layout *layout = rzalloc(ctx, struct zink_descriptor_layout);
layout->layout = dsl;
@@ -434,18 +436,6 @@ zink_descriptor_util_layout_get(struct zink_context *ctx, enum zink_descriptor_t
.bindings = bindings,
};
VkDescriptorSetLayoutBinding null_binding;
if (!bindings) {
null_binding.binding = 0;
null_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
null_binding.descriptorCount = 1;
null_binding.pImmutableSamplers = NULL;
null_binding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT |
VK_SHADER_STAGE_GEOMETRY_BIT | VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT |
VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT | VK_SHADER_STAGE_COMPUTE_BIT;
key.bindings = &null_binding;
}
if (type != ZINK_DESCRIPTOR_TYPES) {
hash = hash_descriptor_layout(&key);
struct hash_entry *he = _mesa_hash_table_search_pre_hashed(&ctx->desc_set_layouts[type], hash, &key);
@@ -455,7 +445,7 @@ zink_descriptor_util_layout_get(struct zink_context *ctx, enum zink_descriptor_t
}
}
struct zink_descriptor_layout *layout = create_layout(ctx, type, bindings ? bindings : &null_binding, num_bindings, layout_key);
struct zink_descriptor_layout *layout = create_layout(ctx, type, bindings, num_bindings, layout_key);
if (layout && type != ZINK_DESCRIPTOR_TYPES) {
_mesa_hash_table_insert_pre_hashed(&ctx->desc_set_layouts[type], hash, *layout_key, layout);
}
@@ -1433,15 +1423,14 @@ zink_descriptors_update(struct zink_context *ctx, bool is_compute)
cache_hit = false;
}
ctx->dd->changed[is_compute][ZINK_DESCRIPTOR_TYPES] = false;
if (!desc_set)
desc_set = ctx->dd->dummy_set;
if (pg->dd->push_usage) // push set
dynamic_offset_idx = update_push_ubo_descriptors(ctx, zds, desc_set,
is_compute, cache_hit, dynamic_offsets);
VKCTX(CmdBindDescriptorSets)(batch->state->cmdbuf, bp,
pg->layout, 0, 1, &desc_set,
dynamic_offset_idx, dynamic_offsets);
if (desc_set) {
if (pg->dd->push_usage) // push set
dynamic_offset_idx = update_push_ubo_descriptors(ctx, zds, desc_set,
is_compute, cache_hit, dynamic_offsets);
VKCTX(CmdBindDescriptorSets)(batch->state->cmdbuf, bp,
pg->layout, 0, 1, &desc_set,
dynamic_offset_idx, dynamic_offsets);
}
}
{
@@ -1467,15 +1456,16 @@ zink_descriptors_update(struct zink_context *ctx, bool is_compute)
}
} else
zds = NULL;
/* reuse dummy set for bind */
desc_set = zds ? zds->desc_set : ctx->dd->dummy_set;
update_descriptors_internal(ctx, h, zds, pg, cache_hit);
if (zds) {
desc_set = zds->desc_set;
update_descriptors_internal(ctx, h, zds, pg, cache_hit);
VKCTX(CmdBindDescriptorSets)(batch->state->cmdbuf, bp,
pg->layout, h + 1, 1, &desc_set,
0, NULL);
if (pdd_cached(pg)->cache_misses[h] == MAX_CACHE_MISSES)
zink_descriptor_pool_reference(ctx, &pdd_cached(pg)->pool[h], NULL);
VKCTX(CmdBindDescriptorSets)(batch->state->cmdbuf, bp,
pg->layout, h + 1, 1, &desc_set,
0, NULL);
if (pdd_cached(pg)->cache_misses[h] == MAX_CACHE_MISSES)
zink_descriptor_pool_reference(ctx, &pdd_cached(pg)->pool[h], NULL);
}
}
} else {
zink_descriptors_update_lazy_masked(ctx, is_compute, BITFIELD_BIT(h), 0);

View file

@@ -468,10 +468,10 @@ populate_sets(struct zink_context *ctx, struct zink_batch_descriptor_data_lazy *
if (pg->dd->pool_key[type]) {
struct zink_descriptor_pool *pool = get_descriptor_pool_lazy(ctx, pg, type, bdd, pg->is_compute);
sets[type] = get_descriptor_set_lazy(pool);
if (!sets[type])
return false;
} else
sets[type] = ctx->dd->dummy_set;
if (!sets[type])
return false;
sets[type] = VK_NULL_HANDLE;
}
return true;
}
@@ -515,7 +515,7 @@ zink_descriptors_update_lazy_masked(struct zink_context *ctx, bool is_compute, u
}
u_foreach_bit(type, bind_sets & ~changed_sets) {
if (!pg->dd->pool_key[type])
bdd->sets[is_compute][type + 1] = ctx->dd->dummy_set;
continue;
assert(bdd->sets[is_compute][type + 1]);
VKSCR(CmdBindDescriptorSets)(bs->cmdbuf,
is_compute ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS,
@@ -611,14 +611,8 @@ zink_descriptors_update_lazy(struct zink_context *ctx, bool is_compute)
pg->layout, 0, 1, push_set ? &push_set : &bdd->sets[is_compute][0],
0, NULL);
}
dd_lazy(ctx)->push_state_changed[is_compute] = false;
} else if (dd_lazy(ctx)->push_state_changed[is_compute] || bind_sets) {
VKCTX(CmdBindDescriptorSets)(bs->cmdbuf,
is_compute ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS,
pg->layout, 0, 1, &ctx->dd->dummy_set,
0, NULL);
dd_lazy(ctx)->push_state_changed[is_compute] = false;
}
dd_lazy(ctx)->push_state_changed[is_compute] = false;
zink_descriptors_update_lazy_masked(ctx, is_compute, changed_sets, bind_sets);
if (pg->dd->bindless && unlikely(!ctx->dd->bindless_bound)) {
VKCTX(CmdBindDescriptorSets)(ctx->batch.state->cmdbuf, is_compute ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS,
@@ -761,11 +755,6 @@ zink_descriptors_init_lazy(struct zink_context *ctx)
ctx->dd->dummy_dsl = zink_descriptor_util_layout_get(ctx, 0, NULL, 0, &layout_key);
if (!ctx->dd->dummy_dsl)
return false;
VkDescriptorPoolSize null_size = {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1};
ctx->dd->dummy_pool = create_pool(screen, 1, &null_size, 0);
zink_descriptor_util_alloc_sets(screen, ctx->dd->dummy_dsl->layout,
ctx->dd->dummy_pool, &ctx->dd->dummy_set, 1);
zink_descriptor_util_init_null_set(ctx, ctx->dd->dummy_set);
return true;
}