zink: unify cached descriptor update code

this is all pretty much standardized now, so it can be combined into something
simpler

Acked-by: Hoe Hao Cheng <haochengho12907@gmail.com>
Acked-by: Dave Airlie <airlied@redhat.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/11188>
This commit is contained in:
Mike Blumenkrantz 2021-03-25 11:26:43 -04:00
parent 93021aa6c8
commit e00268894f
3 changed files with 302 additions and 513 deletions

View file

@ -75,35 +75,18 @@ struct zink_descriptor_set {
};
};
struct zink_descriptor_data {
struct zink_descriptor_state gfx_descriptor_states[ZINK_SHADER_COUNT]; // keep incremental hashes here
struct zink_descriptor_state descriptor_states[2]; // gfx, compute
struct hash_table *descriptor_pools[ZINK_DESCRIPTOR_TYPES];
struct zink_descriptor_pool *push_pool[2]; //gfx, compute
VkDescriptorSetLayout push_dsl[2]; //gfx, compute
uint8_t last_push_usage[2];
bool push_valid[2];
uint32_t push_state[2];
bool gfx_push_valid[ZINK_SHADER_COUNT];
uint32_t gfx_push_state[ZINK_SHADER_COUNT];
struct zink_descriptor_set *last_set[2];
struct zink_descriptor_pool *dummy_pool;
VkDescriptorSetLayout dummy_dsl;
VkDescriptorSet dummy_set;
};
struct zink_program_descriptor_data {
struct zink_program_descriptor_data_cached {
struct zink_program_descriptor_data base;
struct zink_descriptor_pool *pool[ZINK_DESCRIPTOR_TYPES];
struct zink_descriptor_set *last_set[ZINK_DESCRIPTOR_TYPES];
uint8_t push_usage;
};
struct zink_batch_descriptor_data {
struct set *desc_sets;
};
/* Downcast a program's descriptor data to the cached-backend variant.
 * NOTE(review): assumes the cached backend always allocated
 * struct zink_program_descriptor_data_cached for pg->dd (its first member
 * is the base struct), as done in zink_descriptor_program_init.
 */
static inline struct zink_program_descriptor_data_cached *
pdd_cached(struct zink_program *pg)
{
   struct zink_program_descriptor_data *base = pg->dd;
   return (struct zink_program_descriptor_data_cached *)base;
}
static bool
batch_add_desc_set(struct zink_batch *batch, struct zink_descriptor_set *zds)
@ -523,12 +506,31 @@ zink_descriptor_util_alloc_sets(struct zink_screen *screen, VkDescriptorSetLayou
return true;
}
unsigned
zink_descriptor_program_num_sizes(struct zink_program *pg, enum zink_descriptor_type type)
{
switch (type) {
case ZINK_DESCRIPTOR_TYPE_UBO:
return 1;
case ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW:
return !!pg->dd->sizes[ZDS_INDEX_COMBINED_SAMPLER].descriptorCount +
!!pg->dd->sizes[ZDS_INDEX_UNIFORM_TEXELS].descriptorCount;
case ZINK_DESCRIPTOR_TYPE_SSBO:
return 1;
case ZINK_DESCRIPTOR_TYPE_IMAGE:
return !!pg->dd->sizes[ZDS_INDEX_STORAGE_IMAGE].descriptorCount +
!!pg->dd->sizes[ZDS_INDEX_STORAGE_TEXELS].descriptorCount;
default: break;
}
unreachable("unknown type");
}
static struct zink_descriptor_set *
allocate_desc_set(struct zink_context *ctx, struct zink_program *pg, enum zink_descriptor_type type, unsigned descs_used, bool is_compute)
{
struct zink_screen *screen = zink_screen(ctx->base.screen);
bool push_set = type == ZINK_DESCRIPTOR_TYPES;
struct zink_descriptor_pool *pool = push_set ? ctx->dd->push_pool[is_compute] : pg->dd->pool[type];
struct zink_descriptor_pool *pool = push_set ? ctx->dd->push_pool[is_compute] : pdd_cached(pg)->pool[type];
#define DESC_BUCKET_FACTOR 10
unsigned bucket_size = pool->key.layout->num_descriptors ? DESC_BUCKET_FACTOR : 1;
if (pool->key.layout->num_descriptors) {
@ -624,7 +626,7 @@ zink_descriptor_set_get(struct zink_context *ctx,
struct zink_program *pg = is_compute ? (struct zink_program *)ctx->curr_compute : (struct zink_program *)ctx->curr_program;
struct zink_batch *batch = &ctx->batch;
bool push_set = type == ZINK_DESCRIPTOR_TYPES;
struct zink_descriptor_pool *pool = push_set ? ctx->dd->push_pool[is_compute] : pg->dd->pool[type];
struct zink_descriptor_pool *pool = push_set ? ctx->dd->push_pool[is_compute] : pdd_cached(pg)->pool[type];
unsigned descs_used = 1;
assert(type <= ZINK_DESCRIPTOR_TYPES);
@ -636,7 +638,7 @@ zink_descriptor_set_get(struct zink_context *ctx,
populate_zds_key(ctx, type, is_compute, &key, pg->dd->push_usage);
simple_mtx_lock(&pool->mtx);
struct zink_descriptor_set *last_set = push_set ? ctx->dd->last_set[is_compute] : pg->dd->last_set[type];
struct zink_descriptor_set *last_set = push_set ? ctx->dd->last_set[is_compute] : pdd_cached(pg)->last_set[type];
if (last_set && last_set->hash == hash && desc_state_equal(&last_set->key, &key)) {
zds = last_set;
*cache_hit = !zds->invalid;
@ -725,7 +727,7 @@ quick_out:
if (push_set)
ctx->dd->last_set[is_compute] = zds;
else
pg->dd->last_set[type] = zds;
pdd_cached(pg)->last_set[type] = zds;
simple_mtx_unlock(&pool->mtx);
return zds;
@ -831,132 +833,36 @@ zink_descriptor_pool_reference(struct zink_screen *screen,
bool
zink_descriptor_program_init(struct zink_context *ctx, struct zink_program *pg)
{
VkDescriptorSetLayoutBinding bindings[ZINK_DESCRIPTOR_TYPES][PIPE_SHADER_TYPES * 32];
unsigned num_bindings[ZINK_DESCRIPTOR_TYPES] = {0};
uint8_t push_usage = 0;
struct zink_screen *screen = zink_screen(ctx->base.screen);
VkDescriptorPoolSize sizes[6] = {0};
int type_map[12];
unsigned num_types = 0;
memset(type_map, -1, sizeof(type_map));
pg->dd = (void*)rzalloc(pg, struct zink_program_descriptor_data_cached);
if (!pg->dd)
return false;
struct zink_shader **stages;
if (pg->is_compute)
stages = &((struct zink_compute_program*)pg)->shader;
else
stages = ((struct zink_gfx_program*)pg)->shaders;
if (!zink_descriptor_program_init_lazy(ctx, pg))
return false;
for (int i = 0; i < (pg->is_compute ? 1 : ZINK_SHADER_COUNT); i++) {
struct zink_shader *shader = stages[i];
if (!shader)
continue;
enum pipe_shader_type stage = pipe_shader_type_from_mesa(shader->nir->info.stage);
/* no descriptors */
if (!pg->dd)
return true;
VkShaderStageFlagBits stage_flags = zink_shader_stage(pipe_shader_type_from_mesa(shader->nir->info.stage));
for (int j = 0; j < ZINK_DESCRIPTOR_TYPES; j++) {
for (int k = 0; k < shader->num_bindings[j]; k++) {
assert(num_bindings[j] < ARRAY_SIZE(bindings[j]));
if (shader->bindings[j][k].type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) {
push_usage |= BITFIELD_BIT(stage);
continue;
}
bindings[j][num_bindings[j]].binding = shader->bindings[j][k].binding;
bindings[j][num_bindings[j]].descriptorType = shader->bindings[j][k].type;
bindings[j][num_bindings[j]].descriptorCount = shader->bindings[j][k].size;
bindings[j][num_bindings[j]].stageFlags = stage_flags;
bindings[j][num_bindings[j]].pImmutableSamplers = NULL;
if (type_map[shader->bindings[j][k].type] == -1) {
type_map[shader->bindings[j][k].type] = num_types++;
sizes[type_map[shader->bindings[j][k].type]].type = shader->bindings[j][k].type;
}
sizes[type_map[shader->bindings[j][k].type]].descriptorCount += shader->bindings[j][k].size;
++num_bindings[j];
}
}
}
unsigned total_descs = 0;
for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++) {
total_descs += num_bindings[i];
}
if (total_descs || push_usage) {
pg->dd = rzalloc(pg, struct zink_program_descriptor_data);
if (!pg->dd)
return false;
pg->dd->push_usage = push_usage;
pg->dsl[pg->num_dsl++] = push_usage ? ctx->dd->push_dsl[pg->is_compute] : ctx->dd->dummy_dsl;
}
if (!total_descs) {
pg->layout = zink_pipeline_layout_create(zink_screen(ctx->base.screen), pg);
return !!pg->layout;
}
for (int i = 0; i < num_types; i++)
sizes[i].descriptorCount *= ZINK_DEFAULT_MAX_DESCS;
bool found_descriptors = false;
struct zink_descriptor_layout_key *layout_key[ZINK_DESCRIPTOR_TYPES] = {0};
for (unsigned i = ZINK_DESCRIPTOR_TYPES - 1; i < ZINK_DESCRIPTOR_TYPES; i--) {
if (!num_bindings[i]) {
if (!found_descriptors)
continue;
pg->dsl[i + 1] = ctx->dd->dummy_dsl;
/* pool is null here for detection during update */
pg->num_dsl++;
if (!pg->dd->layout_key[i])
continue;
}
found_descriptors = true;
VkDescriptorPoolSize type_sizes[2] = {0};
int num_type_sizes = 0;
switch (i) {
case ZINK_DESCRIPTOR_TYPE_UBO:
if (type_map[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER] != -1) {
type_sizes[num_type_sizes] = sizes[type_map[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER]];
num_type_sizes++;
}
break;
case ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW:
if (type_map[VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER] != -1) {
type_sizes[num_type_sizes] = sizes[type_map[VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER]];
num_type_sizes++;
}
if (type_map[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] != -1) {
type_sizes[num_type_sizes] = sizes[type_map[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER]];
num_type_sizes++;
}
break;
case ZINK_DESCRIPTOR_TYPE_SSBO:
if (type_map[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER] != -1) {
num_type_sizes = 1;
type_sizes[0] = sizes[type_map[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER]];
}
break;
case ZINK_DESCRIPTOR_TYPE_IMAGE:
if (type_map[VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER] != -1) {
type_sizes[num_type_sizes] = sizes[type_map[VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER]];
num_type_sizes++;
}
if (type_map[VK_DESCRIPTOR_TYPE_STORAGE_IMAGE] != -1) {
type_sizes[num_type_sizes] = sizes[type_map[VK_DESCRIPTOR_TYPE_STORAGE_IMAGE]];
num_type_sizes++;
}
break;
}
pg->dsl[i + 1] = zink_descriptor_util_layout_get(ctx, i, bindings[i], num_bindings[i], &layout_key[i]);
if (!pg->dsl[i + 1])
return false;
struct zink_descriptor_pool *pool = descriptor_pool_get(ctx, i, layout_key[i], type_sizes, num_type_sizes);
unsigned idx = zink_descriptor_type_to_size_idx(i);
VkDescriptorPoolSize *size = &pg->dd->sizes[idx];
/* this is a sampler/image set with no images only texels */
if (!size->descriptorCount)
size++;
unsigned num_sizes = zink_descriptor_program_num_sizes(pg, i);
struct zink_descriptor_pool *pool = descriptor_pool_get(ctx, i, pg->dd->layout_key[i], size, num_sizes);
if (!pool)
return false;
zink_descriptor_pool_reference(zink_screen(ctx->base.screen), &pg->dd->pool[i], pool);
pg->num_dsl++;
zink_descriptor_pool_reference(screen, &pdd_cached(pg)->pool[i], pool);
}
pg->layout = zink_pipeline_layout_create(zink_screen(ctx->base.screen), pg);
return !!pg->layout;
return true;
}
void
@ -965,7 +871,9 @@ zink_descriptor_program_deinit(struct zink_screen *screen, struct zink_program *
if (!pg->dd)
return;
for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++)
zink_descriptor_pool_reference(screen, &pg->dd->pool[i], NULL);
zink_descriptor_pool_reference(screen, &pdd_cached(pg)->pool[i], NULL);
zink_descriptor_program_deinit_lazy(screen, pg);
}
static void
@ -979,7 +887,6 @@ zink_descriptor_pool_deinit(struct zink_context *ctx)
}
_mesa_hash_table_destroy(ctx->dd->descriptor_pools[i], NULL);
}
zink_descriptor_pool_reference(screen, &ctx->dd->dummy_pool, NULL);
}
static bool
@ -990,32 +897,14 @@ zink_descriptor_pool_init(struct zink_context *ctx)
if (!ctx->dd->descriptor_pools[i])
return false;
}
struct zink_descriptor_layout_key *layout_keys[2];
struct zink_screen *screen = zink_screen(ctx->base.screen);
if (!zink_descriptor_util_push_layouts_get(ctx, ctx->dd->push_dsl, layout_keys))
return false;
VkDescriptorPoolSize sizes;
sizes.type = screen->lazy_descriptors ? VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER : VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
sizes.descriptorCount = ZINK_SHADER_COUNT * ZINK_DEFAULT_MAX_DESCS;
ctx->dd->push_pool[0] = descriptor_pool_get(ctx, 0, layout_keys[0], &sizes, 1);
ctx->dd->push_pool[0] = descriptor_pool_get(ctx, 0, ctx->dd->push_layout_keys[0], &sizes, 1);
sizes.descriptorCount = ZINK_DEFAULT_MAX_DESCS;
ctx->dd->push_pool[1] = descriptor_pool_get(ctx, 0, layout_keys[1], &sizes, 1);
if (!ctx->dd->push_pool[0] || !ctx->dd->push_pool[1])
return false;
ctx->dd->dummy_dsl = zink_descriptor_util_layout_get(ctx, 0, NULL, 0, &layout_keys[0]);
if (!ctx->dd->dummy_dsl)
return false;
VkDescriptorPoolSize null_size = {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1};
ctx->dd->dummy_pool = descriptor_pool_create(screen, 0, layout_keys[0], &null_size, 1);
if (!ctx->dd->dummy_pool)
return false;
zink_descriptor_util_alloc_sets(screen, ctx->dd->dummy_dsl,
ctx->dd->dummy_pool->descpool, &ctx->dd->dummy_set, 1);
if (!ctx->dd->dummy_set)
return false;
zink_descriptor_util_init_null_set(ctx, ctx->dd->dummy_set);
return true;
ctx->dd->push_pool[1] = descriptor_pool_get(ctx, 0, ctx->dd->push_layout_keys[1], &sizes, 1);
return ctx->dd->push_pool[0] && ctx->dd->push_pool[1];
}
@ -1071,15 +960,6 @@ desc_set_image_add(struct zink_context *ctx, struct zink_descriptor_set *zds, st
zink_image_view_desc_set_add(image_view, zds, i);
}
/* Flush accumulated descriptor writes to the device.
 * No-op when the set was a cache hit or there is nothing to write.
 */
static void
write_descriptors(struct zink_context *ctx, unsigned num_wds, VkWriteDescriptorSet *wds, bool cache_hit)
{
   if (cache_hit || !num_wds)
      return;
   struct zink_screen *screen = zink_screen(ctx->base.screen);
   vkUpdateDescriptorSets(screen->dev, num_wds, wds, 0, NULL);
}
static unsigned
init_write_descriptor(struct zink_shader *shader, struct zink_descriptor_set *zds, enum zink_descriptor_type type, int idx, VkWriteDescriptorSet *wd, unsigned num_wds)
{
@ -1097,22 +977,23 @@ static unsigned
update_push_ubo_descriptors(struct zink_context *ctx, struct zink_descriptor_set *zds,
bool is_compute, bool cache_hit, uint32_t *dynamic_offsets)
{
struct zink_screen *screen = zink_screen(ctx->base.screen);
VkWriteDescriptorSet wds[ZINK_SHADER_COUNT];
VkDescriptorBufferInfo buffer_infos[ZINK_SHADER_COUNT];
struct zink_shader **stages;
unsigned num_stages = is_compute ? 1 : ZINK_SHADER_COUNT;
if (is_compute)
stages = &ctx->curr_compute->shader;
else
stages = &ctx->gfx_stages[0];
for (int i = 0; i < num_stages; i++) {
struct zink_shader *shader = stages[i];
enum pipe_shader_type pstage = shader ? pipe_shader_type_from_mesa(shader->nir->info.stage) : i;
VkDescriptorBufferInfo *info = &ctx->di.ubos[pstage][0];
unsigned dynamic_idx = is_compute ? 0 : tgsi_processor_to_shader_stage(pstage);
/* Values are taken from pDynamicOffsets in an order such that all entries for set N come before set N+1;
* within a set, entries are ordered by the binding numbers in the descriptor set layouts
* - vkCmdBindDescriptorSets spec
@ -1132,205 +1013,98 @@ update_push_ubo_descriptors(struct zink_context *ctx, struct zink_descriptor_set
}
}
write_descriptors(ctx, num_stages, wds, cache_hit);
if (!cache_hit)
vkUpdateDescriptorSets(screen->dev, num_stages, wds, 0, NULL);
return num_stages;
}
static void
update_ubo_descriptors(struct zink_context *ctx, struct zink_descriptor_set *zds,
bool is_compute, bool cache_hit)
update_descriptors_internal(struct zink_context *ctx, struct zink_descriptor_set **zds, struct zink_program *pg, bool *cache_hit)
{
struct zink_program *pg = is_compute ? (struct zink_program *)ctx->curr_compute : (struct zink_program *)ctx->curr_program;
unsigned num_descriptors = pg->dd->pool[ZINK_DESCRIPTOR_TYPE_UBO]->key.layout->num_descriptors;
unsigned num_bindings = zds->pool->num_resources;
VkWriteDescriptorSet wds[num_descriptors];
unsigned num_wds = 0;
unsigned num_resources = 0;
struct zink_screen *screen = zink_screen(ctx->base.screen);
struct zink_shader **stages;
unsigned num_stages = is_compute ? 1 : ZINK_SHADER_COUNT;
if (is_compute)
unsigned num_stages = pg->is_compute ? 1 : ZINK_SHADER_COUNT;
if (pg->is_compute)
stages = &ctx->curr_compute->shader;
else
stages = &ctx->gfx_stages[0];
for (int i = 0; !cache_hit && i < num_stages; i++) {
struct zink_shader *shader = stages[i];
if (!shader)
for (unsigned h = 0; h < ZINK_DESCRIPTOR_TYPES; h++) {
if (cache_hit[h] || !zds[h])
continue;
enum pipe_shader_type stage = pipe_shader_type_from_mesa(shader->nir->info.stage);
for (int j = 0; j < shader->num_bindings[ZINK_DESCRIPTOR_TYPE_UBO]; j++) {
int index = shader->bindings[ZINK_DESCRIPTOR_TYPE_UBO][j].index;
VkDescriptorBufferInfo *info = &ctx->di.ubos[stage][index];
/* skip push descriptors for general ubo set */
if (!index)
unsigned num_resources = 0;
unsigned num_descriptors = zds[h]->pool->key.layout->num_descriptors;
ASSERTED unsigned num_bindings = zds[h]->pool->num_resources;
VkWriteDescriptorSet wds[num_descriptors];
unsigned num_wds = 0;
for (int i = 0; i < num_stages; i++) {
struct zink_shader *shader = stages[i];
if (!shader)
continue;
assert(shader->bindings[ZINK_DESCRIPTOR_TYPE_UBO][j].type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
struct zink_resource *res = zink_get_resource_for_descriptor(ctx, ZINK_DESCRIPTOR_TYPE_UBO, stage, index);
assert(!res || info->range > 0);
assert(!res || info->buffer);
assert(num_resources < num_bindings);
desc_set_res_add(zds, res, num_resources++, cache_hit);
wds[num_wds].pBufferInfo = info;
enum pipe_shader_type stage = pipe_shader_type_from_mesa(shader->nir->info.stage);
for (int j = 0; j < shader->num_bindings[h]; j++) {
int index = shader->bindings[h][j].index;
switch (h) {
case ZINK_DESCRIPTOR_TYPE_UBO:
if (!index)
continue;
FALLTHROUGH;
case ZINK_DESCRIPTOR_TYPE_SSBO: {
VkDescriptorBufferInfo *info;
struct zink_resource *res = zink_get_resource_for_descriptor(ctx, h, stage, index);
if (h == ZINK_DESCRIPTOR_TYPE_UBO)
info = &ctx->di.ubos[stage][index];
else
info = &ctx->di.ssbos[stage][index];
assert(num_resources < num_bindings);
desc_set_res_add(zds[h], res, num_resources++, cache_hit[h]);
wds[num_wds].pBufferInfo = info;
}
break;
case ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW:
case ZINK_DESCRIPTOR_TYPE_IMAGE: {
VkDescriptorImageInfo *image_info;
VkBufferView *buffer_info;
if (h == ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW) {
image_info = &ctx->di.textures[stage][index];
buffer_info = &ctx->di.tbos[stage][index];
} else {
image_info = &ctx->di.images[stage][index];
buffer_info = &ctx->di.texel_images[stage][index];
}
bool is_buffer = zink_shader_descriptor_is_buffer(shader, h, j);
for (unsigned k = 0; k < shader->bindings[h][j].size; k++) {
assert(num_resources < num_bindings);
if (h == ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW) {
struct zink_sampler_view *sampler_view = zink_sampler_view(ctx->sampler_views[stage][index + k]);
struct zink_sampler_state *sampler = NULL;
if (!is_buffer && image_info->imageView)
sampler = ctx->sampler_states[stage][index + k];;
num_wds = init_write_descriptor(shader, zds, ZINK_DESCRIPTOR_TYPE_UBO, j, &wds[num_wds], num_wds);
}
}
write_descriptors(ctx, num_wds, wds, cache_hit);
}
static void
update_ssbo_descriptors(struct zink_context *ctx, struct zink_descriptor_set *zds,
bool is_compute, bool cache_hit)
{
struct zink_program *pg = is_compute ? (struct zink_program *)ctx->curr_compute : (struct zink_program *)ctx->curr_program;
unsigned num_descriptors = pg->dd->pool[ZINK_DESCRIPTOR_TYPE_SSBO]->key.layout->num_descriptors;
unsigned num_bindings = zds->pool->num_resources;
VkWriteDescriptorSet wds[num_descriptors];
unsigned num_wds = 0;
unsigned num_resources = 0;
struct zink_shader **stages;
unsigned num_stages = is_compute ? 1 : ZINK_SHADER_COUNT;
if (is_compute)
stages = &ctx->curr_compute->shader;
else
stages = &ctx->gfx_stages[0];
for (int i = 0; !cache_hit && i < num_stages; i++) {
struct zink_shader *shader = stages[i];
if (!shader)
continue;
enum pipe_shader_type stage = pipe_shader_type_from_mesa(shader->nir->info.stage);
for (int j = 0; j < shader->num_bindings[ZINK_DESCRIPTOR_TYPE_SSBO]; j++) {
int index = shader->bindings[ZINK_DESCRIPTOR_TYPE_SSBO][j].index;
VkDescriptorBufferInfo *info = &ctx->di.ssbos[stage][index];
assert(shader->bindings[ZINK_DESCRIPTOR_TYPE_SSBO][j].type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
assert(num_resources < num_bindings);
struct zink_resource *res = zink_get_resource_for_descriptor(ctx, ZINK_DESCRIPTOR_TYPE_SSBO, stage, index);
desc_set_res_add(zds, res, num_resources++, cache_hit);
wds[num_wds].pBufferInfo = info;
num_wds = init_write_descriptor(shader, zds, ZINK_DESCRIPTOR_TYPE_SSBO, j, &wds[num_wds], num_wds);
}
}
write_descriptors(ctx, num_wds, wds, cache_hit);
}
static void
update_sampler_descriptors(struct zink_context *ctx, struct zink_descriptor_set *zds,
bool is_compute, bool cache_hit)
{
struct zink_program *pg = is_compute ? (struct zink_program *)ctx->curr_compute : (struct zink_program *)ctx->curr_program;
unsigned num_descriptors = pg->dd->pool[ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW]->key.layout->num_descriptors;
unsigned num_bindings = zds->pool->num_resources;
VkWriteDescriptorSet wds[num_descriptors];
unsigned num_wds = 0;
unsigned num_resources = 0;
struct zink_shader **stages;
unsigned num_stages = is_compute ? 1 : ZINK_SHADER_COUNT;
if (is_compute)
stages = &ctx->curr_compute->shader;
else
stages = &ctx->gfx_stages[0];
for (int i = 0; !cache_hit && i < num_stages; i++) {
struct zink_shader *shader = stages[i];
if (!shader)
continue;
enum pipe_shader_type stage = pipe_shader_type_from_mesa(shader->nir->info.stage);
for (int j = 0; j < shader->num_bindings[ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW]; j++) {
int index = shader->bindings[ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW][j].index;
assert(shader->bindings[ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW][j].type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER ||
shader->bindings[ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW][j].type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
for (unsigned k = 0; k < shader->bindings[ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW][j].size; k++) {
struct zink_resource *res = zink_get_resource_for_descriptor(ctx, ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW, stage, index + k);
VkDescriptorImageInfo *image_info = &ctx->di.textures[stage][index + k];
VkBufferView *buffer_info = &ctx->di.tbos[stage][index + k];
bool is_buffer = zink_shader_descriptor_is_buffer(shader, ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW, j);
struct pipe_sampler_view *psampler_view = ctx->sampler_views[stage][index + k];
struct zink_sampler_view *sampler_view = zink_sampler_view(psampler_view);
struct zink_sampler_state *sampler = NULL;
if (!is_buffer && res)
sampler = ctx->sampler_states[stage][index + k];
assert(num_resources < num_bindings);
if (!k) {
desc_set_sampler_add(ctx, zds[h], sampler_view, sampler, num_resources++, is_buffer, cache_hit[h]);
} else {
struct zink_image_view *image_view = &ctx->image_views[stage][index + k];
desc_set_image_add(ctx, zds[h], image_view, num_resources++, is_buffer, cache_hit[h]);
}
}
if (is_buffer)
wds[num_wds].pTexelBufferView = buffer_info;
else
wds[num_wds].pImageInfo = image_info;
}
desc_set_sampler_add(ctx, zds, sampler_view, sampler, num_resources++,
zink_shader_descriptor_is_buffer(shader, ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW, j),
cache_hit);
}
assert(num_wds < num_descriptors);
num_wds = init_write_descriptor(shader, zds, ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW, j, &wds[num_wds], num_wds);
}
}
write_descriptors(ctx, num_wds, wds, cache_hit);
}
static void
update_image_descriptors(struct zink_context *ctx, struct zink_descriptor_set *zds,
bool is_compute, bool cache_hit)
{
struct zink_program *pg = is_compute ? (struct zink_program *)ctx->curr_compute : (struct zink_program *)ctx->curr_program;
unsigned num_descriptors = pg->dd->pool[ZINK_DESCRIPTOR_TYPE_IMAGE]->key.layout->num_descriptors;
unsigned num_bindings = zds->pool->num_resources;
VkWriteDescriptorSet wds[num_descriptors];
unsigned num_wds = 0;
unsigned num_resources = 0;
struct zink_shader **stages;
unsigned num_stages = is_compute ? 1 : ZINK_SHADER_COUNT;
if (is_compute)
stages = &ctx->curr_compute->shader;
else
stages = &ctx->gfx_stages[0];
for (int i = 0; !cache_hit && i < num_stages; i++) {
struct zink_shader *shader = stages[i];
if (!shader)
continue;
enum pipe_shader_type stage = pipe_shader_type_from_mesa(shader->nir->info.stage);
for (int j = 0; j < shader->num_bindings[ZINK_DESCRIPTOR_TYPE_IMAGE]; j++) {
int index = shader->bindings[ZINK_DESCRIPTOR_TYPE_IMAGE][j].index;
assert(shader->bindings[ZINK_DESCRIPTOR_TYPE_IMAGE][j].type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER ||
shader->bindings[ZINK_DESCRIPTOR_TYPE_IMAGE][j].type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
for (unsigned k = 0; k < shader->bindings[ZINK_DESCRIPTOR_TYPE_IMAGE][j].size; k++) {
VkDescriptorImageInfo *image_info = &ctx->di.images[stage][index + k];
VkBufferView *buffer_info = &ctx->di.texel_images[stage][index + k];
bool is_buffer = zink_shader_descriptor_is_buffer(shader, ZINK_DESCRIPTOR_TYPE_IMAGE, j);
struct zink_image_view *image_view = &ctx->image_views[stage][index + k];
assert(num_resources < num_bindings);
desc_set_image_add(ctx, zds, image_view, num_resources++,
zink_shader_descriptor_is_buffer(shader, ZINK_DESCRIPTOR_TYPE_IMAGE, j),
cache_hit);
if (!k) {
if (is_buffer)
wds[num_wds].pTexelBufferView = buffer_info;
else
wds[num_wds].pImageInfo = image_info;
break;
default:
unreachable("unknown descriptor type");
}
num_wds = init_write_descriptor(shader, zds[h], h, j, &wds[num_wds], num_wds);
}
assert(num_wds < num_descriptors);
num_wds = init_write_descriptor(shader, zds, ZINK_DESCRIPTOR_TYPE_IMAGE, j, &wds[num_wds], num_wds);
}
if (num_wds)
vkUpdateDescriptorSets(screen->dev, num_wds, wds, 0, NULL);
}
write_descriptors(ctx, num_wds, wds, cache_hit);
}
static void
@ -1356,7 +1130,7 @@ zink_descriptors_update(struct zink_context *ctx, bool is_compute)
for (int h = 0; h < ZINK_DESCRIPTOR_TYPES; h++) {
if (pg->dsl[h + 1]) {
/* null set has null pool */
if (pg->dd->pool[h])
if (pdd_cached(pg)->pool[h])
zds[h] = zink_descriptor_set_get(ctx, h, is_compute, &cache_hit[h]);
else
zds[h] = NULL;
@ -1365,6 +1139,8 @@ zink_descriptors_update(struct zink_context *ctx, bool is_compute)
} else {
zds[h] = NULL;
}
if (!zds[h])
cache_hit[h] = false;
}
struct zink_batch *batch = &ctx->batch;
zink_batch_reference_program(batch, pg);
@ -1376,18 +1152,7 @@ zink_descriptors_update(struct zink_context *ctx, bool is_compute)
dynamic_offset_idx = update_push_ubo_descriptors(ctx, zds[ZINK_DESCRIPTOR_TYPES],
is_compute, cache_hit[ZINK_DESCRIPTOR_TYPES], dynamic_offsets);
if (zds[ZINK_DESCRIPTOR_TYPE_UBO])
update_ubo_descriptors(ctx, zds[ZINK_DESCRIPTOR_TYPE_UBO],
is_compute, cache_hit[ZINK_DESCRIPTOR_TYPE_UBO]);
if (zds[ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW])
update_sampler_descriptors(ctx, zds[ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW],
is_compute, cache_hit[ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW]);
if (zds[ZINK_DESCRIPTOR_TYPE_SSBO])
update_ssbo_descriptors(ctx, zds[ZINK_DESCRIPTOR_TYPE_SSBO],
is_compute, cache_hit[ZINK_DESCRIPTOR_TYPE_SSBO]);
if (zds[ZINK_DESCRIPTOR_TYPE_IMAGE])
update_image_descriptors(ctx, zds[ZINK_DESCRIPTOR_TYPE_IMAGE],
is_compute, cache_hit[ZINK_DESCRIPTOR_TYPE_IMAGE]);
update_descriptors_internal(ctx, zds, pg, cache_hit);
vkCmdBindDescriptorSets(batch->state->cmdbuf, is_compute ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS,
pg->layout, 0, pg->num_dsl, sets,
@ -1400,7 +1165,10 @@ zink_batch_descriptor_deinit(struct zink_screen *screen, struct zink_batch_state
if (!bs->dd)
return;
_mesa_set_destroy(bs->dd->desc_sets, NULL);
ralloc_free(bs->dd);
if (screen->info.have_KHR_descriptor_update_template)
zink_batch_descriptor_deinit_lazy(screen, bs);
else
ralloc_free(bs->dd);
}
void
@ -1416,12 +1184,17 @@ zink_batch_descriptor_reset(struct zink_screen *screen, struct zink_batch_state
zink_descriptor_set_recycle(zds);
_mesa_set_remove(bs->dd->desc_sets, entry);
}
if (screen->info.have_KHR_descriptor_update_template)
zink_batch_descriptor_reset_lazy(screen, bs);
}
bool
zink_batch_descriptor_init(struct zink_screen *screen, struct zink_batch_state *bs)
{
bs->dd = rzalloc(bs, struct zink_batch_descriptor_data);
if (screen->info.have_KHR_descriptor_update_template)
zink_batch_descriptor_init_lazy(screen, bs);
else
bs->dd = rzalloc(bs, struct zink_batch_descriptor_data);
if (!bs->dd)
return false;
bs->dd->desc_sets = _mesa_pointer_set_create(bs);
@ -1675,7 +1448,7 @@ zink_context_invalidate_descriptor_state(struct zink_context *ctx, enum pipe_sha
bool
zink_descriptors_init(struct zink_context *ctx)
{
ctx->dd = rzalloc(ctx, struct zink_descriptor_data);
zink_descriptors_init_lazy(ctx);
if (!ctx->dd)
return false;
return zink_descriptor_pool_init(ctx);
@ -1685,6 +1458,7 @@ void
zink_descriptors_deinit(struct zink_context *ctx)
{
zink_descriptor_pool_deinit(ctx);
zink_descriptors_deinit_lazy(ctx);
}
bool

View file

@ -57,6 +57,15 @@ struct zink_descriptor_state {
uint32_t state[ZINK_DESCRIPTOR_TYPES];
};
/* Index into zink_program_descriptor_data::sizes for each Vulkan descriptor
 * type tracked by the pool-size arrays (see zink_vktype_to_size_idx for the
 * VkDescriptorType -> index mapping).
 */
enum zink_descriptor_size_index {
   ZDS_INDEX_UBO,              /* VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER */
   ZDS_INDEX_COMBINED_SAMPLER, /* VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER */
   ZDS_INDEX_UNIFORM_TEXELS,   /* VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER */
   ZDS_INDEX_STORAGE_BUFFER,   /* VK_DESCRIPTOR_TYPE_STORAGE_BUFFER */
   ZDS_INDEX_STORAGE_IMAGE,    /* VK_DESCRIPTOR_TYPE_STORAGE_IMAGE */
   ZDS_INDEX_STORAGE_TEXELS,   /* VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER */
};
struct hash_table;
struct zink_context;
@ -90,6 +99,78 @@ struct zink_descriptor_reference {
bool *invalid;
};
/* Per-context descriptor-manager state shared by the descriptor backends. */
struct zink_descriptor_data {
   struct zink_descriptor_state gfx_descriptor_states[ZINK_SHADER_COUNT]; // keep incremental hashes here
   struct zink_descriptor_state descriptor_states[2]; // gfx, compute
   struct hash_table *descriptor_pools[ZINK_DESCRIPTOR_TYPES]; // layout-keyed pool caches, one table per set type
   struct zink_descriptor_layout_key *push_layout_keys[2]; //gfx, compute
   struct zink_descriptor_pool *push_pool[2]; //gfx, compute
   VkDescriptorSetLayout push_dsl[2]; //gfx, compute
   uint8_t last_push_usage[2]; // stage bitmask of last push-set usage; gfx, compute
   bool push_valid[2]; // gfx, compute
   uint32_t push_state[2]; // gfx, compute
   bool gfx_push_valid[ZINK_SHADER_COUNT];
   uint32_t gfx_push_state[ZINK_SHADER_COUNT];
   struct zink_descriptor_set *last_set[2]; // most recently used push set; gfx, compute
   /* fallback objects used when a program has no real descriptors --
    * presumably bound so pipeline layouts stay valid; TODO confirm
    */
   VkDescriptorPool dummy_pool;
   VkDescriptorSetLayout dummy_dsl;
   VkDescriptorSet dummy_set;
};
/* Per-program descriptor state shared by the descriptor backends. */
struct zink_program_descriptor_data {
   uint8_t push_usage; // bitmask of stages using the push (dynamic-UBO) set
   VkDescriptorPoolSize sizes[6]; //zink_descriptor_size_index
   struct zink_descriptor_layout_key *layout_key[ZINK_DESCRIPTOR_TYPES]; //push set doesn't need one
   uint8_t binding_usage;
   VkDescriptorUpdateTemplateKHR templates[ZINK_DESCRIPTOR_TYPES + 1]; // one per set type plus the push set
};
/* Per-batch descriptor tracking: the sets referenced by this batch. */
struct zink_batch_descriptor_data {
   struct set *desc_sets;
};
/* Map a VkDescriptorType to its slot in the program's pool-size array
 * (zink_program_descriptor_data::sizes). Unhandled types are fatal.
 */
static inline enum zink_descriptor_size_index
zink_vktype_to_size_idx(VkDescriptorType type)
{
   if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER)
      return ZDS_INDEX_UBO;
   if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
      return ZDS_INDEX_COMBINED_SAMPLER;
   if (type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER)
      return ZDS_INDEX_UNIFORM_TEXELS;
   if (type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
      return ZDS_INDEX_STORAGE_BUFFER;
   if (type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
      return ZDS_INDEX_STORAGE_IMAGE;
   if (type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)
      return ZDS_INDEX_STORAGE_TEXELS;
   unreachable("unknown type");
}
/* Map a zink descriptor set type to the first pool-size slot it can use;
 * sampler-view and image sets may also consume the following texel slot
 * (see zink_descriptor_program_num_sizes). Unhandled types are fatal.
 */
static inline enum zink_descriptor_size_index
zink_descriptor_type_to_size_idx(enum zink_descriptor_type type)
{
   if (type == ZINK_DESCRIPTOR_TYPE_UBO)
      return ZDS_INDEX_UBO;
   if (type == ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW)
      return ZDS_INDEX_COMBINED_SAMPLER;
   if (type == ZINK_DESCRIPTOR_TYPE_SSBO)
      return ZDS_INDEX_STORAGE_BUFFER;
   if (type == ZINK_DESCRIPTOR_TYPE_IMAGE)
      return ZDS_INDEX_STORAGE_IMAGE;
   unreachable("unknown type");
}
unsigned
zink_descriptor_program_num_sizes(struct zink_program *pg, enum zink_descriptor_type type);
bool
zink_descriptor_layouts_init(struct zink_context *ctx);

View file

@ -34,35 +34,14 @@
#include "zink_resource.h"
#include "zink_screen.h"
struct zink_descriptor_data {
VkDescriptorSetLayout push_dsl[2]; //gfx, compute
VkDescriptorSetLayout dummy_dsl;
VkDescriptorPool dummy_pool;
VkDescriptorSet dummy_set;
/* Lazy-backend context descriptor state; extends the shared base struct
 * (base must remain the first member so casts between the two are valid).
 */
struct zink_descriptor_data_lazy {
   struct zink_descriptor_data base;
   VkDescriptorUpdateTemplateEntry push_entries[PIPE_SHADER_TYPES]; // one template entry per shader stage for the push set
   bool push_state_changed[2]; //gfx, compute
   uint8_t state_changed[2]; //gfx, compute
   struct zink_program *pg[2]; //gfx, compute
};
enum zink_descriptor_size_index {
ZDS_INDEX_UBO,
ZDS_INDEX_COMBINED_SAMPLER,
ZDS_INDEX_UNIFORM_TEXELS,
ZDS_INDEX_STORAGE_BUFFER,
ZDS_INDEX_STORAGE_IMAGE,
ZDS_INDEX_STORAGE_TEXELS,
};
struct zink_program_descriptor_data {
VkDescriptorPoolSize sizes[6]; //zink_descriptor_size_index
unsigned has_descriptors_mask[ZINK_SHADER_COUNT];
struct zink_descriptor_layout_key *layout_key[ZINK_DESCRIPTOR_TYPES]; //push set doesn't need one
uint8_t binding_usage;
unsigned push_usage;
VkDescriptorUpdateTemplateKHR templates[ZINK_DESCRIPTOR_TYPES + 1];
};
struct zink_descriptor_pool {
VkDescriptorPool pool;
VkDescriptorSet sets[ZINK_DEFAULT_MAX_DESCS];
@ -70,7 +49,8 @@ struct zink_descriptor_pool {
unsigned sets_alloc;
};
struct zink_batch_descriptor_data {
struct zink_batch_descriptor_data_lazy {
struct zink_batch_descriptor_data base;
struct hash_table pools[ZINK_DESCRIPTOR_TYPES];
struct zink_descriptor_pool *push_pool[2];
struct zink_program *pg[2]; //gfx, compute
@ -78,61 +58,16 @@ struct zink_batch_descriptor_data {
unsigned push_usage[2];
};
static inline enum zink_descriptor_size_index
vktype_to_size_idx(VkDescriptorType type)
ALWAYS_INLINE static struct zink_descriptor_data_lazy *
dd_lazy(struct zink_context *ctx)
{
switch (type) {
case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
return ZDS_INDEX_UBO;
case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
return ZDS_INDEX_COMBINED_SAMPLER;
case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
return ZDS_INDEX_UNIFORM_TEXELS;
case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
return ZDS_INDEX_STORAGE_BUFFER;
case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
return ZDS_INDEX_STORAGE_IMAGE;
case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
return ZDS_INDEX_STORAGE_TEXELS;
default: break;
}
unreachable("unknown type");
return (struct zink_descriptor_data_lazy*)ctx->dd;
}
static inline enum zink_descriptor_size_index
ztype_to_size_idx(enum zink_descriptor_type type)
ALWAYS_INLINE static struct zink_batch_descriptor_data_lazy *
bdd_lazy(struct zink_batch_state *bs)
{
switch (type) {
case ZINK_DESCRIPTOR_TYPE_UBO:
return ZDS_INDEX_UBO;
case ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW:
return ZDS_INDEX_COMBINED_SAMPLER;
case ZINK_DESCRIPTOR_TYPE_SSBO:
return ZDS_INDEX_STORAGE_BUFFER;
case ZINK_DESCRIPTOR_TYPE_IMAGE:
return ZDS_INDEX_STORAGE_IMAGE;
default: break;
}
unreachable("unknown type");
}
static inline unsigned
num_descriptor_sizes(const struct zink_program *pg, enum zink_descriptor_type type)
{
switch (type) {
case ZINK_DESCRIPTOR_TYPE_UBO:
return 1;
case ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW:
return !!pg->dd->sizes[ZDS_INDEX_COMBINED_SAMPLER].descriptorCount +
!!pg->dd->sizes[ZDS_INDEX_UNIFORM_TEXELS].descriptorCount;
case ZINK_DESCRIPTOR_TYPE_SSBO:
return 1;
case ZINK_DESCRIPTOR_TYPE_IMAGE:
return !!pg->dd->sizes[ZDS_INDEX_STORAGE_IMAGE].descriptorCount +
!!pg->dd->sizes[ZDS_INDEX_STORAGE_TEXELS].descriptorCount;
default: break;
}
unreachable("unknown type");
return (struct zink_batch_descriptor_data_lazy*)bs->dd;
}
static void
@ -201,9 +136,8 @@ zink_descriptor_program_init_lazy(struct zink_context *ctx, struct zink_program
else
stages = ((struct zink_gfx_program*)pg)->shaders;
if (!pg->dd)
pg->dd = rzalloc(pg, struct zink_program_descriptor_data);
pg->dd = (void*)rzalloc(pg, struct zink_program_descriptor_data);
if (!pg->dd)
return false;
@ -221,7 +155,6 @@ zink_descriptor_program_init_lazy(struct zink_context *ctx, struct zink_program
VkShaderStageFlagBits stage_flags = zink_shader_stage(stage);
for (int j = 0; j < ZINK_DESCRIPTOR_TYPES; j++) {
for (int k = 0; k < shader->num_bindings[j]; k++) {
pg->dd->has_descriptors_mask[stage] |= BITFIELD64_BIT(j);
/* dynamic ubos handled in push */
if (shader->bindings[j][k].type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) {
pg->dd->push_usage |= BITFIELD64_BIT(stage);
@ -238,7 +171,7 @@ zink_descriptor_program_init_lazy(struct zink_context *ctx, struct zink_program
binding->stageFlags = stage_flags;
binding->pImmutableSamplers = NULL;
enum zink_descriptor_size_index idx = vktype_to_size_idx(shader->bindings[j][k].type);
enum zink_descriptor_size_index idx = zink_vktype_to_size_idx(shader->bindings[j][k].type);
pg->dd->sizes[idx].descriptorCount += shader->bindings[j][k].size;
pg->dd->sizes[idx].type = shader->bindings[j][k].type;
switch (shader->bindings[j][k].type) {
@ -262,7 +195,6 @@ zink_descriptor_program_init_lazy(struct zink_context *ctx, struct zink_program
}
}
pg->dd->binding_usage = has_bindings;
if (!has_bindings && !push_count) {
ralloc_free(pg->dd);
pg->dd = NULL;
@ -272,7 +204,6 @@ zink_descriptor_program_init_lazy(struct zink_context *ctx, struct zink_program
}
pg->dsl[pg->num_dsl++] = push_count ? ctx->dd->push_dsl[pg->is_compute] : ctx->dd->dummy_dsl;
if (has_bindings) {
u_foreach_bit(type, has_bindings) {
for (unsigned i = 0; i < type; i++) {
@ -292,11 +223,13 @@ zink_descriptor_program_init_lazy(struct zink_context *ctx, struct zink_program
pg->layout = zink_pipeline_layout_create(screen, pg);
if (!pg->layout)
return false;
if (!screen->info.have_KHR_descriptor_update_template)
return true;
VkDescriptorUpdateTemplateCreateInfo template[ZINK_DESCRIPTOR_TYPES + 1] = {};
/* type of template */
VkDescriptorUpdateTemplateType types[ZINK_DESCRIPTOR_TYPES + 1] = {VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET};
if (have_push)
if (have_push && screen->lazy_descriptors)
types[0] = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR;
/* number of descriptors in template */
@ -307,8 +240,8 @@ zink_descriptor_program_init_lazy(struct zink_context *ctx, struct zink_program
wd_count[i + 1] = pg->dd->layout_key[i] ? pg->dd->layout_key[i]->num_descriptors : 0;
VkDescriptorUpdateTemplateEntry *push_entries[2] = {
ctx->dd->push_entries,
&ctx->dd->push_entries[PIPE_SHADER_COMPUTE],
dd_lazy(ctx)->push_entries,
&dd_lazy(ctx)->push_entries[PIPE_SHADER_COMPUTE],
};
for (unsigned i = 0; i < pg->num_dsl; i++) {
bool is_push = i == 0;
@ -366,23 +299,23 @@ static struct zink_descriptor_pool *
get_descriptor_pool_lazy(struct zink_context *ctx, struct zink_program *pg, enum zink_descriptor_type type, struct zink_batch_state *bs)
{
struct zink_screen *screen = zink_screen(ctx->base.screen);
struct hash_entry *he = _mesa_hash_table_search(&bs->dd->pools[type], pg->dd->layout_key[type]);
struct hash_entry *he = _mesa_hash_table_search(&bdd_lazy(bs)->pools[type], pg->dd->layout_key[type]);
if (he)
return he->data;
struct zink_descriptor_pool *pool = rzalloc(bs, struct zink_descriptor_pool);
if (!pool)
return NULL;
unsigned idx = ztype_to_size_idx(type);
unsigned idx = zink_descriptor_type_to_size_idx(type);
VkDescriptorPoolSize *size = &pg->dd->sizes[idx];
/* this is a sampler/image set with no images only texels */
if (!size->descriptorCount)
size++;
pool->pool = create_pool(screen, num_descriptor_sizes(pg, type), size, 0);
pool->pool = create_pool(screen, zink_descriptor_program_num_sizes(pg, type), size, 0);
if (!pool->pool) {
ralloc_free(pool);
return NULL;
}
_mesa_hash_table_insert(&bs->dd->pools[type], pg->dd->layout_key[type], pool);
_mesa_hash_table_insert(&bdd_lazy(bs)->pools[type], pg->dd->layout_key[type], pool);
return pool;
}
@ -414,7 +347,7 @@ populate_sets(struct zink_context *ctx, struct zink_program *pg, uint8_t *change
{
struct zink_batch_state *bs = ctx->batch.state;
if (need_push && !zink_screen(ctx->base.screen)->info.have_KHR_push_descriptor) {
struct zink_descriptor_pool *pool = bs->dd->push_pool[pg->is_compute];
struct zink_descriptor_pool *pool = bdd_lazy(bs)->push_pool[pg->is_compute];
sets[0] = get_descriptor_set_lazy(ctx, NULL, 0, pool, pg->is_compute);
if (!sets[0])
return false;
@ -453,34 +386,34 @@ zink_descriptors_update_lazy(struct zink_context *ctx, bool is_compute)
struct zink_batch_state *bs = ctx->batch.state;
struct zink_program *pg = is_compute ? &ctx->curr_compute->base : &ctx->curr_program->base;
bool batch_changed = !bs->dd->pg[is_compute];
bool batch_changed = !bdd_lazy(bs)->pg[is_compute];
if (batch_changed) {
/* update all sets and bind null sets */
ctx->dd->state_changed[is_compute] = pg->dd->binding_usage;
ctx->dd->push_state_changed[is_compute] = !!pg->dd->push_usage;
dd_lazy(ctx)->state_changed[is_compute] = pg->dd->binding_usage;
dd_lazy(ctx)->push_state_changed[is_compute] = !!pg->dd->push_usage;
}
if (pg != bs->dd->pg[is_compute]) {
if (pg != bdd_lazy(bs)->pg[is_compute]) {
/* if we don't already know that we have to update all sets,
* check to see if any dsls changed
*
* also always update the dsl pointers on program change
*/
for (unsigned i = 0; i < ARRAY_SIZE(bs->dd->dsl[is_compute]); i++) {
for (unsigned i = 0; i < ARRAY_SIZE(bdd_lazy(bs)->dsl[is_compute]); i++) {
/* push set is already detected, start at 1 */
if (bs->dd->dsl[is_compute][i] != pg->dsl[i + 1])
ctx->dd->state_changed[is_compute] |= BITFIELD_BIT(i);
bs->dd->dsl[is_compute][i] = pg->dsl[i + 1];
if (bdd_lazy(bs)->dsl[is_compute][i] != pg->dsl[i + 1])
dd_lazy(ctx)->state_changed[is_compute] |= BITFIELD_BIT(i);
bdd_lazy(bs)->dsl[is_compute][i] = pg->dsl[i + 1];
}
ctx->dd->push_state_changed[is_compute] |= bs->dd->push_usage[is_compute] != pg->dd->push_usage;
bs->dd->push_usage[is_compute] = pg->dd->push_usage;
dd_lazy(ctx)->push_state_changed[is_compute] |= bdd_lazy(bs)->push_usage[is_compute] != pg->dd->push_usage;
bdd_lazy(bs)->push_usage[is_compute] = pg->dd->push_usage;
}
bs->dd->pg[is_compute] = pg;
bdd_lazy(bs)->pg[is_compute] = pg;
VkDescriptorSet desc_sets[5];
uint8_t changed_sets = pg->dd->binding_usage & ctx->dd->state_changed[is_compute];
uint8_t changed_sets = pg->dd->binding_usage & dd_lazy(ctx)->state_changed[is_compute];
bool need_push = pg->dd->push_usage &&
(ctx->dd->push_state_changed[is_compute] || batch_changed);
(dd_lazy(ctx)->push_state_changed[is_compute] || batch_changed);
if (!populate_sets(ctx, pg, &changed_sets, need_push, desc_sets)) {
debug_printf("ZINK: couldn't get descriptor sets!\n");
return;
@ -488,9 +421,9 @@ zink_descriptors_update_lazy(struct zink_context *ctx, bool is_compute)
if (ctx->batch.state != bs) {
/* recheck: populate may have overflowed the pool and triggered a flush */
batch_changed = true;
ctx->dd->state_changed[is_compute] = pg->dd->binding_usage;
changed_sets = pg->dd->binding_usage & ctx->dd->state_changed[is_compute];
ctx->dd->push_state_changed[is_compute] = !!pg->dd->push_usage;
dd_lazy(ctx)->state_changed[is_compute] = pg->dd->binding_usage;
changed_sets = pg->dd->binding_usage & dd_lazy(ctx)->state_changed[is_compute];
dd_lazy(ctx)->push_state_changed[is_compute] = !!pg->dd->push_usage;
}
bs = ctx->batch.state;
@ -505,10 +438,10 @@ zink_descriptors_update_lazy(struct zink_context *ctx, bool is_compute)
pg->layout, type + 1, 1, &desc_sets[type + 1],
0, NULL);
}
ctx->dd->state_changed[is_compute] = false;
dd_lazy(ctx)->state_changed[is_compute] = false;
}
if (pg->dd->push_usage && ctx->dd->push_state_changed[is_compute]) {
if (pg->dd->push_usage && dd_lazy(ctx)->push_state_changed[is_compute]) {
if (screen->info.have_KHR_push_descriptor)
screen->vk.CmdPushDescriptorSetWithTemplateKHR(batch->state->cmdbuf, pg->dd->templates[0],
pg->layout, 0, ctx);
@ -520,26 +453,26 @@ zink_descriptors_update_lazy(struct zink_context *ctx, bool is_compute)
pg->layout, 0, 1, &desc_sets[0],
0, NULL);
}
ctx->dd->push_state_changed[is_compute] = false;
} else if (ctx->dd->push_state_changed[is_compute]) {
dd_lazy(ctx)->push_state_changed[is_compute] = false;
} else if (dd_lazy(ctx)->push_state_changed[is_compute]) {
vkCmdBindDescriptorSets(bs->cmdbuf,
is_compute ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS,
pg->layout, 0, 1, &ctx->dd->dummy_set,
0, NULL);
ctx->dd->push_state_changed[is_compute] = false;
dd_lazy(ctx)->push_state_changed[is_compute] = false;
}
/* set again in case of flushing */
bs->dd->pg[is_compute] = pg;
ctx->dd->pg[is_compute] = pg;
bdd_lazy(bs)->pg[is_compute] = pg;
dd_lazy(ctx)->pg[is_compute] = pg;
}
void
zink_context_invalidate_descriptor_state_lazy(struct zink_context *ctx, enum pipe_shader_type shader, enum zink_descriptor_type type, unsigned start, unsigned count)
{
if (type == ZINK_DESCRIPTOR_TYPE_UBO && !start)
ctx->dd->push_state_changed[shader == PIPE_SHADER_COMPUTE] = true;
dd_lazy(ctx)->push_state_changed[shader == PIPE_SHADER_COMPUTE] = true;
else
ctx->dd->state_changed[shader == PIPE_SHADER_COMPUTE] |= BITFIELD_BIT(type);
dd_lazy(ctx)->state_changed[shader == PIPE_SHADER_COMPUTE] |= BITFIELD_BIT(type);
}
void
@ -548,15 +481,15 @@ zink_batch_descriptor_deinit_lazy(struct zink_screen *screen, struct zink_batch_
if (!bs->dd)
return;
for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++) {
hash_table_foreach(&bs->dd->pools[i], entry) {
hash_table_foreach(&bdd_lazy(bs)->pools[i], entry) {
struct zink_descriptor_pool *pool = (void*)entry->data;
vkDestroyDescriptorPool(screen->dev, pool->pool, NULL);
}
}
if (bs->dd->push_pool[0])
vkDestroyDescriptorPool(screen->dev, bs->dd->push_pool[0]->pool, NULL);
if (bs->dd->push_pool[1])
vkDestroyDescriptorPool(screen->dev, bs->dd->push_pool[1]->pool, NULL);
if (bdd_lazy(bs)->push_pool[0])
vkDestroyDescriptorPool(screen->dev, bdd_lazy(bs)->push_pool[0]->pool, NULL);
if (bdd_lazy(bs)->push_pool[1])
vkDestroyDescriptorPool(screen->dev, bdd_lazy(bs)->push_pool[1]->pool, NULL);
ralloc_free(bs->dd);
}
@ -564,37 +497,37 @@ void
zink_batch_descriptor_reset_lazy(struct zink_screen *screen, struct zink_batch_state *bs)
{
for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++) {
hash_table_foreach(&bs->dd->pools[i], entry) {
hash_table_foreach(&bdd_lazy(bs)->pools[i], entry) {
struct zink_descriptor_pool *pool = (void*)entry->data;
pool->set_idx = 0;
}
}
for (unsigned i = 0; i < 2; i++) {
bs->dd->pg[i] = NULL;
if (bs->dd->push_pool[i])
bs->dd->push_pool[i]->set_idx = 0;
bdd_lazy(bs)->pg[i] = NULL;
if (bdd_lazy(bs)->push_pool[i])
bdd_lazy(bs)->push_pool[i]->set_idx = 0;
}
}
bool
zink_batch_descriptor_init_lazy(struct zink_screen *screen, struct zink_batch_state *bs)
{
bs->dd = rzalloc(bs, struct zink_batch_descriptor_data);
bs->dd = (void*)rzalloc(bs, struct zink_batch_descriptor_data_lazy);
if (!bs->dd)
return false;
for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++) {
if (!_mesa_hash_table_init(&bs->dd->pools[i], bs->dd, _mesa_hash_pointer, _mesa_key_pointer_equal))
if (!_mesa_hash_table_init(&bdd_lazy(bs)->pools[i], bs->dd, _mesa_hash_pointer, _mesa_key_pointer_equal))
return false;
}
if (!screen->info.have_KHR_push_descriptor) {
VkDescriptorPoolSize sizes;
sizes.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
sizes.descriptorCount = ZINK_SHADER_COUNT * ZINK_DEFAULT_MAX_DESCS;
bs->dd->push_pool[0] = rzalloc(bs, struct zink_descriptor_pool);
bs->dd->push_pool[0]->pool = create_pool(screen, 1, &sizes, 0);
bdd_lazy(bs)->push_pool[0] = rzalloc(bs, struct zink_descriptor_pool);
bdd_lazy(bs)->push_pool[0]->pool = create_pool(screen, 1, &sizes, 0);
sizes.descriptorCount = ZINK_DEFAULT_MAX_DESCS;
bs->dd->push_pool[1] = rzalloc(bs, struct zink_descriptor_pool);
bs->dd->push_pool[1]->pool = create_pool(screen, 1, &sizes, 0);
bdd_lazy(bs)->push_pool[1] = rzalloc(bs, struct zink_descriptor_pool);
bdd_lazy(bs)->push_pool[1]->pool = create_pool(screen, 1, &sizes, 0);
}
return true;
}
@ -602,25 +535,27 @@ zink_batch_descriptor_init_lazy(struct zink_screen *screen, struct zink_batch_st
bool
zink_descriptors_init_lazy(struct zink_context *ctx)
{
ctx->dd = rzalloc(ctx, struct zink_descriptor_data);
ctx->dd = (void*)rzalloc(ctx, struct zink_descriptor_data_lazy);
if (!ctx->dd)
return false;
for (unsigned i = 0; i < PIPE_SHADER_TYPES; i++) {
VkDescriptorUpdateTemplateEntry *entry = &ctx->dd->push_entries[i];
entry->dstBinding = tgsi_processor_to_shader_stage(i);
entry->descriptorCount = 1;
entry->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
entry->offset = offsetof(struct zink_context, di.ubos[i][0]);
entry->stride = sizeof(VkDescriptorBufferInfo);
if (zink_screen(ctx->base.screen)->info.have_KHR_descriptor_update_template) {
for (unsigned i = 0; i < PIPE_SHADER_TYPES; i++) {
VkDescriptorUpdateTemplateEntry *entry = &dd_lazy(ctx)->push_entries[i];
entry->dstBinding = tgsi_processor_to_shader_stage(i);
entry->descriptorCount = 1;
entry->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
entry->offset = offsetof(struct zink_context, di.ubos[i][0]);
entry->stride = sizeof(VkDescriptorBufferInfo);
}
printf("ZINK: USING EXPERIMENTAL LAZY DESCRIPTORS\n");
}
struct zink_screen *screen = zink_screen(ctx->base.screen);
struct zink_descriptor_layout_key *layout_keys[2];
if (!zink_descriptor_util_push_layouts_get(ctx, ctx->dd->push_dsl, layout_keys))
if (!ctx->dd->push_dsl[0] || !ctx->dd->push_dsl[1])
struct zink_descriptor_layout_key *layout_key;
if (!zink_descriptor_util_push_layouts_get(ctx, ctx->dd->push_dsl, ctx->dd->push_layout_keys))
return false;
ctx->dd->dummy_dsl = zink_descriptor_util_layout_get(ctx, 0, NULL, 0, &layout_keys[0]);
ctx->dd->dummy_dsl = zink_descriptor_util_layout_get(ctx, 0, NULL, 0, &layout_key);
if (!ctx->dd->dummy_dsl)
return false;
VkDescriptorPoolSize null_size = {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1};
@ -628,7 +563,6 @@ zink_descriptors_init_lazy(struct zink_context *ctx)
zink_descriptor_util_alloc_sets(screen, ctx->dd->dummy_dsl,
ctx->dd->dummy_pool, &ctx->dd->dummy_set, 1);
zink_descriptor_util_init_null_set(ctx, ctx->dd->dummy_set);
printf("ZINK: USING EXPERIMENTAL LAZY DESCRIPTORS\n");
return true;
}
@ -639,7 +573,7 @@ zink_descriptors_deinit_lazy(struct zink_context *ctx)
struct zink_screen *screen = zink_screen(ctx->base.screen);
if (ctx->dd->dummy_pool)
vkDestroyDescriptorPool(screen->dev, ctx->dd->dummy_pool, NULL);
if (screen->info.have_KHR_push_descriptor) {
if (screen->lazy_descriptors) {
vkDestroyDescriptorSetLayout(screen->dev, ctx->dd->push_dsl[0], NULL);
vkDestroyDescriptorSetLayout(screen->dev, ctx->dd->push_dsl[1], NULL);
}