anv: add a helper to flush the descriptors for indirect compute execution

When we don't know what shader is executed, we still have the bind
map from the indirect execution set.

Signed-off-by: Lionel Landwerlin <lionel.g.landwerlin@intel.com>
Acked-by: Alyssa Rosenzweig <alyssa.rosenzweig@intel.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/31384>
This commit is contained in:
Lionel Landwerlin 2026-04-10 12:09:54 +03:00 committed by Marge Bot
parent 6f5d30c0a2
commit 4960e103ef
2 changed files with 78 additions and 23 deletions

View file

@ -517,6 +517,9 @@ void genX(emit_sampler_state)(const struct anv_device *device,
uint32_t border_color_offset,
struct anv_sampler_state *state);
void genX(cmd_buffer_flush_indirect_cs_descriptor_sets)(struct anv_cmd_buffer *cmd_buffer,
const struct anv_pipeline_bind_map *bind_map);
void genX(emit_embedded_sampler)(struct anv_device *device,
struct anv_embedded_sampler *sampler,
struct anv_pipeline_embedded_sampler_binding *binding);

View file

@ -2683,28 +2683,28 @@ emit_direct_descriptor_binding_table_entry(struct anv_cmd_buffer *cmd_buffer,
static VkResult
emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
struct anv_cmd_pipeline_state *pipe_state,
const struct anv_shader *shader,
const struct anv_pipeline_bind_map *bind_map,
struct anv_push_descriptor_info push_desc_info,
struct anv_state *bt_state)
{
uint32_t state_offset;
const struct anv_pipeline_bind_map *map = &shader->bind_map;
if (map->surface_count == 0) {
if (bind_map->surface_count == 0) {
*bt_state = (struct anv_state) { 0, };
return VK_SUCCESS;
}
*bt_state = anv_cmd_buffer_alloc_binding_table(cmd_buffer,
map->surface_count,
bind_map->surface_count,
&state_offset);
uint32_t *bt_map = bt_state->map;
if (bt_state->map == NULL)
return VK_ERROR_OUT_OF_DEVICE_MEMORY;
for (uint32_t s = 0; s < map->surface_count; s++) {
for (uint32_t s = 0; s < bind_map->surface_count; s++) {
const struct anv_pipeline_binding *binding =
&map->surface_to_descriptor[s];
&bind_map->surface_to_descriptor[s];
struct anv_state surface_state;
@ -2715,7 +2715,6 @@ emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
case ANV_DESCRIPTOR_SET_COLOR_ATTACHMENTS: {
/* Color attachment binding */
assert(shader->vk.stage == MESA_SHADER_FRAGMENT);
uint32_t index = binding->index < MAX_RTS ?
cmd_buffer->state.gfx.color_output_mapping[binding->index] :
binding->index;
@ -2737,7 +2736,7 @@ emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
* needed.
*/
assert(!cmd_buffer->device->info->has_lsc);
if (shader->bind_map.layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_BUFFER) {
if (bind_map->layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_BUFFER) {
assert(pipe_state->descriptor_buffers[binding->index].state.alloc_size);
bt_map[s] = pipe_state->descriptor_buffers[binding->index].state.offset +
state_offset;
@ -2748,7 +2747,7 @@ emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
/* If the shader doesn't access the set buffer, just put the null
* surface.
*/
if (set->is_push && shader->push_desc_info.push_set_buffer == 0) {
if (set->is_push && push_desc_info.push_set_buffer == 0) {
bt_map[s] = 0;
break;
}
@ -2793,7 +2792,7 @@ emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
uint32_t desc_idx = set->layout->binding[binding->binding].descriptor_index;
assert(desc_idx < MAX_PUSH_DESCRIPTORS);
if (shader->push_desc_info.fully_promoted_ubo_descriptors & BITFIELD_BIT(desc_idx)) {
if (push_desc_info.fully_promoted_ubo_descriptors & BITFIELD_BIT(desc_idx)) {
surface_state =
anv_null_surface_state_for_binding_table(cmd_buffer->device);
break;
@ -2808,14 +2807,14 @@ emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
}
uint32_t surface_state_offset;
if (map->layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_INDIRECT) {
if (bind_map->layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_INDIRECT) {
surface_state_offset =
emit_indirect_descriptor_binding_table_entry(cmd_buffer,
pipe_state,
binding, desc);
} else {
assert(map->layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT ||
map->layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_BUFFER);
assert(bind_map->layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT ||
bind_map->layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_BUFFER);
surface_state_offset =
emit_direct_descriptor_binding_table_entry(cmd_buffer, pipe_state,
set, binding, desc);
@ -2833,24 +2832,23 @@ emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
static VkResult
emit_samplers(struct anv_cmd_buffer *cmd_buffer,
struct anv_cmd_pipeline_state *pipe_state,
const struct anv_shader *shader,
const struct anv_pipeline_bind_map *bind_map,
struct anv_state *state)
{
const struct anv_pipeline_bind_map *map = &shader->bind_map;
if (map->sampler_count == 0) {
if (bind_map->sampler_count == 0) {
*state = (struct anv_state) { 0, };
return VK_SUCCESS;
}
uint32_t size = map->sampler_count * 16;
uint32_t size = bind_map->sampler_count * 16;
*state = anv_cmd_buffer_alloc_dynamic_state(cmd_buffer, size, 32);
if (state->map == NULL)
return VK_ERROR_OUT_OF_DEVICE_MEMORY;
for (uint32_t s = 0; s < map->sampler_count; s++) {
for (uint32_t s = 0; s < bind_map->sampler_count; s++) {
const struct anv_pipeline_binding *binding =
&map->sampler_to_descriptor[s];
&bind_map->sampler_to_descriptor[s];
const struct anv_descriptor *desc =
&pipe_state->descriptors[binding->set]->descriptors[binding->index];
@ -2894,13 +2892,15 @@ genX(cmd_buffer_flush_descriptor_sets)(struct anv_cmd_buffer *cmd_buffer,
continue;
assert(stage < ARRAY_SIZE(cmd_buffer->state.samplers));
result = emit_samplers(cmd_buffer, pipe_state, shaders[i],
result = emit_samplers(cmd_buffer, pipe_state, &shaders[i]->bind_map,
&cmd_buffer->state.samplers[stage]);
if (result != VK_SUCCESS)
break;
assert(stage < ARRAY_SIZE(cmd_buffer->state.binding_tables));
result = emit_binding_table(cmd_buffer, pipe_state, shaders[i],
result = emit_binding_table(cmd_buffer, pipe_state,
&shaders[i]->bind_map,
shaders[i]->push_desc_info,
&cmd_buffer->state.binding_tables[stage]);
if (result != VK_SUCCESS)
break;
@ -2929,13 +2929,15 @@ genX(cmd_buffer_flush_descriptor_sets)(struct anv_cmd_buffer *cmd_buffer,
mesa_shader_stage stage = shaders[i]->vk.stage;
result = emit_samplers(cmd_buffer, pipe_state, shaders[i],
result = emit_samplers(cmd_buffer, pipe_state, &shaders[i]->bind_map,
&cmd_buffer->state.samplers[stage]);
if (result != VK_SUCCESS) {
anv_batch_set_error(&cmd_buffer->batch, result);
return 0;
}
result = emit_binding_table(cmd_buffer, pipe_state, shaders[i],
result = emit_binding_table(cmd_buffer, pipe_state,
&shaders[i]->bind_map,
shaders[i]->push_desc_info,
&cmd_buffer->state.binding_tables[stage]);
if (result != VK_SUCCESS) {
anv_batch_set_error(&cmd_buffer->batch, result);
@ -2949,6 +2951,56 @@ genX(cmd_buffer_flush_descriptor_sets)(struct anv_cmd_buffer *cmd_buffer,
return flushed;
}
/* Flush the compute-stage descriptor state (samplers + binding table) for
 * indirect execution, where the shader that will run is not known at record
 * time. Only a bind map (from the indirect execution set) is available, so
 * every descriptor is conservatively treated as used.
 *
 * On binding-table allocation failure, a fresh binding table block is
 * allocated, the BT pool base address is re-emitted, and the emission is
 * retried once; any further failure is recorded on the batch.
 */
void
genX(cmd_buffer_flush_indirect_cs_descriptor_sets)(struct anv_cmd_buffer *cmd_buffer,
const struct anv_pipeline_bind_map *bind_map)
{
struct anv_cmd_pipeline_state *pipe_state = &cmd_buffer->state.compute.base;
/* Assume all descriptors are used */
struct anv_push_descriptor_info push_desc_info = {
/* Non-zero sentinels so emit_binding_table() never takes the
 * "shader doesn't access this descriptor" null-surface shortcuts.
 */
.used_descriptors = 0xffffffff,
.push_set_buffer = 0xff,
};
VkResult result = VK_SUCCESS;
result = emit_samplers(cmd_buffer, pipe_state, bind_map,
&cmd_buffer->state.samplers[MESA_SHADER_COMPUTE]);
if (result != VK_SUCCESS) {
anv_batch_set_error(&cmd_buffer->batch, result);
return;
}
result = emit_binding_table(cmd_buffer, pipe_state, bind_map, push_desc_info,
&cmd_buffer->state.binding_tables[MESA_SHADER_COMPUTE]);
if (result != VK_SUCCESS) {
/* Most likely the current binding table block is exhausted; switch to
 * a new block and retry the whole emission once.
 */
result = anv_cmd_buffer_new_binding_table_block(cmd_buffer);
if (result != VK_SUCCESS) {
anv_batch_set_error(&cmd_buffer->batch, result);
return;
}
/* Re-emit the BT base address so we get the new surface state base
 * address before we start emitting binding tables etc.
 */
genX(cmd_buffer_emit_bt_pool_base_address)(cmd_buffer);
/* NOTE(review): samplers are re-emitted here as well — presumably so
 * all compute descriptor state is regenerated consistently after the
 * block switch; confirm against cmd_buffer_flush_descriptor_sets().
 */
result = emit_samplers(cmd_buffer, pipe_state, bind_map,
&cmd_buffer->state.samplers[MESA_SHADER_COMPUTE]);
if (result != VK_SUCCESS) {
anv_batch_set_error(&cmd_buffer->batch, result);
return;
}
result = emit_binding_table(cmd_buffer, pipe_state,
bind_map, push_desc_info,
&cmd_buffer->state.binding_tables[MESA_SHADER_COMPUTE]);
if (result != VK_SUCCESS) {
/* Second failure: give up and mark the command buffer as errored. */
anv_batch_set_error(&cmd_buffer->batch, result);
return;
}
}
}
/* This function generates the surface state used to read the content of the
* descriptor buffer.
*/