diff --git a/src/intel/vulkan/anv_cmd_buffer.c b/src/intel/vulkan/anv_cmd_buffer.c
index 81ac5f135b7..fe237d024a0 100644
--- a/src/intel/vulkan/anv_cmd_buffer.c
+++ b/src/intel/vulkan/anv_cmd_buffer.c
@@ -1108,6 +1108,36 @@ void anv_CmdBindVertexBuffers2(
    }
 }
 
+void anv_CmdBindIndexBuffer2KHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    _buffer,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkIndexType                                 indexType)
+{
+   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
+   ANV_FROM_HANDLE(anv_buffer, buffer, _buffer);
+
+   if (cmd_buffer->state.gfx.index_type != indexType) {
+      cmd_buffer->state.gfx.index_type = indexType;
+      cmd_buffer->state.gfx.dirty |= ANV_CMD_DIRTY_INDEX_TYPE;
+   }
+
+   uint64_t index_addr = buffer ?
+      anv_address_physical(anv_address_add(buffer->address, offset)) : 0;
+   uint32_t index_size = buffer ? vk_buffer_range(&buffer->vk, offset, size) : 0;
+   if (cmd_buffer->state.gfx.index_addr != index_addr ||
+       cmd_buffer->state.gfx.index_size != index_size) {
+      cmd_buffer->state.gfx.index_addr = index_addr;
+      cmd_buffer->state.gfx.index_size = index_size;
+      cmd_buffer->state.gfx.index_mocs =
+         anv_mocs(cmd_buffer->device,
+                  buffer ? buffer->address.bo : NULL,
+                  ISL_SURF_USAGE_INDEX_BUFFER_BIT);
+      cmd_buffer->state.gfx.dirty |= ANV_CMD_DIRTY_INDEX_BUFFER;
+   }
+}
+
 void anv_CmdBindTransformFeedbackBuffersEXT(
     VkCommandBuffer                             commandBuffer,
     uint32_t                                    firstBinding,
diff --git a/src/intel/vulkan/anv_private.h b/src/intel/vulkan/anv_private.h
index c230590baf9..dc29b813b76 100644
--- a/src/intel/vulkan/anv_private.h
+++ b/src/intel/vulkan/anv_private.h
@@ -3468,7 +3468,7 @@ enum anv_cmd_dirty_bits {
    ANV_CMD_DIRTY_RENDER_AREA             = 1 << 2,
    ANV_CMD_DIRTY_RENDER_TARGETS          = 1 << 3,
    ANV_CMD_DIRTY_XFB_ENABLE              = 1 << 4,
-   ANV_CMD_DIRTY_RESTART_INDEX           = 1 << 5,
+   ANV_CMD_DIRTY_INDEX_TYPE              = 1 << 5,
    ANV_CMD_DIRTY_OCCLUSION_QUERY_ACTIVE  = 1 << 6,
    ANV_CMD_DIRTY_INDIRECT_DATA_STRIDE    = 1 << 7,
 };
@@ -4009,9 +4009,9 @@ struct anv_cmd_graphics_state {
 
    bool used_task_shader;
 
-   struct anv_buffer *index_buffer;
-   uint32_t index_type; /**< 3DSTATE_INDEX_BUFFER.IndexFormat */
-   uint32_t index_offset;
+   uint64_t index_addr;
+   uint32_t index_mocs;
+   VkIndexType index_type;
    uint32_t index_size;
 
    uint32_t indirect_data_stride;
diff --git a/src/intel/vulkan/genX_blorp_exec.c b/src/intel/vulkan/genX_blorp_exec.c
index ae74ee6722d..965a54ed6df 100644
--- a/src/intel/vulkan/genX_blorp_exec.c
+++ b/src/intel/vulkan/genX_blorp_exec.c
@@ -430,7 +430,7 @@ blorp_exec_on_render(struct blorp_batch *batch,
    anv_cmd_dirty_mask_t dirty = ~(ANV_CMD_DIRTY_INDEX_BUFFER |
                                   ANV_CMD_DIRTY_XFB_ENABLE |
                                   ANV_CMD_DIRTY_OCCLUSION_QUERY_ACTIVE |
-                                  ANV_CMD_DIRTY_RESTART_INDEX);
+                                  ANV_CMD_DIRTY_INDEX_TYPE);
 
    cmd_buffer->state.gfx.vb_dirty = ~0;
    cmd_buffer->state.gfx.dirty |= dirty;
diff --git a/src/intel/vulkan/genX_cmd_buffer.c b/src/intel/vulkan/genX_cmd_buffer.c
index 9249c358cba..a54ddd6ce1a 100644
--- a/src/intel/vulkan/genX_cmd_buffer.c
+++ b/src/intel/vulkan/genX_cmd_buffer.c
@@ -6089,50 +6089,6 @@ void genX(CmdWaitEvents2)(
    cmd_buffer_barrier(cmd_buffer, eventCount, pDependencyInfos, "wait event");
 }
 
-static uint32_t vk_to_intel_index_type(VkIndexType type)
-{
-   switch (type) {
-   case VK_INDEX_TYPE_UINT8_KHR:
-      return INDEX_BYTE;
-   case VK_INDEX_TYPE_UINT16:
-      return INDEX_WORD;
-   case VK_INDEX_TYPE_UINT32:
-      return INDEX_DWORD;
-   default:
-      unreachable("invalid index type");
-   }
-}
-
-void genX(CmdBindIndexBuffer2KHR)(
-    VkCommandBuffer                             commandBuffer,
-    VkBuffer                                    _buffer,
-    VkDeviceSize                                offset,
-    VkDeviceSize                                size,
-    VkIndexType                                 indexType)
-{
-   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
-   ANV_FROM_HANDLE(anv_buffer, buffer, _buffer);
-
-   uint32_t restart_index = vk_index_to_restart(indexType);
-   if (cmd_buffer->state.gfx.restart_index != restart_index) {
-      cmd_buffer->state.gfx.restart_index = restart_index;
-      cmd_buffer->state.gfx.dirty |= ANV_CMD_DIRTY_RESTART_INDEX;
-   }
-
-   uint32_t index_size = buffer ? vk_buffer_range(&buffer->vk, offset, size) : 0;
-   uint32_t index_type = vk_to_intel_index_type(indexType);
-   if (cmd_buffer->state.gfx.index_buffer != buffer ||
-       cmd_buffer->state.gfx.index_type != index_type ||
-       cmd_buffer->state.gfx.index_offset != offset ||
-       cmd_buffer->state.gfx.index_size != index_size) {
-      cmd_buffer->state.gfx.index_buffer = buffer;
-      cmd_buffer->state.gfx.index_type = vk_to_intel_index_type(indexType);
-      cmd_buffer->state.gfx.index_offset = offset;
-      cmd_buffer->state.gfx.index_size = index_size;
-      cmd_buffer->state.gfx.dirty |= ANV_CMD_DIRTY_INDEX_BUFFER;
-   }
-}
-
 VkResult genX(CmdSetPerformanceOverrideINTEL)(
     VkCommandBuffer                             commandBuffer,
     const VkPerformanceOverrideInfoINTEL*       pOverrideInfo)
diff --git a/src/intel/vulkan/genX_gfx_state.c b/src/intel/vulkan/genX_gfx_state.c
index ed1546f2fdf..f9d2d1a28d9 100644
--- a/src/intel/vulkan/genX_gfx_state.c
+++ b/src/intel/vulkan/genX_gfx_state.c
@@ -138,6 +138,20 @@ static const uint32_t vk_to_intel_primitive_type[] = {
    [VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY]    = _3DPRIM_TRISTRIP_ADJ,
 };
 
+static uint32_t vk_to_intel_index_type(VkIndexType type)
+{
+   switch (type) {
+   case VK_INDEX_TYPE_UINT8_KHR:
+      return INDEX_BYTE;
+   case VK_INDEX_TYPE_UINT16:
+      return INDEX_WORD;
+   case VK_INDEX_TYPE_UINT32:
+      return INDEX_DWORD;
+   default:
+      unreachable("invalid index type");
+   }
+}
+
 void
 genX(batch_emit_wa_16014912113)(struct anv_batch *batch,
                                 const struct intel_urb_config *urb_cfg)
@@ -1305,7 +1319,7 @@ update_vf_restart(struct anv_gfx_dynamic_state *hw_state,
                   const struct anv_cmd_graphics_state *gfx)
 {
    SET(VF, vf.IndexedDrawCutIndexEnable, dyn->ia.primitive_restart_enable);
-   SET(VF, vf.CutIndex, gfx->restart_index);
+   SET(VF, vf.CutIndex, vk_index_to_restart(gfx->index_type));
 }
 
 ALWAYS_INLINE static void
@@ -1929,11 +1943,12 @@ cmd_buffer_flush_gfx_runtime_state(struct anv_gfx_dynamic_state *hw_state,
        BITSET_TEST(dyn->dirty, MESA_VK_DYNAMIC_RS_LINE_STIPPLE_ENABLE))
       update_line_stipple(hw_state, dyn);
 
-   if ((gfx->dirty & ANV_CMD_DIRTY_RESTART_INDEX) ||
+   if ((gfx->dirty & ANV_CMD_DIRTY_INDEX_TYPE) ||
        BITSET_TEST(dyn->dirty, MESA_VK_DYNAMIC_IA_PRIMITIVE_RESTART_ENABLE))
       update_vf_restart(hw_state, dyn, gfx);
 
-   if (gfx->dirty & ANV_CMD_DIRTY_INDEX_BUFFER)
+   if ((gfx->dirty & ANV_CMD_DIRTY_INDEX_BUFFER) ||
+       (gfx->dirty & ANV_CMD_DIRTY_INDEX_TYPE))
       BITSET_SET(hw_state->dirty, ANV_GFX_STATE_INDEX_BUFFER);
 
 #if GFX_VERx10 >= 125
@@ -2698,20 +2713,16 @@ cmd_buffer_gfx_state_emission(struct anv_cmd_buffer *cmd_buffer)
    }
 
    if (BITSET_TEST(hw_state->dirty, ANV_GFX_STATE_INDEX_BUFFER)) {
-      struct anv_buffer *buffer = gfx->index_buffer;
-      uint32_t offset = gfx->index_offset;
       anv_batch_emit(&cmd_buffer->batch, GENX(3DSTATE_INDEX_BUFFER), ib) {
-         ib.IndexFormat = gfx->index_type;
-         ib.MOCS = anv_mocs(device,
-                            buffer ? buffer->address.bo : NULL,
-                            ISL_SURF_USAGE_INDEX_BUFFER_BIT);
+         ib.IndexFormat = vk_to_intel_index_type(gfx->index_type);
+         ib.MOCS = gfx->index_addr == 0 ?
+                   anv_mocs(cmd_buffer->device, NULL, ISL_SURF_USAGE_INDEX_BUFFER_BIT) :
+                   gfx->index_mocs;
 #if GFX_VER >= 12
          ib.L3BypassDisable = true;
 #endif
-         if (buffer) {
-            ib.BufferStartingAddress = anv_address_add(buffer->address, offset);
-            ib.BufferSize = gfx->index_size;
-         }
+         ib.BufferStartingAddress = anv_address_from_u64(gfx->index_addr);
+         ib.BufferSize = gfx->index_size;
       }
    }
 
diff --git a/src/intel/vulkan/genX_gpu_memcpy.c b/src/intel/vulkan/genX_gpu_memcpy.c
index f91f6e14d65..81d6eac655a 100644
--- a/src/intel/vulkan/genX_gpu_memcpy.c
+++ b/src/intel/vulkan/genX_gpu_memcpy.c
@@ -356,7 +356,8 @@ genX(emit_so_memcpy_fini)(struct anv_memcpy_state *state)
    }
 
    state->cmd_buffer->state.gfx.dirty |= ~(ANV_CMD_DIRTY_PIPELINE |
-                                           ANV_CMD_DIRTY_INDEX_BUFFER);
+                                           ANV_CMD_DIRTY_INDEX_BUFFER |
+                                           ANV_CMD_DIRTY_INDEX_TYPE);
 
    memcpy(&state->cmd_buffer->state.gfx.urb_cfg, &state->urb_cfg,
           sizeof(struct intel_urb_config));
diff --git a/src/intel/vulkan/genX_simple_shader.c b/src/intel/vulkan/genX_simple_shader.c
index efe21944b7c..50b24906574 100644
--- a/src/intel/vulkan/genX_simple_shader.c
+++ b/src/intel/vulkan/genX_simple_shader.c
@@ -380,7 +380,7 @@ genX(emit_simpler_shader_init_fragment)(struct anv_simple_shader *state)
    state->cmd_buffer->state.gfx.dirty |= ~(ANV_CMD_DIRTY_INDEX_BUFFER |
                                            ANV_CMD_DIRTY_XFB_ENABLE |
                                            ANV_CMD_DIRTY_OCCLUSION_QUERY_ACTIVE |
-                                           ANV_CMD_DIRTY_RESTART_INDEX);
+                                           ANV_CMD_DIRTY_INDEX_TYPE);
    state->cmd_buffer->state.push_constants_dirty |= VK_SHADER_STAGE_FRAGMENT_BIT;
    state->cmd_buffer->state.gfx.push_constant_stages = VK_SHADER_STAGE_FRAGMENT_BIT;
 }
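
Note on the restart-index change above: the cut index that update_vf_restart now
computes on demand is a pure function of the bound index type (all ones at the
type's width, per Vulkan's primitive-restart rules), which is why the cached
gfx.restart_index field and its ANV_CMD_DIRTY_RESTART_INDEX bit can be replaced
by ANV_CMD_DIRTY_INDEX_TYPE. A minimal sketch of the mapping this assumes of the
vk_index_to_restart() helper (an illustration, not Mesa's actual implementation):

/* Restart (cut) index derived from the index type alone: the maximum
 * representable value for that index width. */
static uint32_t
sketch_index_to_restart(VkIndexType type)
{
   switch (type) {
   case VK_INDEX_TYPE_UINT8_KHR:  return 0x000000ff;
   case VK_INDEX_TYPE_UINT16:     return 0x0000ffff;
   case VK_INDEX_TYPE_UINT32:     return 0xffffffff;
   default:                       unreachable("invalid index type");
   }
}

Because the value is recomputed whenever ANV_CMD_DIRTY_INDEX_TYPE or the dynamic
primitive-restart enable is flagged, no restart state beyond the index type
needs to be tracked in anv_cmd_graphics_state.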