diff --git a/src/intel/vulkan/genX_blorp_exec.c b/src/intel/vulkan/genX_blorp_exec.c
index 140e50f4511..3f693eaabec 100644
--- a/src/intel/vulkan/genX_blorp_exec.c
+++ b/src/intel/vulkan/genX_blorp_exec.c
@@ -208,6 +208,7 @@ blorp_vf_invalidate_for_vb_48b_transitions(struct blorp_batch *batch,
                                             uint32_t *sizes,
                                             unsigned num_vbs)
 {
+#if GFX_VER == 9
    struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;
 
    for (unsigned i = 0; i < num_vbs; i++) {
@@ -227,6 +228,7 @@ blorp_vf_invalidate_for_vb_48b_transitions(struct blorp_batch *batch,
     */
    genX(cmd_buffer_update_dirty_vbs_for_gfx8_vb_flush)(cmd_buffer, SEQUENTIAL,
                                                        (1 << num_vbs) - 1);
+#endif
 }
 
 UNUSED static struct blorp_address
diff --git a/src/intel/vulkan/genX_cmd_buffer.c b/src/intel/vulkan/genX_cmd_buffer.c
index 99c866b026a..7ec38eda980 100644
--- a/src/intel/vulkan/genX_cmd_buffer.c
+++ b/src/intel/vulkan/genX_cmd_buffer.c
@@ -3690,8 +3690,10 @@ emit_vertex_bo(struct anv_cmd_buffer *cmd_buffer,
          .BufferSize = size
       });
 
+#if GFX_VER == 9
    genX(cmd_buffer_set_binding_for_gfx8_vb_flush)(cmd_buffer, index,
                                                   addr, size);
+#endif
 }
 
 static void
@@ -3743,6 +3745,7 @@ static void
 update_dirty_vbs_for_gfx8_vb_flush(struct anv_cmd_buffer *cmd_buffer,
                                    uint32_t access_type)
 {
+#if GFX_VER == 9
    struct anv_graphics_pipeline *pipeline = cmd_buffer->state.gfx.pipeline;
    const struct brw_vs_prog_data *vs_prog_data = get_vs_prog_data(pipeline);
 
@@ -3756,6 +3759,7 @@ update_dirty_vbs_for_gfx8_vb_flush(struct anv_cmd_buffer *cmd_buffer,
    genX(cmd_buffer_update_dirty_vbs_for_gfx8_vb_flush)(cmd_buffer,
                                                        access_type == RANDOM,
                                                        vb_used);
+#endif
 }
 
 ALWAYS_INLINE static void
@@ -5907,6 +5911,7 @@ genX(cmd_buffer_emit_gfx12_depth_wa)(struct anv_cmd_buffer *cmd_buffer,
 #endif
 }
 
+#if GFX_VER == 9
 /* From the Skylake PRM, 3DSTATE_VERTEX_BUFFERS:
  *
  *    "The VF cache needs to be invalidated before binding and then using
@@ -5970,9 +5975,6 @@ genX(cmd_buffer_update_dirty_vbs_for_gfx8_vb_flush)(struct anv_cmd_buffer *cmd_b
                                                      uint32_t access_type,
                                                      uint64_t vb_used)
 {
-   if (GFX_VER > 9)
-      return;
-
    if (access_type == RANDOM) {
       /* We have an index buffer */
       struct anv_vb_cache_range *bound = &cmd_buffer->state.gfx.ib_bound_range;
@@ -6001,6 +6003,7 @@ genX(cmd_buffer_update_dirty_vbs_for_gfx8_vb_flush)(struct anv_cmd_buffer *cmd_b
       }
    }
 }
+#endif /* GFX_VER == 9 */
 
 /**
  * Update the pixel hashing modes that determine the balancing of PS threads
diff --git a/src/intel/vulkan/genX_gpu_memcpy.c b/src/intel/vulkan/genX_gpu_memcpy.c
index 7322fd200f8..b33e7f22241 100644
--- a/src/intel/vulkan/genX_gpu_memcpy.c
+++ b/src/intel/vulkan/genX_gpu_memcpy.c
@@ -280,7 +280,9 @@ genX(cmd_buffer_so_memcpy)(struct anv_cmd_buffer *cmd_buffer,
       genX(cmd_buffer_config_l3)(cmd_buffer, cfg);
    }
 
+#if GFX_VER == 9
    genX(cmd_buffer_set_binding_for_gfx8_vb_flush)(cmd_buffer, 32, src, size);
+#endif
    genX(cmd_buffer_apply_pipe_flushes)(cmd_buffer);
 
    genX(flush_pipeline_select_3d)(cmd_buffer);
@@ -289,8 +291,10 @@ genX(cmd_buffer_so_memcpy)(struct anv_cmd_buffer *cmd_buffer,
                                     cmd_buffer->state.current_l3_config);
 
    emit_so_memcpy(&cmd_buffer->batch, cmd_buffer->device, dst, src, size);
 
+#if GFX_VER == 9
    genX(cmd_buffer_update_dirty_vbs_for_gfx8_vb_flush)(cmd_buffer, SEQUENTIAL,
                                                        1ull << 32);
+#endif
 
    /* Invalidate pipeline & raster discard since we touch
     * 3DSTATE_STREAMOUT.