radv: add radeon_event_write() macros
Similar to RadeonSI.

Signed-off-by: Samuel Pitoiset <samuel.pitoiset@gmail.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/34145>
parent 344aa38925
commit ae8c0b06a7
5 changed files with 42 additions and 59 deletions
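In short, every open-coded EVENT_WRITE, a PKT3 header dword followed by an EVENT_TYPE/EVENT_INDEX payload dword, collapses into a single macro call, mirroring the helper RadeonSI already has. A minimal sketch of the call-site transformation, excerpted from the hunks below (surrounding context omitted):

   /* Before: two dwords emitted by hand at every call site. */
   radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
   radeon_emit(cs, EVENT_TYPE(V_028A90_VGT_FLUSH) | EVENT_INDEX(0));

   /* After: the macro picks the right EVENT_INDEX for the event type. */
   radeon_event_write(cs, V_028A90_VGT_FLUSH);

   /* Predicated variant, used for the SQTT thread-trace marker. */
   radeon_event_write_predicate(cmd_buffer->cs, V_028A90_THREAD_TRACE_MARKER, cmd_buffer->state.predicating);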
@@ -837,8 +837,7 @@ radv_cmd_buffer_after_draw(struct radv_cmd_buffer *cmd_buffer, enum radv_cmd_flu
    if (unlikely(device->sqtt.bo) && !dgc) {
       radeon_check_space(device->ws, cmd_buffer->cs, 2);

-      radeon_emit(cmd_buffer->cs, PKT3(PKT3_EVENT_WRITE, 0, cmd_buffer->state.predicating));
-      radeon_emit(cmd_buffer->cs, EVENT_TYPE(V_028A90_THREAD_TRACE_MARKER) | EVENT_INDEX(0));
+      radeon_event_write_predicate(cmd_buffer->cs, V_028A90_THREAD_TRACE_MARKER, cmd_buffer->state.predicating);
    }

    if (instance->debug_flags & RADV_DEBUG_SYNC_SHADERS) {
@@ -2932,8 +2931,7 @@ radv_emit_graphics_pipeline(struct radv_cmd_buffer *cmd_buffer)
         cmd_buffer->state.graphics_pipeline->base.shaders[MESA_SHADER_FRAGMENT]) &&
        (settings->context_states_per_bin > 1 || settings->persistent_states_per_bin > 1)) {
       /* Break the batch on PS changes. */
-      radeon_emit(cmd_buffer->cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-      radeon_emit(cmd_buffer->cs, EVENT_TYPE(V_028A90_BREAK_BATCH) | EVENT_INDEX(0));
+      radeon_event_write(cmd_buffer->cs, V_028A90_BREAK_BATCH);
    }
 }

@@ -3386,8 +3384,7 @@ radv_emit_primitive_restart_enable(struct radv_cmd_buffer *cmd_buffer)
    const bool en = d->vk.ia.primitive_restart_enable;

    if (pdev->info.has_prim_restart_sync_bug) {
-      radeon_emit(cmd_buffer->cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-      radeon_emit(cmd_buffer->cs, EVENT_TYPE(V_028A90_SQ_NON_EVENT) | EVENT_INDEX(0));
+      radeon_event_write(cmd_buffer->cs, V_028A90_SQ_NON_EVENT);
    }

    if (gfx_level >= GFX11) {
@@ -3485,8 +3482,7 @@ radv_emit_color_write(struct radv_cmd_buffer *cmd_buffer)
    if (device->pbb_allowed && settings->context_states_per_bin > 1 &&
        cmd_buffer->state.last_cb_target_mask != cb_target_mask) {
       /* Flush DFSM on CB_TARGET_MASK changes. */
-      radeon_emit(cmd_buffer->cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-      radeon_emit(cmd_buffer->cs, EVENT_TYPE(V_028A90_BREAK_BATCH) | EVENT_INDEX(0));
+      radeon_event_write(cmd_buffer->cs, V_028A90_BREAK_BATCH);

       cmd_buffer->state.last_cb_target_mask = cb_target_mask;
    }
@@ -13336,8 +13332,7 @@ radv_flush_vgt_streamout(struct radv_cmd_buffer *cmd_buffer)
       radeon_set_config_reg(cs, reg_strmout_cntl, 0);
    }

-   radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-   radeon_emit(cs, EVENT_TYPE(V_028A90_SO_VGTSTREAMOUT_FLUSH) | EVENT_INDEX(0));
+   radeon_event_write(cs, V_028A90_SO_VGTSTREAMOUT_FLUSH);

    radeon_emit(cs, PKT3(PKT3_WAIT_REG_MEM, 5, 0));
    radeon_emit(cs, WAIT_REG_MEM_EQUAL); /* wait until the register is equal to the reference value */
@@ -13654,8 +13649,7 @@ radv_CmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer, uint32_t instanc
    if (pdev->info.gfx_level == GFX12) {
       /* DrawTransformFeedback requires 3 SQ_NON_EVENTs after the packet. */
       for (unsigned i = 0; i < 3; i++) {
-         radeon_emit(cmd_buffer->cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-         radeon_emit(cmd_buffer->cs, EVENT_TYPE(V_028A90_SQ_NON_EVENT) | EVENT_INDEX(0));
+         radeon_event_write(cmd_buffer->cs, V_028A90_SQ_NON_EVENT);
       }
    }

@@ -179,8 +179,7 @@ gfx10_cs_emit_cache_flush(struct radeon_cmdbuf *cs, enum amd_gfx_level gfx_level
    /* TODO: trigger on RADV_CMD_FLAG_FLUSH_AND_INV_CB_META */
    if (gfx_level < GFX12 && flush_bits & RADV_CMD_FLAG_FLUSH_AND_INV_CB) {
       /* Flush CMASK/FMASK/DCC. Will wait for idle later. */
-      radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-      radeon_emit(cs, EVENT_TYPE(V_028A90_FLUSH_AND_INV_CB_META) | EVENT_INDEX(0));
+      radeon_event_write(cs, V_028A90_FLUSH_AND_INV_CB_META);

       *sqtt_flush_bits |= RGP_FLUSH_FLUSH_CB | RGP_FLUSH_INVAL_CB;
    }
@@ -189,8 +188,7 @@ gfx10_cs_emit_cache_flush(struct radeon_cmdbuf *cs, enum amd_gfx_level gfx_level
    /* TODO: trigger on RADV_CMD_FLAG_FLUSH_AND_INV_DB_META ? */
    if (gfx_level < GFX12 && gfx_level != GFX11 && (flush_bits & RADV_CMD_FLAG_FLUSH_AND_INV_DB)) {
       /* Flush HTILE. Will wait for idle later. */
-      radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-      radeon_emit(cs, EVENT_TYPE(V_028A90_FLUSH_AND_INV_DB_META) | EVENT_INDEX(0));
+      radeon_event_write(cs, V_028A90_FLUSH_AND_INV_DB_META);

       *sqtt_flush_bits |= RGP_FLUSH_FLUSH_DB | RGP_FLUSH_INVAL_DB;
    }
@@ -215,21 +213,18 @@ gfx10_cs_emit_cache_flush(struct radeon_cmdbuf *cs, enum amd_gfx_level gfx_level
    } else {
       /* Wait for graphics shaders to go idle if requested. */
       if (flush_bits & RADV_CMD_FLAG_PS_PARTIAL_FLUSH) {
-         radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-         radeon_emit(cs, EVENT_TYPE(V_028A90_PS_PARTIAL_FLUSH) | EVENT_INDEX(4));
+         radeon_event_write(cs, V_028A90_PS_PARTIAL_FLUSH);

          *sqtt_flush_bits |= RGP_FLUSH_PS_PARTIAL_FLUSH;
       } else if (flush_bits & RADV_CMD_FLAG_VS_PARTIAL_FLUSH) {
-         radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-         radeon_emit(cs, EVENT_TYPE(V_028A90_VS_PARTIAL_FLUSH) | EVENT_INDEX(4));
+         radeon_event_write(cs, V_028A90_VS_PARTIAL_FLUSH);

          *sqtt_flush_bits |= RGP_FLUSH_VS_PARTIAL_FLUSH;
       }
    }

    if (flush_bits & RADV_CMD_FLAG_CS_PARTIAL_FLUSH) {
-      radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-      radeon_emit(cs, EVENT_TYPE(V_028A90_CS_PARTIAL_FLUSH | EVENT_INDEX(4)));
+      radeon_event_write(cs, V_028A90_CS_PARTIAL_FLUSH);

       *sqtt_flush_bits |= RGP_FLUSH_CS_PARTIAL_FLUSH;
    }
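Two details worth noting in the hunks above: the partial-flush events (VS/PS/CS_PARTIAL_FLUSH) are the ones that need EVENT_INDEX(4), all other events use EVENT_INDEX(0), and the old CS_PARTIAL_FLUSH call site spelled its payload as EVENT_TYPE(V_028A90_CS_PARTIAL_FLUSH | EVENT_INDEX(4)), with the index folded inside EVENT_TYPE(). Assuming the usual encodings (EVENT_TYPE() is a shift by 0, EVENT_INDEX() shifts into bits [11:8]), both spellings encode the same dword, so the odd parenthesization was harmless; the new macro just removes the inconsistency. A standalone sketch of that equivalence, with the event value stubbed in for illustration:

   #include <assert.h>
   #include <stdint.h>

   /* Assumed encodings for this sketch, stand-ins for the real sid.h definitions. */
   #define EVENT_TYPE(x)  ((x) << 0)  /* event type lives in the low bits */
   #define EVENT_INDEX(x) ((x) << 8)  /* event index lives in bits [11:8] */
   #define V_028A90_CS_PARTIAL_FLUSH 0x07 /* illustrative value */

   int main(void)
   {
      uint32_t folded    = EVENT_TYPE(V_028A90_CS_PARTIAL_FLUSH | EVENT_INDEX(4));
      uint32_t canonical = EVENT_TYPE(V_028A90_CS_PARTIAL_FLUSH) | EVENT_INDEX(4);

      assert(folded == canonical); /* identical because EVENT_TYPE() is a zero shift */
      return 0;
   }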
@@ -317,8 +312,7 @@ gfx10_cs_emit_cache_flush(struct radeon_cmdbuf *cs, enum amd_gfx_level gfx_level

    /* VGT state sync */
    if (flush_bits & RADV_CMD_FLAG_VGT_FLUSH) {
-      radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-      radeon_emit(cs, EVENT_TYPE(V_028A90_VGT_FLUSH) | EVENT_INDEX(0));
+      radeon_event_write(cs, V_028A90_VGT_FLUSH);
    }

    /* Ignore fields that only modify the behavior of other fields. */
@@ -347,15 +341,13 @@ gfx10_cs_emit_cache_flush(struct radeon_cmdbuf *cs, enum amd_gfx_level gfx_level

    if (flush_bits & RADV_CMD_FLAG_START_PIPELINE_STATS) {
       if (qf == RADV_QUEUE_GENERAL) {
-         radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-         radeon_emit(cs, EVENT_TYPE(V_028A90_PIPELINESTAT_START) | EVENT_INDEX(0));
+         radeon_event_write(cs, V_028A90_PIPELINESTAT_START);
       } else if (qf == RADV_QUEUE_COMPUTE) {
          radeon_set_sh_reg(cs, R_00B828_COMPUTE_PIPELINESTAT_ENABLE, S_00B828_PIPELINESTAT_ENABLE(1));
       }
    } else if (flush_bits & RADV_CMD_FLAG_STOP_PIPELINE_STATS) {
       if (qf == RADV_QUEUE_GENERAL) {
-         radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-         radeon_emit(cs, EVENT_TYPE(V_028A90_PIPELINESTAT_STOP) | EVENT_INDEX(0));
+         radeon_event_write(cs, V_028A90_PIPELINESTAT_STOP);
       } else if (qf == RADV_QUEUE_COMPUTE) {
          radeon_set_sh_reg(cs, R_00B828_COMPUTE_PIPELINESTAT_ENABLE, S_00B828_PIPELINESTAT_ENABLE(0));
       }
@@ -413,34 +405,29 @@ radv_cs_emit_cache_flush(struct radeon_winsys *ws, struct radeon_cmdbuf *cs, enu
    }

    if (flush_bits & RADV_CMD_FLAG_FLUSH_AND_INV_CB_META) {
-      radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-      radeon_emit(cs, EVENT_TYPE(V_028A90_FLUSH_AND_INV_CB_META) | EVENT_INDEX(0));
+      radeon_event_write(cs, V_028A90_FLUSH_AND_INV_CB_META);

       *sqtt_flush_bits |= RGP_FLUSH_FLUSH_CB | RGP_FLUSH_INVAL_CB;
    }

    if (flush_bits & RADV_CMD_FLAG_FLUSH_AND_INV_DB_META) {
-      radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-      radeon_emit(cs, EVENT_TYPE(V_028A90_FLUSH_AND_INV_DB_META) | EVENT_INDEX(0));
+      radeon_event_write(cs, V_028A90_FLUSH_AND_INV_DB_META);

       *sqtt_flush_bits |= RGP_FLUSH_FLUSH_DB | RGP_FLUSH_INVAL_DB;
    }

    if (flush_bits & RADV_CMD_FLAG_PS_PARTIAL_FLUSH) {
-      radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-      radeon_emit(cs, EVENT_TYPE(V_028A90_PS_PARTIAL_FLUSH) | EVENT_INDEX(4));
+      radeon_event_write(cs, V_028A90_PS_PARTIAL_FLUSH);

       *sqtt_flush_bits |= RGP_FLUSH_PS_PARTIAL_FLUSH;
    } else if (flush_bits & RADV_CMD_FLAG_VS_PARTIAL_FLUSH) {
-      radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-      radeon_emit(cs, EVENT_TYPE(V_028A90_VS_PARTIAL_FLUSH) | EVENT_INDEX(4));
+      radeon_event_write(cs, V_028A90_VS_PARTIAL_FLUSH);

       *sqtt_flush_bits |= RGP_FLUSH_VS_PARTIAL_FLUSH;
    }

    if (flush_bits & RADV_CMD_FLAG_CS_PARTIAL_FLUSH) {
-      radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-      radeon_emit(cs, EVENT_TYPE(V_028A90_CS_PARTIAL_FLUSH) | EVENT_INDEX(4));
+      radeon_event_write(cs, V_028A90_CS_PARTIAL_FLUSH);

       *sqtt_flush_bits |= RGP_FLUSH_CS_PARTIAL_FLUSH;
    }
@@ -488,14 +475,12 @@ radv_cs_emit_cache_flush(struct radeon_winsys *ws, struct radeon_cmdbuf *cs, enu

    /* VGT state sync */
    if (flush_bits & RADV_CMD_FLAG_VGT_FLUSH) {
-      radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-      radeon_emit(cs, EVENT_TYPE(V_028A90_VGT_FLUSH) | EVENT_INDEX(0));
+      radeon_event_write(cs, V_028A90_VGT_FLUSH);
    }

    /* VGT streamout state sync */
    if (flush_bits & RADV_CMD_FLAG_VGT_STREAMOUT_SYNC) {
-      radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-      radeon_emit(cs, EVENT_TYPE(V_028A90_VGT_STREAMOUT_SYNC) | EVENT_INDEX(0));
+      radeon_event_write(cs, V_028A90_VGT_STREAMOUT_SYNC);
    }

    /* Make sure ME is idle (it executes most packets) before continuing.
@@ -547,15 +532,13 @@ radv_cs_emit_cache_flush(struct radeon_winsys *ws, struct radeon_cmdbuf *cs, enu

    if (flush_bits & RADV_CMD_FLAG_START_PIPELINE_STATS) {
       if (qf == RADV_QUEUE_GENERAL) {
-         radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-         radeon_emit(cs, EVENT_TYPE(V_028A90_PIPELINESTAT_START) | EVENT_INDEX(0));
+         radeon_event_write(cs, V_028A90_PIPELINESTAT_START);
       } else if (qf == RADV_QUEUE_COMPUTE) {
          radeon_set_sh_reg(cs, R_00B828_COMPUTE_PIPELINESTAT_ENABLE, S_00B828_PIPELINESTAT_ENABLE(1));
       }
    } else if (flush_bits & RADV_CMD_FLAG_STOP_PIPELINE_STATS) {
       if (qf == RADV_QUEUE_GENERAL) {
-         radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-         radeon_emit(cs, EVENT_TYPE(V_028A90_PIPELINESTAT_STOP) | EVENT_INDEX(0));
+         radeon_event_write(cs, V_028A90_PIPELINESTAT_STOP);
       } else if (qf == RADV_QUEUE_COMPUTE) {
          radeon_set_sh_reg(cs, R_00B828_COMPUTE_PIPELINESTAT_ENABLE, S_00B828_PIPELINESTAT_ENABLE(0));
       }

@@ -198,6 +198,19 @@ radeon_check_space(struct radeon_winsys *ws, struct radeon_cmdbuf *cs, unsigned
       radeon_emit(cs, 0); /* unused */ \
    } while (0)

+#define radeon_event_write_predicate(cs, event_type, predicate)                                           \
+   do {                                                                                                   \
+      unsigned __event_type = (event_type);                                                               \
+      radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, predicate));                                              \
+      radeon_emit(cs, EVENT_TYPE(__event_type) | EVENT_INDEX(__event_type == V_028A90_VS_PARTIAL_FLUSH || \
+                                                             __event_type == V_028A90_PS_PARTIAL_FLUSH || \
+                                                             __event_type == V_028A90_CS_PARTIAL_FLUSH    \
+                                                                ? 4                                       \
+                                                                : 0));                                    \
+   } while (0)
+
+#define radeon_event_write(cs, event_type) radeon_event_write_predicate(cs, event_type, false)
+
 ALWAYS_INLINE static void
 radv_cp_wait_mem(struct radeon_cmdbuf *cs, const enum radv_queue_family qf, const uint32_t op, const uint64_t va,
                  const uint32_t ref, const uint32_t mask)
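For reference, here is a self-contained sketch that exercises the new macro outside the driver, with the hardware headers replaced by simplified stand-ins (the PKT3/EVENT_* encodings and V_028A90_* values below are illustrative assumptions, not the real sid.h definitions). It checks that partial-flush events get EVENT_INDEX(4) while other events get EVENT_INDEX(0), which is the point of centralizing the packet layout in one macro instead of repeating it at every call site:

   #include <assert.h>
   #include <stdbool.h>
   #include <stdint.h>
   #include <stdio.h>

   /* Simplified stand-ins for the real sid.h / radv_cs.h definitions (assumptions). */
   #define PKT3_EVENT_WRITE 0x46
   #define PKT3(op, count, predicate) ((3u << 30) | ((unsigned)(count) << 16) | ((op) << 8) | ((predicate) ? 1u : 0u))
   #define EVENT_TYPE(x)  ((x) << 0)
   #define EVENT_INDEX(x) ((x) << 8)
   #define V_028A90_CS_PARTIAL_FLUSH 0x07
   #define V_028A90_VS_PARTIAL_FLUSH 0x0f
   #define V_028A90_PS_PARTIAL_FLUSH 0x10
   #define V_028A90_VGT_FLUSH        0x24

   /* Tiny stand-in for struct radeon_cmdbuf: just record the emitted dwords. */
   static uint32_t emitted[2];
   static unsigned emitted_count;

   static void radeon_emit(void *cs, uint32_t dw)
   {
      (void)cs;
      emitted[emitted_count++] = dw;
   }

   /* The macros from the hunk above, bound to the stand-ins. */
   #define radeon_event_write_predicate(cs, event_type, predicate)                   \
      do {                                                                           \
         unsigned __event_type = (event_type);                                       \
         radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, predicate));                      \
         radeon_emit(cs, EVENT_TYPE(__event_type) |                                  \
                            EVENT_INDEX(__event_type == V_028A90_VS_PARTIAL_FLUSH || \
                                        __event_type == V_028A90_PS_PARTIAL_FLUSH || \
                                        __event_type == V_028A90_CS_PARTIAL_FLUSH    \
                                           ? 4                                       \
                                           : 0));                                    \
      } while (0)

   #define radeon_event_write(cs, event_type) radeon_event_write_predicate(cs, event_type, false)

   int main(void)
   {
      radeon_event_write(NULL, V_028A90_CS_PARTIAL_FLUSH);
      assert(((emitted[1] >> 8) & 0xf) == 4); /* partial flushes use EVENT_INDEX(4) */

      emitted_count = 0;
      radeon_event_write(NULL, V_028A90_VGT_FLUSH);
      assert(((emitted[1] >> 8) & 0xf) == 0); /* every other event uses EVENT_INDEX(0) */

      printf("EVENT_INDEX selection behaves as expected\n");
      return 0;
   }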
@@ -34,8 +34,7 @@ static void
 radv_emit_windowed_counters(struct radv_device *device, struct radeon_cmdbuf *cs, int family, bool enable)
 {
    if (family == RADV_QUEUE_GENERAL) {
-      radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-      radeon_emit(cs, EVENT_TYPE(enable ? V_028A90_PERFCOUNTER_START : V_028A90_PERFCOUNTER_STOP) | EVENT_INDEX(0));
+      radeon_event_write(cs, enable ? V_028A90_PERFCOUNTER_START : V_028A90_PERFCOUNTER_STOP);
    }

    radeon_set_sh_reg(cs, R_00B82C_COMPUTE_PERFCOUNT_ENABLE, S_00B82C_PERFCOUNT_ENABLE(enable));
@@ -534,8 +533,7 @@ radv_pc_wait_idle(struct radv_cmd_buffer *cmd_buffer)
 {
    struct radeon_cmdbuf *cs = cmd_buffer->cs;

-   radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-   radeon_emit(cs, EVENT_TYPE(V_028A90_CS_PARTIAL_FLUSH | EVENT_INDEX(4)));
+   radeon_event_write(cs, V_028A90_CS_PARTIAL_FLUSH);

    radeon_emit(cs, PKT3(PKT3_ACQUIRE_MEM, 6, 0));
    radeon_emit(cs, 0); /* CP_COHER_CNTL */
@@ -557,8 +555,7 @@ radv_pc_stop_and_sample(struct radv_cmd_buffer *cmd_buffer, struct radv_pc_query
    const struct radv_physical_device *pdev = radv_device_physical(device);
    struct radeon_cmdbuf *cs = cmd_buffer->cs;

-   radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-   radeon_emit(cs, EVENT_TYPE(V_028A90_PERFCOUNTER_SAMPLE) | EVENT_INDEX(0));
+   radeon_event_write(cs, V_028A90_PERFCOUNTER_SAMPLE);

    radv_pc_wait_idle(cmd_buffer);

@@ -1111,11 +1111,8 @@ radv_update_preamble_cs(struct radv_queue_state *queue, struct radv_device *devi
       radv_init_graphics_state(cs, device);

    if (esgs_ring_bo || gsvs_ring_bo || tess_rings_bo || task_rings_bo) {
-      radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-      radeon_emit(cs, EVENT_TYPE(V_028A90_VS_PARTIAL_FLUSH) | EVENT_INDEX(4));
-
-      radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-      radeon_emit(cs, EVENT_TYPE(V_028A90_VGT_FLUSH) | EVENT_INDEX(0));
+      radeon_event_write(cs, V_028A90_VS_PARTIAL_FLUSH);
+      radeon_event_write(cs, V_028A90_VGT_FLUSH);
    }

    radv_emit_gs_ring_sizes(device, cs, esgs_ring_bo, needs->esgs_ring_size, gsvs_ring_bo, needs->gsvs_ring_size);
@@ -1131,8 +1128,7 @@ radv_update_preamble_cs(struct radv_queue_state *queue, struct radv_device *devi
    radv_emit_compute(device, cs, true);

    if (task_rings_bo) {
-      radeon_emit(cs, PKT3(PKT3_EVENT_WRITE, 0, 0));
-      radeon_emit(cs, EVENT_TYPE(V_028A90_CS_PARTIAL_FLUSH) | EVENT_INDEX(4));
+      radeon_event_write(cs, V_028A90_CS_PARTIAL_FLUSH);
    }

    radv_emit_task_rings(device, cs, task_rings_bo, true);