mirror of https://gitlab.freedesktop.org/mesa/mesa.git
r600g: split INVAL_READ_CACHES into vertex, tex, and const cache flags
Also, flushing any cache in evergreen_emit_cs_shader seems to be superfluous (we don't flush caches when changing the other shaders either).

Reviewed-by: Alex Deucher <alexander.deucher@amd.com>
parent 098316211c
commit 1b40398d02
5 changed files with 52 additions and 27 deletions
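The effect of the split is easiest to see in r600_flush_emit(): instead of one catch-all R600_CONTEXT_INVAL_READ_CACHES that always sets the SH, VC, TC and FULL_CACHE bits of CP_COHER_CNTL, each read cache now gets its own flag and its own surface-sync bit (SH for the constant cache, VC or TC for the vertex cache depending on whether the chip has a dedicated vertex cache, TC for the texture cache). Below is a small standalone sketch of that mapping; the helper name, the placeholder bit values and the main() driver are illustrative stand-ins, not part of the patch.

#include <stdio.h>

/* Per-cache invalidation flags, mirroring the new layout in r600_pipe.h. */
#define R600_CONTEXT_INV_VERTEX_CACHE (1 << 0)
#define R600_CONTEXT_INV_TEX_CACHE    (1 << 1)
#define R600_CONTEXT_INV_CONST_CACHE  (1 << 2)

/* Placeholder values standing in for S_0085F0_SH/VC/TC_ACTION_ENA(1);
 * the real bit positions come from the generated register headers. */
enum {
	SH_ACTION_ENA = 1u << 0,
	VC_ACTION_ENA = 1u << 1,
	TC_ACTION_ENA = 1u << 2,
};

/* Condensed model of the per-flag handling this patch adds to r600_flush_emit(). */
static unsigned read_cache_coher_bits(unsigned flags, int has_vertex_cache)
{
	unsigned cp_coher_cntl = 0;

	if (flags & R600_CONTEXT_INV_CONST_CACHE)
		cp_coher_cntl |= SH_ACTION_ENA;	/* constant cache */

	/* Chips without a dedicated vertex cache fetch vertices through the
	 * texture cache, so the vertex flag falls back to the TC bit there. */
	if (flags & R600_CONTEXT_INV_VERTEX_CACHE)
		cp_coher_cntl |= has_vertex_cache ? VC_ACTION_ENA : TC_ACTION_ENA;

	if (flags & R600_CONTEXT_INV_TEX_CACHE)
		cp_coher_cntl |= TC_ACTION_ENA;	/* texture cache */

	return cp_coher_cntl;
}

int main(void)
{
	/* A constant-buffer update now requests only the constant cache flush. */
	printf("0x%08x\n", read_cache_coher_bits(R600_CONTEXT_INV_CONST_CACHE, 1));
	return 0;
}

The pay-off is in the callers: r600_constant_buffers_dirty() now sets only R600_CONTEXT_INV_CONST_CACHE, r600_sampler_views_dirty() only R600_CONTEXT_INV_TEX_CACHE, and so on, so for example a constant-buffer update no longer forces a vertex- and texture-cache invalidation.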
@@ -159,7 +159,7 @@ static void evergreen_cs_set_vertex_buffer(
 
 	/* The vertex instructions in the compute shaders use the texture cache,
 	 * so we need to invalidate it. */
-	rctx->flags |= R600_CONTEXT_INVAL_READ_CACHES;
+	rctx->flags |= R600_CONTEXT_INV_VERTEX_CACHE;
 	state->enabled_mask |= 1 << vb_index;
 	state->dirty_mask |= 1 << vb_index;
 	state->atom.dirty = true;
@@ -470,7 +470,9 @@ static void compute_emit_cs(struct r600_context *ctx, const uint *block_layout,
 
 	/* XXX evergreen_flush_emit() hardcodes the CP_COHER_SIZE to 0xffffffff
 	 */
-	ctx->flags |= R600_CONTEXT_INVAL_READ_CACHES;
+	ctx->flags |= R600_CONTEXT_INV_CONST_CACHE |
+		      R600_CONTEXT_INV_VERTEX_CACHE |
+		      R600_CONTEXT_INV_TEX_CACHE;
 	r600_flush_emit(ctx);
 
 #if 0
@@ -519,8 +521,6 @@ void evergreen_emit_cs_shader(
 	r600_write_value(cs, PKT3C(PKT3_NOP, 0, 0));
 	r600_write_value(cs, r600_context_bo_reloc(rctx, &rctx->rings.gfx,
 							kernel->code_bo, RADEON_USAGE_READ));
-
-	rctx->flags |= R600_CONTEXT_INVAL_READ_CACHES;
 }
 
 static void evergreen_launch_grid(

@@ -123,7 +123,9 @@ void evergreen_cp_dma_clear_buffer(struct r600_context *rctx,
 
 	/* We flush the caches, because we might read from or write
 	 * to resources which are bound right now. */
-	rctx->flags |= R600_CONTEXT_INVAL_READ_CACHES |
+	rctx->flags |= R600_CONTEXT_INV_CONST_CACHE |
+		      R600_CONTEXT_INV_VERTEX_CACHE |
+		      R600_CONTEXT_INV_TEX_CACHE |
 		      R600_CONTEXT_FLUSH_AND_INV |
 		      R600_CONTEXT_FLUSH_AND_INV_CB |
 		      R600_CONTEXT_FLUSH_AND_INV_DB |
@@ -168,7 +170,9 @@ void evergreen_cp_dma_clear_buffer(struct r600_context *rctx,
 	}
 
 	/* Invalidate the read caches. */
-	rctx->flags |= R600_CONTEXT_INVAL_READ_CACHES;
+	rctx->flags |= R600_CONTEXT_INV_CONST_CACHE |
+		      R600_CONTEXT_INV_VERTEX_CACHE |
+		      R600_CONTEXT_INV_TEX_CACHE;
 
 	util_range_add(&r600_resource(dst)->valid_buffer_range, offset,
 		       offset + size);

@@ -220,6 +220,14 @@ void r600_flush_emit(struct r600_context *rctx)
 	    (rctx->flags & R600_CONTEXT_FLUSH_AND_INV_DB_META)) {
 		cs->buf[cs->cdw++] = PKT3(PKT3_EVENT_WRITE, 0, 0);
 		cs->buf[cs->cdw++] = EVENT_TYPE(EVENT_TYPE_FLUSH_AND_INV_DB_META) | EVENT_INDEX(0);
+
+		/* Set FULL_CACHE_ENA for DB META flushes on r7xx and later.
+		 *
+		 * This hack predates use of FLUSH_AND_INV_DB_META, so it's
+		 * unclear whether it's still needed or even whether it has
+		 * any effect.
+		 */
+		cp_coher_cntl |= S_0085F0_FULL_CACHE_ENA(1);
 	}
 
 	if (rctx->flags & R600_CONTEXT_FLUSH_AND_INV) {
@@ -227,11 +235,15 @@ void r600_flush_emit(struct r600_context *rctx)
 		cs->buf[cs->cdw++] = EVENT_TYPE(EVENT_TYPE_CACHE_FLUSH_AND_INV_EVENT) | EVENT_INDEX(0);
 	}
 
-	if (rctx->flags & R600_CONTEXT_INVAL_READ_CACHES) {
-		cp_coher_cntl |= S_0085F0_VC_ACTION_ENA(1) |
-				S_0085F0_TC_ACTION_ENA(1) |
-				S_0085F0_SH_ACTION_ENA(1) |
-				S_0085F0_FULL_CACHE_ENA(1);
-	}
+	if (rctx->flags & R600_CONTEXT_INV_CONST_CACHE) {
+		cp_coher_cntl |= S_0085F0_SH_ACTION_ENA(1);
+	}
+	if (rctx->flags & R600_CONTEXT_INV_VERTEX_CACHE) {
+		cp_coher_cntl |= rctx->has_vertex_cache ? S_0085F0_VC_ACTION_ENA(1)
+							: S_0085F0_TC_ACTION_ENA(1);
+	}
+	if (rctx->flags & R600_CONTEXT_INV_TEX_CACHE) {
+		cp_coher_cntl |= S_0085F0_TC_ACTION_ENA(1);
+	}
 
 	if (rctx->flags & R600_CONTEXT_FLUSH_AND_INV_DB) {
@@ -616,7 +628,9 @@ void r600_cp_dma_copy_buffer(struct r600_context *rctx,
 
 	/* We flush the caches, because we might read from or write
 	 * to resources which are bound right now. */
-	rctx->flags |= R600_CONTEXT_INVAL_READ_CACHES |
+	rctx->flags |= R600_CONTEXT_INV_CONST_CACHE |
+		      R600_CONTEXT_INV_VERTEX_CACHE |
+		      R600_CONTEXT_INV_TEX_CACHE |
 		      R600_CONTEXT_FLUSH_AND_INV |
 		      R600_CONTEXT_FLUSH_AND_INV_CB |
 		      R600_CONTEXT_FLUSH_AND_INV_DB |
@@ -666,7 +680,9 @@ void r600_cp_dma_copy_buffer(struct r600_context *rctx,
 	}
 
 	/* Invalidate the read caches. */
-	rctx->flags |= R600_CONTEXT_INVAL_READ_CACHES;
+	rctx->flags |= R600_CONTEXT_INV_CONST_CACHE |
+		      R600_CONTEXT_INV_VERTEX_CACHE |
+		      R600_CONTEXT_INV_TEX_CACHE;
 
 	util_range_add(&r600_resource(dst)->valid_buffer_range, dst_offset,
 		       dst_offset + size);

@@ -64,16 +64,21 @@
 #define R600_ERR(fmt, args...) \
 	fprintf(stderr, "EE %s:%d %s - "fmt, __FILE__, __LINE__, __func__, ##args)
 
-#define R600_CONTEXT_INVAL_READ_CACHES		(1 << 0)
-#define R600_CONTEXT_STREAMOUT_FLUSH		(1 << 1)
-#define R600_CONTEXT_WAIT_3D_IDLE		(1 << 2)
-#define R600_CONTEXT_WAIT_CP_DMA_IDLE		(1 << 3)
-#define R600_CONTEXT_FLUSH_AND_INV		(1 << 4)
-#define R600_CONTEXT_FLUSH_AND_INV_CB_META	(1 << 5)
-#define R600_CONTEXT_PS_PARTIAL_FLUSH		(1 << 6)
-#define R600_CONTEXT_FLUSH_AND_INV_DB_META	(1 << 7)
-#define R600_CONTEXT_FLUSH_AND_INV_DB		(1 << 8)
-#define R600_CONTEXT_FLUSH_AND_INV_CB		(1 << 9)
+/* read caches */
+#define R600_CONTEXT_INV_VERTEX_CACHE		(1 << 0)
+#define R600_CONTEXT_INV_TEX_CACHE		(1 << 1)
+#define R600_CONTEXT_INV_CONST_CACHE		(1 << 2)
+/* read-write caches */
+#define R600_CONTEXT_STREAMOUT_FLUSH		(1 << 8)
+#define R600_CONTEXT_FLUSH_AND_INV		(1 << 9)
+#define R600_CONTEXT_FLUSH_AND_INV_CB_META	(1 << 10)
+#define R600_CONTEXT_FLUSH_AND_INV_DB_META	(1 << 11)
+#define R600_CONTEXT_FLUSH_AND_INV_DB		(1 << 12)
+#define R600_CONTEXT_FLUSH_AND_INV_CB		(1 << 13)
+/* engine synchronization */
+#define R600_CONTEXT_PS_PARTIAL_FLUSH		(1 << 16)
+#define R600_CONTEXT_WAIT_3D_IDLE		(1 << 17)
+#define R600_CONTEXT_WAIT_CP_DMA_IDLE		(1 << 18)
 
 #define R600_QUERY_DRAW_CALLS		(PIPE_QUERY_DRIVER_SPECIFIC + 0)
 #define R600_QUERY_REQUESTED_VRAM	(PIPE_QUERY_DRIVER_SPECIFIC + 1)

@@ -89,7 +89,7 @@ static void r600_texture_barrier(struct pipe_context *ctx)
 {
 	struct r600_context *rctx = (struct r600_context *)ctx;
 
-	rctx->flags |= R600_CONTEXT_INVAL_READ_CACHES |
+	rctx->flags |= R600_CONTEXT_INV_TEX_CACHE |
 		      R600_CONTEXT_FLUSH_AND_INV_CB |
 		      R600_CONTEXT_FLUSH_AND_INV |
 		      R600_CONTEXT_WAIT_3D_IDLE;
@@ -500,7 +500,7 @@ static void r600_set_index_buffer(struct pipe_context *ctx,
 void r600_vertex_buffers_dirty(struct r600_context *rctx)
 {
 	if (rctx->vertex_buffer_state.dirty_mask) {
-		rctx->flags |= R600_CONTEXT_INVAL_READ_CACHES;
+		rctx->flags |= R600_CONTEXT_INV_VERTEX_CACHE;
 		rctx->vertex_buffer_state.atom.num_dw = (rctx->chip_class >= EVERGREEN ? 12 : 11) *
 					       util_bitcount(rctx->vertex_buffer_state.dirty_mask);
 		rctx->vertex_buffer_state.atom.dirty = true;
@@ -557,7 +557,7 @@ void r600_sampler_views_dirty(struct r600_context *rctx,
 				    struct r600_samplerview_state *state)
 {
 	if (state->dirty_mask) {
-		rctx->flags |= R600_CONTEXT_INVAL_READ_CACHES;
+		rctx->flags |= R600_CONTEXT_INV_TEX_CACHE;
 		state->atom.num_dw = (rctx->chip_class >= EVERGREEN ? 14 : 13) *
 				     util_bitcount(state->dirty_mask);
 		state->atom.dirty = true;
@@ -912,7 +912,7 @@ static void r600_delete_vs_state(struct pipe_context *ctx, void *state)
 void r600_constant_buffers_dirty(struct r600_context *rctx, struct r600_constbuf_state *state)
 {
 	if (state->dirty_mask) {
-		rctx->flags |= R600_CONTEXT_INVAL_READ_CACHES;
+		rctx->flags |= R600_CONTEXT_INV_CONST_CACHE;
 		state->atom.num_dw = rctx->chip_class >= EVERGREEN ? util_bitcount(state->dirty_mask)*20
 								   : util_bitcount(state->dirty_mask)*19;
 		state->atom.dirty = true;