anv: Make use_softpin compile-time in genX code

Whenever we have the GFX_VERx10 macro available, we can make use_softpin
a compile-time thing for everything but Broadwell and Cherryview.  This
should save us some CPU cycles especially on SKL+.

Reviewed-by: Lionel Landwerlin <lionel.g.landwerlin@intel.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/11236>
This commit is contained in:
Jason Ekstrand 2021-06-07 19:53:42 -05:00 committed by Marge Bot
parent 04ccfeae98
commit a63e97e09a
3 changed files with 32 additions and 11 deletions

View file

@@ -1258,6 +1258,26 @@ struct anv_device {
struct intel_debug_block_frame *debug_frame_desc;
};
/* Report whether this physical device pins BOs at fixed GPU addresses
 * (softpin, i.e. EXEC_OBJECT_PINNED).
 *
 * When a GFX_VERx10 macro is in scope (genX-compiled code), the answer is
 * a compile-time constant for every generation except GFX version 8,
 * where Broadwell softpins but Cherryview does not.
 */
static inline bool
anv_use_softpin(const struct anv_physical_device *pdevice)
{
#ifndef GFX_VERx10
   /* Generation-agnostic code path: consult the physical device. */
   return pdevice->use_softpin;
#elif GFX_VERx10 >= 90
   /* Sky Lake and later always use softpin */
   assert(pdevice->use_softpin);
   return true;
#elif GFX_VERx10 < 80
   /* Haswell and earlier never use softpin */
   assert(!pdevice->use_softpin);
   return false;
#else
   /* GFX version 8: Broadwell softpins but Cherryview doesn't, so the
    * physical device has to be consulted at runtime.
    */
   return pdevice->use_softpin;
#endif
}
static inline struct anv_instance *
anv_device_instance_or_null(const struct anv_device *device)
{
@@ -1267,15 +1287,16 @@ anv_device_instance_or_null(const struct anv_device *device)
static inline struct anv_state_pool *
anv_binding_table_pool(struct anv_device *device)
{
if (device->physical->use_softpin)
if (anv_use_softpin(device->physical))
return &device->binding_table_pool;
else
return &device->surface_state_pool;
}
static inline struct anv_state
anv_binding_table_pool_alloc(struct anv_device *device) {
if (device->physical->use_softpin)
anv_binding_table_pool_alloc(struct anv_device *device)
{
if (anv_use_softpin(device->physical))
return anv_state_pool_alloc(&device->binding_table_pool,
device->binding_table_pool.block_size, 0);
else
@@ -3010,7 +3031,7 @@ struct anv_cmd_buffer {
static inline bool
anv_cmd_buffer_is_chainable(struct anv_cmd_buffer *cmd_buffer)
{
return cmd_buffer->device->physical->use_softpin &&
return anv_use_softpin(cmd_buffer->device->physical) &&
!(cmd_buffer->usage_flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT);
}

View file

@@ -132,7 +132,7 @@ genX(cmd_buffer_emit_state_base_address)(struct anv_cmd_buffer *cmd_buffer)
*/
sba.GeneralStateBufferSize = 0xfffff;
sba.IndirectObjectBufferSize = 0xfffff;
if (device->physical->use_softpin) {
if (anv_use_softpin(device->physical)) {
/* With softpin, we use fixed addresses so we actually know how big
* our base addresses are.
*/
@@ -165,7 +165,7 @@ genX(cmd_buffer_emit_state_base_address)(struct anv_cmd_buffer *cmd_buffer)
sba.InstructionAccessUpperBoundModifyEnable = true;
# endif
# if (GFX_VER >= 9)
if (cmd_buffer->device->physical->use_softpin) {
if (anv_use_softpin(device->physical)) {
sba.BindlessSurfaceStateBaseAddress = (struct anv_address) {
.bo = device->surface_state_pool.block_pool.bo,
.offset = 0,
@@ -523,7 +523,7 @@ anv_image_init_aux_tt(struct anv_cmd_buffer *cmd_buffer,
aux_entry_map = intel_aux_map_get_entry(cmd_buffer->device->aux_map_ctx,
address, &aux_entry_addr64);
assert(cmd_buffer->device->physical->use_softpin);
assert(anv_use_softpin(cmd_buffer->device->physical));
struct anv_address aux_entry_address = {
.bo = NULL,
.offset = aux_entry_addr64,
@@ -2542,7 +2542,7 @@ emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
* softpin then we always keep all user-allocated memory objects resident.
*/
const bool need_client_mem_relocs =
!cmd_buffer->device->physical->use_softpin;
!anv_use_softpin(cmd_buffer->device->physical);
struct anv_push_constants *push = &pipe_state->push_constants;
for (uint32_t s = 0; s < map->surface_count; s++) {
@@ -5045,7 +5045,7 @@ genX(cmd_buffer_set_binding_for_gfx8_vb_flush)(struct anv_cmd_buffer *cmd_buffer
uint32_t vb_size)
{
if (GFX_VER < 8 || GFX_VER > 9 ||
!cmd_buffer->device->physical->use_softpin)
!anv_use_softpin(cmd_buffer->device->physical))
return;
struct anv_vb_cache_range *bound, *dirty;
@@ -5093,7 +5093,7 @@ genX(cmd_buffer_update_dirty_vbs_for_gfx8_vb_flush)(struct anv_cmd_buffer *cmd_b
uint64_t vb_used)
{
if (GFX_VER < 8 || GFX_VER > 9 ||
!cmd_buffer->device->physical->use_softpin)
!anv_use_softpin(cmd_buffer->device->physical))
return;
if (access_type == RANDOM) {

View file

@@ -198,7 +198,7 @@ VkResult genX(CreateQueryPool)(
if (pdevice->supports_48bit_addresses)
bo_flags |= EXEC_OBJECT_SUPPORTS_48B_ADDRESS;
if (pdevice->use_softpin)
if (anv_use_softpin(pdevice))
bo_flags |= EXEC_OBJECT_PINNED;
if (pdevice->has_exec_async)