mirror of
https://gitlab.freedesktop.org/mesa/mesa.git
synced 2025-12-20 11:40:10 +01:00
Merge branch 'pvr-multiarch-cleanup' into 'main'
Draft: pvr: misc multi-arch cleanups See merge request mesa/mesa!39035
This commit is contained in:
commit
c6362c6371
44 changed files with 764 additions and 701 deletions
|
|
@ -66,6 +66,37 @@ Hardware documentation can be found at: https://docs.imgtec.com/
|
||||||
Note: GPUs prior to Series6 do not have the hardware capabilities required to
|
Note: GPUs prior to Series6 do not have the hardware capabilities required to
|
||||||
support Vulkan and therefore cannot be supported by this driver.
|
support Vulkan and therefore cannot be supported by this driver.
|
||||||
|
|
||||||
|
Multi-Architecture support
|
||||||
|
--------------------------
|
||||||
|
|
||||||
|
In order to support multiple distinct hardware generations without too much
|
||||||
|
spaghetti-code, the PowerVR driver compiles a few files multiple times (once per
|
||||||
|
hardware architecture), and uses a system of macros and aliases to be able
|
||||||
|
to refer to the different versions.
|
||||||
|
|
||||||
|
The files that get compiled multiple times are those named
|
||||||
|
:file:`pvr_arch_*.c`. These files contain definitions of functions prefixed
|
||||||
|
with ``pvr_arch_`` (instead of the normal ``pvr_``-prefix). The ``arch``-bit
|
||||||
|
of that function is a placeholder, which gets replaced at compile-time, thanks
|
||||||
|
to a system of defines in the corresponding header file, supported by a set
|
||||||
|
of macros defined in :file:`pvr_macros.h`.
|
||||||
|
|
||||||
|
The intention is that these functions are mostly called from architecture
|
||||||
|
specific entrypoints, that are handled by the common vulkan dispatch-table
|
||||||
|
code. This means that an architecture specific function can easily call either
|
||||||
|
architecture specific or architecture agnostic code.
|
||||||
|
|
||||||
|
The tricky bit comes when architecture agnostic code calls architecture specific
|
||||||
|
code. In that case, we have the ``PVR_ARCH_DISPATCH`` and
|
||||||
|
``PVR_ARCH_DISPATCH_RET`` macros. These are a bit error-prone to use, because
|
||||||
|
they need to see definition for all architecture versions of each entrypoint,
|
||||||
|
which isn't something we have available. To work around this, we define a
|
||||||
|
``PER_ARCH_FUNCS(arch)`` macro in each source-file that needs to use these
|
||||||
|
dispatch macros, and make sure to instantiate it once per architecture.
|
||||||
|
|
||||||
|
To avoid confusion, please do not add functions that are prefixed with
|
||||||
|
``pvr_arch_`` if they are not part of the system described here.
|
||||||
|
|
||||||
Chat
|
Chat
|
||||||
----
|
----
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -415,7 +415,7 @@ static inline void pvr_border_color_swizzle_to_tex_format(
|
||||||
*color = swizzled_color;
|
*color = swizzled_color;
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(border_color_table_init)(struct pvr_device *const device)
|
VkResult pvr_arch_border_color_table_init(struct pvr_device *const device)
|
||||||
{
|
{
|
||||||
struct pvr_border_color_table *table = device->border_color_table =
|
struct pvr_border_color_table *table = device->border_color_table =
|
||||||
vk_zalloc(&device->vk.alloc,
|
vk_zalloc(&device->vk.alloc,
|
||||||
|
|
@ -470,7 +470,7 @@ err_out:
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
void PVR_PER_ARCH(border_color_table_finish)(struct pvr_device *const device)
|
void pvr_arch_border_color_table_finish(struct pvr_device *const device)
|
||||||
{
|
{
|
||||||
#if MESA_DEBUG
|
#if MESA_DEBUG
|
||||||
BITSET_SET_RANGE_INSIDE_WORD(device->border_color_table->unused_entries,
|
BITSET_SET_RANGE_INSIDE_WORD(device->border_color_table->unused_entries,
|
||||||
|
|
@ -496,7 +496,7 @@ static inline void pvr_border_color_table_set_custom_entry(
|
||||||
struct pvr_border_color_table_entry *const entry = &entries[index];
|
struct pvr_border_color_table_entry *const entry = &entries[index];
|
||||||
|
|
||||||
const enum pipe_format format = vk_format_to_pipe_format(vk_format);
|
const enum pipe_format format = vk_format_to_pipe_format(vk_format);
|
||||||
uint32_t tex_format = pvr_get_tex_format(vk_format);
|
uint32_t tex_format = pvr_arch_get_tex_format(vk_format);
|
||||||
|
|
||||||
assert(tex_format != ROGUE_TEXSTATE_FORMAT_INVALID);
|
assert(tex_format != ROGUE_TEXSTATE_FORMAT_INVALID);
|
||||||
|
|
||||||
|
|
@ -527,7 +527,7 @@ static inline void pvr_border_color_table_set_custom_entry(
|
||||||
* format relative to the depth-only or stencil-only component
|
* format relative to the depth-only or stencil-only component
|
||||||
* associated with this Vulkan format.
|
* associated with this Vulkan format.
|
||||||
*/
|
*/
|
||||||
tex_format = pvr_get_tex_format_aspect(vk_format, aspect_mask);
|
tex_format = pvr_arch_get_tex_format_aspect(vk_format, aspect_mask);
|
||||||
assert(tex_format != ROGUE_TEXSTATE_FORMAT_INVALID);
|
assert(tex_format != ROGUE_TEXSTATE_FORMAT_INVALID);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -593,7 +593,7 @@ err_out:
|
||||||
"Failed to allocate border color table entry");
|
"Failed to allocate border color table entry");
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(border_color_table_get_or_create_entry)(
|
VkResult pvr_arch_border_color_table_get_or_create_entry(
|
||||||
struct pvr_device *const device,
|
struct pvr_device *const device,
|
||||||
const struct pvr_sampler *const sampler,
|
const struct pvr_sampler *const sampler,
|
||||||
struct pvr_border_color_table *const table,
|
struct pvr_border_color_table *const table,
|
||||||
|
|
@ -612,7 +612,7 @@ VkResult PVR_PER_ARCH(border_color_table_get_or_create_entry)(
|
||||||
index_out);
|
index_out);
|
||||||
}
|
}
|
||||||
|
|
||||||
void PVR_PER_ARCH(border_color_table_release_entry)(
|
void pvr_arch_border_color_table_release_entry(
|
||||||
struct pvr_border_color_table *const table,
|
struct pvr_border_color_table *const table,
|
||||||
const uint32_t index)
|
const uint32_t index)
|
||||||
{
|
{
|
||||||
|
|
|
||||||
File diff suppressed because it is too large
Load diff
|
|
@ -31,7 +31,8 @@ void PVR_PER_ARCH(CmdResetQueryPool)(VkCommandBuffer commandBuffer,
|
||||||
/* make the query-reset program wait for previous geom/frag,
|
/* make the query-reset program wait for previous geom/frag,
|
||||||
* to not overwrite them
|
* to not overwrite them
|
||||||
*/
|
*/
|
||||||
result = pvr_cmd_buffer_start_sub_cmd(cmd_buffer, PVR_SUB_CMD_TYPE_EVENT);
|
result =
|
||||||
|
pvr_arch_cmd_buffer_start_sub_cmd(cmd_buffer, PVR_SUB_CMD_TYPE_EVENT);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
|
|
@ -44,14 +45,15 @@ void PVR_PER_ARCH(CmdResetQueryPool)(VkCommandBuffer commandBuffer,
|
||||||
};
|
};
|
||||||
|
|
||||||
/* add the query-program itself */
|
/* add the query-program itself */
|
||||||
result = pvr_add_query_program(cmd_buffer, &query_info);
|
result = pvr_arch_add_query_program(cmd_buffer, &query_info);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
/* make future geom/frag wait for the query-reset program to
|
/* make future geom/frag wait for the query-reset program to
|
||||||
* reset the counters to 0
|
* reset the counters to 0
|
||||||
*/
|
*/
|
||||||
result = pvr_cmd_buffer_start_sub_cmd(cmd_buffer, PVR_SUB_CMD_TYPE_EVENT);
|
result =
|
||||||
|
pvr_arch_cmd_buffer_start_sub_cmd(cmd_buffer, PVR_SUB_CMD_TYPE_EVENT);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
|
|
@ -89,7 +91,8 @@ void PVR_PER_ARCH(CmdCopyQueryPoolResults)(VkCommandBuffer commandBuffer,
|
||||||
query_info.copy_query_results.stride = stride;
|
query_info.copy_query_results.stride = stride;
|
||||||
query_info.copy_query_results.flags = flags;
|
query_info.copy_query_results.flags = flags;
|
||||||
|
|
||||||
result = pvr_cmd_buffer_start_sub_cmd(cmd_buffer, PVR_SUB_CMD_TYPE_EVENT);
|
result =
|
||||||
|
pvr_arch_cmd_buffer_start_sub_cmd(cmd_buffer, PVR_SUB_CMD_TYPE_EVENT);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
|
|
@ -114,13 +117,14 @@ void PVR_PER_ARCH(CmdCopyQueryPoolResults)(VkCommandBuffer commandBuffer,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
result = pvr_cmd_buffer_end_sub_cmd(cmd_buffer);
|
result = pvr_arch_cmd_buffer_end_sub_cmd(cmd_buffer);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
pvr_add_query_program(cmd_buffer, &query_info);
|
pvr_arch_add_query_program(cmd_buffer, &query_info);
|
||||||
|
|
||||||
result = pvr_cmd_buffer_start_sub_cmd(cmd_buffer, PVR_SUB_CMD_TYPE_EVENT);
|
result =
|
||||||
|
pvr_arch_cmd_buffer_start_sub_cmd(cmd_buffer, PVR_SUB_CMD_TYPE_EVENT);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
|
|
@ -141,7 +145,7 @@ pvr_cmd_buffer_state_get_view_count(const struct pvr_cmd_buffer_state *state)
|
||||||
const struct pvr_sub_cmd_gfx *gfx_sub_cmd = &state->current_sub_cmd->gfx;
|
const struct pvr_sub_cmd_gfx *gfx_sub_cmd = &state->current_sub_cmd->gfx;
|
||||||
const uint32_t hw_render_idx = gfx_sub_cmd->hw_render_idx;
|
const uint32_t hw_render_idx = gfx_sub_cmd->hw_render_idx;
|
||||||
const struct pvr_renderpass_hwsetup_render *hw_render =
|
const struct pvr_renderpass_hwsetup_render *hw_render =
|
||||||
pvr_pass_info_get_hw_render(render_pass_info, hw_render_idx);
|
pvr_arch_pass_info_get_hw_render(render_pass_info, hw_render_idx);
|
||||||
const uint32_t view_count = util_bitcount(hw_render->view_mask);
|
const uint32_t view_count = util_bitcount(hw_render->view_mask);
|
||||||
|
|
||||||
assert(state->current_sub_cmd->type == PVR_SUB_CMD_TYPE_GRAPHICS);
|
assert(state->current_sub_cmd->type == PVR_SUB_CMD_TYPE_GRAPHICS);
|
||||||
|
|
@ -177,12 +181,12 @@ void PVR_PER_ARCH(CmdBeginQuery)(VkCommandBuffer commandBuffer,
|
||||||
/* Kick render. */
|
/* Kick render. */
|
||||||
state->current_sub_cmd->gfx.barrier_store = true;
|
state->current_sub_cmd->gfx.barrier_store = true;
|
||||||
|
|
||||||
result = pvr_cmd_buffer_end_sub_cmd(cmd_buffer);
|
result = pvr_arch_cmd_buffer_end_sub_cmd(cmd_buffer);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
result =
|
result = pvr_arch_cmd_buffer_start_sub_cmd(cmd_buffer,
|
||||||
pvr_cmd_buffer_start_sub_cmd(cmd_buffer, PVR_SUB_CMD_TYPE_GRAPHICS);
|
PVR_SUB_CMD_TYPE_GRAPHICS);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -90,7 +90,7 @@ pvr_csb_emit_link_unmarked(struct pvr_csb *csb, pvr_dev_addr_t addr, bool ret)
|
||||||
* current buffer, a few bytes including guard padding size are reserved at the
|
* current buffer, a few bytes including guard padding size are reserved at the
|
||||||
* end, every time a buffer is created. Every time we allocate a new buffer we
|
* end, every time a buffer is created. Every time we allocate a new buffer we
|
||||||
* fix the current buffer in use to emit the stream link dwords. This makes sure
|
* fix the current buffer in use to emit the stream link dwords. This makes sure
|
||||||
* that when #pvr_csb_alloc_dwords() is called from #pvr_csb_emit() to add
|
* that when #pvr_arch_csb_alloc_dwords() is called from #pvr_csb_emit() to add
|
||||||
* STREAM_LINK0 and STREAM_LINK1, it succeeds without trying to allocate new
|
* STREAM_LINK0 and STREAM_LINK1, it succeeds without trying to allocate new
|
||||||
* pages.
|
* pages.
|
||||||
*
|
*
|
||||||
|
|
@ -188,7 +188,7 @@ static bool pvr_csb_buffer_extend(struct pvr_csb *csb)
|
||||||
* \param[in] num_dwords Number of dwords to allocate.
|
* \param[in] num_dwords Number of dwords to allocate.
|
||||||
* \return Valid host virtual address or NULL otherwise.
|
* \return Valid host virtual address or NULL otherwise.
|
||||||
*/
|
*/
|
||||||
void *PVR_PER_ARCH(csb_alloc_dwords)(struct pvr_csb *csb, uint32_t num_dwords)
|
void *pvr_arch_csb_alloc_dwords(struct pvr_csb *csb, uint32_t num_dwords)
|
||||||
{
|
{
|
||||||
const uint32_t required_space = PVR_DW_TO_BYTES(num_dwords);
|
const uint32_t required_space = PVR_DW_TO_BYTES(num_dwords);
|
||||||
void *p;
|
void *p;
|
||||||
|
|
@ -241,8 +241,7 @@ void *PVR_PER_ARCH(csb_alloc_dwords)(struct pvr_csb *csb, uint32_t num_dwords)
|
||||||
* \param[in,out] csb_dst Destination control Stream Builder object.
|
* \param[in,out] csb_dst Destination control Stream Builder object.
|
||||||
* \param[in] csb_src Source Control Stream Builder object.
|
* \param[in] csb_src Source Control Stream Builder object.
|
||||||
*/
|
*/
|
||||||
VkResult PVR_PER_ARCH(csb_copy)(struct pvr_csb *csb_dst,
|
VkResult pvr_arch_csb_copy(struct pvr_csb *csb_dst, struct pvr_csb *csb_src)
|
||||||
struct pvr_csb *csb_src)
|
|
||||||
{
|
{
|
||||||
const uint8_t stream_reserved_space =
|
const uint8_t stream_reserved_space =
|
||||||
PVR_DW_TO_BYTES(pvr_cmd_length(VDMCTRL_STREAM_LINK0) +
|
PVR_DW_TO_BYTES(pvr_cmd_length(VDMCTRL_STREAM_LINK0) +
|
||||||
|
|
@ -270,7 +269,7 @@ VkResult PVR_PER_ARCH(csb_copy)(struct pvr_csb *csb_dst,
|
||||||
assert(!"CSB source buffer too large to do a full copy");
|
assert(!"CSB source buffer too large to do a full copy");
|
||||||
}
|
}
|
||||||
|
|
||||||
destination = PVR_PER_ARCH(csb_alloc_dwords)(csb_dst, size);
|
destination = pvr_arch_csb_alloc_dwords(csb_dst, size);
|
||||||
if (!destination) {
|
if (!destination) {
|
||||||
assert(csb_dst->status != VK_SUCCESS);
|
assert(csb_dst->status != VK_SUCCESS);
|
||||||
return csb_dst->status;
|
return csb_dst->status;
|
||||||
|
|
@ -290,9 +289,7 @@ VkResult PVR_PER_ARCH(csb_copy)(struct pvr_csb *csb_dst,
|
||||||
* \param[in] ret Selects whether the sub control stream will return or
|
* \param[in] ret Selects whether the sub control stream will return or
|
||||||
* terminate.
|
* terminate.
|
||||||
*/
|
*/
|
||||||
void PVR_PER_ARCH(csb_emit_link)(struct pvr_csb *csb,
|
void pvr_arch_csb_emit_link(struct pvr_csb *csb, pvr_dev_addr_t addr, bool ret)
|
||||||
pvr_dev_addr_t addr,
|
|
||||||
bool ret)
|
|
||||||
{
|
{
|
||||||
pvr_csb_set_relocation_mark(csb);
|
pvr_csb_set_relocation_mark(csb);
|
||||||
pvr_csb_emit_link_unmarked(csb, addr, ret);
|
pvr_csb_emit_link_unmarked(csb, addr, ret);
|
||||||
|
|
@ -307,7 +304,7 @@ void PVR_PER_ARCH(csb_emit_link)(struct pvr_csb *csb,
|
||||||
* \param[in] csb Control Stream Builder object to add VDMCTRL_STREAM_RETURN to.
|
* \param[in] csb Control Stream Builder object to add VDMCTRL_STREAM_RETURN to.
|
||||||
* \return VK_SUCCESS on success, or error code otherwise.
|
* \return VK_SUCCESS on success, or error code otherwise.
|
||||||
*/
|
*/
|
||||||
VkResult PVR_PER_ARCH(csb_emit_return)(struct pvr_csb *csb)
|
VkResult pvr_arch_csb_emit_return(struct pvr_csb *csb)
|
||||||
{
|
{
|
||||||
/* STREAM_RETURN is only supported by graphics control stream. */
|
/* STREAM_RETURN is only supported by graphics control stream. */
|
||||||
assert(csb->stream_type == PVR_CMD_STREAM_TYPE_GRAPHICS ||
|
assert(csb->stream_type == PVR_CMD_STREAM_TYPE_GRAPHICS ||
|
||||||
|
|
@ -330,7 +327,7 @@ VkResult PVR_PER_ARCH(csb_emit_return)(struct pvr_csb *csb)
|
||||||
* \param[in] csb Control Stream Builder object to terminate.
|
* \param[in] csb Control Stream Builder object to terminate.
|
||||||
* \return VK_SUCCESS on success, or error code otherwise.
|
* \return VK_SUCCESS on success, or error code otherwise.
|
||||||
*/
|
*/
|
||||||
VkResult PVR_PER_ARCH(csb_emit_terminate)(struct pvr_csb *csb)
|
VkResult pvr_arch_csb_emit_terminate(struct pvr_csb *csb)
|
||||||
{
|
{
|
||||||
pvr_csb_set_relocation_mark(csb);
|
pvr_csb_set_relocation_mark(csb);
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -86,7 +86,7 @@ static uint32_t pvr_get_simultaneous_num_allocs(
|
||||||
return 4;
|
return 4;
|
||||||
}
|
}
|
||||||
|
|
||||||
uint32_t PVR_PER_ARCH(calc_fscommon_size_and_tiles_in_flight)(
|
uint32_t pvr_arch_calc_fscommon_size_and_tiles_in_flight(
|
||||||
const struct pvr_device_info *dev_info,
|
const struct pvr_device_info *dev_info,
|
||||||
const struct pvr_device_runtime_info *dev_runtime_info,
|
const struct pvr_device_runtime_info *dev_runtime_info,
|
||||||
uint32_t fs_common_size,
|
uint32_t fs_common_size,
|
||||||
|
|
@ -148,7 +148,7 @@ uint32_t PVR_PER_ARCH(calc_fscommon_size_and_tiles_in_flight)(
|
||||||
return MIN2(num_tile_in_flight, max_tiles_in_flight);
|
return MIN2(num_tile_in_flight, max_tiles_in_flight);
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(pds_compute_shader_create_and_upload)(
|
VkResult pvr_arch_pds_compute_shader_create_and_upload(
|
||||||
struct pvr_device *device,
|
struct pvr_device *device,
|
||||||
struct pvr_pds_compute_shader_program *program,
|
struct pvr_pds_compute_shader_program *program,
|
||||||
struct pvr_pds_upload *const pds_upload_out)
|
struct pvr_pds_upload *const pds_upload_out)
|
||||||
|
|
@ -224,7 +224,7 @@ static VkResult pvr_device_init_compute_fence_program(struct pvr_device *device)
|
||||||
program.fence = true;
|
program.fence = true;
|
||||||
program.clear_pds_barrier = true;
|
program.clear_pds_barrier = true;
|
||||||
|
|
||||||
return pvr_pds_compute_shader_create_and_upload(
|
return pvr_arch_pds_compute_shader_create_and_upload(
|
||||||
device,
|
device,
|
||||||
&program,
|
&program,
|
||||||
&device->pds_compute_fence_program);
|
&device->pds_compute_fence_program);
|
||||||
|
|
@ -237,7 +237,7 @@ static VkResult pvr_device_init_compute_empty_program(struct pvr_device *device)
|
||||||
pvr_pds_compute_shader_program_init(&program);
|
pvr_pds_compute_shader_program_init(&program);
|
||||||
program.clear_pds_barrier = true;
|
program.clear_pds_barrier = true;
|
||||||
|
|
||||||
return pvr_pds_compute_shader_create_and_upload(
|
return pvr_arch_pds_compute_shader_create_and_upload(
|
||||||
device,
|
device,
|
||||||
&program,
|
&program,
|
||||||
&device->pds_compute_empty_program);
|
&device->pds_compute_empty_program);
|
||||||
|
|
@ -444,7 +444,7 @@ static VkResult pvr_device_init_compute_idfwdf_state(struct pvr_device *device)
|
||||||
.addr = device->idfwdf_state.store_bo->vma->dev_addr,
|
.addr = device->idfwdf_state.store_bo->vma->dev_addr,
|
||||||
};
|
};
|
||||||
|
|
||||||
result = pvr_pack_tex_state(device, &tex_info, &image_state);
|
result = pvr_arch_pack_tex_state(device, &tex_info, &image_state);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_free_shareds_buffer;
|
goto err_free_shareds_buffer;
|
||||||
|
|
||||||
|
|
@ -809,7 +809,7 @@ VkResult PVR_PER_ARCH(create_device)(struct pvr_physical_device *pdevice,
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_pvr_free_compute_empty;
|
goto err_pvr_free_compute_empty;
|
||||||
|
|
||||||
result = pvr_device_create_compute_query_programs(device);
|
result = pvr_arch_device_create_compute_query_programs(device);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_pvr_free_view_index;
|
goto err_pvr_free_view_index;
|
||||||
|
|
||||||
|
|
@ -821,13 +821,13 @@ VkResult PVR_PER_ARCH(create_device)(struct pvr_physical_device *pdevice,
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_pvr_finish_compute_idfwdf;
|
goto err_pvr_finish_compute_idfwdf;
|
||||||
|
|
||||||
result = pvr_device_init_spm_load_state(device);
|
result = pvr_arch_device_init_spm_load_state(device);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_pvr_finish_graphics_static_clear_state;
|
goto err_pvr_finish_graphics_static_clear_state;
|
||||||
|
|
||||||
pvr_device_init_tile_buffer_state(device);
|
pvr_device_init_tile_buffer_state(device);
|
||||||
|
|
||||||
result = pvr_queues_create(device, pCreateInfo);
|
result = pvr_arch_queues_create(device, pCreateInfo);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_pvr_finish_tile_buffer_state;
|
goto err_pvr_finish_tile_buffer_state;
|
||||||
|
|
||||||
|
|
@ -839,7 +839,7 @@ VkResult PVR_PER_ARCH(create_device)(struct pvr_physical_device *pdevice,
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_pvr_spm_finish_scratch_buffer_store;
|
goto err_pvr_spm_finish_scratch_buffer_store;
|
||||||
|
|
||||||
result = pvr_border_color_table_init(device);
|
result = pvr_arch_border_color_table_init(device);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_pvr_robustness_buffer_finish;
|
goto err_pvr_robustness_buffer_finish;
|
||||||
|
|
||||||
|
|
@ -867,11 +867,11 @@ err_pvr_robustness_buffer_finish:
|
||||||
err_pvr_spm_finish_scratch_buffer_store:
|
err_pvr_spm_finish_scratch_buffer_store:
|
||||||
pvr_spm_finish_scratch_buffer_store(device);
|
pvr_spm_finish_scratch_buffer_store(device);
|
||||||
|
|
||||||
pvr_queues_destroy(device);
|
pvr_arch_queues_destroy(device);
|
||||||
|
|
||||||
err_pvr_finish_tile_buffer_state:
|
err_pvr_finish_tile_buffer_state:
|
||||||
pvr_device_finish_tile_buffer_state(device);
|
pvr_device_finish_tile_buffer_state(device);
|
||||||
pvr_device_finish_spm_load_state(device);
|
pvr_arch_device_finish_spm_load_state(device);
|
||||||
|
|
||||||
err_pvr_finish_graphics_static_clear_state:
|
err_pvr_finish_graphics_static_clear_state:
|
||||||
pvr_device_finish_graphics_static_clear_state(device);
|
pvr_device_finish_graphics_static_clear_state(device);
|
||||||
|
|
@ -880,7 +880,7 @@ err_pvr_finish_compute_idfwdf:
|
||||||
pvr_device_finish_compute_idfwdf_state(device);
|
pvr_device_finish_compute_idfwdf_state(device);
|
||||||
|
|
||||||
err_pvr_destroy_compute_query_programs:
|
err_pvr_destroy_compute_query_programs:
|
||||||
pvr_device_destroy_compute_query_programs(device);
|
pvr_arch_device_destroy_compute_query_programs(device);
|
||||||
|
|
||||||
err_pvr_free_view_index:
|
err_pvr_free_view_index:
|
||||||
for (uint32_t u = 0; u < PVR_MAX_MULTIVIEW; ++u)
|
for (uint32_t u = 0; u < PVR_MAX_MULTIVIEW; ++u)
|
||||||
|
|
@ -942,15 +942,15 @@ void PVR_PER_ARCH(destroy_device)(struct pvr_device *device,
|
||||||
simple_mtx_unlock(&device->rs_mtx);
|
simple_mtx_unlock(&device->rs_mtx);
|
||||||
simple_mtx_destroy(&device->rs_mtx);
|
simple_mtx_destroy(&device->rs_mtx);
|
||||||
|
|
||||||
pvr_border_color_table_finish(device);
|
pvr_arch_border_color_table_finish(device);
|
||||||
pvr_robustness_buffer_finish(device);
|
pvr_robustness_buffer_finish(device);
|
||||||
pvr_spm_finish_scratch_buffer_store(device);
|
pvr_spm_finish_scratch_buffer_store(device);
|
||||||
pvr_queues_destroy(device);
|
pvr_arch_queues_destroy(device);
|
||||||
pvr_device_finish_tile_buffer_state(device);
|
pvr_device_finish_tile_buffer_state(device);
|
||||||
pvr_device_finish_spm_load_state(device);
|
pvr_arch_device_finish_spm_load_state(device);
|
||||||
pvr_device_finish_graphics_static_clear_state(device);
|
pvr_device_finish_graphics_static_clear_state(device);
|
||||||
pvr_device_finish_compute_idfwdf_state(device);
|
pvr_device_finish_compute_idfwdf_state(device);
|
||||||
pvr_device_destroy_compute_query_programs(device);
|
pvr_arch_device_destroy_compute_query_programs(device);
|
||||||
pvr_bo_suballoc_free(device->pds_compute_empty_program.pvr_bo);
|
pvr_bo_suballoc_free(device->pds_compute_empty_program.pvr_bo);
|
||||||
|
|
||||||
for (uint32_t u = 0; u < PVR_MAX_MULTIVIEW; ++u)
|
for (uint32_t u = 0; u < PVR_MAX_MULTIVIEW; ++u)
|
||||||
|
|
|
||||||
|
|
@ -255,15 +255,14 @@ static const struct pvr_pbe_format pvr_pbe_format_table[] = {
|
||||||
#undef FORMAT
|
#undef FORMAT
|
||||||
#undef FORMAT_DEPTH_STENCIL
|
#undef FORMAT_DEPTH_STENCIL
|
||||||
|
|
||||||
const struct pvr_format *PVR_PER_ARCH(get_format_table)(unsigned *num_formats)
|
const struct pvr_format *pvr_arch_get_format_table(unsigned *num_formats)
|
||||||
{
|
{
|
||||||
assert(num_formats != NULL);
|
assert(num_formats != NULL);
|
||||||
*num_formats = ARRAY_SIZE(pvr_format_table);
|
*num_formats = ARRAY_SIZE(pvr_format_table);
|
||||||
return pvr_format_table;
|
return pvr_format_table;
|
||||||
}
|
}
|
||||||
|
|
||||||
static inline const struct pvr_format *
|
static inline const struct pvr_format *get_format(VkFormat vk_format)
|
||||||
PVR_PER_ARCH(get_format)(VkFormat vk_format)
|
|
||||||
{
|
{
|
||||||
if (vk_format < ARRAY_SIZE(pvr_format_table) &&
|
if (vk_format < ARRAY_SIZE(pvr_format_table) &&
|
||||||
pvr_format_table[vk_format].bind != 0) {
|
pvr_format_table[vk_format].bind != 0) {
|
||||||
|
|
@ -284,9 +283,9 @@ pvr_get_pbe_format(VkFormat vk_format)
|
||||||
return &pvr_pbe_format_table[vk_format];
|
return &pvr_pbe_format_table[vk_format];
|
||||||
}
|
}
|
||||||
|
|
||||||
uint32_t PVR_PER_ARCH(get_tex_format)(VkFormat vk_format)
|
uint32_t pvr_arch_get_tex_format(VkFormat vk_format)
|
||||||
{
|
{
|
||||||
const struct pvr_format *pvr_format = PVR_PER_ARCH(get_format)(vk_format);
|
const struct pvr_format *pvr_format = get_format(vk_format);
|
||||||
if (pvr_format) {
|
if (pvr_format) {
|
||||||
return pvr_format->tex_format;
|
return pvr_format->tex_format;
|
||||||
}
|
}
|
||||||
|
|
@ -294,10 +293,10 @@ uint32_t PVR_PER_ARCH(get_tex_format)(VkFormat vk_format)
|
||||||
return ROGUE_TEXSTATE_FORMAT_INVALID;
|
return ROGUE_TEXSTATE_FORMAT_INVALID;
|
||||||
}
|
}
|
||||||
|
|
||||||
uint32_t PVR_PER_ARCH(get_tex_format_aspect)(VkFormat vk_format,
|
uint32_t pvr_arch_get_tex_format_aspect(VkFormat vk_format,
|
||||||
VkImageAspectFlags aspect_mask)
|
VkImageAspectFlags aspect_mask)
|
||||||
{
|
{
|
||||||
const struct pvr_format *pvr_format = PVR_PER_ARCH(get_format)(vk_format);
|
const struct pvr_format *pvr_format = get_format(vk_format);
|
||||||
if (pvr_format) {
|
if (pvr_format) {
|
||||||
if (aspect_mask == VK_IMAGE_ASPECT_DEPTH_BIT)
|
if (aspect_mask == VK_IMAGE_ASPECT_DEPTH_BIT)
|
||||||
return pvr_format->depth_tex_format;
|
return pvr_format->depth_tex_format;
|
||||||
|
|
@ -310,7 +309,7 @@ uint32_t PVR_PER_ARCH(get_tex_format_aspect)(VkFormat vk_format,
|
||||||
return ROGUE_TEXSTATE_FORMAT_INVALID;
|
return ROGUE_TEXSTATE_FORMAT_INVALID;
|
||||||
}
|
}
|
||||||
|
|
||||||
uint32_t pvr_get_pbe_packmode(VkFormat vk_format)
|
uint32_t pvr_arch_get_pbe_packmode(VkFormat vk_format)
|
||||||
{
|
{
|
||||||
if (vk_format_is_block_compressed(vk_format))
|
if (vk_format_is_block_compressed(vk_format))
|
||||||
return ROGUE_PBESTATE_PACKMODE_INVALID;
|
return ROGUE_PBESTATE_PACKMODE_INVALID;
|
||||||
|
|
@ -318,7 +317,7 @@ uint32_t pvr_get_pbe_packmode(VkFormat vk_format)
|
||||||
return pvr_get_pbe_format(vk_format)->packmode;
|
return pvr_get_pbe_format(vk_format)->packmode;
|
||||||
}
|
}
|
||||||
|
|
||||||
uint32_t pvr_get_pbe_accum_format(VkFormat vk_format)
|
uint32_t pvr_arch_get_pbe_accum_format(VkFormat vk_format)
|
||||||
{
|
{
|
||||||
if (vk_format_is_block_compressed(vk_format))
|
if (vk_format_is_block_compressed(vk_format))
|
||||||
return PVR_PBE_ACCUM_FORMAT_INVALID;
|
return PVR_PBE_ACCUM_FORMAT_INVALID;
|
||||||
|
|
@ -326,9 +325,8 @@ uint32_t pvr_get_pbe_accum_format(VkFormat vk_format)
|
||||||
return pvr_get_pbe_format(vk_format)->accum_format;
|
return pvr_get_pbe_format(vk_format)->accum_format;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool PVR_PER_ARCH(format_is_pbe_downscalable)(
|
bool pvr_arch_format_is_pbe_downscalable(const struct pvr_device_info *dev_info,
|
||||||
const struct pvr_device_info *dev_info,
|
VkFormat vk_format)
|
||||||
VkFormat vk_format)
|
|
||||||
{
|
{
|
||||||
if (vk_format_is_int(vk_format)) {
|
if (vk_format_is_int(vk_format)) {
|
||||||
/* PBE downscale behavior for integer formats does not match Vulkan
|
/* PBE downscale behavior for integer formats does not match Vulkan
|
||||||
|
|
@ -338,7 +336,7 @@ bool PVR_PER_ARCH(format_is_pbe_downscalable)(
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
switch (PVR_PER_ARCH(get_pbe_packmode)(vk_format)) {
|
switch (pvr_arch_get_pbe_packmode(vk_format)) {
|
||||||
default:
|
default:
|
||||||
return true;
|
return true;
|
||||||
case ROGUE_PBESTATE_PACKMODE_F16:
|
case ROGUE_PBESTATE_PACKMODE_F16:
|
||||||
|
|
|
||||||
|
|
@ -91,12 +91,12 @@ err_mutex_destroy:
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(render_state_setup)(
|
VkResult
|
||||||
struct pvr_device *device,
|
pvr_arch_render_state_setup(struct pvr_device *device,
|
||||||
const VkAllocationCallbacks *pAllocator,
|
const VkAllocationCallbacks *pAllocator,
|
||||||
struct pvr_render_state *rstate,
|
struct pvr_render_state *rstate,
|
||||||
uint32_t render_count,
|
uint32_t render_count,
|
||||||
const struct pvr_renderpass_hwsetup_render *renders)
|
const struct pvr_renderpass_hwsetup_render *renders)
|
||||||
{
|
{
|
||||||
struct pvr_spm_bgobj_state *spm_bgobj_state_per_render;
|
struct pvr_spm_bgobj_state *spm_bgobj_state_per_render;
|
||||||
struct pvr_spm_eot_state *spm_eot_state_per_render;
|
struct pvr_spm_eot_state *spm_eot_state_per_render;
|
||||||
|
|
@ -147,17 +147,17 @@ VkResult PVR_PER_ARCH(render_state_setup)(
|
||||||
goto err_release_scratch_buffer;
|
goto err_release_scratch_buffer;
|
||||||
|
|
||||||
for (uint32_t i = 0; i < render_count; i++) {
|
for (uint32_t i = 0; i < render_count; i++) {
|
||||||
result = pvr_spm_init_eot_state(device,
|
result = pvr_arch_spm_init_eot_state(device,
|
||||||
&spm_eot_state_per_render[i],
|
&spm_eot_state_per_render[i],
|
||||||
rstate,
|
rstate,
|
||||||
&renders[i]);
|
&renders[i]);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_finish_eot_state;
|
goto err_finish_eot_state;
|
||||||
|
|
||||||
result = pvr_spm_init_bgobj_state(device,
|
result = pvr_arch_spm_init_bgobj_state(device,
|
||||||
&spm_bgobj_state_per_render[i],
|
&spm_bgobj_state_per_render[i],
|
||||||
rstate,
|
rstate,
|
||||||
&renders[i]);
|
&renders[i]);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_finish_bgobj_state;
|
goto err_finish_bgobj_state;
|
||||||
|
|
||||||
|
|
@ -202,7 +202,7 @@ pvr_render_pass_get_scratch_buffer_size(struct pvr_device *device,
|
||||||
const struct pvr_render_pass *pass,
|
const struct pvr_render_pass *pass,
|
||||||
const struct pvr_render_state *rstate)
|
const struct pvr_render_state *rstate)
|
||||||
{
|
{
|
||||||
return pvr_spm_scratch_buffer_calc_required_size(
|
return pvr_arch_spm_scratch_buffer_calc_required_size(
|
||||||
pass->hw_setup->renders,
|
pass->hw_setup->renders,
|
||||||
pass->hw_setup->render_count,
|
pass->hw_setup->render_count,
|
||||||
pass->max_sample_count,
|
pass->max_sample_count,
|
||||||
|
|
@ -267,11 +267,11 @@ PVR_PER_ARCH(CreateFramebuffer)(VkDevice _device,
|
||||||
rstate->scratch_buffer_size =
|
rstate->scratch_buffer_size =
|
||||||
pvr_render_pass_get_scratch_buffer_size(device, pass, rstate);
|
pvr_render_pass_get_scratch_buffer_size(device, pass, rstate);
|
||||||
|
|
||||||
result = pvr_render_state_setup(device,
|
result = pvr_arch_render_state_setup(device,
|
||||||
pAllocator,
|
pAllocator,
|
||||||
rstate,
|
rstate,
|
||||||
pass->hw_setup->render_count,
|
pass->hw_setup->render_count,
|
||||||
pass->hw_setup->renders);
|
pass->hw_setup->renders);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_free_framebuffer;
|
goto err_free_framebuffer;
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -2454,9 +2454,8 @@ pvr_count_uses_in_color_output_list(struct pvr_render_subpass *subpass,
|
||||||
*resolve_output_count_out = resolve_count;
|
*resolve_output_count_out = resolve_count;
|
||||||
}
|
}
|
||||||
|
|
||||||
void PVR_PER_ARCH(destroy_renderpass_hwsetup)(
|
void pvr_arch_destroy_renderpass_hwsetup(const VkAllocationCallbacks *alloc,
|
||||||
const VkAllocationCallbacks *alloc,
|
struct pvr_renderpass_hwsetup *hw_setup)
|
||||||
struct pvr_renderpass_hwsetup *hw_setup)
|
|
||||||
{
|
{
|
||||||
for (uint32_t i = 0U; i < hw_setup->render_count; i++) {
|
for (uint32_t i = 0U; i < hw_setup->render_count; i++) {
|
||||||
struct pvr_renderpass_hwsetup_render *hw_render = &hw_setup->renders[i];
|
struct pvr_renderpass_hwsetup_render *hw_render = &hw_setup->renders[i];
|
||||||
|
|
@ -2482,7 +2481,7 @@ void PVR_PER_ARCH(destroy_renderpass_hwsetup)(
|
||||||
vk_free(alloc, hw_setup);
|
vk_free(alloc, hw_setup);
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(create_renderpass_hwsetup)(
|
VkResult pvr_arch_create_renderpass_hwsetup(
|
||||||
struct pvr_device *device,
|
struct pvr_device *device,
|
||||||
const VkAllocationCallbacks *alloc,
|
const VkAllocationCallbacks *alloc,
|
||||||
struct pvr_render_pass *pass,
|
struct pvr_render_pass *pass,
|
||||||
|
|
@ -2552,7 +2551,7 @@ VkResult PVR_PER_ARCH(create_renderpass_hwsetup)(
|
||||||
const uint32_t part_bits = 0;
|
const uint32_t part_bits = 0;
|
||||||
|
|
||||||
if (vk_format_is_color(format) &&
|
if (vk_format_is_color(format) &&
|
||||||
pvr_get_pbe_accum_format(attachment->vk_format) ==
|
pvr_arch_get_pbe_accum_format(attachment->vk_format) ==
|
||||||
PVR_PBE_ACCUM_FORMAT_INVALID) {
|
PVR_PBE_ACCUM_FORMAT_INVALID) {
|
||||||
/* The VkFormat is not supported as a color attachment so `0`.
|
/* The VkFormat is not supported as a color attachment so `0`.
|
||||||
* Vulkan doesn't seems to restrict vkCreateRenderPass() to supported
|
* Vulkan doesn't seems to restrict vkCreateRenderPass() to supported
|
||||||
|
|
@ -2720,7 +2719,7 @@ end_create_renderpass_hwsetup:
|
||||||
pvr_free_render(ctx);
|
pvr_free_render(ctx);
|
||||||
|
|
||||||
if (hw_setup) {
|
if (hw_setup) {
|
||||||
PVR_PER_ARCH(destroy_renderpass_hwsetup)(alloc, hw_setup);
|
pvr_arch_destroy_renderpass_hwsetup(alloc, hw_setup);
|
||||||
hw_setup = NULL;
|
hw_setup = NULL;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -122,9 +122,9 @@ VkResult PVR_PER_ARCH(CreateImageView)(VkDevice _device,
|
||||||
|
|
||||||
util_format_compose_swizzles(format_swizzle, input_swizzle, info.swizzle);
|
util_format_compose_swizzles(format_swizzle, input_swizzle, info.swizzle);
|
||||||
|
|
||||||
result = pvr_pack_tex_state(device,
|
result = pvr_arch_pack_tex_state(device,
|
||||||
&info,
|
&info,
|
||||||
&iview->image_state[info.tex_state_type]);
|
&iview->image_state[info.tex_state_type]);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_vk_image_view_destroy;
|
goto err_vk_image_view_destroy;
|
||||||
|
|
||||||
|
|
@ -134,9 +134,10 @@ VkResult PVR_PER_ARCH(CreateImageView)(VkDevice _device,
|
||||||
if (info.is_cube && image->vk.usage & VK_IMAGE_USAGE_STORAGE_BIT) {
|
if (info.is_cube && image->vk.usage & VK_IMAGE_USAGE_STORAGE_BIT) {
|
||||||
info.tex_state_type = PVR_TEXTURE_STATE_STORAGE;
|
info.tex_state_type = PVR_TEXTURE_STATE_STORAGE;
|
||||||
|
|
||||||
result = pvr_pack_tex_state(device,
|
result =
|
||||||
&info,
|
pvr_arch_pack_tex_state(device,
|
||||||
&iview->image_state[info.tex_state_type]);
|
&info,
|
||||||
|
&iview->image_state[info.tex_state_type]);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_vk_image_view_destroy;
|
goto err_vk_image_view_destroy;
|
||||||
}
|
}
|
||||||
|
|
@ -165,9 +166,10 @@ VkResult PVR_PER_ARCH(CreateImageView)(VkDevice _device,
|
||||||
info.type = iview->vk.view_type;
|
info.type = iview->vk.view_type;
|
||||||
}
|
}
|
||||||
|
|
||||||
result = pvr_pack_tex_state(device,
|
result =
|
||||||
&info,
|
pvr_arch_pack_tex_state(device,
|
||||||
&iview->image_state[info.tex_state_type]);
|
&info,
|
||||||
|
&iview->image_state[info.tex_state_type]);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_vk_image_view_destroy;
|
goto err_vk_image_view_destroy;
|
||||||
}
|
}
|
||||||
|
|
@ -251,7 +253,7 @@ PVR_PER_ARCH(CreateBufferView)(VkDevice _device,
|
||||||
format_swizzle = pvr_get_format_swizzle(info.format);
|
format_swizzle = pvr_get_format_swizzle(info.format);
|
||||||
memcpy(info.swizzle, format_swizzle, sizeof(info.swizzle));
|
memcpy(info.swizzle, format_swizzle, sizeof(info.swizzle));
|
||||||
|
|
||||||
result = pvr_pack_tex_state(device, &info, &bview->image_state);
|
result = pvr_arch_pack_tex_state(device, &info, &bview->image_state);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_vk_buffer_view_destroy;
|
goto err_vk_buffer_view_destroy;
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -36,12 +36,11 @@
|
||||||
#include "vk_format.h"
|
#include "vk_format.h"
|
||||||
#include "vk_object.h"
|
#include "vk_object.h"
|
||||||
|
|
||||||
void PVR_PER_ARCH(pbe_get_src_format_and_gamma)(
|
void pvr_arch_pbe_get_src_format_and_gamma(VkFormat vk_format,
|
||||||
VkFormat vk_format,
|
enum pvr_pbe_gamma default_gamma,
|
||||||
enum pvr_pbe_gamma default_gamma,
|
bool with_packed_usc_channel,
|
||||||
bool with_packed_usc_channel,
|
uint32_t *const src_format_out,
|
||||||
uint32_t *const src_format_out,
|
enum pvr_pbe_gamma *const gamma_out)
|
||||||
enum pvr_pbe_gamma *const gamma_out)
|
|
||||||
{
|
{
|
||||||
const struct util_format_description *desc =
|
const struct util_format_description *desc =
|
||||||
vk_format_description(vk_format);
|
vk_format_description(vk_format);
|
||||||
|
|
@ -80,7 +79,7 @@ void PVR_PER_ARCH(pbe_get_src_format_and_gamma)(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void PVR_PER_ARCH(pbe_pack_state)(
|
void pvr_arch_pbe_pack_state(
|
||||||
const struct pvr_device_info *dev_info,
|
const struct pvr_device_info *dev_info,
|
||||||
const struct pvr_pbe_surf_params *surface_params,
|
const struct pvr_pbe_surf_params *surface_params,
|
||||||
const struct pvr_pbe_render_params *render_params,
|
const struct pvr_pbe_render_params *render_params,
|
||||||
|
|
@ -302,7 +301,7 @@ void PVR_PER_ARCH(pbe_pack_state)(
|
||||||
* total_tiles_in_flight so that CR_ISP_CTL can be fully packed in
|
* total_tiles_in_flight so that CR_ISP_CTL can be fully packed in
|
||||||
* pvr_render_job_ws_fragment_state_init().
|
* pvr_render_job_ws_fragment_state_init().
|
||||||
*/
|
*/
|
||||||
void PVR_PER_ARCH(setup_tiles_in_flight)(
|
void pvr_arch_setup_tiles_in_flight(
|
||||||
const struct pvr_device_info *dev_info,
|
const struct pvr_device_info *dev_info,
|
||||||
const struct pvr_device_runtime_info *dev_runtime_info,
|
const struct pvr_device_runtime_info *dev_runtime_info,
|
||||||
uint32_t msaa_mode,
|
uint32_t msaa_mode,
|
||||||
|
|
|
||||||
|
|
@ -212,10 +212,10 @@ static void pvr_compute_job_ws_submit_info_init(
|
||||||
pvr_submit_info_flags_init(dev_info, sub_cmd, &submit_info->flags);
|
pvr_submit_info_flags_init(dev_info, sub_cmd, &submit_info->flags);
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(compute_job_submit)(struct pvr_compute_ctx *ctx,
|
VkResult pvr_arch_compute_job_submit(struct pvr_compute_ctx *ctx,
|
||||||
struct pvr_sub_cmd_compute *sub_cmd,
|
struct pvr_sub_cmd_compute *sub_cmd,
|
||||||
struct vk_sync *wait,
|
struct vk_sync *wait,
|
||||||
struct vk_sync *signal_sync)
|
struct vk_sync *signal_sync)
|
||||||
{
|
{
|
||||||
struct pvr_winsys_compute_submit_info submit_info;
|
struct pvr_winsys_compute_submit_info submit_info;
|
||||||
struct pvr_device *device = ctx->device;
|
struct pvr_device *device = ctx->device;
|
||||||
|
|
|
||||||
|
|
@ -853,9 +853,9 @@ static void pvr_render_ctx_ws_create_info_init(
|
||||||
pvr_render_ctx_ws_static_state_init(ctx, &create_info->static_state);
|
pvr_render_ctx_ws_static_state_init(ctx, &create_info->static_state);
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(render_ctx_create)(struct pvr_device *device,
|
VkResult pvr_arch_render_ctx_create(struct pvr_device *device,
|
||||||
enum pvr_winsys_ctx_priority priority,
|
enum pvr_winsys_ctx_priority priority,
|
||||||
struct pvr_render_ctx **const ctx_out)
|
struct pvr_render_ctx **const ctx_out)
|
||||||
{
|
{
|
||||||
const uint64_t vdm_callstack_size =
|
const uint64_t vdm_callstack_size =
|
||||||
sizeof(uint64_t) * PVR_VDM_CALLSTACK_MAX_DEPTH;
|
sizeof(uint64_t) * PVR_VDM_CALLSTACK_MAX_DEPTH;
|
||||||
|
|
@ -920,7 +920,7 @@ err_vk_free_ctx:
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
void PVR_PER_ARCH(render_ctx_destroy)(struct pvr_render_ctx *ctx)
|
void pvr_arch_render_ctx_destroy(struct pvr_render_ctx *ctx)
|
||||||
{
|
{
|
||||||
struct pvr_device *device = ctx->device;
|
struct pvr_device *device = ctx->device;
|
||||||
|
|
||||||
|
|
@ -1103,10 +1103,9 @@ static void pvr_compute_ctx_ws_create_info_init(
|
||||||
&create_info->static_state);
|
&create_info->static_state);
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult
|
VkResult pvr_arch_compute_ctx_create(struct pvr_device *const device,
|
||||||
PVR_PER_ARCH(compute_ctx_create)(struct pvr_device *const device,
|
enum pvr_winsys_ctx_priority priority,
|
||||||
enum pvr_winsys_ctx_priority priority,
|
struct pvr_compute_ctx **const ctx_out)
|
||||||
struct pvr_compute_ctx **const ctx_out)
|
|
||||||
{
|
{
|
||||||
struct pvr_winsys_compute_ctx_create_info create_info;
|
struct pvr_winsys_compute_ctx_create_info create_info;
|
||||||
struct pvr_compute_ctx *ctx;
|
struct pvr_compute_ctx *ctx;
|
||||||
|
|
@ -1190,7 +1189,7 @@ err_free_ctx:
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
void PVR_PER_ARCH(compute_ctx_destroy)(struct pvr_compute_ctx *const ctx)
|
void pvr_arch_compute_ctx_destroy(struct pvr_compute_ctx *const ctx)
|
||||||
{
|
{
|
||||||
struct pvr_device *device = ctx->device;
|
struct pvr_device *device = ctx->device;
|
||||||
|
|
||||||
|
|
@ -1305,10 +1304,9 @@ static void pvr_transfer_ctx_shaders_fini(struct pvr_device *device,
|
||||||
pvr_transfer_frag_store_fini(device, &ctx->frag_store);
|
pvr_transfer_frag_store_fini(device, &ctx->frag_store);
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult
|
VkResult pvr_arch_transfer_ctx_create(struct pvr_device *const device,
|
||||||
PVR_PER_ARCH(transfer_ctx_create)(struct pvr_device *const device,
|
enum pvr_winsys_ctx_priority priority,
|
||||||
enum pvr_winsys_ctx_priority priority,
|
struct pvr_transfer_ctx **const ctx_out)
|
||||||
struct pvr_transfer_ctx **const ctx_out)
|
|
||||||
{
|
{
|
||||||
struct pvr_winsys_transfer_ctx_create_info create_info;
|
struct pvr_winsys_transfer_ctx_create_info create_info;
|
||||||
struct pvr_transfer_ctx *ctx;
|
struct pvr_transfer_ctx *ctx;
|
||||||
|
|
@ -1345,7 +1343,7 @@ PVR_PER_ARCH(transfer_ctx_create)(struct pvr_device *const device,
|
||||||
if (i == 0U && j == 0U)
|
if (i == 0U && j == 0U)
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
result = pvr_pds_unitex_state_program_create_and_upload(
|
result = pvr_arch_pds_unitex_state_program_create_and_upload(
|
||||||
device,
|
device,
|
||||||
NULL,
|
NULL,
|
||||||
i,
|
i,
|
||||||
|
|
@ -1385,7 +1383,7 @@ err_free_ctx:
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
void PVR_PER_ARCH(transfer_ctx_destroy)(struct pvr_transfer_ctx *const ctx)
|
void pvr_arch_transfer_ctx_destroy(struct pvr_transfer_ctx *const ctx)
|
||||||
{
|
{
|
||||||
struct pvr_device *device = ctx->device;
|
struct pvr_device *device = ctx->device;
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -108,11 +108,11 @@ static inline void pvr_get_samples_in_xy(uint32_t samples,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void PVR_PER_ARCH(rt_mtile_info_init)(const struct pvr_device_info *dev_info,
|
void pvr_arch_rt_mtile_info_init(const struct pvr_device_info *dev_info,
|
||||||
struct pvr_rt_mtile_info *info,
|
struct pvr_rt_mtile_info *info,
|
||||||
uint32_t width,
|
uint32_t width,
|
||||||
uint32_t height,
|
uint32_t height,
|
||||||
uint32_t samples)
|
uint32_t samples)
|
||||||
{
|
{
|
||||||
uint32_t samples_in_x;
|
uint32_t samples_in_x;
|
||||||
uint32_t samples_in_y;
|
uint32_t samples_in_y;
|
||||||
|
|
@ -611,7 +611,7 @@ static void pvr_rt_dataset_ws_create_info_init(
|
||||||
pvr_rt_get_isp_region_size(device, mtile_info);
|
pvr_rt_get_isp_region_size(device, mtile_info);
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(render_target_dataset_create)(
|
VkResult pvr_arch_render_target_dataset_create(
|
||||||
struct pvr_device *device,
|
struct pvr_device *device,
|
||||||
uint32_t width,
|
uint32_t width,
|
||||||
uint32_t height,
|
uint32_t height,
|
||||||
|
|
@ -632,7 +632,7 @@ VkResult PVR_PER_ARCH(render_target_dataset_create)(
|
||||||
assert(height <= rogue_get_render_size_max_y(dev_info));
|
assert(height <= rogue_get_render_size_max_y(dev_info));
|
||||||
assert(layers > 0 && layers <= PVR_MAX_FRAMEBUFFER_LAYERS);
|
assert(layers > 0 && layers <= PVR_MAX_FRAMEBUFFER_LAYERS);
|
||||||
|
|
||||||
pvr_rt_mtile_info_init(dev_info, &mtile_info, width, height, samples);
|
pvr_arch_rt_mtile_info_init(dev_info, &mtile_info, width, height, samples);
|
||||||
|
|
||||||
rt_dataset = vk_zalloc(&device->vk.alloc,
|
rt_dataset = vk_zalloc(&device->vk.alloc,
|
||||||
sizeof(*rt_dataset),
|
sizeof(*rt_dataset),
|
||||||
|
|
@ -925,14 +925,14 @@ static void pvr_frag_state_stream_init(struct pvr_render_ctx *ctx,
|
||||||
stream_ptr += pvr_cmd_length(KMD_STREAM_HDR);
|
stream_ptr += pvr_cmd_length(KMD_STREAM_HDR);
|
||||||
|
|
||||||
/* FIXME: pass in the number of samples rather than isp_aa_mode? */
|
/* FIXME: pass in the number of samples rather than isp_aa_mode? */
|
||||||
pvr_setup_tiles_in_flight(dev_info,
|
pvr_arch_setup_tiles_in_flight(dev_info,
|
||||||
dev_runtime_info,
|
dev_runtime_info,
|
||||||
isp_aa_mode,
|
isp_aa_mode,
|
||||||
job->pixel_output_width,
|
job->pixel_output_width,
|
||||||
false,
|
false,
|
||||||
job->max_tiles_in_flight,
|
job->max_tiles_in_flight,
|
||||||
&isp_ctl,
|
&isp_ctl,
|
||||||
&pixel_ctl);
|
&pixel_ctl);
|
||||||
|
|
||||||
pvr_csb_pack ((uint64_t *)stream_ptr, CR_ISP_SCISSOR_BASE, value) {
|
pvr_csb_pack ((uint64_t *)stream_ptr, CR_ISP_SCISSOR_BASE, value) {
|
||||||
value.addr = job->scissor_table_addr;
|
value.addr = job->scissor_table_addr;
|
||||||
|
|
@ -1136,11 +1136,11 @@ static void pvr_frag_state_stream_init(struct pvr_render_ctx *ctx,
|
||||||
}
|
}
|
||||||
stream_ptr += pvr_cmd_length(CR_ISP_AA);
|
stream_ptr += pvr_cmd_length(CR_ISP_AA);
|
||||||
|
|
||||||
pvr_rt_mtile_info_init(dev_info,
|
pvr_arch_rt_mtile_info_init(dev_info,
|
||||||
&tiling_info,
|
&tiling_info,
|
||||||
rt_dataset->width,
|
rt_dataset->width,
|
||||||
rt_dataset->height,
|
rt_dataset->height,
|
||||||
rt_dataset->samples);
|
rt_dataset->samples);
|
||||||
pvr_csb_pack (stream_ptr, CR_ISP_CTL, value) {
|
pvr_csb_pack (stream_ptr, CR_ISP_CTL, value) {
|
||||||
value.sample_pos = true;
|
value.sample_pos = true;
|
||||||
value.process_empty_tiles = job->process_empty_tiles;
|
value.process_empty_tiles = job->process_empty_tiles;
|
||||||
|
|
@ -1452,12 +1452,12 @@ static void pvr_render_job_ws_submit_info_init(
|
||||||
&submit_info->fragment_pr);
|
&submit_info->fragment_pr);
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(render_job_submit)(struct pvr_render_ctx *ctx,
|
VkResult pvr_arch_render_job_submit(struct pvr_render_ctx *ctx,
|
||||||
struct pvr_render_job *job,
|
struct pvr_render_job *job,
|
||||||
struct vk_sync *wait_geom,
|
struct vk_sync *wait_geom,
|
||||||
struct vk_sync *wait_frag,
|
struct vk_sync *wait_frag,
|
||||||
struct vk_sync *signal_sync_geom,
|
struct vk_sync *signal_sync_geom,
|
||||||
struct vk_sync *signal_sync_frag)
|
struct vk_sync *signal_sync_frag)
|
||||||
{
|
{
|
||||||
struct pvr_rt_dataset *rt_dataset =
|
struct pvr_rt_dataset *rt_dataset =
|
||||||
job->view_state.rt_datasets[job->view_state.view_index];
|
job->view_state.rt_datasets[job->view_state.view_index];
|
||||||
|
|
|
||||||
|
|
@ -816,14 +816,14 @@ pvr_pbe_setup_codegen_defaults(const struct pvr_device_info *dev_info,
|
||||||
swizzle = pvr_get_format_swizzle(format);
|
swizzle = pvr_get_format_swizzle(format);
|
||||||
memcpy(surface_params->swizzle, swizzle, sizeof(surface_params->swizzle));
|
memcpy(surface_params->swizzle, swizzle, sizeof(surface_params->swizzle));
|
||||||
|
|
||||||
pvr_pbe_get_src_format_and_gamma(format,
|
pvr_arch_pbe_get_src_format_and_gamma(format,
|
||||||
PVR_PBE_GAMMA_NONE,
|
PVR_PBE_GAMMA_NONE,
|
||||||
false,
|
false,
|
||||||
&surface_params->source_format,
|
&surface_params->source_format,
|
||||||
&surface_params->gamma);
|
&surface_params->gamma);
|
||||||
|
|
||||||
surface_params->is_normalized = pvr_vk_format_is_fully_normalized(format);
|
surface_params->is_normalized = pvr_vk_format_is_fully_normalized(format);
|
||||||
surface_params->pbe_packmode = pvr_get_pbe_packmode(format);
|
surface_params->pbe_packmode = pvr_arch_get_pbe_packmode(format);
|
||||||
surface_params->nr_components = vk_format_get_nr_components(format);
|
surface_params->nr_components = vk_format_get_nr_components(format);
|
||||||
|
|
||||||
result = pvr_mem_layout_spec(dst,
|
result = pvr_mem_layout_spec(dst,
|
||||||
|
|
@ -1206,16 +1206,16 @@ static VkResult pvr_pbe_setup_emit(const struct pvr_transfer_cmd *transfer_cmd,
|
||||||
staging_buffer + program.data_size,
|
staging_buffer + program.data_size,
|
||||||
dev_info);
|
dev_info);
|
||||||
|
|
||||||
result =
|
result = pvr_arch_cmd_buffer_upload_pds(
|
||||||
pvr_cmd_buffer_upload_pds(transfer_cmd->cmd_buffer,
|
transfer_cmd->cmd_buffer,
|
||||||
staging_buffer,
|
staging_buffer,
|
||||||
program.data_size,
|
program.data_size,
|
||||||
ROGUE_CR_EVENT_PIXEL_PDS_DATA_ADDR_ALIGNMENT,
|
ROGUE_CR_EVENT_PIXEL_PDS_DATA_ADDR_ALIGNMENT,
|
||||||
staging_buffer + program.data_size,
|
staging_buffer + program.data_size,
|
||||||
program.code_size,
|
program.code_size,
|
||||||
ROGUE_CR_EVENT_PIXEL_PDS_CODE_ADDR_ALIGNMENT,
|
ROGUE_CR_EVENT_PIXEL_PDS_CODE_ADDR_ALIGNMENT,
|
||||||
ROGUE_CR_EVENT_PIXEL_PDS_DATA_ADDR_ALIGNMENT,
|
ROGUE_CR_EVENT_PIXEL_PDS_DATA_ADDR_ALIGNMENT,
|
||||||
&pds_upload);
|
&pds_upload);
|
||||||
vk_free(&device->vk.alloc, staging_buffer);
|
vk_free(&device->vk.alloc, staging_buffer);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return result;
|
return result;
|
||||||
|
|
@ -1300,11 +1300,11 @@ static VkResult pvr_pbe_setup(const struct pvr_transfer_cmd *transfer_cmd,
|
||||||
|
|
||||||
pvr_pbe_setup_swizzle(transfer_cmd, state, &surf_params);
|
pvr_pbe_setup_swizzle(transfer_cmd, state, &surf_params);
|
||||||
|
|
||||||
pvr_pbe_pack_state(dev_info,
|
pvr_arch_pbe_pack_state(dev_info,
|
||||||
&surf_params,
|
&surf_params,
|
||||||
&render_params,
|
&render_params,
|
||||||
pbe_words,
|
pbe_words,
|
||||||
pbe_regs);
|
pbe_regs);
|
||||||
|
|
||||||
if (PVR_HAS_ERN(dev_info, 42064)) {
|
if (PVR_HAS_ERN(dev_info, 42064)) {
|
||||||
uint64_t temp_reg;
|
uint64_t temp_reg;
|
||||||
|
|
@ -1438,14 +1438,14 @@ static VkResult pvr_isp_tiles(const struct pvr_device *device,
|
||||||
reg.y = origin_y;
|
reg.y = origin_y;
|
||||||
}
|
}
|
||||||
|
|
||||||
pvr_setup_tiles_in_flight(dev_info,
|
pvr_arch_setup_tiles_in_flight(dev_info,
|
||||||
dev_runtime_info,
|
dev_runtime_info,
|
||||||
pvr_cr_isp_aa_mode_type(samples),
|
pvr_cr_isp_aa_mode_type(samples),
|
||||||
state->usc_pixel_width,
|
state->usc_pixel_width,
|
||||||
state->pair_tiles != PVR_PAIRED_TILES_NONE,
|
state->pair_tiles != PVR_PAIRED_TILES_NONE,
|
||||||
0,
|
0,
|
||||||
&isp_tiles_in_flight,
|
&isp_tiles_in_flight,
|
||||||
&state->regs.usc_pixel_output_ctrl);
|
&state->regs.usc_pixel_output_ctrl);
|
||||||
|
|
||||||
pvr_csb_pack (&state->regs.isp_ctl, CR_ISP_CTL, reg) {
|
pvr_csb_pack (&state->regs.isp_ctl, CR_ISP_CTL, reg) {
|
||||||
reg.process_empty_tiles = true;
|
reg.process_empty_tiles = true;
|
||||||
|
|
@ -1737,7 +1737,7 @@ static inline VkResult pvr_image_state_set_codegen_defaults(
|
||||||
else
|
else
|
||||||
info.type = VK_IMAGE_VIEW_TYPE_1D;
|
info.type = VK_IMAGE_VIEW_TYPE_1D;
|
||||||
|
|
||||||
result = pvr_pack_tex_state(device, &info, &image_state);
|
result = pvr_arch_pack_tex_state(device, &info, &image_state);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return result;
|
return result;
|
||||||
|
|
||||||
|
|
@ -2241,10 +2241,10 @@ pvr_pds_unitex(const struct pvr_device_info *dev_info,
|
||||||
ROGUE_TA_STATE_PDS_SIZEINFO1_PDS_TEXTURESTATESIZE_UNIT_SIZE);
|
ROGUE_TA_STATE_PDS_SIZEINFO1_PDS_TEXTURESTATESIZE_UNIT_SIZE);
|
||||||
|
|
||||||
result =
|
result =
|
||||||
pvr_cmd_buffer_alloc_mem(transfer_cmd->cmd_buffer,
|
pvr_arch_cmd_buffer_alloc_mem(transfer_cmd->cmd_buffer,
|
||||||
ctx->device->heaps.pds_heap,
|
ctx->device->heaps.pds_heap,
|
||||||
PVR_DW_TO_BYTES(state->tex_state_data_size),
|
PVR_DW_TO_BYTES(state->tex_state_data_size),
|
||||||
&pvr_bo);
|
&pvr_bo);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return result;
|
return result;
|
||||||
|
|
||||||
|
|
@ -2375,7 +2375,7 @@ static VkResult pvr_pack_clear_color(VkFormat format,
|
||||||
{
|
{
|
||||||
const uint32_t red_width =
|
const uint32_t red_width =
|
||||||
vk_format_get_component_bits(format, UTIL_FORMAT_COLORSPACE_RGB, 0U);
|
vk_format_get_component_bits(format, UTIL_FORMAT_COLORSPACE_RGB, 0U);
|
||||||
uint32_t pbe_pack_mode = pvr_get_pbe_packmode(format);
|
uint32_t pbe_pack_mode = pvr_arch_get_pbe_packmode(format);
|
||||||
const bool pbe_norm = pvr_vk_format_is_fully_normalized(format);
|
const bool pbe_norm = pvr_vk_format_is_fully_normalized(format);
|
||||||
|
|
||||||
/* TODO: Use PBE Accum format NOT PBE pack format! */
|
/* TODO: Use PBE Accum format NOT PBE pack format! */
|
||||||
|
|
@ -2950,10 +2950,11 @@ static VkResult pvr_3d_copy_blit_core(struct pvr_transfer_ctx *ctx,
|
||||||
unitex_prog.num_texture_dma_kicks = 1U;
|
unitex_prog.num_texture_dma_kicks = 1U;
|
||||||
unitex_prog.num_uniform_dma_kicks = 0U;
|
unitex_prog.num_uniform_dma_kicks = 0U;
|
||||||
|
|
||||||
result = pvr_cmd_buffer_alloc_mem(transfer_cmd->cmd_buffer,
|
result =
|
||||||
device->heaps.general_heap,
|
pvr_arch_cmd_buffer_alloc_mem(transfer_cmd->cmd_buffer,
|
||||||
PVR_DW_TO_BYTES(tex_state_dma_size_dw),
|
device->heaps.general_heap,
|
||||||
&pvr_bo);
|
PVR_DW_TO_BYTES(tex_state_dma_size_dw),
|
||||||
|
&pvr_bo);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return result;
|
return result;
|
||||||
|
|
||||||
|
|
@ -3132,7 +3133,7 @@ pvr_pds_coeff_task(struct pvr_transfer_ctx *ctx,
|
||||||
|
|
||||||
pvr_pds_set_sizes_coeff_loading(&program);
|
pvr_pds_set_sizes_coeff_loading(&program);
|
||||||
|
|
||||||
result = pvr_cmd_buffer_alloc_mem(
|
result = pvr_arch_cmd_buffer_alloc_mem(
|
||||||
transfer_cmd->cmd_buffer,
|
transfer_cmd->cmd_buffer,
|
||||||
ctx->device->heaps.pds_heap,
|
ctx->device->heaps.pds_heap,
|
||||||
PVR_DW_TO_BYTES(program.data_size + program.code_size),
|
PVR_DW_TO_BYTES(program.data_size + program.code_size),
|
||||||
|
|
@ -4131,10 +4132,10 @@ static VkResult pvr_isp_ctrl_stream(const struct pvr_device_info *dev_info,
|
||||||
total_stream_size = region_arrays_size + prim_blk_size;
|
total_stream_size = region_arrays_size + prim_blk_size;
|
||||||
|
|
||||||
/* Allocate space for IPF control stream. */
|
/* Allocate space for IPF control stream. */
|
||||||
result = pvr_cmd_buffer_alloc_mem(transfer_cmd->cmd_buffer,
|
result = pvr_arch_cmd_buffer_alloc_mem(transfer_cmd->cmd_buffer,
|
||||||
ctx->device->heaps.transfer_frag_heap,
|
ctx->device->heaps.transfer_frag_heap,
|
||||||
total_stream_size,
|
total_stream_size,
|
||||||
&pvr_cs_bo);
|
&pvr_cs_bo);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return result;
|
return result;
|
||||||
|
|
||||||
|
|
@ -4295,10 +4296,11 @@ static VkResult pvr_isp_ctrl_stream(const struct pvr_device_info *dev_info,
|
||||||
unitex_pds_prog.num_uniform_dma_kicks = 0U;
|
unitex_pds_prog.num_uniform_dma_kicks = 0U;
|
||||||
|
|
||||||
/* Allocate memory for DMA. */
|
/* Allocate memory for DMA. */
|
||||||
result = pvr_cmd_buffer_alloc_mem(transfer_cmd->cmd_buffer,
|
result =
|
||||||
ctx->device->heaps.general_heap,
|
pvr_arch_cmd_buffer_alloc_mem(transfer_cmd->cmd_buffer,
|
||||||
tex_state_dma_size << 2U,
|
ctx->device->heaps.general_heap,
|
||||||
&pvr_bo);
|
tex_state_dma_size << 2U,
|
||||||
|
&pvr_bo);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return result;
|
return result;
|
||||||
|
|
||||||
|
|
@ -6012,10 +6014,10 @@ static VkResult pvr_queue_transfer(struct pvr_transfer_ctx *ctx,
|
||||||
return VK_SUCCESS;
|
return VK_SUCCESS;
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(transfer_job_submit)(struct pvr_transfer_ctx *ctx,
|
VkResult pvr_arch_transfer_job_submit(struct pvr_transfer_ctx *ctx,
|
||||||
struct pvr_sub_cmd_transfer *sub_cmd,
|
struct pvr_sub_cmd_transfer *sub_cmd,
|
||||||
struct vk_sync *wait_sync,
|
struct vk_sync *wait_sync,
|
||||||
struct vk_sync *signal_sync)
|
struct vk_sync *signal_sync)
|
||||||
{
|
{
|
||||||
list_for_each_entry_safe (struct pvr_transfer_cmd,
|
list_for_each_entry_safe (struct pvr_transfer_cmd,
|
||||||
transfer_cmd,
|
transfer_cmd,
|
||||||
|
|
|
||||||
|
|
@ -79,7 +79,7 @@ static int32_t pvr_mrt_alloc_from_buffer(const struct pvr_device_info *dev_info,
|
||||||
return -1;
|
return -1;
|
||||||
}
|
}
|
||||||
|
|
||||||
void PVR_PER_ARCH(init_mrt_desc)(VkFormat format, struct usc_mrt_desc *desc)
|
void pvr_arch_init_mrt_desc(VkFormat format, struct usc_mrt_desc *desc)
|
||||||
{
|
{
|
||||||
uint32_t pixel_size_in_chunks;
|
uint32_t pixel_size_in_chunks;
|
||||||
uint32_t pixel_size_in_bits;
|
uint32_t pixel_size_in_bits;
|
||||||
|
|
@ -89,7 +89,7 @@ void PVR_PER_ARCH(init_mrt_desc)(VkFormat format, struct usc_mrt_desc *desc)
|
||||||
*/
|
*/
|
||||||
const uint32_t part_bits = 0;
|
const uint32_t part_bits = 0;
|
||||||
if (vk_format_is_color(format) &&
|
if (vk_format_is_color(format) &&
|
||||||
pvr_get_pbe_accum_format(format) == PVR_PBE_ACCUM_FORMAT_INVALID) {
|
pvr_arch_get_pbe_accum_format(format) == PVR_PBE_ACCUM_FORMAT_INVALID) {
|
||||||
/* The VkFormat is not supported as a color attachment so `0`.
|
/* The VkFormat is not supported as a color attachment so `0`.
|
||||||
* vulkan doesn't seem to restrict vkCreateRenderPass() to supported
|
* vulkan doesn't seem to restrict vkCreateRenderPass() to supported
|
||||||
* formats only.
|
* formats only.
|
||||||
|
|
@ -179,7 +179,7 @@ static VkResult pvr_alloc_mrt(const struct pvr_device_info *dev_info,
|
||||||
MAX2(alloc->output_regs_count, resource->mem.offset_dw + pixel_size);
|
MAX2(alloc->output_regs_count, resource->mem.offset_dw + pixel_size);
|
||||||
}
|
}
|
||||||
|
|
||||||
pvr_init_mrt_desc(format, &resource->mrt_desc);
|
pvr_arch_init_mrt_desc(format, &resource->mrt_desc);
|
||||||
resource->intermediate_size = resource->mrt_desc.intermediate_size;
|
resource->intermediate_size = resource->mrt_desc.intermediate_size;
|
||||||
|
|
||||||
setup->num_render_targets++;
|
setup->num_render_targets++;
|
||||||
|
|
@ -187,11 +187,11 @@ static VkResult pvr_alloc_mrt(const struct pvr_device_info *dev_info,
|
||||||
return VK_SUCCESS;
|
return VK_SUCCESS;
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(init_usc_mrt_setup)(
|
VkResult
|
||||||
struct pvr_device *device,
|
pvr_arch_init_usc_mrt_setup(struct pvr_device *device,
|
||||||
uint32_t attachment_count,
|
uint32_t attachment_count,
|
||||||
const VkFormat attachment_formats[attachment_count],
|
const VkFormat attachment_formats[attachment_count],
|
||||||
struct usc_mrt_setup *setup)
|
struct usc_mrt_setup *setup)
|
||||||
{
|
{
|
||||||
const struct pvr_device_info *dev_info = &device->pdevice->dev_info;
|
const struct pvr_device_info *dev_info = &device->pdevice->dev_info;
|
||||||
struct pvr_mrt_alloc_ctx alloc = { 0 };
|
struct pvr_mrt_alloc_ctx alloc = { 0 };
|
||||||
|
|
@ -230,8 +230,8 @@ fail:
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
void PVR_PER_ARCH(destroy_mrt_setup)(const struct pvr_device *device,
|
void pvr_arch_destroy_mrt_setup(const struct pvr_device *device,
|
||||||
struct usc_mrt_setup *setup)
|
struct usc_mrt_setup *setup)
|
||||||
{
|
{
|
||||||
if (!setup)
|
if (!setup)
|
||||||
return;
|
return;
|
||||||
|
|
@ -298,7 +298,7 @@ static VkResult pvr_mrt_load_op_init(struct pvr_device *device,
|
||||||
|
|
||||||
load_op->clears_loads_state.mrt_setup = &dr_info->hw_render.init_setup;
|
load_op->clears_loads_state.mrt_setup = &dr_info->hw_render.init_setup;
|
||||||
|
|
||||||
result = pvr_load_op_shader_generate(device, alloc, load_op);
|
result = pvr_arch_load_op_shader_generate(device, alloc, load_op);
|
||||||
if (result != VK_SUCCESS) {
|
if (result != VK_SUCCESS) {
|
||||||
vk_free2(&device->vk.alloc, alloc, load_op);
|
vk_free2(&device->vk.alloc, alloc, load_op);
|
||||||
return result;
|
return result;
|
||||||
|
|
@ -328,9 +328,9 @@ static void pvr_load_op_destroy(struct pvr_device *device,
|
||||||
vk_free2(&device->vk.alloc, allocator, load_op);
|
vk_free2(&device->vk.alloc, allocator, load_op);
|
||||||
}
|
}
|
||||||
|
|
||||||
void PVR_PER_ARCH(mrt_load_op_state_cleanup)(const struct pvr_device *device,
|
void pvr_arch_mrt_load_op_state_cleanup(const struct pvr_device *device,
|
||||||
const VkAllocationCallbacks *alloc,
|
const VkAllocationCallbacks *alloc,
|
||||||
struct pvr_load_op_state *state)
|
struct pvr_load_op_state *state)
|
||||||
{
|
{
|
||||||
if (!state)
|
if (!state)
|
||||||
return;
|
return;
|
||||||
|
|
@ -382,7 +382,7 @@ pvr_mrt_load_op_state_create(struct pvr_device *device,
|
||||||
return VK_SUCCESS;
|
return VK_SUCCESS;
|
||||||
|
|
||||||
err_load_op_state_cleanup:
|
err_load_op_state_cleanup:
|
||||||
pvr_mrt_load_op_state_cleanup(device, alloc, load_op_state);
|
pvr_arch_mrt_load_op_state_cleanup(device, alloc, load_op_state);
|
||||||
|
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
@ -441,10 +441,9 @@ pvr_mrt_add_missing_output_register_write(struct usc_mrt_setup *setup,
|
||||||
return VK_SUCCESS;
|
return VK_SUCCESS;
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult
|
VkResult pvr_arch_mrt_load_ops_setup(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
PVR_PER_ARCH(mrt_load_ops_setup)(struct pvr_cmd_buffer *cmd_buffer,
|
const VkAllocationCallbacks *alloc,
|
||||||
const VkAllocationCallbacks *alloc,
|
struct pvr_load_op_state **load_op_state)
|
||||||
struct pvr_load_op_state **load_op_state)
|
|
||||||
{
|
{
|
||||||
const struct pvr_cmd_buffer_state *state = &cmd_buffer->state;
|
const struct pvr_cmd_buffer_state *state = &cmd_buffer->state;
|
||||||
const struct pvr_dynamic_render_info *dr_info =
|
const struct pvr_dynamic_render_info *dr_info =
|
||||||
|
|
@ -477,7 +476,7 @@ PVR_PER_ARCH(mrt_load_ops_setup)(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(pds_unitex_state_program_create_and_upload)(
|
VkResult pvr_arch_pds_unitex_state_program_create_and_upload(
|
||||||
struct pvr_device *device,
|
struct pvr_device *device,
|
||||||
const VkAllocationCallbacks *allocator,
|
const VkAllocationCallbacks *allocator,
|
||||||
uint32_t texture_kicks,
|
uint32_t texture_kicks,
|
||||||
|
|
@ -593,9 +592,9 @@ static VkResult pvr_pds_fragment_program_create_and_upload(
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult
|
VkResult
|
||||||
PVR_PER_ARCH(load_op_shader_generate)(struct pvr_device *device,
|
pvr_arch_load_op_shader_generate(struct pvr_device *device,
|
||||||
const VkAllocationCallbacks *allocator,
|
const VkAllocationCallbacks *allocator,
|
||||||
struct pvr_load_op *load_op)
|
struct pvr_load_op *load_op)
|
||||||
{
|
{
|
||||||
const struct pvr_device_info *dev_info = &device->pdevice->dev_info;
|
const struct pvr_device_info *dev_info = &device->pdevice->dev_info;
|
||||||
const uint32_t cache_line_size = pvr_get_slc_cache_line_size(dev_info);
|
const uint32_t cache_line_size = pvr_get_slc_cache_line_size(dev_info);
|
||||||
|
|
@ -634,7 +633,7 @@ PVR_PER_ARCH(load_op_shader_generate)(struct pvr_device *device,
|
||||||
* one buffer to be DMAed. See `pvr_load_op_data_create_and_upload()`, where
|
* one buffer to be DMAed. See `pvr_load_op_data_create_and_upload()`, where
|
||||||
* we upload the buffer and upload the code section.
|
* we upload the buffer and upload the code section.
|
||||||
*/
|
*/
|
||||||
result = pvr_pds_unitex_state_program_create_and_upload(
|
result = pvr_arch_pds_unitex_state_program_create_and_upload(
|
||||||
device,
|
device,
|
||||||
allocator,
|
allocator,
|
||||||
1U,
|
1U,
|
||||||
|
|
|
||||||
|
|
@ -208,7 +208,7 @@ pvr_subpass_load_op_init(struct pvr_device *device,
|
||||||
load_op->subpass = subpass;
|
load_op->subpass = subpass;
|
||||||
load_op->clears_loads_state.mrt_setup = &hw_subpass->setup;
|
load_op->clears_loads_state.mrt_setup = &hw_subpass->setup;
|
||||||
|
|
||||||
result = pvr_load_op_shader_generate(device, allocator, load_op);
|
result = pvr_arch_load_op_shader_generate(device, allocator, load_op);
|
||||||
if (result != VK_SUCCESS) {
|
if (result != VK_SUCCESS) {
|
||||||
vk_free2(&device->vk.alloc, allocator, load_op);
|
vk_free2(&device->vk.alloc, allocator, load_op);
|
||||||
return result;
|
return result;
|
||||||
|
|
@ -328,7 +328,7 @@ static VkResult pvr_render_load_op_init(
|
||||||
load_op->view_indices[0] = view_index;
|
load_op->view_indices[0] = view_index;
|
||||||
load_op->view_count = 1;
|
load_op->view_count = 1;
|
||||||
|
|
||||||
return pvr_load_op_shader_generate(device, allocator, load_op);
|
return pvr_arch_load_op_shader_generate(device, allocator, load_op);
|
||||||
}
|
}
|
||||||
|
|
||||||
static void pvr_load_op_fini(struct pvr_load_op *load_op)
|
static void pvr_load_op_fini(struct pvr_load_op *load_op)
|
||||||
|
|
@ -848,8 +848,8 @@ PVR_PER_ARCH(CreateRenderPass2)(VkDevice _device,
|
||||||
*/
|
*/
|
||||||
attachment->is_pbe_downscalable =
|
attachment->is_pbe_downscalable =
|
||||||
PVR_HAS_FEATURE(dev_info, gs_rta_support) &&
|
PVR_HAS_FEATURE(dev_info, gs_rta_support) &&
|
||||||
pvr_format_is_pbe_downscalable(&device->pdevice->dev_info,
|
pvr_arch_format_is_pbe_downscalable(&device->pdevice->dev_info,
|
||||||
attachment->vk_format);
|
attachment->vk_format);
|
||||||
|
|
||||||
if (attachment->sample_count > pass->max_sample_count)
|
if (attachment->sample_count > pass->max_sample_count)
|
||||||
pass->max_sample_count = attachment->sample_count;
|
pass->max_sample_count = attachment->sample_count;
|
||||||
|
|
@ -1034,8 +1034,11 @@ PVR_PER_ARCH(CreateRenderPass2)(VkDevice _device,
|
||||||
pass->max_tilebuffer_count =
|
pass->max_tilebuffer_count =
|
||||||
PVR_SPM_LOAD_IN_BUFFERS_COUNT(&device->pdevice->dev_info);
|
PVR_SPM_LOAD_IN_BUFFERS_COUNT(&device->pdevice->dev_info);
|
||||||
|
|
||||||
result =
|
result = pvr_arch_create_renderpass_hwsetup(device,
|
||||||
pvr_create_renderpass_hwsetup(device, alloc, pass, false, &pass->hw_setup);
|
alloc,
|
||||||
|
pass,
|
||||||
|
false,
|
||||||
|
&pass->hw_setup);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_free_pass;
|
goto err_free_pass;
|
||||||
|
|
||||||
|
|
@ -1050,7 +1053,7 @@ PVR_PER_ARCH(CreateRenderPass2)(VkDevice _device,
|
||||||
return VK_SUCCESS;
|
return VK_SUCCESS;
|
||||||
|
|
||||||
err_destroy_renderpass_hwsetup:
|
err_destroy_renderpass_hwsetup:
|
||||||
pvr_destroy_renderpass_hwsetup(alloc, pass->hw_setup);
|
pvr_arch_destroy_renderpass_hwsetup(alloc, pass->hw_setup);
|
||||||
|
|
||||||
err_free_pass:
|
err_free_pass:
|
||||||
vk_object_base_finish(&pass->base);
|
vk_object_base_finish(&pass->base);
|
||||||
|
|
@ -1075,7 +1078,7 @@ void PVR_PER_ARCH(DestroyRenderPass)(VkDevice _device,
|
||||||
allocator,
|
allocator,
|
||||||
pass,
|
pass,
|
||||||
pass->hw_setup->render_count);
|
pass->hw_setup->render_count);
|
||||||
PVR_PER_ARCH(destroy_renderpass_hwsetup)(allocator, pass->hw_setup);
|
pvr_arch_destroy_renderpass_hwsetup(allocator, pass->hw_setup);
|
||||||
vk_object_base_finish(&pass->base);
|
vk_object_base_finish(&pass->base);
|
||||||
vk_free2(&device->vk.alloc, pAllocator, pass);
|
vk_free2(&device->vk.alloc, pAllocator, pass);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -2736,10 +2736,10 @@ pvr_graphics_pipeline_compile(struct pvr_device *const device,
|
||||||
if (!pCreateInfo->renderPass) {
|
if (!pCreateInfo->renderPass) {
|
||||||
const struct vk_render_pass_state *rp = state->rp;
|
const struct vk_render_pass_state *rp = state->rp;
|
||||||
|
|
||||||
result = pvr_init_usc_mrt_setup(device,
|
result = pvr_arch_init_usc_mrt_setup(device,
|
||||||
rp->color_attachment_count,
|
rp->color_attachment_count,
|
||||||
rp->color_attachment_formats,
|
rp->color_attachment_formats,
|
||||||
&mrt_setup);
|
&mrt_setup);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
@ -2831,7 +2831,7 @@ pvr_graphics_pipeline_compile(struct pvr_device *const device,
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!pCreateInfo->renderPass)
|
if (!pCreateInfo->renderPass)
|
||||||
pvr_destroy_mrt_setup(device, &mrt_setup);
|
pvr_arch_destroy_mrt_setup(device, &mrt_setup);
|
||||||
|
|
||||||
for (mesa_shader_stage stage = 0; stage < MESA_SHADER_STAGES; ++stage) {
|
for (mesa_shader_stage stage = 0; stage < MESA_SHADER_STAGES; ++stage) {
|
||||||
pco_shader **pco = &pco_shaders[stage];
|
pco_shader **pco = &pco_shaders[stage];
|
||||||
|
|
@ -2971,7 +2971,7 @@ err_free_vertex_bo:
|
||||||
err_free_build_context:
|
err_free_build_context:
|
||||||
ralloc_free(shader_mem_ctx);
|
ralloc_free(shader_mem_ctx);
|
||||||
if (!pCreateInfo->renderPass)
|
if (!pCreateInfo->renderPass)
|
||||||
pvr_destroy_mrt_setup(device, &mrt_setup);
|
pvr_arch_destroy_mrt_setup(device, &mrt_setup);
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -172,9 +172,9 @@ static VkResult pvr_create_compute_query_precomp_program(
|
||||||
false);
|
false);
|
||||||
|
|
||||||
result =
|
result =
|
||||||
pvr_pds_compute_shader_create_and_upload(device,
|
pvr_arch_pds_compute_shader_create_and_upload(device,
|
||||||
&pds_primary_prog,
|
&pds_primary_prog,
|
||||||
&query_prog->pds_prim_code);
|
&query_prog->pds_prim_code);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_free_usc_bo;
|
goto err_free_usc_bo;
|
||||||
|
|
||||||
|
|
@ -212,10 +212,11 @@ static VkResult pvr_write_compute_query_pds_data_section(
|
||||||
uint64_t *qword_buffer;
|
uint64_t *qword_buffer;
|
||||||
VkResult result;
|
VkResult result;
|
||||||
|
|
||||||
result = pvr_cmd_buffer_alloc_mem(cmd_buffer,
|
result =
|
||||||
cmd_buffer->device->heaps.pds_heap,
|
pvr_arch_cmd_buffer_alloc_mem(cmd_buffer,
|
||||||
PVR_DW_TO_BYTES(info->data_size_in_dwords),
|
cmd_buffer->device->heaps.pds_heap,
|
||||||
&pvr_bo);
|
PVR_DW_TO_BYTES(info->data_size_in_dwords),
|
||||||
|
&pvr_bo);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return result;
|
return result;
|
||||||
|
|
||||||
|
|
@ -329,12 +330,14 @@ static void pvr_write_private_compute_dispatch(
|
||||||
|
|
||||||
assert(sub_cmd->type == PVR_SUB_CMD_TYPE_QUERY);
|
assert(sub_cmd->type == PVR_SUB_CMD_TYPE_QUERY);
|
||||||
|
|
||||||
pvr_compute_update_shared_private(cmd_buffer, &sub_cmd->compute, pipeline);
|
pvr_arch_compute_update_shared_private(cmd_buffer,
|
||||||
pvr_compute_update_kernel_private(cmd_buffer,
|
&sub_cmd->compute,
|
||||||
&sub_cmd->compute,
|
pipeline);
|
||||||
pipeline,
|
pvr_arch_compute_update_kernel_private(cmd_buffer,
|
||||||
workgroup_size);
|
&sub_cmd->compute,
|
||||||
pvr_compute_generate_fence(cmd_buffer, &sub_cmd->compute, false);
|
pipeline,
|
||||||
|
workgroup_size);
|
||||||
|
pvr_arch_compute_generate_fence(cmd_buffer, &sub_cmd->compute, false);
|
||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
|
|
@ -347,7 +350,7 @@ pvr_destroy_compute_query_program(struct pvr_device *device,
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult
|
VkResult
|
||||||
PVR_PER_ARCH(device_create_compute_query_programs)(struct pvr_device *device)
|
pvr_arch_device_create_compute_query_programs(struct pvr_device *device)
|
||||||
{
|
{
|
||||||
VkResult result;
|
VkResult result;
|
||||||
|
|
||||||
|
|
@ -389,8 +392,7 @@ err_destroy_availability_query_program:
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
void PVR_PER_ARCH(device_destroy_compute_query_programs)(
|
void pvr_arch_device_destroy_compute_query_programs(struct pvr_device *device)
|
||||||
struct pvr_device *device)
|
|
||||||
{
|
{
|
||||||
pvr_destroy_compute_query_program(device, &device->availability_shader);
|
pvr_destroy_compute_query_program(device, &device->availability_shader);
|
||||||
pvr_destroy_compute_query_program(device, &device->copy_results_shader);
|
pvr_destroy_compute_query_program(device, &device->copy_results_shader);
|
||||||
|
|
@ -398,9 +400,8 @@ void PVR_PER_ARCH(device_destroy_compute_query_programs)(
|
||||||
}
|
}
|
||||||
|
|
||||||
/* TODO: Split this function into per program type functions. */
|
/* TODO: Split this function into per program type functions. */
|
||||||
VkResult
|
VkResult pvr_arch_add_query_program(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
PVR_PER_ARCH(add_query_program)(struct pvr_cmd_buffer *cmd_buffer,
|
const struct pvr_query_info *query_info)
|
||||||
const struct pvr_query_info *query_info)
|
|
||||||
{
|
{
|
||||||
struct pvr_device *device = cmd_buffer->device;
|
struct pvr_device *device = cmd_buffer->device;
|
||||||
const struct pvr_compute_query_shader *query_prog;
|
const struct pvr_compute_query_shader *query_prog;
|
||||||
|
|
@ -410,7 +411,8 @@ PVR_PER_ARCH(add_query_program)(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
struct pvr_suballoc_bo *pvr_bo;
|
struct pvr_suballoc_bo *pvr_bo;
|
||||||
VkResult result;
|
VkResult result;
|
||||||
|
|
||||||
result = pvr_cmd_buffer_start_sub_cmd(cmd_buffer, PVR_SUB_CMD_TYPE_QUERY);
|
result =
|
||||||
|
pvr_arch_cmd_buffer_start_sub_cmd(cmd_buffer, PVR_SUB_CMD_TYPE_QUERY);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return result;
|
return result;
|
||||||
|
|
||||||
|
|
@ -564,7 +566,7 @@ PVR_PER_ARCH(add_query_program)(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
UNREACHABLE("Invalid query type");
|
UNREACHABLE("Invalid query type");
|
||||||
}
|
}
|
||||||
|
|
||||||
result = pvr_cmd_buffer_upload_general(
|
result = pvr_arch_cmd_buffer_upload_general(
|
||||||
cmd_buffer,
|
cmd_buffer,
|
||||||
const_buffer,
|
const_buffer,
|
||||||
PVR_DW_TO_BYTES(pipeline.const_shared_regs_count),
|
PVR_DW_TO_BYTES(pipeline.const_shared_regs_count),
|
||||||
|
|
@ -592,5 +594,5 @@ PVR_PER_ARCH(add_query_program)(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
|
|
||||||
pvr_write_private_compute_dispatch(cmd_buffer, &pipeline, num_query_indices);
|
pvr_write_private_compute_dispatch(cmd_buffer, &pipeline, num_query_indices);
|
||||||
|
|
||||||
return pvr_cmd_buffer_end_sub_cmd(cmd_buffer);
|
return pvr_arch_cmd_buffer_end_sub_cmd(cmd_buffer);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -89,26 +89,27 @@ static VkResult pvr_queue_init(struct pvr_device *device,
|
||||||
goto err_vk_queue_finish;
|
goto err_vk_queue_finish;
|
||||||
}
|
}
|
||||||
|
|
||||||
result = pvr_transfer_ctx_create(device,
|
result = pvr_arch_transfer_ctx_create(device,
|
||||||
PVR_WINSYS_CTX_PRIORITY_MEDIUM,
|
PVR_WINSYS_CTX_PRIORITY_MEDIUM,
|
||||||
&transfer_ctx);
|
&transfer_ctx);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_vk_queue_finish;
|
goto err_vk_queue_finish;
|
||||||
|
|
||||||
result = pvr_compute_ctx_create(device,
|
result = pvr_arch_compute_ctx_create(device,
|
||||||
PVR_WINSYS_CTX_PRIORITY_MEDIUM,
|
PVR_WINSYS_CTX_PRIORITY_MEDIUM,
|
||||||
&compute_ctx);
|
&compute_ctx);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_transfer_ctx_destroy;
|
goto err_transfer_ctx_destroy;
|
||||||
|
|
||||||
result = pvr_compute_ctx_create(device,
|
result = pvr_arch_compute_ctx_create(device,
|
||||||
PVR_WINSYS_CTX_PRIORITY_MEDIUM,
|
PVR_WINSYS_CTX_PRIORITY_MEDIUM,
|
||||||
&query_ctx);
|
&query_ctx);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_compute_ctx_destroy;
|
goto err_compute_ctx_destroy;
|
||||||
|
|
||||||
result =
|
result = pvr_arch_render_ctx_create(device,
|
||||||
pvr_render_ctx_create(device, PVR_WINSYS_CTX_PRIORITY_MEDIUM, &gfx_ctx);
|
PVR_WINSYS_CTX_PRIORITY_MEDIUM,
|
||||||
|
&gfx_ctx);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
goto err_query_ctx_destroy;
|
goto err_query_ctx_destroy;
|
||||||
|
|
||||||
|
|
@ -123,13 +124,13 @@ static VkResult pvr_queue_init(struct pvr_device *device,
|
||||||
return VK_SUCCESS;
|
return VK_SUCCESS;
|
||||||
|
|
||||||
err_query_ctx_destroy:
|
err_query_ctx_destroy:
|
||||||
pvr_compute_ctx_destroy(query_ctx);
|
pvr_arch_compute_ctx_destroy(query_ctx);
|
||||||
|
|
||||||
err_compute_ctx_destroy:
|
err_compute_ctx_destroy:
|
||||||
pvr_compute_ctx_destroy(compute_ctx);
|
pvr_arch_compute_ctx_destroy(compute_ctx);
|
||||||
|
|
||||||
err_transfer_ctx_destroy:
|
err_transfer_ctx_destroy:
|
||||||
pvr_transfer_ctx_destroy(transfer_ctx);
|
pvr_arch_transfer_ctx_destroy(transfer_ctx);
|
||||||
|
|
||||||
err_vk_queue_finish:
|
err_vk_queue_finish:
|
||||||
vk_queue_finish(&queue->vk);
|
vk_queue_finish(&queue->vk);
|
||||||
|
|
@ -137,8 +138,8 @@ err_vk_queue_finish:
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(queues_create)(struct pvr_device *device,
|
VkResult pvr_arch_queues_create(struct pvr_device *device,
|
||||||
const VkDeviceCreateInfo *pCreateInfo)
|
const VkDeviceCreateInfo *pCreateInfo)
|
||||||
{
|
{
|
||||||
VkResult result;
|
VkResult result;
|
||||||
|
|
||||||
|
|
@ -170,7 +171,7 @@ VkResult PVR_PER_ARCH(queues_create)(struct pvr_device *device,
|
||||||
return VK_SUCCESS;
|
return VK_SUCCESS;
|
||||||
|
|
||||||
err_queues_finish:
|
err_queues_finish:
|
||||||
PVR_PER_ARCH(queues_destroy)(device);
|
pvr_arch_queues_destroy(device);
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -186,15 +187,15 @@ static void pvr_queue_finish(struct pvr_queue *queue)
|
||||||
vk_sync_destroy(&queue->device->vk, queue->last_job_signal_sync[i]);
|
vk_sync_destroy(&queue->device->vk, queue->last_job_signal_sync[i]);
|
||||||
}
|
}
|
||||||
|
|
||||||
pvr_render_ctx_destroy(queue->gfx_ctx);
|
pvr_arch_render_ctx_destroy(queue->gfx_ctx);
|
||||||
pvr_compute_ctx_destroy(queue->query_ctx);
|
pvr_arch_compute_ctx_destroy(queue->query_ctx);
|
||||||
pvr_compute_ctx_destroy(queue->compute_ctx);
|
pvr_arch_compute_ctx_destroy(queue->compute_ctx);
|
||||||
pvr_transfer_ctx_destroy(queue->transfer_ctx);
|
pvr_arch_transfer_ctx_destroy(queue->transfer_ctx);
|
||||||
|
|
||||||
vk_queue_finish(&queue->vk);
|
vk_queue_finish(&queue->vk);
|
||||||
}
|
}
|
||||||
|
|
||||||
void PVR_PER_ARCH(queues_destroy)(struct pvr_device *device)
|
void pvr_arch_queues_destroy(struct pvr_device *device)
|
||||||
{
|
{
|
||||||
for (uint32_t q_idx = 0; q_idx < device->queue_count; q_idx++)
|
for (uint32_t q_idx = 0; q_idx < device->queue_count; q_idx++)
|
||||||
pvr_queue_finish(&device->queues[q_idx]);
|
pvr_queue_finish(&device->queues[q_idx]);
|
||||||
|
|
@ -275,13 +276,13 @@ pvr_process_graphics_cmd_for_view(struct pvr_device *device,
|
||||||
job->geometry_terminate = false;
|
job->geometry_terminate = false;
|
||||||
job->run_frag = false;
|
job->run_frag = false;
|
||||||
|
|
||||||
result =
|
result = pvr_arch_render_job_submit(
|
||||||
pvr_render_job_submit(queue->gfx_ctx,
|
queue->gfx_ctx,
|
||||||
&sub_cmd->job,
|
&sub_cmd->job,
|
||||||
queue->next_job_wait_sync[PVR_JOB_TYPE_GEOM],
|
queue->next_job_wait_sync[PVR_JOB_TYPE_GEOM],
|
||||||
NULL,
|
NULL,
|
||||||
NULL,
|
NULL,
|
||||||
NULL);
|
NULL);
|
||||||
|
|
||||||
job->geometry_terminate = true;
|
job->geometry_terminate = true;
|
||||||
job->run_frag = true;
|
job->run_frag = true;
|
||||||
|
|
@ -303,12 +304,13 @@ pvr_process_graphics_cmd_for_view(struct pvr_device *device,
|
||||||
(view_index * PVR_DW_TO_BYTES(sub_cmd->multiview_ctrl_stream_stride));
|
(view_index * PVR_DW_TO_BYTES(sub_cmd->multiview_ctrl_stream_stride));
|
||||||
}
|
}
|
||||||
|
|
||||||
result = pvr_render_job_submit(queue->gfx_ctx,
|
result =
|
||||||
&sub_cmd->job,
|
pvr_arch_render_job_submit(queue->gfx_ctx,
|
||||||
queue->next_job_wait_sync[PVR_JOB_TYPE_GEOM],
|
&sub_cmd->job,
|
||||||
queue->next_job_wait_sync[PVR_JOB_TYPE_FRAG],
|
queue->next_job_wait_sync[PVR_JOB_TYPE_GEOM],
|
||||||
geom_signal_sync,
|
queue->next_job_wait_sync[PVR_JOB_TYPE_FRAG],
|
||||||
frag_signal_sync);
|
geom_signal_sync,
|
||||||
|
frag_signal_sync);
|
||||||
|
|
||||||
if (original_ctrl_stream_addr.addr > 0)
|
if (original_ctrl_stream_addr.addr > 0)
|
||||||
job->ctrl_stream_addr = original_ctrl_stream_addr;
|
job->ctrl_stream_addr = original_ctrl_stream_addr;
|
||||||
|
|
@ -373,11 +375,11 @@ static VkResult pvr_process_compute_cmd(struct pvr_device *device,
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return result;
|
return result;
|
||||||
|
|
||||||
result =
|
result = pvr_arch_compute_job_submit(
|
||||||
pvr_compute_job_submit(queue->compute_ctx,
|
queue->compute_ctx,
|
||||||
sub_cmd,
|
sub_cmd,
|
||||||
queue->next_job_wait_sync[PVR_JOB_TYPE_COMPUTE],
|
queue->next_job_wait_sync[PVR_JOB_TYPE_COMPUTE],
|
||||||
sync);
|
sync);
|
||||||
if (result != VK_SUCCESS) {
|
if (result != VK_SUCCESS) {
|
||||||
vk_sync_destroy(&device->vk, sync);
|
vk_sync_destroy(&device->vk, sync);
|
||||||
return result;
|
return result;
|
||||||
|
|
@ -403,11 +405,11 @@ static VkResult pvr_process_transfer_cmds(struct pvr_device *device,
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return result;
|
return result;
|
||||||
|
|
||||||
result =
|
result = pvr_arch_transfer_job_submit(
|
||||||
pvr_transfer_job_submit(queue->transfer_ctx,
|
queue->transfer_ctx,
|
||||||
sub_cmd,
|
sub_cmd,
|
||||||
queue->next_job_wait_sync[PVR_JOB_TYPE_TRANSFER],
|
queue->next_job_wait_sync[PVR_JOB_TYPE_TRANSFER],
|
||||||
sync);
|
sync);
|
||||||
if (result != VK_SUCCESS) {
|
if (result != VK_SUCCESS) {
|
||||||
vk_sync_destroy(&device->vk, sync);
|
vk_sync_destroy(&device->vk, sync);
|
||||||
return result;
|
return result;
|
||||||
|
|
@ -440,10 +442,10 @@ static VkResult pvr_process_query_cmd(struct pvr_device *device,
|
||||||
return result;
|
return result;
|
||||||
|
|
||||||
result =
|
result =
|
||||||
pvr_compute_job_submit(queue->query_ctx,
|
pvr_arch_compute_job_submit(queue->query_ctx,
|
||||||
sub_cmd,
|
sub_cmd,
|
||||||
queue->next_job_wait_sync[PVR_JOB_TYPE_QUERY],
|
queue->next_job_wait_sync[PVR_JOB_TYPE_QUERY],
|
||||||
sync);
|
sync);
|
||||||
if (result != VK_SUCCESS) {
|
if (result != VK_SUCCESS) {
|
||||||
vk_sync_destroy(&device->vk, sync);
|
vk_sync_destroy(&device->vk, sync);
|
||||||
return result;
|
return result;
|
||||||
|
|
|
||||||
|
|
@ -78,7 +78,7 @@ VkResult PVR_PER_ARCH(CreateSampler)(VkDevice _device,
|
||||||
mag_filter = pCreateInfo->magFilter;
|
mag_filter = pCreateInfo->magFilter;
|
||||||
min_filter = pCreateInfo->minFilter;
|
min_filter = pCreateInfo->minFilter;
|
||||||
|
|
||||||
result = pvr_border_color_table_get_or_create_entry(
|
result = pvr_arch_border_color_table_get_or_create_entry(
|
||||||
device,
|
device,
|
||||||
sampler,
|
sampler,
|
||||||
device->border_color_table,
|
device->border_color_table,
|
||||||
|
|
@ -229,8 +229,8 @@ void PVR_PER_ARCH(DestroySampler)(VkDevice _device,
|
||||||
if (!sampler)
|
if (!sampler)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
pvr_border_color_table_release_entry(device->border_color_table,
|
pvr_arch_border_color_table_release_entry(device->border_color_table,
|
||||||
sampler->border_color_table_index);
|
sampler->border_color_table_index);
|
||||||
|
|
||||||
vk_sampler_destroy(&device->vk, pAllocator, &sampler->vk);
|
vk_sampler_destroy(&device->vk, pAllocator, &sampler->vk);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -36,7 +36,7 @@ struct pvr_spm_scratch_buffer {
|
||||||
uint64_t size;
|
uint64_t size;
|
||||||
};
|
};
|
||||||
|
|
||||||
uint64_t PVR_PER_ARCH(spm_scratch_buffer_calc_required_size)(
|
uint64_t pvr_arch_spm_scratch_buffer_calc_required_size(
|
||||||
const struct pvr_renderpass_hwsetup_render *renders,
|
const struct pvr_renderpass_hwsetup_render *renders,
|
||||||
uint32_t render_count,
|
uint32_t render_count,
|
||||||
uint32_t sample_count,
|
uint32_t sample_count,
|
||||||
|
|
@ -69,7 +69,7 @@ uint64_t PVR_PER_ARCH(spm_scratch_buffer_calc_required_size)(
|
||||||
return buffer_size;
|
return buffer_size;
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult pvr_device_init_spm_load_state(struct pvr_device *device)
|
VkResult pvr_arch_device_init_spm_load_state(struct pvr_device *device)
|
||||||
{
|
{
|
||||||
const struct pvr_device_info *dev_info = &device->pdevice->dev_info;
|
const struct pvr_device_info *dev_info = &device->pdevice->dev_info;
|
||||||
uint32_t pds_texture_aligned_offsets[PVR_NUM_SPM_LOAD_SHADERS];
|
uint32_t pds_texture_aligned_offsets[PVR_NUM_SPM_LOAD_SHADERS];
|
||||||
|
|
@ -244,7 +244,7 @@ VkResult pvr_device_init_spm_load_state(struct pvr_device *device)
|
||||||
return VK_SUCCESS;
|
return VK_SUCCESS;
|
||||||
}
|
}
|
||||||
|
|
||||||
void PVR_PER_ARCH(device_finish_spm_load_state)(struct pvr_device *device)
|
void pvr_arch_device_finish_spm_load_state(struct pvr_device *device)
|
||||||
{
|
{
|
||||||
pvr_bo_suballoc_free(device->spm_load_state.pds_programs);
|
pvr_bo_suballoc_free(device->spm_load_state.pds_programs);
|
||||||
pvr_bo_suballoc_free(device->spm_load_state.usc_programs);
|
pvr_bo_suballoc_free(device->spm_load_state.usc_programs);
|
||||||
|
|
@ -310,11 +310,11 @@ static uint64_t pvr_spm_setup_pbe_state(
|
||||||
.source_start = source_start,
|
.source_start = source_start,
|
||||||
};
|
};
|
||||||
|
|
||||||
pvr_pbe_pack_state(dev_info,
|
pvr_arch_pbe_pack_state(dev_info,
|
||||||
&surface_params,
|
&surface_params,
|
||||||
&render_params,
|
&render_params,
|
||||||
pbe_state_words_out,
|
pbe_state_words_out,
|
||||||
pbe_reg_words_out);
|
pbe_reg_words_out);
|
||||||
|
|
||||||
return (uint64_t)stride * framebuffer_size->height * sample_count *
|
return (uint64_t)stride * framebuffer_size->height * sample_count *
|
||||||
PVR_DW_TO_BYTES(dword_count);
|
PVR_DW_TO_BYTES(dword_count);
|
||||||
|
|
@ -452,7 +452,7 @@ static VkResult pvr_pds_pixel_event_program_create_and_upload(
|
||||||
* This sets up an EOT program to store the render pass'es on-chip and
|
* This sets up an EOT program to store the render pass'es on-chip and
|
||||||
* off-chip tile data to the SPM scratch buffer on the EOT event.
|
* off-chip tile data to the SPM scratch buffer on the EOT event.
|
||||||
*/
|
*/
|
||||||
VkResult PVR_PER_ARCH(spm_init_eot_state)(
|
VkResult pvr_arch_spm_init_eot_state(
|
||||||
struct pvr_device *device,
|
struct pvr_device *device,
|
||||||
struct pvr_spm_eot_state *spm_eot_state,
|
struct pvr_spm_eot_state *spm_eot_state,
|
||||||
const struct pvr_render_state *rstate,
|
const struct pvr_render_state *rstate,
|
||||||
|
|
@ -666,7 +666,7 @@ pvr_spm_setup_texture_state_words(struct pvr_device *device,
|
||||||
format_swizzle = pvr_get_format_swizzle(info.format);
|
format_swizzle = pvr_get_format_swizzle(info.format);
|
||||||
memcpy(info.swizzle, format_swizzle, sizeof(info.swizzle));
|
memcpy(info.swizzle, format_swizzle, sizeof(info.swizzle));
|
||||||
|
|
||||||
result = pvr_pack_tex_state(device, &info, &image_descriptor);
|
result = pvr_arch_pack_tex_state(device, &info, &image_descriptor);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return result;
|
return result;
|
||||||
|
|
||||||
|
|
@ -746,7 +746,7 @@ static VkResult pvr_pds_bgnd_program_create_and_upload(
|
||||||
return VK_SUCCESS;
|
return VK_SUCCESS;
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(spm_init_bgobj_state)(
|
VkResult pvr_arch_spm_init_bgobj_state(
|
||||||
struct pvr_device *device,
|
struct pvr_device *device,
|
||||||
struct pvr_spm_bgobj_state *spm_bgobj_state,
|
struct pvr_spm_bgobj_state *spm_bgobj_state,
|
||||||
const struct pvr_render_state *rstate,
|
const struct pvr_render_state *rstate,
|
||||||
|
|
|
||||||
|
|
@ -151,9 +151,9 @@ static uint32_t setup_pck_info(VkFormat vk_format)
|
||||||
return pck_info;
|
return pck_info;
|
||||||
}
|
}
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(pack_tex_state)(struct pvr_device *device,
|
VkResult pvr_arch_pack_tex_state(struct pvr_device *device,
|
||||||
const struct pvr_texture_state_info *info,
|
const struct pvr_texture_state_info *info,
|
||||||
struct pvr_image_descriptor *state)
|
struct pvr_image_descriptor *state)
|
||||||
{
|
{
|
||||||
const struct pvr_device_info *dev_info = &device->pdevice->dev_info;
|
const struct pvr_device_info *dev_info = &device->pdevice->dev_info;
|
||||||
enum pvr_memlayout mem_layout;
|
enum pvr_memlayout mem_layout;
|
||||||
|
|
@ -226,7 +226,7 @@ VkResult PVR_PER_ARCH(pack_tex_state)(struct pvr_device *device,
|
||||||
* to avoid this.
|
* to avoid this.
|
||||||
*/
|
*/
|
||||||
word0.texformat =
|
word0.texformat =
|
||||||
pvr_get_tex_format_aspect(info->format, info->aspect_mask);
|
pvr_arch_get_tex_format_aspect(info->format, info->aspect_mask);
|
||||||
word0.smpcnt = util_logbase2(info->sample_count);
|
word0.smpcnt = util_logbase2(info->sample_count);
|
||||||
word0.swiz0 =
|
word0.swiz0 =
|
||||||
pvr_get_hw_swizzle(VK_COMPONENT_SWIZZLE_R, info->swizzle[0]);
|
pvr_get_hw_swizzle(VK_COMPONENT_SWIZZLE_R, info->swizzle[0]);
|
||||||
|
|
|
||||||
|
|
@ -64,23 +64,25 @@ struct pvr_border_color_table {
|
||||||
#ifdef PVR_PER_ARCH
|
#ifdef PVR_PER_ARCH
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(border_color_table_init)(struct pvr_device *const device);
|
VkResult PVR_PER_ARCH(border_color_table_init)(struct pvr_device *const device);
|
||||||
# define pvr_border_color_table_init PVR_PER_ARCH(border_color_table_init)
|
# define pvr_arch_border_color_table_init \
|
||||||
|
PVR_PER_ARCH(border_color_table_init)
|
||||||
|
|
||||||
void PVR_PER_ARCH(border_color_table_finish)(struct pvr_device *device);
|
void PVR_PER_ARCH(border_color_table_finish)(struct pvr_device *device);
|
||||||
# define pvr_border_color_table_finish PVR_PER_ARCH(border_color_table_finish)
|
# define pvr_arch_border_color_table_finish \
|
||||||
|
PVR_PER_ARCH(border_color_table_finish)
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(border_color_table_get_or_create_entry)(
|
VkResult PVR_PER_ARCH(border_color_table_get_or_create_entry)(
|
||||||
struct pvr_device *device,
|
struct pvr_device *device,
|
||||||
const struct pvr_sampler *sampler,
|
const struct pvr_sampler *sampler,
|
||||||
struct pvr_border_color_table *table,
|
struct pvr_border_color_table *table,
|
||||||
uint32_t *index_out);
|
uint32_t *index_out);
|
||||||
# define pvr_border_color_table_get_or_create_entry \
|
# define pvr_arch_border_color_table_get_or_create_entry \
|
||||||
PVR_PER_ARCH(border_color_table_get_or_create_entry)
|
PVR_PER_ARCH(border_color_table_get_or_create_entry)
|
||||||
|
|
||||||
void PVR_PER_ARCH(border_color_table_release_entry)(
|
void PVR_PER_ARCH(border_color_table_release_entry)(
|
||||||
struct pvr_border_color_table *table,
|
struct pvr_border_color_table *table,
|
||||||
uint32_t index);
|
uint32_t index);
|
||||||
# define pvr_border_color_table_release_entry \
|
# define pvr_arch_border_color_table_release_entry \
|
||||||
PVR_PER_ARCH(border_color_table_release_entry)
|
PVR_PER_ARCH(border_color_table_release_entry)
|
||||||
|
|
||||||
#endif
|
#endif
|
||||||
|
|
|
||||||
|
|
@ -599,7 +599,7 @@ VkResult PVR_PER_ARCH(cmd_buffer_add_transfer_cmd)(
|
||||||
struct pvr_cmd_buffer *cmd_buffer,
|
struct pvr_cmd_buffer *cmd_buffer,
|
||||||
struct pvr_transfer_cmd *transfer_cmd);
|
struct pvr_transfer_cmd *transfer_cmd);
|
||||||
|
|
||||||
# define pvr_cmd_buffer_add_transfer_cmd \
|
# define pvr_arch_cmd_buffer_add_transfer_cmd \
|
||||||
PVR_PER_ARCH(cmd_buffer_add_transfer_cmd)
|
PVR_PER_ARCH(cmd_buffer_add_transfer_cmd)
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(cmd_buffer_alloc_mem)(
|
VkResult PVR_PER_ARCH(cmd_buffer_alloc_mem)(
|
||||||
|
|
@ -608,7 +608,7 @@ VkResult PVR_PER_ARCH(cmd_buffer_alloc_mem)(
|
||||||
uint64_t size,
|
uint64_t size,
|
||||||
struct pvr_suballoc_bo **const pvr_bo_out);
|
struct pvr_suballoc_bo **const pvr_bo_out);
|
||||||
|
|
||||||
# define pvr_cmd_buffer_alloc_mem PVR_PER_ARCH(cmd_buffer_alloc_mem)
|
# define pvr_arch_cmd_buffer_alloc_mem PVR_PER_ARCH(cmd_buffer_alloc_mem)
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(cmd_buffer_upload_general)(
|
VkResult PVR_PER_ARCH(cmd_buffer_upload_general)(
|
||||||
struct pvr_cmd_buffer *const cmd_buffer,
|
struct pvr_cmd_buffer *const cmd_buffer,
|
||||||
|
|
@ -616,7 +616,8 @@ VkResult PVR_PER_ARCH(cmd_buffer_upload_general)(
|
||||||
const size_t size,
|
const size_t size,
|
||||||
struct pvr_suballoc_bo **const pvr_bo_out);
|
struct pvr_suballoc_bo **const pvr_bo_out);
|
||||||
|
|
||||||
# define pvr_cmd_buffer_upload_general PVR_PER_ARCH(cmd_buffer_upload_general)
|
# define pvr_arch_cmd_buffer_upload_general \
|
||||||
|
PVR_PER_ARCH(cmd_buffer_upload_general)
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(cmd_buffer_upload_pds)(
|
VkResult PVR_PER_ARCH(cmd_buffer_upload_pds)(
|
||||||
struct pvr_cmd_buffer *const cmd_buffer,
|
struct pvr_cmd_buffer *const cmd_buffer,
|
||||||
|
|
@ -629,32 +630,33 @@ VkResult PVR_PER_ARCH(cmd_buffer_upload_pds)(
|
||||||
uint64_t min_alignment,
|
uint64_t min_alignment,
|
||||||
struct pvr_pds_upload *const pds_upload_out);
|
struct pvr_pds_upload *const pds_upload_out);
|
||||||
|
|
||||||
# define pvr_cmd_buffer_upload_pds PVR_PER_ARCH(cmd_buffer_upload_pds)
|
# define pvr_arch_cmd_buffer_upload_pds PVR_PER_ARCH(cmd_buffer_upload_pds)
|
||||||
|
|
||||||
VkResult
|
VkResult
|
||||||
PVR_PER_ARCH(cmd_buffer_start_sub_cmd)(struct pvr_cmd_buffer *cmd_buffer,
|
PVR_PER_ARCH(cmd_buffer_start_sub_cmd)(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
enum pvr_sub_cmd_type type);
|
enum pvr_sub_cmd_type type);
|
||||||
|
|
||||||
# define pvr_cmd_buffer_start_sub_cmd PVR_PER_ARCH(cmd_buffer_start_sub_cmd)
|
# define pvr_arch_cmd_buffer_start_sub_cmd \
|
||||||
|
PVR_PER_ARCH(cmd_buffer_start_sub_cmd)
|
||||||
|
|
||||||
VkResult
|
VkResult
|
||||||
PVR_PER_ARCH(cmd_buffer_end_sub_cmd)(struct pvr_cmd_buffer *cmd_buffer);
|
PVR_PER_ARCH(cmd_buffer_end_sub_cmd)(struct pvr_cmd_buffer *cmd_buffer);
|
||||||
|
|
||||||
# define pvr_cmd_buffer_end_sub_cmd PVR_PER_ARCH(cmd_buffer_end_sub_cmd)
|
# define pvr_arch_cmd_buffer_end_sub_cmd PVR_PER_ARCH(cmd_buffer_end_sub_cmd)
|
||||||
|
|
||||||
void PVR_PER_ARCH(compute_generate_fence)(
|
void PVR_PER_ARCH(compute_generate_fence)(
|
||||||
struct pvr_cmd_buffer *cmd_buffer,
|
struct pvr_cmd_buffer *cmd_buffer,
|
||||||
struct pvr_sub_cmd_compute *const sub_cmd,
|
struct pvr_sub_cmd_compute *const sub_cmd,
|
||||||
bool deallocate_shareds);
|
bool deallocate_shareds);
|
||||||
|
|
||||||
# define pvr_compute_generate_fence PVR_PER_ARCH(compute_generate_fence)
|
# define pvr_arch_compute_generate_fence PVR_PER_ARCH(compute_generate_fence)
|
||||||
|
|
||||||
void PVR_PER_ARCH(compute_update_shared_private)(
|
void PVR_PER_ARCH(compute_update_shared_private)(
|
||||||
struct pvr_cmd_buffer *cmd_buffer,
|
struct pvr_cmd_buffer *cmd_buffer,
|
||||||
struct pvr_sub_cmd_compute *const sub_cmd,
|
struct pvr_sub_cmd_compute *const sub_cmd,
|
||||||
struct pvr_private_compute_pipeline *pipeline);
|
struct pvr_private_compute_pipeline *pipeline);
|
||||||
|
|
||||||
# define pvr_compute_update_shared_private \
|
# define pvr_arch_compute_update_shared_private \
|
||||||
PVR_PER_ARCH(compute_update_shared_private)
|
PVR_PER_ARCH(compute_update_shared_private)
|
||||||
|
|
||||||
void PVR_PER_ARCH(compute_update_kernel_private)(
|
void PVR_PER_ARCH(compute_update_kernel_private)(
|
||||||
|
|
@ -663,20 +665,20 @@ void PVR_PER_ARCH(compute_update_kernel_private)(
|
||||||
struct pvr_private_compute_pipeline *pipeline,
|
struct pvr_private_compute_pipeline *pipeline,
|
||||||
const uint32_t global_workgroup_size[static const PVR_WORKGROUP_DIMENSIONS]);
|
const uint32_t global_workgroup_size[static const PVR_WORKGROUP_DIMENSIONS]);
|
||||||
|
|
||||||
# define pvr_compute_update_kernel_private \
|
# define pvr_arch_compute_update_kernel_private \
|
||||||
PVR_PER_ARCH(compute_update_kernel_private)
|
PVR_PER_ARCH(compute_update_kernel_private)
|
||||||
|
|
||||||
VkResult
|
VkResult
|
||||||
PVR_PER_ARCH(add_query_program)(struct pvr_cmd_buffer *cmd_buffer,
|
PVR_PER_ARCH(add_query_program)(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
const struct pvr_query_info *query_info);
|
const struct pvr_query_info *query_info);
|
||||||
|
|
||||||
# define pvr_add_query_program PVR_PER_ARCH(add_query_program)
|
# define pvr_arch_add_query_program PVR_PER_ARCH(add_query_program)
|
||||||
|
|
||||||
void PVR_PER_ARCH(reset_graphics_dirty_state)(
|
void PVR_PER_ARCH(reset_graphics_dirty_state)(
|
||||||
struct pvr_cmd_buffer *const cmd_buffer,
|
struct pvr_cmd_buffer *const cmd_buffer,
|
||||||
bool start_geom);
|
bool start_geom);
|
||||||
|
|
||||||
# define pvr_reset_graphics_dirty_state \
|
# define pvr_arch_reset_graphics_dirty_state \
|
||||||
PVR_PER_ARCH(reset_graphics_dirty_state)
|
PVR_PER_ARCH(reset_graphics_dirty_state)
|
||||||
|
|
||||||
void PVR_PER_ARCH(calculate_vertex_cam_size)(
|
void PVR_PER_ARCH(calculate_vertex_cam_size)(
|
||||||
|
|
@ -686,21 +688,21 @@ void PVR_PER_ARCH(calculate_vertex_cam_size)(
|
||||||
uint32_t *const cam_size_out,
|
uint32_t *const cam_size_out,
|
||||||
uint32_t *const vs_max_instances_out);
|
uint32_t *const vs_max_instances_out);
|
||||||
|
|
||||||
# define pvr_cmd_buffer_end_sub_cmd PVR_PER_ARCH(cmd_buffer_end_sub_cmd)
|
# define pvr_arch_cmd_buffer_end_sub_cmd PVR_PER_ARCH(cmd_buffer_end_sub_cmd)
|
||||||
|
|
||||||
void PVR_PER_ARCH(compute_generate_fence)(
|
void PVR_PER_ARCH(compute_generate_fence)(
|
||||||
struct pvr_cmd_buffer *cmd_buffer,
|
struct pvr_cmd_buffer *cmd_buffer,
|
||||||
struct pvr_sub_cmd_compute *const sub_cmd,
|
struct pvr_sub_cmd_compute *const sub_cmd,
|
||||||
bool deallocate_shareds);
|
bool deallocate_shareds);
|
||||||
|
|
||||||
# define pvr_compute_generate_fence PVR_PER_ARCH(compute_generate_fence)
|
# define pvr_arch_compute_generate_fence PVR_PER_ARCH(compute_generate_fence)
|
||||||
|
|
||||||
void PVR_PER_ARCH(compute_update_shared_private)(
|
void PVR_PER_ARCH(compute_update_shared_private)(
|
||||||
struct pvr_cmd_buffer *cmd_buffer,
|
struct pvr_cmd_buffer *cmd_buffer,
|
||||||
struct pvr_sub_cmd_compute *const sub_cmd,
|
struct pvr_sub_cmd_compute *const sub_cmd,
|
||||||
struct pvr_private_compute_pipeline *pipeline);
|
struct pvr_private_compute_pipeline *pipeline);
|
||||||
|
|
||||||
# define pvr_compute_update_shared_private \
|
# define pvr_arch_compute_update_shared_private \
|
||||||
PVR_PER_ARCH(compute_update_shared_private)
|
PVR_PER_ARCH(compute_update_shared_private)
|
||||||
|
|
||||||
void PVR_PER_ARCH(compute_update_kernel_private)(
|
void PVR_PER_ARCH(compute_update_kernel_private)(
|
||||||
|
|
@ -709,20 +711,20 @@ void PVR_PER_ARCH(compute_update_kernel_private)(
|
||||||
struct pvr_private_compute_pipeline *pipeline,
|
struct pvr_private_compute_pipeline *pipeline,
|
||||||
const uint32_t global_workgroup_size[static const PVR_WORKGROUP_DIMENSIONS]);
|
const uint32_t global_workgroup_size[static const PVR_WORKGROUP_DIMENSIONS]);
|
||||||
|
|
||||||
# define pvr_compute_update_kernel_private \
|
# define pvr_arch_compute_update_kernel_private \
|
||||||
PVR_PER_ARCH(compute_update_kernel_private)
|
PVR_PER_ARCH(compute_update_kernel_private)
|
||||||
|
|
||||||
VkResult
|
VkResult
|
||||||
PVR_PER_ARCH(add_query_program)(struct pvr_cmd_buffer *cmd_buffer,
|
PVR_PER_ARCH(add_query_program)(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
const struct pvr_query_info *query_info);
|
const struct pvr_query_info *query_info);
|
||||||
|
|
||||||
# define pvr_add_query_program PVR_PER_ARCH(add_query_program)
|
# define pvr_arch_add_query_program PVR_PER_ARCH(add_query_program)
|
||||||
|
|
||||||
void PVR_PER_ARCH(reset_graphics_dirty_state)(
|
void PVR_PER_ARCH(reset_graphics_dirty_state)(
|
||||||
struct pvr_cmd_buffer *const cmd_buffer,
|
struct pvr_cmd_buffer *const cmd_buffer,
|
||||||
bool start_geom);
|
bool start_geom);
|
||||||
|
|
||||||
# define pvr_reset_graphics_dirty_state \
|
# define pvr_arch_reset_graphics_dirty_state \
|
||||||
PVR_PER_ARCH(reset_graphics_dirty_state)
|
PVR_PER_ARCH(reset_graphics_dirty_state)
|
||||||
|
|
||||||
void PVR_PER_ARCH(calculate_vertex_cam_size)(
|
void PVR_PER_ARCH(calculate_vertex_cam_size)(
|
||||||
|
|
@ -732,19 +734,21 @@ void PVR_PER_ARCH(calculate_vertex_cam_size)(
|
||||||
uint32_t *const cam_size_out,
|
uint32_t *const cam_size_out,
|
||||||
uint32_t *const vs_max_instances_out);
|
uint32_t *const vs_max_instances_out);
|
||||||
|
|
||||||
# define pvr_calculate_vertex_cam_size PVR_PER_ARCH(calculate_vertex_cam_size)
|
# define pvr_arch_calculate_vertex_cam_size \
|
||||||
|
PVR_PER_ARCH(calculate_vertex_cam_size)
|
||||||
|
|
||||||
const struct pvr_renderpass_hwsetup_subpass *
|
const struct pvr_renderpass_hwsetup_subpass *
|
||||||
PVR_PER_ARCH(get_hw_subpass)(const struct pvr_render_pass *pass,
|
PVR_PER_ARCH(get_hw_subpass)(const struct pvr_render_pass *pass,
|
||||||
const uint32_t subpass);
|
const uint32_t subpass);
|
||||||
|
|
||||||
# define pvr_get_hw_subpass PVR_PER_ARCH(get_hw_subpass)
|
# define pvr_arch_get_hw_subpass PVR_PER_ARCH(get_hw_subpass)
|
||||||
|
|
||||||
struct pvr_renderpass_hwsetup_render *PVR_PER_ARCH(pass_info_get_hw_render)(
|
struct pvr_renderpass_hwsetup_render *PVR_PER_ARCH(pass_info_get_hw_render)(
|
||||||
const struct pvr_render_pass_info *render_pass_info,
|
const struct pvr_render_pass_info *render_pass_info,
|
||||||
uint32_t idx);
|
uint32_t idx);
|
||||||
|
|
||||||
# define pvr_pass_info_get_hw_render PVR_PER_ARCH(pass_info_get_hw_render)
|
# define pvr_arch_pass_info_get_hw_render \
|
||||||
|
PVR_PER_ARCH(pass_info_get_hw_render)
|
||||||
|
|
||||||
#endif /* PVR_PER_ARCH */
|
#endif /* PVR_PER_ARCH */
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -255,26 +255,26 @@ VkResult pvr_csb_bake(struct pvr_csb *csb, struct list_head *bo_list_out);
|
||||||
|
|
||||||
void *PVR_PER_ARCH(csb_alloc_dwords)(struct pvr_csb *csb, uint32_t num_dwords);
|
void *PVR_PER_ARCH(csb_alloc_dwords)(struct pvr_csb *csb, uint32_t num_dwords);
|
||||||
|
|
||||||
# define pvr_csb_alloc_dwords PVR_PER_ARCH(csb_alloc_dwords)
|
# define pvr_arch_csb_alloc_dwords PVR_PER_ARCH(csb_alloc_dwords)
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(csb_copy)(struct pvr_csb *csb_dst,
|
VkResult PVR_PER_ARCH(csb_copy)(struct pvr_csb *csb_dst,
|
||||||
struct pvr_csb *csb_src);
|
struct pvr_csb *csb_src);
|
||||||
|
|
||||||
# define pvr_csb_copy PVR_PER_ARCH(csb_copy)
|
# define pvr_arch_csb_copy PVR_PER_ARCH(csb_copy)
|
||||||
|
|
||||||
void PVR_PER_ARCH(csb_emit_link)(struct pvr_csb *csb,
|
void PVR_PER_ARCH(csb_emit_link)(struct pvr_csb *csb,
|
||||||
pvr_dev_addr_t addr,
|
pvr_dev_addr_t addr,
|
||||||
bool ret);
|
bool ret);
|
||||||
|
|
||||||
# define pvr_csb_emit_link PVR_PER_ARCH(csb_emit_link)
|
# define pvr_arch_csb_emit_link PVR_PER_ARCH(csb_emit_link)
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(csb_emit_return)(struct pvr_csb *csb);
|
VkResult PVR_PER_ARCH(csb_emit_return)(struct pvr_csb *csb);
|
||||||
|
|
||||||
# define pvr_csb_emit_return PVR_PER_ARCH(csb_emit_return)
|
# define pvr_arch_csb_emit_return PVR_PER_ARCH(csb_emit_return)
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(csb_emit_terminate)(struct pvr_csb *csb);
|
VkResult PVR_PER_ARCH(csb_emit_terminate)(struct pvr_csb *csb);
|
||||||
|
|
||||||
# define pvr_csb_emit_terminate PVR_PER_ARCH(csb_emit_terminate)
|
# define pvr_arch_csb_emit_terminate PVR_PER_ARCH(csb_emit_terminate)
|
||||||
|
|
||||||
#endif /* PVR_PER_ARCH */
|
#endif /* PVR_PER_ARCH */
|
||||||
|
|
||||||
|
|
@ -301,14 +301,14 @@ void pvr_csb_dump(const struct pvr_csb *csb,
|
||||||
* used by the caller to modify the command or state
|
* used by the caller to modify the command or state
|
||||||
* information before it's packed.
|
* information before it's packed.
|
||||||
*/
|
*/
|
||||||
#define pvr_csb_emit(csb, cmd, name) \
|
#define pvr_csb_emit(csb, cmd, name) \
|
||||||
for (struct ROGUE_##cmd \
|
for (struct ROGUE_##cmd \
|
||||||
name = { pvr_cmd_header(cmd) }, \
|
name = { pvr_cmd_header(cmd) }, \
|
||||||
*_dst = pvr_csb_alloc_dwords(csb, pvr_cmd_length(cmd)); \
|
*_dst = pvr_arch_csb_alloc_dwords(csb, pvr_cmd_length(cmd)); \
|
||||||
__builtin_expect(_dst != NULL, 1); \
|
__builtin_expect(_dst != NULL, 1); \
|
||||||
({ \
|
({ \
|
||||||
pvr_cmd_pack(cmd)(_dst, &name); \
|
pvr_cmd_pack(cmd)(_dst, &name); \
|
||||||
_dst = NULL; \
|
_dst = NULL; \
|
||||||
}))
|
}))
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
||||||
|
|
@ -229,7 +229,7 @@ uint32_t PVR_PER_ARCH(calc_fscommon_size_and_tiles_in_flight)(
|
||||||
uint32_t fs_common_size,
|
uint32_t fs_common_size,
|
||||||
uint32_t min_tiles_in_flight);
|
uint32_t min_tiles_in_flight);
|
||||||
|
|
||||||
# define pvr_calc_fscommon_size_and_tiles_in_flight \
|
# define pvr_arch_calc_fscommon_size_and_tiles_in_flight \
|
||||||
PVR_PER_ARCH(calc_fscommon_size_and_tiles_in_flight)
|
PVR_PER_ARCH(calc_fscommon_size_and_tiles_in_flight)
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(pds_compute_shader_create_and_upload)(
|
VkResult PVR_PER_ARCH(pds_compute_shader_create_and_upload)(
|
||||||
|
|
@ -237,7 +237,7 @@ VkResult PVR_PER_ARCH(pds_compute_shader_create_and_upload)(
|
||||||
struct pvr_pds_compute_shader_program *program,
|
struct pvr_pds_compute_shader_program *program,
|
||||||
struct pvr_pds_upload *const pds_upload_out);
|
struct pvr_pds_upload *const pds_upload_out);
|
||||||
|
|
||||||
# define pvr_pds_compute_shader_create_and_upload \
|
# define pvr_arch_pds_compute_shader_create_and_upload \
|
||||||
PVR_PER_ARCH(pds_compute_shader_create_and_upload)
|
PVR_PER_ARCH(pds_compute_shader_create_and_upload)
|
||||||
|
|
||||||
#endif /* PVR_PER_ARCH */
|
#endif /* PVR_PER_ARCH */
|
||||||
|
|
|
||||||
|
|
@ -687,7 +687,7 @@ VkResult pvr_GetPhysicalDeviceImageFormatProperties2(
|
||||||
break;
|
break;
|
||||||
case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
|
case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
|
||||||
/* Nothing to do here, it's handled in
|
/* Nothing to do here, it's handled in
|
||||||
* PVR_PER_ARCH(get_image_format_properties)
|
* pvr_get_image_format_properties)
|
||||||
*/
|
*/
|
||||||
break;
|
break;
|
||||||
default:
|
default:
|
||||||
|
|
|
||||||
|
|
@ -306,25 +306,25 @@ pvr_vk_format_get_common_color_channel_count(VkFormat src_format,
|
||||||
#ifdef PVR_PER_ARCH
|
#ifdef PVR_PER_ARCH
|
||||||
|
|
||||||
const struct pvr_format *PVR_PER_ARCH(get_format_table)(unsigned *num_formats);
|
const struct pvr_format *PVR_PER_ARCH(get_format_table)(unsigned *num_formats);
|
||||||
# define pvr_get_format_table PVR_PER_ARCH(get_format_table)
|
# define pvr_arch_get_format_table PVR_PER_ARCH(get_format_table)
|
||||||
|
|
||||||
uint32_t PVR_PER_ARCH(get_tex_format)(VkFormat vk_format);
|
uint32_t PVR_PER_ARCH(get_tex_format)(VkFormat vk_format);
|
||||||
# define pvr_get_tex_format PVR_PER_ARCH(get_tex_format)
|
# define pvr_arch_get_tex_format PVR_PER_ARCH(get_tex_format)
|
||||||
|
|
||||||
uint32_t PVR_PER_ARCH(get_tex_format_aspect)(VkFormat vk_format,
|
uint32_t PVR_PER_ARCH(get_tex_format_aspect)(VkFormat vk_format,
|
||||||
VkImageAspectFlags aspect_mask);
|
VkImageAspectFlags aspect_mask);
|
||||||
# define pvr_get_tex_format_aspect PVR_PER_ARCH(get_tex_format_aspect)
|
# define pvr_arch_get_tex_format_aspect PVR_PER_ARCH(get_tex_format_aspect)
|
||||||
|
|
||||||
uint32_t PVR_PER_ARCH(get_pbe_packmode)(VkFormat vk_format);
|
uint32_t PVR_PER_ARCH(get_pbe_packmode)(VkFormat vk_format);
|
||||||
# define pvr_get_pbe_packmode PVR_PER_ARCH(get_pbe_packmode)
|
# define pvr_arch_get_pbe_packmode PVR_PER_ARCH(get_pbe_packmode)
|
||||||
|
|
||||||
uint32_t PVR_PER_ARCH(get_pbe_accum_format)(VkFormat vk_format);
|
uint32_t PVR_PER_ARCH(get_pbe_accum_format)(VkFormat vk_format);
|
||||||
# define pvr_get_pbe_accum_format PVR_PER_ARCH(get_pbe_accum_format)
|
# define pvr_arch_get_pbe_accum_format PVR_PER_ARCH(get_pbe_accum_format)
|
||||||
|
|
||||||
bool PVR_PER_ARCH(format_is_pbe_downscalable)(
|
bool PVR_PER_ARCH(format_is_pbe_downscalable)(
|
||||||
const struct pvr_device_info *dev_info,
|
const struct pvr_device_info *dev_info,
|
||||||
VkFormat vk_format);
|
VkFormat vk_format);
|
||||||
# define pvr_format_is_pbe_downscalable \
|
# define pvr_arch_format_is_pbe_downscalable \
|
||||||
PVR_PER_ARCH(format_is_pbe_downscalable)
|
PVR_PER_ARCH(format_is_pbe_downscalable)
|
||||||
|
|
||||||
#endif /* PVR_PER_ARCH */
|
#endif /* PVR_PER_ARCH */
|
||||||
|
|
|
||||||
|
|
@ -77,7 +77,7 @@ VkResult PVR_PER_ARCH(render_state_setup)(
|
||||||
uint32_t render_count,
|
uint32_t render_count,
|
||||||
const struct pvr_renderpass_hwsetup_render *renders);
|
const struct pvr_renderpass_hwsetup_render *renders);
|
||||||
|
|
||||||
# define pvr_render_state_setup PVR_PER_ARCH(render_state_setup)
|
# define pvr_arch_render_state_setup PVR_PER_ARCH(render_state_setup)
|
||||||
|
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -229,13 +229,14 @@ VkResult PVR_PER_ARCH(create_renderpass_hwsetup)(
|
||||||
bool disable_merge,
|
bool disable_merge,
|
||||||
struct pvr_renderpass_hwsetup **const hw_setup_out);
|
struct pvr_renderpass_hwsetup **const hw_setup_out);
|
||||||
|
|
||||||
# define pvr_create_renderpass_hwsetup PVR_PER_ARCH(create_renderpass_hwsetup)
|
# define pvr_arch_create_renderpass_hwsetup \
|
||||||
|
PVR_PER_ARCH(create_renderpass_hwsetup)
|
||||||
|
|
||||||
void PVR_PER_ARCH(destroy_renderpass_hwsetup)(
|
void PVR_PER_ARCH(destroy_renderpass_hwsetup)(
|
||||||
const VkAllocationCallbacks *alloc,
|
const VkAllocationCallbacks *alloc,
|
||||||
struct pvr_renderpass_hwsetup *hw_setup);
|
struct pvr_renderpass_hwsetup *hw_setup);
|
||||||
|
|
||||||
# define pvr_destroy_renderpass_hwsetup \
|
# define pvr_arch_destroy_renderpass_hwsetup \
|
||||||
PVR_PER_ARCH(destroy_renderpass_hwsetup)
|
PVR_PER_ARCH(destroy_renderpass_hwsetup)
|
||||||
|
|
||||||
#endif
|
#endif
|
||||||
|
|
|
||||||
|
|
@ -121,7 +121,7 @@ void PVR_PER_ARCH(pbe_pack_state)(
|
||||||
uint32_t pbe_cs_words[static const ROGUE_NUM_PBESTATE_STATE_WORDS],
|
uint32_t pbe_cs_words[static const ROGUE_NUM_PBESTATE_STATE_WORDS],
|
||||||
uint64_t pbe_reg_words[static const ROGUE_NUM_PBESTATE_REG_WORDS]);
|
uint64_t pbe_reg_words[static const ROGUE_NUM_PBESTATE_REG_WORDS]);
|
||||||
|
|
||||||
# define pvr_pbe_pack_state PVR_PER_ARCH(pbe_pack_state)
|
# define pvr_arch_pbe_pack_state PVR_PER_ARCH(pbe_pack_state)
|
||||||
|
|
||||||
/* Helper to calculate pvr_pbe_surf_params::gamma and
|
/* Helper to calculate pvr_pbe_surf_params::gamma and
|
||||||
* pvr_pbe_surf_params::source_format.
|
* pvr_pbe_surf_params::source_format.
|
||||||
|
|
@ -133,7 +133,7 @@ void PVR_PER_ARCH(pbe_get_src_format_and_gamma)(
|
||||||
uint32_t *const src_format_out,
|
uint32_t *const src_format_out,
|
||||||
enum pvr_pbe_gamma *const gamma_out);
|
enum pvr_pbe_gamma *const gamma_out);
|
||||||
|
|
||||||
# define pvr_pbe_get_src_format_and_gamma \
|
# define pvr_arch_pbe_get_src_format_and_gamma \
|
||||||
PVR_PER_ARCH(pbe_get_src_format_and_gamma)
|
PVR_PER_ARCH(pbe_get_src_format_and_gamma)
|
||||||
|
|
||||||
void PVR_PER_ARCH(setup_tiles_in_flight)(
|
void PVR_PER_ARCH(setup_tiles_in_flight)(
|
||||||
|
|
@ -146,7 +146,7 @@ void PVR_PER_ARCH(setup_tiles_in_flight)(
|
||||||
uint32_t *const isp_ctl_out,
|
uint32_t *const isp_ctl_out,
|
||||||
uint32_t *const pixel_ctl_out);
|
uint32_t *const pixel_ctl_out);
|
||||||
|
|
||||||
# define pvr_setup_tiles_in_flight PVR_PER_ARCH(setup_tiles_in_flight)
|
# define pvr_arch_setup_tiles_in_flight PVR_PER_ARCH(setup_tiles_in_flight)
|
||||||
|
|
||||||
#endif /* PVR_PER_ARCH */
|
#endif /* PVR_PER_ARCH */
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -36,6 +36,6 @@ VkResult PVR_PER_ARCH(compute_job_submit)(struct pvr_compute_ctx *ctx,
|
||||||
struct vk_sync *wait,
|
struct vk_sync *wait,
|
||||||
struct vk_sync *signal_sync);
|
struct vk_sync *signal_sync);
|
||||||
|
|
||||||
#define pvr_compute_job_submit PVR_PER_ARCH(compute_job_submit)
|
#define pvr_arch_compute_job_submit PVR_PER_ARCH(compute_job_submit)
|
||||||
|
|
||||||
#endif /* PVR_JOB_COMPUTE_H */
|
#endif /* PVR_JOB_COMPUTE_H */
|
||||||
|
|
|
||||||
|
|
@ -163,32 +163,32 @@ VkResult PVR_PER_ARCH(render_ctx_create)(struct pvr_device *device,
|
||||||
enum pvr_winsys_ctx_priority priority,
|
enum pvr_winsys_ctx_priority priority,
|
||||||
struct pvr_render_ctx **const ctx_out);
|
struct pvr_render_ctx **const ctx_out);
|
||||||
|
|
||||||
#define pvr_render_ctx_create PVR_PER_ARCH(render_ctx_create)
|
#define pvr_arch_render_ctx_create PVR_PER_ARCH(render_ctx_create)
|
||||||
|
|
||||||
void PVR_PER_ARCH(render_ctx_destroy)(struct pvr_render_ctx *ctx);
|
void PVR_PER_ARCH(render_ctx_destroy)(struct pvr_render_ctx *ctx);
|
||||||
|
|
||||||
#define pvr_render_ctx_destroy PVR_PER_ARCH(render_ctx_destroy)
|
#define pvr_arch_render_ctx_destroy PVR_PER_ARCH(render_ctx_destroy)
|
||||||
|
|
||||||
VkResult
|
VkResult
|
||||||
PVR_PER_ARCH(compute_ctx_create)(struct pvr_device *const device,
|
PVR_PER_ARCH(compute_ctx_create)(struct pvr_device *const device,
|
||||||
enum pvr_winsys_ctx_priority priority,
|
enum pvr_winsys_ctx_priority priority,
|
||||||
struct pvr_compute_ctx **const ctx_out);
|
struct pvr_compute_ctx **const ctx_out);
|
||||||
|
|
||||||
#define pvr_compute_ctx_create PVR_PER_ARCH(compute_ctx_create)
|
#define pvr_arch_compute_ctx_create PVR_PER_ARCH(compute_ctx_create)
|
||||||
|
|
||||||
void PVR_PER_ARCH(compute_ctx_destroy)(struct pvr_compute_ctx *ctx);
|
void PVR_PER_ARCH(compute_ctx_destroy)(struct pvr_compute_ctx *ctx);
|
||||||
|
|
||||||
#define pvr_compute_ctx_destroy PVR_PER_ARCH(compute_ctx_destroy)
|
#define pvr_arch_compute_ctx_destroy PVR_PER_ARCH(compute_ctx_destroy)
|
||||||
|
|
||||||
VkResult
|
VkResult
|
||||||
PVR_PER_ARCH(transfer_ctx_create)(struct pvr_device *const device,
|
PVR_PER_ARCH(transfer_ctx_create)(struct pvr_device *const device,
|
||||||
enum pvr_winsys_ctx_priority priority,
|
enum pvr_winsys_ctx_priority priority,
|
||||||
struct pvr_transfer_ctx **const ctx_out);
|
struct pvr_transfer_ctx **const ctx_out);
|
||||||
|
|
||||||
#define pvr_transfer_ctx_create PVR_PER_ARCH(transfer_ctx_create)
|
#define pvr_arch_transfer_ctx_create PVR_PER_ARCH(transfer_ctx_create)
|
||||||
|
|
||||||
void PVR_PER_ARCH(transfer_ctx_destroy)(struct pvr_transfer_ctx *const ctx);
|
void PVR_PER_ARCH(transfer_ctx_destroy)(struct pvr_transfer_ctx *const ctx);
|
||||||
|
|
||||||
#define pvr_transfer_ctx_destroy PVR_PER_ARCH(transfer_ctx_destroy)
|
#define pvr_arch_transfer_ctx_destroy PVR_PER_ARCH(transfer_ctx_destroy)
|
||||||
|
|
||||||
#endif /* PVR_JOB_CONTEXT_H */
|
#endif /* PVR_JOB_CONTEXT_H */
|
||||||
|
|
|
||||||
|
|
@ -173,7 +173,7 @@ void PVR_PER_ARCH(rt_mtile_info_init)(const struct pvr_device_info *dev_info,
|
||||||
uint32_t height,
|
uint32_t height,
|
||||||
uint32_t samples);
|
uint32_t samples);
|
||||||
|
|
||||||
# define pvr_rt_mtile_info_init PVR_PER_ARCH(rt_mtile_info_init)
|
# define pvr_arch_rt_mtile_info_init PVR_PER_ARCH(rt_mtile_info_init)
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(render_target_dataset_create)(
|
VkResult PVR_PER_ARCH(render_target_dataset_create)(
|
||||||
struct pvr_device *device,
|
struct pvr_device *device,
|
||||||
|
|
@ -183,7 +183,7 @@ VkResult PVR_PER_ARCH(render_target_dataset_create)(
|
||||||
uint32_t layers,
|
uint32_t layers,
|
||||||
struct pvr_rt_dataset **const rt_dataset_out);
|
struct pvr_rt_dataset **const rt_dataset_out);
|
||||||
|
|
||||||
# define pvr_render_target_dataset_create \
|
# define pvr_arch_render_target_dataset_create \
|
||||||
PVR_PER_ARCH(render_target_dataset_create)
|
PVR_PER_ARCH(render_target_dataset_create)
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(render_job_submit)(struct pvr_render_ctx *ctx,
|
VkResult PVR_PER_ARCH(render_job_submit)(struct pvr_render_ctx *ctx,
|
||||||
|
|
@ -193,7 +193,7 @@ VkResult PVR_PER_ARCH(render_job_submit)(struct pvr_render_ctx *ctx,
|
||||||
struct vk_sync *signal_sync_geom,
|
struct vk_sync *signal_sync_geom,
|
||||||
struct vk_sync *signal_sync_frag);
|
struct vk_sync *signal_sync_frag);
|
||||||
|
|
||||||
# define pvr_render_job_submit PVR_PER_ARCH(render_job_submit)
|
# define pvr_arch_render_job_submit PVR_PER_ARCH(render_job_submit)
|
||||||
|
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -48,6 +48,6 @@ VkResult PVR_PER_ARCH(transfer_job_submit)(struct pvr_transfer_ctx *ctx,
|
||||||
struct vk_sync *wait,
|
struct vk_sync *wait,
|
||||||
struct vk_sync *signal_sync);
|
struct vk_sync *signal_sync);
|
||||||
|
|
||||||
#define pvr_transfer_job_submit PVR_PER_ARCH(transfer_job_submit)
|
#define pvr_arch_transfer_job_submit PVR_PER_ARCH(transfer_job_submit)
|
||||||
|
|
||||||
#endif /* PVR_JOB_TRANSFER_H */
|
#endif /* PVR_JOB_TRANSFER_H */
|
||||||
|
|
|
||||||
|
|
@ -177,16 +177,16 @@ VkResult PVR_PER_ARCH(init_usc_mrt_setup)(
|
||||||
const VkFormat attachment_formats[attachment_count],
|
const VkFormat attachment_formats[attachment_count],
|
||||||
struct usc_mrt_setup *setup);
|
struct usc_mrt_setup *setup);
|
||||||
|
|
||||||
# define pvr_init_usc_mrt_setup PVR_PER_ARCH(init_usc_mrt_setup)
|
# define pvr_arch_init_usc_mrt_setup PVR_PER_ARCH(init_usc_mrt_setup)
|
||||||
|
|
||||||
void PVR_PER_ARCH(destroy_mrt_setup)(const struct pvr_device *device,
|
void PVR_PER_ARCH(destroy_mrt_setup)(const struct pvr_device *device,
|
||||||
struct usc_mrt_setup *setup);
|
struct usc_mrt_setup *setup);
|
||||||
|
|
||||||
# define pvr_destroy_mrt_setup PVR_PER_ARCH(destroy_mrt_setup)
|
# define pvr_arch_destroy_mrt_setup PVR_PER_ARCH(destroy_mrt_setup)
|
||||||
|
|
||||||
void PVR_PER_ARCH(init_mrt_desc)(VkFormat format, struct usc_mrt_desc *desc);
|
void PVR_PER_ARCH(init_mrt_desc)(VkFormat format, struct usc_mrt_desc *desc);
|
||||||
|
|
||||||
# define pvr_init_mrt_desc PVR_PER_ARCH(init_mrt_desc)
|
# define pvr_arch_init_mrt_desc PVR_PER_ARCH(init_mrt_desc)
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(pds_unitex_state_program_create_and_upload)(
|
VkResult PVR_PER_ARCH(pds_unitex_state_program_create_and_upload)(
|
||||||
struct pvr_device *device,
|
struct pvr_device *device,
|
||||||
|
|
@ -195,7 +195,7 @@ VkResult PVR_PER_ARCH(pds_unitex_state_program_create_and_upload)(
|
||||||
uint32_t uniform_kicks,
|
uint32_t uniform_kicks,
|
||||||
struct pvr_pds_upload *const pds_upload_out);
|
struct pvr_pds_upload *const pds_upload_out);
|
||||||
|
|
||||||
# define pvr_pds_unitex_state_program_create_and_upload \
|
# define pvr_arch_pds_unitex_state_program_create_and_upload \
|
||||||
PVR_PER_ARCH(pds_unitex_state_program_create_and_upload)
|
PVR_PER_ARCH(pds_unitex_state_program_create_and_upload)
|
||||||
|
|
||||||
VkResult
|
VkResult
|
||||||
|
|
@ -203,19 +203,21 @@ VkResult
|
||||||
const VkAllocationCallbacks *allocator,
|
const VkAllocationCallbacks *allocator,
|
||||||
struct pvr_load_op *load_op);
|
struct pvr_load_op *load_op);
|
||||||
|
|
||||||
# define pvr_load_op_shader_generate PVR_PER_ARCH(load_op_shader_generate)
|
# define pvr_arch_load_op_shader_generate \
|
||||||
|
PVR_PER_ARCH(load_op_shader_generate)
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(mrt_load_ops_setup)(struct pvr_cmd_buffer *cmd_buffer,
|
VkResult PVR_PER_ARCH(mrt_load_ops_setup)(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
const VkAllocationCallbacks *alloc,
|
const VkAllocationCallbacks *alloc,
|
||||||
struct pvr_load_op_state **state);
|
struct pvr_load_op_state **state);
|
||||||
|
|
||||||
# define pvr_mrt_load_ops_setup PVR_PER_ARCH(mrt_load_ops_setup)
|
# define pvr_arch_mrt_load_ops_setup PVR_PER_ARCH(mrt_load_ops_setup)
|
||||||
|
|
||||||
void PVR_PER_ARCH(mrt_load_op_state_cleanup)(const struct pvr_device *device,
|
void PVR_PER_ARCH(mrt_load_op_state_cleanup)(const struct pvr_device *device,
|
||||||
const VkAllocationCallbacks *alloc,
|
const VkAllocationCallbacks *alloc,
|
||||||
struct pvr_load_op_state *state);
|
struct pvr_load_op_state *state);
|
||||||
|
|
||||||
# define pvr_mrt_load_op_state_cleanup PVR_PER_ARCH(mrt_load_op_state_cleanup)
|
# define pvr_arch_mrt_load_op_state_cleanup \
|
||||||
|
PVR_PER_ARCH(mrt_load_op_state_cleanup)
|
||||||
|
|
||||||
#endif /* PVR_PER_ARCH */
|
#endif /* PVR_PER_ARCH */
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -78,13 +78,13 @@ VK_DEFINE_NONDISP_HANDLE_CASTS(pvr_query_pool,
|
||||||
VkResult PVR_PER_ARCH(device_create_compute_query_programs)(
|
VkResult PVR_PER_ARCH(device_create_compute_query_programs)(
|
||||||
struct pvr_device *device);
|
struct pvr_device *device);
|
||||||
|
|
||||||
# define pvr_device_create_compute_query_programs \
|
# define pvr_arch_device_create_compute_query_programs \
|
||||||
PVR_PER_ARCH(device_create_compute_query_programs)
|
PVR_PER_ARCH(device_create_compute_query_programs)
|
||||||
|
|
||||||
void PVR_PER_ARCH(device_destroy_compute_query_programs)(
|
void PVR_PER_ARCH(device_destroy_compute_query_programs)(
|
||||||
struct pvr_device *device);
|
struct pvr_device *device);
|
||||||
|
|
||||||
# define pvr_device_destroy_compute_query_programs \
|
# define pvr_arch_device_destroy_compute_query_programs \
|
||||||
PVR_PER_ARCH(device_destroy_compute_query_programs)
|
PVR_PER_ARCH(device_destroy_compute_query_programs)
|
||||||
|
|
||||||
#endif
|
#endif
|
||||||
|
|
|
||||||
|
|
@ -45,10 +45,10 @@ VK_DEFINE_HANDLE_CASTS(pvr_queue, vk.base, VkQueue, VK_OBJECT_TYPE_QUEUE)
|
||||||
#ifdef PVR_PER_ARCH
|
#ifdef PVR_PER_ARCH
|
||||||
VkResult PVR_PER_ARCH(queues_create)(struct pvr_device *device,
|
VkResult PVR_PER_ARCH(queues_create)(struct pvr_device *device,
|
||||||
const VkDeviceCreateInfo *pCreateInfo);
|
const VkDeviceCreateInfo *pCreateInfo);
|
||||||
# define pvr_queues_create PVR_PER_ARCH(queues_create)
|
# define pvr_arch_queues_create PVR_PER_ARCH(queues_create)
|
||||||
|
|
||||||
void PVR_PER_ARCH(queues_destroy)(struct pvr_device *device);
|
void PVR_PER_ARCH(queues_destroy)(struct pvr_device *device);
|
||||||
# define pvr_queues_destroy PVR_PER_ARCH(queues_destroy)
|
# define pvr_arch_queues_destroy PVR_PER_ARCH(queues_destroy)
|
||||||
|
|
||||||
#endif /* PVR_PER_ARCH */
|
#endif /* PVR_PER_ARCH */
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -116,18 +116,18 @@ uint64_t PVR_PER_ARCH(spm_scratch_buffer_calc_required_size)(
|
||||||
uint32_t framebuffer_width,
|
uint32_t framebuffer_width,
|
||||||
uint32_t framebuffer_height);
|
uint32_t framebuffer_height);
|
||||||
|
|
||||||
# define pvr_spm_scratch_buffer_calc_required_size \
|
# define pvr_arch_spm_scratch_buffer_calc_required_size \
|
||||||
PVR_PER_ARCH(spm_scratch_buffer_calc_required_size)
|
PVR_PER_ARCH(spm_scratch_buffer_calc_required_size)
|
||||||
|
|
||||||
/* The SPM load programs are needed for the SPM background object load op. */
|
/* The SPM load programs are needed for the SPM background object load op. */
|
||||||
VkResult PVR_PER_ARCH(device_init_spm_load_state)(struct pvr_device *device);
|
VkResult PVR_PER_ARCH(device_init_spm_load_state)(struct pvr_device *device);
|
||||||
|
|
||||||
# define pvr_device_init_spm_load_state \
|
# define pvr_arch_device_init_spm_load_state \
|
||||||
PVR_PER_ARCH(device_init_spm_load_state)
|
PVR_PER_ARCH(device_init_spm_load_state)
|
||||||
|
|
||||||
void PVR_PER_ARCH(device_finish_spm_load_state)(struct pvr_device *device);
|
void PVR_PER_ARCH(device_finish_spm_load_state)(struct pvr_device *device);
|
||||||
|
|
||||||
# define pvr_device_finish_spm_load_state \
|
# define pvr_arch_device_finish_spm_load_state \
|
||||||
PVR_PER_ARCH(device_finish_spm_load_state)
|
PVR_PER_ARCH(device_finish_spm_load_state)
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(spm_init_eot_state)(
|
VkResult PVR_PER_ARCH(spm_init_eot_state)(
|
||||||
|
|
@ -136,7 +136,7 @@ VkResult PVR_PER_ARCH(spm_init_eot_state)(
|
||||||
const struct pvr_render_state *rstate,
|
const struct pvr_render_state *rstate,
|
||||||
const struct pvr_renderpass_hwsetup_render *hw_render);
|
const struct pvr_renderpass_hwsetup_render *hw_render);
|
||||||
|
|
||||||
# define pvr_spm_init_eot_state PVR_PER_ARCH(spm_init_eot_state)
|
# define pvr_arch_spm_init_eot_state PVR_PER_ARCH(spm_init_eot_state)
|
||||||
|
|
||||||
VkResult PVR_PER_ARCH(spm_init_bgobj_state)(
|
VkResult PVR_PER_ARCH(spm_init_bgobj_state)(
|
||||||
struct pvr_device *device,
|
struct pvr_device *device,
|
||||||
|
|
@ -144,7 +144,7 @@ VkResult PVR_PER_ARCH(spm_init_bgobj_state)(
|
||||||
const struct pvr_render_state *rstate,
|
const struct pvr_render_state *rstate,
|
||||||
const struct pvr_renderpass_hwsetup_render *hw_render);
|
const struct pvr_renderpass_hwsetup_render *hw_render);
|
||||||
|
|
||||||
# define pvr_spm_init_bgobj_state PVR_PER_ARCH(spm_init_bgobj_state)
|
# define pvr_arch_spm_init_bgobj_state PVR_PER_ARCH(spm_init_bgobj_state)
|
||||||
|
|
||||||
#endif /* PVR_PER_ARCH */
|
#endif /* PVR_PER_ARCH */
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -115,7 +115,7 @@ VkResult PVR_PER_ARCH(pack_tex_state)(struct pvr_device *device,
|
||||||
const struct pvr_texture_state_info *info,
|
const struct pvr_texture_state_info *info,
|
||||||
struct pvr_image_descriptor *state);
|
struct pvr_image_descriptor *state);
|
||||||
|
|
||||||
# define pvr_pack_tex_state PVR_PER_ARCH(pack_tex_state)
|
# define pvr_arch_pack_tex_state PVR_PER_ARCH(pack_tex_state)
|
||||||
|
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -413,7 +413,7 @@ void pvr_rogue_CmdBlitImage2(VkCommandBuffer commandBuffer,
|
||||||
transfer_cmd->dst = dst_surface;
|
transfer_cmd->dst = dst_surface;
|
||||||
transfer_cmd->scissor = dst_rect;
|
transfer_cmd->scissor = dst_rect;
|
||||||
|
|
||||||
result = pvr_cmd_buffer_add_transfer_cmd(cmd_buffer, transfer_cmd);
|
result = pvr_arch_cmd_buffer_add_transfer_cmd(cmd_buffer, transfer_cmd);
|
||||||
if (result != VK_SUCCESS) {
|
if (result != VK_SUCCESS) {
|
||||||
vk_free(&cmd_buffer->vk.pool->alloc, transfer_cmd);
|
vk_free(&cmd_buffer->vk.pool->alloc, transfer_cmd);
|
||||||
return;
|
return;
|
||||||
|
|
@ -630,7 +630,7 @@ pvr_copy_or_resolve_image_region(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
transfer_cmd->sources[0].mapping_count++;
|
transfer_cmd->sources[0].mapping_count++;
|
||||||
transfer_cmd->source_count = 1;
|
transfer_cmd->source_count = 1;
|
||||||
|
|
||||||
result = pvr_cmd_buffer_add_transfer_cmd(cmd_buffer, transfer_cmd);
|
result = pvr_arch_cmd_buffer_add_transfer_cmd(cmd_buffer, transfer_cmd);
|
||||||
if (result != VK_SUCCESS) {
|
if (result != VK_SUCCESS) {
|
||||||
vk_free(&cmd_buffer->vk.pool->alloc, transfer_cmd);
|
vk_free(&cmd_buffer->vk.pool->alloc, transfer_cmd);
|
||||||
return result;
|
return result;
|
||||||
|
|
@ -895,7 +895,7 @@ pvr_copy_buffer_to_image_region_format(struct pvr_cmd_buffer *const cmd_buffer,
|
||||||
transfer_cmd->sources[0].mappings[0].dst_rect = transfer_cmd->scissor;
|
transfer_cmd->sources[0].mappings[0].dst_rect = transfer_cmd->scissor;
|
||||||
transfer_cmd->sources[0].mapping_count++;
|
transfer_cmd->sources[0].mapping_count++;
|
||||||
|
|
||||||
result = pvr_cmd_buffer_add_transfer_cmd(cmd_buffer, transfer_cmd);
|
result = pvr_arch_cmd_buffer_add_transfer_cmd(cmd_buffer, transfer_cmd);
|
||||||
if (result != VK_SUCCESS) {
|
if (result != VK_SUCCESS) {
|
||||||
vk_free(&cmd_buffer->vk.pool->alloc, transfer_cmd);
|
vk_free(&cmd_buffer->vk.pool->alloc, transfer_cmd);
|
||||||
return result;
|
return result;
|
||||||
|
|
@ -1081,7 +1081,7 @@ pvr_copy_image_to_buffer_region_format(struct pvr_cmd_buffer *const cmd_buffer,
|
||||||
transfer_cmd->dst = dst_surface;
|
transfer_cmd->dst = dst_surface;
|
||||||
transfer_cmd->scissor = dst_rect;
|
transfer_cmd->scissor = dst_rect;
|
||||||
|
|
||||||
result = pvr_cmd_buffer_add_transfer_cmd(cmd_buffer, transfer_cmd);
|
result = pvr_arch_cmd_buffer_add_transfer_cmd(cmd_buffer, transfer_cmd);
|
||||||
if (result != VK_SUCCESS) {
|
if (result != VK_SUCCESS) {
|
||||||
vk_free(&cmd_buffer->vk.pool->alloc, transfer_cmd);
|
vk_free(&cmd_buffer->vk.pool->alloc, transfer_cmd);
|
||||||
return result;
|
return result;
|
||||||
|
|
@ -1233,7 +1233,7 @@ static VkResult pvr_clear_image_range(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
format,
|
format,
|
||||||
psRange->aspectMask);
|
psRange->aspectMask);
|
||||||
|
|
||||||
result = pvr_cmd_buffer_add_transfer_cmd(cmd_buffer, transfer_cmd);
|
result = pvr_arch_cmd_buffer_add_transfer_cmd(cmd_buffer, transfer_cmd);
|
||||||
if (result != VK_SUCCESS) {
|
if (result != VK_SUCCESS) {
|
||||||
vk_free(&cmd_buffer->vk.pool->alloc, transfer_cmd);
|
vk_free(&cmd_buffer->vk.pool->alloc, transfer_cmd);
|
||||||
return result;
|
return result;
|
||||||
|
|
@ -1407,7 +1407,7 @@ static VkResult pvr_cmd_copy_buffer_region(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
transfer_cmd->sources[0].mapping_count++;
|
transfer_cmd->sources[0].mapping_count++;
|
||||||
}
|
}
|
||||||
|
|
||||||
result = pvr_cmd_buffer_add_transfer_cmd(cmd_buffer, transfer_cmd);
|
result = pvr_arch_cmd_buffer_add_transfer_cmd(cmd_buffer, transfer_cmd);
|
||||||
if (result != VK_SUCCESS) {
|
if (result != VK_SUCCESS) {
|
||||||
vk_free(&cmd_buffer->vk.pool->alloc, transfer_cmd);
|
vk_free(&cmd_buffer->vk.pool->alloc, transfer_cmd);
|
||||||
return result;
|
return result;
|
||||||
|
|
@ -1432,7 +1432,7 @@ void pvr_rogue_CmdUpdateBuffer(VkCommandBuffer commandBuffer,
|
||||||
|
|
||||||
PVR_CHECK_COMMAND_BUFFER_BUILDING_STATE(cmd_buffer);
|
PVR_CHECK_COMMAND_BUFFER_BUILDING_STATE(cmd_buffer);
|
||||||
|
|
||||||
result = pvr_cmd_buffer_upload_general(cmd_buffer, pData, dataSize, &pvr_bo);
|
result = pvr_arch_cmd_buffer_upload_general(cmd_buffer, pData, dataSize, &pvr_bo);
|
||||||
if (result != VK_SUCCESS)
|
if (result != VK_SUCCESS)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
|
|
@ -1604,11 +1604,11 @@ static VkResult pvr_clear_color_attachment_static_create_consts_buffer(
|
||||||
VkResult result;
|
VkResult result;
|
||||||
|
|
||||||
/* TODO: This doesn't need to be aligned to slc size. Alignment to 4 is fine.
|
/* TODO: This doesn't need to be aligned to slc size. Alignment to 4 is fine.
|
||||||
* Change pvr_cmd_buffer_alloc_mem() to take in an alignment?
|
* Change pvr_arch_cmd_buffer_alloc_mem() to take in an alignment?
|
||||||
*/
|
*/
|
||||||
/* TODO: only allocate what's needed, not always
|
/* TODO: only allocate what's needed, not always
|
||||||
* _PVR_CLEAR_ATTACH_DATA_COUNT? */
|
* _PVR_CLEAR_ATTACH_DATA_COUNT? */
|
||||||
result = pvr_cmd_buffer_alloc_mem(cmd_buffer,
|
result = pvr_arch_cmd_buffer_alloc_mem(cmd_buffer,
|
||||||
device->heaps.general_heap,
|
device->heaps.general_heap,
|
||||||
_PVR_CLEAR_ATTACH_DATA_COUNT,
|
_PVR_CLEAR_ATTACH_DATA_COUNT,
|
||||||
&const_shareds_buffer);
|
&const_shareds_buffer);
|
||||||
|
|
@ -1715,9 +1715,9 @@ static VkResult pvr_clear_color_attachment_static(
|
||||||
&dev_clear_state->pds_clear_attachment_program_info[program_idx];
|
&dev_clear_state->pds_clear_attachment_program_info[program_idx];
|
||||||
|
|
||||||
/* TODO: This doesn't need to be aligned to slc size. Alignment to 4 is fine.
|
/* TODO: This doesn't need to be aligned to slc size. Alignment to 4 is fine.
|
||||||
* Change pvr_cmd_buffer_alloc_mem() to take in an alignment?
|
* Change pvr_arch_cmd_buffer_alloc_mem() to take in an alignment?
|
||||||
*/
|
*/
|
||||||
result = pvr_cmd_buffer_alloc_mem(
|
result = pvr_arch_cmd_buffer_alloc_mem(
|
||||||
cmd_buffer,
|
cmd_buffer,
|
||||||
device->heaps.pds_heap,
|
device->heaps.pds_heap,
|
||||||
clear_attachment_program->texture_program_data_size,
|
clear_attachment_program->texture_program_data_size,
|
||||||
|
|
@ -1835,7 +1835,7 @@ static VkResult pvr_add_deferred_rta_clear(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
struct pvr_render_pass_info *pass_info = &cmd_buffer->state.render_pass_info;
|
struct pvr_render_pass_info *pass_info = &cmd_buffer->state.render_pass_info;
|
||||||
struct pvr_sub_cmd_gfx *sub_cmd = &cmd_buffer->state.current_sub_cmd->gfx;
|
struct pvr_sub_cmd_gfx *sub_cmd = &cmd_buffer->state.current_sub_cmd->gfx;
|
||||||
const struct pvr_renderpass_hwsetup_render *hw_render =
|
const struct pvr_renderpass_hwsetup_render *hw_render =
|
||||||
pvr_pass_info_get_hw_render(pass_info, sub_cmd->hw_render_idx);
|
pvr_arch_pass_info_get_hw_render(pass_info, sub_cmd->hw_render_idx);
|
||||||
const struct pvr_image_view *image_view;
|
const struct pvr_image_view *image_view;
|
||||||
const struct pvr_image *image;
|
const struct pvr_image *image;
|
||||||
uint32_t base_layer;
|
uint32_t base_layer;
|
||||||
|
|
@ -1882,7 +1882,7 @@ static VkResult pvr_add_deferred_rta_clear(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
image_view = pass_info->attachments[index];
|
image_view = pass_info->attachments[index];
|
||||||
} else {
|
} else {
|
||||||
const struct pvr_renderpass_hwsetup_subpass *hw_pass =
|
const struct pvr_renderpass_hwsetup_subpass *hw_pass =
|
||||||
pvr_get_hw_subpass(pass_info->pass, pass_info->subpass_idx);
|
pvr_arch_get_hw_subpass(pass_info->pass, pass_info->subpass_idx);
|
||||||
const struct pvr_render_subpass *sub_pass =
|
const struct pvr_render_subpass *sub_pass =
|
||||||
&pass_info->pass->subpasses[hw_pass->index];
|
&pass_info->pass->subpasses[hw_pass->index];
|
||||||
const uint32_t attachment_idx =
|
const uint32_t attachment_idx =
|
||||||
|
|
@ -1958,7 +1958,7 @@ static void pvr_clear_attachments(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
*/
|
*/
|
||||||
|
|
||||||
if (pass) {
|
if (pass) {
|
||||||
hw_pass = pvr_get_hw_subpass(pass, pass_info->subpass_idx);
|
hw_pass = pvr_arch_get_hw_subpass(pass, pass_info->subpass_idx);
|
||||||
multiview_enabled = pass->multiview_enabled;
|
multiview_enabled = pass->multiview_enabled;
|
||||||
} else {
|
} else {
|
||||||
multiview_enabled = pass_info->dr_info->hw_render.multiview_enabled;
|
multiview_enabled = pass_info->dr_info->hw_render.multiview_enabled;
|
||||||
|
|
@ -1967,7 +1967,7 @@ static void pvr_clear_attachments(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
|
|
||||||
assert(cmd_buffer->state.current_sub_cmd->type == PVR_SUB_CMD_TYPE_GRAPHICS);
|
assert(cmd_buffer->state.current_sub_cmd->type == PVR_SUB_CMD_TYPE_GRAPHICS);
|
||||||
|
|
||||||
pvr_reset_graphics_dirty_state(cmd_buffer, false);
|
pvr_arch_reset_graphics_dirty_state(cmd_buffer, false);
|
||||||
|
|
||||||
/* We'll be emitting to the control stream. */
|
/* We'll be emitting to the control stream. */
|
||||||
sub_cmd->empty_cmd = false;
|
sub_cmd->empty_cmd = false;
|
||||||
|
|
@ -2003,7 +2003,7 @@ static void pvr_clear_attachments(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
assert(cmd_buffer->state.current_sub_cmd->is_dynamic_render ||
|
assert(cmd_buffer->state.current_sub_cmd->is_dynamic_render ||
|
||||||
pass->hw_setup->render_count > 0);
|
pass->hw_setup->render_count > 0);
|
||||||
hw_render =
|
hw_render =
|
||||||
pvr_pass_info_get_hw_render(&cmd_buffer->state.render_pass_info, 0);
|
pvr_arch_pass_info_get_hw_render(&cmd_buffer->state.render_pass_info, 0);
|
||||||
|
|
||||||
/* TODO: verify that the hw_render if is_render_init is true is
|
/* TODO: verify that the hw_render if is_render_init is true is
|
||||||
* exclusive to a non dynamic rendering path.
|
* exclusive to a non dynamic rendering path.
|
||||||
|
|
@ -2258,7 +2258,7 @@ static void pvr_clear_attachments(struct pvr_cmd_buffer *cmd_buffer,
|
||||||
pvr_csb_set_relocation_mark(&sub_cmd->control_stream);
|
pvr_csb_set_relocation_mark(&sub_cmd->control_stream);
|
||||||
|
|
||||||
vdm_cs_buffer =
|
vdm_cs_buffer =
|
||||||
pvr_csb_alloc_dwords(&sub_cmd->control_stream, vdm_cs_size_in_dw);
|
pvr_arch_csb_alloc_dwords(&sub_cmd->control_stream, vdm_cs_size_in_dw);
|
||||||
if (!vdm_cs_buffer) {
|
if (!vdm_cs_buffer) {
|
||||||
pvr_cmd_buffer_set_error_unwarned(cmd_buffer,
|
pvr_cmd_buffer_set_error_unwarned(cmd_buffer,
|
||||||
sub_cmd->control_stream.status);
|
sub_cmd->control_stream.status);
|
||||||
|
|
|
||||||
|
|
@ -748,7 +748,7 @@ VkResult pvr_pds_clear_vertex_shader_program_create_and_upload_data(
|
||||||
PDS_GENERATE_DATA_SEGMENT,
|
PDS_GENERATE_DATA_SEGMENT,
|
||||||
dev_info);
|
dev_info);
|
||||||
|
|
||||||
result = pvr_cmd_buffer_upload_pds(cmd_buffer,
|
result = pvr_arch_cmd_buffer_upload_pds(cmd_buffer,
|
||||||
staging_buffer,
|
staging_buffer,
|
||||||
program->data_size,
|
program->data_size,
|
||||||
4,
|
4,
|
||||||
|
|
@ -822,7 +822,7 @@ VkResult pvr_pds_clear_rta_vertex_shader_program_create_and_upload_code(
|
||||||
PDS_GENERATE_CODE_SEGMENT,
|
PDS_GENERATE_CODE_SEGMENT,
|
||||||
dev_info);
|
dev_info);
|
||||||
|
|
||||||
result = pvr_cmd_buffer_upload_pds(cmd_buffer,
|
result = pvr_arch_cmd_buffer_upload_pds(cmd_buffer,
|
||||||
NULL,
|
NULL,
|
||||||
0,
|
0,
|
||||||
0,
|
0,
|
||||||
|
|
@ -880,7 +880,7 @@ void pvr_pack_clear_vdm_state(const struct pvr_device_info *const dev_info,
|
||||||
*/
|
*/
|
||||||
}
|
}
|
||||||
|
|
||||||
pvr_calculate_vertex_cam_size(dev_info,
|
pvr_arch_calculate_vertex_cam_size(dev_info,
|
||||||
vs_output_size,
|
vs_output_size,
|
||||||
true,
|
true,
|
||||||
&cam_size,
|
&cam_size,
|
||||||
|
|
|
||||||
|
|
@ -350,11 +350,11 @@ VkResult PVR_PER_ARCH(srv_render_target_dataset_create)(
|
||||||
/* If not 2 the arrays used in the bridge call will require updating. */
|
/* If not 2 the arrays used in the bridge call will require updating. */
|
||||||
STATIC_ASSERT(ROGUE_FWIF_NUM_RTDATAS == 2);
|
STATIC_ASSERT(ROGUE_FWIF_NUM_RTDATAS == 2);
|
||||||
|
|
||||||
pvr_rt_mtile_info_init(dev_info,
|
pvr_arch_rt_mtile_info_init(dev_info,
|
||||||
&mtile_info,
|
&mtile_info,
|
||||||
create_info->width,
|
create_info->width,
|
||||||
create_info->height,
|
create_info->height,
|
||||||
create_info->samples);
|
create_info->samples);
|
||||||
|
|
||||||
isp_mtile_size = pvr_rogue_get_cr_isp_mtile_size_val(dev_info,
|
isp_mtile_size = pvr_rogue_get_cr_isp_mtile_size_val(dev_info,
|
||||||
&mtile_info,
|
&mtile_info,
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue