Merge branch 'clear_color_image' into 'main'

panvk/csf: Just issue RUN_FRAGMENT for cmdClearColorImage/cmdClearDepthStencilImage

See merge request mesa/mesa!41288
This commit is contained in:
Jakob Sinclair 2026-05-08 02:09:22 +02:00
commit e4a9d55d86
8 changed files with 254 additions and 258 deletions

View file

@ -164,7 +164,6 @@ struct panvk_cs_deps {
enum mali_cs_condition cond;
struct cs_index cond_value;
} dst[PANVK_SUBQUEUE_COUNT];
bool needs_layout_transitions;
};
enum panvk_sb_ids {
@ -541,6 +540,12 @@ panvk_cache_flush_is_nop(const struct panvk_cache_flush_info *cache_flush)
extern const struct vk_command_buffer_ops panvk_per_arch(cmd_buffer_ops);
void panvk_per_arch(cmd_fb_barrier)(struct panvk_cmd_buffer *cmdbuf);
void panvk_per_arch(cmd_clear_image)(
struct panvk_cmd_buffer *cmdbuf, struct panvk_device *dev,
struct panvk_image *image, VkImageLayout image_layout,
const VkClearColorValue *color_value,
const VkClearDepthStencilValue *depth_stencil_value, uint32_t range_count,
const VkImageSubresourceRange *ranges);
#if PAN_ARCH == 10
/* Match against all possible iter_sb values. The constant iter_sb value for
@ -723,14 +728,8 @@ cs_next_iter_sb(struct panvk_cmd_buffer *cmdbuf,
}
}
enum panvk_barrier_stage {
PANVK_BARRIER_STAGE_FIRST,
PANVK_BARRIER_STAGE_AFTER_LAYOUT_TRANSITION,
};
void panvk_per_arch(add_cs_deps)(
struct panvk_cmd_buffer *cmdbuf,
enum panvk_barrier_stage barrier_stage,
const VkDependencyInfo *in,
struct panvk_cs_deps *out,
bool is_set_event);

View file

@ -440,66 +440,40 @@ collect_cs_deps(struct panvk_cmd_buffer *cmdbuf, const VkDependencyInfo *info,
static void
normalize_dependency(struct panvk_sync_scope *src,
struct panvk_sync_scope *dst,
struct panvk_sync_scope transition,
uint32_t src_qfi, uint32_t dst_qfi,
enum panvk_barrier_stage barrier_stage)
uint32_t src_qfi, uint32_t dst_qfi)
{
switch (barrier_stage) {
case PANVK_BARRIER_STAGE_FIRST:
if (transition.stages) {
/* We need to do layout transition, so we want to sync src with layout
* transition, and then later layout transition with dst.
*/
*dst = transition;
}
break;
case PANVK_BARRIER_STAGE_AFTER_LAYOUT_TRANSITION:
/* If transition.stages is empty, there was no layout transition and so we
* won't be waiting for anything.
*/
*src = transition;
break;
}
/* Perform queue family ownership transfer if src and dst are unequal. */
if (src_qfi != dst_qfi) {
/* Only normalize if we're actually syncing acquire, and not layout
* transition, with dst.
*/
if (barrier_stage == PANVK_BARRIER_STAGE_FIRST) {
/* queue family acquire operation */
switch (src_qfi) {
case VK_QUEUE_FAMILY_EXTERNAL:
/* no execution dependency and no availability operation */
*src = (struct panvk_sync_scope){VK_PIPELINE_STAGE_2_NONE, VK_ACCESS_2_NONE};
break;
case VK_QUEUE_FAMILY_FOREIGN_EXT:
/* treat the foreign queue as the host */
*src = (struct panvk_sync_scope){VK_PIPELINE_STAGE_2_HOST_BIT, VK_ACCESS_2_HOST_WRITE_BIT};
break;
default:
break;
}
/* queue family acquire operation */
switch (src_qfi) {
case VK_QUEUE_FAMILY_EXTERNAL:
/* no execution dependency and no availability operation */
*src = (struct panvk_sync_scope){VK_PIPELINE_STAGE_2_NONE,
VK_ACCESS_2_NONE};
break;
case VK_QUEUE_FAMILY_FOREIGN_EXT:
/* treat the foreign queue as the host */
*src = (struct panvk_sync_scope){VK_PIPELINE_STAGE_2_HOST_BIT,
VK_ACCESS_2_HOST_WRITE_BIT};
break;
default:
break;
}
/* Only normalize if we're actually syncing the latest of either src or
* layout transition, with release.
*/
if ((barrier_stage == PANVK_BARRIER_STAGE_FIRST && !transition.stages) ||
(barrier_stage == PANVK_BARRIER_STAGE_AFTER_LAYOUT_TRANSITION && transition.stages)) {
/* queue family release operation */
switch (dst_qfi) {
case VK_QUEUE_FAMILY_EXTERNAL:
/* no execution dependency and no visibility operation */
*dst = (struct panvk_sync_scope){VK_PIPELINE_STAGE_2_NONE, VK_ACCESS_2_NONE};
break;
case VK_QUEUE_FAMILY_FOREIGN_EXT:
/* treat the foreign queue as the host */
*dst = (struct panvk_sync_scope){VK_PIPELINE_STAGE_2_HOST_BIT, VK_ACCESS_2_HOST_WRITE_BIT};
break;
default:
break;
}
/* queue family release operation */
switch (dst_qfi) {
case VK_QUEUE_FAMILY_EXTERNAL:
/* no execution dependency and no visibility operation */
*dst = (struct panvk_sync_scope){VK_PIPELINE_STAGE_2_NONE,
VK_ACCESS_2_NONE};
break;
case VK_QUEUE_FAMILY_FOREIGN_EXT:
/* treat the foreign queue as the host */
*dst = (struct panvk_sync_scope){VK_PIPELINE_STAGE_2_HOST_BIT,
VK_ACCESS_2_HOST_WRITE_BIT};
break;
default:
break;
}
}
@ -511,7 +485,6 @@ normalize_dependency(struct panvk_sync_scope *src,
void
panvk_per_arch(add_cs_deps)(struct panvk_cmd_buffer *cmdbuf,
enum panvk_barrier_stage barrier_stage,
const VkDependencyInfo *in,
struct panvk_cs_deps *out,
bool is_set_event)
@ -535,10 +508,8 @@ panvk_per_arch(add_cs_deps)(struct panvk_cmd_buffer *cmdbuf,
if (is_asymmetric_event)
dst.stages = src.stages;
normalize_dependency(&src, &dst, (struct panvk_sync_scope){0},
VK_QUEUE_FAMILY_IGNORED,
VK_QUEUE_FAMILY_IGNORED,
barrier_stage);
normalize_dependency(&src, &dst, VK_QUEUE_FAMILY_IGNORED,
VK_QUEUE_FAMILY_IGNORED);
collect_cs_deps(cmdbuf, in, src, dst, out);
}
@ -547,10 +518,8 @@ panvk_per_arch(add_cs_deps)(struct panvk_cmd_buffer *cmdbuf,
const VkBufferMemoryBarrier2 *barrier = &in->pBufferMemoryBarriers[i];
struct panvk_sync_scope src = {barrier->srcStageMask, barrier->srcAccessMask};
struct panvk_sync_scope dst = {barrier->dstStageMask, barrier->dstAccessMask};
normalize_dependency(&src, &dst, (struct panvk_sync_scope){0},
barrier->srcQueueFamilyIndex,
barrier->dstQueueFamilyIndex,
barrier_stage);
normalize_dependency(&src, &dst, barrier->srcQueueFamilyIndex,
barrier->dstQueueFamilyIndex);
collect_cs_deps(cmdbuf, in, src, dst, out);
}
@ -559,18 +528,10 @@ panvk_per_arch(add_cs_deps)(struct panvk_cmd_buffer *cmdbuf,
const VkImageMemoryBarrier2 *barrier = &in->pImageMemoryBarriers[i];
struct panvk_sync_scope src = {barrier->srcStageMask, barrier->srcAccessMask};
struct panvk_sync_scope dst = {barrier->dstStageMask, barrier->dstAccessMask};
struct panvk_sync_scope transition;
panvk_per_arch(transition_image_layout_sync_scope)(barrier,
&transition.stages, &transition.access);
normalize_dependency(&src, &dst, transition,
barrier->srcQueueFamilyIndex,
barrier->dstQueueFamilyIndex,
barrier_stage);
normalize_dependency(&src, &dst, barrier->srcQueueFamilyIndex,
barrier->dstQueueFamilyIndex);
collect_cs_deps(cmdbuf, in, src, dst, out);
if (barrier_stage == PANVK_BARRIER_STAGE_FIRST && transition.stages)
out->needs_layout_transitions = true;
}
}
@ -697,30 +658,12 @@ panvk_per_arch(CmdPipelineBarrier2)(VkCommandBuffer commandBuffer,
VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
struct panvk_cs_deps deps = {0};
panvk_per_arch(add_cs_deps)(cmdbuf, PANVK_BARRIER_STAGE_FIRST, pDependencyInfo, &deps, false);
panvk_per_arch(add_cs_deps)(cmdbuf, pDependencyInfo, &deps, false);
if (deps.needs_fb_barrier)
panvk_per_arch(cmd_fb_barrier)(cmdbuf);
panvk_per_arch(emit_barrier)(cmdbuf, deps);
if (deps.needs_layout_transitions) {
for (uint32_t i = 0; i < pDependencyInfo->imageMemoryBarrierCount; i++) {
const VkImageMemoryBarrier2 *barrier = &pDependencyInfo->pImageMemoryBarriers[i];
panvk_per_arch(cmd_transition_image_layout)(commandBuffer, barrier);
}
struct panvk_cs_deps trans_deps = {0};
panvk_per_arch(add_cs_deps)(
cmdbuf, PANVK_BARRIER_STAGE_AFTER_LAYOUT_TRANSITION,
pDependencyInfo, &trans_deps, false);
assert(!trans_deps.needs_fb_barrier);
panvk_per_arch(emit_barrier)(cmdbuf, trans_deps);
}
}
static struct cs_buffer

View file

@ -3648,6 +3648,201 @@ handle_deferred_queries(struct panvk_cmd_buffer *cmdbuf)
}
}
/* Create a driver-internal image view covering a single mip level and a
 * layer range of `image`, for use as the sole attachment of a clear-only
 * render pass.
 *
 * Usage is derived from the image aspects (color vs. depth/stencil), and
 * the view type from the image type and layer count; 3D images are viewed
 * as 2D arrays so each depth slice maps to a layer.
 *
 * Returns NULL if vk_meta_create_image_view() fails.
 */
static struct panvk_image_view *
create_internal_panvk_image_view(struct vk_command_buffer *cmdbuf,
                                 struct vk_meta_device *meta, VkImage image,
                                 uint32_t level, uint32_t base_array_layer,
                                 uint32_t layer_count)
{
   VK_FROM_HANDLE(panvk_image, pan_image, image);

   VkImageViewUsageCreateInfo view_usage = {
      .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
   };
   /* Color-only images become color attachments; anything with a depth or
    * stencil aspect becomes a depth/stencil attachment. */
   if (pan_image->vk.aspects == VK_IMAGE_ASPECT_COLOR_BIT)
      view_usage.usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
   if (pan_image->vk.aspects &
       (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT))
      view_usage.usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;

   VkImageViewType view_type;
   if (pan_image->vk.image_type == VK_IMAGE_TYPE_1D) {
      view_type =
         layer_count == 1 ? VK_IMAGE_VIEW_TYPE_1D : VK_IMAGE_VIEW_TYPE_1D_ARRAY;
   } else {
      assert((pan_image->vk.image_type == VK_IMAGE_TYPE_2D ||
              pan_image->vk.image_type == VK_IMAGE_TYPE_3D) &&
             "Invalid image type");
      /* 3D images are attached as 2D arrays (one layer per depth slice). */
      view_type =
         layer_count == 1 ? VK_IMAGE_VIEW_TYPE_2D : VK_IMAGE_VIEW_TYPE_2D_ARRAY;
   }

   const VkImageViewCreateInfo view_info = {
      .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
      .flags = VK_IMAGE_VIEW_CREATE_DRIVER_INTERNAL_BIT_MESA,
      .pNext = &view_usage,
      .image = image,
      .viewType = view_type,
      .format = pan_image->vk.format,
      .subresourceRange = {
         .aspectMask = pan_image->vk.aspects,
         .baseMipLevel = level,
         .levelCount = 1,
         .baseArrayLayer = base_array_layer,
         .layerCount = layer_count,
      }};

   VkImageView image_view;
   VkResult result =
      vk_meta_create_image_view(cmdbuf, meta, &view_info, &image_view);
   if (unlikely(result != VK_SUCCESS))
      return NULL;

   VK_FROM_HANDLE(panvk_image_view, pan_image_view, image_view);
   return pan_image_view;
}
/* Configure cmdbuf's render state for a clear-only render pass targeting
 * clear_image_view: a single attachment with LOAD_OP_CLEAR and
 * STORE_OP_STORE and no resolve, so issuing the fragment jobs alone
 * performs the clear. The same attachment struct is wired up as color,
 * depth and/or stencil depending on `aspects`. */
static void
setup_clear_render_state(struct panvk_cmd_buffer *cmdbuf,
struct panvk_image_view *clear_image_view,
VkImageLayout image_layout,
const VkClearValue clear_value, uint32_t layer_count,
VkImageAspectFlags aspects)
{
const VkRenderingAttachmentInfo clear_att = {
.sType = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO,
.imageView = panvk_image_view_to_handle(clear_image_view),
.imageLayout = image_layout,
.resolveMode = VK_RESOLVE_MODE_NONE,
.resolveImageView = VK_NULL_HANDLE,
.resolveImageLayout = VK_IMAGE_LAYOUT_UNDEFINED,
/* LOAD_OP_CLEAR + STORE_OP_STORE: the clear happens entirely through
 * the framebuffer load/store path, no draw calls needed. */
.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
.storeOp = VK_ATTACHMENT_STORE_OP_STORE,
.clearValue = clear_value,
};
const bool clear_color = aspects & VK_IMAGE_ASPECT_COLOR_BIT;
const bool clear_depth = aspects & VK_IMAGE_ASPECT_DEPTH_BIT;
const bool clear_stencil = aspects & VK_IMAGE_ASPECT_STENCIL_BIT;
const VkRenderingInfo info = {
.sType = VK_STRUCTURE_TYPE_RENDERING_INFO,
.flags = 0,
/* Full-extent render area: clear the whole mip level of the view. */
.renderArea =
{
.offset = {0, 0},
.extent =
{
.width = clear_image_view->vk.extent.width,
.height = clear_image_view->vk.extent.height,
},
},
.layerCount = layer_count,
.viewMask = 0,
/* bool promotes to 0 or 1 — exactly one color attachment when
 * clearing color, none otherwise. */
.colorAttachmentCount = clear_color,
.pColorAttachments = clear_color ? &clear_att : NULL,
.pDepthAttachment = clear_depth ? &clear_att : NULL,
.pStencilAttachment = clear_stencil ? &clear_att : NULL,
};
panvk_per_arch(cmd_init_render_state)(cmdbuf, &info);
}
/* Tear down the command buffer's current render-pass state: clear the FBD
 * and occlusion-query bookkeeping, drop the tiler context, optionally run
 * attachment resolves, and emit the deferred end-of-render
 * instrumentation on both the vertex/tiler and fragment subqueues.
 * Shared between CmdEndRendering() and the internal clear path. */
static void
reset_render_state(struct panvk_cmd_buffer *cmdbuf, bool suspending,
bool resolve_attachments)
{
memset(&cmdbuf->state.gfx.render.fbds, 0,
sizeof(cmdbuf->state.gfx.render.fbds));
memset(&cmdbuf->state.gfx.render.oq, 0, sizeof(cmdbuf->state.gfx.render.oq));
cmdbuf->state.gfx.render.tiler = 0;
/* If we're finished with this render pass, make sure we reset the flags
 * so any barrier encountered after doesn't try to flush
 * draws. */
cmdbuf->state.gfx.render.flags = 0;
cmdbuf->state.gfx.render.suspended = suspending;
if (resolve_attachments)
panvk_per_arch(cmd_meta_resolve_attachments)(cmdbuf);
struct panvk_device *dev = to_panvk_device(cmdbuf->vk.base.device);
/* NOTE(review): render.flags was zeroed just above, so instr_info reads
 * 0 here; the pre-refactor CmdEndRendering() had the same ordering —
 * confirm this is intended rather than capturing the pre-reset flags. */
struct panvk_instr_end_args instr_info = {
.render = {
.flags = cmdbuf->state.gfx.render.flags,
.fb = &cmdbuf->state.gfx.render.fb.layout,
}};
/* End-of-render instrumentation is deferred until all iterator
 * scoreboard slots have drained, on both subqueues. */
panvk_per_arch(panvk_instr_end_work_async)(
PANVK_SUBQUEUE_VERTEX_TILER, cmdbuf, PANVK_INSTR_WORK_TYPE_RENDER,
&instr_info, cs_defer(dev->csf.sb.all_iters_mask, 0));
panvk_per_arch(panvk_instr_end_work_async)(
PANVK_SUBQUEUE_FRAGMENT, cmdbuf, PANVK_INSTR_WORK_TYPE_RENDER,
&instr_info, cs_defer(dev->csf.sb.all_iters_mask, 0));
}
/* Clear an image by running one clear-only render pass (RUN_FRAGMENT, no
 * draws) per mip level of each subresource range. Exactly one of
 * color_value / depth_stencil_value must be non-NULL and selects which
 * member of the VkClearValue union is used. On any failure the render
 * state is reset and the remaining levels/ranges are skipped. */
void
panvk_per_arch(cmd_clear_image)(
struct panvk_cmd_buffer *cmdbuf, struct panvk_device *dev,
struct panvk_image *image, VkImageLayout image_layout,
const VkClearColorValue *color_value,
const VkClearDepthStencilValue *depth_stencil_value, uint32_t range_count,
const VkImageSubresourceRange *ranges)
{
for (uint32_t r = 0; r < range_count; r++) {
const VkImageSubresourceRange *range = &ranges[r];
const uint32_t level_count =
vk_image_subresource_level_count(&image->vk, range);
for (uint32_t l = 0; l < level_count; l++) {
const uint32_t level = range->baseMipLevel + l;
uint32_t base_array_layer, layer_count;
/* 3D images have no array layers; treat each depth slice of this
 * mip level as a layer instead. */
if (image->vk.image_type == VK_IMAGE_TYPE_3D) {
const VkExtent3D level_extent =
vk_image_mip_level_extent(&image->vk, level);
base_array_layer = 0;
layer_count = level_extent.depth;
} else {
base_array_layer = range->baseArrayLayer;
layer_count = vk_image_subresource_layer_count(&image->vk, range);
}
VkImage p_image = panvk_image_to_handle(image);
struct panvk_image_view *image_view = create_internal_panvk_image_view(
&cmdbuf->vk, &dev->meta, p_image, level, base_array_layer,
layer_count);
if (unlikely(image_view == NULL)) {
/* NOTE(review): bails out silently, leaving later ranges
 * uncleared — presumably the view-creation helper already
 * recorded the error on the command buffer; confirm. */
reset_render_state(cmdbuf, false, false);
return;
}
VkImageAspectFlags aspects;
VkClearValue clear_value;
if (color_value != NULL) {
/* Color clear: take the aspects from the view. */
aspects = image_view->vk.aspects;
clear_value.color = *color_value;
} else {
/* Depth/stencil clear: the range's aspect mask decides whether
 * depth, stencil or both get cleared. */
assert(depth_stencil_value);
aspects = range->aspectMask;
clear_value.depthStencil = *depth_stencil_value;
}
setup_clear_render_state(cmdbuf, image_view, image_layout, clear_value,
layer_count, aspects);
VkResult result = get_render_ctx(cmdbuf);
if (unlikely(result != VK_SUCCESS)) {
reset_render_state(cmdbuf, false, false);
return;
}
/* No geometry: the fragment jobs alone execute the LOAD_OP_CLEAR /
 * STORE_OP_STORE pass set up above. */
issue_fragment_jobs(cmdbuf);
reset_render_state(cmdbuf, false, false);
}
}
}
VKAPI_ATTR void VKAPI_CALL
panvk_per_arch(CmdEndRendering)(VkCommandBuffer commandBuffer)
{
@ -3697,31 +3892,7 @@ panvk_per_arch(CmdEndRendering)(VkCommandBuffer commandBuffer)
return;
}
memset(&cmdbuf->state.gfx.render.fbds, 0,
sizeof(cmdbuf->state.gfx.render.fbds));
memset(&cmdbuf->state.gfx.render.oq, 0, sizeof(cmdbuf->state.gfx.render.oq));
cmdbuf->state.gfx.render.tiler = 0;
/* If we're finished with this render pass, make sure we reset the flags
* so any barrier encountered after EndRendering() doesn't try to flush
* draws. */
cmdbuf->state.gfx.render.flags = 0;
cmdbuf->state.gfx.render.suspended = suspending;
/* If we're not suspending, we need to resolve attachments. */
if (!suspending)
panvk_per_arch(cmd_meta_resolve_attachments)(cmdbuf);
struct panvk_instr_end_args instr_info = {
.render = {
.flags = cmdbuf->state.gfx.render.flags,
.fb = &cmdbuf->state.gfx.render.fb.layout,
}};
struct panvk_device *dev = to_panvk_device(cmdbuf->vk.base.device);
panvk_per_arch(panvk_instr_end_work_async)(
PANVK_SUBQUEUE_VERTEX_TILER, cmdbuf, PANVK_INSTR_WORK_TYPE_RENDER,
&instr_info, cs_defer(dev->csf.sb.all_iters_mask, 0));
panvk_per_arch(panvk_instr_end_work_async)(
PANVK_SUBQUEUE_FRAGMENT, cmdbuf, PANVK_INSTR_WORK_TYPE_RENDER,
&instr_info, cs_defer(dev->csf.sb.all_iters_mask, 0));
bool resolve_attachments = !suspending;
reset_render_state(cmdbuf, suspending, resolve_attachments);
}

View file

@ -7,7 +7,6 @@
#include "panvk_entrypoints.h"
#include "panvk_event.h"
#include "panvk_instr.h"
#include "panvk_meta.h"
#include "util/bitscan.h"
@ -29,7 +28,7 @@ panvk_per_arch(CmdResetEvent2)(VkCommandBuffer commandBuffer, VkEvent _event,
};
struct panvk_cs_deps deps = {0};
panvk_per_arch(add_cs_deps)(cmdbuf, PANVK_BARRIER_STAGE_FIRST, &info, &deps, false);
panvk_per_arch(add_cs_deps)(cmdbuf, &info, &deps, false);
for (uint32_t i = 0; i < PANVK_SUBQUEUE_COUNT; i++) {
struct cs_builder *b = panvk_get_cs_builder(cmdbuf, i);
@ -67,7 +66,7 @@ panvk_per_arch(CmdSetEvent2)(VkCommandBuffer commandBuffer, VkEvent _event,
VK_FROM_HANDLE(panvk_event, event, _event);
struct panvk_cs_deps deps = {0};
panvk_per_arch(add_cs_deps)(cmdbuf, PANVK_BARRIER_STAGE_FIRST, pDependencyInfo, &deps, true);
panvk_per_arch(add_cs_deps)(cmdbuf, pDependencyInfo, &deps, true);
/* vkCmdSetEvents() is not allowed to be called mid-render-pass */
assert(!deps.needs_fb_barrier);
@ -108,12 +107,11 @@ panvk_per_arch(CmdSetEvent2)(VkCommandBuffer commandBuffer, VkEvent _event,
static void
cmd_wait_event(struct panvk_cmd_buffer *cmdbuf, struct panvk_event *event,
const VkDependencyInfo *info, struct panvk_cs_deps *trans_deps,
bool *needs_trans_barrier)
const VkDependencyInfo *info)
{
struct panvk_cs_deps deps = {0};
panvk_per_arch(add_cs_deps)(cmdbuf, PANVK_BARRIER_STAGE_FIRST, info, &deps, false);
panvk_per_arch(add_cs_deps)(cmdbuf, info, &deps, false);
for (uint32_t i = 0; i < PANVK_SUBQUEUE_COUNT; i++) {
struct cs_builder *b = panvk_get_cs_builder(cmdbuf, i);
@ -131,20 +129,6 @@ cmd_wait_event(struct panvk_cmd_buffer *cmdbuf, struct panvk_event *event,
seqno, sync_addr);
}
}
if (deps.needs_layout_transitions) {
for (uint32_t i = 0; i < info->imageMemoryBarrierCount; i++) {
const VkImageMemoryBarrier2 *barrier = &info->pImageMemoryBarriers[i];
panvk_per_arch(cmd_transition_image_layout)(
panvk_cmd_buffer_to_handle(cmdbuf), barrier);
}
panvk_per_arch(add_cs_deps)(
cmdbuf, PANVK_BARRIER_STAGE_AFTER_LAYOUT_TRANSITION,
info, trans_deps, false);
*needs_trans_barrier = true;
}
}
VKAPI_ATTR void VKAPI_CALL
@ -154,19 +138,10 @@ panvk_per_arch(CmdWaitEvents2)(VkCommandBuffer commandBuffer,
{
VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
struct panvk_cs_deps trans_deps = {0};
bool needs_trans_barrier = false;
for (uint32_t i = 0; i < eventCount; i++) {
VK_FROM_HANDLE(panvk_event, event, pEvents[i]);
const VkDependencyInfo *info = &pDependencyInfos[i];
cmd_wait_event(cmdbuf, event, info, &trans_deps, &needs_trans_barrier);
}
if (needs_trans_barrier) {
assert(!trans_deps.needs_fb_barrier);
panvk_per_arch(emit_barrier)(cmdbuf, trans_deps);
cmd_wait_event(cmdbuf, event, info);
}
}

View file

@ -349,17 +349,6 @@ panvk_per_arch(CmdPipelineBarrier2)(VkCommandBuffer commandBuffer,
panvk_per_arch(cmd_close_batch)(cmdbuf);
panvk_per_arch(cmd_open_batch)(cmdbuf);
}
for (uint32_t i = 0; i < pDependencyInfo->imageMemoryBarrierCount; i++) {
const VkImageMemoryBarrier2 *barrier = &pDependencyInfo->pImageMemoryBarriers[i];
panvk_per_arch(cmd_transition_image_layout)(commandBuffer, barrier);
}
/* If we had any layout transition dispatches, the batch will be closed at
* this point, therefore establishing the sync between itself and the
* commands that follow.
*/
}
static void

View file

@ -106,19 +106,7 @@ panvk_per_arch(CmdWaitEvents2)(VkCommandBuffer commandBuffer,
for (uint32_t i = 0; i < eventCount; i++) {
VK_FROM_HANDLE(panvk_event, event, pEvents[i]);
const VkDependencyInfo *info = &pDependencyInfos[i];
panvk_add_wait_event_operation(cmdbuf, event);
for (uint32_t i = 0; i < info->imageMemoryBarrierCount; i++) {
const VkImageMemoryBarrier2 *barrier = &info->pImageMemoryBarriers[i];
panvk_per_arch(cmd_transition_image_layout)(commandBuffer, barrier);
}
/* We don't need to do anything here to establish the sync between layout
* transition dispatches and the commands following the barrier. See the
* comment in ./panvk_vX_cmd_buffer.c:CmdPipelineBarrier2 for details.
*/
}
}

View file

@ -207,12 +207,4 @@ VkResult panvk_per_arch(meta_get_copy_desc_job)(
const struct panvk_shader_desc_state *shader_desc_state,
uint32_t attrib_buf_idx_offset, struct pan_ptr *job_desc);
#endif
void panvk_per_arch(transition_image_layout_sync_scope)(
const VkImageMemoryBarrier2 *barrier,
VkPipelineStageFlags2 *out_stages, VkAccessFlags2 *out_access);
void panvk_per_arch(cmd_transition_image_layout)(
VkCommandBuffer _cmdbuf,
const VkImageMemoryBarrier2 *barrier);
#endif

View file

@ -299,9 +299,14 @@ panvk_per_arch(CmdClearDepthStencilImage)(
struct panvk_cmd_meta_graphics_save_ctx save = {0};
meta_gfx_start(cmdbuf, &save);
#if PAN_ARCH >= 10
panvk_per_arch(cmd_clear_image)(cmdbuf, dev, img, imageLayout, NULL,
pDepthStencil, rangeCount, pRanges);
#else
vk_meta_clear_depth_stencil_image(&cmdbuf->vk, &dev->meta, &img->vk,
imageLayout, pDepthStencil, rangeCount,
pRanges);
#endif
meta_gfx_end(cmdbuf, &save);
}
@ -318,8 +323,13 @@ panvk_per_arch(CmdClearColorImage)(VkCommandBuffer commandBuffer, VkImage image,
struct panvk_cmd_meta_graphics_save_ctx save = {0};
meta_gfx_start(cmdbuf, &save);
#if PAN_ARCH >= 10
panvk_per_arch(cmd_clear_image)(cmdbuf, dev, img, imageLayout, pColor, NULL,
rangeCount, pRanges);
#else
vk_meta_clear_color_image(&cmdbuf->vk, &dev->meta, &img->vk, imageLayout,
img->vk.format, pColor, rangeCount, pRanges);
#endif
meta_gfx_end(cmdbuf, &save);
}
@ -641,77 +651,6 @@ panvk_per_arch(CmdCopyImage2)(VkCommandBuffer commandBuffer,
}
}
/* Return true if any plane selected by the range's (expanded) aspect mask
 * uses an AFBC modifier. */
static bool
panvk_image_has_afbc(struct panvk_image *img, VkImageSubresourceRange range)
{
VkImageAspectFlags aspect_mask =
vk_image_expand_aspect_mask(&img->vk, range.aspectMask);
u_foreach_bit(aspect, aspect_mask) {
unsigned plane_index = panvk_plane_index(img, 1u << aspect);
struct panvk_image_plane *plane = &img->planes[plane_index];
if (drm_is_afbc(plane->image.props.modifier))
return true;
}
return false;
}
/* Return true if this barrier is a queue-family acquire from an external
 * or foreign queue that carries VkExternalMemoryAcquireUnmodifiedEXT with
 * acquireUnmodifiedMemory set, i.e. the memory is guaranteed unchanged
 * since the matching release. */
static bool
panvk_acquire_unmodified(const VkImageMemoryBarrier2 *barrier)
{
if (barrier->srcQueueFamilyIndex != VK_QUEUE_FAMILY_EXTERNAL &&
barrier->srcQueueFamilyIndex != VK_QUEUE_FAMILY_FOREIGN_EXT)
return false;
const VkExternalMemoryAcquireUnmodifiedEXT *acquire_unmodified =
vk_find_struct_const(barrier->pNext,
EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT);
return acquire_unmodified &&
acquire_unmodified->acquireUnmodifiedMemory == VK_TRUE;
}
/* TODO: pass less data than what's in a VkImageMemoryBarrier2 */
/* Describes how to perform a layout transition: an optional command
 * callback plus the sync scope (stages/access) the transition needs. */
struct panvk_image_layout_transition_handler {
void (*cmd)(VkCommandBuffer cmdbuf, const VkImageMemoryBarrier2 *barrier);
VkPipelineStageFlags2 stages;
VkAccessFlags2 access;
};
/* Select the transition handler for a barrier.
 *
 * NOTE(review): as written, both branches return a zero-initialized
 * handler, so the oldLayout/acquire-unmodified check has no effect — no
 * layout transition is ever performed. */
static struct panvk_image_layout_transition_handler
panvk_get_image_layout_transition_handler(const VkImageMemoryBarrier2 *barrier)
{
if (barrier->oldLayout == barrier->newLayout ||
panvk_acquire_unmodified(barrier))
return (struct panvk_image_layout_transition_handler){0};
return (struct panvk_image_layout_transition_handler){0};
}
/* Report the pipeline stages and access mask the layout transition for
 * `barrier` would require (zero when no transition is needed). */
void
panvk_per_arch(transition_image_layout_sync_scope)(
const VkImageMemoryBarrier2 *barrier,
VkPipelineStageFlags2 *out_stages, VkAccessFlags2 *out_access)
{
struct panvk_image_layout_transition_handler handler =
panvk_get_image_layout_transition_handler(barrier);
*out_stages = handler.stages;
*out_access = handler.access;
}
/* Execute the layout transition for `barrier`, if its handler provides a
 * command callback; a NULL callback means no work is required. */
void
panvk_per_arch(cmd_transition_image_layout)(
VkCommandBuffer cmdbuf, const VkImageMemoryBarrier2 *barrier)
{
struct panvk_image_layout_transition_handler handler =
panvk_get_image_layout_transition_handler(barrier);
if (handler.cmd)
handler.cmd(cmdbuf, barrier);
}
void
panvk_per_arch(cmd_meta_resolve_attachments)(struct panvk_cmd_buffer *cmdbuf)
{