vulkan-wsi-layer/wsi/wayland/swapchain.cpp


/*
* Copyright (c) 2017-2025 Arm Limited.
*
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#define VK_USE_PLATFORM_WAYLAND_KHR 1
#include "surface_properties.hpp"
#include <cstdint>
#include <cstring>
#include <cassert>
#include <unistd.h>
#include <cstdlib>
#include <cerrno>
#include <cstdio>
#include <climits>
#include <functional>
#include <algorithm>
#include "swapchain.hpp"
#include "util/drm/drm_utils.hpp"
#include "util/format_modifiers.hpp"
#include "util/helpers.hpp"
#include "util/log.hpp"
#include "util/macros.hpp"
#include "wl_helpers.hpp"
#include <wsi/extensions/image_compression_control.hpp>
#include <wsi/extensions/swapchain_maintenance.hpp>
#include <wsi/extensions/present_id.hpp>
#include <wsi/swapchain_image_create_extensions/external_memory_extension.hpp>
#include "present_timing_handler.hpp"
#include "present_id_wayland.hpp"
#include "present_wait_wayland.hpp"
#include "wp_presentation_feedback.hpp"
namespace wsi
{
namespace wayland
{
swapchain::swapchain(layer::device_private_data &dev_data, const VkAllocationCallbacks *pAllocator,
surface &wsi_surface)
: swapchain_base(dev_data, pAllocator)
, m_display(wsi_surface.get_wl_display())
, m_surface(wsi_surface.get_wl_surface())
, m_wsi_surface(&wsi_surface)
, m_buffer_queue(nullptr)
, m_wsi_allocator(nullptr)
, m_image_creation_parameters({}, m_allocator, {}, {})
{
}
swapchain::~swapchain()
{
teardown();
if (m_buffer_queue != nullptr)
{
wl_display_roundtrip_queue(m_display, m_buffer_queue);
wl_event_queue_destroy(m_buffer_queue);
}
}
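/* Instantiate and register the swapchain extension handlers required for this
 * swapchain, based on the enabled device features and the flags in the
 * swapchain create info: image compression control, present id, swapchain
 * maintenance1, present wait, frame boundary and, in experimental builds,
 * present timing. */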
VkResult swapchain::add_required_extensions(VkDevice device, const VkSwapchainCreateInfoKHR *swapchain_create_info)
{
auto compression_control = wsi_ext_image_compression_control::create(device, swapchain_create_info);
if (compression_control)
{
if (!add_swapchain_extension(m_allocator.make_unique<wsi_ext_image_compression_control>(*compression_control)))
{
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
}
if (m_device_data.is_present_id_enabled() ||
(swapchain_create_info->flags & VK_SWAPCHAIN_CREATE_PRESENT_ID_2_BIT_KHR))
{
#if VULKAN_WSI_LAYER_EXPERIMENTAL
if (!add_swapchain_extension(m_allocator.make_unique<wsi_ext_present_id_wayland>()))
#else
if (!add_swapchain_extension(m_allocator.make_unique<wsi_ext_present_id>()))
#endif
{
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
}
if (m_device_data.is_swapchain_maintenance1_enabled())
{
if (!add_swapchain_extension(m_allocator.make_unique<wsi_ext_swapchain_maintenance1>(m_allocator)))
{
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
}
bool present_wait2;
#if VULKAN_WSI_LAYER_EXPERIMENTAL
constexpr VkSwapchainCreateFlagsKHR present_wait2_mask =
(VK_SWAPCHAIN_CREATE_PRESENT_ID_2_BIT_KHR | VK_SWAPCHAIN_CREATE_PRESENT_WAIT_2_BIT_KHR);
present_wait2 = (swapchain_create_info->flags & present_wait2_mask) == present_wait2_mask;
#else
present_wait2 = false;
#endif
if (m_device_data.is_present_wait_enabled() || present_wait2)
{
if (!add_swapchain_extension(m_allocator.make_unique<wsi_ext_present_wait_wayland>(
*get_swapchain_extension<wsi_ext_present_id>(true), present_wait2)))
{
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
}
if (m_device_data.should_layer_handle_frame_boundary_events())
{
if (!add_swapchain_extension(m_allocator.make_unique<wsi_ext_frame_boundary>(m_device_data)))
{
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
}
#if VULKAN_WSI_LAYER_EXPERIMENTAL
bool swapchain_support_enabled = swapchain_create_info->flags & VK_SWAPCHAIN_CREATE_PRESENT_TIMING_BIT_EXT;
if (swapchain_support_enabled)
{
std::optional<VkTimeDomainKHR> image_first_pixel_visible_time_domain;
if (m_wsi_surface->get_presentation_time_interface() != nullptr)
{
switch (m_wsi_surface->clockid())
{
case CLOCK_MONOTONIC:
image_first_pixel_visible_time_domain = VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR;
break;
case CLOCK_MONOTONIC_RAW:
image_first_pixel_visible_time_domain = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR;
break;
}
}
if (!add_swapchain_extension(wsi_ext_present_timing_wayland::create(
m_device, m_allocator, image_first_pixel_visible_time_domain, swapchain_create_info->minImageCount)))
{
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
}
#endif
return VK_SUCCESS;
}
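/* Wayland-specific initialization: check that the display, surface and dmabuf
 * interface are available, create a dedicated wl_event_queue for buffer
 * events, set up the wsialloc allocator and decide whether a presentation
 * thread should be used for the selected present mode. */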
VkResult swapchain::init_platform(VkDevice device, const VkSwapchainCreateInfoKHR *swapchain_create_info,
bool &use_presentation_thread)
{
UNUSED(device);
UNUSED(swapchain_create_info);
UNUSED(use_presentation_thread);
if ((m_display == nullptr) || (m_surface == nullptr) || (m_wsi_surface->get_dmabuf_interface() == nullptr))
{
return VK_ERROR_INITIALIZATION_FAILED;
}
m_buffer_queue = wl_display_create_queue(m_display);
if (m_buffer_queue == nullptr)
{
WSI_LOG_ERROR("Failed to create buffer wl queue.");
return VK_ERROR_INITIALIZATION_FAILED;
}
auto wsi_allocator = swapchain_wsialloc_allocator::create();
if (!wsi_allocator.has_value())
{
return VK_ERROR_INITIALIZATION_FAILED;
}
m_wsi_allocator = m_allocator.make_unique<swapchain_wsialloc_allocator>(std::move(*wsi_allocator));
if (m_wsi_allocator == nullptr)
{
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
/*
* When the application has chosen VK_PRESENT_MODE_MAILBOX_KHR or
* VK_PRESENT_MODE_FIFO_LATEST_READY_EXT we do not initialize the page-flip
* thread, so present_image can be called directly during vkQueuePresentKHR.
*/
use_presentation_thread = WAYLAND_FIFO_PRESENTATION_THREAD_ENABLED &&
(m_present_mode != VK_PRESENT_MODE_FIFO_LATEST_READY_EXT) &&
(m_present_mode != VK_PRESENT_MODE_MAILBOX_KHR);
auto present_wait = get_swapchain_extension<wsi_ext_present_wait_wayland>();
if (present_wait)
{
present_wait->set_wayland_dispatcher(m_display, m_buffer_queue);
}
#if VULKAN_WSI_LAYER_EXPERIMENTAL
auto *present_timing_ext = get_swapchain_extension<wsi_ext_present_timing_wayland>();
if (present_timing_ext != nullptr)
{
present_timing_ext->init(m_display, m_buffer_queue);
}
#endif
return VK_SUCCESS;
}
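/* Listener callback invoked when the compositor releases a wl_buffer; forwards
 * to swapchain::release_buffer so the corresponding image can be unpresented. */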
VWL_CAPI_CALL(void) buffer_release(void *data, struct wl_buffer *wayl_buffer) VWL_API_POST
{
auto sc = reinterpret_cast<swapchain *>(data);
sc->release_buffer(wayl_buffer);
}
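/* Find the swapchain image that owns the released wl_buffer and mark it as no
 * longer presented. */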
void swapchain::release_buffer(struct wl_buffer *wayl_buffer)
{
uint32_t i;
for (i = 0; i < m_swapchain_images.size(); i++)
{
auto data = reinterpret_cast<wayland_image_data *>(m_swapchain_images[i].data);
if (data && data->buffer == wayl_buffer)
{
unpresent_image(i);
break;
}
}
/* check we found a buffer to unpresent */
assert(i < m_swapchain_images.size());
}
static struct wl_buffer_listener buffer_listener = { buffer_release };
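/* Build the list of importable wsialloc formats and exportable DRM modifiers
 * for the requested image, keeping only format/modifier pairs that are both
 * advertised by the Wayland surface and reported as usable with dma-buf
 * external memory by vkGetPhysicalDeviceImageFormatProperties2KHR. */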
VkResult swapchain::get_surface_compatible_formats(const VkImageCreateInfo &info,
util::vector<wsialloc_format> &importable_formats,
util::vector<uint64_t> &exportable_modifiers,
util::vector<VkDrmFormatModifierPropertiesEXT> &drm_format_props)
{
TRY_LOG(util::get_drm_format_properties(m_device_data.physical_device, info.format, drm_format_props),
"Failed to get format properties");
for (const auto &prop : drm_format_props)
{
bool is_supported = false;
util::drm::drm_format_pair drm_format{ util::drm::vk_to_drm_format(info.format), prop.drmFormatModifier };
for (const auto &format : m_wsi_surface->get_formats())
{
if (format.fourcc == drm_format.fourcc && format.modifier == drm_format.modifier)
{
is_supported = true;
break;
}
}
if (!is_supported)
{
continue;
}
VkExternalImageFormatPropertiesKHR external_props = {};
external_props.sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR;
VkImageFormatProperties2KHR format_props = {};
format_props.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR;
format_props.pNext = &external_props;
VkResult result = VK_SUCCESS;
{
VkPhysicalDeviceExternalImageFormatInfoKHR external_info = {};
external_info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR;
external_info.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
VkPhysicalDeviceImageDrmFormatModifierInfoEXT drm_mod_info = {};
drm_mod_info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT;
drm_mod_info.pNext = &external_info;
drm_mod_info.drmFormatModifier = prop.drmFormatModifier;
drm_mod_info.sharingMode = info.sharingMode;
drm_mod_info.queueFamilyIndexCount = info.queueFamilyIndexCount;
drm_mod_info.pQueueFamilyIndices = info.pQueueFamilyIndices;
VkPhysicalDeviceImageFormatInfo2KHR image_info = {};
image_info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR;
image_info.pNext = &drm_mod_info;
image_info.format = info.format;
image_info.type = info.imageType;
image_info.tiling = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT;
image_info.usage = info.usage;
image_info.flags = info.flags;
VkImageCompressionControlEXT compression_control = {};
if (m_device_data.is_swapchain_compression_control_enabled())
{
auto *ext = get_swapchain_extension<wsi_ext_image_compression_control>();
if (ext)
{
compression_control = ext->get_compression_control_properties();
compression_control.pNext = image_info.pNext;
image_info.pNext = &compression_control;
}
}
result = m_device_data.instance_data.disp.GetPhysicalDeviceImageFormatProperties2KHR(
m_device_data.physical_device, &image_info, &format_props);
}
if (result != VK_SUCCESS)
{
continue;
}
if (format_props.imageFormatProperties.maxExtent.width < info.extent.width ||
format_props.imageFormatProperties.maxExtent.height < info.extent.height ||
format_props.imageFormatProperties.maxExtent.depth < info.extent.depth)
{
continue;
}
if (format_props.imageFormatProperties.maxMipLevels < info.mipLevels ||
format_props.imageFormatProperties.maxArrayLayers < info.arrayLayers)
{
continue;
}
if ((format_props.imageFormatProperties.sampleCounts & info.samples) != info.samples)
{
continue;
}
if (external_props.externalMemoryProperties.externalMemoryFeatures &
VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR)
{
if (!exportable_modifiers.try_push_back(drm_format.modifier))
{
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
}
if (external_props.externalMemoryProperties.externalMemoryFeatures &
VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR)
{
uint64_t flags =
(prop.drmFormatModifierTilingFeatures & VK_FORMAT_FEATURE_DISJOINT_BIT) ? 0 : WSIALLOC_FORMAT_NON_DISJOINT;
wsialloc_format import_format{ drm_format.fourcc, drm_format.modifier, flags };
if (!importable_formats.try_push_back(import_format))
{
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
}
}
return VK_SUCCESS;
}
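/* Allocate the image buffers through the wsialloc allocator, or only negotiate
 * a compatible format when avoid_allocation is set, and record the resulting
 * strides, offsets, buffer fds and plane layout in the image's external memory
 * object. */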
VkResult swapchain::allocate_wsialloc(VkImageCreateInfo &image_create_info, wayland_image_data *image_data,
util::vector<wsialloc_format> &importable_formats,
wsialloc_format *allocated_format, bool avoid_allocation)
{
bool enable_fixed_rate = false;
if (m_device_data.is_swapchain_compression_control_enabled())
{
auto *ext = get_swapchain_extension<wsi_ext_image_compression_control>();
/* For image compression control, additional requirements must be met before the
extension is added to the list, such as the presence of a
VkImageCompressionControlEXT structure in the swapchain create info, so we check
whether we got a valid pointer and only proceed if we did. */
if (ext)
{
if (ext->get_bitmask_for_image_compression_flags() & VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT)
{
enable_fixed_rate = true;
}
}
}
allocation_params params = { (image_create_info.flags & VK_IMAGE_CREATE_PROTECTED_BIT) != 0,
image_create_info.extent, importable_formats, enable_fixed_rate, avoid_allocation };
wsialloc_allocate_result alloc_result = { {}, { 0 }, { 0 }, { -1 }, false };
TRY(m_wsi_allocator->allocate(params, &alloc_result));
*allocated_format = alloc_result.format;
auto &external_memory = image_data->external_mem;
external_memory.set_strides(alloc_result.average_row_strides);
external_memory.set_buffer_fds(alloc_result.buffer_fds);
external_memory.set_offsets(alloc_result.offsets);
uint32_t num_planes = util::drm::drm_fourcc_format_get_num_planes(alloc_result.format.fourcc);
if (!avoid_allocation)
{
uint32_t num_memory_planes = 0;
for (uint32_t i = 0; i < num_planes; ++i)
{
auto it = std::find(std::begin(alloc_result.buffer_fds) + i + 1, std::end(alloc_result.buffer_fds),
alloc_result.buffer_fds[i]);
if (it == std::end(alloc_result.buffer_fds))
{
num_memory_planes++;
}
}
assert(alloc_result.is_disjoint == (num_memory_planes > 1));
external_memory.set_num_memories(num_memory_planes);
}
external_memory.set_format_info(alloc_result.is_disjoint, num_planes);
external_memory.set_memory_handle_type(VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
return VK_SUCCESS;
}
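/* Allocate the buffers for one swapchain image using the format that was
 * negotiated when the first image was created. */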
VkResult swapchain::allocate_image(wayland_image_data *image_data)
{
util::vector<wsialloc_format> importable_formats(util::allocator(m_allocator, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND));
auto &m_allocated_format = m_image_creation_parameters.m_allocated_format;
if (!importable_formats.try_push_back(m_allocated_format))
{
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
TRY_LOG_CALL(allocate_wsialloc(m_image_create_info, image_data, importable_formats, &m_allocated_format, false));
return VK_SUCCESS;
}
VkResult swapchain::create_wl_buffer(const VkImageCreateInfo &image_create_info, swapchain_image &image,
wayland_image_data *image_data)
{
/* create a wl_buffer using the dma_buf protocol */
zwp_linux_buffer_params_v1 *params = zwp_linux_dmabuf_v1_create_params(m_wsi_surface->get_dmabuf_interface());
uint32_t modifier_hi = m_image_creation_parameters.m_allocated_format.modifier >> 32;
uint32_t modifier_low = m_image_creation_parameters.m_allocated_format.modifier & 0xFFFFFFFF;
for (uint32_t plane = 0; plane < image_data->external_mem.get_num_planes(); plane++)
{
zwp_linux_buffer_params_v1_add(params, image_data->external_mem.get_buffer_fds()[plane], plane,
image_data->external_mem.get_offsets()[plane],
image_data->external_mem.get_strides()[plane], modifier_hi, modifier_low);
}
const auto fourcc = util::drm::vk_to_drm_format(image_create_info.format);
assert(image_create_info.extent.width <= INT32_MAX);
assert(image_create_info.extent.height <= INT32_MAX);
image_data->buffer = zwp_linux_buffer_params_v1_create_immed(params, image_create_info.extent.width,
image_create_info.extent.height, fourcc, 0);
zwp_linux_buffer_params_v1_destroy(params);
wl_proxy_set_queue(reinterpret_cast<wl_proxy *>(image_data->buffer), m_buffer_queue);
auto res = wl_buffer_add_listener(image_data->buffer, &buffer_listener, this);
if (res < 0)
{
destroy_image(image);
return VK_ERROR_INITIALIZATION_FAILED;
}
return VK_SUCCESS;
}
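/* Allocate the dma-buf memory for a swapchain image, wrap it in a wl_buffer,
 * import and bind it to the VkImage, and create the presentation fence used
 * for explicit synchronization. */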
VkResult swapchain::allocate_and_bind_swapchain_image(VkImageCreateInfo image_create_info, swapchain_image &image)
{
std::unique_lock<std::recursive_mutex> image_status_lock(m_image_status_mutex);
image.status = swapchain_image::FREE;
assert(image.data != nullptr);
auto image_data = static_cast<wayland_image_data *>(image.data);
TRY_LOG(allocate_image(image_data), "Failed to allocate image");
image_status_lock.unlock();
TRY_LOG(create_wl_buffer(image_create_info, image, image_data), "Failed to create wl_buffer");
TRY_LOG(image_data->external_mem.import_memory_and_bind_swapchain_image(image.image),
"Failed to import memory and bind swapchain image");
/* Initialize presentation fence. */
auto present_fence = sync_fd_fence_sync::create(m_device_data);
if (!present_fence.has_value())
{
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
image_data->present_fence = std::move(present_fence.value());
return VK_SUCCESS;
}
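/* Create the VkImage for a swapchain entry. On the first call this also
 * negotiates the wsialloc format/modifier used for the whole swapchain by
 * querying the surface-compatible formats. */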
VkResult swapchain::create_swapchain_image(VkImageCreateInfo image_create_info, swapchain_image &image)
{
/* Create image_data */
auto image_data = m_allocator.create<wayland_image_data>(1, m_device, m_allocator);
if (image_data == nullptr)
{
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
image.data = image_data;
if (m_image_creation_parameters.m_allocated_format.fourcc == DRM_FORMAT_INVALID)
{
util::vector<wsialloc_format> importable_formats(
util::allocator(m_allocator, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND));
util::vector<uint64_t> exportable_modifiers(util::allocator(m_allocator, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND));
/* Query supported modifiers. */
util::vector<VkDrmFormatModifierPropertiesEXT> drm_format_props(
util::allocator(m_allocator, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND));
TRY_LOG_CALL(
get_surface_compatible_formats(image_create_info, importable_formats, exportable_modifiers, drm_format_props));
/* TODO: Handle exportable images which use ICD allocated memory in preference to an external allocator. */
if (importable_formats.empty())
{
WSI_LOG_ERROR("Export/Import not supported.");
return VK_ERROR_INITIALIZATION_FAILED;
}
wsialloc_format allocated_format = { 0, 0, 0 };
TRY_LOG_CALL(allocate_wsialloc(image_create_info, image_data, importable_formats, &allocated_format, true));
for (auto &prop : drm_format_props)
{
if (prop.drmFormatModifier == allocated_format.modifier)
{
image_data->external_mem.set_num_memories(prop.drmFormatModifierPlaneCount);
}
}
m_image_creation_parameters.m_allocated_format = allocated_format;
}
return m_device_data.disp.CreateImage(m_device, &m_image_create_info, get_allocation_callbacks(), &image.image);
}
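/* Present a swapchain image: wait for the compositor's frame event if one is
 * pending, attach the image's wl_buffer with its acquire fence, set up frame
 * and presentation feedback callbacks as needed, then commit the surface and
 * flush the display. */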
void swapchain::present_image(const pending_present_request &pending_present)
{
int res;
wayland_image_data *image_data =
reinterpret_cast<wayland_image_data *>(m_swapchain_images[pending_present.image_index].data);
/* if a frame is already pending, wait for a hint to present again */
if (!m_wsi_surface->wait_next_frame_event())
{
set_error_state(VK_ERROR_SURFACE_LOST_KHR);
}
wl_surface_attach(m_surface, image_data->buffer, 0, 0);
auto present_sync_fd = image_data->present_fence.export_sync_fd();
if (!present_sync_fd.has_value())
{
WSI_LOG_ERROR("Failed to export present fence.");
set_error_state(VK_ERROR_SURFACE_LOST_KHR);
}
else if (present_sync_fd->is_valid())
{
zwp_linux_surface_synchronization_v1_set_acquire_fence(m_wsi_surface->get_surface_sync_interface(),
present_sync_fd->get());
}
/* TODO: work out damage */
wl_surface_damage(m_surface, 0, 0, INT32_MAX, INT32_MAX);
if (m_present_mode == VK_PRESENT_MODE_FIFO_KHR)
{
if (!m_wsi_surface->set_frame_callback())
{
set_error_state(VK_ERROR_SURFACE_LOST_KHR);
}
}
#if VULKAN_WSI_LAYER_EXPERIMENTAL
if (m_device_data.is_present_id_enabled())
{
auto *ext = get_swapchain_extension<wsi_ext_present_id_wayland>(true);
if (m_wsi_surface->get_presentation_time_interface() != nullptr)
{
wp_presentation *pres = m_wsi_surface->get_presentation_time_interface();
struct wp_presentation_feedback *feedback = wp_presentation_feedback(pres, m_wsi_surface->get_wl_surface());
wl_proxy_set_queue(reinterpret_cast<wl_proxy *>(feedback), m_buffer_queue);
presentation_feedback *feedback_obj =
ext->insert_into_pending_present_feedback_list(pending_present.present_id, feedback);
if (feedback_obj == nullptr)
{
WSI_LOG_ERROR("Error adding to pending present feedback list");
set_error_state(VK_ERROR_SURFACE_LOST_KHR);
return;
}
register_wp_presentation_feedback_listener(feedback, feedback_obj);
}
}
auto *present_timing_ext = get_swapchain_extension<wsi_ext_present_timing_wayland>();
if (present_timing_ext != nullptr)
{
if ((present_timing_ext->is_stage_pending_for_image_index(pending_present.image_index,
VK_PRESENT_STAGE_IMAGE_FIRST_PIXEL_OUT_BIT_EXT)) &&
(m_wsi_surface->get_presentation_time_interface() != nullptr))
{
wp_presentation *pres = m_wsi_surface->get_presentation_time_interface();
struct wp_presentation_feedback *feedback = wp_presentation_feedback(pres, m_wsi_surface->get_wl_surface());
wl_proxy_set_queue(reinterpret_cast<wl_proxy *>(feedback), m_buffer_queue);
presentation_feedback *feedback_obj =
present_timing_ext->insert_into_pending_present_feedback_list(pending_present.image_index, feedback);
if (feedback_obj == nullptr)
{
WSI_LOG_ERROR("Error adding to pending present feedback list");
set_error_state(VK_ERROR_SURFACE_LOST_KHR);
return;
}
register_wp_presentation_feedback_listener(feedback, feedback_obj);
}
}
#endif
wl_surface_commit(m_surface);
res = wl_display_flush(m_display);
if (res < 0)
{
WSI_LOG_ERROR("error flushing the display");
/* Setting the swapchain as invalid */
set_error_state(VK_ERROR_SURFACE_LOST_KHR);
}
if (m_device_data.is_present_id_enabled())
{
#if VULKAN_WSI_LAYER_EXPERIMENTAL
auto *ext = get_swapchain_extension<wsi_ext_present_id_wayland>();
if (ext != nullptr)
{
if (m_wsi_surface->get_presentation_time_interface() == nullptr)
#else
auto *ext = get_swapchain_extension<wsi_ext_present_id>();
if (ext != nullptr)
#endif
{
ext->mark_delivered(pending_present.present_id);
}
#if VULKAN_WSI_LAYER_EXPERIMENTAL
}
#endif
}
}
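/* Destroy the VkImage, its wl_buffer and the per-image data associated with a
 * swapchain image. */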
void swapchain::destroy_image(swapchain_image &image)
{
std::unique_lock<std::recursive_mutex> image_status_lock(m_image_status_mutex);
if (image.status != swapchain_image::INVALID)
{
if (image.image != VK_NULL_HANDLE)
{
m_device_data.disp.DestroyImage(m_device, image.image, get_allocation_callbacks());
image.image = VK_NULL_HANDLE;
}
image.status = swapchain_image::INVALID;
}
image_status_lock.unlock();
if (image.data != nullptr)
{
auto image_data = reinterpret_cast<wayland_image_data *>(image.data);
if (image_data->buffer != nullptr)
{
wl_buffer_destroy(image_data->buffer);
}
m_allocator.destroy(1, image_data);
image.data = nullptr;
}
}
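/* Return true if at least one swapchain image is currently in the FREE state. */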
bool swapchain::free_image_found()
{
for (auto &img : m_swapchain_images)
{
if (img.status == swapchain_image::FREE)
{
return true;
}
}
return false;
}
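/* Dispatch the buffer event queue until an image is released by the compositor
 * or the timeout (in nanoseconds) expires. */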
VkResult swapchain::get_free_buffer(uint64_t *timeout)
{
int ms_timeout, res;
if (*timeout >= INT_MAX * 1000llu * 1000llu)
{
ms_timeout = INT_MAX;
}
else
{
ms_timeout = *timeout / 1000llu / 1000llu;
}
/* The current dispatch_queue implementation returns as soon as any events have
* been read, even if none of them were dispatched to the buffer queue.
* Therefore dispatch repeatedly until a buffer has been freed.
*/
do
{
res = dispatch_queue(m_display, m_buffer_queue, ms_timeout);
} while (!free_image_found() && res > 0);
if (res > 0)
{
*timeout = 0;
return VK_SUCCESS;
}
else if (res == 0)
{
if (*timeout == 0)
{
return VK_NOT_READY;
}
else
{
return VK_TIMEOUT;
}
}
else
{
return VK_ERROR_SURFACE_LOST_KHR;
}
}
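/* Record the presenting queue submission in the image's presentation fence;
 * the resulting sync fd is later passed to the compositor as the acquire
 * fence. */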
VkResult swapchain::image_set_present_payload(swapchain_image &image, VkQueue queue,
const queue_submit_semaphores &semaphores, const void *submission_pnext)
{
auto image_data = reinterpret_cast<wayland_image_data *>(image.data);
return image_data->present_fence.set_payload(queue, semaphores, submission_pnext);
}
VkResult swapchain::image_wait_present(swapchain_image &, uint64_t)
{
/* With explicit sync in use there is no need to wait for the present sync before submitting the image to the
* compositor. */
return VK_SUCCESS;
}
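/* Bind an application-created VkImage to the external memory backing the
 * swapchain image identified by bind_sc_info->imageIndex. */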
VkResult swapchain::bind_swapchain_image(VkDevice &device, const VkBindImageMemoryInfo *bind_image_mem_info,
const VkBindImageMemorySwapchainInfoKHR *bind_sc_info)
{
UNUSED(device);
const wsi::swapchain_image &swapchain_image = m_swapchain_images[bind_sc_info->imageIndex];
auto image_data = reinterpret_cast<wayland_image_data *>(swapchain_image.data);
return image_data->external_mem.bind_swapchain_image_memory(bind_image_mem_info->image);
}
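/* Provide the image-create extensions needed for swapchain images; for Wayland
 * this is the external memory create-info extension, configured with the
 * wsialloc allocator and the surface's supported formats (and compression
 * control when enabled). */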
VkResult swapchain::get_required_image_creator_extensions(
const VkSwapchainCreateInfoKHR &swapchain_create_info,
util::vector<util::unique_ptr<swapchain_image_create_info_extension>> *extensions)
{
auto compression_control = swapchain_image_create_compression_control::create(
m_device_data.is_swapchain_compression_control_enabled(), swapchain_create_info);
if (!extensions->try_push_back(m_allocator.make_unique<swapchain_image_create_external_memory>(
compression_control, m_wsi_allocator.get(), &m_wsi_surface->get_formats(), m_device_data.physical_device,
m_allocator)))
{
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
return VK_SUCCESS;
}
} // namespace wayland
} // namespace wsi