Merge branch 'venus-win32' into 'main'

Draft: Introduce Windows support for Venus

See merge request mesa/mesa!38731
This commit is contained in:
anonymix007 2026-03-11 04:58:19 +00:00
commit 330ea6ff43
22 changed files with 3377 additions and 30 deletions

View file

@ -0,0 +1,53 @@
#ifndef VULKAN_D3DDDI_H_
#define VULKAN_D3DDDI_H_ 1
#ifdef __cplusplus
extern "C" {
#endif
/* Vendor-specific VkStructureType values for the D3D DDI chaining structs
 * below.  The values are far above the registered Vulkan enum range to
 * avoid collisions with official structure types — TODO confirm these do
 * not clash with other vendors' private ranges. */
#define VK_STRUCTURE_TYPE_D3DDDI_CALLBACKS ((VkStructureType)4281808695u)
#define VK_STRUCTURE_TYPE_D3DDDI_CREATE_RESOURCE ((VkStructureType)4281808696u)
#define VK_STRUCTURE_TYPE_D3DDDI_OPEN_RESOURCE ((VkStructureType)4281808697u)
/* Bundle of D3D user-mode-DDI runtime callbacks and handles handed to the
 * Vulkan driver.  NOTE(review): input-style Vulkan structs conventionally
 * use `const void *pNext`; confirm `void *pNext` is intentional here. */
typedef struct {
    VkStructureType sType;
    void *pNext;
    LUID AdapterLuid;
    HANDLE hRTAdapter; // in: Runtime handle
    HANDLE hRTDevice; // in: Runtime handle
    const D3DDDI_ADAPTERCALLBACKS *pAdapterCallbacks; // in: Pointer to runtime callbacks that invoke kernel
    const D3DDDI_DEVICECALLBACKS *pKTCallbacks; // in: Pointer to runtime callbacks that invoke kernel
    const DXGI_DDI_BASE_CALLBACKS *pDXGIBaseCallbacks; // in: The driver should record this pointer for later use
    D3D10DDI_HRTCORELAYER hRTCoreLayer; // in: CoreLayer handle
    const D3D11DDI_CORELAYER_DEVICECALLBACKS* p11UMCallbacks; // in: callbacks that stay in usermode
    HANDLE hContext; // out: Context handle
} VkD3DDDICallbacks;
/* Chained into VkMemoryAllocateInfo to create a D3D runtime resource
 * alongside the Vulkan allocation. */
typedef struct {
    VkStructureType sType;
    void *pNext;
    HANDLE hRTResource; // in: Runtime resource handle
    const D3D10DDIARG_CREATERESOURCE *pCreateResource; // in: D3D create-resource arguments
} VkD3DDDICreateResource;
/* Chained into VkMemoryAllocateInfo to open (share) an existing D3D
 * runtime resource. */
typedef struct {
    VkStructureType sType;
    void *pNext;
    HANDLE hRTResource; // in: Runtime resource handle
    const D3D10DDIARG_OPENRESOURCE *pOpenResource; // in: D3D open-resource arguments
    const void *pResourceInfo; /* VIOGPU_RES_INFO_REQ */
} VkD3DDDIOpenResource;
/* Map each private sType back to its struct type, presumably so a
 * vk_find_struct-style macro can cast the chain entry. */
#define VK_STRUCTURE_TYPE_D3DDDI_CALLBACKS_cast VkD3DDDICallbacks
#define VK_STRUCTURE_TYPE_D3DDDI_CREATE_RESOURCE_cast VkD3DDDICreateResource
#define VK_STRUCTURE_TYPE_D3DDDI_OPEN_RESOURCE_cast VkD3DDDIOpenResource
#ifdef __cplusplus
}
#endif
#endif

View file

@ -1,6 +1,8 @@
d3d10tokenizedprogramformat.hpp
d3d10TokenizedProgramFormat.hpp
d3d10umddi.h
d3d11tokenizedprogramformat.hpp
d3d11TokenizedProgramFormat.hpp
d3dkmddi.h
d3dkmdt.h
d3dkmthk.h

View file

@ -0,0 +1,323 @@
/*
* Copyright (C) 2019-2020 Red Hat, Inc.
*
* Written By: Vadim Rozenfeld <vrozenfe@redhat.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met :
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and / or other materials provided with the distribution.
* 3. Neither the names of the copyright holders nor the names of their contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED.IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#pragma once
#include <d3dkmthk.h>
#pragma pack(1)
/* 3D region used by transfer commands.  Structures in this header are
 * exchanged verbatim with the VioGPU kernel-mode driver, hence the
 * 1-byte packing throughout. */
typedef struct _VIOGPU_BOX
{
    ULONG x;      // origin
    ULONG y;
    ULONG z;
    ULONG width;  // extent
    ULONG height;
    ULONG depth;
} VIOGPU_BOX;
#pragma pack()
#pragma pack(1)
/* Layout description of a blob resource (per-plane strides/offsets). */
typedef struct _VIOGPU_BLOB_INFO {
    ULONG width;
    ULONG height;
    ULONG format;
    ULONG bind; // Same as virgl
    ULONG strides[4];
    ULONG offsets[4];
} VIOGPU_BLOB_INFO, *PVIOGPU_BLOB_INFO;
#pragma pack()
// ================= QueryAdapterInfo UMDRIVERPRIVATE
#define VIOGPU_IAM 0x56696f475055 // Identifier for queryadapterinfo ("VioGPU" in ASCII hex)
/* Capset ids; presumably match the virtio-gpu VIRTIO_GPU_CAPSET_* values
 * — TODO confirm against the virtio-gpu spec. */
#define VIOGPU_CAPSET_GFXSTREAM_VULKAN 3
#define VIOGPU_CAPSET_VENUS 4
/* Reply for the UMDRIVERPRIVATE QueryAdapterInfo request. */
typedef struct _VIOGPU_ADAPTERINFO
{
    ULONGLONG IamVioGPU; // Should be set by driver to VIOGPU_IAM
    struct
    {
        UINT Supports3d : 1;
        UINT HasShmem : 1;
        UINT Reserved : 30;
    } Flags;
    ULONGLONG SupportedCapsetIDs; // bitmask indexed by capset id
    LUID AdapterLuid;
} VIOGPU_ADAPTERINFO;
// ================= ESCAPES
/* D3DKMTEscape command codes understood by the kernel-mode driver:
 * 0x0xx adapter queries, 0x1xx resource ops, 0x2xx context ops,
 * 0x3xx blit path. */
#define VIOGPU_GET_DEVICE_ID 0x000
#define VIOGPU_GET_CUSTOM_RESOLUTION 0x001
#define VIOGPU_GET_CAPS 0x002
#define VIOGPU_GET_PCI_INFO 0x003
#define VIOGPU_RES_INFO 0x100
#define VIOGPU_RES_BUSY 0x101
#define VIOGPU_RES_BLOB_SET_INFO 0x102
#define VIOGPU_CTX_INIT 0x200
#define VIOGPU_BLIT_INIT 0x300
#pragma pack(1)
/* Reply payload for VIOGPU_GET_CUSTOM_RESOLUTION. */
typedef struct _VIOGPU_DISP_MODE
{
    USHORT XResolution;
    USHORT YResolution;
} VIOGPU_DISP_MODE, *PVIOGPU_DISP_MODE;
#pragma pack()
#pragma pack(1)
/* Single parameter query (VIOGPU_GET_CAPS path). */
typedef struct _VIOGPU_PARAM_REQ
{
    ULONG ParamId;
    UINT64 Value; // out
} VIOGPU_PARAM_REQ;
#pragma pack()
#pragma pack(1)
/* Capset fetch request; Capset is a user-mode pointer the KMD fills with
 * Size bytes. */
typedef struct _VIOGPU_CAPSET_REQ
{
    ULONG CapsetId;
    ULONG Version;
    ULONG Size;
    UCHAR *Capset;
} VIOGPU_CAPSET_REQ;
#pragma pack()
#pragma pack(1)
/* Reply payload for VIOGPU_GET_PCI_INFO: PCI address of the GPU. */
typedef struct _VIOGPU_PCI_INFO_REQ
{
    ULONG Domain;
    ULONG Bus;
    ULONG Dev;
    ULONG Func;
} VIOGPU_PCI_INFO_REQ;
#pragma pack()
#pragma pack(1)
/* VIOGPU_RES_INFO request/reply: resolve a D3DKMT resource handle to its
 * virtio-gpu resource state. */
typedef struct _VIOGPU_RES_INFO_REQ
{
    D3DKMT_HANDLE ResHandle; // in
    ULONG Id;                // out: virtio-gpu resource id
    BOOL IsBlob;
    BOOL IsCreated;
    BOOL InfoValid;          // whether Info below is populated
    VIOGPU_BLOB_INFO Info;
    ULONG BlobMem;           // one of VIOGPU_BLOB_MEM_*
    ULONGLONG BlobId;
    ULONGLONG Size;
} VIOGPU_RES_INFO_REQ;
#pragma pack()
#pragma pack(1)
/* VIOGPU_RES_BUSY: poll or wait for a resource to become idle. */
typedef struct _VIOGPU_RES_BUSY_REQ
{
    D3DKMT_HANDLE ResHandle;
    BOOL Wait;   // in: block until idle instead of polling
    BOOL IsBusy; // out
} VIOGPU_RES_BUSY_REQ;
#pragma pack()
#pragma pack(1)
/* VIOGPU_RES_BLOB_SET_INFO: attach layout info to a blob resource. */
typedef struct {
    D3DKMT_HANDLE ResHandle;
    VIOGPU_BLOB_INFO Info;
} VIOGPU_RES_BLOB_SET_INFO_REQ, *PVIOGPU_RES_BLOB_SET_INFO_REQ;
#pragma pack()
#pragma pack(1)
/* VIOGPU_CTX_INIT: (re)initialize the virtio-gpu context for a capset. */
typedef struct _VIOGPU_CTX_INIT_REQ
{
    UINT CapsetID;
    UINT NumRings;
    UCHAR DebugName[64];
} VIOGPU_CTX_INIT_REQ;
#pragma pack()
typedef struct _VIOGPU_BLIT_PRESENT VIOGPU_BLIT_PRESENT, *PVIOGPU_BLIT_PRESENT;
#pragma pack(1)
/* VIOGPU_BLIT_INIT: register user/kernel event pair and shared blit state
 * for the present path. */
typedef struct _VIOGPU_BLIT_INIT_REQ
{
    HANDLE EventUM;
    HANDLE EventKM;
    PVIOGPU_BLIT_PRESENT pBlitPresent;
} VIOGPU_BLIT_INIT_REQ;
#pragma pack()
#pragma pack(1)
/* Envelope for all escape calls: Type selects the union member,
 * DataLength is presumably the size of that member — TODO confirm it is
 * not the full union size. */
typedef struct _VIOGPU_ESCAPE
{
    USHORT Type; // one of the VIOGPU_* escape codes above
    USHORT DataLength;
    union {
        ULONG Id;
        VIOGPU_DISP_MODE Resolution;
        VIOGPU_PARAM_REQ Parameter;
        VIOGPU_CAPSET_REQ Capset;
        VIOGPU_PCI_INFO_REQ PciInfo;
        VIOGPU_RES_INFO_REQ ResourceInfo;
        VIOGPU_RES_BUSY_REQ ResourceBusy;
        VIOGPU_RES_BLOB_SET_INFO_REQ BlobInfoSet;
        VIOGPU_CTX_INIT_REQ CtxInit;
        VIOGPU_BLIT_INIT_REQ BlitInit;
    } DUMMYUNIONNAME;
} VIOGPU_ESCAPE, *PVIOGPU_ESCAPE;
#pragma pack()
// ================= CreateResource
#pragma pack(1)
/* Classic (non-blob) 3D resource creation parameters; field meanings
 * mirror virgl's resource-create args. */
typedef struct _VIOGPU_RESOURCE_3D_OPTIONS
{
    ULONG target;
    ULONG format;
    ULONG bind;
    ULONG width;
    ULONG height;
    ULONG depth;
    ULONG array_size;
    ULONG last_level;
    ULONG nr_samples;
    ULONG flags;
} VIOGPU_RESOURCE_3D_OPTIONS;
#pragma pack()
/* Blob memory/flag values; presumably match the virtio-gpu
 * VIRTGPU_BLOB_MEM_*/VIRTGPU_BLOB_FLAG_* values — TODO confirm. */
#define VIOGPU_BLOB_MEM_GUEST 0x0001
#define VIOGPU_BLOB_MEM_HOST3D 0x0002
#define VIOGPU_BLOB_MEM_HOST3D_GUEST 0x0003
#define VIOGPU_BLOB_FLAG_USE_MAPPABLE 0x0001
#define VIOGPU_BLOB_FLAG_USE_SHAREABLE 0x0002
//#define VIOGPU_BLOB_FLAG_USE_CROSS_DEVICE 0x0004
#pragma pack(1)
/* Blob resource creation parameters. */
typedef struct _VIOGPU_RESOURCE_BLOB_OPTIONS
{
    ULONG blob_mem;   // one of VIOGPU_BLOB_MEM_*
    ULONG blob_flags; // mask of VIOGPU_BLOB_FLAG_*
    ULONGLONG blob_id;
} VIOGPU_RESOURCE_BLOB_OPTIONS;
#pragma pack()
#pragma pack(1)
/* Private data exchanged through the runtime's CreateResource path;
 * currently only a magic value for validation. */
typedef struct _VIOGPU_CREATE_RESOURCE_EXCHANGE
{
    ULONG magic;
} VIOGPU_CREATE_RESOURCE_EXCHANGE;
#pragma pack()
#define VIOGPU_RESOURCE_TYPE_3D 0
#define VIOGPU_RESOURCE_TYPE_BLOB 1
#pragma pack(1)
/* Private per-allocation data: Type selects which union member is valid. */
typedef struct _VIOGPU_CREATE_ALLOCATION_EXCHANGE
{
    ULONG Type; // VIOGPU_RESOURCE_TYPE_3D or _BLOB
    union {
        VIOGPU_RESOURCE_3D_OPTIONS Options3D;
        VIOGPU_RESOURCE_BLOB_OPTIONS OptionsBlob;
    };
    ULONGLONG Size;
} VIOGPU_CREATE_ALLOCATION_EXCHANGE;
#pragma pack()
// ================= BLIT
#pragma pack(1)
/* Shared state for the blit-present path registered via VIOGPU_BLIT_INIT. */
struct _VIOGPU_BLIT_PRESENT
{
    struct {
        void *resource;
        RECT rect;
    } src;
    struct {
        VIOGPU_CREATE_ALLOCATION_EXCHANGE alloc;
        VIOGPU_RES_INFO_REQ res_info;
        RECT rect;
    } dst;
};
#pragma pack()
// ================= COMMAND BUFFER
#define VIOGPU_CMD_NOP 0x0
#define VIOGPU_CMD_SUBMIT 0x1 // Submit Command to virgl
#define VIOGPU_CMD_TRANSFER_TO_HOST 0x2 // Transfer resource to host
#define VIOGPU_CMD_TRANSFER_FROM_HOST 0x3 // Transfer resource from host
#define VIOGPU_CMD_MAP_BLOB 0x4 // Map blob resource
#define VIOGPU_CMD_UNMAP_BLOB 0x5 // Unmap blob resource
//#define VIOGPU_CMD_SUBMIT_UM 0x6
// #define VIOGPU_EXECBUF_FENCE_FD_IN 0x01
// #define VIOGPU_EXECBUF_FENCE_FD_OUT 0x02
#define VIOGPU_EXECBUF_RING_IDX 0x04
#define VIOGPU_EXECBUF_VIRGL 0x08
// #define VIOGPU_EXECBUF_FLAGS (VIOGPU_EXECBUF_FENCE_FD_IN | VIOGPU_EXECBUF_FENCE_FD_OUT | VIOGPU_EXECBUF_RING_IDX)
#pragma pack(1)
/* Header preceding each command in the submitted command buffer. */
typedef struct _VIOGPU_COMMAND_HDR
{
    UINT type;  // one of VIOGPU_CMD_*
    UINT size;  // payload size following the header
    UINT flags; // mask of VIOGPU_EXECBUF_*
    UINT ring_idx;
} VIOGPU_COMMAND_HDR;
#pragma pack()
#pragma pack(1)
/* Payload for VIOGPU_CMD_TRANSFER_TO_HOST / _FROM_HOST. */
typedef struct _VIOGPU_TRANSFER_CMD
{
    ULONG res_id;
    VIOGPU_BOX box;
    ULONGLONG offset;
    ULONG level;
    ULONG stride;
    ULONG layer_stride;
} VIOGPU_TRANSFER_CMD;
#pragma pack()
#pragma pack(1)
/* Source/destination rectangles for the user-mode blit path. */
typedef struct _VIOGPU_BEGIN_UM_BLIT_CMD
{
    RECT src, dst;
} VIOGPU_BEGIN_UM_BLIT_CMD;
#pragma pack()
/* Well-known kernel object names for the resolution-change event. */
#define BASE_NAMED_OBJECTS L"\\BaseNamedObjects\\"
#define GLOBAL_OBJECTS L"Global\\"
#define RESOLUTION_EVENT_NAME L"VioGpuResolutionEvent"

View file

@ -105,6 +105,8 @@ vn_link_args = [
vulkan_icd_link_args,
]
vn_kwargs = {}
vn_libs = []
if not with_platform_windows
@ -136,6 +138,16 @@ if with_platform_android
vn_deps += [dep_android, idep_u_gralloc]
endif
if with_platform_windows
libvn_files += files('vn_renderer_virtgpu_win32.c')
vn_incs += inc_winddk
vn_link_args += '-static'
vn_kwargs = {
'vs_module_defs': vulkan_api_def,
'name_prefix': '',
}
endif
libvulkan_virtio = shared_library(
'vulkan_virtio',
[libvn_files, vn_entrypoints, sha1_h],
@ -146,5 +158,6 @@ libvulkan_virtio = shared_library(
link_args : vn_link_args,
link_depends : vulkan_icd_link_depends,
gnu_symbol_visibility : 'hidden',
kwargs: vn_kwargs,
install : true,
)

View file

@ -103,9 +103,11 @@ vn_log(struct vn_instance *instance, const char *format, ...)
VkResult
vn_log_result(struct vn_instance *instance,
VkResult result,
const char *file,
int line,
const char *where)
{
vn_log(instance, "%s: %s", where, vk_Result_to_str(result));
vn_log(instance, "%s:%d: %s: %s", file, line, where, vk_Result_to_str(result));
return result;
}

View file

@ -66,7 +66,7 @@
#define VN_PERF(category) (unlikely(vn_env.perf & VN_PERF_##category))
#define vn_error(instance, error) \
(VN_DEBUG(RESULT) ? vn_log_result((instance), (error), __func__) : (error))
(VN_DEBUG(RESULT) ? vn_log_result((instance), (error), __FILE__, __LINE__, __func__) : (error))
#define vn_result(instance, result) \
((result) >= VK_SUCCESS ? (result) : vn_error((instance), (result)))
@ -326,6 +326,8 @@ vn_log(struct vn_instance *instance, const char *format, ...)
VkResult
vn_log_result(struct vn_instance *instance,
VkResult result,
const char *file,
int line,
const char *where);
#define VN_REFCOUNT_INIT(val) \

View file

@ -196,6 +196,12 @@ find_extension_names(const char *const *exts,
uint32_t ext_count,
const char *name)
{
#ifdef VK_USE_PLATFORM_WIN32_KHR
/* Filter win32 extensions as they're fully implemented in the driver */
if (strstr(name, "win32"))
return true;
#endif
for (uint32_t i = 0; i < ext_count; i++) {
if (!strcmp(exts[i], name))
return true;
@ -370,6 +376,21 @@ vn_device_fix_create_info(const struct vn_device *dev,
block_exts[block_count++] = VK_EXT_PCI_BUS_INFO_EXTENSION_NAME;
}
#ifdef VK_USE_PLATFORM_WIN32_KHR
if (app_exts->KHR_external_fence_win32) {
/* see vn_physical_device_get_native_extensions */
block_exts[block_count++] = VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME;
}
if (app_exts->KHR_external_semaphore_win32) {
/* see vn_physical_device_get_native_extensions */
block_exts[block_count++] = VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME;
}
if (app_exts->KHR_external_memory_win32) {
/* see vn_physical_device_get_native_extensions */
block_exts[block_count++] = VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME;
}
#endif
assert(extra_count <= ARRAY_SIZE(extra_exts));
assert(block_count <= ARRAY_SIZE(block_exts));

View file

@ -22,6 +22,14 @@
#include "vn_renderer.h"
#include "vn_renderer_util.h"
#ifdef VK_USE_PLATFORM_WIN32_KHR
#define _D3D10_CONSTANTS
#define _D3D10_1_CONSTANTS
#include <winddk_compat.h>
#include <d3d10umddi.h>
#include <vulkan/vulkan_d3dddi.h>
#endif
/* device memory commands */
static inline VkResult
@ -83,7 +91,8 @@ vn_device_memory_wait_alloc(struct vn_device *dev,
}
static inline VkResult
vn_device_memory_bo_init(struct vn_device *dev, struct vn_device_memory *mem)
vn_device_memory_bo_init(struct vn_device *dev, struct vn_device_memory *mem,
const VkMemoryAllocateInfo *alloc_info)
{
VkResult result = vn_device_memory_wait_alloc(dev, mem);
if (result != VK_SUCCESS)
@ -94,7 +103,7 @@ vn_device_memory_bo_init(struct vn_device *dev, struct vn_device_memory *mem)
.memoryTypes[mem_vk->memory_type_index];
return vn_renderer_bo_create_from_device_memory(
dev->renderer, mem_vk->size, mem->base.id, mem_type->propertyFlags,
mem_vk->export_handle_types, &mem->base_bo);
mem_vk->export_handle_types, alloc_info, &mem->base_bo);
}
static inline void
@ -106,6 +115,58 @@ vn_device_memory_bo_fini(struct vn_device *dev, struct vn_device_memory *mem)
}
}
#ifdef VK_USE_PLATFORM_WIN32_KHR
/* Import a Win32 handle (NT handle or KMT global share handle) as a device
 * memory object: wrap it in a renderer bo, then allocate host-side memory
 * bound to that bo's resource id via VkImportMemoryResourceInfoMESA.
 * On failure the bo reference is dropped and *mem is left without a bo. */
static VkResult
vn_device_memory_import_handle(struct vn_device *dev,
                               struct vn_device_memory *mem,
                               const VkMemoryAllocateInfo *alloc_info,
                               bool is_kmt,
                               void *handle)
{
   const VkMemoryType *mem_type =
      &dev->physical_device->memory_properties
          .memoryTypes[alloc_info->memoryTypeIndex];

   const VkMemoryDedicatedAllocateInfo *dedicated_info =
      vk_find_struct_const(alloc_info->pNext, MEMORY_DEDICATED_ALLOCATE_INFO);
   const bool is_dedicated =
      dedicated_info && (dedicated_info->image != VK_NULL_HANDLE ||
                         dedicated_info->buffer != VK_NULL_HANDLE);

   struct vn_renderer_bo *bo;
   /* size 0 for non-dedicated imports: presumably the renderer derives the
    * size from the handle itself — TODO confirm */
   VkResult result = vn_renderer_bo_create_from_handle(
      dev->renderer, is_dedicated ? alloc_info->allocationSize : 0,
      mem->base.id, is_kmt, handle, mem_type->propertyFlags, alloc_info, &bo);
   if (result != VK_SUCCESS)
      return result;

   /* ensure the host has seen the resource before we reference it below */
   vn_ring_roundtrip(dev->primary_ring);

   const VkImportMemoryResourceInfoMESA import_memory_resource_info = {
      .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_RESOURCE_INFO_MESA,
      .pNext = alloc_info->pNext,
      .resourceId = bo->res_id,
   };
   const VkMemoryAllocateInfo memory_allocate_info = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
      .pNext = &import_memory_resource_info,
      .allocationSize = alloc_info->allocationSize,
      .memoryTypeIndex = alloc_info->memoryTypeIndex,
   };
   result = vn_device_memory_alloc_simple(dev, mem, &memory_allocate_info);
   if (result != VK_SUCCESS) {
      vn_renderer_bo_unref(dev->renderer, bo);
      return result;
   }

   if (!is_kmt) {
      /* NOTE(review): comment was copied from the fd path ("close import fd").
       * Per VK_KHR_external_memory_win32, importing an NT handle does not
       * transfer ownership to the implementation, so closing the
       * application's handle here may be wrong unless the renderer has
       * duplicated it above — confirm. */
      CloseHandle((HANDLE) handle);
   }

   mem->base_bo = bo;

   return VK_SUCCESS;
}
#else
VkResult
vn_device_memory_import_dma_buf(struct vn_device *dev,
struct vn_device_memory *mem,
@ -148,6 +209,7 @@ vn_device_memory_import_dma_buf(struct vn_device *dev,
return VK_SUCCESS;
}
#endif
static VkResult
vn_device_memory_alloc_guest_vram(struct vn_device *dev,
@ -169,7 +231,7 @@ vn_device_memory_alloc_guest_vram(struct vn_device *dev,
VkResult result = vn_renderer_bo_create_from_device_memory(
dev->renderer, mem_vk->size, mem->base.id, flags,
mem_vk->export_handle_types, &mem->base_bo);
mem_vk->export_handle_types, alloc_info, &mem->base_bo);
if (result != VK_SUCCESS) {
return result;
}
@ -207,7 +269,7 @@ vn_device_memory_alloc_export(struct vn_device *dev,
if (result != VK_SUCCESS)
return result;
result = vn_device_memory_bo_init(dev, mem);
result = vn_device_memory_bo_init(dev, mem, alloc_info);
if (result != VK_SUCCESS) {
vn_device_memory_free_simple(dev, mem);
return result;
@ -232,6 +294,10 @@ struct vn_device_memory_alloc_info {
VkMemoryAllocateFlagsInfo flags;
VkMemoryDedicatedAllocateInfo dedicated;
VkMemoryOpaqueCaptureAddressAllocateInfo capture;
#ifdef VK_USE_PLATFORM_WIN32_KHR
VkD3DDDICreateResource d3d_create;
VkD3DDDIOpenResource d3d_open;
#endif
};
static const VkMemoryAllocateInfo *
@ -267,10 +333,22 @@ vn_device_memory_fix_alloc_info(
memcpy(&local_info->capture, src, sizeof(local_info->capture));
next = &local_info->capture;
break;
default:
break;
}
/* FIXME: -Werror=switch */
#ifdef VK_USE_PLATFORM_WIN32_KHR
if (src->sType == VK_STRUCTURE_TYPE_D3DDDI_CREATE_RESOURCE) {
memcpy(&local_info->d3d_create, src, sizeof(local_info->d3d_create));
next = &local_info->d3d_create;
} else if (src->sType == VK_STRUCTURE_TYPE_D3DDDI_OPEN_RESOURCE) {
memcpy(&local_info->d3d_open, src, sizeof(local_info->d3d_open));
next = &local_info->d3d_open;
}
#endif
if (next) {
cur->pNext = next;
cur = next;
@ -304,10 +382,21 @@ vn_device_memory_alloc(struct vn_device *dev,
alloc_info = vn_device_memory_fix_alloc_info(
alloc_info, renderer_handle_type, has_guest_vram, &local_info);
// FIXME: this is slightly wrong for Windows
/* ensure correct blob flags */
mem_vk->export_handle_types = renderer_handle_type;
}
#ifdef VK_USE_PLATFORM_WIN32_KHR
const bool need_bo_now =
vk_find_struct_const(alloc_info, D3DDDI_CREATE_RESOURCE) != NULL ||
vk_find_struct_const(alloc_info, D3DDDI_OPEN_RESOURCE) != NULL;
if (need_bo_now) {
return vn_device_memory_alloc_export(dev, mem, alloc_info);
}
#endif
if (has_guest_vram && (host_visible || export_alloc)) {
return vn_device_memory_alloc_guest_vram(dev, mem, alloc_info);
} else if (export_alloc) {
@ -367,10 +456,10 @@ vn_AllocateMemory(VkDevice device,
vn_object_set_id(mem, vn_get_next_obj_id(), VK_OBJECT_TYPE_DEVICE_MEMORY);
VkResult result;
#ifndef VK_USE_PLATFORM_WIN32_KHR
const VkImportMemoryFdInfoKHR *import_fd_info =
vk_find_struct_const(pAllocateInfo->pNext, IMPORT_MEMORY_FD_INFO_KHR);
VkResult result;
if (mem->base.vk.ahardware_buffer) {
result = vn_android_device_import_ahb(dev, mem, pAllocateInfo);
} else if (import_fd_info) {
@ -381,6 +470,17 @@ vn_AllocateMemory(VkDevice device,
if (result == VK_SUCCESS)
vn_wsi_memory_info_init(mem, pAllocateInfo);
}
#else
const VkImportMemoryWin32HandleInfoKHR *import_win32_info =
vk_find_struct_const(pAllocateInfo->pNext, IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR);
if (import_win32_info) {
const bool is_kmt = !(import_win32_info->handleType & VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT);
result = vn_device_memory_import_handle(dev, mem, pAllocateInfo,
is_kmt, import_win32_info->handle);
} else {
result = vn_device_memory_alloc(dev, mem, pAllocateInfo);
}
#endif
vn_device_memory_emit_report(dev, mem, /* is_alloc */ true, result);
@ -455,7 +555,7 @@ vn_MapMemory2(VkDevice device,
* the extension.
*/
if (need_bo) {
result = vn_device_memory_bo_init(dev, mem);
result = vn_device_memory_bo_init(dev, mem, NULL);
if (result != VK_SUCCESS)
return vn_error(dev->instance, result);
}
@ -548,6 +648,92 @@ vn_GetDeviceMemoryCommitment(VkDevice device,
pCommittedMemoryInBytes);
}
#ifdef VK_USE_PLATFORM_WIN32_KHR
/* Export a device memory object's renderer bo as a Win32 handle.
 * Only the opaque win32 (NT) and KMT handle types are supported. */
VKAPI_ATTR VkResult VKAPI_CALL
vn_GetMemoryWin32HandleKHR(VkDevice device,
                           const VkMemoryGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                           HANDLE *pHandle)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_device_memory *mem =
      vn_device_memory_from_handle(pGetWin32HandleInfo->memory);

   /* At the moment, we support only the below handle types. */
   assert(pGetWin32HandleInfo->handleType &
          (VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT |
           VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT));
   assert(mem->base_bo);

   const bool is_kmt =
      !(pGetWin32HandleInfo->handleType &
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT);
   HANDLE exported =
      vn_renderer_bo_export_handle(dev->renderer, mem->base_bo, is_kmt);
   *pHandle = exported;

   return exported != NULL
             ? VK_SUCCESS
             : vn_error(dev->instance, VK_ERROR_TOO_MANY_OBJECTS);
}
/* Query the renderer for the memory type bits compatible with an external
 * Win32 handle: temporarily wrap the handle in a renderer bo, ask the host
 * via vkGetMemoryResourcePropertiesMESA, then drop the bo again. */
static VkResult
vn_get_memory_handle_properties(struct vn_device *dev,
                                bool is_kmt,
                                void *handle,
                                void *alloc_info,
                                uint32_t *out_mem_type_bits)
{
   VkDevice device = vn_device_to_handle(dev);

   struct vn_renderer_bo *bo;
   VkResult result = vn_renderer_bo_create_from_handle(
      dev->renderer, 0 /* size */, 0 /* id */, is_kmt, handle, 0 /* flags */, alloc_info, &bo);
   if (result != VK_SUCCESS) {
      vn_log(dev->instance, "bo_create_from_handle failed");
      return result;
   }

   /* ensure the host has seen the resource before querying it */
   vn_ring_roundtrip(dev->primary_ring);

   VkMemoryResourcePropertiesMESA props = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_RESOURCE_PROPERTIES_MESA,
   };
   result = vn_call_vkGetMemoryResourcePropertiesMESA(
      dev->primary_ring, device, bo->res_id, &props);
   /* the bo was only needed for the query; release it regardless of result */
   vn_renderer_bo_unref(dev->renderer, bo);
   if (result != VK_SUCCESS) {
      vn_log(dev->instance, "vkGetMemoryResourcePropertiesMESA failed");
      return result;
   }

   *out_mem_type_bits = props.memoryTypeBits;

   return VK_SUCCESS;
}
/* Report which memory types can import the given Win32 handle.
 * Rejects any handle type other than opaque win32 (NT) and KMT. */
VKAPI_ATTR VkResult VKAPI_CALL
vn_GetMemoryWin32HandlePropertiesKHR(VkDevice device,
                                     VkExternalMemoryHandleTypeFlagBits handleType,
                                     HANDLE handle,
                                     VkMemoryWin32HandlePropertiesKHR *pMemoryWin32HandleProperties)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);

   bool is_kmt;
   switch (handleType) {
   case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT:
      is_kmt = false;
      break;
   case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
      is_kmt = true;
      break;
   default:
      return vn_error(dev->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE);
   }

   uint32_t mem_type_bits = 0;
   const VkResult result = vn_get_memory_handle_properties(
      dev, is_kmt, handle, pMemoryWin32HandleProperties, &mem_type_bits);
   if (result != VK_SUCCESS)
      return vn_error(dev->instance, result);

   pMemoryWin32HandleProperties->memoryTypeBits = mem_type_bits;

   return VK_SUCCESS;
}
#else
VKAPI_ATTR VkResult VKAPI_CALL
vn_GetMemoryFdKHR(VkDevice device,
const VkMemoryGetFdInfoKHR *pGetFdInfo,
@ -625,3 +811,4 @@ vn_GetMemoryFdPropertiesKHR(VkDevice device,
return VK_SUCCESS;
}
#endif

View file

@ -57,6 +57,7 @@ VK_DEFINE_NONDISP_HANDLE_CASTS(vn_device_memory,
VkDeviceMemory,
VK_OBJECT_TYPE_DEVICE_MEMORY)
#ifndef VK_USE_PLATFORM_WIN32_KHR
VkResult
vn_device_memory_import_dma_buf(struct vn_device *dev,
struct vn_device_memory *mem,
@ -67,5 +68,6 @@ VkResult
vn_get_memory_dma_buf_properties(struct vn_device *dev,
int fd,
uint32_t *out_mem_type_bits);
#endif
#endif /* VN_DEVICE_MEMORY_H */

View file

@ -56,7 +56,9 @@ static const struct vk_instance_extension_table
#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
.EXT_acquire_xlib_display = true,
#endif
#ifndef VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_WIN32_KHR
.KHR_win32_surface = true,
#else
.EXT_headless_surface = true,
#endif
#ifdef VK_USE_PLATFORM_DISPLAY_KHR
@ -167,11 +169,11 @@ vn_instance_init_ring(struct vn_instance *instance)
}
static VkResult
vn_instance_init_renderer(struct vn_instance *instance)
vn_instance_init_renderer(struct vn_instance *instance, const VkInstanceCreateInfo *pCreateInfo)
{
const VkAllocationCallbacks *alloc = &instance->base.vk.alloc;
VkResult result = vn_renderer_create(instance, alloc, &instance->renderer);
VkResult result = vn_renderer_create(instance, alloc, pCreateInfo, &instance->renderer);
if (result != VK_SUCCESS)
return result;
@ -314,7 +316,7 @@ vn_CreateInstance(const VkInstanceCreateInfo *pCreateInfo,
goto out_mtx_destroy;
}
result = vn_instance_init_renderer(instance);
result = vn_instance_init_renderer(instance, pCreateInfo);
if (result == VK_ERROR_INITIALIZATION_FAILED) {
assert(!instance->renderer);
*pInstance = instance_handle;

View file

@ -1171,19 +1171,27 @@ vn_physical_device_get_native_extensions(
if (physical_dev->instance->renderer->info.has_external_sync &&
physical_dev->renderer_sync_fd.fence_exportable) {
#if DETECT_OS_WINDOWS
exts->KHR_external_fence_win32 = true;
#else
if (physical_dev->external_fence_handles ==
VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT) {
exts->KHR_external_fence_fd = true;
}
#endif
}
if (physical_dev->instance->renderer->info.has_external_sync &&
physical_dev->renderer_sync_fd.semaphore_importable &&
physical_dev->renderer_sync_fd.semaphore_exportable) {
#if DETECT_OS_WINDOWS
exts->KHR_external_semaphore_win32 = true;
#else
if (physical_dev->external_binary_semaphore_handles ==
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT) {
exts->KHR_external_semaphore_fd = true;
}
#endif
}
#ifdef VK_USE_PLATFORM_ANDROID_KHR
@ -1208,10 +1216,13 @@ vn_physical_device_get_native_extensions(
}
#else /* VK_USE_PLATFORM_ANDROID_KHR */
if (physical_dev->external_memory.renderer_handle_type) {
#if !DETECT_OS_WINDOWS
#if DETECT_OS_WINDOWS
exts->KHR_external_memory_win32 = true;
exts->KHR_win32_keyed_mutex = true;
#else
exts->KHR_external_memory_fd = true;
exts->EXT_external_memory_dma_buf = true;
#endif /* !DETECT_OS_WINDOWS */
#endif /* DETECT_OS_WINDOWS */
}
#endif /* VK_USE_PLATFORM_ANDROID_KHR */
@ -2796,7 +2807,7 @@ vn_GetPhysicalDeviceImageFormatProperties2(
pImageFormatInfo->pNext, WSI_IMAGE_CREATE_INFO_MESA);
if (wsi_info &&
!vn_wsi_validate_image_format_info(physical_dev, pImageFormatInfo)) {
return vn_error(physical_dev->instance, VK_ERROR_FORMAT_NOT_SUPPORTED);
return VK_ERROR_FORMAT_NOT_SUPPORTED;
}
const VkPhysicalDeviceExternalImageFormatInfo *external_info =
@ -2806,8 +2817,7 @@ vn_GetPhysicalDeviceImageFormatProperties2(
if (!external_info->handleType) {
external_info = NULL;
} else if (!(external_info->handleType & supported_handle_types)) {
return vn_error(physical_dev->instance,
VK_ERROR_FORMAT_NOT_SUPPORTED);
return VK_ERROR_FORMAT_NOT_SUPPORTED;
}
/* Fully resolve AHB image format query on the driver side. */
@ -2830,16 +2840,14 @@ vn_GetPhysicalDeviceImageFormatProperties2(
VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT &&
pImageFormatInfo->tiling !=
VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
return vn_error(physical_dev->instance,
VK_ERROR_FORMAT_NOT_SUPPORTED);
return VK_ERROR_FORMAT_NOT_SUPPORTED;
}
if (external_info->handleType != renderer_handle_type) {
pImageFormatInfo = vn_physical_device_fix_image_format_info(
pImageFormatInfo, renderer_handle_type, &local_info);
if (!pImageFormatInfo) {
return vn_error(physical_dev->instance,
VK_ERROR_FORMAT_NOT_SUPPORTED);
return VK_ERROR_FORMAT_NOT_SUPPORTED;
}
}
}
@ -2884,6 +2892,10 @@ vn_GetPhysicalDeviceImageFormatProperties2(
}
}
/* Silence the log spam */
if (result == VK_ERROR_FORMAT_NOT_SUPPORTED)
return result;
return vn_result(physical_dev->instance, result);
}

View file

@ -25,6 +25,21 @@
#include "vn_renderer.h"
#include "vn_wsi.h"
#ifdef VK_USE_PLATFORM_WIN32_KHR
#include <windows.h>
static inline VkResult sync_wait_handle(void *handle, int timeout)
{
switch (WaitForSingleObject(handle, timeout)) {
case WAIT_OBJECT_0:
return VK_SUCCESS;
case WAIT_TIMEOUT:
return VK_NOT_READY;
default:
return VK_ERROR_DEVICE_LOST;
}
}
#endif
/* queue commands */
struct vn_submit_info_pnext_fix {
@ -398,7 +413,11 @@ vn_queue_submission_fix_batch_semaphores(struct vn_queue_submission *submit,
struct vn_semaphore *sem = vn_semaphore_from_handle(sem_handle);
const struct vn_sync_payload *payload = sem->payload;
#ifdef VK_USE_PLATFORM_WIN32_KHR
if (payload->type != VN_SYNC_TYPE_IMPORTED_WIN32_HANDLE)
#else
if (payload->type != VN_SYNC_TYPE_IMPORTED_SYNC_FD)
#endif
continue;
if (!vn_semaphore_wait_external(dev, sem))
@ -1539,8 +1558,13 @@ static void
vn_sync_payload_release(UNUSED struct vn_device *dev,
struct vn_sync_payload *payload)
{
if (payload->type == VN_SYNC_TYPE_IMPORTED_SYNC_FD && payload->fd >= 0)
#ifdef VK_USE_PLATFORM_WIN32_KHR
if (payload->type == VN_SYNC_TYPE_IMPORTED_WIN32_HANDLE && payload->handle != NULL)
CloseHandle(payload->handle);
#else
if (payload->type == VN_SYNC_TYPE_IMPORTED_SYNC_FD && is_fd_valid(payload->fd))
close(payload->fd);
#endif
payload->type = VN_SYNC_TYPE_INVALID;
}
@ -1763,12 +1787,19 @@ vn_GetFenceStatus(VkDevice device, VkFence _fence)
result = vn_call_vkGetFenceStatus(dev->primary_ring, device, _fence);
}
break;
#ifdef VK_USE_PLATFORM_WIN32_KHR
case VN_SYNC_TYPE_IMPORTED_WIN32_HANDLE:
//vn_log(dev->instance, "waiting for handle %p", payload->handle);
result = payload->handle != NULL ? sync_wait_handle(payload->handle, 0) : VK_SUCCESS;
break;
#else
case VN_SYNC_TYPE_IMPORTED_SYNC_FD:
if (payload->fd < 0 || sync_wait(payload->fd, 0) == 0)
if (!is_fd_valid(payload->fd) || sync_wait(payload->fd, 0) == 0)
result = VK_SUCCESS;
else
result = errno == ETIME ? VK_NOT_READY : VK_ERROR_DEVICE_LOST;
break;
#endif
default:
UNREACHABLE("unexpected fence payload type");
break;
@ -1869,6 +1900,128 @@ vn_WaitForFences(VkDevice device,
return vn_result(dev->instance, result);
}
#ifdef VK_USE_PLATFORM_WIN32_KHR
/* Create an exportable Win32 handle that signals once the payload's
 * pending work completes: submit a one-shot renderer sync (optionally
 * gated on the ring seqno), export it as a handle, then drop the sync.
 * Mirrors vn_create_sync_file on the POSIX side. */
static VkResult
vn_create_sync_handle(struct vn_device *dev,
                      struct vn_sync_payload_external *external_payload,
                      HANDLE *out_handle)
{
   struct vn_renderer_sync *sync;
   VkResult result = vn_renderer_sync_create(dev->renderer, 0,
                                             VN_RENDERER_SYNC_BINARY, &sync);
   if (result != VK_SUCCESS)
      return vn_error(dev->instance, result);

   struct vn_renderer_submit_batch batch = {
      .syncs = &sync,
      .sync_values = &(const uint64_t){ 1 },
      .sync_count = 1,
      .ring_idx = external_payload->ring_idx,
   };

   uint32_t local_data[8];
   struct vn_cs_encoder local_enc =
      VN_CS_ENCODER_INITIALIZER_LOCAL(local_data, sizeof(local_data));
   if (external_payload->ring_seqno_valid) {
      /* make the sync wait for the ring to reach the recorded seqno */
      const uint64_t ring_id = vn_ring_get_id(dev->primary_ring);
      vn_encode_vkWaitRingSeqnoMESA(&local_enc, 0, ring_id,
                                    external_payload->ring_seqno);
      batch.cs_data = local_data;
      batch.cs_size = vn_cs_encoder_get_len(&local_enc);
   }

   const struct vn_renderer_submit submit = {
      .batches = &batch,
      .batch_count = 1,
   };
   result = vn_renderer_submit(dev->renderer, &submit);
   if (result != VK_SUCCESS) {
      vn_renderer_sync_destroy(dev->renderer, sync);
      return vn_error(dev->instance, result);
   }

   /* the exported handle keeps the sync alive; the sync object itself is
    * no longer needed */
   *out_handle = vn_renderer_sync_export_handle(dev->renderer, sync);
   vn_renderer_sync_destroy(dev->renderer, sync);

   return *out_handle != NULL ? VK_SUCCESS : VK_ERROR_TOO_MANY_OBJECTS;
}
/* Import a Win32 handle (or named event) as the fence's temporary payload.
 * Only VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT is supported. */
VkResult
vn_ImportFenceWin32HandleKHR(VkDevice device,
                             const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_fence *fence = vn_fence_from_handle(pImportFenceWin32HandleInfo->fence);
   ASSERTED const bool is_handle = pImportFenceWin32HandleInfo->handleType ==
      VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT;
   void *handle = pImportFenceWin32HandleInfo->handle;
   const LPCWSTR name = pImportFenceWin32HandleInfo->name;

   assert(is_handle);

   /* Exactly one of handle/name must be provided.  The previous check
    * combined "both NULL" and "both set" with && and was therefore always
    * false, letting invalid imports through. */
   if ((handle == NULL) == (name == NULL))
      return vn_error(dev->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE);

   /* Resolve a named import before touching the payload so that failure
    * leaves the fence unchanged.  NOTE(review): CreateEventW opens the
    * existing object when the name is already in use — confirm OpenEventW
    * is not the intended call here. */
   HANDLE import =
      name != NULL ? CreateEventW(NULL, FALSE, FALSE, name) : handle;
   if (import == NULL)
      return vn_error(dev->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE);

   struct vn_sync_payload *temp = &fence->temporary;
   vn_sync_payload_release(dev, temp);
   temp->type = VN_SYNC_TYPE_IMPORTED_WIN32_HANDLE;
   temp->handle = import;
   fence->payload = temp;

   return VK_SUCCESS;
}
/* Export the fence's current payload as a Win32 handle.  For a
 * device-only payload a fresh renderer sync handle is created and the
 * host fence is reset; for an imported payload, ownership of the stored
 * handle is transferred to the caller. */
VkResult
vn_GetFenceWin32HandleKHR(VkDevice device,
                          const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                          HANDLE *pHandle)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_fence *fence = vn_fence_from_handle(pGetWin32HandleInfo->fence);
   const bool is_handle =
      pGetWin32HandleInfo->handleType == VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT;
   struct vn_sync_payload *payload = fence->payload;
   VkResult result;

   /* only the opaque win32 handle type is supported */
   assert(is_handle);
   assert(dev->physical_device->renderer_sync_fd.fence_exportable);

   HANDLE handle = NULL;
   if (payload->type == VN_SYNC_TYPE_DEVICE_ONLY) {
      result = vn_create_sync_handle(dev, &fence->external_payload, &handle);
      if (result != VK_SUCCESS)
         return vn_error(dev->instance, result);

      /* the exported handle now tracks the pending work; reset the host
       * fence resource and fall back to the permanent payload */
      vn_async_vkResetFenceResourceMESA(dev->primary_ring, device,
                                        pGetWin32HandleInfo->fence);

      vn_sync_payload_release(dev, &fence->temporary);
      fence->payload = &fence->permanent;
   } else {
      assert(payload->type == VN_SYNC_TYPE_IMPORTED_WIN32_HANDLE);

      /* transfer ownership of the imported handle to save a duplicate */
      handle = payload->handle;
      payload->handle = NULL;

      /* reset host fence in case in signaled state before import */
      result = vn_ResetFences(device, 1, &pGetWin32HandleInfo->fence);
      if (result != VK_SUCCESS) {
         /* transfer handle ownership back on error */
         payload->handle = handle;
         return result;
      }
   }

   *pHandle = handle;
   return VK_SUCCESS;
}
#else
static VkResult
vn_create_sync_file(struct vn_device *dev,
struct vn_sync_payload_external *external_payload,
@ -1911,7 +2064,7 @@ vn_create_sync_file(struct vn_device *dev,
*out_fd = vn_renderer_sync_export_syncobj(dev->renderer, sync, true);
vn_renderer_sync_destroy(dev->renderer, sync);
return *out_fd >= 0 ? VK_SUCCESS : VK_ERROR_TOO_MANY_OBJECTS;
return is_fd_valid(*out_fd) ? VK_SUCCESS : VK_ERROR_TOO_MANY_OBJECTS;
}
static inline bool
@ -1920,7 +2073,7 @@ vn_sync_valid_fd(int fd)
/* the special value -1 for fd is treated like a valid sync file descriptor
* referring to an object that has already signaled
*/
return (fd >= 0 && sync_valid_fd(fd)) || fd == -1;
return (is_fd_valid(fd) && sync_valid_fd(fd)) || fd == -1;
}
VKAPI_ATTR VkResult VKAPI_CALL
@ -1994,6 +2147,7 @@ vn_GetFenceFdKHR(VkDevice device,
*pFd = fd;
return VK_SUCCESS;
}
#endif
/* semaphore commands */
@ -2015,12 +2169,23 @@ vn_semaphore_wait_external(struct vn_device *dev, struct vn_semaphore *sem)
{
struct vn_sync_payload *temp = &sem->temporary;
#ifdef VK_USE_PLATFORM_WIN32_KHR
assert(temp->type == VN_SYNC_TYPE_IMPORTED_WIN32_HANDLE);
if (temp->handle != NULL) {
//vn_log(dev->instance, "waiting for handle %p", temp->handle);
if (sync_wait_handle(temp->handle, INFINITE) != VK_SUCCESS)
return false;
}
#else
assert(temp->type == VN_SYNC_TYPE_IMPORTED_SYNC_FD);
if (temp->fd >= 0) {
if (sync_wait(temp->fd, -1))
return false;
}
#endif
vn_sync_payload_release(dev, &sem->temporary);
sem->payload = &sem->permanent;
@ -2409,6 +2574,96 @@ vn_WaitSemaphores(VkDevice device,
return vn_result(dev->instance, result);
}
#ifdef VK_USE_PLATFORM_WIN32_KHR
/**
 * Import an opaque Win32 handle or a named event into a semaphore
 * (VK_KHR_external_semaphore_win32).
 *
 * Exactly one of handle and name may be provided.  The import replaces the
 * temporary payload; ownership of the handle transfers to the semaphore.
 */
VKAPI_ATTR VkResult VKAPI_CALL
vn_ImportSemaphoreWin32HandleKHR(VkDevice device,
   const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_semaphore *sem =
      vn_semaphore_from_handle(pImportSemaphoreWin32HandleInfo->semaphore);
   ASSERTED const bool is_handle =
      pImportSemaphoreWin32HandleInfo->handleType ==
      VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT;
   void *handle = pImportSemaphoreWin32HandleInfo->handle;
   const LPCWSTR name = pImportSemaphoreWin32HandleInfo->name;

   assert(is_handle);

   /* exactly one of handle and name must be provided; the original
    * `(a && b) && (!a && !b)` was always false and never rejected anything
    */
   if ((handle == NULL) == (name == NULL))
      return vn_error(dev->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE);

   /* when a name is given, open/create the named event in this process */
   if (name != NULL) {
      handle = CreateEventW(NULL, FALSE, FALSE, name);
      if (handle == NULL)
         return vn_error(dev->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE);
   }

   struct vn_sync_payload *temp = &sem->temporary;
   vn_sync_payload_release(dev, temp);
   temp->type = VN_SYNC_TYPE_IMPORTED_WIN32_HANDLE;
   temp->handle = handle;
   sem->payload = temp;

   return VK_SUCCESS;
}
/**
 * Export the payload of a semaphore as an opaque Win32 handle
 * (VK_KHR_external_semaphore_win32).
 *
 * On success, ownership of the returned handle is transferred to the caller
 * via *pHandle.  The semaphore falls back to its permanent payload.
 */
VKAPI_ATTR VkResult VKAPI_CALL
vn_GetSemaphoreWin32HandleKHR(VkDevice device,
                              const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                              HANDLE *pHandle)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_semaphore *sem =
      vn_semaphore_from_handle(pGetWin32HandleInfo->semaphore);
   /* ASSERTED: only read by the assert below; avoids an unused-variable
    * warning in NDEBUG builds
    */
   ASSERTED const bool is_handle =
      pGetWin32HandleInfo->handleType ==
      VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT;
   struct vn_sync_payload *payload = sem->payload;

   assert(is_handle);
   assert(dev->physical_device->renderer_sync_fd.semaphore_exportable);
   assert(dev->physical_device->renderer_sync_fd.semaphore_importable);

   HANDLE handle = NULL;
   if (payload->type == VN_SYNC_TYPE_DEVICE_ONLY) {
      VkResult result =
         vn_create_sync_handle(dev, &sem->external_payload, &handle);
      if (result != VK_SUCCESS)
         return vn_error(dev->instance, result);

      vn_wsi_sync_wait_handle(dev, handle);
   } else {
      assert(payload->type == VN_SYNC_TYPE_IMPORTED_WIN32_HANDLE);

      /* transfer ownership of the imported handle to save a dup */
      handle = payload->handle;
      payload->handle = NULL;
   }

   /* When payload->type is VN_SYNC_TYPE_IMPORTED_WIN32_HANDLE, the current
    * payload is from a prior temporary handle import.  The permanent
    * payload of the semaphore might be in signaled state.  So we do an
    * import here to ensure a later wait operation is legit.  With resourceId
    * 0, the renderer does a signaled sync_fd -1 payload import on the host
    * semaphore.
    */
   if (payload->type == VN_SYNC_TYPE_IMPORTED_WIN32_HANDLE) {
      const VkImportSemaphoreResourceInfoMESA res_info = {
         .sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_RESOURCE_INFO_MESA,
         .semaphore = pGetWin32HandleInfo->semaphore,
         .resourceId = 0,
      };
      vn_async_vkImportSemaphoreResourceMESA(dev->primary_ring, device,
                                             &res_info);
   }

   /* perform wait operation on the host semaphore */
   vn_async_vkWaitSemaphoreResourceMESA(dev->primary_ring, device,
                                        pGetWin32HandleInfo->semaphore);

   vn_sync_payload_release(dev, &sem->temporary);
   sem->payload = &sem->permanent;

   *pHandle = handle;

   return VK_SUCCESS;
}
#else
VKAPI_ATTR VkResult VKAPI_CALL
vn_ImportSemaphoreFdKHR(
VkDevice device, const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo)
@ -2494,6 +2749,7 @@ vn_GetSemaphoreFdKHR(VkDevice device,
*pFd = fd;
return VK_SUCCESS;
}
#endif
/* event commands */

View file

@ -72,15 +72,25 @@ enum vn_sync_type {
/* device object */
VN_SYNC_TYPE_DEVICE_ONLY,
#ifndef VK_USE_PLATFORM_WIN32_KHR
/* payload is an imported sync file */
VN_SYNC_TYPE_IMPORTED_SYNC_FD,
#else
/* payload is an imported Win32 event handle */
VN_SYNC_TYPE_IMPORTED_WIN32_HANDLE,
#endif
};
/* A fence/semaphore payload: either device-only or backed by an imported
 * platform synchronization primitive.
 */
struct vn_sync_payload {
   enum vn_sync_type type;
#ifndef VK_USE_PLATFORM_WIN32_KHR
   /* If type is VN_SYNC_TYPE_IMPORTED_SYNC_FD, fd is a sync file. */
   int fd;
#else
   /* If type is VN_SYNC_TYPE_IMPORTED_WIN32_HANDLE, handle is a Win32 event
    * handle.
    */
   void *handle;
#endif
};
/* For external fences and external semaphores submitted to be signaled. The

View file

@ -152,18 +152,37 @@ struct vn_renderer_bo_ops {
vn_object_id mem_id,
VkMemoryPropertyFlags flags,
VkExternalMemoryHandleTypeFlags external_handles,
const VkMemoryAllocateInfo *alloc_info,
struct vn_renderer_bo **out_bo);
#ifdef VK_USE_PLATFORM_WIN32_KHR
VkResult (*create_from_handle)(struct vn_renderer *renderer,
VkDeviceSize size,
/* externally allocated handles might not have a valid id */
vn_object_id mem_id,
bool is_kmt,
void *handle,
VkMemoryPropertyFlags flags,
const VkMemoryAllocateInfo *alloc_info,
struct vn_renderer_bo **out_bo);
#else
VkResult (*create_from_dma_buf)(struct vn_renderer *renderer,
VkDeviceSize size,
int fd,
VkMemoryPropertyFlags flags,
struct vn_renderer_bo **out_bo);
#endif
bool (*destroy)(struct vn_renderer *renderer, struct vn_renderer_bo *bo);
#ifdef VK_USE_PLATFORM_WIN32_KHR
void *(*export_handle)(struct vn_renderer *renderer,
struct vn_renderer_bo *bo,
bool is_kmt);
#else
int (*export_dma_buf)(struct vn_renderer *renderer,
struct vn_renderer_bo *bo);
#endif
int (*export_sync_file)(struct vn_renderer *renderer,
struct vn_renderer_bo *bo);
@ -194,16 +213,27 @@ struct vn_renderer_sync_ops {
uint32_t flags,
struct vn_renderer_sync **out_sync);
#ifdef VK_USE_PLATFORM_WIN32_KHR
VkResult (*create_from_handle)(struct vn_renderer *renderer,
void *handle,
struct vn_renderer_sync **out_sync);
#else
VkResult (*create_from_syncobj)(struct vn_renderer *renderer,
int fd,
bool sync_file,
struct vn_renderer_sync **out_sync);
#endif
void (*destroy)(struct vn_renderer *renderer,
struct vn_renderer_sync *sync);
#ifdef VK_USE_PLATFORM_WIN32_KHR
void *(*export_handle)(struct vn_renderer *renderer,
struct vn_renderer_sync *sync);
#else
int (*export_syncobj)(struct vn_renderer *renderer,
struct vn_renderer_sync *sync,
bool sync_file);
#endif
/* reset the counter */
VkResult (*reset)(struct vn_renderer *renderer,
@ -236,6 +266,14 @@ vn_renderer_create_virtgpu(struct vn_instance *instance,
struct vn_renderer **renderer);
#endif
#ifdef VK_USE_PLATFORM_WIN32_KHR
VkResult
vn_renderer_create_virtgpu_win32(struct vn_instance *instance,
const VkAllocationCallbacks *alloc,
const VkInstanceCreateInfo *pInfo,
struct vn_renderer **renderer);
#endif
VkResult
vn_renderer_create_vtest(struct vn_instance *instance,
const VkAllocationCallbacks *alloc,
@ -244,6 +282,7 @@ vn_renderer_create_vtest(struct vn_instance *instance,
static inline VkResult
vn_renderer_create(struct vn_instance *instance,
const VkAllocationCallbacks *alloc,
const VkInstanceCreateInfo *pCreateInfo,
struct vn_renderer **renderer)
{
#ifdef HAVE_LIBDRM
@ -254,6 +293,9 @@ vn_renderer_create(struct vn_instance *instance,
}
return vn_renderer_create_virtgpu(instance, alloc, renderer);
#elif defined(VK_USE_PLATFORM_WIN32_KHR)
return vn_renderer_create_virtgpu_win32(instance, alloc, pCreateInfo, renderer);
#else
return vn_renderer_create_vtest(instance, alloc, renderer);
#endif
@ -319,11 +361,12 @@ vn_renderer_bo_create_from_device_memory(
vn_object_id mem_id,
VkMemoryPropertyFlags flags,
VkExternalMemoryHandleTypeFlags external_handles,
const VkMemoryAllocateInfo *alloc_info,
struct vn_renderer_bo **out_bo)
{
struct vn_renderer_bo *bo;
VkResult result = renderer->bo_ops.create_from_device_memory(
renderer, size, mem_id, flags, external_handles, &bo);
renderer, size, mem_id, flags, external_handles, alloc_info, &bo);
if (result != VK_SUCCESS)
return result;
@ -335,6 +378,31 @@ vn_renderer_bo_create_from_device_memory(
return VK_SUCCESS;
}
#ifdef VK_USE_PLATFORM_WIN32_KHR
/* Wrap an externally allocated Win32 handle in a vn_renderer_bo via the
 * renderer backend.  On success the new bo is returned in *out_bo.
 */
static inline VkResult
vn_renderer_bo_create_from_handle(struct vn_renderer *renderer,
                                  VkDeviceSize size,
                                  vn_object_id mem_id,
                                  bool is_kmt,
                                  void *handle,
                                  VkMemoryPropertyFlags flags,
                                  const VkMemoryAllocateInfo *alloc_info,
                                  struct vn_renderer_bo **out_bo)
{
   struct vn_renderer_bo *bo;
   const VkResult result = renderer->bo_ops.create_from_handle(
      renderer, size, mem_id, is_kmt, handle, flags, alloc_info, &bo);
   if (result != VK_SUCCESS)
      return result;

   /* sanity-check what the backend handed back */
   assert(vn_refcount_is_valid(&bo->refcount));
   assert(bo->res_id);
   assert(!bo->mmap_size || bo->mmap_size >= size);

   *out_bo = bo;

   return VK_SUCCESS;
}
#else
static inline VkResult
vn_renderer_bo_create_from_dma_buf(struct vn_renderer *renderer,
VkDeviceSize size,
@ -355,6 +423,7 @@ vn_renderer_bo_create_from_dma_buf(struct vn_renderer *renderer,
*out_bo = bo;
return VK_SUCCESS;
}
#endif
static inline struct vn_renderer_bo *
vn_renderer_bo_ref(struct vn_renderer *renderer, struct vn_renderer_bo *bo)
@ -371,12 +440,22 @@ vn_renderer_bo_unref(struct vn_renderer *renderer, struct vn_renderer_bo *bo)
return false;
}
#ifdef VK_USE_PLATFORM_WIN32_KHR
/* Export bo as a Win32 shared handle (a KMT handle when is_kmt is set)
 * through the renderer backend.
 */
static inline void *
vn_renderer_bo_export_handle(struct vn_renderer *renderer,
                             struct vn_renderer_bo *bo,
                             bool is_kmt)
{
   void *const handle = renderer->bo_ops.export_handle(renderer, bo, is_kmt);
   return handle;
}
#else
/* Export bo as a dma-buf fd through the renderer backend. */
static inline int
vn_renderer_bo_export_dma_buf(struct vn_renderer *renderer,
                              struct vn_renderer_bo *bo)
{
   const int fd = renderer->bo_ops.export_dma_buf(renderer, bo);
   return fd;
}
#endif
static inline int
vn_renderer_bo_export_sync_file(struct vn_renderer *renderer,
@ -420,6 +499,15 @@ vn_renderer_sync_create(struct vn_renderer *renderer,
return renderer->sync_ops.create(renderer, initial_val, flags, out_sync);
}
#ifdef VK_USE_PLATFORM_WIN32_KHR
/* Create a vn_renderer_sync from an existing Win32 handle via the renderer
 * backend.
 */
static inline VkResult
vn_renderer_sync_create_from_handle(struct vn_renderer *renderer,
                                    void *handle,
                                    struct vn_renderer_sync **out_sync)
{
   const VkResult result =
      renderer->sync_ops.create_from_handle(renderer, handle, out_sync);
   return result;
}
#else
static inline VkResult
vn_renderer_sync_create_from_syncobj(struct vn_renderer *renderer,
int fd,
@ -429,6 +517,7 @@ vn_renderer_sync_create_from_syncobj(struct vn_renderer *renderer,
return renderer->sync_ops.create_from_syncobj(renderer, fd, sync_file,
out_sync);
}
#endif
static inline void
vn_renderer_sync_destroy(struct vn_renderer *renderer,
@ -437,6 +526,14 @@ vn_renderer_sync_destroy(struct vn_renderer *renderer,
renderer->sync_ops.destroy(renderer, sync);
}
#ifdef VK_USE_PLATFORM_WIN32_KHR
/* Export sync as a Win32 handle through the renderer backend. */
static inline void *
vn_renderer_sync_export_handle(struct vn_renderer *renderer,
                               struct vn_renderer_sync *sync)
{
   void *const handle = renderer->sync_ops.export_handle(renderer, sync);
   return handle;
}
#else
static inline int
vn_renderer_sync_export_syncobj(struct vn_renderer *renderer,
struct vn_renderer_sync *sync,
@ -444,6 +541,7 @@ vn_renderer_sync_export_syncobj(struct vn_renderer *renderer,
{
return renderer->sync_ops.export_syncobj(renderer, sync, sync_file);
}
#endif
static inline VkResult
vn_renderer_sync_reset(struct vn_renderer *renderer,

View file

@ -1302,6 +1302,7 @@ virtgpu_bo_create_from_device_memory(
vn_object_id mem_id,
VkMemoryPropertyFlags flags,
VkExternalMemoryHandleTypeFlags external_handles,
const VkMemoryAllocateInfo *alloc_info,
struct vn_renderer_bo **out_bo)
{
struct virtgpu *gpu = (struct virtgpu *)renderer;

File diff suppressed because it is too large Load diff

View file

@ -750,6 +750,7 @@ vtest_bo_create_from_device_memory(
vn_object_id mem_id,
VkMemoryPropertyFlags flags,
VkExternalMemoryHandleTypeFlags external_handles,
const VkMemoryAllocateInfo *alloc_info,
struct vn_renderer_bo **out_bo)
{
struct vtest *vtest = (struct vtest *)renderer;

View file

@ -19,6 +19,10 @@
#include "vn_physical_device.h"
#include "vn_queue.h"
#ifdef VK_USE_PLATFORM_WIN32_KHR
#include <windows.h>
#endif
#ifndef DRM_FORMAT_MOD_LINEAR
#define DRM_FORMAT_MOD_LINEAR 0
#endif
@ -251,6 +255,7 @@ vn_wsi_memory_info_init(struct vn_device_memory *mem,
}
}
#ifndef VK_USE_PLATFORM_WIN32_KHR
static uint32_t
vn_modifier_plane_count(struct vn_physical_device *physical_dev,
VkFormat format,
@ -292,11 +297,13 @@ vn_modifier_plane_count(struct vn_physical_device *physical_dev,
STACK_ARRAY_FINISH(modifier_props);
return plane_count;
}
#endif
bool
vn_wsi_validate_image_format_info(struct vn_physical_device *physical_dev,
const VkPhysicalDeviceImageFormatInfo2 *info)
{
#ifndef VK_USE_PLATFORM_WIN32_KHR
const VkPhysicalDeviceImageDrmFormatModifierInfoEXT *modifier_info =
vk_find_struct_const(
info->pNext, PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT);
@ -347,7 +354,7 @@ vn_wsi_validate_image_format_info(struct vn_physical_device *physical_dev,
return false;
}
}
#endif
return true;
}
@ -407,6 +414,7 @@ vn_wsi_fence_wait(struct vn_device *dev, struct vn_queue *queue)
return vn_ResetFences(dev_handle, 1, &queue->async_present.fence);
}
#ifndef VK_USE_PLATFORM_WIN32_KHR
void
vn_wsi_sync_wait(struct vn_device *dev, int fd)
{
@ -435,6 +443,36 @@ vn_wsi_sync_wait(struct vn_device *dev, int fd)
simple_mtx_lock(&queue->async_present.queue_mutex);
}
}
#else
/* Win32 counterpart of vn_wsi_sync_wait: block on a Win32 event handle when
 * the renderer does not provide implicit fencing.
 *
 * If the calling thread owns an initialized async-present queue, that
 * queue's mutex and the swapchain chain locks are released around the wait
 * so presentation can make progress, then re-acquired in reverse order.
 */
void
vn_wsi_sync_wait_handle(struct vn_device *dev, void *handle)
{
   /* with implicit fencing the renderer orders the work; no wait needed */
   if (dev->renderer->info.has_implicit_fencing)
      return;

   /* find the async-present queue owned by the calling thread, if any */
   const pid_t tid = vn_gettid();
   struct vn_queue *queue = NULL;
   for (uint32_t i = 0; i < dev->queue_count; i++) {
      if (dev->queues[i].async_present.initialized &&
          dev->queues[i].async_present.tid == tid) {
         queue = &dev->queues[i];
         break;
      }
   }

   /* drop the locks so the present path is not blocked while we wait */
   if (queue) {
      simple_mtx_unlock(&queue->async_present.queue_mutex);
      vn_wsi_chains_unlock(dev, queue->async_present.info, /*all=*/false);
   }

   /* NOTE(review): the WaitForSingleObject result is ignored, so wait
    * failures are treated as success — confirm this matches the sync_fd
    * path's error behavior
    */
   WaitForSingleObject(handle, INFINITE);

   /* re-acquire in the reverse order of release */
   if (queue) {
      vn_wsi_chains_lock(dev, queue->async_present.info, /*all=*/false);
      simple_mtx_lock(&queue->async_present.queue_mutex);
   }
}
#endif
void
vn_wsi_flush(struct vn_queue *queue)
@ -853,6 +891,33 @@ vn_AcquireNextImage2KHR(VkDevice device,
if (result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR)
return vn_error(dev->instance, result);
#ifdef VK_USE_PLATFORM_WIN32_KHR
/* XXX this relies on renderer side doing implicit fencing */
if (pAcquireInfo->semaphore != VK_NULL_HANDLE) {
const VkImportSemaphoreWin32HandleInfoKHR info = {
.sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
.semaphore = pAcquireInfo->semaphore,
.flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
.handle = CreateEventA(NULL, TRUE, TRUE, NULL),
};
//vn_log(dev->instance, "created handle %p", info.handle);
result = vn_ImportSemaphoreWin32HandleKHR(device, &info);
}
if (result == VK_SUCCESS && pAcquireInfo->fence != VK_NULL_HANDLE) {
const VkImportFenceWin32HandleInfoKHR info = {
.sType = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR,
.fence = pAcquireInfo->fence,
.flags = VK_FENCE_IMPORT_TEMPORARY_BIT,
.handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
.handle = CreateEventA(NULL, TRUE, TRUE, NULL),
};
//vn_log(dev->instance, "created handle %p", info.handle);
result = vn_ImportFenceWin32HandleKHR(device, &info);
}
#else
int sync_fd = -1;
if (!dev->renderer->info.has_implicit_fencing) {
VkDeviceMemory mem_handle =
@ -932,6 +997,8 @@ out:
close(sem_fd);
if (fence_fd >= 0)
close(fence_fd);
#endif
return vn_result(dev->instance, result);
}

View file

@ -42,8 +42,13 @@ vn_wsi_validate_image_format_info(
VkResult
vn_wsi_fence_wait(struct vn_device *dev, struct vn_queue *queue);
#ifndef VK_USE_PLATFORM_WIN32_KHR
void
vn_wsi_sync_wait(struct vn_device *dev, int fd);
#else
void
vn_wsi_sync_wait_handle(struct vn_device *dev, void *handle);
#endif
void
vn_wsi_flush(struct vn_queue *queue);
@ -91,7 +96,11 @@ vn_wsi_fence_wait(struct vn_device *dev, struct vn_queue *queue)
}
/* No-op stub used when WSI support is compiled out; the signature matches
 * whichever variant (sync fd or Win32 handle) the platform build declares.
 */
static inline void
#ifndef VK_USE_PLATFORM_WIN32_KHR
vn_wsi_sync_wait(struct vn_device *dev, int fd)
#else
vn_wsi_sync_wait_handle(struct vn_device *dev, void *handle)
#endif
{
   return;
}

View file

@ -248,6 +248,7 @@ vk_device_init(struct vk_device *device,
device->enabled_extensions.EXT_calibrated_timestamps) {
/* sorted by preference */
const VkTimeDomainKHR calibrate_domains[] = {
VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR,
VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR,
VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR,
};
@ -840,6 +841,14 @@ vk_device_get_timestamp(struct vk_device *device, VkTimeDomainKHR domain,
return VK_SUCCESS;
fail:
#else
if (domain == VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR) {
LARGE_INTEGER ts;
if (QueryPerformanceCounter(&ts)) {
*timestamp = ts.QuadPart;
return VK_SUCCESS;
}
}
#endif /* _WIN32 */
return VK_ERROR_FEATURE_NOT_PRESENT;
}

View file

@ -297,6 +297,7 @@ vk_common_GetPhysicalDeviceCalibrateableTimeDomainsKHR(
const VkTimeDomainKHR host_time_domains[] = {
VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR,
VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR,
VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR,
};
for (uint32_t i = 0; i < ARRAY_SIZE(host_time_domains); i++) {
const VkTimeDomainKHR domain = host_time_domains[i];

View file

@ -3257,7 +3257,8 @@ wsi_WaitForPresentKHR(VkDevice device, VkSwapchainKHR _swapchain,
uint64_t presentId, uint64_t timeout)
{
VK_FROM_HANDLE(wsi_swapchain, swapchain, _swapchain);
assert(swapchain->wait_for_present);
//assert(swapchain->wait_for_present);
if (!swapchain->wait_for_present) return VK_SUCCESS;
return swapchain->wait_for_present(swapchain, presentId, timeout);
}
@ -3266,7 +3267,8 @@ wsi_WaitForPresent2KHR(VkDevice device, VkSwapchainKHR _swapchain,
const VkPresentWait2InfoKHR *info)
{
VK_FROM_HANDLE(wsi_swapchain, swapchain, _swapchain);
assert(swapchain->wait_for_present2);
//assert(swapchain->wait_for_present2);
if (!swapchain->wait_for_present2) return VK_SUCCESS;
return swapchain->wait_for_present2(swapchain, info->presentId, info->timeout);
}