vk/0.210.0: Rework allocation to use the new pAllocators

commit fcfb404a58
parent d3547e7334
Author: Jason Ekstrand
Date:   2015-12-02 03:28:27 -08:00

21 changed files with 669 additions and 520 deletions
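
This commit replaces the old VkAllocCallbacks scheme (a single pfnAlloc/pfnFree pair hung off VkInstanceCreateInfo::pAllocCb) with the 0.210.0-style VkAllocationCallbacks structure, passed as an explicit pAllocator argument to every create/destroy entry point. For reference, a minimal application-side sketch of the new interface; the my_* names are illustrative, and, like the driver's own default_alloc_func further down in this diff, the callbacks simply forward to malloc/realloc/free and ignore alignment and scope:

#include <stdlib.h>
#include <vulkan/vulkan.h>

/* Application callbacks matching the new PFN_vkAllocationFunction,
 * PFN_vkReallocationFunction and PFN_vkFreeFunction typedefs. */
static void *my_alloc(void *pUserData, size_t size, size_t alignment,
                      VkSystemAllocationScope allocationScope)
{
   return malloc(size);
}

static void *my_realloc(void *pUserData, void *pOriginal, size_t size,
                        size_t alignment, VkSystemAllocationScope allocationScope)
{
   return realloc(pOriginal, size);
}

static void my_free(void *pUserData, void *pMemory)
{
   free(pMemory);
}

static const VkAllocationCallbacks my_alloc_cbs = {
   .pUserData = NULL,
   .pfnAllocation = my_alloc,
   .pfnReallocation = my_realloc,
   .pfnFree = my_free,
   /* pfnInternalAllocation/pfnInternalFree are optional notification
    * callbacks and may be left NULL. */
};

static void create_and_destroy_instance(void)
{
   const VkApplicationInfo app = {
      .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
      .apiVersion = VK_MAKE_VERSION(0, 210, 0),  /* illustrative value */
   };
   const VkInstanceCreateInfo info = {
      .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
      .pApplicationInfo = &app,
   };
   VkInstance instance;

   /* The allocator is now a per-call parameter; passing NULL instead of
    * &my_alloc_cbs falls back to the implementation's default allocator. */
   if (vkCreateInstance(&info, &my_alloc_cbs, &instance) != VK_SUCCESS)
      return;

   vkDestroyInstance(instance, &my_alloc_cbs);
}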

include/vulkan/vulkan.h

@ -133,7 +133,7 @@ typedef enum VkResult {
typedef enum VkStructureType {
VK_STRUCTURE_TYPE_APPLICATION_INFO = 0,
VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO = 1,
VK_STRUCTURE_TYPE_MEMORY_ALLOC_INFO = 2,
VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO = 2,
VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO = 3,
VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO = 4,
VK_STRUCTURE_TYPE_SHADER_CREATE_INFO = 5,
@ -185,17 +185,25 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF
} VkStructureType;
typedef enum {
VK_SYSTEM_ALLOC_TYPE_API_OBJECT = 0,
VK_SYSTEM_ALLOC_TYPE_INTERNAL = 1,
VK_SYSTEM_ALLOC_TYPE_INTERNAL_TEMP = 2,
VK_SYSTEM_ALLOC_TYPE_INTERNAL_SHADER = 3,
VK_SYSTEM_ALLOC_TYPE_DEBUG = 4,
VK_SYSTEM_ALLOC_TYPE_BEGIN_RANGE = VK_SYSTEM_ALLOC_TYPE_API_OBJECT,
VK_SYSTEM_ALLOC_TYPE_END_RANGE = VK_SYSTEM_ALLOC_TYPE_DEBUG,
VK_SYSTEM_ALLOC_TYPE_NUM = (VK_SYSTEM_ALLOC_TYPE_DEBUG - VK_SYSTEM_ALLOC_TYPE_API_OBJECT + 1),
VK_SYSTEM_ALLOC_TYPE_MAX_ENUM = 0x7FFFFFFF
} VkSystemAllocType;
typedef enum VkSystemAllocationScope {
VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1,
VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2,
VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3,
VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4,
VK_SYSTEM_ALLOCATION_SCOPE_BEGIN_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
VK_SYSTEM_ALLOCATION_SCOPE_END_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE,
VK_SYSTEM_ALLOCATION_SCOPE_RANGE_SIZE = (VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE - VK_SYSTEM_ALLOCATION_SCOPE_COMMAND + 1),
VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF
} VkSystemAllocationScope;
typedef enum VkInternalAllocationType {
VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0,
VK_INTERNAL_ALLOCATION_TYPE_BEGIN_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE,
VK_INTERNAL_ALLOCATION_TYPE_END_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE,
VK_INTERNAL_ALLOCATION_TYPE_RANGE_SIZE = (VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE - VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + 1),
VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
} VkInternalAllocationType;
typedef enum VkFormat {
VK_FORMAT_UNDEFINED = 0,
@ -1066,15 +1074,34 @@ typedef enum {
} VkQueryControlFlagBits;
typedef VkFlags VkQueryControlFlags;
typedef void* (VKAPI_PTR *PFN_vkAllocFunction)(
void* pUserData,
size_t size,
size_t alignment,
VkSystemAllocType allocType);
typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)(
void* pUserData,
size_t size,
size_t alignment,
VkSystemAllocationScope allocationScope);
typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)(
void* pUserData,
void* pOriginal,
size_t size,
size_t alignment,
VkSystemAllocationScope allocationScope);
typedef void (VKAPI_PTR *PFN_vkFreeFunction)(
void* pUserData,
void* pMem);
void* pUserData,
void* pMemory);
typedef void (VKAPI_PTR *PFN_vkInternalAllocationNotification)(
void* pUserData,
size_t size,
VkInternalAllocationType allocationType,
VkSystemAllocationScope allocationScope);
typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)(
void* pUserData,
size_t size,
VkInternalAllocationType allocationType,
VkSystemAllocationScope allocationScope);
typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void);
@ -1088,24 +1115,26 @@ typedef struct VkApplicationInfo {
uint32_t apiVersion;
} VkApplicationInfo;
typedef struct {
void* pUserData;
PFN_vkAllocFunction pfnAlloc;
PFN_vkFreeFunction pfnFree;
} VkAllocCallbacks;
typedef struct VkInstanceCreateInfo {
VkStructureType sType;
const void* pNext;
VkInstanceCreateFlags flags;
const VkApplicationInfo* pApplicationInfo;
const VkAllocCallbacks* pAllocCb;
uint32_t enabledLayerNameCount;
const char* const* ppEnabledLayerNames;
uint32_t enabledExtensionNameCount;
const char* const* ppEnabledExtensionNames;
} VkInstanceCreateInfo;
typedef struct VkAllocationCallbacks {
void* pUserData;
PFN_vkAllocationFunction pfnAllocation;
PFN_vkReallocationFunction pfnReallocation;
PFN_vkFreeFunction pfnFree;
PFN_vkInternalAllocationNotification pfnInternalAllocation;
PFN_vkInternalFreeNotification pfnInternalFree;
} VkAllocationCallbacks;
typedef struct VkPhysicalDeviceFeatures {
VkBool32 robustBufferAccess;
VkBool32 fullDrawIndexUint32;
@ -1356,12 +1385,12 @@ typedef struct VkLayerProperties {
char description[VK_MAX_DESCRIPTION_SIZE];
} VkLayerProperties;
typedef struct {
typedef struct VkMemoryAllocateInfo {
VkStructureType sType;
const void* pNext;
VkDeviceSize allocationSize;
uint32_t memoryTypeIndex;
} VkMemoryAllocInfo;
} VkMemoryAllocateInfo;
typedef struct VkMappedMemoryRange {
VkStructureType sType;
@ -2124,8 +2153,8 @@ typedef struct VkMemoryBarrier {
} VkMemoryBarrier;
typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, VkInstance* pInstance);
typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance);
typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance);
typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices);
typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures);
typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties);
@ -2135,8 +2164,8 @@ typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties)(VkPhysica
typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties);
typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddr)(VkInstance instance, const char* pName);
typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetDeviceProcAddr)(VkDevice device, const char* pName);
typedef VkResult (VKAPI_PTR *PFN_vkCreateDevice)(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, VkDevice* pDevice);
typedef void (VKAPI_PTR *PFN_vkDestroyDevice)(VkDevice device);
typedef VkResult (VKAPI_PTR *PFN_vkCreateDevice)(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice);
typedef void (VKAPI_PTR *PFN_vkDestroyDevice)(VkDevice device, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceExtensionProperties)(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties);
typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceExtensionProperties)(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties);
typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceLayerProperties)(uint32_t* pPropertyCount, VkLayerProperties* pProperties);
@ -2145,8 +2174,8 @@ typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue)(VkDevice device, uint32_t queueFa
typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit)(VkQueue queue, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers, VkFence fence);
typedef VkResult (VKAPI_PTR *PFN_vkQueueWaitIdle)(VkQueue queue);
typedef VkResult (VKAPI_PTR *PFN_vkDeviceWaitIdle)(VkDevice device);
typedef VkResult (VKAPI_PTR *PFN_vkAllocMemory)(VkDevice device, const VkMemoryAllocInfo* pAllocInfo, VkDeviceMemory* pMem);
typedef void (VKAPI_PTR *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory mem);
typedef VkResult (VKAPI_PTR *PFN_vkAllocateMemory)(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory);
typedef void (VKAPI_PTR *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData);
typedef void (VKAPI_PTR *PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory);
typedef VkResult (VKAPI_PTR *PFN_vkFlushMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges);
@ -2159,63 +2188,63 @@ typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements)(VkDevice device, VkIm
typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements)(VkDevice device, VkImage image, uint32_t* pNumRequirements, VkSparseImageMemoryRequirements* pSparseMemoryRequirements);
typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties);
typedef VkResult (VKAPI_PTR *PFN_vkQueueBindSparse)(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence);
typedef VkResult (VKAPI_PTR *PFN_vkCreateFence)(VkDevice device, const VkFenceCreateInfo* pCreateInfo, VkFence* pFence);
typedef void (VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence);
typedef VkResult (VKAPI_PTR *PFN_vkCreateFence)(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence);
typedef void (VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkResetFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences);
typedef VkResult (VKAPI_PTR *PFN_vkGetFenceStatus)(VkDevice device, VkFence fence);
typedef VkResult (VKAPI_PTR *PFN_vkWaitForFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout);
typedef VkResult (VKAPI_PTR *PFN_vkCreateSemaphore)(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, VkSemaphore* pSemaphore);
typedef void (VKAPI_PTR *PFN_vkDestroySemaphore)(VkDevice device, VkSemaphore semaphore);
typedef VkResult (VKAPI_PTR *PFN_vkCreateSemaphore)(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore);
typedef void (VKAPI_PTR *PFN_vkDestroySemaphore)(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkQueueSignalSemaphore)(VkQueue queue, VkSemaphore semaphore);
typedef VkResult (VKAPI_PTR *PFN_vkQueueWaitSemaphore)(VkQueue queue, VkSemaphore semaphore);
typedef VkResult (VKAPI_PTR *PFN_vkCreateEvent)(VkDevice device, const VkEventCreateInfo* pCreateInfo, VkEvent* pEvent);
typedef void (VKAPI_PTR *PFN_vkDestroyEvent)(VkDevice device, VkEvent event);
typedef VkResult (VKAPI_PTR *PFN_vkCreateEvent)(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent);
typedef void (VKAPI_PTR *PFN_vkDestroyEvent)(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkGetEventStatus)(VkDevice device, VkEvent event);
typedef VkResult (VKAPI_PTR *PFN_vkSetEvent)(VkDevice device, VkEvent event);
typedef VkResult (VKAPI_PTR *PFN_vkResetEvent)(VkDevice device, VkEvent event);
typedef VkResult (VKAPI_PTR *PFN_vkCreateQueryPool)(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, VkQueryPool* pQueryPool);
typedef void (VKAPI_PTR *PFN_vkDestroyQueryPool)(VkDevice device, VkQueryPool queryPool);
typedef VkResult (VKAPI_PTR *PFN_vkCreateQueryPool)(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool);
typedef void (VKAPI_PTR *PFN_vkDestroyQueryPool)(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkGetQueryPoolResults)(VkDevice device, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, size_t* pDataSize, void* pData, VkQueryResultFlags flags);
typedef VkResult (VKAPI_PTR *PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo* pCreateInfo, VkBuffer* pBuffer);
typedef void (VKAPI_PTR *PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer);
typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferView)(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, VkBufferView* pView);
typedef void (VKAPI_PTR *PFN_vkDestroyBufferView)(VkDevice device, VkBufferView bufferView);
typedef VkResult (VKAPI_PTR *PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo* pCreateInfo, VkImage* pImage);
typedef void (VKAPI_PTR *PFN_vkDestroyImage)(VkDevice device, VkImage image);
typedef VkResult (VKAPI_PTR *PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer);
typedef void (VKAPI_PTR *PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferView)(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView);
typedef void (VKAPI_PTR *PFN_vkDestroyBufferView)(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage);
typedef void (VKAPI_PTR *PFN_vkDestroyImage)(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator);
typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout)(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout);
typedef VkResult (VKAPI_PTR *PFN_vkCreateImageView)(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, VkImageView* pView);
typedef void (VKAPI_PTR *PFN_vkDestroyImageView)(VkDevice device, VkImageView imageView);
typedef VkResult (VKAPI_PTR *PFN_vkCreateShaderModule)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, VkShaderModule* pShaderModule);
typedef void (VKAPI_PTR *PFN_vkDestroyShaderModule)(VkDevice device, VkShaderModule shaderModule);
typedef VkResult (VKAPI_PTR *PFN_vkCreateImageView)(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView);
typedef void (VKAPI_PTR *PFN_vkDestroyImageView)(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkCreateShaderModule)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule);
typedef void (VKAPI_PTR *PFN_vkDestroyShaderModule)(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkCreateShader)(VkDevice device, const VkShaderCreateInfo* pCreateInfo, VkShader* pShader);
typedef void (VKAPI_PTR *PFN_vkDestroyShader)(VkDevice device, VkShader shader);
typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineCache)(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, VkPipelineCache* pPipelineCache);
typedef void (VKAPI_PTR *PFN_vkDestroyPipelineCache)(VkDevice device, VkPipelineCache pipelineCache);
typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineCache)(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache);
typedef void (VKAPI_PTR *PFN_vkDestroyPipelineCache)(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator);
typedef size_t (VKAPI_PTR *PFN_vkGetPipelineCacheSize)(VkDevice device, VkPipelineCache pipelineCache);
typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineCacheData)(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData);
typedef VkResult (VKAPI_PTR *PFN_vkMergePipelineCaches)(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches);
typedef VkResult (VKAPI_PTR *PFN_vkCreateGraphicsPipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkGraphicsPipelineCreateInfo* pCreateInfos, VkPipeline* pPipelines);
typedef VkResult (VKAPI_PTR *PFN_vkCreateComputePipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkComputePipelineCreateInfo* pCreateInfos, VkPipeline* pPipelines);
typedef void (VKAPI_PTR *PFN_vkDestroyPipeline)(VkDevice device, VkPipeline pipeline);
typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineLayout)(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, VkPipelineLayout* pPipelineLayout);
typedef void (VKAPI_PTR *PFN_vkDestroyPipelineLayout)(VkDevice device, VkPipelineLayout pipelineLayout);
typedef VkResult (VKAPI_PTR *PFN_vkCreateSampler)(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, VkSampler* pSampler);
typedef void (VKAPI_PTR *PFN_vkDestroySampler)(VkDevice device, VkSampler sampler);
typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorSetLayout)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayout* pSetLayout);
typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorSetLayout)(VkDevice device, VkDescriptorSetLayout descriptorSetLayout);
typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorPool)(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, VkDescriptorPool* pDescriptorPool);
typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool);
typedef VkResult (VKAPI_PTR *PFN_vkCreateGraphicsPipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
typedef VkResult (VKAPI_PTR *PFN_vkCreateComputePipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
typedef void (VKAPI_PTR *PFN_vkDestroyPipeline)(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineLayout)(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout);
typedef void (VKAPI_PTR *PFN_vkDestroyPipelineLayout)(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkCreateSampler)(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler);
typedef void (VKAPI_PTR *PFN_vkDestroySampler)(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorSetLayout)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout);
typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorSetLayout)(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorPool)(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool);
typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkResetDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags);
typedef VkResult (VKAPI_PTR *PFN_vkAllocateDescriptorSets)(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets);
typedef VkResult (VKAPI_PTR *PFN_vkFreeDescriptorSets)(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets);
typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSets)(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies);
typedef VkResult (VKAPI_PTR *PFN_vkCreateFramebuffer)(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, VkFramebuffer* pFramebuffer);
typedef void (VKAPI_PTR *PFN_vkDestroyFramebuffer)(VkDevice device, VkFramebuffer framebuffer);
typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass)(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, VkRenderPass* pRenderPass);
typedef void (VKAPI_PTR *PFN_vkDestroyRenderPass)(VkDevice device, VkRenderPass renderPass);
typedef VkResult (VKAPI_PTR *PFN_vkCreateFramebuffer)(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer);
typedef void (VKAPI_PTR *PFN_vkDestroyFramebuffer)(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass)(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass);
typedef void (VKAPI_PTR *PFN_vkDestroyRenderPass)(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator);
typedef void (VKAPI_PTR *PFN_vkGetRenderAreaGranularity)(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity);
typedef VkResult (VKAPI_PTR *PFN_vkCreateCommandPool)(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, VkCommandPool* pCommandPool);
typedef void (VKAPI_PTR *PFN_vkDestroyCommandPool)(VkDevice device, VkCommandPool commandPool);
typedef VkResult (VKAPI_PTR *PFN_vkCreateCommandPool)(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool);
typedef void (VKAPI_PTR *PFN_vkDestroyCommandPool)(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkResetCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags);
typedef VkResult (VKAPI_PTR *PFN_vkCreateCommandBuffer)(VkDevice device, const VkCommandBufferCreateInfo* pCreateInfo, VkCommandBuffer* pCommandBuffer);
typedef void (VKAPI_PTR *PFN_vkDestroyCommandBuffer)(VkDevice device, VkCommandBuffer commandBuffer);
@ -2270,10 +2299,12 @@ typedef void (VKAPI_PTR *PFN_vkCmdExecuteCommands)(VkCommandBuffer commandBuffer
#ifdef VK_PROTOTYPES
VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(
const VkInstanceCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkInstance* pInstance);
VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(
VkInstance instance);
VkInstance instance,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices(
VkInstance instance,
@ -2322,10 +2353,12 @@ VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(
VkPhysicalDevice physicalDevice,
const VkDeviceCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkDevice* pDevice);
VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(
VkDevice device);
VkDevice device,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(
const char* pLayerName,
@ -2365,14 +2398,16 @@ VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(
VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(
VkDevice device);
VKAPI_ATTR VkResult VKAPI_CALL vkAllocMemory(
VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory(
VkDevice device,
const VkMemoryAllocInfo* pAllocInfo,
VkDeviceMemory* pMem);
const VkMemoryAllocateInfo* pAllocateInfo,
const VkAllocationCallbacks* pAllocator,
VkDeviceMemory* pMemory);
VKAPI_ATTR void VKAPI_CALL vkFreeMemory(
VkDevice device,
VkDeviceMemory mem);
VkDeviceMemory memory,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory(
VkDevice device,
@ -2448,11 +2483,13 @@ VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse(
VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence(
VkDevice device,
const VkFenceCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkFence* pFence);
VKAPI_ATTR void VKAPI_CALL vkDestroyFence(
VkDevice device,
VkFence fence);
VkFence fence,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkResetFences(
VkDevice device,
@ -2473,11 +2510,13 @@ VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences(
VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore(
VkDevice device,
const VkSemaphoreCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkSemaphore* pSemaphore);
VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore(
VkDevice device,
VkSemaphore semaphore);
VkSemaphore semaphore,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkQueueSignalSemaphore(
VkQueue queue,
@ -2490,11 +2529,13 @@ VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitSemaphore(
VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent(
VkDevice device,
const VkEventCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkEvent* pEvent);
VKAPI_ATTR void VKAPI_CALL vkDestroyEvent(
VkDevice device,
VkEvent event);
VkEvent event,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus(
VkDevice device,
@ -2511,11 +2552,13 @@ VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent(
VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool(
VkDevice device,
const VkQueryPoolCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkQueryPool* pQueryPool);
VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool(
VkDevice device,
VkQueryPool queryPool);
VkQueryPool queryPool,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults(
VkDevice device,
@ -2529,29 +2572,35 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults(
VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer(
VkDevice device,
const VkBufferCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkBuffer* pBuffer);
VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer(
VkDevice device,
VkBuffer buffer);
VkBuffer buffer,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView(
VkDevice device,
const VkBufferViewCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkBufferView* pView);
VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView(
VkDevice device,
VkBufferView bufferView);
VkBufferView bufferView,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage(
VkDevice device,
const VkImageCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkImage* pImage);
VKAPI_ATTR void VKAPI_CALL vkDestroyImage(
VkDevice device,
VkImage image);
VkImage image,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout(
VkDevice device,
@ -2562,20 +2611,24 @@ VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout(
VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView(
VkDevice device,
const VkImageViewCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkImageView* pView);
VKAPI_ATTR void VKAPI_CALL vkDestroyImageView(
VkDevice device,
VkImageView imageView);
VkImageView imageView,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule(
VkDevice device,
const VkShaderModuleCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkShaderModule* pShaderModule);
VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule(
VkDevice device,
VkShaderModule shaderModule);
VkShaderModule shaderModule,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkCreateShader(
VkDevice device,
@ -2589,11 +2642,13 @@ VKAPI_ATTR void VKAPI_CALL vkDestroyShader(
VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache(
VkDevice device,
const VkPipelineCacheCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkPipelineCache* pPipelineCache);
VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache(
VkDevice device,
VkPipelineCache pipelineCache);
VkPipelineCache pipelineCache,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR size_t VKAPI_CALL vkGetPipelineCacheSize(
VkDevice device,
@ -2614,56 +2669,67 @@ VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches(
VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines(
VkDevice device,
VkPipelineCache pipelineCache,
uint32_t count,
uint32_t createInfoCount,
const VkGraphicsPipelineCreateInfo* pCreateInfos,
const VkAllocationCallbacks* pAllocator,
VkPipeline* pPipelines);
VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines(
VkDevice device,
VkPipelineCache pipelineCache,
uint32_t count,
uint32_t createInfoCount,
const VkComputePipelineCreateInfo* pCreateInfos,
const VkAllocationCallbacks* pAllocator,
VkPipeline* pPipelines);
VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline(
VkDevice device,
VkPipeline pipeline);
VkPipeline pipeline,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout(
VkDevice device,
const VkPipelineLayoutCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkPipelineLayout* pPipelineLayout);
VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout(
VkDevice device,
VkPipelineLayout pipelineLayout);
VkPipelineLayout pipelineLayout,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler(
VkDevice device,
const VkSamplerCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkSampler* pSampler);
VKAPI_ATTR void VKAPI_CALL vkDestroySampler(
VkDevice device,
VkSampler sampler);
VkSampler sampler,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout(
VkDevice device,
const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkDescriptorSetLayout* pSetLayout);
VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout(
VkDevice device,
VkDescriptorSetLayout descriptorSetLayout);
VkDescriptorSetLayout descriptorSetLayout,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool(
VkDevice device,
const VkDescriptorPoolCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkDescriptorPool* pDescriptorPool);
VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool(
VkDevice device,
VkDescriptorPool descriptorPool);
VkDescriptorPool descriptorPool,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool(
VkDevice device,
@ -2691,20 +2757,24 @@ VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets(
VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer(
VkDevice device,
const VkFramebufferCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkFramebuffer* pFramebuffer);
VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer(
VkDevice device,
VkFramebuffer framebuffer);
VkFramebuffer framebuffer,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass(
VkDevice device,
const VkRenderPassCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkRenderPass* pRenderPass);
VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass(
VkDevice device,
VkRenderPass renderPass);
VkRenderPass renderPass,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity(
VkDevice device,
@ -2714,11 +2784,13 @@ VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity(
VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool(
VkDevice device,
const VkCommandPoolCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkCommandPool* pCommandPool);
VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool(
VkDevice device,
VkCommandPool commandPool);
VkCommandPool commandPool,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool(
VkDevice device,

include/vulkan/vulkan_intel.h

@ -42,13 +42,14 @@ typedef struct VkDmaBufImageCreateInfo_
uint32_t strideInBytes;
} VkDmaBufImageCreateInfo;
typedef VkResult (VKAPI_PTR *PFN_vkCreateDmaBufImageINTEL)(VkDevice device, const VkDmaBufImageCreateInfo* pCreateInfo, VkDeviceMemory* pMem, VkImage* pImage);
typedef VkResult (VKAPI_PTR *PFN_vkCreateDmaBufImageINTEL)(VkDevice device, const VkDmaBufImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMem, VkImage* pImage);
#ifdef VK_PROTOTYPES
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDmaBufImageINTEL(
VkDevice _device,
const VkDmaBufImageCreateInfo* pCreateInfo,
const VkDmaBufImageCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkDeviceMemory* pMem,
VkImage* pImage);

src/vulkan/anv_batch_chain.c

@ -47,7 +47,7 @@
static VkResult
anv_reloc_list_init_clone(struct anv_reloc_list *list,
struct anv_device *device,
const VkAllocationCallbacks *alloc,
const struct anv_reloc_list *other_list)
{
if (other_list) {
@ -59,18 +59,18 @@ anv_reloc_list_init_clone(struct anv_reloc_list *list,
}
list->relocs =
anv_device_alloc(device, list->array_length * sizeof(*list->relocs), 8,
VK_SYSTEM_ALLOC_TYPE_INTERNAL);
anv_alloc(alloc, list->array_length * sizeof(*list->relocs), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (list->relocs == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
list->reloc_bos =
anv_device_alloc(device, list->array_length * sizeof(*list->reloc_bos), 8,
VK_SYSTEM_ALLOC_TYPE_INTERNAL);
anv_alloc(alloc, list->array_length * sizeof(*list->reloc_bos), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (list->reloc_bos == NULL) {
anv_device_free(device, list->relocs);
anv_free(alloc, list->relocs);
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
}
@ -85,20 +85,23 @@ anv_reloc_list_init_clone(struct anv_reloc_list *list,
}
VkResult
anv_reloc_list_init(struct anv_reloc_list *list, struct anv_device *device)
anv_reloc_list_init(struct anv_reloc_list *list,
const VkAllocationCallbacks *alloc)
{
return anv_reloc_list_init_clone(list, device, NULL);
return anv_reloc_list_init_clone(list, alloc, NULL);
}
void
anv_reloc_list_finish(struct anv_reloc_list *list, struct anv_device *device)
anv_reloc_list_finish(struct anv_reloc_list *list,
const VkAllocationCallbacks *alloc)
{
anv_device_free(device, list->relocs);
anv_device_free(device, list->reloc_bos);
anv_free(alloc, list->relocs);
anv_free(alloc, list->reloc_bos);
}
static VkResult
anv_reloc_list_grow(struct anv_reloc_list *list, struct anv_device *device,
anv_reloc_list_grow(struct anv_reloc_list *list,
const VkAllocationCallbacks *alloc,
size_t num_additional_relocs)
{
if (list->num_relocs + num_additional_relocs <= list->array_length)
@ -109,16 +112,16 @@ anv_reloc_list_grow(struct anv_reloc_list *list, struct anv_device *device,
new_length *= 2;
struct drm_i915_gem_relocation_entry *new_relocs =
anv_device_alloc(device, new_length * sizeof(*list->relocs), 8,
VK_SYSTEM_ALLOC_TYPE_INTERNAL);
anv_alloc(alloc, new_length * sizeof(*list->relocs), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (new_relocs == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
struct anv_bo **new_reloc_bos =
anv_device_alloc(device, new_length * sizeof(*list->reloc_bos), 8,
VK_SYSTEM_ALLOC_TYPE_INTERNAL);
anv_alloc(alloc, new_length * sizeof(*list->reloc_bos), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (new_relocs == NULL) {
anv_device_free(device, new_relocs);
anv_free(alloc, new_relocs);
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
}
@ -126,8 +129,8 @@ anv_reloc_list_grow(struct anv_reloc_list *list, struct anv_device *device,
memcpy(new_reloc_bos, list->reloc_bos,
list->num_relocs * sizeof(*list->reloc_bos));
anv_device_free(device, list->relocs);
anv_device_free(device, list->reloc_bos);
anv_free(alloc, list->relocs);
anv_free(alloc, list->reloc_bos);
list->array_length = new_length;
list->relocs = new_relocs;
@ -137,13 +140,14 @@ anv_reloc_list_grow(struct anv_reloc_list *list, struct anv_device *device,
}
uint64_t
anv_reloc_list_add(struct anv_reloc_list *list, struct anv_device *device,
anv_reloc_list_add(struct anv_reloc_list *list,
const VkAllocationCallbacks *alloc,
uint32_t offset, struct anv_bo *target_bo, uint32_t delta)
{
struct drm_i915_gem_relocation_entry *entry;
int index;
anv_reloc_list_grow(list, device, 1);
anv_reloc_list_grow(list, alloc, 1);
/* TODO: Handle failure */
/* XXX: Can we use I915_EXEC_HANDLE_LUT? */
@ -161,10 +165,11 @@ anv_reloc_list_add(struct anv_reloc_list *list, struct anv_device *device,
}
static void
anv_reloc_list_append(struct anv_reloc_list *list, struct anv_device *device,
anv_reloc_list_append(struct anv_reloc_list *list,
const VkAllocationCallbacks *alloc,
struct anv_reloc_list *other, uint32_t offset)
{
anv_reloc_list_grow(list, device, other->num_relocs);
anv_reloc_list_grow(list, alloc, other->num_relocs);
/* TODO: Handle failure */
memcpy(&list->relocs[list->num_relocs], &other->relocs[0],
@ -200,7 +205,7 @@ uint64_t
anv_batch_emit_reloc(struct anv_batch *batch,
void *location, struct anv_bo *bo, uint32_t delta)
{
return anv_reloc_list_add(batch->relocs, batch->device,
return anv_reloc_list_add(batch->relocs, batch->alloc,
location - batch->start, bo, delta);
}
@ -221,7 +226,7 @@ anv_batch_emit_batch(struct anv_batch *batch, struct anv_batch *other)
memcpy(batch->next, other->start, size);
offset = batch->next - batch->start;
anv_reloc_list_append(batch->relocs, batch->device,
anv_reloc_list_append(batch->relocs, batch->alloc,
other->relocs, offset);
batch->next += size;
@ -232,20 +237,21 @@ anv_batch_emit_batch(struct anv_batch *batch, struct anv_batch *other)
*-----------------------------------------------------------------------*/
static VkResult
anv_batch_bo_create(struct anv_device *device, struct anv_batch_bo **bbo_out)
anv_batch_bo_create(struct anv_cmd_buffer *cmd_buffer,
struct anv_batch_bo **bbo_out)
{
VkResult result;
struct anv_batch_bo *bbo =
anv_device_alloc(device, sizeof(*bbo), 8, VK_SYSTEM_ALLOC_TYPE_INTERNAL);
struct anv_batch_bo *bbo = anv_alloc(&cmd_buffer->pool->alloc, sizeof(*bbo),
8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (bbo == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
result = anv_bo_pool_alloc(&device->batch_bo_pool, &bbo->bo);
result = anv_bo_pool_alloc(&cmd_buffer->device->batch_bo_pool, &bbo->bo);
if (result != VK_SUCCESS)
goto fail_alloc;
result = anv_reloc_list_init(&bbo->relocs, device);
result = anv_reloc_list_init(&bbo->relocs, &cmd_buffer->pool->alloc);
if (result != VK_SUCCESS)
goto fail_bo_alloc;
@ -254,30 +260,31 @@ anv_batch_bo_create(struct anv_device *device, struct anv_batch_bo **bbo_out)
return VK_SUCCESS;
fail_bo_alloc:
anv_bo_pool_free(&device->batch_bo_pool, &bbo->bo);
anv_bo_pool_free(&cmd_buffer->device->batch_bo_pool, &bbo->bo);
fail_alloc:
anv_device_free(device, bbo);
anv_free(&cmd_buffer->pool->alloc, bbo);
return result;
}
static VkResult
anv_batch_bo_clone(struct anv_device *device,
anv_batch_bo_clone(struct anv_cmd_buffer *cmd_buffer,
const struct anv_batch_bo *other_bbo,
struct anv_batch_bo **bbo_out)
{
VkResult result;
struct anv_batch_bo *bbo =
anv_device_alloc(device, sizeof(*bbo), 8, VK_SYSTEM_ALLOC_TYPE_INTERNAL);
struct anv_batch_bo *bbo = anv_alloc(&cmd_buffer->pool->alloc, sizeof(*bbo),
8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (bbo == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
result = anv_bo_pool_alloc(&device->batch_bo_pool, &bbo->bo);
result = anv_bo_pool_alloc(&cmd_buffer->device->batch_bo_pool, &bbo->bo);
if (result != VK_SUCCESS)
goto fail_alloc;
result = anv_reloc_list_init_clone(&bbo->relocs, device, &other_bbo->relocs);
result = anv_reloc_list_init_clone(&bbo->relocs, &cmd_buffer->pool->alloc,
&other_bbo->relocs);
if (result != VK_SUCCESS)
goto fail_bo_alloc;
@ -291,9 +298,9 @@ anv_batch_bo_clone(struct anv_device *device,
return VK_SUCCESS;
fail_bo_alloc:
anv_bo_pool_free(&device->batch_bo_pool, &bbo->bo);
anv_bo_pool_free(&cmd_buffer->device->batch_bo_pool, &bbo->bo);
fail_alloc:
anv_device_free(device, bbo);
anv_free(&cmd_buffer->pool->alloc, bbo);
return result;
}
@ -328,15 +335,17 @@ anv_batch_bo_finish(struct anv_batch_bo *bbo, struct anv_batch *batch)
}
static void
anv_batch_bo_destroy(struct anv_batch_bo *bbo, struct anv_device *device)
anv_batch_bo_destroy(struct anv_batch_bo *bbo,
struct anv_cmd_buffer *cmd_buffer)
{
anv_reloc_list_finish(&bbo->relocs, device);
anv_bo_pool_free(&device->batch_bo_pool, &bbo->bo);
anv_device_free(device, bbo);
anv_reloc_list_finish(&bbo->relocs, &cmd_buffer->pool->alloc);
anv_bo_pool_free(&cmd_buffer->device->batch_bo_pool, &bbo->bo);
anv_free(&cmd_buffer->pool->alloc, bbo);
}
static VkResult
anv_batch_bo_list_clone(const struct list_head *list, struct anv_device *device,
anv_batch_bo_list_clone(const struct list_head *list,
struct anv_cmd_buffer *cmd_buffer,
struct list_head *new_list)
{
VkResult result = VK_SUCCESS;
@ -346,7 +355,7 @@ anv_batch_bo_list_clone(const struct list_head *list, struct anv_device *device,
struct anv_batch_bo *prev_bbo = NULL;
list_for_each_entry(struct anv_batch_bo, bbo, list, link) {
struct anv_batch_bo *new_bbo;
result = anv_batch_bo_clone(device, bbo, &new_bbo);
result = anv_batch_bo_clone(cmd_buffer, bbo, &new_bbo);
if (result != VK_SUCCESS)
break;
list_addtail(&new_bbo->link, new_list);
@ -367,7 +376,7 @@ anv_batch_bo_list_clone(const struct list_head *list, struct anv_device *device,
if (result != VK_SUCCESS) {
list_for_each_entry_safe(struct anv_batch_bo, bbo, new_list, link)
anv_batch_bo_destroy(bbo, device);
anv_batch_bo_destroy(bbo, cmd_buffer);
}
return result;
@ -393,7 +402,8 @@ anv_cmd_buffer_surface_base_address(struct anv_cmd_buffer *cmd_buffer)
}
static void
emit_batch_buffer_start(struct anv_batch *batch, struct anv_bo *bo, uint32_t offset)
emit_batch_buffer_start(struct anv_cmd_buffer *cmd_buffer,
struct anv_bo *bo, uint32_t offset)
{
/* In gen8+ the address field grew to two dwords to accomodate 48 bit
* offsets. The high 16 bits are in the last dword, so we can use the gen8
@ -408,8 +418,9 @@ emit_batch_buffer_start(struct anv_batch *batch, struct anv_bo *bo, uint32_t off
const uint32_t gen8_length =
GEN8_MI_BATCH_BUFFER_START_length - GEN8_MI_BATCH_BUFFER_START_length_bias;
anv_batch_emit(batch, GEN8_MI_BATCH_BUFFER_START,
.DwordLength = batch->device->info.gen < 8 ? gen7_length : gen8_length,
anv_batch_emit(&cmd_buffer->batch, GEN8_MI_BATCH_BUFFER_START,
.DwordLength = cmd_buffer->device->info.gen < 8 ?
gen7_length : gen8_length,
._2ndLevelBatchBuffer = _1stlevelbatch,
.AddressSpaceIndicator = ASI_PPGTT,
.BatchBufferStartAddress = { bo, offset });
@ -430,7 +441,7 @@ cmd_buffer_chain_to_batch_bo(struct anv_cmd_buffer *cmd_buffer,
batch->end += GEN8_MI_BATCH_BUFFER_START_length * 4;
assert(batch->end == current_bbo->bo.map + current_bbo->bo.size);
emit_batch_buffer_start(batch, &bbo->bo, 0);
emit_batch_buffer_start(cmd_buffer, &bbo->bo, 0);
anv_batch_bo_finish(current_bbo, batch);
}
@ -441,13 +452,13 @@ anv_cmd_buffer_chain_batch(struct anv_batch *batch, void *_data)
struct anv_cmd_buffer *cmd_buffer = _data;
struct anv_batch_bo *new_bbo;
VkResult result = anv_batch_bo_create(cmd_buffer->device, &new_bbo);
VkResult result = anv_batch_bo_create(cmd_buffer, &new_bbo);
if (result != VK_SUCCESS)
return result;
struct anv_batch_bo **seen_bbo = anv_vector_add(&cmd_buffer->seen_bbos);
if (seen_bbo == NULL) {
anv_batch_bo_destroy(new_bbo, cmd_buffer->device);
anv_batch_bo_destroy(new_bbo, cmd_buffer);
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
}
*seen_bbo = new_bbo;
@ -520,18 +531,17 @@ VkResult
anv_cmd_buffer_init_batch_bo_chain(struct anv_cmd_buffer *cmd_buffer)
{
struct anv_batch_bo *batch_bo;
struct anv_device *device = cmd_buffer->device;
VkResult result;
list_inithead(&cmd_buffer->batch_bos);
result = anv_batch_bo_create(device, &batch_bo);
result = anv_batch_bo_create(cmd_buffer, &batch_bo);
if (result != VK_SUCCESS)
return result;
list_addtail(&batch_bo->link, &cmd_buffer->batch_bos);
cmd_buffer->batch.device = device;
cmd_buffer->batch.alloc = &cmd_buffer->pool->alloc;
cmd_buffer->batch.extend_cb = anv_cmd_buffer_chain_batch;
cmd_buffer->batch.user_data = cmd_buffer;
@ -552,7 +562,7 @@ anv_cmd_buffer_init_batch_bo_chain(struct anv_cmd_buffer *cmd_buffer)
goto fail_seen_bbos;
result = anv_reloc_list_init(&cmd_buffer->surface_relocs,
cmd_buffer->device);
&cmd_buffer->pool->alloc);
if (result != VK_SUCCESS)
goto fail_bt_blocks;
@ -569,7 +579,7 @@ anv_cmd_buffer_init_batch_bo_chain(struct anv_cmd_buffer *cmd_buffer)
fail_seen_bbos:
anv_vector_finish(&cmd_buffer->seen_bbos);
fail_batch_bo:
anv_batch_bo_destroy(batch_bo, device);
anv_batch_bo_destroy(batch_bo, cmd_buffer);
return result;
}
@ -577,8 +587,6 @@ anv_cmd_buffer_init_batch_bo_chain(struct anv_cmd_buffer *cmd_buffer)
void
anv_cmd_buffer_fini_batch_bo_chain(struct anv_cmd_buffer *cmd_buffer)
{
struct anv_device *device = cmd_buffer->device;
int32_t *bt_block;
anv_vector_foreach(bt_block, &cmd_buffer->bt_blocks) {
anv_block_pool_free(&cmd_buffer->device->surface_state_block_pool,
@ -586,31 +594,29 @@ anv_cmd_buffer_fini_batch_bo_chain(struct anv_cmd_buffer *cmd_buffer)
}
anv_vector_finish(&cmd_buffer->bt_blocks);
anv_reloc_list_finish(&cmd_buffer->surface_relocs, cmd_buffer->device);
anv_reloc_list_finish(&cmd_buffer->surface_relocs, &cmd_buffer->pool->alloc);
anv_vector_finish(&cmd_buffer->seen_bbos);
/* Destroy all of the batch buffers */
list_for_each_entry_safe(struct anv_batch_bo, bbo,
&cmd_buffer->batch_bos, link) {
anv_batch_bo_destroy(bbo, device);
anv_batch_bo_destroy(bbo, cmd_buffer);
}
anv_device_free(device, cmd_buffer->execbuf2.objects);
anv_device_free(device, cmd_buffer->execbuf2.bos);
anv_free(&cmd_buffer->pool->alloc, cmd_buffer->execbuf2.objects);
anv_free(&cmd_buffer->pool->alloc, cmd_buffer->execbuf2.bos);
}
void
anv_cmd_buffer_reset_batch_bo_chain(struct anv_cmd_buffer *cmd_buffer)
{
struct anv_device *device = cmd_buffer->device;
/* Delete all but the first batch bo */
assert(!list_empty(&cmd_buffer->batch_bos));
while (cmd_buffer->batch_bos.next != cmd_buffer->batch_bos.prev) {
struct anv_batch_bo *bbo = anv_cmd_buffer_current_batch_bo(cmd_buffer);
list_del(&bbo->link);
anv_batch_bo_destroy(bbo, device);
anv_batch_bo_destroy(bbo, cmd_buffer);
}
assert(!list_empty(&cmd_buffer->batch_bos));
@ -713,7 +719,7 @@ anv_cmd_buffer_add_secondary(struct anv_cmd_buffer *primary,
struct anv_batch_bo *last_bbo =
list_last_entry(&secondary->batch_bos, struct anv_batch_bo, link);
emit_batch_buffer_start(&primary->batch, &first_bbo->bo, 0);
emit_batch_buffer_start(primary, &first_bbo->bo, 0);
struct anv_batch_bo *this_bbo = anv_cmd_buffer_current_batch_bo(primary);
assert(primary->batch.start == this_bbo->bo.map);
@ -727,14 +733,14 @@ anv_cmd_buffer_add_secondary(struct anv_cmd_buffer *primary,
*/
last_bbo->relocs.num_relocs--;
secondary->batch.next -= GEN8_MI_BATCH_BUFFER_START_length * 4;
emit_batch_buffer_start(&secondary->batch, &this_bbo->bo, offset);
emit_batch_buffer_start(secondary, &this_bbo->bo, offset);
anv_cmd_buffer_add_seen_bbos(primary, &secondary->batch_bos);
break;
}
case ANV_CMD_BUFFER_EXEC_MODE_COPY_AND_CHAIN: {
struct list_head copy_list;
VkResult result = anv_batch_bo_list_clone(&secondary->batch_bos,
secondary->device,
secondary,
&copy_list);
if (result != VK_SUCCESS)
return; /* FIXME */
@ -760,7 +766,7 @@ anv_cmd_buffer_add_secondary(struct anv_cmd_buffer *primary,
assert(!"Invalid execution mode");
}
anv_reloc_list_append(&primary->surface_relocs, primary->device,
anv_reloc_list_append(&primary->surface_relocs, &primary->pool->alloc,
&secondary->surface_relocs, 0);
}
@ -784,16 +790,16 @@ anv_cmd_buffer_add_bo(struct anv_cmd_buffer *cmd_buffer,
cmd_buffer->execbuf2.array_length * 2 : 64;
struct drm_i915_gem_exec_object2 *new_objects =
anv_device_alloc(cmd_buffer->device, new_len * sizeof(*new_objects),
8, VK_SYSTEM_ALLOC_TYPE_INTERNAL);
anv_alloc(&cmd_buffer->pool->alloc, new_len * sizeof(*new_objects),
8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (new_objects == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
struct anv_bo **new_bos =
anv_device_alloc(cmd_buffer->device, new_len * sizeof(*new_bos),
8, VK_SYSTEM_ALLOC_TYPE_INTERNAL);
anv_alloc(&cmd_buffer->pool->alloc, new_len * sizeof(*new_bos),
8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (new_objects == NULL) {
anv_device_free(cmd_buffer->device, new_objects);
anv_free(&cmd_buffer->pool->alloc, new_objects);
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
}

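Throughout the hunks above, the old anv_device_alloc/anv_device_free calls (which always allocated through the device) become anv_alloc/anv_free calls against an explicit VkAllocationCallbacks pointer, here the owning command pool's allocator. The helpers themselves are defined elsewhere in this commit; a plausible sketch, assuming they are thin wrappers over the callback table:

static inline void *
anv_alloc(const VkAllocationCallbacks *alloc,
          size_t size, size_t align, VkSystemAllocationScope scope)
{
   /* Forward to the application- or driver-supplied callback. */
   return alloc->pfnAllocation(alloc->pUserData, size, align, scope);
}

static inline void
anv_free(const VkAllocationCallbacks *alloc, void *data)
{
   alloc->pfnFree(alloc->pUserData, data);
}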
src/vulkan/anv_cmd_buffer.c

@ -134,23 +134,17 @@ anv_cmd_buffer_ensure_push_constants_size(struct anv_cmd_buffer *cmd_buffer,
struct anv_push_constants **ptr = &cmd_buffer->state.push_constants[stage];
if (*ptr == NULL) {
*ptr = anv_device_alloc(cmd_buffer->device, size, 8,
VK_SYSTEM_ALLOC_TYPE_INTERNAL);
*ptr = anv_alloc(&cmd_buffer->pool->alloc, size, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (*ptr == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
(*ptr)->size = size;
} else if ((*ptr)->size < size) {
void *new_data = anv_device_alloc(cmd_buffer->device, size, 8,
VK_SYSTEM_ALLOC_TYPE_INTERNAL);
if (new_data == NULL)
*ptr = anv_realloc(&cmd_buffer->pool->alloc, *ptr, size, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (*ptr == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
memcpy(new_data, *ptr, (*ptr)->size);
anv_device_free(cmd_buffer->device, *ptr);
*ptr = new_data;
(*ptr)->size = size;
}
(*ptr)->size = size;
return VK_SUCCESS;
}
@ -170,13 +164,14 @@ VkResult anv_CreateCommandBuffer(
struct anv_cmd_buffer *cmd_buffer;
VkResult result;
cmd_buffer = anv_device_alloc(device, sizeof(*cmd_buffer), 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
cmd_buffer = anv_alloc(&pool->alloc, sizeof(*cmd_buffer), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (cmd_buffer == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
cmd_buffer->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
cmd_buffer->device = device;
cmd_buffer->pool = pool;
result = anv_cmd_buffer_init_batch_bo_chain(cmd_buffer);
if (result != VK_SUCCESS)
@ -205,7 +200,8 @@ VkResult anv_CreateCommandBuffer(
return VK_SUCCESS;
fail: anv_device_free(device, cmd_buffer);
fail:
anv_free(&cmd_buffer->pool->alloc, cmd_buffer);
return result;
}
@ -214,7 +210,6 @@ void anv_DestroyCommandBuffer(
VkDevice _device,
VkCommandBuffer _cmd_buffer)
{
ANV_FROM_HANDLE(anv_device, device, _device);
ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, _cmd_buffer);
list_del(&cmd_buffer->pool_link);
@ -223,7 +218,8 @@ void anv_DestroyCommandBuffer(
anv_state_stream_finish(&cmd_buffer->surface_state_stream);
anv_state_stream_finish(&cmd_buffer->dynamic_state_stream);
anv_device_free(device, cmd_buffer);
anv_free(&cmd_buffer->pool->alloc, cmd_buffer);
}
VkResult anv_ResetCommandBuffer(
@ -549,7 +545,7 @@ add_surface_state_reloc(struct anv_cmd_buffer *cmd_buffer,
const uint32_t dword = cmd_buffer->device->info.gen < 8 ? 1 : 8;
anv_reloc_list_add(&cmd_buffer->surface_relocs, cmd_buffer->device,
anv_reloc_list_add(&cmd_buffer->surface_relocs, &cmd_buffer->pool->alloc,
state.offset + dword * 4, bo, offset);
}
@ -898,16 +894,22 @@ void anv_CmdExecuteCommands(
VkResult anv_CreateCommandPool(
VkDevice _device,
const VkCommandPoolCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkCommandPool* pCmdPool)
{
ANV_FROM_HANDLE(anv_device, device, _device);
struct anv_cmd_pool *pool;
pool = anv_device_alloc(device, sizeof(*pool), 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
pool = anv_alloc2(&device->alloc, pAllocator, sizeof(*pool), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (pool == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
if (pAllocator)
pool->alloc = *pAllocator;
else
pool->alloc = device->alloc;
list_inithead(&pool->cmd_buffers);
*pCmdPool = anv_cmd_pool_to_handle(pool);
@ -917,14 +919,15 @@ VkResult anv_CreateCommandPool(
void anv_DestroyCommandPool(
VkDevice _device,
VkCommandPool commandPool)
VkCommandPool commandPool,
const VkAllocationCallbacks* pAllocator)
{
ANV_FROM_HANDLE(anv_device, device, _device);
ANV_FROM_HANDLE(anv_cmd_pool, pool, commandPool);
anv_ResetCommandPool(_device, commandPool, 0);
anv_device_free(device, pool);
anv_free2(&device->alloc, pAllocator, pool);
}
VkResult anv_ResetCommandPool(

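The entry points above also rely on helpers not shown in this excerpt: anv_realloc (in anv_cmd_buffer_ensure_push_constants_size) and anv_alloc2/anv_free2 (in anv_CreateCommandPool/anv_DestroyCommandPool), the latter pair taking both the parent object's allocator and the caller's optional pAllocator. Presumably they forward to pfnReallocation and prefer the per-call allocator when one was supplied, falling back to the parent's otherwise; a sketch under that assumption:

static inline void *
anv_realloc(const VkAllocationCallbacks *alloc, void *ptr,
            size_t size, size_t align, VkSystemAllocationScope scope)
{
   return alloc->pfnReallocation(alloc->pUserData, ptr, size, align, scope);
}

static inline void *
anv_alloc2(const VkAllocationCallbacks *parent_alloc,
           const VkAllocationCallbacks *alloc,
           size_t size, size_t align, VkSystemAllocationScope scope)
{
   /* A per-call pAllocator, if given, overrides the parent's allocator. */
   return anv_alloc(alloc ? alloc : parent_alloc, size, align, scope);
}

static inline void
anv_free2(const VkAllocationCallbacks *parent_alloc,
          const VkAllocationCallbacks *alloc, void *data)
{
   anv_free(alloc ? alloc : parent_alloc, data);
}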
src/vulkan/anv_descriptor_set.c

@ -36,6 +36,7 @@
VkResult anv_CreateDescriptorSetLayout(
VkDevice _device,
const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkDescriptorSetLayout* pSetLayout)
{
ANV_FROM_HANDLE(anv_device, device, _device);
@ -56,8 +57,8 @@ VkResult anv_CreateDescriptorSetLayout(
(max_binding + 1) * sizeof(set_layout->binding[0]) +
immutable_sampler_count * sizeof(struct anv_sampler *);
set_layout = anv_device_alloc(device, size, 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
set_layout = anv_alloc2(&device->alloc, pAllocator, size, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (!set_layout)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@ -154,12 +155,13 @@ VkResult anv_CreateDescriptorSetLayout(
void anv_DestroyDescriptorSetLayout(
VkDevice _device,
VkDescriptorSetLayout _set_layout)
VkDescriptorSetLayout _set_layout,
const VkAllocationCallbacks* pAllocator)
{
ANV_FROM_HANDLE(anv_device, device, _device);
ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);
anv_device_free(device, set_layout);
anv_free2(&device->alloc, pAllocator, set_layout);
}
/*
@ -170,6 +172,7 @@ void anv_DestroyDescriptorSetLayout(
VkResult anv_CreatePipelineLayout(
VkDevice _device,
const VkPipelineLayoutCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkPipelineLayout* pPipelineLayout)
{
ANV_FROM_HANDLE(anv_device, device, _device);
@ -219,7 +222,8 @@ VkResult anv_CreatePipelineLayout(
size_t size = sizeof(*layout) + num_bindings * sizeof(layout->entries[0]);
layout = anv_device_alloc(device, size, 8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
layout = anv_alloc2(&device->alloc, pAllocator, size, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (layout == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@ -273,12 +277,13 @@ VkResult anv_CreatePipelineLayout(
void anv_DestroyPipelineLayout(
VkDevice _device,
VkPipelineLayout _pipelineLayout)
VkPipelineLayout _pipelineLayout,
const VkAllocationCallbacks* pAllocator)
{
ANV_FROM_HANDLE(anv_device, device, _device);
ANV_FROM_HANDLE(anv_pipeline_layout, pipeline_layout, _pipelineLayout);
anv_device_free(device, pipeline_layout);
anv_free2(&device->alloc, pAllocator, pipeline_layout);
}
/*
@ -288,6 +293,7 @@ void anv_DestroyPipelineLayout(
VkResult anv_CreateDescriptorPool(
VkDevice device,
const VkDescriptorPoolCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkDescriptorPool* pDescriptorPool)
{
anv_finishme("VkDescriptorPool is a stub");
@ -297,7 +303,8 @@ VkResult anv_CreateDescriptorPool(
void anv_DestroyDescriptorPool(
VkDevice _device,
VkDescriptorPool _pool)
VkDescriptorPool _pool,
const VkAllocationCallbacks* pAllocator)
{
anv_finishme("VkDescriptorPool is a stub: free the pool's descriptor sets");
}
@ -319,7 +326,8 @@ anv_descriptor_set_create(struct anv_device *device,
struct anv_descriptor_set *set;
size_t size = sizeof(*set) + layout->size * sizeof(set->descriptors[0]);
set = anv_device_alloc(device, size, 8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
set = anv_alloc(&device->alloc /* XXX: Use the pool */, size, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (!set)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@ -349,7 +357,7 @@ void
anv_descriptor_set_destroy(struct anv_device *device,
struct anv_descriptor_set *set)
{
anv_device_free(device, set);
anv_free(&device->alloc /* XXX: Use the pool */, set);
}
VkResult anv_AllocateDescriptorSets(

src/vulkan/anv_device.c

@ -148,28 +148,6 @@ anv_physical_device_finish(struct anv_physical_device *device)
ralloc_free(device->compiler);
}
static void *default_alloc(
void* pUserData,
size_t size,
size_t alignment,
VkSystemAllocType allocType)
{
return malloc(size);
}
static void default_free(
void* pUserData,
void* pMem)
{
free(pMem);
}
static const VkAllocCallbacks default_alloc_callbacks = {
.pUserData = NULL,
.pfnAlloc = default_alloc,
.pfnFree = default_free
};
static const VkExtensionProperties global_extensions[] = {
{
.extensionName = VK_EXT_KHR_SWAPCHAIN_EXTENSION_NAME,
@ -184,13 +162,39 @@ static const VkExtensionProperties device_extensions[] = {
},
};
static void *
default_alloc_func(void *pUserData, size_t size, size_t align,
VkSystemAllocationScope allocationScope)
{
return malloc(size);
}
static void *
default_realloc_func(void *pUserData, void *pOriginal, size_t size,
size_t align, VkSystemAllocationScope allocationScope)
{
return realloc(pOriginal, size);
}
static void
default_free_func(void *pUserData, void *pMemory)
{
free(pMemory);
}
static const VkAllocationCallbacks default_alloc = {
.pUserData = NULL,
.pfnAllocation = default_alloc_func,
.pfnReallocation = default_realloc_func,
.pfnFree = default_free_func,
};
VkResult anv_CreateInstance(
const VkInstanceCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkInstance* pInstance)
{
struct anv_instance *instance;
const VkAllocCallbacks *alloc_callbacks = &default_alloc_callbacks;
void *user_data = NULL;
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);
@ -210,19 +214,18 @@ VkResult anv_CreateInstance(
return vk_error(VK_ERROR_EXTENSION_NOT_PRESENT);
}
if (pCreateInfo->pAllocCb) {
alloc_callbacks = pCreateInfo->pAllocCb;
user_data = pCreateInfo->pAllocCb->pUserData;
}
instance = alloc_callbacks->pfnAlloc(user_data, sizeof(*instance), 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
instance = anv_alloc2(&default_alloc, pAllocator, sizeof(*instance), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (!instance)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
instance->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
instance->pAllocUserData = alloc_callbacks->pUserData;
instance->pfnAlloc = alloc_callbacks->pfnAlloc;
instance->pfnFree = alloc_callbacks->pfnFree;
if (pAllocator)
instance->alloc = *pAllocator;
else
instance->alloc = default_alloc;
instance->apiVersion = pCreateInfo->pApplicationInfo->apiVersion;
instance->physicalDeviceCount = -1;
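With pCreateInfo->pAllocCb gone, the application's allocator now arrives as the explicit pAllocator argument and is captured into instance->alloc, falling back to the malloc-based default_alloc above when the caller passes NULL. As a rough caller-side illustration (not part of this commit; all names here are hypothetical), an application could plug in a counting allocator like this:

   #include <stdlib.h>
   #include <vulkan/vulkan.h>

   static void *count_alloc(void *user, size_t size, size_t align,
                            VkSystemAllocationScope scope)
   {
      *(size_t *)user += 1;
      /* Like the driver's default_alloc_func, this ignores 'align'; a real
       * allocator must honor it (e.g. via aligned_alloc). */
      return malloc(size);
   }

   static void *count_realloc(void *user, void *orig, size_t size,
                              size_t align, VkSystemAllocationScope scope)
   {
      return realloc(orig, size);
   }

   static void count_free(void *user, void *mem)
   {
      free(mem);
   }

   static size_t allocation_count = 0;

   static const VkAllocationCallbacks count_callbacks = {
      .pUserData       = &allocation_count,
      .pfnAllocation   = count_alloc,
      .pfnReallocation = count_realloc,
      .pfnFree         = count_free,
   };

Passing &count_callbacks as pAllocator to vkCreateInstance (and compatible callbacks to the matching vkDestroyInstance) routes the host allocations the driver makes on the instance's behalf through these functions.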
@ -238,7 +241,8 @@ VkResult anv_CreateInstance(
}
void anv_DestroyInstance(
VkInstance _instance)
VkInstance _instance,
const VkAllocationCallbacks* pAllocator)
{
ANV_FROM_HANDLE(anv_instance, instance, _instance);
@ -254,31 +258,7 @@ void anv_DestroyInstance(
_mesa_locale_fini();
instance->pfnFree(instance->pAllocUserData, instance);
}
void *
anv_instance_alloc(struct anv_instance *instance, size_t size,
size_t alignment, VkSystemAllocType allocType)
{
void *mem = instance->pfnAlloc(instance->pAllocUserData,
size, alignment, allocType);
if (mem) {
VG(VALGRIND_MEMPOOL_ALLOC(instance, mem, size));
VG(VALGRIND_MAKE_MEM_UNDEFINED(mem, size));
}
return mem;
}
void
anv_instance_free(struct anv_instance *instance, void *mem)
{
if (mem == NULL)
return;
VG(VALGRIND_MEMPOOL_FREE(instance, mem));
instance->pfnFree(instance->pAllocUserData, mem);
anv_free(&instance->alloc, instance);
}
VkResult anv_EnumeratePhysicalDevices(
@ -611,10 +591,10 @@ anv_device_init_border_colors(struct anv_device *device)
VkResult anv_CreateDevice(
VkPhysicalDevice physicalDevice,
const VkDeviceCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkDevice* pDevice)
{
ANV_FROM_HANDLE(anv_physical_device, physical_device, physicalDevice);
struct anv_instance *instance = physical_device->instance;
struct anv_device *device;
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO);
@ -634,14 +614,20 @@ VkResult anv_CreateDevice(
anv_set_dispatch_devinfo(physical_device->info);
device = anv_instance_alloc(instance, sizeof(*device), 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
device = anv_alloc2(&physical_device->instance->alloc, pAllocator,
sizeof(*device), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (!device)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
device->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
device->instance = physical_device->instance;
if (pAllocator)
device->alloc = *pAllocator;
else
device->alloc = physical_device->instance->alloc;
/* XXX(chadv): Can we dup() physicalDevice->fd here? */
device->fd = open(physical_device->path, O_RDWR | O_CLOEXEC);
if (device->fd == -1)
@ -686,13 +672,14 @@ VkResult anv_CreateDevice(
fail_fd:
close(device->fd);
fail_device:
anv_device_free(device, device);
anv_free(&device->alloc, device);
return vk_error(VK_ERROR_INITIALIZATION_FAILED);
}
void anv_DestroyDevice(
VkDevice _device)
VkDevice _device,
const VkAllocationCallbacks* pAllocator)
{
ANV_FROM_HANDLE(anv_device, device, _device);
@ -720,7 +707,7 @@ void anv_DestroyDevice(
close(device->fd);
anv_instance_free(device->instance, device);
anv_free(&device->alloc, device);
}
VkResult anv_EnumerateInstanceExtensionProperties(
@ -915,22 +902,6 @@ VkResult anv_DeviceWaitIdle(
return result;
}
void *
anv_device_alloc(struct anv_device * device,
size_t size,
size_t alignment,
VkSystemAllocType allocType)
{
return anv_instance_alloc(device->instance, size, alignment, allocType);
}
void
anv_device_free(struct anv_device * device,
void * mem)
{
anv_instance_free(device->instance, mem);
}
VkResult
anv_bo_init_new(struct anv_bo *bo, struct anv_device *device, uint64_t size)
{
@ -946,28 +917,29 @@ anv_bo_init_new(struct anv_bo *bo, struct anv_device *device, uint64_t size)
return VK_SUCCESS;
}
VkResult anv_AllocMemory(
VkResult anv_AllocateMemory(
VkDevice _device,
const VkMemoryAllocInfo* pAllocInfo,
const VkMemoryAllocateInfo* pAllocateInfo,
const VkAllocationCallbacks* pAllocator,
VkDeviceMemory* pMem)
{
ANV_FROM_HANDLE(anv_device, device, _device);
struct anv_device_memory *mem;
VkResult result;
assert(pAllocInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOC_INFO);
assert(pAllocateInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO);
/* We support exactly one memory heap. */
assert(pAllocInfo->memoryTypeIndex == 0);
assert(pAllocateInfo->memoryTypeIndex == 0);
/* FINISHME: Fail if allocation request exceeds heap size. */
mem = anv_device_alloc(device, sizeof(*mem), 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
mem = anv_alloc2(&device->alloc, pAllocator, sizeof(*mem), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (mem == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
result = anv_bo_init_new(&mem->bo, device, pAllocInfo->allocationSize);
result = anv_bo_init_new(&mem->bo, device, pAllocateInfo->allocationSize);
if (result != VK_SUCCESS)
goto fail;
@ -976,14 +948,15 @@ VkResult anv_AllocMemory(
return VK_SUCCESS;
fail:
anv_device_free(device, mem);
anv_free2(&device->alloc, pAllocator, mem);
return result;
}
void anv_FreeMemory(
VkDevice _device,
VkDeviceMemory _mem)
VkDeviceMemory _mem,
const VkAllocationCallbacks* pAllocator)
{
ANV_FROM_HANDLE(anv_device, device, _device);
ANV_FROM_HANDLE(anv_device_memory, mem, _mem);
@ -994,7 +967,7 @@ void anv_FreeMemory(
if (mem->bo.gem_handle != 0)
anv_gem_close(device, mem->bo.gem_handle);
anv_device_free(device, mem);
anv_free2(&device->alloc, pAllocator, mem);
}
VkResult anv_MapMemory(
@ -1152,6 +1125,7 @@ VkResult anv_QueueBindSparse(
VkResult anv_CreateFence(
VkDevice _device,
const VkFenceCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkFence* pFence)
{
ANV_FROM_HANDLE(anv_device, device, _device);
@ -1163,8 +1137,8 @@ VkResult anv_CreateFence(
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FENCE_CREATE_INFO);
fence = anv_device_alloc(device, sizeof(*fence), 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
fence = anv_alloc2(&device->alloc, pAllocator, sizeof(*fence), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (fence == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@ -1207,21 +1181,22 @@ VkResult anv_CreateFence(
return VK_SUCCESS;
fail:
anv_device_free(device, fence);
anv_free2(&device->alloc, pAllocator, fence);
return result;
}
void anv_DestroyFence(
VkDevice _device,
VkFence _fence)
VkFence _fence,
const VkAllocationCallbacks* pAllocator)
{
ANV_FROM_HANDLE(anv_device, device, _device);
ANV_FROM_HANDLE(anv_fence, fence, _fence);
anv_gem_munmap(fence->bo.map, fence->bo.size);
anv_gem_close(device, fence->bo.gem_handle);
anv_device_free(device, fence);
anv_free2(&device->alloc, pAllocator, fence);
}
VkResult anv_ResetFences(
@ -1301,6 +1276,7 @@ VkResult anv_WaitForFences(
VkResult anv_CreateSemaphore(
VkDevice device,
const VkSemaphoreCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkSemaphore* pSemaphore)
{
*pSemaphore = (VkSemaphore)1;
@ -1309,7 +1285,8 @@ VkResult anv_CreateSemaphore(
void anv_DestroySemaphore(
VkDevice device,
VkSemaphore semaphore)
VkSemaphore semaphore,
const VkAllocationCallbacks* pAllocator)
{
stub();
}
@ -1333,6 +1310,7 @@ VkResult anv_QueueWaitSemaphore(
VkResult anv_CreateEvent(
VkDevice device,
const VkEventCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkEvent* pEvent)
{
stub_return(VK_UNSUPPORTED);
@ -1340,7 +1318,8 @@ VkResult anv_CreateEvent(
void anv_DestroyEvent(
VkDevice device,
VkEvent event)
VkEvent event,
const VkAllocationCallbacks* pAllocator)
{
stub();
}
@ -1371,6 +1350,7 @@ VkResult anv_ResetEvent(
VkResult anv_CreateBuffer(
VkDevice _device,
const VkBufferCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkBuffer* pBuffer)
{
ANV_FROM_HANDLE(anv_device, device, _device);
@ -1378,8 +1358,8 @@ VkResult anv_CreateBuffer(
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);
buffer = anv_device_alloc(device, sizeof(*buffer), 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
buffer = anv_alloc2(&device->alloc, pAllocator, sizeof(*buffer), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (buffer == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@ -1394,12 +1374,13 @@ VkResult anv_CreateBuffer(
void anv_DestroyBuffer(
VkDevice _device,
VkBuffer _buffer)
VkBuffer _buffer,
const VkAllocationCallbacks* pAllocator)
{
ANV_FROM_HANDLE(anv_device, device, _device);
ANV_FROM_HANDLE(anv_buffer, buffer, _buffer);
anv_device_free(device, buffer);
anv_free2(&device->alloc, pAllocator, buffer);
}
void
@ -1428,6 +1409,7 @@ anv_fill_buffer_surface_state(struct anv_device *device, void *state,
VkResult anv_CreateBufferView(
VkDevice _device,
const VkBufferViewCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkBufferView* pView)
{
stub_return(VK_UNSUPPORTED);
@ -1435,24 +1417,27 @@ VkResult anv_CreateBufferView(
void anv_DestroyBufferView(
VkDevice _device,
VkBufferView _bview)
VkBufferView _bview,
const VkAllocationCallbacks* pAllocator)
{
stub();
}
void anv_DestroySampler(
VkDevice _device,
VkSampler _sampler)
VkSampler _sampler,
const VkAllocationCallbacks* pAllocator)
{
ANV_FROM_HANDLE(anv_device, device, _device);
ANV_FROM_HANDLE(anv_sampler, sampler, _sampler);
anv_device_free(device, sampler);
anv_free2(&device->alloc, pAllocator, sampler);
}
VkResult anv_CreateFramebuffer(
VkDevice _device,
const VkFramebufferCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkFramebuffer* pFramebuffer)
{
ANV_FROM_HANDLE(anv_device, device, _device);
@ -1462,8 +1447,8 @@ VkResult anv_CreateFramebuffer(
size_t size = sizeof(*framebuffer) +
sizeof(struct anv_image_view *) * pCreateInfo->attachmentCount;
framebuffer = anv_device_alloc(device, size, 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
framebuffer = anv_alloc2(&device->alloc, pAllocator, size, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (framebuffer == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@ -1484,12 +1469,13 @@ VkResult anv_CreateFramebuffer(
void anv_DestroyFramebuffer(
VkDevice _device,
VkFramebuffer _fb)
VkFramebuffer _fb,
const VkAllocationCallbacks* pAllocator)
{
ANV_FROM_HANDLE(anv_device, device, _device);
ANV_FROM_HANDLE(anv_framebuffer, fb, _fb);
anv_device_free(device, fb);
anv_free2(&device->alloc, pAllocator, fb);
}
void vkCmdDbgMarkerBegin(

@ -54,19 +54,19 @@ anv_dump_image_to_ppm(struct anv_device *device,
.tiling = VK_IMAGE_TILING_LINEAR,
.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
.flags = 0,
}, &copy_image);
}, NULL, &copy_image);
assert(result == VK_SUCCESS);
VkMemoryRequirements reqs;
anv_GetImageMemoryRequirements(vk_device, copy_image, &reqs);
VkDeviceMemory memory;
result = anv_AllocMemory(vk_device,
&(VkMemoryAllocInfo) {
.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOC_INFO,
result = anv_AllocateMemory(vk_device,
&(VkMemoryAllocateInfo) {
.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
.allocationSize = reqs.size,
.memoryTypeIndex = 0,
}, &memory);
}, NULL, &memory);
assert(result == VK_SUCCESS);
result = anv_BindImageMemory(vk_device, copy_image, memory, 0);
@ -78,7 +78,7 @@ anv_dump_image_to_ppm(struct anv_device *device,
.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
.queueFamilyIndex = 0,
.flags = 0,
}, &commandPool);
}, NULL, &commandPool);
assert(result == VK_SUCCESS);
VkCommandBuffer cmd;
@ -158,7 +158,7 @@ anv_dump_image_to_ppm(struct anv_device *device,
&(VkFenceCreateInfo) {
.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
.flags = 0,
}, &fence);
}, NULL, &fence);
assert(result == VK_SUCCESS);
result = anv_QueueSubmit(anv_queue_to_handle(&device->queue),
@ -168,8 +168,8 @@ anv_dump_image_to_ppm(struct anv_device *device,
result = anv_WaitForFences(vk_device, 1, &fence, true, UINT64_MAX);
assert(result == VK_SUCCESS);
anv_DestroyFence(vk_device, fence);
anv_DestroyCommandPool(vk_device, commandPool);
anv_DestroyFence(vk_device, fence, NULL);
anv_DestroyCommandPool(vk_device, commandPool, NULL);
uint8_t *map;
result = anv_MapMemory(vk_device, memory, 0, reqs.size, 0, (void **)&map);
@ -204,6 +204,6 @@ anv_dump_image_to_ppm(struct anv_device *device,
fclose(file);
anv_UnmapMemory(vk_device, memory);
anv_DestroyImage(vk_device, copy_image);
anv_FreeMemory(vk_device, memory);
anv_DestroyImage(vk_device, copy_image, NULL);
anv_FreeMemory(vk_device, memory, NULL);
}
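This debug dump path passes NULL for every pAllocator it forwards. With the helpers this commit adds to the driver's private header (see the anv_alloc2/anv_free2 hunk later in the diff), a NULL per-call allocator simply means "inherit the parent object's callbacks". Schematically, with size standing in for whatever is being allocated:

   /* A NULL pAllocator falls through to the device's allocator, which in
    * turn defaulted to the instance's allocator at device creation. */
   void *p = anv_alloc2(&device->alloc, NULL /* pAllocator */, size, 8,
                        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   anv_free2(&device->alloc, NULL /* pAllocator */, p);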

@ -292,6 +292,7 @@ anv_image_get_full_usage(const VkImageCreateInfo *info)
VkResult
anv_image_create(VkDevice _device,
const struct anv_image_create_info *create_info,
const VkAllocationCallbacks* alloc,
VkImage *pImage)
{
ANV_FROM_HANDLE(anv_device, device, _device);
@ -321,8 +322,8 @@ anv_image_create(VkDevice _device,
assert(extent->height <= limits->height);
assert(extent->depth <= limits->depth);
image = anv_device_alloc(device, sizeof(*image), 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
image = anv_alloc2(&device->alloc, alloc, sizeof(*image), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (!image)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@ -374,7 +375,7 @@ anv_image_create(VkDevice _device,
fail:
if (image)
anv_device_free(device, image);
anv_free2(&device->alloc, alloc, image);
return r;
}
@ -382,21 +383,24 @@ fail:
VkResult
anv_CreateImage(VkDevice device,
const VkImageCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkImage *pImage)
{
return anv_image_create(device,
&(struct anv_image_create_info) {
.vk_info = pCreateInfo,
},
pAllocator,
pImage);
}
void
anv_DestroyImage(VkDevice _device, VkImage _image)
anv_DestroyImage(VkDevice _device, VkImage _image,
const VkAllocationCallbacks *pAllocator)
{
ANV_FROM_HANDLE(anv_device, device, _device);
anv_device_free(device, anv_image_from_handle(_image));
anv_free2(&device->alloc, pAllocator, anv_image_from_handle(_image));
}
static void
@ -453,6 +457,7 @@ void anv_GetImageSubresourceLayout(
VkResult
anv_validate_CreateImageView(VkDevice _device,
const VkImageViewCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkImageView *pView)
{
ANV_FROM_HANDLE(anv_image, image, pCreateInfo->image);
@ -531,7 +536,7 @@ anv_validate_CreateImageView(VkDevice _device,
assert(!"bad VkImageSubresourceRange::aspectFlags");
}
return anv_CreateImageView(_device, pCreateInfo, pView);
return anv_CreateImageView(_device, pCreateInfo, pAllocator, pView);
}
void
@ -584,13 +589,14 @@ anv_image_view_init(struct anv_image_view *iview,
VkResult
anv_CreateImageView(VkDevice _device,
const VkImageViewCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkImageView *pView)
{
ANV_FROM_HANDLE(anv_device, device, _device);
struct anv_image_view *view;
view = anv_device_alloc(device, sizeof(*view), 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
view = anv_alloc2(&device->alloc, pAllocator, sizeof(*view), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (view == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@ -601,10 +607,13 @@ anv_CreateImageView(VkDevice _device,
return VK_SUCCESS;
}
static void
anv_image_view_destroy(struct anv_device *device,
struct anv_image_view *iview)
void
anv_DestroyImageView(VkDevice _device, VkImageView _iview,
const VkAllocationCallbacks *pAllocator)
{
ANV_FROM_HANDLE(anv_device, device, _device);
ANV_FROM_HANDLE(anv_image_view, iview, _iview);
if (iview->image->needs_color_rt_surface_state) {
anv_state_pool_free(&device->surface_state_pool,
iview->color_rt_surface_state);
@ -615,16 +624,7 @@ anv_image_view_destroy(struct anv_device *device,
iview->nonrt_surface_state);
}
anv_device_free(device, iview);
}
void
anv_DestroyImageView(VkDevice _device, VkImageView _iview)
{
ANV_FROM_HANDLE(anv_device, device, _device);
ANV_FROM_HANDLE(anv_image_view, iview, _iview);
anv_image_view_destroy(device, iview);
anv_free2(&device->alloc, pAllocator, iview);
}
struct anv_surface *

@ -32,6 +32,7 @@
VkResult anv_CreateDmaBufImageINTEL(
VkDevice _device,
const VkDmaBufImageCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkDeviceMemory* pMem,
VkImage* pImage)
{
@ -43,8 +44,8 @@ VkResult anv_CreateDmaBufImageINTEL(
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DMA_BUF_IMAGE_CREATE_INFO_INTEL);
mem = anv_device_alloc(device, sizeof(*mem), 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
mem = anv_alloc2(&device->alloc, pAllocator, sizeof(*mem), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (mem == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@ -59,13 +60,6 @@ VkResult anv_CreateDmaBufImageINTEL(
mem->bo.offset = 0;
mem->bo.size = pCreateInfo->strideInBytes * pCreateInfo->extent.height;
image = anv_device_alloc(device, sizeof(*image), 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
if (image == NULL) {
result = vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
goto fail_mem;
}
anv_image_create(_device,
&(struct anv_image_create_info) {
.force_tiling = true,
@ -85,7 +79,7 @@ VkResult anv_CreateDmaBufImageINTEL(
.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
.flags = 0,
}},
&image_h);
pAllocator, &image_h);
image = anv_image_from_handle(image_h);
image->bo = &mem->bo;
@ -100,10 +94,8 @@ VkResult anv_CreateDmaBufImageINTEL(
return VK_SUCCESS;
fail_mem:
anv_gem_close(device, mem->bo.gem_handle);
fail:
anv_device_free(device, mem);
anv_free2(&device->alloc, pAllocator, mem);
return result;
}

@ -217,7 +217,7 @@ anv_device_init_meta_blit_state(struct anv_device *device)
},
},
.dependencyCount = 0,
}, &device->meta_state.blit.render_pass);
}, NULL, &device->meta_state.blit.render_pass);
/* We don't use a vertex shader for clearing, but instead build and pass
* the VUEs directly to the rasterization backend. However, we do need
@ -315,7 +315,7 @@ anv_device_init_meta_blit_state(struct anv_device *device)
}
};
anv_CreateDescriptorSetLayout(anv_device_to_handle(device), &ds_layout_info,
&device->meta_state.blit.ds_layout);
NULL, &device->meta_state.blit.ds_layout);
anv_CreatePipelineLayout(anv_device_to_handle(device),
&(VkPipelineLayoutCreateInfo) {
@ -323,7 +323,7 @@ anv_device_init_meta_blit_state(struct anv_device *device)
.setLayoutCount = 1,
.pSetLayouts = &device->meta_state.blit.ds_layout,
},
&device->meta_state.blit.pipeline_layout);
NULL, &device->meta_state.blit.pipeline_layout);
VkPipelineShaderStageCreateInfo pipeline_shader_stages[] = {
{
@ -411,12 +411,12 @@ anv_device_init_meta_blit_state(struct anv_device *device)
pipeline_shader_stages[1].shader = fs_2d;
anv_graphics_pipeline_create(anv_device_to_handle(device),
&vk_pipeline_info, &anv_pipeline_info,
&device->meta_state.blit.pipeline_2d_src);
NULL, &device->meta_state.blit.pipeline_2d_src);
pipeline_shader_stages[1].shader = fs_3d;
anv_graphics_pipeline_create(anv_device_to_handle(device),
&vk_pipeline_info, &anv_pipeline_info,
&device->meta_state.blit.pipeline_3d_src);
NULL, &device->meta_state.blit.pipeline_3d_src);
anv_DestroyShader(anv_device_to_handle(device), vs);
anv_DestroyShader(anv_device_to_handle(device), fs_2d);
@ -527,7 +527,7 @@ meta_emit_blit(struct anv_cmd_buffer *cmd_buffer,
.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
.magFilter = blit_filter,
.minFilter = blit_filter,
}, &sampler);
}, &cmd_buffer->pool->alloc, &sampler);
VkDescriptorSet set;
anv_AllocateDescriptorSets(anv_device_to_handle(device),
@ -568,7 +568,7 @@ meta_emit_blit(struct anv_cmd_buffer *cmd_buffer,
.width = dest_iview->extent.width,
.height = dest_iview->extent.height,
.layers = 1
}, &fb);
}, &cmd_buffer->pool->alloc, &fb);
ANV_CALL(CmdBeginRenderPass)(anv_cmd_buffer_to_handle(cmd_buffer),
&(VkRenderPassBeginInfo) {
@ -628,8 +628,10 @@ meta_emit_blit(struct anv_cmd_buffer *cmd_buffer,
* descriptor sets, etc. has been used. We are free to delete it.
*/
anv_descriptor_set_destroy(device, anv_descriptor_set_from_handle(set));
anv_DestroySampler(anv_device_to_handle(device), sampler);
anv_DestroyFramebuffer(anv_device_to_handle(device), fb);
anv_DestroySampler(anv_device_to_handle(device), sampler,
&cmd_buffer->pool->alloc);
anv_DestroyFramebuffer(anv_device_to_handle(device), fb,
&cmd_buffer->pool->alloc);
}
static void
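Note which allocator the meta blit path uses: the transient sampler and framebuffer above, and the temporary images in the hunk below, are created and destroyed with &cmd_buffer->pool->alloc, so these allocations are attributed to the command pool that owns the command buffer rather than to the device. Restating the pattern from the calls above (handles and create-info elided):

   /* Transient meta objects take the command pool's callbacks so the same
    * allocator sees both the allocation and the matching free. */
   anv_CreateSampler(device_h, &sampler_info, &cmd_buffer->pool->alloc, &sampler);
   /* ... record the blit ... */
   anv_DestroySampler(device_h, sampler, &cmd_buffer->pool->alloc);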
@ -683,11 +685,13 @@ do_buffer_copy(struct anv_cmd_buffer *cmd_buffer,
VkImage src_image;
image_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
anv_CreateImage(vk_device, &image_info, &src_image);
anv_CreateImage(vk_device, &image_info,
&cmd_buffer->pool->alloc, &src_image);
VkImage dest_image;
image_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
anv_CreateImage(vk_device, &image_info, &dest_image);
anv_CreateImage(vk_device, &image_info,
&cmd_buffer->pool->alloc, &dest_image);
/* We could use a vk call to bind memory, but that would require
* creating a dummy memory object etc. so there's really no point.
@ -742,8 +746,8 @@ do_buffer_copy(struct anv_cmd_buffer *cmd_buffer,
(VkExtent3D) { width, height, 1 },
VK_FILTER_NEAREST);
anv_DestroyImage(vk_device, src_image);
anv_DestroyImage(vk_device, dest_image);
anv_DestroyImage(vk_device, src_image, &cmd_buffer->pool->alloc);
anv_DestroyImage(vk_device, dest_image, &cmd_buffer->pool->alloc);
}
void anv_CmdCopyBuffer(
@ -1013,6 +1017,7 @@ static struct anv_image *
make_image_for_buffer(VkDevice vk_device, VkBuffer vk_buffer, VkFormat format,
VkImageUsageFlags usage,
VkImageType image_type,
const VkAllocationCallbacks *alloc,
const VkBufferImageCopy *copy)
{
ANV_FROM_HANDLE(anv_buffer, buffer, vk_buffer);
@ -1037,7 +1042,7 @@ make_image_for_buffer(VkDevice vk_device, VkBuffer vk_buffer, VkFormat format,
.tiling = VK_IMAGE_TILING_LINEAR,
.usage = usage,
.flags = 0,
}, &vk_image);
}, alloc, &vk_image);
assert(result == VK_SUCCESS);
ANV_FROM_HANDLE(anv_image, image, vk_image);
@ -1079,7 +1084,8 @@ void anv_CmdCopyBufferToImage(
struct anv_image *src_image =
make_image_for_buffer(vk_device, srcBuffer, proxy_format,
VK_IMAGE_USAGE_SAMPLED_BIT,
dest_image->type, &pRegions[r]);
dest_image->type, &cmd_buffer->pool->alloc,
&pRegions[r]);
const uint32_t dest_base_array_slice =
meta_blit_get_dest_view_base_array_slice(dest_image,
@ -1159,7 +1165,8 @@ void anv_CmdCopyBufferToImage(
src_image->extent.height * 4;
}
anv_DestroyImage(vk_device, anv_image_to_handle(src_image));
anv_DestroyImage(vk_device, anv_image_to_handle(src_image),
&cmd_buffer->pool->alloc);
}
meta_finish_blit(cmd_buffer, &saved_state);
@ -1209,7 +1216,8 @@ void anv_CmdCopyImageToBuffer(
struct anv_image *dest_image =
make_image_for_buffer(vk_device, destBuffer, dest_format,
VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
src_image->type, &pRegions[r]);
src_image->type, &cmd_buffer->pool->alloc,
&pRegions[r]);
unsigned num_slices;
if (src_image->type == VK_IMAGE_TYPE_3D) {
@ -1262,7 +1270,8 @@ void anv_CmdCopyImageToBuffer(
dest_image->extent.height * 4;
}
anv_DestroyImage(vk_device, anv_image_to_handle(dest_image));
anv_DestroyImage(vk_device, anv_image_to_handle(dest_image),
&cmd_buffer->pool->alloc);
}
meta_finish_blit(cmd_buffer, &saved_state);
@ -1314,13 +1323,13 @@ anv_device_finish_meta(struct anv_device *device)
/* Blit */
anv_DestroyRenderPass(anv_device_to_handle(device),
device->meta_state.blit.render_pass);
device->meta_state.blit.render_pass, NULL);
anv_DestroyPipeline(anv_device_to_handle(device),
device->meta_state.blit.pipeline_2d_src);
device->meta_state.blit.pipeline_2d_src, NULL);
anv_DestroyPipeline(anv_device_to_handle(device),
device->meta_state.blit.pipeline_3d_src);
device->meta_state.blit.pipeline_3d_src, NULL);
anv_DestroyPipelineLayout(anv_device_to_handle(device),
device->meta_state.blit.pipeline_layout);
device->meta_state.blit.pipeline_layout, NULL);
anv_DestroyDescriptorSetLayout(anv_device_to_handle(device),
device->meta_state.blit.ds_layout);
device->meta_state.blit.ds_layout, NULL);
}

@ -118,7 +118,8 @@ create_pipeline(struct anv_device *device,
struct nir_shader *fs_nir,
const VkPipelineVertexInputStateCreateInfo *vi_state,
const VkPipelineDepthStencilStateCreateInfo *ds_state,
const VkPipelineColorBlendStateCreateInfo *cb_state)
const VkPipelineColorBlendStateCreateInfo *cb_state,
const VkAllocationCallbacks *alloc)
{
VkDevice device_h = anv_device_to_handle(device);
@ -223,6 +224,7 @@ create_pipeline(struct anv_device *device,
.disable_vs = true,
.use_rectlist = true
},
alloc,
&pipeline_h);
ANV_CALL(DestroyShader)(device_h, vs_h);
@ -302,7 +304,7 @@ init_color_pipeline(struct anv_device *device)
device->meta_state.clear.color_pipeline =
create_pipeline(device, vs_nir, fs_nir, &vi_state, &ds_state,
&cb_state);
&cb_state, NULL);
}
static void
@ -475,7 +477,7 @@ create_depthstencil_pipeline(struct anv_device *device,
};
return create_pipeline(device, vs_nir, fs_nir, &vi_state, &ds_state,
&cb_state);
&cb_state, NULL);
}
static void
@ -601,13 +603,17 @@ anv_device_finish_meta_clear_state(struct anv_device *device)
VkDevice device_h = anv_device_to_handle(device);
ANV_CALL(DestroyPipeline)(device_h,
anv_pipeline_to_handle(device->meta_state.clear.color_pipeline));
anv_pipeline_to_handle(device->meta_state.clear.color_pipeline),
NULL);
ANV_CALL(DestroyPipeline)(device_h,
anv_pipeline_to_handle(device->meta_state.clear.depth_only_pipeline));
anv_pipeline_to_handle(device->meta_state.clear.depth_only_pipeline),
NULL);
ANV_CALL(DestroyPipeline)(device_h,
anv_pipeline_to_handle(device->meta_state.clear.stencil_only_pipeline));
anv_pipeline_to_handle(device->meta_state.clear.stencil_only_pipeline),
NULL);
ANV_CALL(DestroyPipeline)(device_h,
anv_pipeline_to_handle(device->meta_state.clear.depthstencil_pipeline));
anv_pipeline_to_handle(device->meta_state.clear.depthstencil_pipeline),
NULL);
}
void
@ -720,7 +726,7 @@ void anv_CmdClearColorImage(
.width = iview.extent.width,
.height = iview.extent.height,
.layers = 1
}, &fb);
}, &cmd_buffer->pool->alloc, &fb);
VkRenderPass pass;
anv_CreateRenderPass(anv_device_to_handle(cmd_buffer->device),
@ -755,7 +761,7 @@ void anv_CmdClearColorImage(
},
},
.dependencyCount = 0,
}, &pass);
}, &cmd_buffer->pool->alloc, &pass);
ANV_CALL(CmdBeginRenderPass)(anv_cmd_buffer_to_handle(cmd_buffer),
&(VkRenderPassBeginInfo) {
@ -776,6 +782,8 @@ void anv_CmdClearColorImage(
}, VK_SUBPASS_CONTENTS_INLINE);
ANV_CALL(CmdEndRenderPass)(anv_cmd_buffer_to_handle(cmd_buffer));
/* XXX: We're leaking the render pass and framebuffer */
}
}
}

@ -26,6 +26,7 @@
VkResult anv_CreateRenderPass(
VkDevice _device,
const VkRenderPassCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkRenderPass* pRenderPass)
{
ANV_FROM_HANDLE(anv_device, device, _device);
@ -40,8 +41,8 @@ VkResult anv_CreateRenderPass(
attachments_offset = size;
size += pCreateInfo->attachmentCount * sizeof(pass->attachments[0]);
pass = anv_device_alloc(device, size, 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
pass = anv_alloc2(&device->alloc, pAllocator, size, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (pass == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@ -73,9 +74,9 @@ VkResult anv_CreateRenderPass(
if (desc->inputAttachmentCount > 0) {
subpass->input_attachments =
anv_device_alloc(device,
desc->inputAttachmentCount * sizeof(uint32_t),
8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
anv_alloc2(&device->alloc, pAllocator,
desc->inputAttachmentCount * sizeof(uint32_t), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
for (uint32_t j = 0; j < desc->inputAttachmentCount; j++) {
subpass->input_attachments[j]
@ -85,9 +86,9 @@ VkResult anv_CreateRenderPass(
if (desc->colorAttachmentCount > 0) {
subpass->color_attachments =
anv_device_alloc(device,
desc->colorAttachmentCount * sizeof(uint32_t),
8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
anv_alloc2(&device->alloc, pAllocator,
desc->colorAttachmentCount * sizeof(uint32_t), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
for (uint32_t j = 0; j < desc->colorAttachmentCount; j++) {
subpass->color_attachments[j]
@ -97,9 +98,9 @@ VkResult anv_CreateRenderPass(
if (desc->pResolveAttachments) {
subpass->resolve_attachments =
anv_device_alloc(device,
desc->colorAttachmentCount * sizeof(uint32_t),
8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
anv_alloc2(&device->alloc, pAllocator,
desc->colorAttachmentCount * sizeof(uint32_t), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
for (uint32_t j = 0; j < desc->colorAttachmentCount; j++) {
subpass->resolve_attachments[j]
@ -122,7 +123,8 @@ VkResult anv_CreateRenderPass(
void anv_DestroyRenderPass(
VkDevice _device,
VkRenderPass _pass)
VkRenderPass _pass,
const VkAllocationCallbacks* pAllocator)
{
ANV_FROM_HANDLE(anv_device, device, _device);
ANV_FROM_HANDLE(anv_render_pass, pass, _pass);
@ -133,12 +135,12 @@ void anv_DestroyRenderPass(
*/
struct anv_subpass *subpass = &pass->subpasses[i];
anv_device_free(device, subpass->input_attachments);
anv_device_free(device, subpass->color_attachments);
anv_device_free(device, subpass->resolve_attachments);
anv_free2(&device->alloc, pAllocator, subpass->input_attachments);
anv_free2(&device->alloc, pAllocator, subpass->color_attachments);
anv_free2(&device->alloc, pAllocator, subpass->resolve_attachments);
}
anv_device_free(device, pass);
anv_free2(&device->alloc, pAllocator, pass);
}
void anv_GetRenderAreaGranularity(

@ -40,6 +40,7 @@
VkResult anv_CreateShaderModule(
VkDevice _device,
const VkShaderModuleCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkShaderModule* pShaderModule)
{
ANV_FROM_HANDLE(anv_device, device, _device);
@ -48,8 +49,9 @@ VkResult anv_CreateShaderModule(
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO);
assert(pCreateInfo->flags == 0);
module = anv_device_alloc(device, sizeof(*module) + pCreateInfo->codeSize, 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
module = anv_alloc2(&device->alloc, pAllocator,
sizeof(*module) + pCreateInfo->codeSize, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (module == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@ -64,12 +66,13 @@ VkResult anv_CreateShaderModule(
void anv_DestroyShaderModule(
VkDevice _device,
VkShaderModule _module)
VkShaderModule _module,
const VkAllocationCallbacks* pAllocator)
{
ANV_FROM_HANDLE(anv_device, device, _device);
ANV_FROM_HANDLE(anv_shader_module, module, _module);
anv_device_free(device, module);
anv_free2(&device->alloc, pAllocator, module);
}
VkResult anv_CreateShader(
@ -87,8 +90,8 @@ VkResult anv_CreateShader(
const char *name = pCreateInfo->pName ? pCreateInfo->pName : "main";
size_t name_len = strlen(name);
shader = anv_device_alloc(device, sizeof(*shader) + name_len + 1, 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
shader = anv_alloc(&device->alloc, sizeof(*shader) + name_len + 1, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (shader == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@ -107,7 +110,7 @@ void anv_DestroyShader(
ANV_FROM_HANDLE(anv_device, device, _device);
ANV_FROM_HANDLE(anv_shader, shader, _shader);
anv_device_free(device, shader);
anv_free(&device->alloc, shader);
}
#define SPIR_V_MAGIC_NUMBER 0x07230203
@ -187,6 +190,7 @@ anv_shader_compile_to_nir(struct anv_device *device,
VkResult anv_CreatePipelineCache(
VkDevice device,
const VkPipelineCacheCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkPipelineCache* pPipelineCache)
{
*pPipelineCache = (VkPipelineCache)1;
@ -196,7 +200,8 @@ VkResult anv_CreatePipelineCache(
void anv_DestroyPipelineCache(
VkDevice _device,
VkPipelineCache _cache)
VkPipelineCache _cache,
const VkAllocationCallbacks* pAllocator)
{
}
@ -227,16 +232,18 @@ VkResult anv_MergePipelineCaches(
void anv_DestroyPipeline(
VkDevice _device,
VkPipeline _pipeline)
VkPipeline _pipeline,
const VkAllocationCallbacks* pAllocator)
{
ANV_FROM_HANDLE(anv_device, device, _device);
ANV_FROM_HANDLE(anv_pipeline, pipeline, _pipeline);
anv_reloc_list_finish(&pipeline->batch_relocs, pipeline->device);
anv_reloc_list_finish(&pipeline->batch_relocs,
pAllocator ? pAllocator : &device->alloc);
anv_state_stream_finish(&pipeline->program_stream);
if (pipeline->blend_state.map)
anv_state_pool_free(&device->dynamic_state_pool, pipeline->blend_state);
anv_device_free(pipeline->device, pipeline);
anv_free2(&device->alloc, pAllocator, pipeline);
}
static const uint32_t vk_to_gen_primitive_type[] = {
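A subtlety in anv_DestroyPipeline above: the reloc list does not remember which callbacks allocated it, so the destroy path re-resolves pAllocator ? pAllocator : &device->alloc, the same fallback anv_pipeline_init applies further down, before handing it to anv_reloc_list_finish. This leans on the API contract that creation and destruction receive compatible allocators. In sketch form:

   /* The allocator resolved at create time ... */
   const VkAllocationCallbacks *alloc = pAllocator ? pAllocator : &device->alloc;
   anv_reloc_list_init(&pipeline->batch_relocs, alloc);
   /* ... must be the one that releases the list at destroy time. */
   anv_reloc_list_finish(&pipeline->batch_relocs, alloc);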
@ -366,10 +373,9 @@ anv_pipeline_compile(struct anv_pipeline *pipeline,
prog_data->nr_params += MAX_DYNAMIC_BUFFERS * 2;
if (prog_data->nr_params > 0) {
/* XXX: I think we're leaking this */
prog_data->param = (const gl_constant_value **)
anv_device_alloc(pipeline->device,
prog_data->nr_params * sizeof(gl_constant_value *),
8, VK_SYSTEM_ALLOC_TYPE_INTERNAL_SHADER);
malloc(prog_data->nr_params * sizeof(gl_constant_value *));
/* We now set the param values to be offsets into a
* anv_push_constant_data structure. Since the compiler doesn't
@ -961,22 +967,23 @@ anv_pipeline_validate_create_info(const VkGraphicsPipelineCreateInfo *info)
VkResult
anv_pipeline_init(struct anv_pipeline *pipeline, struct anv_device *device,
const VkGraphicsPipelineCreateInfo *pCreateInfo,
const struct anv_graphics_pipeline_create_info *extra)
const struct anv_graphics_pipeline_create_info *extra,
const VkAllocationCallbacks *alloc)
{
VkResult result;
anv_validate {
anv_pipeline_validate_create_info(pCreateInfo);
}
if (alloc == NULL)
alloc = &device->alloc;
pipeline->device = device;
pipeline->layout = anv_pipeline_layout_from_handle(pCreateInfo->layout);
result = anv_reloc_list_init(&pipeline->batch_relocs, device);
if (result != VK_SUCCESS) {
anv_device_free(device, pipeline);
return result;
}
anv_reloc_list_init(&pipeline->batch_relocs, alloc);
/* TODO: Handle allocation fail */
pipeline->batch.alloc = alloc;
pipeline->batch.next = pipeline->batch.start = pipeline->batch_data;
pipeline->batch.end = pipeline->batch.start + sizeof(pipeline->batch_data);
pipeline->batch.relocs = &pipeline->batch_relocs;
@ -1074,6 +1081,7 @@ anv_graphics_pipeline_create(
VkDevice _device,
const VkGraphicsPipelineCreateInfo *pCreateInfo,
const struct anv_graphics_pipeline_create_info *extra,
const VkAllocationCallbacks *pAllocator,
VkPipeline *pPipeline)
{
ANV_FROM_HANDLE(anv_device, device, _device);
@ -1081,13 +1089,13 @@ anv_graphics_pipeline_create(
switch (device->info.gen) {
case 7:
if (device->info.is_haswell)
return gen75_graphics_pipeline_create(_device, pCreateInfo, extra, pPipeline);
return gen75_graphics_pipeline_create(_device, pCreateInfo, extra, pAllocator, pPipeline);
else
return gen7_graphics_pipeline_create(_device, pCreateInfo, extra, pPipeline);
return gen7_graphics_pipeline_create(_device, pCreateInfo, extra, pAllocator, pPipeline);
case 8:
return gen8_graphics_pipeline_create(_device, pCreateInfo, extra, pPipeline);
return gen8_graphics_pipeline_create(_device, pCreateInfo, extra, pAllocator, pPipeline);
case 9:
return gen9_graphics_pipeline_create(_device, pCreateInfo, extra, pPipeline);
return gen9_graphics_pipeline_create(_device, pCreateInfo, extra, pAllocator, pPipeline);
default:
unreachable("unsupported gen\n");
}
@ -1098,6 +1106,7 @@ VkResult anv_CreateGraphicsPipelines(
VkPipelineCache pipelineCache,
uint32_t count,
const VkGraphicsPipelineCreateInfo* pCreateInfos,
const VkAllocationCallbacks* pAllocator,
VkPipeline* pPipelines)
{
VkResult result = VK_SUCCESS;
@ -1105,10 +1114,10 @@ VkResult anv_CreateGraphicsPipelines(
unsigned i = 0;
for (; i < count; i++) {
result = anv_graphics_pipeline_create(_device, &pCreateInfos[i],
NULL, &pPipelines[i]);
NULL, pAllocator, &pPipelines[i]);
if (result != VK_SUCCESS) {
for (unsigned j = 0; j < i; j++) {
anv_DestroyPipeline(_device, pPipelines[j]);
anv_DestroyPipeline(_device, pPipelines[j], pAllocator);
}
return result;
@ -1121,6 +1130,7 @@ VkResult anv_CreateGraphicsPipelines(
static VkResult anv_compute_pipeline_create(
VkDevice _device,
const VkComputePipelineCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkPipeline* pPipeline)
{
ANV_FROM_HANDLE(anv_device, device, _device);
@ -1128,13 +1138,13 @@ static VkResult anv_compute_pipeline_create(
switch (device->info.gen) {
case 7:
if (device->info.is_haswell)
return gen75_compute_pipeline_create(_device, pCreateInfo, pPipeline);
return gen75_compute_pipeline_create(_device, pCreateInfo, pAllocator, pPipeline);
else
return gen7_compute_pipeline_create(_device, pCreateInfo, pPipeline);
return gen7_compute_pipeline_create(_device, pCreateInfo, pAllocator, pPipeline);
case 8:
return gen8_compute_pipeline_create(_device, pCreateInfo, pPipeline);
return gen8_compute_pipeline_create(_device, pCreateInfo, pAllocator, pPipeline);
case 9:
return gen9_compute_pipeline_create(_device, pCreateInfo, pPipeline);
return gen9_compute_pipeline_create(_device, pCreateInfo, pAllocator, pPipeline);
default:
unreachable("unsupported gen\n");
}
@ -1145,6 +1155,7 @@ VkResult anv_CreateComputePipelines(
VkPipelineCache pipelineCache,
uint32_t count,
const VkComputePipelineCreateInfo* pCreateInfos,
const VkAllocationCallbacks* pAllocator,
VkPipeline* pPipelines)
{
VkResult result = VK_SUCCESS;
@ -1152,10 +1163,10 @@ VkResult anv_CreateComputePipelines(
unsigned i = 0;
for (; i < count; i++) {
result = anv_compute_pipeline_create(_device, &pCreateInfos[i],
&pPipelines[i]);
pAllocator, &pPipelines[i]);
if (result != VK_SUCCESS) {
for (unsigned j = 0; j < i; j++) {
anv_DestroyPipeline(_device, pPipelines[j]);
anv_DestroyPipeline(_device, pPipelines[j], pAllocator);
}
return result;

@ -431,6 +431,50 @@ extern struct anv_dispatch_table dtable;
dtable.func; \
})
static inline void *
anv_alloc(const VkAllocationCallbacks *alloc,
size_t size, size_t align,
VkSystemAllocationScope scope)
{
return alloc->pfnAllocation(alloc->pUserData, size, align, scope);
}
static inline void *
anv_realloc(const VkAllocationCallbacks *alloc,
void *ptr, size_t size, size_t align,
VkSystemAllocationScope scope)
{
return alloc->pfnReallocation(alloc->pUserData, ptr, size, align, scope);
}
static inline void
anv_free(const VkAllocationCallbacks *alloc, void *data)
{
alloc->pfnFree(alloc->pUserData, data);
}
static inline void *
anv_alloc2(const VkAllocationCallbacks *parent_alloc,
const VkAllocationCallbacks *alloc,
size_t size, size_t align,
VkSystemAllocationScope scope)
{
if (alloc)
return anv_alloc(alloc, size, align, scope);
else
return anv_alloc(parent_alloc, size, align, scope);
}
static inline void
anv_free2(const VkAllocationCallbacks *parent_alloc,
const VkAllocationCallbacks *alloc,
void *data)
{
if (alloc)
anv_free(alloc, data);
else
anv_free(parent_alloc, data);
}
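These two wrappers encode the fallback rule used throughout this commit: prefer the per-call pAllocator, otherwise fall back to the parent object's callbacks. Every Create/Destroy pair in the hunks above follows the same shape; condensed from anv_CreateFence/anv_DestroyFence earlier in the diff:

   /* In the Create entry point: allocate from pAllocator if the app gave
    * one, else from the device's (ultimately the instance's) allocator. */
   fence = anv_alloc2(&device->alloc, pAllocator, sizeof(*fence), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (fence == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   /* In the matching Destroy entry point, with the same pAllocator: */
   anv_free2(&device->alloc, pAllocator, fence);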
struct anv_physical_device {
VK_LOADER_DATA _loader_data;
@ -451,9 +495,8 @@ bool anv_is_scalar_shader_stage(const struct brw_compiler *compiler,
struct anv_instance {
VK_LOADER_DATA _loader_data;
void * pAllocUserData;
PFN_vkAllocFunction pfnAlloc;
PFN_vkFreeFunction pfnFree;
VkAllocationCallbacks alloc;
uint32_t apiVersion;
int physicalDeviceCount;
struct anv_physical_device physicalDevice;
@ -497,6 +540,8 @@ struct anv_queue {
struct anv_device {
VK_LOADER_DATA _loader_data;
VkAllocationCallbacks alloc;
struct anv_instance * instance;
uint32_t chipset_id;
struct brw_device_info info;
@ -526,26 +571,6 @@ struct anv_device {
pthread_mutex_t mutex;
};
void *
anv_instance_alloc(struct anv_instance * instance,
size_t size,
size_t alignment,
VkSystemAllocType allocType);
void
anv_instance_free(struct anv_instance * instance,
void * mem);
void *
anv_device_alloc(struct anv_device * device,
size_t size,
size_t alignment,
VkSystemAllocType allocType);
void
anv_device_free(struct anv_device * device,
void * mem);
void* anv_gem_mmap(struct anv_device *device,
uint32_t gem_handle, uint64_t offset, uint64_t size);
void anv_gem_munmap(void *p, uint64_t size);
@ -575,12 +600,12 @@ struct anv_reloc_list {
};
VkResult anv_reloc_list_init(struct anv_reloc_list *list,
struct anv_device *device);
const VkAllocationCallbacks *alloc);
void anv_reloc_list_finish(struct anv_reloc_list *list,
struct anv_device *device);
const VkAllocationCallbacks *alloc);
uint64_t anv_reloc_list_add(struct anv_reloc_list *list,
struct anv_device *device,
const VkAllocationCallbacks *alloc,
uint32_t offset, struct anv_bo *target_bo,
uint32_t delta);
@ -600,7 +625,7 @@ struct anv_batch_bo {
};
struct anv_batch {
struct anv_device * device;
const VkAllocationCallbacks * alloc;
void * start;
void * end;
@ -977,6 +1002,7 @@ struct anv_cmd_state {
};
struct anv_cmd_pool {
VkAllocationCallbacks alloc;
struct list_head cmd_buffers;
};
@ -994,6 +1020,7 @@ struct anv_cmd_buffer {
struct anv_device * device;
struct anv_cmd_pool * pool;
struct list_head pool_link;
struct anv_batch batch;
@ -1220,7 +1247,8 @@ struct anv_graphics_pipeline_create_info {
VkResult
anv_pipeline_init(struct anv_pipeline *pipeline, struct anv_device *device,
const VkGraphicsPipelineCreateInfo *pCreateInfo,
const struct anv_graphics_pipeline_create_info *extra);
const struct anv_graphics_pipeline_create_info *extra,
const VkAllocationCallbacks *alloc);
VkResult
anv_pipeline_compile_cs(struct anv_pipeline *pipeline,
@ -1231,46 +1259,55 @@ VkResult
anv_graphics_pipeline_create(VkDevice device,
const VkGraphicsPipelineCreateInfo *pCreateInfo,
const struct anv_graphics_pipeline_create_info *extra,
const VkAllocationCallbacks *alloc,
VkPipeline *pPipeline);
VkResult
gen7_graphics_pipeline_create(VkDevice _device,
const VkGraphicsPipelineCreateInfo *pCreateInfo,
const struct anv_graphics_pipeline_create_info *extra,
const VkAllocationCallbacks *alloc,
VkPipeline *pPipeline);
VkResult
gen75_graphics_pipeline_create(VkDevice _device,
const VkGraphicsPipelineCreateInfo *pCreateInfo,
const struct anv_graphics_pipeline_create_info *extra,
const VkAllocationCallbacks *alloc,
VkPipeline *pPipeline);
VkResult
gen8_graphics_pipeline_create(VkDevice _device,
const VkGraphicsPipelineCreateInfo *pCreateInfo,
const struct anv_graphics_pipeline_create_info *extra,
const VkAllocationCallbacks *alloc,
VkPipeline *pPipeline);
VkResult
gen9_graphics_pipeline_create(VkDevice _device,
const VkGraphicsPipelineCreateInfo *pCreateInfo,
const struct anv_graphics_pipeline_create_info *extra,
const VkAllocationCallbacks *alloc,
VkPipeline *pPipeline);
VkResult
gen7_compute_pipeline_create(VkDevice _device,
const VkComputePipelineCreateInfo *pCreateInfo,
const VkAllocationCallbacks *alloc,
VkPipeline *pPipeline);
VkResult
gen75_compute_pipeline_create(VkDevice _device,
const VkComputePipelineCreateInfo *pCreateInfo,
const VkAllocationCallbacks *alloc,
VkPipeline *pPipeline);
VkResult
gen8_compute_pipeline_create(VkDevice _device,
const VkComputePipelineCreateInfo *pCreateInfo,
const VkAllocationCallbacks *alloc,
VkPipeline *pPipeline);
VkResult
gen9_compute_pipeline_create(VkDevice _device,
const VkComputePipelineCreateInfo *pCreateInfo,
const VkAllocationCallbacks *alloc,
VkPipeline *pPipeline);
struct anv_format {
@ -1406,6 +1443,7 @@ struct anv_image_create_info {
VkResult anv_image_create(VkDevice _device,
const struct anv_image_create_info *info,
const VkAllocationCallbacks* alloc,
VkImage *pImage);
struct anv_surface *

@ -32,6 +32,7 @@
VkResult anv_CreateQueryPool(
VkDevice _device,
const VkQueryPoolCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkQueryPool* pQueryPool)
{
ANV_FROM_HANDLE(anv_device, device, _device);
@ -50,8 +51,8 @@ VkResult anv_CreateQueryPool(
unreachable("");
}
pool = anv_device_alloc(device, sizeof(*pool), 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
pool = anv_alloc2(&device->alloc, pAllocator, sizeof(*pool), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (pool == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@ -67,21 +68,22 @@ VkResult anv_CreateQueryPool(
return VK_SUCCESS;
fail:
anv_device_free(device, pool);
anv_free2(&device->alloc, pAllocator, pool);
return result;
}
void anv_DestroyQueryPool(
VkDevice _device,
VkQueryPool _pool)
VkQueryPool _pool,
const VkAllocationCallbacks* pAllocator)
{
ANV_FROM_HANDLE(anv_device, device, _device);
ANV_FROM_HANDLE(anv_query_pool, pool, _pool);
anv_gem_munmap(pool->bo.map, pool->bo.size);
anv_gem_close(device, pool->bo.gem_handle);
anv_device_free(device, pool);
anv_free2(&device->alloc, pAllocator, pool);
}
VkResult anv_GetQueryPoolResults(

@ -215,15 +215,15 @@ wsi_wl_display_destroy(struct wsi_wayland *wsi, struct wsi_wl_display *display)
anv_vector_finish(&display->formats);
if (display->drm)
wl_drm_destroy(display->drm);
anv_instance_free(wsi->instance, display);
anv_free(&wsi->instance->alloc, display);
}
static struct wsi_wl_display *
wsi_wl_display_create(struct wsi_wayland *wsi, struct wl_display *wl_display)
{
struct wsi_wl_display *display =
anv_instance_alloc(wsi->instance, sizeof(*display), 8,
VK_SYSTEM_ALLOC_TYPE_INTERNAL);
anv_alloc(&wsi->instance->alloc, sizeof(*display), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (!display)
return NULL;
@ -520,8 +520,8 @@ static void
wsi_wl_image_finish(struct wsi_wl_swapchain *chain, struct wsi_wl_image *image)
{
VkDevice vk_device = anv_device_to_handle(chain->base.device);
anv_FreeMemory(vk_device, anv_device_memory_to_handle(image->memory));
anv_DestroyImage(vk_device, anv_image_to_handle(image->image));
anv_FreeMemory(vk_device, anv_device_memory_to_handle(image->memory), NULL);
anv_DestroyImage(vk_device, anv_image_to_handle(image->image), NULL);
}
static void
@ -568,6 +568,7 @@ wsi_wl_image_init(struct wsi_wl_swapchain *chain, struct wsi_wl_image *image)
.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
.flags = 0,
}},
NULL,
&vk_image);
if (result != VK_SUCCESS)
@ -579,12 +580,13 @@ wsi_wl_image_init(struct wsi_wl_swapchain *chain, struct wsi_wl_image *image)
struct anv_surface *surface = &image->image->color_surface;
VkDeviceMemory vk_memory;
result = anv_AllocMemory(vk_device,
&(VkMemoryAllocInfo) {
.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOC_INFO,
result = anv_AllocateMemory(vk_device,
&(VkMemoryAllocateInfo) {
.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
.allocationSize = image->image->size,
.memoryTypeIndex = 0,
},
NULL,
&vk_memory);
if (result != VK_SUCCESS)
@ -631,9 +633,9 @@ wsi_wl_image_init(struct wsi_wl_swapchain *chain, struct wsi_wl_image *image)
return VK_SUCCESS;
fail_mem:
anv_FreeMemory(vk_device, vk_memory);
anv_FreeMemory(vk_device, vk_memory, NULL);
fail_image:
anv_DestroyImage(vk_device, vk_image);
anv_DestroyImage(vk_device, vk_image, NULL);
return result;
}
@ -648,7 +650,7 @@ wsi_wl_destroy_swapchain(struct anv_swapchain *anv_chain)
wsi_wl_image_finish(chain, &chain->images[i]);
}
anv_device_free(chain->base.device, chain);
anv_free(&chain->base.device->alloc, chain);
return VK_SUCCESS;
}
@ -685,8 +687,8 @@ wsi_wl_create_swapchain(struct anv_wsi_implementation *impl,
num_images = MAX2(num_images, 4);
size_t size = sizeof(*chain) + num_images * sizeof(chain->images[0]);
chain = anv_device_alloc(device, size, 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
chain = anv_alloc(&device->alloc, size, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (chain == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@ -744,8 +746,8 @@ anv_wl_init_wsi(struct anv_instance *instance)
struct wsi_wayland *wsi;
VkResult result;
wsi = anv_instance_alloc(instance, sizeof(*wsi), 8,
VK_SYSTEM_ALLOC_TYPE_INTERNAL);
wsi = anv_alloc(&instance->alloc, sizeof(*wsi), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (!wsi)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@ -784,7 +786,7 @@ fail_mutex:
pthread_mutex_destroy(&wsi->mutex);
fail_alloc:
anv_instance_free(instance, wsi);
anv_free(&instance->alloc, wsi);
return result;
}
@ -799,5 +801,5 @@ anv_wl_finish_wsi(struct anv_instance *instance)
pthread_mutex_destroy(&wsi->mutex);
anv_instance_free(instance, wsi);
anv_free(&instance->alloc, wsi);
}

@ -245,7 +245,7 @@ x11_destroy_swapchain(struct anv_swapchain *anv_chain)
/* TODO: Delete images and free memory */
}
anv_device_free(chain->base.device, chain);
anv_free(NULL /* XXX: pAllocator */, chain);
return VK_SUCCESS;
}
@ -271,8 +271,8 @@ x11_create_swapchain(struct anv_wsi_implementation *impl,
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR);
size_t size = sizeof(*chain) + num_images * sizeof(chain->images[0]);
chain = anv_device_alloc(device, size, 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
chain = anv_alloc(&device->alloc, size, 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (chain == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@ -320,6 +320,7 @@ x11_create_swapchain(struct anv_wsi_implementation *impl,
.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
.flags = 0,
}},
NULL,
&image_h);
image = anv_image_from_handle(image_h);
@ -327,13 +328,14 @@ x11_create_swapchain(struct anv_wsi_implementation *impl,
surface = &image->color_surface;
anv_AllocMemory(anv_device_to_handle(device),
&(VkMemoryAllocInfo) {
.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOC_INFO,
.allocationSize = image->size,
.memoryTypeIndex = 0,
},
&memory_h);
anv_AllocateMemory(anv_device_to_handle(device),
&(VkMemoryAllocateInfo) {
.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
.allocationSize = image->size,
.memoryTypeIndex = 0,
},
NULL /* XXX: pAllocator */,
&memory_h);
memory = anv_device_memory_from_handle(memory_h);
@ -406,8 +408,8 @@ anv_x11_init_wsi(struct anv_instance *instance)
{
struct anv_wsi_implementation *impl;
impl = anv_instance_alloc(instance, sizeof(*impl), 8,
VK_SYSTEM_ALLOC_TYPE_INTERNAL);
impl = anv_alloc(&instance->alloc, sizeof(*impl), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (!impl)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@ -425,5 +427,5 @@ anv_x11_init_wsi(struct anv_instance *instance)
void
anv_x11_finish_wsi(struct anv_instance *instance)
{
anv_instance_free(instance, instance->wsi_impl[VK_PLATFORM_XCB_KHR]);
anv_free(&instance->alloc, instance->wsi_impl[VK_PLATFORM_XCB_KHR]);
}

@ -329,6 +329,7 @@ genX(graphics_pipeline_create)(
VkDevice _device,
const VkGraphicsPipelineCreateInfo* pCreateInfo,
const struct anv_graphics_pipeline_create_info *extra,
const VkAllocationCallbacks* pAllocator,
VkPipeline* pPipeline)
{
ANV_FROM_HANDLE(anv_device, device, _device);
@ -337,14 +338,14 @@ genX(graphics_pipeline_create)(
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO);
pipeline = anv_device_alloc(device, sizeof(*pipeline), 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
pipeline = anv_alloc2(&device->alloc, pAllocator, sizeof(*pipeline), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (pipeline == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
result = anv_pipeline_init(pipeline, device, pCreateInfo, extra);
result = anv_pipeline_init(pipeline, device, pCreateInfo, extra, pAllocator);
if (result != VK_SUCCESS) {
anv_device_free(device, pipeline);
anv_free2(&device->alloc, pAllocator, pipeline);
return result;
}
@ -582,6 +583,7 @@ GENX_FUNC(GEN7, GEN75) VkResult
genX(compute_pipeline_create)(
VkDevice _device,
const VkComputePipelineCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkPipeline* pPipeline)
{
anv_finishme("primitive_id needs sbe swizzling setup");

@ -107,6 +107,7 @@ alloc_surface_state(struct anv_device *device,
VkResult genX(CreateSampler)(
VkDevice _device,
const VkSamplerCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkSampler* pSampler)
{
ANV_FROM_HANDLE(anv_device, device, _device);
@ -115,8 +116,8 @@ VkResult genX(CreateSampler)(
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);
sampler = anv_device_alloc(device, sizeof(*sampler), 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
sampler = anv_alloc2(&device->alloc, pAllocator, sizeof(*sampler), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (!sampler)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

@ -332,6 +332,7 @@ genX(graphics_pipeline_create)(
VkDevice _device,
const VkGraphicsPipelineCreateInfo* pCreateInfo,
const struct anv_graphics_pipeline_create_info *extra,
const VkAllocationCallbacks* pAllocator,
VkPipeline* pPipeline)
{
ANV_FROM_HANDLE(anv_device, device, _device);
@ -341,12 +342,12 @@ genX(graphics_pipeline_create)(
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO);
pipeline = anv_device_alloc(device, sizeof(*pipeline), 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
pipeline = anv_alloc2(&device->alloc, pAllocator, sizeof(*pipeline), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (pipeline == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
result = anv_pipeline_init(pipeline, device, pCreateInfo, extra);
result = anv_pipeline_init(pipeline, device, pCreateInfo, extra, pAllocator);
if (result != VK_SUCCESS)
return result;
@ -663,6 +664,7 @@ genX(graphics_pipeline_create)(
VkResult genX(compute_pipeline_create)(
VkDevice _device,
const VkComputePipelineCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkPipeline* pPipeline)
{
ANV_FROM_HANDLE(anv_device, device, _device);
@ -671,8 +673,8 @@ VkResult genX(compute_pipeline_create)(
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO);
pipeline = anv_device_alloc(device, sizeof(*pipeline), 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
pipeline = anv_alloc2(&device->alloc, pAllocator, sizeof(*pipeline), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (pipeline == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
@ -681,9 +683,10 @@ VkResult genX(compute_pipeline_create)(
pipeline->blend_state.map = NULL;
result = anv_reloc_list_init(&pipeline->batch_relocs, device);
result = anv_reloc_list_init(&pipeline->batch_relocs,
pAllocator ? pAllocator : &device->alloc);
if (result != VK_SUCCESS) {
anv_device_free(device, pipeline);
anv_free2(&device->alloc, pAllocator, pipeline);
return result;
}
pipeline->batch.next = pipeline->batch.start = pipeline->batch_data;

@ -269,6 +269,7 @@ genX(image_view_init)(struct anv_image_view *iview,
VkResult genX(CreateSampler)(
VkDevice _device,
const VkSamplerCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkSampler* pSampler)
{
ANV_FROM_HANDLE(anv_device, device, _device);
@ -277,8 +278,8 @@ VkResult genX(CreateSampler)(
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);
sampler = anv_device_alloc(device, sizeof(*sampler), 8,
VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
sampler = anv_alloc2(&device->alloc, pAllocator, sizeof(*sampler), 8,
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
if (!sampler)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);