venus: split out vn_render_pass.[ch]

Move VkRenderPass and VkFramebuffer functions to the new files.

Signed-off-by: Chia-I Wu <olvaffe@gmail.com>
Reviewed-by: Yiwei Zhang <zzyiwei@chromium.org>
Acked-by: Erik Faye-Lund <erik.faye-lund@collabora.com>
Reviewed-by: Ryan Neph <ryanneph@google.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/10117>
Chia-I Wu authored on 2021-04-07 16:42:37 -07:00; committed by Marge Bot
parent 02e6164b08
commit c3fbe3368b
5 changed files with 193 additions and 160 deletions

src/virtio/vulkan/meson.build

@@ -38,6 +38,7 @@ libvn_files = files(
  'vn_icd.c',
  'vn_pipeline.c',
  'vn_query_pool.c',
  'vn_render_pass.c',
  'vn_ring.c',
  'vn_renderer_virtgpu.c',
  'vn_renderer_vtest.c',

src/virtio/vulkan/vn_device.c

@@ -6146,148 +6146,6 @@ vn_UpdateDescriptorSetWithTemplate(
   mtx_unlock(&templ->mutex);
}

/* render pass commands */

VkResult
vn_CreateRenderPass(VkDevice device,
                    const VkRenderPassCreateInfo *pCreateInfo,
                    const VkAllocationCallbacks *pAllocator,
                    VkRenderPass *pRenderPass)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   struct vn_render_pass *pass =
      vk_zalloc(alloc, sizeof(*pass), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pass)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&pass->base, VK_OBJECT_TYPE_RENDER_PASS, &dev->base);

   /* XXX VK_IMAGE_LAYOUT_PRESENT_SRC_KHR */

   VkRenderPass pass_handle = vn_render_pass_to_handle(pass);
   vn_async_vkCreateRenderPass(dev->instance, device, pCreateInfo, NULL,
                               &pass_handle);

   *pRenderPass = pass_handle;

   return VK_SUCCESS;
}

VkResult
vn_CreateRenderPass2(VkDevice device,
                     const VkRenderPassCreateInfo2 *pCreateInfo,
                     const VkAllocationCallbacks *pAllocator,
                     VkRenderPass *pRenderPass)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   struct vn_render_pass *pass =
      vk_zalloc(alloc, sizeof(*pass), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pass)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&pass->base, VK_OBJECT_TYPE_RENDER_PASS, &dev->base);

   /* XXX VK_IMAGE_LAYOUT_PRESENT_SRC_KHR */

   VkRenderPass pass_handle = vn_render_pass_to_handle(pass);
   vn_async_vkCreateRenderPass2(dev->instance, device, pCreateInfo, NULL,
                                &pass_handle);

   *pRenderPass = pass_handle;

   return VK_SUCCESS;
}

void
vn_DestroyRenderPass(VkDevice device,
                     VkRenderPass renderPass,
                     const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_render_pass *pass = vn_render_pass_from_handle(renderPass);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!pass)
      return;

   vn_async_vkDestroyRenderPass(dev->instance, device, renderPass, NULL);

   vn_object_base_fini(&pass->base);
   vk_free(alloc, pass);
}

void
vn_GetRenderAreaGranularity(VkDevice device,
                            VkRenderPass renderPass,
                            VkExtent2D *pGranularity)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_render_pass *pass = vn_render_pass_from_handle(renderPass);

   if (!pass->granularity.width) {
      vn_call_vkGetRenderAreaGranularity(dev->instance, device, renderPass,
                                         &pass->granularity);
   }

   *pGranularity = pass->granularity;
}

/* framebuffer commands */

VkResult
vn_CreateFramebuffer(VkDevice device,
                     const VkFramebufferCreateInfo *pCreateInfo,
                     const VkAllocationCallbacks *pAllocator,
                     VkFramebuffer *pFramebuffer)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   struct vn_framebuffer *fb = vk_zalloc(alloc, sizeof(*fb), VN_DEFAULT_ALIGN,
                                         VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!fb)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&fb->base, VK_OBJECT_TYPE_FRAMEBUFFER, &dev->base);

   VkFramebuffer fb_handle = vn_framebuffer_to_handle(fb);
   vn_async_vkCreateFramebuffer(dev->instance, device, pCreateInfo, NULL,
                                &fb_handle);

   *pFramebuffer = fb_handle;

   return VK_SUCCESS;
}

void
vn_DestroyFramebuffer(VkDevice device,
                      VkFramebuffer framebuffer,
                      const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_framebuffer *fb = vn_framebuffer_from_handle(framebuffer);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!fb)
      return;

   vn_async_vkDestroyFramebuffer(dev->instance, device, framebuffer, NULL);

   vn_object_base_fini(&fb->base);
   vk_free(alloc, fb);
}

/* event commands */

VkResult

src/virtio/vulkan/vn_device.h

@@ -318,24 +318,6 @@ VK_DEFINE_NONDISP_HANDLE_CASTS(vn_descriptor_update_template,
                               VkDescriptorUpdateTemplate,
                               VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE)

struct vn_render_pass {
   struct vn_object_base base;

   VkExtent2D granularity;
};
VK_DEFINE_NONDISP_HANDLE_CASTS(vn_render_pass,
                               base.base,
                               VkRenderPass,
                               VK_OBJECT_TYPE_RENDER_PASS)

struct vn_framebuffer {
   struct vn_object_base base;
};
VK_DEFINE_NONDISP_HANDLE_CASTS(vn_framebuffer,
                               base.base,
                               VkFramebuffer,
                               VK_OBJECT_TYPE_FRAMEBUFFER)

struct vn_event {
   struct vn_object_base base;
};

src/virtio/vulkan/vn_render_pass.c

@@ -0,0 +1,158 @@
/*
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: MIT
 *
 * based in part on anv and radv which are:
 * Copyright © 2015 Intel Corporation
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 */

#include "vn_render_pass.h"

#include "venus-protocol/vn_protocol_driver_framebuffer.h"
#include "venus-protocol/vn_protocol_driver_render_pass.h"

#include "vn_device.h"

/* render pass commands */

VkResult
vn_CreateRenderPass(VkDevice device,
                    const VkRenderPassCreateInfo *pCreateInfo,
                    const VkAllocationCallbacks *pAllocator,
                    VkRenderPass *pRenderPass)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   struct vn_render_pass *pass =
      vk_zalloc(alloc, sizeof(*pass), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pass)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&pass->base, VK_OBJECT_TYPE_RENDER_PASS, &dev->base);

   /* XXX VK_IMAGE_LAYOUT_PRESENT_SRC_KHR */

   VkRenderPass pass_handle = vn_render_pass_to_handle(pass);
   vn_async_vkCreateRenderPass(dev->instance, device, pCreateInfo, NULL,
                               &pass_handle);

   *pRenderPass = pass_handle;

   return VK_SUCCESS;
}

VkResult
vn_CreateRenderPass2(VkDevice device,
                     const VkRenderPassCreateInfo2 *pCreateInfo,
                     const VkAllocationCallbacks *pAllocator,
                     VkRenderPass *pRenderPass)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   struct vn_render_pass *pass =
      vk_zalloc(alloc, sizeof(*pass), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pass)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&pass->base, VK_OBJECT_TYPE_RENDER_PASS, &dev->base);

   /* XXX VK_IMAGE_LAYOUT_PRESENT_SRC_KHR */

   VkRenderPass pass_handle = vn_render_pass_to_handle(pass);
   vn_async_vkCreateRenderPass2(dev->instance, device, pCreateInfo, NULL,
                                &pass_handle);

   *pRenderPass = pass_handle;

   return VK_SUCCESS;
}

void
vn_DestroyRenderPass(VkDevice device,
                     VkRenderPass renderPass,
                     const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_render_pass *pass = vn_render_pass_from_handle(renderPass);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!pass)
      return;

   vn_async_vkDestroyRenderPass(dev->instance, device, renderPass, NULL);

   vn_object_base_fini(&pass->base);
   vk_free(alloc, pass);
}

void
vn_GetRenderAreaGranularity(VkDevice device,
                            VkRenderPass renderPass,
                            VkExtent2D *pGranularity)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_render_pass *pass = vn_render_pass_from_handle(renderPass);

   if (!pass->granularity.width) {
      vn_call_vkGetRenderAreaGranularity(dev->instance, device, renderPass,
                                         &pass->granularity);
   }

   *pGranularity = pass->granularity;
}

/* framebuffer commands */

VkResult
vn_CreateFramebuffer(VkDevice device,
                     const VkFramebufferCreateInfo *pCreateInfo,
                     const VkAllocationCallbacks *pAllocator,
                     VkFramebuffer *pFramebuffer)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   struct vn_framebuffer *fb = vk_zalloc(alloc, sizeof(*fb), VN_DEFAULT_ALIGN,
                                         VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!fb)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&fb->base, VK_OBJECT_TYPE_FRAMEBUFFER, &dev->base);

   VkFramebuffer fb_handle = vn_framebuffer_to_handle(fb);
   vn_async_vkCreateFramebuffer(dev->instance, device, pCreateInfo, NULL,
                                &fb_handle);

   *pFramebuffer = fb_handle;

   return VK_SUCCESS;
}

void
vn_DestroyFramebuffer(VkDevice device,
                      VkFramebuffer framebuffer,
                      const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_framebuffer *fb = vn_framebuffer_from_handle(framebuffer);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!fb)
      return;

   vn_async_vkDestroyFramebuffer(dev->instance, device, framebuffer, NULL);

   vn_object_base_fini(&fb->base);
   vk_free(alloc, fb);
}

src/virtio/vulkan/vn_render_pass.h

@@ -0,0 +1,34 @@
/*
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: MIT
 *
 * based in part on anv and radv which are:
 * Copyright © 2015 Intel Corporation
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 */

#ifndef VN_RENDER_PASS_H
#define VN_RENDER_PASS_H

#include "vn_common.h"

struct vn_render_pass {
   struct vn_object_base base;

   VkExtent2D granularity;
};
VK_DEFINE_NONDISP_HANDLE_CASTS(vn_render_pass,
                               base.base,
                               VkRenderPass,
                               VK_OBJECT_TYPE_RENDER_PASS)

struct vn_framebuffer {
   struct vn_object_base base;
};
VK_DEFINE_NONDISP_HANDLE_CASTS(vn_framebuffer,
                               base.base,
                               VkFramebuffer,
                               VK_OBJECT_TYPE_FRAMEBUFFER)

#endif /* VN_RENDER_PASS_H */
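
For context: the VK_DEFINE_NONDISP_HANDLE_CASTS invocations above generate the vn_render_pass_from_handle()/vn_render_pass_to_handle() helpers used throughout the moved code, so after this split any venus source that touches render pass or framebuffer objects only needs the new header. A minimal consumer sketch, not part of this commit; the example_* name is made up for illustration:

/* Illustrative only: reads the granularity that the moved
 * vn_GetRenderAreaGranularity lazily caches on first query. */
#include "vn_render_pass.h"

static bool
example_granularity_is_cached(VkRenderPass renderPass)
{
   struct vn_render_pass *pass = vn_render_pass_from_handle(renderPass);

   /* a zero width means vn_GetRenderAreaGranularity has not yet
    * queried the renderer for this pass */
   return pass->granularity.width != 0;
}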