2018-08-31 16:50:20 +02:00
|
|
|
/*
|
|
|
|
|
* Copyright 2018 Collabora Ltd.
|
|
|
|
|
*
|
|
|
|
|
* Permission is hereby granted, free of charge, to any person obtaining a
|
|
|
|
|
* copy of this software and associated documentation files (the "Software"),
|
|
|
|
|
* to deal in the Software without restriction, including without limitation
|
|
|
|
|
* on the rights to use, copy, modify, merge, publish, distribute, sub
|
|
|
|
|
* license, and/or sell copies of the Software, and to permit persons to whom
|
|
|
|
|
* the Software is furnished to do so, subject to the following conditions:
|
|
|
|
|
*
|
|
|
|
|
* The above copyright notice and this permission notice (including the next
|
|
|
|
|
* paragraph) shall be included in all copies or substantial portions of the
|
|
|
|
|
* Software.
|
|
|
|
|
*
|
|
|
|
|
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
|
|
|
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
|
|
|
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
|
|
|
|
|
* THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
|
|
|
|
|
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
|
|
|
|
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
|
|
|
|
* USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
|
|
|
*/
|
|
|
|
|
|
|
|
|
|
#include "zink_resource.h"
|
|
|
|
|
|
2019-06-13 11:08:13 +02:00
|
|
|
#include "zink_batch.h"
|
2018-08-31 16:50:20 +02:00
|
|
|
#include "zink_context.h"
|
2020-10-02 13:40:40 -04:00
|
|
|
#include "zink_program.h"
|
2018-08-31 16:50:20 +02:00
|
|
|
#include "zink_screen.h"
|
|
|
|
|
|
2020-10-30 10:44:17 +01:00
|
|
|
#include "vulkan/wsi/wsi_common.h"
|
|
|
|
|
|
2018-08-31 16:50:20 +02:00
|
|
|
#include "util/slab.h"
|
|
|
|
|
#include "util/u_debug.h"
|
2019-06-27 15:05:31 -07:00
|
|
|
#include "util/format/u_format.h"
|
2020-06-01 10:53:19 -04:00
|
|
|
#include "util/u_transfer_helper.h"
|
2018-08-31 16:50:20 +02:00
|
|
|
#include "util/u_inlines.h"
|
|
|
|
|
#include "util/u_memory.h"
|
|
|
|
|
|
2019-12-03 18:01:31 -05:00
|
|
|
#include "frontend/sw_winsys.h"
|
2018-08-31 16:50:20 +02:00
|
|
|
|
2020-11-03 18:42:58 +01:00
|
|
|
#ifndef _WIN32
|
|
|
|
|
#define ZINK_USE_DMABUF
|
|
|
|
|
#endif
|
|
|
|
|
|
|
|
|
|
#ifdef ZINK_USE_DMABUF
|
2020-10-19 11:17:30 +02:00
|
|
|
#include "drm-uapi/drm_fourcc.h"
|
2020-11-03 18:42:58 +01:00
|
|
|
#endif
|
2020-10-19 11:17:30 +02:00
|
|
|
|
2020-10-27 12:07:36 -04:00
|
|
|
/* Debug-describe callback used by the pipe reference-counting debug
 * machinery: writes a short, fixed identifying tag for a resource object
 * into the caller-provided buffer. The object pointer is not inspected.
 */
void
debug_describe_zink_resource_object(char *buf, const struct zink_resource_object *ptr)
{
   sprintf(buf, "%s", "zink_resource_object");
}
|
|
|
|
|
|
|
|
|
|
|
2020-09-04 12:08:18 -04:00
|
|
|
/* Block until every batch that has pending WRITE access to 'res' has
 * completed.
 *
 * Compute-batch READ usage is masked out up front: reads never need to be
 * waited on here, and leaving the bit set would confuse the latest-batch
 * lookup below.
 */
static void
resource_sync_writes_from_batch_usage(struct zink_context *ctx, struct zink_resource *res)
{
   uint32_t batch_uses = zink_get_resource_usage(res);
   /* compute reads are irrelevant for write synchronization */
   batch_uses &= ~(ZINK_RESOURCE_ACCESS_READ << ZINK_COMPUTE_BATCH_ID);

   /* build a mask covering the WRITE bit of every gfx + compute batch slot */
   uint32_t write_mask = 0;
   for (int i = 0; i < ZINK_NUM_GFX_BATCHES + ZINK_COMPUTE_BATCH_COUNT; i++)
      write_mask |= ZINK_RESOURCE_ACCESS_WRITE << i;
   /* repeatedly wait on the most recent writing batch until no writes remain */
   while (batch_uses & write_mask) {
      int batch_id = zink_get_resource_latest_batch_usage(ctx, batch_uses);
      if (batch_id == -1)
         break;
      zink_wait_on_batch(ctx, batch_id);
      /* that batch has finished; clear both its access bits and re-check */
      batch_uses &= ~((ZINK_RESOURCE_ACCESS_READ | ZINK_RESOURCE_ACCESS_WRITE) << batch_id);
   }
}
|
|
|
|
|
|
|
|
|
|
/* Return the id of the most recently-submitted batch that has WRITE access
 * recorded in 'batch_uses', or -1 if no batch holds a write.
 *
 * The compute batch is checked first and unconditionally wins if it has a
 * write; otherwise the gfx batches are scanned backwards starting from the
 * current batch, wrapping around at the low end.
 */
int
zink_get_resource_latest_batch_usage(struct zink_context *ctx, uint32_t batch_uses)
{
   unsigned cur_batch = zink_curr_batch(ctx)->batch_id;

   /* a compute-batch write always takes priority */
   if (batch_uses & ZINK_RESOURCE_ACCESS_WRITE << ZINK_COMPUTE_BATCH_ID)
      return ZINK_COMPUTE_BATCH_ID;
   batch_uses &= ~(ZINK_RESOURCE_ACCESS_WRITE << ZINK_COMPUTE_BATCH_ID);

   if (!batch_uses)
      return -1;
   for (unsigned i = 0; i < ZINK_NUM_BATCHES + 1; i++) {
      /* loop backwards and sync with highest batch id that has writes */
      if (batch_uses & (ZINK_RESOURCE_ACCESS_WRITE << cur_batch)) {
         return cur_batch;
      }
      cur_batch--;
      /* cur_batch is unsigned: decrementing past 0 wraps to a huge value,
       * which this comparison detects to restart at the top gfx batch id
       */
      if (cur_batch > ZINK_COMPUTE_BATCH_ID - 1) // underflowed past max batch id
         cur_batch = ZINK_COMPUTE_BATCH_ID - 1;
   }
   /* only READ bits remained */
   return -1;
}
|
|
|
|
|
|
2020-10-13 09:42:07 -04:00
|
|
|
static uint32_t
|
|
|
|
|
mem_hash(const void *key)
|
|
|
|
|
{
|
|
|
|
|
return _mesa_hash_data(key, sizeof(struct mem_key));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static bool
|
|
|
|
|
mem_equals(const void *a, const void *b)
|
|
|
|
|
{
|
|
|
|
|
return !memcmp(a, b, sizeof(struct mem_key));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/* Either stash the object's VkDeviceMemory in the screen's reuse cache or
 * free it outright.
 *
 * Only allocations that recorded a cache key (obj->mkey.flags != 0, set at
 * allocation time when the memory was cacheable) are eligible. At most 5
 * allocations are kept per (reqs, flags) bucket; beyond that, or for
 * uncacheable memory, the memory is released back to Vulkan.
 */
static void
cache_or_free_mem(struct zink_screen *screen, struct zink_resource_object *obj)
{
   if (obj->mkey.flags) {
      simple_mtx_lock(&screen->mem_cache_mtx);
      struct hash_entry *he = _mesa_hash_table_search_pre_hashed(screen->resource_mem_cache, obj->mem_hash, &obj->mkey);
      struct util_dynarray *array = he ? (void*)he->data : NULL;
      if (!array) {
         /* first entry for this key: create the bucket, ralloc'd off the table */
         struct mem_key *mkey = rzalloc(screen->resource_mem_cache, struct mem_key);
         memcpy(mkey, &obj->mkey, sizeof(struct mem_key));
         array = rzalloc(screen->resource_mem_cache, struct util_dynarray);
         util_dynarray_init(array, screen->resource_mem_cache);
         _mesa_hash_table_insert_pre_hashed(screen->resource_mem_cache, obj->mem_hash, mkey, array);
      }
      /* cap each bucket at 5 cached allocations */
      if (util_dynarray_num_elements(array, VkDeviceMemory) < 5) {
         util_dynarray_append(array, VkDeviceMemory, obj->mem);
         simple_mtx_unlock(&screen->mem_cache_mtx);
         return;
      }
      simple_mtx_unlock(&screen->mem_cache_mtx);
   }
   /* not cacheable, or bucket full: release the memory */
   vkFreeMemory(screen->dev, obj->mem, NULL);
}
|
|
|
|
|
|
2020-10-27 12:07:36 -04:00
|
|
|
/* Final destruction of a resource object once its refcount drops to zero:
 * destroys the Vulkan buffer/image handle, invalidates descriptor-set
 * references to the object, then caches or frees the backing memory.
 * The Vulkan handle must be destroyed before the memory is recycled.
 */
void
zink_destroy_resource_object(struct zink_screen *screen, struct zink_resource_object *obj)
{
   if (obj->is_buffer)
      vkDestroyBuffer(screen->dev, obj->buffer, NULL);
   else
      vkDestroyImage(screen->dev, obj->image, NULL);

   /* drop any descriptor sets still referencing this object */
   zink_descriptor_set_refs_clear(&obj->desc_set_refs, obj);
   /* backing memory may be recycled via the screen's memory cache */
   cache_or_free_mem(screen, obj);
   FREE(obj);
}
|
|
|
|
|
|
2018-08-31 16:50:20 +02:00
|
|
|
static void
|
|
|
|
|
zink_resource_destroy(struct pipe_screen *pscreen,
|
|
|
|
|
struct pipe_resource *pres)
|
|
|
|
|
{
|
|
|
|
|
struct zink_screen *screen = zink_screen(pscreen);
|
|
|
|
|
struct zink_resource *res = zink_resource(pres);
|
2020-10-27 11:59:33 -04:00
|
|
|
if (pres->target == PIPE_BUFFER)
|
2020-09-03 10:44:11 -04:00
|
|
|
util_range_destroy(&res->valid_buffer_range);
|
2018-08-31 16:50:20 +02:00
|
|
|
|
2020-10-27 12:07:36 -04:00
|
|
|
zink_resource_object_reference(screen, &res->obj, NULL);
|
2018-08-31 16:50:20 +02:00
|
|
|
FREE(res);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static uint32_t
|
|
|
|
|
get_memory_type_index(struct zink_screen *screen,
|
|
|
|
|
const VkMemoryRequirements *reqs,
|
|
|
|
|
VkMemoryPropertyFlags props)
|
|
|
|
|
{
|
|
|
|
|
for (uint32_t i = 0u; i < VK_MAX_MEMORY_TYPES; i++) {
|
|
|
|
|
if (((reqs->memoryTypeBits >> i) & 1) == 1) {
|
2020-09-28 18:43:36 +08:00
|
|
|
if ((screen->info.mem_props.memoryTypes[i].propertyFlags & props) == props) {
|
2018-08-31 16:50:20 +02:00
|
|
|
return i;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
unreachable("Unsupported memory-type");
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
2019-07-15 15:33:34 +02:00
|
|
|
static VkImageAspectFlags
|
|
|
|
|
aspect_from_format(enum pipe_format fmt)
|
2018-08-31 16:50:20 +02:00
|
|
|
{
|
|
|
|
|
if (util_format_is_depth_or_stencil(fmt)) {
|
|
|
|
|
VkImageAspectFlags aspect = 0;
|
|
|
|
|
const struct util_format_description *desc = util_format_description(fmt);
|
|
|
|
|
if (util_format_has_depth(desc))
|
|
|
|
|
aspect |= VK_IMAGE_ASPECT_DEPTH_BIT;
|
|
|
|
|
if (util_format_has_stencil(desc))
|
|
|
|
|
aspect |= VK_IMAGE_ASPECT_STENCIL_BIT;
|
|
|
|
|
return aspect;
|
|
|
|
|
} else
|
|
|
|
|
return VK_IMAGE_ASPECT_COLOR_BIT;
|
|
|
|
|
}
|
|
|
|
|
|
2020-10-27 11:59:33 -04:00
|
|
|
/* Create the backing object (Vulkan buffer or image + bound device memory)
 * for a resource described by 'templ'.
 *
 * \param screen          the zink screen
 * \param templ           gallium resource template describing the resource
 * \param whandle         optional winsys handle to import memory from (FD import)
 * \param optimal_tiling  optional out-param: set true when the image uses
 *                        VK_IMAGE_TILING_OPTIMAL (false for buffers/linear)
 * \return a new object with refcount 1, or NULL on failure
 *
 * Buffers get a broad usage mask because gallium bind flags under-specify
 * how a buffer will actually be used. Images translate target/bind/usage
 * flags into VkImageCreateInfo. Memory is then allocated (optionally pulled
 * from the screen's memory cache when no pNext chain and no coherent
 * mapping is required) and bound.
 */
static struct zink_resource_object *
resource_object_create(struct zink_screen *screen, const struct pipe_resource *templ, struct winsys_handle *whandle, bool *optimal_tiling)
{
   struct zink_resource_object *obj = CALLOC_STRUCT(zink_resource_object);
   if (!obj)
      return NULL;

   VkMemoryRequirements reqs = {};
   VkMemoryPropertyFlags flags;

   pipe_reference_init(&obj->reference, 1);
   util_dynarray_init(&obj->desc_set_refs.refs, NULL);
   if (templ->target == PIPE_BUFFER) {
      VkBufferCreateInfo bci = {};
      bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
      bci.size = templ->width0;

      /* always-on usages: transfers in both directions plus SSBO access */
      bci.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
                  VK_BUFFER_USAGE_TRANSFER_DST_BIT |
                  VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;

      if (templ->usage != PIPE_USAGE_STAGING)
         bci.usage |= VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;

      /* apparently gallium thinks these are the jack-of-all-trades bind types */
      if (templ->bind & (PIPE_BIND_SAMPLER_VIEW | PIPE_BIND_QUERY_BUFFER)) {
         bci.usage |= VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT |
                      VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
                      VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT |
                      VK_BUFFER_USAGE_INDEX_BUFFER_BIT |
                      VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
                      VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT;
         /* storage texel buffer only when the format actually supports it */
         VkFormatProperties props = screen->format_props[templ->format];
         if (props.bufferFeatures & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT)
            bci.usage |= VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
      }

      if (templ->bind & PIPE_BIND_VERTEX_BUFFER)
         bci.usage |= VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
                      VK_BUFFER_USAGE_INDEX_BUFFER_BIT |
                      VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT |
                      VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
                      VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT;

      if (templ->bind & PIPE_BIND_INDEX_BUFFER)
         bci.usage |= VK_BUFFER_USAGE_INDEX_BUFFER_BIT;

      if (templ->bind & PIPE_BIND_CONSTANT_BUFFER)
         bci.usage |= VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;

      if (templ->bind & PIPE_BIND_SHADER_BUFFER)
         bci.usage |= VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;

      if (templ->bind & PIPE_BIND_COMMAND_ARGS_BUFFER)
         bci.usage |= VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;

      /* exact STREAM_OUTPUT|CUSTOM combo means an xfb counter buffer;
       * any other stream-output use gets generic readback-capable usage
       */
      if (templ->bind == (PIPE_BIND_STREAM_OUTPUT | PIPE_BIND_CUSTOM)) {
         bci.usage |= VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT;
      } else if (templ->bind & PIPE_BIND_STREAM_OUTPUT) {
         bci.usage |= VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
                      VK_BUFFER_USAGE_INDEX_BUFFER_BIT |
                      VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
                      VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT;
      }

      if (vkCreateBuffer(screen->dev, &bci, NULL, &obj->buffer) != VK_SUCCESS) {
         debug_printf("vkCreateBuffer failed\n");
         goto fail1;
      }

      vkGetBufferMemoryRequirements(screen->dev, obj->buffer, &reqs);
      /* buffers are always host-visible so they can be mapped directly */
      flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
      obj->is_buffer = true;
   } else {
      VkImageCreateInfo ici = {};
      VkExternalMemoryImageCreateInfo emici = {};
      ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
      /* mutable format: gallium can create views with different formats */
      ici.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;

      switch (templ->target) {
      case PIPE_TEXTURE_1D:
      case PIPE_TEXTURE_1D_ARRAY:
         ici.imageType = VK_IMAGE_TYPE_1D;
         break;

      case PIPE_TEXTURE_CUBE:
      case PIPE_TEXTURE_CUBE_ARRAY:
         ici.flags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
         /* fall-through */
      case PIPE_TEXTURE_2D:
      case PIPE_TEXTURE_2D_ARRAY:
      case PIPE_TEXTURE_RECT:
         ici.imageType = VK_IMAGE_TYPE_2D;
         break;

      case PIPE_TEXTURE_3D:
         ici.imageType = VK_IMAGE_TYPE_3D;
         if (templ->bind & PIPE_BIND_RENDER_TARGET)
            ici.flags |= VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT;
         break;

      case PIPE_BUFFER:
         unreachable("PIPE_BUFFER should already be handled");

      default:
         unreachable("Unknown target");
      }

      ici.format = zink_get_format(screen, templ->format);
      ici.extent.width = templ->width0;
      ici.extent.height = templ->height0;
      ici.extent.depth = templ->depth0;
      ici.mipLevels = templ->last_level + 1;
      ici.arrayLayers = MAX2(templ->array_size, 1);
      ici.samples = templ->nr_samples ? templ->nr_samples : VK_SAMPLE_COUNT_1_BIT;
      ici.tiling = templ->bind & PIPE_BIND_LINEAR ? VK_IMAGE_TILING_LINEAR : VK_IMAGE_TILING_OPTIMAL;

      /* Vulkan counts cube faces as array layers */
      if (templ->target == PIPE_TEXTURE_CUBE ||
          templ->target == PIPE_TEXTURE_CUBE_ARRAY)
         ici.arrayLayers *= 6;

      if (templ->bind & PIPE_BIND_SHARED) {
         emici.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO;
         emici.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
         ici.pNext = &emici;

         /* TODO: deal with DRM modifiers here */
         ici.tiling = VK_IMAGE_TILING_LINEAR;
      }

      if (templ->usage == PIPE_USAGE_STAGING)
         ici.tiling = VK_IMAGE_TILING_LINEAR;

      /* sadly, gallium doesn't let us know if it'll ever need this, so we have to assume */
      ici.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                  VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                  VK_IMAGE_USAGE_SAMPLED_BIT;

      if ((templ->nr_samples <= 1 || screen->info.feats.features.shaderStorageImageMultisample) &&
          (templ->bind & PIPE_BIND_SHADER_IMAGE ||
          (templ->bind & PIPE_BIND_SAMPLER_VIEW && templ->flags & PIPE_RESOURCE_FLAG_TEXTURING_MORE_LIKELY))) {
         VkFormatProperties props = screen->format_props[templ->format];
         /* gallium doesn't provide any way to actually know whether this will be used as a shader image,
          * so we have to just assume and set the bit if it's available
          */
         if ((ici.tiling == VK_IMAGE_TILING_LINEAR && props.linearTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) ||
             (ici.tiling == VK_IMAGE_TILING_OPTIMAL && props.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT))
            ici.usage |= VK_IMAGE_USAGE_STORAGE_BIT;
      }
      if (optimal_tiling)
         *optimal_tiling = ici.tiling != VK_IMAGE_TILING_LINEAR;

      if (templ->bind & PIPE_BIND_RENDER_TARGET)
         ici.usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;

      if (templ->bind & PIPE_BIND_DEPTH_STENCIL)
         ici.usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;

      if (templ->flags & PIPE_RESOURCE_FLAG_SPARSE)
         ici.usage |= VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;

      if (templ->bind & PIPE_BIND_STREAM_OUTPUT)
         ici.usage |= VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;

      ici.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
      ici.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

      struct wsi_image_create_info image_wsi_info = {
         VK_STRUCTURE_TYPE_WSI_IMAGE_CREATE_INFO_MESA,
         NULL,
         .scanout = true,
      };

      /* NOTE(review): when PIPE_BIND_SHARED and SCANOUT are both set, this
       * replaces the emici chain installed above — confirm that is intended
       */
      if (screen->needs_mesa_wsi && (templ->bind & PIPE_BIND_SCANOUT))
         ici.pNext = &image_wsi_info;

      VkResult result = vkCreateImage(screen->dev, &ici, NULL, &obj->image);
      if (result != VK_SUCCESS) {
         debug_printf("vkCreateImage failed\n");
         goto fail1;
      }

      vkGetImageMemoryRequirements(screen->dev, obj->image, &reqs);
      /* staging images must be mappable; everything else prefers VRAM */
      if (templ->usage == PIPE_USAGE_STAGING)
         flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
      else
         flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
   }

   if (templ->flags & PIPE_RESOURCE_FLAG_MAP_COHERENT)
      flags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

   VkMemoryAllocateInfo mai = {};
   mai.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
   mai.allocationSize = reqs.size;
   mai.memoryTypeIndex = get_memory_type_index(screen, &reqs, flags);

   /* record whether the chosen memory type can be mapped */
   if (templ->target != PIPE_BUFFER) {
      VkMemoryType mem_type =
         screen->info.mem_props.memoryTypes[mai.memoryTypeIndex];
      obj->host_visible = mem_type.propertyFlags &
                          VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
   } else
      obj->host_visible = true;

   VkExportMemoryAllocateInfo emai = {};
   if (templ->bind & PIPE_BIND_SHARED) {
      emai.sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
      emai.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;

      emai.pNext = mai.pNext;
      mai.pNext = &emai;
   }

   VkImportMemoryFdInfoKHR imfi = {
      VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
      NULL,
   };

   if (whandle && whandle->type == WINSYS_HANDLE_TYPE_FD) {
      imfi.pNext = NULL;
      imfi.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
      imfi.fd = whandle->handle;

      /* NOTE(review): imfi is chained onto emai, which is only linked into
       * mai's pNext chain when PIPE_BIND_SHARED was set — looks like an FD
       * import without SHARED would silently drop imfi; confirm callers
       * always pass SHARED with an FD handle
       */
      imfi.pNext = mai.pNext;
      emai.pNext = &imfi;
   }

   struct wsi_memory_allocate_info memory_wsi_info = {
      VK_STRUCTURE_TYPE_WSI_MEMORY_ALLOCATE_INFO_MESA,
      NULL,
   };

   if (screen->needs_mesa_wsi && (templ->bind & PIPE_BIND_SCANOUT)) {
      memory_wsi_info.implicit_sync = true;

      memory_wsi_info.pNext = mai.pNext;
      mai.pNext = &memory_wsi_info;
   }

   /* plain allocations (no pNext chain, no coherent requirement) may be
    * satisfied from the screen's memory-reuse cache instead of Vulkan
    */
   if (!mai.pNext && !(templ->flags & PIPE_RESOURCE_FLAG_MAP_COHERENT)) {
      obj->mkey.reqs = reqs;
      obj->mkey.flags = flags;
      obj->mem_hash = mem_hash(&obj->mkey);
      simple_mtx_lock(&screen->mem_cache_mtx);

      struct hash_entry *he = _mesa_hash_table_search_pre_hashed(screen->resource_mem_cache, obj->mem_hash, &obj->mkey);

      struct util_dynarray *array = he ? (void*)he->data : NULL;
      if (array && util_dynarray_num_elements(array, VkDeviceMemory)) {
         obj->mem = util_dynarray_pop(array, VkDeviceMemory);
      }
      simple_mtx_unlock(&screen->mem_cache_mtx);
   }

   if (!obj->mem && vkAllocateMemory(screen->dev, &mai, NULL, &obj->mem) != VK_SUCCESS) {
      debug_printf("vkAllocateMemory failed\n");
      goto fail2;
   }

   obj->offset = 0;
   obj->size = reqs.size;

   if (templ->target == PIPE_BUFFER)
      vkBindBufferMemory(screen->dev, obj->buffer, obj->mem, obj->offset);
   else
      vkBindImageMemory(screen->dev, obj->image, obj->mem, obj->offset);
   return obj;

fail2:
   /* memory allocation failed: the buffer/image handle was already created */
   if (templ->target == PIPE_BUFFER)
      vkDestroyBuffer(screen->dev, obj->buffer, NULL);
   else
      vkDestroyImage(screen->dev, obj->image, NULL);
fail1:
   FREE(obj);
   return NULL;
}
|
|
|
|
|
|
|
|
|
|
static struct pipe_resource *
|
|
|
|
|
resource_create(struct pipe_screen *pscreen,
|
|
|
|
|
const struct pipe_resource *templ,
|
|
|
|
|
struct winsys_handle *whandle,
|
|
|
|
|
unsigned external_usage)
|
|
|
|
|
{
|
|
|
|
|
struct zink_screen *screen = zink_screen(pscreen);
|
|
|
|
|
struct zink_resource *res = CALLOC_STRUCT(zink_resource);
|
|
|
|
|
|
|
|
|
|
res->base = *templ;
|
|
|
|
|
|
|
|
|
|
pipe_reference_init(&res->base.reference, 1);
|
|
|
|
|
res->base.screen = pscreen;
|
2018-08-31 16:50:20 +02:00
|
|
|
|
2020-10-27 11:59:33 -04:00
|
|
|
bool optimal_tiling = false;
|
|
|
|
|
res->obj = resource_object_create(screen, templ, whandle, &optimal_tiling);
|
|
|
|
|
if (!res->obj) {
|
|
|
|
|
FREE(res);
|
|
|
|
|
return NULL;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
res->internal_format = templ->format;
|
2020-09-03 10:44:11 -04:00
|
|
|
if (templ->target == PIPE_BUFFER) {
|
|
|
|
|
util_range_init(&res->valid_buffer_range);
|
2020-10-27 11:59:33 -04:00
|
|
|
} else {
|
|
|
|
|
res->format = zink_get_format(screen, templ->format);
|
|
|
|
|
res->layout = VK_IMAGE_LAYOUT_UNDEFINED;
|
|
|
|
|
res->optimal_tiling = optimal_tiling;
|
|
|
|
|
res->aspect = aspect_from_format(templ->format);
|
|
|
|
|
}
|
2018-08-31 16:50:20 +02:00
|
|
|
|
2021-02-04 13:58:12 +01:00
|
|
|
if (screen->winsys && (templ->bind & PIPE_BIND_DISPLAY_TARGET)) {
|
2018-08-31 16:50:20 +02:00
|
|
|
struct sw_winsys *winsys = screen->winsys;
|
|
|
|
|
res->dt = winsys->displaytarget_create(screen->winsys,
|
|
|
|
|
res->base.bind,
|
|
|
|
|
res->base.format,
|
|
|
|
|
templ->width0,
|
|
|
|
|
templ->height0,
|
|
|
|
|
64, NULL,
|
|
|
|
|
&res->dt_stride);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return &res->base;
|
|
|
|
|
}
|
|
|
|
|
|
2019-07-24 17:58:54 +02:00
|
|
|
static struct pipe_resource *
|
|
|
|
|
zink_resource_create(struct pipe_screen *pscreen,
|
|
|
|
|
const struct pipe_resource *templ)
|
|
|
|
|
{
|
|
|
|
|
return resource_create(pscreen, templ, NULL, 0);
|
|
|
|
|
}
|
|
|
|
|
|
2018-10-02 23:27:36 +01:00
|
|
|
/* pipe_screen::resource_get_handle hook: export a resource to a winsys
 * handle. For images, the row stride of subresource 0 is reported. FD
 * export requires dmabuf support (non-Windows builds) and uses the
 * VK_KHR_external_memory_fd path; modifiers are reported as INVALID.
 */
static bool
zink_resource_get_handle(struct pipe_screen *pscreen,
                         struct pipe_context *context,
                         struct pipe_resource *tex,
                         struct winsys_handle *whandle,
                         unsigned usage)
{
   struct zink_resource *res = zink_resource(tex);
   struct zink_screen *screen = zink_screen(pscreen);

   if (res->base.target != PIPE_BUFFER) {
      VkImageSubresource sub_res = {};
      VkSubresourceLayout sub_res_layout = {};

      sub_res.aspectMask = res->aspect;

      vkGetImageSubresourceLayout(screen->dev, res->obj->image, &sub_res, &sub_res_layout);

      whandle->stride = sub_res_layout.rowPitch;
   }

   if (whandle->type == WINSYS_HANDLE_TYPE_FD) {
#ifdef ZINK_USE_DMABUF
      VkMemoryGetFdInfoKHR fd_info = {};
      int fd;
      fd_info.sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR;
      fd_info.memory = res->obj->mem;
      fd_info.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
      /* vk_GetMemoryFdKHR is fetched dynamically on the screen */
      VkResult result = (*screen->vk_GetMemoryFdKHR)(screen->dev, &fd_info, &fd);
      if (result != VK_SUCCESS)
         return false;
      whandle->handle = fd;
      /* no modifier support yet */
      whandle->modifier = DRM_FORMAT_MOD_INVALID;
#else
      /* FD export is unavailable without dmabuf support */
      return false;
#endif
   }
   return true;
}
|
|
|
|
|
|
2019-07-12 11:40:12 +02:00
|
|
|
static struct pipe_resource *
|
|
|
|
|
zink_resource_from_handle(struct pipe_screen *pscreen,
|
2019-07-24 17:58:54 +02:00
|
|
|
const struct pipe_resource *templ,
|
2019-07-12 11:40:12 +02:00
|
|
|
struct winsys_handle *whandle,
|
|
|
|
|
unsigned usage)
|
|
|
|
|
{
|
2020-11-03 18:42:58 +01:00
|
|
|
#ifdef ZINK_USE_DMABUF
|
2020-10-19 11:17:30 +02:00
|
|
|
if (whandle->modifier != DRM_FORMAT_MOD_INVALID)
|
|
|
|
|
return NULL;
|
|
|
|
|
|
2019-07-24 17:58:54 +02:00
|
|
|
return resource_create(pscreen, templ, whandle, usage);
|
2020-11-03 18:42:58 +01:00
|
|
|
#else
|
|
|
|
|
return NULL;
|
|
|
|
|
#endif
|
2019-07-12 11:40:12 +02:00
|
|
|
}
|
|
|
|
|
|
2020-10-27 14:30:20 -04:00
|
|
|
/* pipe_context::invalidate_resource hook for buffers: discard the current
 * contents and, if the buffer is still in use by any batch, swap in a
 * freshly-allocated backing object so pending GPU work keeps the old one.
 * Images are ignored.
 */
static void
zink_resource_invalidate(struct pipe_context *pctx, struct pipe_resource *pres)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_resource *res = zink_resource(pres);
   struct zink_screen *screen = zink_screen(pctx->screen);

   if (pres->target != PIPE_BUFFER)
      return;

   /* an inverted range means the buffer holds no valid data: nothing to do */
   if (res->valid_buffer_range.start > res->valid_buffer_range.end)
      return;

   if (res->bind_history & ZINK_RESOURCE_USAGE_STREAMOUT)
      ctx->dirty_so_targets = true;
   /* force counter buffer reset */
   res->bind_history &= ~ZINK_RESOURCE_USAGE_STREAMOUT;

   util_range_set_empty(&res->valid_buffer_range);
   /* if no batch is using the buffer, clearing the range is sufficient */
   if (!zink_get_resource_usage(res))
      return;

   /* buffer is in flight: replace the backing storage so in-progress GPU
    * work continues against the old object while new work gets fresh memory
    */
   struct zink_resource_object *old_obj = res->obj;
   struct zink_resource_object *new_obj = resource_object_create(screen, pres, NULL, NULL);
   if (!new_obj) {
      debug_printf("new backing resource alloc failed!");
      return;
   }
   res->obj = new_obj;
   res->access_stage = 0;
   res->access = 0;
   /* re-point all existing bindings at the new object */
   zink_resource_rebind(ctx, res);
   zink_descriptor_set_refs_clear(&old_obj->desc_set_refs, old_obj);
   /* drop our reference; the old object dies when the GPU is done with it */
   zink_resource_object_reference(screen, &old_obj, NULL);
}
|
|
|
|
|
|
2020-10-07 15:55:16 -04:00
|
|
|
/* Execute the buffer<->image copy backing a staged transfer. Direction is
 * inferred from the source target: buffer source means buffer->image.
 * Exactly one of DEPTH_ONLY/STENCIL_ONLY may be set in the transfer usage.
 */
static void
zink_transfer_copy_bufimage(struct zink_context *ctx,
                            struct zink_resource *dst,
                            struct zink_resource *src,
                            struct zink_transfer *trans)
{
   assert((trans->base.usage & (PIPE_MAP_DEPTH_ONLY | PIPE_MAP_STENCIL_ONLY)) !=
          (PIPE_MAP_DEPTH_ONLY | PIPE_MAP_STENCIL_ONLY));

   bool buf2img = src->base.target == PIPE_BUFFER;

   struct pipe_box box = trans->base.box;
   int x = box.x;
   /* for buffer->image the box.x slot carries the source buffer offset,
    * while the original x becomes the image destination coordinate
    */
   if (buf2img)
      box.x = src->obj->offset;

   zink_copy_image_buffer(ctx, NULL, dst, src, trans->base.level, buf2img ? x : dst->obj->offset,
                          box.y, box.z, trans->base.level, &box, trans->base.usage);
}
|
|
|
|
|
|
2020-08-11 18:36:15 -04:00
|
|
|
uint32_t
|
|
|
|
|
zink_get_resource_usage(struct zink_resource *res)
|
2020-06-26 15:16:17 -04:00
|
|
|
{
|
|
|
|
|
uint32_t batch_uses = 0;
|
2020-10-27 12:07:36 -04:00
|
|
|
for (unsigned i = 0; i < ARRAY_SIZE(res->obj->batch_uses); i++)
|
|
|
|
|
batch_uses |= p_atomic_read(&res->obj->batch_uses[i]) << i;
|
2020-06-26 15:16:17 -04:00
|
|
|
return batch_uses;
|
|
|
|
|
}
|
|
|
|
|
|
2018-08-31 16:50:20 +02:00
|
|
|
static void *
|
|
|
|
|
zink_transfer_map(struct pipe_context *pctx,
|
|
|
|
|
struct pipe_resource *pres,
|
|
|
|
|
unsigned level,
|
|
|
|
|
unsigned usage,
|
|
|
|
|
const struct pipe_box *box,
|
|
|
|
|
struct pipe_transfer **transfer)
|
|
|
|
|
{
|
|
|
|
|
struct zink_context *ctx = zink_context(pctx);
|
|
|
|
|
struct zink_screen *screen = zink_screen(pctx->screen);
|
|
|
|
|
struct zink_resource *res = zink_resource(pres);
|
2020-08-11 18:36:15 -04:00
|
|
|
uint32_t batch_uses = zink_get_resource_usage(res);
|
2018-08-31 16:50:20 +02:00
|
|
|
|
|
|
|
|
struct zink_transfer *trans = slab_alloc(&ctx->transfer_pool);
|
|
|
|
|
if (!trans)
|
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
|
|
memset(trans, 0, sizeof(*trans));
|
|
|
|
|
pipe_resource_reference(&trans->base.resource, pres);
|
|
|
|
|
|
|
|
|
|
trans->base.resource = pres;
|
|
|
|
|
trans->base.level = level;
|
|
|
|
|
trans->base.usage = usage;
|
|
|
|
|
trans->base.box = *box;
|
|
|
|
|
|
|
|
|
|
void *ptr;
|
|
|
|
|
if (pres->target == PIPE_BUFFER) {
|
2020-09-06 11:57:06 -04:00
|
|
|
if (!(usage & PIPE_MAP_UNSYNCHRONIZED)) {
|
2020-10-27 14:31:29 -04:00
|
|
|
if (usage & PIPE_MAP_DISCARD_WHOLE_RESOURCE) {
|
|
|
|
|
/* Replace the backing storage with a fresh buffer for non-async maps */
|
|
|
|
|
//if (!(usage & TC_TRANSFER_MAP_NO_INVALIDATE))
|
|
|
|
|
zink_resource_invalidate(pctx, pres);
|
|
|
|
|
|
|
|
|
|
/* If we can discard the whole resource, we can discard the range. */
|
|
|
|
|
usage |= PIPE_MAP_DISCARD_RANGE;
|
|
|
|
|
}
|
2020-09-06 11:57:06 -04:00
|
|
|
if (util_ranges_intersect(&res->valid_buffer_range, box->x, box->x + box->width)) {
|
|
|
|
|
/* special case compute reads since they aren't handled by zink_fence_wait() */
|
|
|
|
|
if (usage & PIPE_MAP_WRITE && (batch_uses & (ZINK_RESOURCE_ACCESS_READ << ZINK_COMPUTE_BATCH_ID)))
|
|
|
|
|
zink_wait_on_batch(ctx, ZINK_COMPUTE_BATCH_ID);
|
|
|
|
|
batch_uses &= ~(ZINK_RESOURCE_ACCESS_READ << ZINK_COMPUTE_BATCH_ID);
|
|
|
|
|
if (usage & PIPE_MAP_READ && batch_uses >= ZINK_RESOURCE_ACCESS_WRITE)
|
2020-10-28 13:18:55 -04:00
|
|
|
resource_sync_writes_from_batch_usage(ctx, res);
|
2020-09-06 11:57:06 -04:00
|
|
|
else if (usage & PIPE_MAP_WRITE && batch_uses) {
|
|
|
|
|
/* need to wait for all rendering to finish
|
|
|
|
|
* TODO: optimize/fix this to be much less obtrusive
|
|
|
|
|
* mesa/mesa#2966
|
|
|
|
|
*/
|
|
|
|
|
|
2020-09-09 16:08:54 -04:00
|
|
|
trans->staging_res = pipe_buffer_create(pctx->screen, 0, PIPE_USAGE_STAGING, pres->width0);
|
|
|
|
|
res = zink_resource(trans->staging_res);
|
2020-09-06 11:57:06 -04:00
|
|
|
}
|
2020-06-26 15:16:17 -04:00
|
|
|
}
|
2020-05-20 09:59:26 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2020-10-27 11:38:16 -04:00
|
|
|
VkResult result = vkMapMemory(screen->dev, res->obj->mem, res->obj->offset, res->obj->size, 0, &ptr);
|
2018-08-31 16:50:20 +02:00
|
|
|
if (result != VK_SUCCESS)
|
|
|
|
|
return NULL;
|
|
|
|
|
|
2019-08-14 11:11:19 +01:00
|
|
|
#if defined(__APPLE__)
|
|
|
|
|
if (!(usage & PIPE_MAP_DISCARD_WHOLE_RESOURCE)) {
|
|
|
|
|
// Work around for MoltenVk limitation
|
|
|
|
|
// MoltenVk returns blank memory ranges when there should be data present
|
|
|
|
|
// This is a known limitation of MoltenVK.
|
|
|
|
|
// See https://github.com/KhronosGroup/MoltenVK/blob/master/Docs/MoltenVK_Runtime_UserGuide.md#known-moltenvk-limitations
|
|
|
|
|
VkMappedMemoryRange range = {
|
|
|
|
|
VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
|
|
|
|
|
NULL,
|
|
|
|
|
res->mem,
|
|
|
|
|
res->offset,
|
|
|
|
|
res->size
|
|
|
|
|
};
|
|
|
|
|
result = vkFlushMappedMemoryRanges(screen->dev, 1, &range);
|
|
|
|
|
if (result != VK_SUCCESS)
|
|
|
|
|
return NULL;
|
|
|
|
|
}
|
|
|
|
|
#endif
|
|
|
|
|
|
2018-08-31 16:50:20 +02:00
|
|
|
trans->base.stride = 0;
|
|
|
|
|
trans->base.layer_stride = 0;
|
|
|
|
|
ptr = ((uint8_t *)ptr) + box->x;
|
2020-09-03 10:44:11 -04:00
|
|
|
if (usage & PIPE_MAP_WRITE)
|
|
|
|
|
util_range_add(&res->base, &res->valid_buffer_range, box->x, box->x + box->width);
|
2018-08-31 16:50:20 +02:00
|
|
|
} else {
|
2020-09-17 16:32:54 -04:00
|
|
|
if (usage & PIPE_MAP_WRITE && !(usage & PIPE_MAP_READ))
|
|
|
|
|
/* this is like a blit, so we can potentially dump some clears or maybe we have to */
|
|
|
|
|
zink_fb_clears_apply_or_discard(ctx, pres, zink_rect_from_box(box), false);
|
|
|
|
|
else if (usage & PIPE_MAP_READ)
|
|
|
|
|
/* if the map region intersects with any clears then we have to apply them */
|
|
|
|
|
zink_fb_clears_apply_region(ctx, pres, zink_rect_from_box(box));
|
2020-10-27 11:38:16 -04:00
|
|
|
if (res->optimal_tiling || !res->obj->host_visible) {
|
2020-06-01 10:53:19 -04:00
|
|
|
enum pipe_format format = pres->format;
|
2020-07-01 08:16:12 -04:00
|
|
|
if (usage & PIPE_MAP_DEPTH_ONLY)
|
2020-06-01 10:53:19 -04:00
|
|
|
format = util_format_get_depth_only(pres->format);
|
2020-07-01 08:16:12 -04:00
|
|
|
else if (usage & PIPE_MAP_STENCIL_ONLY)
|
2020-06-01 10:53:19 -04:00
|
|
|
format = PIPE_FORMAT_S8_UINT;
|
|
|
|
|
trans->base.stride = util_format_get_stride(format, box->width);
|
|
|
|
|
trans->base.layer_stride = util_format_get_2d_size(format,
|
2018-08-31 16:50:20 +02:00
|
|
|
trans->base.stride,
|
|
|
|
|
box->height);
|
|
|
|
|
|
|
|
|
|
struct pipe_resource templ = *pres;
|
2020-06-01 10:53:19 -04:00
|
|
|
templ.format = format;
|
2018-08-31 16:50:20 +02:00
|
|
|
templ.usage = PIPE_USAGE_STAGING;
|
|
|
|
|
templ.target = PIPE_BUFFER;
|
2019-06-20 12:03:20 +02:00
|
|
|
templ.bind = 0;
|
2018-08-31 16:50:20 +02:00
|
|
|
templ.width0 = trans->base.layer_stride * box->depth;
|
|
|
|
|
templ.height0 = templ.depth0 = 0;
|
|
|
|
|
templ.last_level = 0;
|
|
|
|
|
templ.array_size = 1;
|
|
|
|
|
templ.flags = 0;
|
|
|
|
|
|
|
|
|
|
trans->staging_res = zink_resource_create(pctx->screen, &templ);
|
|
|
|
|
if (!trans->staging_res)
|
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
|
|
struct zink_resource *staging_res = zink_resource(trans->staging_res);
|
|
|
|
|
|
2020-07-01 08:16:12 -04:00
|
|
|
if (usage & PIPE_MAP_READ) {
|
2020-08-11 18:39:25 -04:00
|
|
|
/* TODO: can probably just do a full cs copy if it's already in a cs batch */
|
|
|
|
|
if (batch_uses & (ZINK_RESOURCE_ACCESS_WRITE << ZINK_COMPUTE_BATCH_ID))
|
2020-09-06 11:08:33 -04:00
|
|
|
/* don't actually have to stall here, only ensure batch is submitted */
|
|
|
|
|
zink_flush_compute(ctx);
|
2018-08-31 16:50:20 +02:00
|
|
|
struct zink_context *ctx = zink_context(pctx);
|
2020-10-07 15:55:16 -04:00
|
|
|
zink_transfer_copy_bufimage(ctx, staging_res, res, trans);
|
2019-07-12 12:58:49 +02:00
|
|
|
/* need to wait for rendering to finish */
|
2020-06-29 14:04:22 -04:00
|
|
|
zink_fence_wait(pctx);
|
2018-08-31 16:50:20 +02:00
|
|
|
}
|
|
|
|
|
|
2020-10-27 11:38:16 -04:00
|
|
|
VkResult result = vkMapMemory(screen->dev, staging_res->obj->mem,
|
|
|
|
|
staging_res->obj->offset,
|
|
|
|
|
staging_res->obj->size, 0, &ptr);
|
2018-08-31 16:50:20 +02:00
|
|
|
if (result != VK_SUCCESS)
|
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
|
|
} else {
|
2020-12-11 18:41:39 -08:00
|
|
|
assert(!res->optimal_tiling);
|
2020-09-04 12:08:18 -04:00
|
|
|
/* special case compute reads since they aren't handled by zink_fence_wait() */
|
|
|
|
|
if (batch_uses & (ZINK_RESOURCE_ACCESS_READ << ZINK_COMPUTE_BATCH_ID))
|
|
|
|
|
zink_wait_on_batch(ctx, ZINK_COMPUTE_BATCH_ID);
|
|
|
|
|
batch_uses &= ~(ZINK_RESOURCE_ACCESS_READ << ZINK_COMPUTE_BATCH_ID);
|
2020-08-11 18:39:25 -04:00
|
|
|
if (batch_uses >= ZINK_RESOURCE_ACCESS_WRITE) {
|
2020-09-04 12:08:18 -04:00
|
|
|
if (usage & PIPE_MAP_READ)
|
2020-10-28 13:18:55 -04:00
|
|
|
resource_sync_writes_from_batch_usage(ctx, res);
|
2020-09-04 12:08:18 -04:00
|
|
|
else
|
|
|
|
|
zink_fence_wait(pctx);
|
2020-08-11 18:39:25 -04:00
|
|
|
}
|
2020-10-27 11:38:16 -04:00
|
|
|
VkResult result = vkMapMemory(screen->dev, res->obj->mem, res->obj->offset, res->obj->size, 0, &ptr);
|
2018-08-31 16:50:20 +02:00
|
|
|
if (result != VK_SUCCESS)
|
|
|
|
|
return NULL;
|
|
|
|
|
VkImageSubresource isr = {
|
|
|
|
|
res->aspect,
|
|
|
|
|
level,
|
|
|
|
|
0
|
|
|
|
|
};
|
|
|
|
|
VkSubresourceLayout srl;
|
2020-10-27 11:38:16 -04:00
|
|
|
vkGetImageSubresourceLayout(screen->dev, res->obj->image, &isr, &srl);
|
2018-08-31 16:50:20 +02:00
|
|
|
trans->base.stride = srl.rowPitch;
|
|
|
|
|
trans->base.layer_stride = srl.arrayPitch;
|
2020-12-12 11:59:40 -05:00
|
|
|
const struct util_format_description *desc = util_format_description(res->base.format);
|
2020-12-11 10:53:53 -05:00
|
|
|
unsigned offset = srl.offset +
|
|
|
|
|
box->z * srl.depthPitch +
|
|
|
|
|
(box->y / desc->block.height) * srl.rowPitch +
|
2020-12-12 11:59:40 -05:00
|
|
|
(box->x / desc->block.width) * (desc->block.bits / 8);
|
2020-12-11 10:53:53 -05:00
|
|
|
ptr = ((uint8_t *)ptr) + offset;
|
2018-08-31 16:50:20 +02:00
|
|
|
}
|
|
|
|
|
}
|
2020-08-13 10:53:25 -04:00
|
|
|
if ((usage & PIPE_MAP_PERSISTENT) && !(usage & PIPE_MAP_COHERENT))
|
2020-10-27 11:59:33 -04:00
|
|
|
res->obj->persistent_maps++;
|
2018-08-31 16:50:20 +02:00
|
|
|
|
|
|
|
|
*transfer = &trans->base;
|
|
|
|
|
return ptr;
|
|
|
|
|
}
|
|
|
|
|
|
2020-09-03 10:44:11 -04:00
|
|
|
static void
|
2020-09-04 09:41:30 -04:00
|
|
|
zink_transfer_flush_region(struct pipe_context *pctx,
|
2020-09-03 10:44:11 -04:00
|
|
|
struct pipe_transfer *ptrans,
|
|
|
|
|
const struct pipe_box *box)
|
|
|
|
|
{
|
2020-09-04 09:41:30 -04:00
|
|
|
struct zink_context *ctx = zink_context(pctx);
|
2020-09-03 10:44:11 -04:00
|
|
|
struct zink_resource *res = zink_resource(ptrans->resource);
|
2020-09-04 09:41:30 -04:00
|
|
|
struct zink_transfer *trans = (struct zink_transfer *)ptrans;
|
|
|
|
|
|
|
|
|
|
if (trans->base.usage & PIPE_MAP_WRITE) {
|
|
|
|
|
if (trans->staging_res) {
|
2020-09-09 16:08:54 -04:00
|
|
|
struct zink_resource *staging_res = zink_resource(trans->staging_res);
|
|
|
|
|
uint32_t batch_uses = zink_get_resource_usage(res) | zink_get_resource_usage(staging_res);
|
2020-09-04 09:41:30 -04:00
|
|
|
if (batch_uses & (ZINK_RESOURCE_ACCESS_WRITE << ZINK_COMPUTE_BATCH_ID)) {
|
2020-09-06 11:08:33 -04:00
|
|
|
/* don't actually have to stall here, only ensure batch is submitted */
|
|
|
|
|
zink_flush_compute(ctx);
|
2020-09-04 09:41:30 -04:00
|
|
|
batch_uses &= ~(ZINK_RESOURCE_ACCESS_WRITE << ZINK_COMPUTE_BATCH_ID);
|
|
|
|
|
batch_uses &= ~(ZINK_RESOURCE_ACCESS_READ << ZINK_COMPUTE_BATCH_ID);
|
|
|
|
|
}
|
|
|
|
|
|
2020-09-09 16:08:54 -04:00
|
|
|
if (ptrans->resource->target == PIPE_BUFFER)
|
|
|
|
|
zink_copy_buffer(ctx, NULL, res, staging_res, box->x, box->x, box->width);
|
|
|
|
|
else
|
2020-10-07 15:55:16 -04:00
|
|
|
zink_transfer_copy_bufimage(ctx, res, staging_res, trans);
|
2020-09-04 09:41:30 -04:00
|
|
|
}
|
|
|
|
|
}
|
2020-09-03 10:44:11 -04:00
|
|
|
}
|
|
|
|
|
|
2018-08-31 16:50:20 +02:00
|
|
|
static void
|
|
|
|
|
zink_transfer_unmap(struct pipe_context *pctx,
|
|
|
|
|
struct pipe_transfer *ptrans)
|
|
|
|
|
{
|
|
|
|
|
struct zink_context *ctx = zink_context(pctx);
|
|
|
|
|
struct zink_screen *screen = zink_screen(pctx->screen);
|
|
|
|
|
struct zink_resource *res = zink_resource(ptrans->resource);
|
|
|
|
|
struct zink_transfer *trans = (struct zink_transfer *)ptrans;
|
|
|
|
|
if (trans->staging_res) {
|
|
|
|
|
struct zink_resource *staging_res = zink_resource(trans->staging_res);
|
2020-10-27 11:38:16 -04:00
|
|
|
vkUnmapMemory(screen->dev, staging_res->obj->mem);
|
2018-08-31 16:50:20 +02:00
|
|
|
} else
|
2020-10-27 11:38:16 -04:00
|
|
|
vkUnmapMemory(screen->dev, res->obj->mem);
|
2020-08-13 10:53:25 -04:00
|
|
|
if ((trans->base.usage & PIPE_MAP_PERSISTENT) && !(trans->base.usage & PIPE_MAP_COHERENT))
|
2020-10-27 11:59:33 -04:00
|
|
|
res->obj->persistent_maps--;
|
2020-09-03 10:44:11 -04:00
|
|
|
if (!(trans->base.usage & (PIPE_MAP_FLUSH_EXPLICIT | PIPE_MAP_COHERENT))) {
|
|
|
|
|
zink_transfer_flush_region(pctx, ptrans, &ptrans->box);
|
|
|
|
|
}
|
2020-09-04 09:41:30 -04:00
|
|
|
|
|
|
|
|
if (trans->staging_res)
|
|
|
|
|
pipe_resource_reference(&trans->staging_res, NULL);
|
2018-08-31 16:50:20 +02:00
|
|
|
pipe_resource_reference(&trans->base.resource, NULL);
|
|
|
|
|
slab_free(&ctx->transfer_pool, ptrans);
|
|
|
|
|
}
|
|
|
|
|
|
2020-06-01 10:53:19 -04:00
|
|
|
static struct pipe_resource *
|
|
|
|
|
zink_resource_get_separate_stencil(struct pipe_resource *pres)
|
|
|
|
|
{
|
|
|
|
|
/* For packed depth-stencil, we treat depth as the primary resource
|
|
|
|
|
* and store S8 as the "second plane" resource.
|
|
|
|
|
*/
|
|
|
|
|
if (pres->next && pres->next->format == PIPE_FORMAT_S8_UINT)
|
|
|
|
|
return pres->next;
|
|
|
|
|
|
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2020-06-14 00:01:07 -04:00
|
|
|
void
|
2020-09-15 18:26:39 -04:00
|
|
|
zink_resource_setup_transfer_layouts(struct zink_context *ctx, struct zink_resource *src, struct zink_resource *dst)
|
2020-06-14 00:01:07 -04:00
|
|
|
{
|
|
|
|
|
if (src == dst) {
|
|
|
|
|
/* The Vulkan 1.1 specification says the following about valid usage
|
|
|
|
|
* of vkCmdBlitImage:
|
|
|
|
|
*
|
|
|
|
|
* "srcImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
|
|
|
|
|
* VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL"
|
|
|
|
|
*
|
|
|
|
|
* and:
|
|
|
|
|
*
|
|
|
|
|
* "dstImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
|
|
|
|
|
* VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL"
|
|
|
|
|
*
|
|
|
|
|
* Since we cant have the same image in two states at the same time,
|
|
|
|
|
* we're effectively left with VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR or
|
|
|
|
|
* VK_IMAGE_LAYOUT_GENERAL. And since this isn't a present-related
|
|
|
|
|
* operation, VK_IMAGE_LAYOUT_GENERAL seems most appropriate.
|
|
|
|
|
*/
|
2020-09-15 18:26:39 -04:00
|
|
|
zink_resource_image_barrier(ctx, NULL, src,
|
2020-09-10 14:45:04 -04:00
|
|
|
VK_IMAGE_LAYOUT_GENERAL,
|
|
|
|
|
VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_TRANSFER_WRITE_BIT,
|
|
|
|
|
VK_PIPELINE_STAGE_TRANSFER_BIT);
|
2020-06-14 00:01:07 -04:00
|
|
|
} else {
|
2020-09-15 18:26:39 -04:00
|
|
|
zink_resource_image_barrier(ctx, NULL, src,
|
2020-09-10 14:45:04 -04:00
|
|
|
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
|
|
|
|
|
VK_ACCESS_TRANSFER_READ_BIT,
|
|
|
|
|
VK_PIPELINE_STAGE_TRANSFER_BIT);
|
|
|
|
|
|
2020-09-15 18:26:39 -04:00
|
|
|
zink_resource_image_barrier(ctx, NULL, dst,
|
2020-09-10 14:45:04 -04:00
|
|
|
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
|
|
|
|
|
VK_ACCESS_TRANSFER_WRITE_BIT,
|
|
|
|
|
VK_PIPELINE_STAGE_TRANSFER_BIT);
|
2020-06-14 00:01:07 -04:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2020-06-01 10:53:19 -04:00
|
|
|
void
|
|
|
|
|
zink_get_depth_stencil_resources(struct pipe_resource *res,
|
|
|
|
|
struct zink_resource **out_z,
|
|
|
|
|
struct zink_resource **out_s)
|
|
|
|
|
{
|
|
|
|
|
if (!res) {
|
|
|
|
|
if (out_z) *out_z = NULL;
|
|
|
|
|
if (out_s) *out_s = NULL;
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (res->format != PIPE_FORMAT_S8_UINT) {
|
|
|
|
|
if (out_z) *out_z = zink_resource(res);
|
|
|
|
|
if (out_s) *out_s = zink_resource(zink_resource_get_separate_stencil(res));
|
|
|
|
|
} else {
|
|
|
|
|
if (out_z) *out_z = NULL;
|
|
|
|
|
if (out_s) *out_s = zink_resource(res);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static void
|
|
|
|
|
zink_resource_set_separate_stencil(struct pipe_resource *pres,
|
|
|
|
|
struct pipe_resource *stencil)
|
|
|
|
|
{
|
|
|
|
|
assert(util_format_has_depth(util_format_description(pres->format)));
|
|
|
|
|
pipe_resource_reference(&pres->next, stencil);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static enum pipe_format
|
|
|
|
|
zink_resource_get_internal_format(struct pipe_resource *pres)
|
|
|
|
|
{
|
|
|
|
|
struct zink_resource *res = zink_resource(pres);
|
|
|
|
|
return res->internal_format;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/* Callback table handed to u_transfer_helper so it can drive resource
 * creation/destruction, mapping, and depth/stencil plane handling through
 * zink's implementations.
 */
static const struct u_transfer_vtbl transfer_vtbl = {
   .resource_create = zink_resource_create,
   .resource_destroy = zink_resource_destroy,
   .transfer_map = zink_transfer_map,
   .transfer_unmap = zink_transfer_unmap,
   .transfer_flush_region = zink_transfer_flush_region,
   .get_internal_format = zink_resource_get_internal_format,
   .set_stencil = zink_resource_set_separate_stencil,
   .get_stencil = zink_resource_get_separate_stencil,
};
|
|
|
|
|
|
2020-10-13 09:42:07 -04:00
|
|
|
bool
|
2020-06-01 10:53:19 -04:00
|
|
|
zink_screen_resource_init(struct pipe_screen *pscreen)
|
|
|
|
|
{
|
2020-10-13 09:42:07 -04:00
|
|
|
struct zink_screen *screen = zink_screen(pscreen);
|
2020-06-01 10:53:19 -04:00
|
|
|
pscreen->resource_create = zink_resource_create;
|
|
|
|
|
pscreen->resource_destroy = zink_resource_destroy;
|
|
|
|
|
pscreen->transfer_helper = u_transfer_helper_create(&transfer_vtbl, true, true, false, false);
|
|
|
|
|
|
2020-10-13 09:42:07 -04:00
|
|
|
if (screen->info.have_KHR_external_memory_fd) {
|
2020-06-01 10:53:19 -04:00
|
|
|
pscreen->resource_get_handle = zink_resource_get_handle;
|
|
|
|
|
pscreen->resource_from_handle = zink_resource_from_handle;
|
|
|
|
|
}
|
2020-10-13 09:42:07 -04:00
|
|
|
simple_mtx_init(&screen->mem_cache_mtx, mtx_plain);
|
|
|
|
|
screen->resource_mem_cache = _mesa_hash_table_create(NULL, mem_hash, mem_equals);
|
|
|
|
|
return !!screen->resource_mem_cache;
|
2020-06-01 10:53:19 -04:00
|
|
|
}
|
|
|
|
|
|
2018-08-31 16:50:20 +02:00
|
|
|
void
|
|
|
|
|
zink_context_resource_init(struct pipe_context *pctx)
|
|
|
|
|
{
|
2020-06-01 10:53:19 -04:00
|
|
|
pctx->transfer_map = u_transfer_helper_deinterleave_transfer_map;
|
|
|
|
|
pctx->transfer_unmap = u_transfer_helper_deinterleave_transfer_unmap;
|
2018-08-31 16:50:20 +02:00
|
|
|
|
2020-06-01 10:53:19 -04:00
|
|
|
pctx->transfer_flush_region = u_transfer_helper_transfer_flush_region;
|
2018-08-31 16:50:20 +02:00
|
|
|
pctx->buffer_subdata = u_default_buffer_subdata;
|
|
|
|
|
pctx->texture_subdata = u_default_texture_subdata;
|
2020-10-27 14:30:20 -04:00
|
|
|
pctx->invalidate_resource = zink_resource_invalidate;
|
2018-08-31 16:50:20 +02:00
|
|
|
}
|