/*
 * Copyright 2018 Collabora Ltd.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * on the rights to use, copy, modify, merge, publish, distribute, sub
 * license, and/or sell copies of the Software, and to permit persons to whom
 * the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
#include "zink_resource.h"

#include "zink_batch.h"
#include "zink_context.h"
#include "zink_fence.h"
#include "zink_program.h"
#include "zink_screen.h"
#include "zink_kopper.h"

#ifdef VK_USE_PLATFORM_METAL_EXT
#include "QuartzCore/CAMetalLayer.h"
#endif
#include "vulkan/wsi/wsi_common.h"

#include "vk_format.h"
#include "util/slab.h"
#include "util/u_blitter.h"
#include "util/u_debug.h"
#include "util/format/u_format.h"
#include "util/u_transfer_helper.h"
#include "util/u_inlines.h"
#include "util/u_memory.h"
#include "util/u_upload_mgr.h"
#include "util/os_file.h"
#include "frontend/winsys_handle.h"

#if !defined(_WIN32) && !defined(__APPLE__)
#define ZINK_USE_DMABUF
#endif

#ifdef ZINK_USE_DMABUF
#include "drm-uapi/drm_fourcc.h"
#else
/* these won't actually be used */
#define DRM_FORMAT_MOD_INVALID 0
#define DRM_FORMAT_MOD_LINEAR 0
#endif

#if defined(__APPLE__)
// Source of MVK_VERSION
#include "MoltenVK/vk_mvk_moltenvk.h"
#endif

/* out-of-band winsys handle type used to request an opaque-fd export */
#define ZINK_EXTERNAL_MEMORY_HANDLE 999
70
equals_ivci(const void *a, const void *b)
72
const uint8_t *pa = a;
73
const uint8_t *pb = b;
74
size_t offset = offsetof(VkImageViewCreateInfo, flags);
75
return memcmp(pa + offset, pb + offset, sizeof(VkImageViewCreateInfo) - offset) == 0;
79
equals_bvci(const void *a, const void *b)
81
const uint8_t *pa = a;
82
const uint8_t *pb = b;
83
size_t offset = offsetof(VkBufferViewCreateInfo, flags);
84
return memcmp(pa + offset, pb + offset, sizeof(VkBufferViewCreateInfo) - offset) == 0;
/* Forward declaration; defined later in this file. */
static void
zink_transfer_flush_region(struct pipe_context *pctx,
                           struct pipe_transfer *ptrans,
                           const struct pipe_box *box);
/* Debug-reference callback: writes a short human-readable tag for a
 * zink_resource_object into `buf` (buf is sized by the debug framework).
 */
static void
debug_describe_zink_resource_object(char *buf, const struct zink_resource_object *ptr)
{
   sprintf(buf, "zink_resource_object");
}
99
zink_destroy_resource_object(struct zink_screen *screen, struct zink_resource_object *obj)
101
if (obj->is_buffer) {
102
VKSCR(DestroyBuffer)(screen->dev, obj->buffer, NULL);
103
VKSCR(DestroyBuffer)(screen->dev, obj->storage_buffer, NULL);
104
} else if (obj->dt) {
105
zink_kopper_displaytarget_destroy(screen, obj->dt);
107
VKSCR(DestroyImage)(screen->dev, obj->image, NULL);
110
zink_descriptor_set_refs_clear(&obj->desc_set_refs, obj);
112
FREE(obj->bo); //this is a dummy struct
114
zink_bo_unref(screen, obj->bo);
119
zink_resource_destroy(struct pipe_screen *pscreen,
120
struct pipe_resource *pres)
122
struct zink_screen *screen = zink_screen(pscreen);
123
struct zink_resource *res = zink_resource(pres);
124
if (pres->target == PIPE_BUFFER) {
125
util_range_destroy(&res->valid_buffer_range);
126
util_idalloc_mt_free(&screen->buffer_ids, res->base.buffer_id_unique);
127
assert(!_mesa_hash_table_num_entries(&res->bufferview_cache));
128
simple_mtx_destroy(&res->bufferview_mtx);
129
ralloc_free(res->bufferview_cache.table);
131
assert(!_mesa_hash_table_num_entries(&res->surface_cache));
132
simple_mtx_destroy(&res->surface_mtx);
133
ralloc_free(res->surface_cache.table);
135
/* no need to do anything for the caches, these objects own the resource lifetimes */
137
zink_resource_object_reference(screen, &res->obj, NULL);
138
threaded_resource_deinit(pres);
142
static VkImageAspectFlags
143
aspect_from_format(enum pipe_format fmt)
145
if (util_format_is_depth_or_stencil(fmt)) {
146
VkImageAspectFlags aspect = 0;
147
const struct util_format_description *desc = util_format_description(fmt);
148
if (util_format_has_depth(desc))
149
aspect |= VK_IMAGE_ASPECT_DEPTH_BIT;
150
if (util_format_has_stencil(desc))
151
aspect |= VK_IMAGE_ASPECT_STENCIL_BIT;
154
return VK_IMAGE_ASPECT_COLOR_BIT;
157
static VkBufferCreateInfo
158
create_bci(struct zink_screen *screen, const struct pipe_resource *templ, unsigned bind)
160
VkBufferCreateInfo bci;
161
bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
163
bci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
164
bci.queueFamilyIndexCount = 0;
165
bci.pQueueFamilyIndices = NULL;
166
bci.size = templ->width0;
168
assert(bci.size > 0);
170
bci.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
171
VK_BUFFER_USAGE_TRANSFER_DST_BIT |
172
VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
174
bci.usage |= VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT |
175
VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT |
176
VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
177
VK_BUFFER_USAGE_INDEX_BUFFER_BIT |
178
VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
179
VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT |
180
VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT;
182
if (bind & PIPE_BIND_SHADER_IMAGE)
183
bci.usage |= VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
185
if (bind & PIPE_BIND_QUERY_BUFFER)
186
bci.usage |= VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT;
188
if (templ->flags & PIPE_RESOURCE_FLAG_SPARSE)
189
bci.flags |= VK_BUFFER_CREATE_SPARSE_BINDING_BIT;
194
check_ici(struct zink_screen *screen, VkImageCreateInfo *ici, uint64_t modifier)
196
VkImageFormatProperties image_props;
198
assert(modifier == DRM_FORMAT_MOD_INVALID ||
199
(VKSCR(GetPhysicalDeviceImageFormatProperties2) && screen->info.have_EXT_image_drm_format_modifier));
200
if (VKSCR(GetPhysicalDeviceImageFormatProperties2)) {
201
VkImageFormatProperties2 props2;
202
props2.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
204
VkSamplerYcbcrConversionImageFormatProperties ycbcr_props;
205
ycbcr_props.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES;
206
ycbcr_props.pNext = NULL;
207
if (screen->info.have_KHR_sampler_ycbcr_conversion)
208
props2.pNext = &ycbcr_props;
209
VkPhysicalDeviceImageFormatInfo2 info;
210
info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
211
info.format = ici->format;
212
info.type = ici->imageType;
213
info.tiling = ici->tiling;
214
info.usage = ici->usage;
215
info.flags = ici->flags;
217
VkPhysicalDeviceImageDrmFormatModifierInfoEXT mod_info;
218
if (modifier != DRM_FORMAT_MOD_INVALID) {
219
mod_info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT;
220
mod_info.pNext = NULL;
221
mod_info.drmFormatModifier = modifier;
222
mod_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
223
mod_info.queueFamilyIndexCount = 0;
224
info.pNext = &mod_info;
228
ret = VKSCR(GetPhysicalDeviceImageFormatProperties2)(screen->pdev, &info, &props2);
229
/* this is using VK_IMAGE_CREATE_EXTENDED_USAGE_BIT and can't be validated */
230
if (vk_format_aspects(ici->format) & VK_IMAGE_ASPECT_PLANE_1_BIT)
232
image_props = props2.imageFormatProperties;
234
ret = VKSCR(GetPhysicalDeviceImageFormatProperties)(screen->pdev, ici->format, ici->imageType,
235
ici->tiling, ici->usage, ici->flags, &image_props);
236
if (ret != VK_SUCCESS)
238
if (ici->extent.depth > image_props.maxExtent.depth ||
239
ici->extent.height > image_props.maxExtent.height ||
240
ici->extent.width > image_props.maxExtent.width)
242
if (ici->mipLevels > image_props.maxMipLevels)
244
if (ici->arrayLayers > image_props.maxArrayLayers)
249
static VkImageUsageFlags
250
get_image_usage_for_feats(struct zink_screen *screen, VkFormatFeatureFlags feats, const struct pipe_resource *templ, unsigned bind, bool *need_extended)
252
VkImageUsageFlags usage = 0;
253
bool is_planar = util_format_get_num_planes(templ->format) > 1;
254
*need_extended = false;
256
if (bind & ZINK_BIND_TRANSIENT)
257
usage |= VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
259
/* sadly, gallium doesn't let us know if it'll ever need this, so we have to assume */
260
if (is_planar || (feats & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT))
261
usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
262
if (is_planar || (feats & VK_FORMAT_FEATURE_TRANSFER_DST_BIT))
263
usage |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;
264
if (feats & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)
265
usage |= VK_IMAGE_USAGE_SAMPLED_BIT;
267
if ((is_planar || (feats & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) && (bind & PIPE_BIND_SHADER_IMAGE)) {
268
assert(templ->nr_samples <= 1 || screen->info.feats.features.shaderStorageImageMultisample);
269
usage |= VK_IMAGE_USAGE_STORAGE_BIT;
273
if (bind & PIPE_BIND_RENDER_TARGET) {
274
if (feats & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) {
275
usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
276
if ((bind & (PIPE_BIND_LINEAR | PIPE_BIND_SHARED)) != (PIPE_BIND_LINEAR | PIPE_BIND_SHARED))
277
usage |= VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
279
/* trust that gallium isn't going to give us anything wild */
280
*need_extended = true;
283
} else if ((bind & PIPE_BIND_SAMPLER_VIEW) && !util_format_is_depth_or_stencil(templ->format)) {
284
if (!(feats & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) {
285
/* ensure we can u_blitter this later */
286
*need_extended = true;
289
usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
292
if (bind & PIPE_BIND_DEPTH_STENCIL) {
293
if (feats & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
294
usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
297
/* this is unlikely to occur and has been included for completeness */
298
} else if (bind & PIPE_BIND_SAMPLER_VIEW && !(usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT)) {
299
if (feats & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)
300
usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
305
if (bind & PIPE_BIND_STREAM_OUTPUT)
306
usage |= VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
311
static VkFormatFeatureFlags
312
find_modifier_feats(const struct zink_modifier_prop *prop, uint64_t modifier, uint64_t *mod)
314
for (unsigned j = 0; j < prop->drmFormatModifierCount; j++) {
315
if (prop->pDrmFormatModifierProperties[j].drmFormatModifier == modifier) {
317
return prop->pDrmFormatModifierProperties[j].drmFormatModifierTilingFeatures;
323
static VkImageUsageFlags
324
get_image_usage(struct zink_screen *screen, VkImageCreateInfo *ici, const struct pipe_resource *templ, unsigned bind, unsigned modifiers_count, const uint64_t *modifiers, uint64_t *mod)
326
VkImageTiling tiling = ici->tiling;
327
bool need_extended = false;
328
*mod = DRM_FORMAT_MOD_INVALID;
329
if (modifiers_count) {
330
bool have_linear = false;
331
const struct zink_modifier_prop *prop = &screen->modifier_props[templ->format];
332
assert(tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT);
333
for (unsigned i = 0; i < modifiers_count; i++) {
334
if (modifiers[i] == DRM_FORMAT_MOD_LINEAR) {
336
if (!screen->info.have_EXT_image_drm_format_modifier)
340
VkFormatFeatureFlags feats = find_modifier_feats(prop, modifiers[i], mod);
342
VkImageUsageFlags usage = get_image_usage_for_feats(screen, feats, templ, bind, &need_extended);
343
assert(!need_extended);
346
if (check_ici(screen, ici, *mod))
351
/* only try linear if no other options available */
353
VkFormatFeatureFlags feats = find_modifier_feats(prop, DRM_FORMAT_MOD_LINEAR, mod);
355
VkImageUsageFlags usage = get_image_usage_for_feats(screen, feats, templ, bind, &need_extended);
356
assert(!need_extended);
359
if (check_ici(screen, ici, *mod))
366
VkFormatProperties props = screen->format_props[templ->format];
367
VkFormatFeatureFlags feats = tiling == VK_IMAGE_TILING_LINEAR ? props.linearTilingFeatures : props.optimalTilingFeatures;
368
if (ici->flags & VK_IMAGE_CREATE_EXTENDED_USAGE_BIT)
370
VkImageUsageFlags usage = get_image_usage_for_feats(screen, feats, templ, bind, &need_extended);
372
ici->flags |= VK_IMAGE_CREATE_EXTENDED_USAGE_BIT;
374
usage = get_image_usage_for_feats(screen, feats, templ, bind, &need_extended);
378
if (check_ici(screen, ici, *mod))
382
*mod = DRM_FORMAT_MOD_INVALID;
387
create_ici(struct zink_screen *screen, VkImageCreateInfo *ici, const struct pipe_resource *templ, bool dmabuf, unsigned bind, unsigned modifiers_count, const uint64_t *modifiers, bool *success)
389
ici->sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
391
if (util_format_get_num_planes(templ->format) > 1)
392
ici->flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT;
394
ici->flags = modifiers_count || dmabuf || bind & (PIPE_BIND_SCANOUT | PIPE_BIND_DEPTH_STENCIL) ? 0 : VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
396
ici->queueFamilyIndexCount = 0;
398
if (templ->flags & PIPE_RESOURCE_FLAG_SPARSE)
399
ici->flags |= VK_IMAGE_CREATE_SPARSE_BINDING_BIT | VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT;
401
bool need_2D = false;
402
switch (templ->target) {
403
case PIPE_TEXTURE_1D:
404
case PIPE_TEXTURE_1D_ARRAY:
405
if (templ->flags & PIPE_RESOURCE_FLAG_SPARSE)
406
need_2D |= screen->need_2D_sparse;
407
if (util_format_is_depth_or_stencil(templ->format))
408
need_2D |= screen->need_2D_zs;
409
ici->imageType = need_2D ? VK_IMAGE_TYPE_2D : VK_IMAGE_TYPE_1D;
412
case PIPE_TEXTURE_CUBE:
413
case PIPE_TEXTURE_CUBE_ARRAY:
414
case PIPE_TEXTURE_2D:
415
case PIPE_TEXTURE_2D_ARRAY:
416
case PIPE_TEXTURE_RECT:
417
ici->imageType = VK_IMAGE_TYPE_2D;
420
case PIPE_TEXTURE_3D:
421
ici->imageType = VK_IMAGE_TYPE_3D;
422
ici->flags |= VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT;
423
if (screen->info.have_EXT_image_2d_view_of_3d)
424
ici->flags |= VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT;
428
unreachable("PIPE_BUFFER should already be handled");
431
unreachable("Unknown target");
434
if (screen->info.have_EXT_sample_locations &&
435
bind & PIPE_BIND_DEPTH_STENCIL &&
436
util_format_has_depth(util_format_description(templ->format)))
437
ici->flags |= VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT;
439
ici->format = zink_get_format(screen, templ->format);
440
ici->extent.width = templ->width0;
441
ici->extent.height = templ->height0;
442
ici->extent.depth = templ->depth0;
443
ici->mipLevels = templ->last_level + 1;
444
ici->arrayLayers = MAX2(templ->array_size, 1);
445
ici->samples = templ->nr_samples ? templ->nr_samples : VK_SAMPLE_COUNT_1_BIT;
446
ici->tiling = screen->info.have_EXT_image_drm_format_modifier && modifiers_count ?
447
VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT :
448
bind & PIPE_BIND_LINEAR ? VK_IMAGE_TILING_LINEAR : VK_IMAGE_TILING_OPTIMAL;
449
ici->sharingMode = VK_SHARING_MODE_EXCLUSIVE;
450
ici->initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
452
/* sampleCounts will be set to VK_SAMPLE_COUNT_1_BIT if at least one of the following conditions is true:
453
* - flags contains VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT
455
* 44.1.1. Supported Sample Counts
457
bool want_cube = ici->samples == 1 &&
458
(templ->target == PIPE_TEXTURE_CUBE ||
459
templ->target == PIPE_TEXTURE_CUBE_ARRAY ||
460
(templ->target == PIPE_TEXTURE_2D_ARRAY && ici->extent.width == ici->extent.height && ici->arrayLayers >= 6));
462
if (templ->target == PIPE_TEXTURE_CUBE)
463
ici->arrayLayers *= 6;
465
if (templ->usage == PIPE_USAGE_STAGING &&
466
templ->format != PIPE_FORMAT_B4G4R4A4_UNORM &&
467
templ->format != PIPE_FORMAT_B4G4R4A4_UINT)
468
ici->tiling = VK_IMAGE_TILING_LINEAR;
469
if (ici->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT)
474
uint64_t mod = DRM_FORMAT_MOD_INVALID;
476
while (!ici->usage) {
478
switch (ici->tiling) {
479
case VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT:
480
ici->tiling = VK_IMAGE_TILING_OPTIMAL;
483
case VK_IMAGE_TILING_OPTIMAL:
484
ici->tiling = VK_IMAGE_TILING_LINEAR;
486
case VK_IMAGE_TILING_LINEAR:
487
if (bind & PIPE_BIND_LINEAR) {
489
return DRM_FORMAT_MOD_INVALID;
491
ici->tiling = VK_IMAGE_TILING_OPTIMAL;
494
unreachable("unhandled tiling mode");
496
if (tried[ici->tiling]) {
497
if (ici->flags & VK_IMAGE_CREATE_EXTENDED_USAGE_BIT) {
499
return DRM_FORMAT_MOD_INVALID;
501
ici->flags |= VK_IMAGE_CREATE_EXTENDED_USAGE_BIT;
508
ici->usage = get_image_usage(screen, ici, templ, bind, modifiers_count, modifiers, &mod);
510
if (ici->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT)
511
tried[ici->tiling] = true;
514
ici->flags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
515
if (get_image_usage(screen, ici, templ, bind, modifiers_count, modifiers, &mod) != ici->usage)
516
ici->flags &= ~VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
523
static struct zink_resource_object *
524
resource_object_create(struct zink_screen *screen, const struct pipe_resource *templ, struct winsys_handle *whandle, bool *optimal_tiling,
525
const uint64_t *modifiers, int modifiers_count, const void *loader_private)
527
struct zink_resource_object *obj = CALLOC_STRUCT(zink_resource_object);
530
obj->last_dt_idx = obj->dt_idx = UINT32_MAX; //TODO: unionize
532
VkMemoryRequirements reqs = {0};
533
VkMemoryPropertyFlags flags;
534
bool need_dedicated = false;
535
bool shared = templ->bind & PIPE_BIND_SHARED;
536
VkExternalMemoryHandleTypeFlags export_types = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
537
unsigned num_planes = util_format_get_num_planes(templ->format);
538
VkImageAspectFlags plane_aspects[] = {
539
VK_IMAGE_ASPECT_PLANE_0_BIT,
540
VK_IMAGE_ASPECT_PLANE_1_BIT,
541
VK_IMAGE_ASPECT_PLANE_2_BIT,
543
VkExternalMemoryHandleTypeFlags external = 0;
544
bool needs_export = (templ->bind & (ZINK_BIND_VIDEO | ZINK_BIND_DMABUF)) != 0;
546
if (whandle->type == WINSYS_HANDLE_TYPE_FD || whandle->type == ZINK_EXTERNAL_MEMORY_HANDLE)
547
needs_export |= true;
549
unreachable("unknown handle type");
552
if (whandle && whandle->type == ZINK_EXTERNAL_MEMORY_HANDLE) {
553
external = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
555
external = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
556
export_types |= VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
560
/* we may export WINSYS_HANDLE_TYPE_FD handle which is dma-buf */
561
if (shared && screen->info.have_EXT_external_memory_dma_buf)
562
export_types |= VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
564
pipe_reference_init(&obj->reference, 1);
565
util_dynarray_init(&obj->desc_set_refs.refs, NULL);
566
if (loader_private) {
567
obj->bo = CALLOC_STRUCT(zink_bo);
568
obj->transfer_dst = true;
570
} else if (templ->target == PIPE_BUFFER) {
571
VkBufferCreateInfo bci = create_bci(screen, templ, templ->bind);
573
if (VKSCR(CreateBuffer)(screen->dev, &bci, NULL, &obj->buffer) != VK_SUCCESS) {
574
mesa_loge("ZINK: vkCreateBuffer failed");
578
if (!(templ->bind & PIPE_BIND_SHADER_IMAGE)) {
579
bci.usage |= VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
580
if (VKSCR(CreateBuffer)(screen->dev, &bci, NULL, &obj->storage_buffer) != VK_SUCCESS) {
581
mesa_loge("ZINK: vkCreateBuffer failed");
586
VKSCR(GetBufferMemoryRequirements)(screen->dev, obj->buffer, &reqs);
587
if (templ->usage == PIPE_USAGE_STAGING)
588
flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
589
else if (templ->usage == PIPE_USAGE_STREAM)
590
flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
591
else if (templ->usage == PIPE_USAGE_IMMUTABLE)
592
flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
594
flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
595
obj->is_buffer = true;
596
obj->transfer_dst = true;
598
bool winsys_modifier = (export_types & VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT) && whandle && whandle->modifier != DRM_FORMAT_MOD_INVALID;
600
bool try_modifiers = false;
601
if ((export_types & VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT) && whandle && whandle->modifier == DRM_FORMAT_MOD_INVALID) {
603
modifiers_count = screen->modifier_props[templ->format].drmFormatModifierCount;
604
for (unsigned j = 0; j < modifiers_count; j++)
605
mods[j] = screen->modifier_props[templ->format].pDrmFormatModifierProperties[j].drmFormatModifier;
606
if (modifiers_count > 1)
607
try_modifiers = true;
609
const uint64_t *ici_modifiers = winsys_modifier ? &whandle->modifier : modifiers;
610
unsigned ici_modifier_count = winsys_modifier ? 1 : modifiers_count;
611
bool success = false;
612
VkImageCreateInfo ici;
613
uint64_t mod = create_ici(screen, &ici, templ, external == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
614
templ->bind, ici_modifier_count, ici_modifiers, &success);
615
VkExternalMemoryImageCreateInfo emici;
616
VkImageDrmFormatModifierExplicitCreateInfoEXT idfmeci;
617
VkImageDrmFormatModifierListCreateInfoEXT idfmlci;
618
VkSubresourceLayout plane_layout = {
619
.offset = whandle ? whandle->offset : 0,
621
.rowPitch = whandle ? whandle->stride : 0,
628
obj->render_target = (ici.usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) != 0;
630
if (shared || external) {
631
emici.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO;
633
emici.handleTypes = export_types;
636
assert(ici.tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT || mod != DRM_FORMAT_MOD_INVALID);
637
if (whandle && ici.tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
638
assert(mod == whandle->modifier || !winsys_modifier);
639
idfmeci.sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT;
640
idfmeci.pNext = ici.pNext;
641
idfmeci.drmFormatModifier = mod;
643
idfmeci.drmFormatModifierPlaneCount = 1;
644
idfmeci.pPlaneLayouts = &plane_layout;
646
ici.pNext = &idfmeci;
647
} else if (ici.tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
648
idfmlci.sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT;
649
idfmlci.pNext = ici.pNext;
650
idfmlci.drmFormatModifierCount = modifiers_count;
651
idfmlci.pDrmFormatModifiers = modifiers;
652
ici.pNext = &idfmlci;
653
} else if (ici.tiling == VK_IMAGE_TILING_OPTIMAL) {
661
*optimal_tiling = ici.tiling == VK_IMAGE_TILING_OPTIMAL;
663
if (ici.usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT)
664
obj->transfer_dst = true;
666
if (util_format_is_yuv(templ->format)) {
667
VkFormatFeatureFlags feats = VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM;
668
switch (ici.tiling) {
669
case VK_IMAGE_TILING_LINEAR:
670
feats = screen->format_props[templ->format].linearTilingFeatures;
672
case VK_IMAGE_TILING_OPTIMAL:
673
feats = screen->format_props[templ->format].optimalTilingFeatures;
675
case VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT:
677
If is tiling then VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT, the value of
678
imageCreateFormatFeatures is found by calling vkGetPhysicalDeviceFormatProperties2
679
with VkImageFormatProperties::format equal to VkImageCreateInfo::format and with
680
VkDrmFormatModifierPropertiesListEXT chained into VkImageFormatProperties2; by
681
collecting all members of the returned array
682
VkDrmFormatModifierPropertiesListEXT::pDrmFormatModifierProperties
683
whose drmFormatModifier belongs to imageCreateDrmFormatModifiers; and by taking the bitwise
684
intersection, over the collected array members, of drmFormatModifierTilingFeatures.
685
(The resultant imageCreateFormatFeatures may be empty).
686
* -Chapter 12. Resource Creation
688
for (unsigned i = 0; i < screen->modifier_props[templ->format].drmFormatModifierCount; i++)
689
feats &= screen->modifier_props[templ->format].pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
692
unreachable("unknown tiling");
694
if (feats & VK_FORMAT_FEATURE_DISJOINT_BIT)
695
ici.flags |= VK_IMAGE_CREATE_DISJOINT_BIT;
696
VkSamplerYcbcrConversionCreateInfo sycci = {0};
697
sycci.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
699
sycci.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
700
sycci.ycbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709;
701
sycci.ycbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL;
702
sycci.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
703
sycci.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
704
sycci.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
705
sycci.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
706
if (!feats || (feats & VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT)) {
707
sycci.xChromaOffset = VK_CHROMA_LOCATION_COSITED_EVEN;
708
sycci.yChromaOffset = VK_CHROMA_LOCATION_COSITED_EVEN;
710
assert(feats & VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT);
711
sycci.xChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
712
sycci.yChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
714
sycci.chromaFilter = VK_FILTER_LINEAR;
715
sycci.forceExplicitReconstruction = VK_FALSE;
716
VkResult res = VKSCR(CreateSamplerYcbcrConversion)(screen->dev, &sycci, NULL, &obj->sampler_conversion);
717
if (res != VK_SUCCESS) {
718
mesa_loge("ZINK: vkCreateSamplerYcbcrConversion failed");
723
VkResult result = VKSCR(CreateImage)(screen->dev, &ici, NULL, &obj->image);
724
if (result != VK_SUCCESS) {
726
for (unsigned i = 0; i < modifiers_count; i++) {
727
if (modifiers[i] == mod)
729
idfmeci.drmFormatModifier = modifiers[i];
730
result = VKSCR(CreateImage)(screen->dev, &ici, NULL, &obj->image);
731
if (result == VK_SUCCESS)
736
if (result != VK_SUCCESS) {
737
mesa_loge("ZINK: vkCreateImage failed");
741
if (ici.tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
742
VkImageDrmFormatModifierPropertiesEXT modprops = {0};
743
modprops.sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT;
744
result = VKSCR(GetImageDrmFormatModifierPropertiesEXT)(screen->dev, obj->image, &modprops);
745
if (result != VK_SUCCESS) {
746
mesa_loge("ZINK: vkGetImageDrmFormatModifierPropertiesEXT failed");
749
obj->modifier = modprops.drmFormatModifier;
750
unsigned num_planes = screen->base.get_dmabuf_modifier_planes(&screen->base, obj->modifier, templ->format);
751
obj->modifier_aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT;
753
obj->modifier_aspect |= VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT;
755
obj->modifier_aspect |= VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT;
757
obj->modifier_aspect |= VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT;
758
assert(num_planes <= 4);
761
if (VKSCR(GetImageMemoryRequirements2)) {
762
VkMemoryRequirements2 req2;
763
req2.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
764
VkImageMemoryRequirementsInfo2 info2;
765
info2.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
767
info2.image = obj->image;
768
VkMemoryDedicatedRequirements ded;
769
ded.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
772
VkImagePlaneMemoryRequirementsInfo plane;
773
plane.sType = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO;
776
info2.pNext = &plane;
777
for (unsigned i = 0; i < num_planes; i++) {
778
assert(i < ARRAY_SIZE(plane_aspects));
779
plane.planeAspect = plane_aspects[i];
780
VKSCR(GetImageMemoryRequirements2)(screen->dev, &info2, &req2);
782
reqs.alignment = req2.memoryRequirements.alignment;
783
obj->plane_sizes[i] = req2.memoryRequirements.size;
784
reqs.size += req2.memoryRequirements.size;
785
reqs.memoryTypeBits |= req2.memoryRequirements.memoryTypeBits;
786
need_dedicated |= ded.prefersDedicatedAllocation || ded.requiresDedicatedAllocation;
789
VKSCR(GetImageMemoryRequirements)(screen->dev, obj->image, &reqs);
791
if (templ->usage == PIPE_USAGE_STAGING && ici.tiling == VK_IMAGE_TILING_LINEAR)
792
flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
794
flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
796
obj->vkflags = ici.flags;
797
obj->vkusage = ici.usage;
799
obj->alignment = reqs.alignment;
801
if (templ->flags & PIPE_RESOURCE_FLAG_MAP_COHERENT || templ->usage == PIPE_USAGE_DYNAMIC)
802
flags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
803
else if (!(flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) &&
804
templ->usage == PIPE_USAGE_STAGING)
805
flags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
807
if (templ->bind & ZINK_BIND_TRANSIENT)
808
flags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
810
VkMemoryAllocateInfo mai;
811
enum zink_alloc_flag aflags = templ->flags & PIPE_RESOURCE_FLAG_SPARSE ? ZINK_ALLOC_SPARSE : 0;
812
mai.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
814
mai.allocationSize = reqs.size;
815
enum zink_heap heap = zink_heap_from_domain_flags(flags, aflags);
816
mai.memoryTypeIndex = screen->heap_map[heap];
817
if (unlikely(!(reqs.memoryTypeBits & BITFIELD_BIT(mai.memoryTypeIndex)))) {
818
/* not valid based on reqs; demote to more compatible type */
820
case ZINK_HEAP_DEVICE_LOCAL_VISIBLE:
821
heap = ZINK_HEAP_DEVICE_LOCAL;
823
case ZINK_HEAP_HOST_VISIBLE_CACHED:
824
heap = ZINK_HEAP_HOST_VISIBLE_COHERENT;
829
mai.memoryTypeIndex = screen->heap_map[heap];
830
assert(reqs.memoryTypeBits & BITFIELD_BIT(mai.memoryTypeIndex));
833
VkMemoryDedicatedAllocateInfo ded_alloc_info = {
834
.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
837
.buffer = VK_NULL_HANDLE,
840
if (screen->info.have_KHR_dedicated_allocation && need_dedicated) {
841
ded_alloc_info.pNext = mai.pNext;
842
mai.pNext = &ded_alloc_info;
845
VkExportMemoryAllocateInfo emai;
846
if ((templ->bind & ZINK_BIND_VIDEO) || ((templ->bind & PIPE_BIND_SHARED) && shared) || (templ->bind & ZINK_BIND_DMABUF)) {
847
emai.sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
848
emai.handleTypes = export_types;
850
emai.pNext = mai.pNext;
852
obj->exportable = true;
855
#ifdef ZINK_USE_DMABUF
856
VkImportMemoryFdInfoKHR imfi = {
857
VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
863
imfi.handleType = external;
864
imfi.fd = os_dupfd_cloexec(whandle->handle);
866
mesa_loge("ZINK: failed to dup dmabuf fd: %s\n", strerror(errno));
870
imfi.pNext = mai.pNext;
875
unsigned alignment = MAX2(reqs.alignment, 256);
876
if (templ->usage == PIPE_USAGE_STAGING && obj->is_buffer)
877
alignment = MAX2(alignment, screen->info.props.limits.minMemoryMapAlignment);
878
obj->alignment = alignment;
880
obj->bo = zink_bo(zink_bo_create(screen, reqs.size, alignment, heap, mai.pNext ? ZINK_ALLOC_NO_SUBALLOC : 0, mai.pNext));
882
if (heap == ZINK_HEAP_DEVICE_LOCAL_VISIBLE) {
883
if (templ->flags & PIPE_RESOURCE_FLAG_MAP_COHERENT || templ->usage == PIPE_USAGE_DYNAMIC)
884
heap = ZINK_HEAP_HOST_VISIBLE_COHERENT;
886
heap = ZINK_HEAP_DEVICE_LOCAL;
891
if (aflags == ZINK_ALLOC_SPARSE) {
892
obj->size = templ->width0;
894
obj->offset = zink_bo_get_offset(obj->bo);
895
obj->size = zink_bo_get_size(obj->bo);
898
obj->coherent = obj->bo->base.placement & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
899
if (!(templ->flags & PIPE_RESOURCE_FLAG_SPARSE)) {
900
obj->host_visible = obj->bo->base.placement & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
903
if (templ->target == PIPE_BUFFER) {
904
if (!(templ->flags & PIPE_RESOURCE_FLAG_SPARSE)) {
905
if (VKSCR(BindBufferMemory)(screen->dev, obj->buffer, zink_bo_get_mem(obj->bo), obj->offset) != VK_SUCCESS) {
906
mesa_loge("ZINK: vkBindBufferMemory failed");
909
if (obj->storage_buffer && VKSCR(BindBufferMemory)(screen->dev, obj->storage_buffer, zink_bo_get_mem(obj->bo), obj->offset) != VK_SUCCESS) {
910
mesa_loge("ZINK: vkBindBufferMemory failed");
915
if (num_planes > 1) {
916
VkBindImageMemoryInfo infos[3];
917
VkBindImagePlaneMemoryInfo planes[3];
919
for (unsigned i = 0; i < num_planes; i++) {
920
infos[i].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
921
infos[i].pNext = &planes[i];
922
infos[i].image = obj->image;
923
infos[i].memory = zink_bo_get_mem(obj->bo);
924
infos[i].memoryOffset = offset;
925
planes[i].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO;
926
planes[i].pNext = NULL;
927
planes[i].planeAspect = plane_aspects[i];
928
offset += obj->plane_sizes[i];
930
if (VKSCR(BindImageMemory2)(screen->dev, num_planes, infos) != VK_SUCCESS) {
931
mesa_loge("ZINK: vkBindImageMemory2 failed");
935
if (!(templ->flags & PIPE_RESOURCE_FLAG_SPARSE))
936
if (VKSCR(BindImageMemory)(screen->dev, obj->image, zink_bo_get_mem(obj->bo), obj->offset) != VK_SUCCESS) {
937
mesa_loge("ZINK: vkBindImageMemory failed");
945
zink_bo_unref(screen, obj->bo);
948
if (templ->target == PIPE_BUFFER) {
949
VKSCR(DestroyBuffer)(screen->dev, obj->buffer, NULL);
950
VKSCR(DestroyBuffer)(screen->dev, obj->storage_buffer, NULL);
952
VKSCR(DestroyImage)(screen->dev, obj->image, NULL);
958
/* Core constructor shared by every zink resource-creation entrypoint
 * (plain create, create-with-modifiers, drawable create, from-handle,
 * from-memobj).  Allocates the zink_resource wrapper, creates the backing
 * zink_resource_object, then fills in gallium- and zink-side bookkeeping
 * (valid ranges, sparse info, kopper displaytarget, view caches).
 *
 * NOTE(review): this chunk is a truncated extraction — the interleaved
 * bare numeric lines and several dropped lines (error paths, closing
 * braces) are artifacts of the extraction, not code.
 */
static struct pipe_resource *
959
resource_create(struct pipe_screen *pscreen,
960
const struct pipe_resource *templ,
961
struct winsys_handle *whandle,
962
unsigned external_usage,
963
const uint64_t *modifiers, int modifiers_count,
964
const void *loader_private)
966
struct zink_screen *screen = zink_screen(pscreen);
967
struct zink_resource *res = CALLOC_STRUCT_CL(zink_resource);
969
/* keep a private copy of the caller's modifier list for later queries */
if (modifiers_count > 0 && screen->info.have_EXT_image_drm_format_modifier) {
971
res->modifiers_count = modifiers_count;
972
res->modifiers = mem_dup(modifiers, modifiers_count * sizeof(uint64_t));
973
if (!res->modifiers) {
979
res->base.b = *templ;
981
threaded_resource_init(&res->base.b, false);
982
pipe_reference_init(&res->base.b.reference, 1);
983
res->base.b.screen = pscreen;
985
bool optimal_tiling = false;
986
/* templ2 is a mutable copy so sparse/e5 workarounds don't alter the caller's templ */
struct pipe_resource templ2 = *templ;
987
if (templ2.flags & PIPE_RESOURCE_FLAG_SPARSE)
988
templ2.bind |= PIPE_BIND_SHADER_IMAGE;
989
/* faked_e5sparse: emulate sparse R9G9B9E5 with a normal allocation */
if (screen->faked_e5sparse && templ->format == PIPE_FORMAT_R9G9B9E5_FLOAT) {
990
templ2.flags &= ~PIPE_RESOURCE_FLAG_SPARSE;
991
res->base.b.flags &= ~PIPE_RESOURCE_FLAG_SPARSE;
993
res->obj = resource_object_create(screen, &templ2, whandle, &optimal_tiling, modifiers, modifiers_count, loader_private);
995
free(res->modifiers);
1000
res->internal_format = templ->format;
1001
if (templ->target == PIPE_BUFFER) {
1002
util_range_init(&res->valid_buffer_range);
1003
res->base.b.bind |= PIPE_BIND_SHADER_IMAGE;
1004
if (!screen->resizable_bar && templ->width0 >= 8196) {
1005
/* We don't want to evict buffers from VRAM by mapping them for CPU access,
1006
* because they might never be moved back again. If a buffer is large enough,
1007
* upload data by copying from a temporary GTT buffer. 8K might not seem much,
1008
* but there can be 100000 buffers.
1010
* This tweak improves performance for viewperf.
1012
res->base.b.flags |= PIPE_RESOURCE_FLAG_DONT_MAP_DIRECTLY;
1015
if (templ->flags & PIPE_RESOURCE_FLAG_SPARSE)
1016
res->base.b.bind |= PIPE_BIND_SHADER_IMAGE;
1017
if (templ->flags & PIPE_RESOURCE_FLAG_SPARSE) {
1019
VKSCR(GetImageSparseMemoryRequirements)(screen->dev, res->obj->image, &count, &res->sparse);
1020
res->base.b.nr_sparse_levels = res->sparse.imageMipTailFirstLod;
1022
res->format = zink_get_format(screen, templ->format);
1023
/* 1D images may need to be faked as 2D for z/s or sparse support */
if (templ->target == PIPE_TEXTURE_1D || templ->target == PIPE_TEXTURE_1D_ARRAY) {
1024
res->need_2D = (screen->need_2D_zs && util_format_is_depth_or_stencil(templ->format)) ||
1025
(screen->need_2D_sparse && (templ->flags & PIPE_RESOURCE_FLAG_SPARSE));
1027
res->dmabuf_acquire = whandle && whandle->type == WINSYS_HANDLE_TYPE_FD;
1028
res->layout = res->dmabuf_acquire ? VK_IMAGE_LAYOUT_PREINITIALIZED : VK_IMAGE_LAYOUT_UNDEFINED;
1029
res->optimal_tiling = optimal_tiling;
1030
res->aspect = aspect_from_format(templ->format);
1033
/* kopper/window-system path: either create a new displaytarget or share
 * the one owned by the back resource passed in as loader_private */
if (loader_private) {
1034
if (templ->bind & PIPE_BIND_DISPLAY_TARGET) {
1036
res->obj->dt = zink_kopper_displaytarget_create(screen,
1043
assert(res->obj->dt);
1046
struct zink_resource *back = (void*)loader_private;
1047
struct kopper_displaytarget *cdt = back->obj->dt;
1049
assert(back->obj->dt);
1050
res->obj->dt = back->obj->dt;
1052
struct kopper_displaytarget *cdt = res->obj->dt;
1053
if (zink_kopper_has_srgb(cdt))
1054
res->obj->vkflags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
1055
if (cdt->swapchain->scci.flags == VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
1056
res->obj->vkflags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR;
1057
res->obj->vkusage = cdt->swapchain->scci.imageUsage;
1058
res->base.b.bind |= PIPE_BIND_DISPLAY_TARGET;
1059
res->optimal_tiling = true;
1060
res->swapchain = true;
1062
/* per-resource view caches: bufferviews for buffers, surfaces for images */
if (res->obj->is_buffer) {
1063
res->base.buffer_id_unique = util_idalloc_mt_alloc(&screen->buffer_ids);
1064
_mesa_hash_table_init(&res->bufferview_cache, NULL, NULL, equals_bvci);
1065
simple_mtx_init(&res->bufferview_mtx, mtx_plain);
1067
_mesa_hash_table_init(&res->surface_cache, NULL, NULL, equals_ivci);
1068
simple_mtx_init(&res->surface_mtx, mtx_plain);
1070
if (res->obj->exportable)
1071
res->base.b.bind |= ZINK_BIND_DMABUF;
1072
return &res->base.b;
1075
static struct pipe_resource *
1076
zink_resource_create(struct pipe_screen *pscreen,
1077
const struct pipe_resource *templ)
1079
return resource_create(pscreen, templ, NULL, 0, NULL, 0, NULL);
1082
static struct pipe_resource *
1083
zink_resource_create_with_modifiers(struct pipe_screen *pscreen, const struct pipe_resource *templ,
1084
const uint64_t *modifiers, int modifiers_count)
1086
return resource_create(pscreen, templ, NULL, 0, modifiers, modifiers_count, NULL);
1089
static struct pipe_resource *
1090
zink_resource_create_drawable(struct pipe_screen *pscreen,
1091
const struct pipe_resource *templ,
1092
const void *loader_private)
1094
return resource_create(pscreen, templ, NULL, 0, NULL, 0, loader_private);
1098
/* Retrofit additional bind flags onto an existing resource: allocate a new
 * backing object created with the extended binds, then copy every mip level
 * from the old backing via resource_copy_region.  Used e.g. to add
 * ZINK_BIND_DMABUF or PIPE_BIND_SHADER_IMAGE after creation.
 * NOTE(review): truncated extraction — the return-type line and some
 * error-path lines are missing from this view.
 */
add_resource_bind(struct zink_context *ctx, struct zink_resource *res, unsigned bind)
1100
struct zink_screen *screen = zink_screen(ctx->base.screen);
1101
/* caller must not pass binds the resource already has */
assert((res->base.b.bind & bind) == 0);
1102
zink_resource_image_barrier(ctx, res, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, 0, 0);
1103
res->base.b.bind |= bind;
1104
struct zink_resource_object *old_obj = res->obj;
1105
/* dmabuf export needs at least one modifier; default to LINEAR */
if (bind & ZINK_BIND_DMABUF && !res->modifiers_count) {
1106
res->modifiers_count = 1;
1107
res->modifiers = malloc(res->modifiers_count * sizeof(uint64_t));
1108
res->modifiers[0] = DRM_FORMAT_MOD_LINEAR;
1110
struct zink_resource_object *new_obj = resource_object_create(screen, &res->base.b, NULL, &res->optimal_tiling, res->modifiers, res->modifiers_count, NULL);
1112
debug_printf("new backing resource alloc failed!");
1113
/* roll back the bind flags on allocation failure */
res->base.b.bind &= ~bind;
1116
/* shallow copy that still points at the old backing: the copy source */
struct zink_resource staging = *res;
1117
staging.obj = old_obj;
1118
staging.all_binds = 0;
1119
res->layout = VK_IMAGE_LAYOUT_UNDEFINED;
1120
res->obj->access = 0;
1121
res->obj->access_stage = 0;
1122
bool needs_unref = true;
1123
/* if the old backing is still in flight, hand its ref to the batch */
if (zink_resource_has_usage(res)) {
1124
zink_batch_reference_resource_move(&ctx->batch, res);
1125
needs_unref = false;
1128
zink_descriptor_set_refs_clear(&old_obj->desc_set_refs, old_obj);
1129
/* copy every mip level (all layers) from old backing to new */
for (unsigned i = 0; i <= res->base.b.last_level; i++) {
1130
struct pipe_box box = {0, 0, 0,
1131
u_minify(res->base.b.width0, i),
1132
u_minify(res->base.b.height0, i), res->base.b.array_size};
1133
box.depth = util_num_layers(&res->base.b, i);
1134
ctx->base.resource_copy_region(&ctx->base, &res->base.b, i, 0, 0, 0, &staging.base.b, i, &box);
1137
zink_resource_object_reference(screen, &old_obj, NULL);
1142
/* pipe_screen::resource_get_param hook: answer metadata queries (plane
 * count, stride, offset, modifier, layer stride, native handles) about a
 * resource, mostly via vkGetImageSubresourceLayout.
 * NOTE(review): truncated extraction — the switch header, plane-index
 * selection, case `break`s and #endif lines are missing from this view.
 */
zink_resource_get_param(struct pipe_screen *pscreen, struct pipe_context *pctx,
1143
struct pipe_resource *pres,
1147
enum pipe_resource_param param,
1148
unsigned handle_usage,
1151
struct zink_screen *screen = zink_screen(pscreen);
1152
struct zink_resource *res = zink_resource(pres);
1153
struct zink_resource_object *obj = res->obj;
1154
struct winsys_handle whandle;
1155
VkImageAspectFlags aspect;
1156
/* pick the aspect used for subresource-layout queries: memory-plane
 * aspects for modifier images, plane 0 for ycbcr, else the format aspect */
if (res->modifiers) {
1159
aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT;
1162
aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT;
1165
aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT;
1168
aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT;
1171
unreachable("how many planes you got in this thing?");
1173
} else if (res->obj->sampler_conversion) {
1174
aspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
1176
aspect = res->aspect;
1179
case PIPE_RESOURCE_PARAM_NPLANES:
1180
if (screen->info.have_EXT_image_drm_format_modifier)
1181
*value = util_format_get_num_planes(res->drm_format);
1186
case PIPE_RESOURCE_PARAM_STRIDE: {
1187
VkImageSubresource sub_res = {0};
1188
VkSubresourceLayout sub_res_layout = {0};
1190
sub_res.aspectMask = aspect;
1192
VKSCR(GetImageSubresourceLayout)(screen->dev, obj->image, &sub_res, &sub_res_layout);
1194
*value = sub_res_layout.rowPitch;
1198
case PIPE_RESOURCE_PARAM_OFFSET: {
1199
VkImageSubresource isr = {
1204
VkSubresourceLayout srl;
1205
VKSCR(GetImageSubresourceLayout)(screen->dev, obj->image, &isr, &srl);
1206
*value = srl.offset;
1210
case PIPE_RESOURCE_PARAM_MODIFIER: {
1211
*value = obj->modifier;
1215
case PIPE_RESOURCE_PARAM_LAYER_STRIDE: {
1216
VkImageSubresource isr = {
1221
VkSubresourceLayout srl;
1222
VKSCR(GetImageSubresourceLayout)(screen->dev, obj->image, &isr, &srl);
1223
/* 3D textures advance by depth pitch; arrays by array pitch */
if (res->base.b.target == PIPE_TEXTURE_3D)
1224
*value = srl.depthPitch;
1226
*value = srl.arrayPitch;
1230
case PIPE_RESOURCE_PARAM_HANDLE_TYPE_KMS:
1232
case PIPE_RESOURCE_PARAM_HANDLE_TYPE_SHARED:
1233
case PIPE_RESOURCE_PARAM_HANDLE_TYPE_FD: {
1234
#ifdef ZINK_USE_DMABUF
1235
memset(&whandle, 0, sizeof(whandle));
1236
if (param == PIPE_RESOURCE_PARAM_HANDLE_TYPE_SHARED)
1237
whandle.type = WINSYS_HANDLE_TYPE_SHARED;
1238
else if (param == PIPE_RESOURCE_PARAM_HANDLE_TYPE_FD)
1239
whandle.type = WINSYS_HANDLE_TYPE_FD;
1241
/* delegate the actual export to resource_get_handle */
if (!pscreen->resource_get_handle(pscreen, pctx, pres, &whandle, handle_usage))
1244
*value = whandle.handle;
1256
/* pipe_screen::resource_get_handle hook: export a resource as an fd/KMS
 * handle.  Lazily retrofits dmabuf binds via add_resource_bind if the
 * object was not created exportable, then exports the memory with
 * vkGetMemoryFdKHR and fills in modifier/offset/stride via get_param.
 * NOTE(review): truncated extraction — return statements, #else/#endif
 * and some closing braces are missing from this view.
 */
zink_resource_get_handle(struct pipe_screen *pscreen,
1257
struct pipe_context *context,
1258
struct pipe_resource *tex,
1259
struct winsys_handle *whandle,
1262
if (whandle->type == WINSYS_HANDLE_TYPE_FD || whandle->type == WINSYS_HANDLE_TYPE_KMS) {
1263
#ifdef ZINK_USE_DMABUF
1264
struct zink_resource *res = zink_resource(tex);
1265
struct zink_screen *screen = zink_screen(pscreen);
1266
struct zink_resource_object *obj = res->obj;
1268
if (whandle->type == WINSYS_HANDLE_TYPE_KMS) {
1269
whandle->handle = -1;
1271
/* not created exportable: rebuild the backing with dmabuf binds */
if (!res->obj->exportable) {
1272
assert(!res->all_binds); //TODO handle if problematic
1273
assert(!zink_resource_usage_is_unflushed(res));
1274
if (!add_resource_bind(screen->copy_context, res, ZINK_BIND_DMABUF | PIPE_BIND_SHARED))
1276
screen->copy_context->base.flush(&screen->copy_context->base, NULL, 0);
1280
VkMemoryGetFdInfoKHR fd_info = {0};
1282
fd_info.sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR;
1283
fd_info.memory = zink_bo_get_mem(obj->bo);
1284
/* FD handles export as dmabuf; others as opaque fd */
if (whandle->type == WINSYS_HANDLE_TYPE_FD)
1285
fd_info.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
1287
fd_info.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
1288
VkResult result = VKSCR(GetMemoryFdKHR)(screen->dev, &fd_info, &fd);
1289
if (result != VK_SUCCESS) {
1290
mesa_loge("ZINK: vkGetMemoryFdKHR failed");
1293
whandle->handle = fd;
1296
/* fill in layout metadata for the importer */
zink_resource_get_param(pscreen, context, tex, 0, 0, 0, PIPE_RESOURCE_PARAM_MODIFIER, 0, &value);
1297
whandle->modifier = value;
1298
zink_resource_get_param(pscreen, context, tex, 0, 0, 0, PIPE_RESOURCE_PARAM_OFFSET, 0, &value);
1299
whandle->offset = value;
1300
zink_resource_get_param(pscreen, context, tex, 0, 0, 0, PIPE_RESOURCE_PARAM_STRIDE, 0, &value);
1301
whandle->stride = value;
1309
/* pipe_screen::resource_from_handle hook: import an externally-created
 * resource.  Rejects modifier imports when the modifier extension is
 * unsupported, patches in the handle's format when the templ has none,
 * and forwards to the shared constructor with the (single) modifier.
 * NOTE(review): truncated extraction — #else/#endif and some lines
 * (e.g. modifier_count assignment) are missing from this view.
 */
static struct pipe_resource *
1310
zink_resource_from_handle(struct pipe_screen *pscreen,
1311
const struct pipe_resource *templ,
1312
struct winsys_handle *whandle,
1315
#ifdef ZINK_USE_DMABUF
1316
if (whandle->modifier != DRM_FORMAT_MOD_INVALID &&
1317
!zink_screen(pscreen)->info.have_EXT_image_drm_format_modifier)
1320
struct pipe_resource templ2 = *templ;
1321
if (templ->format == PIPE_FORMAT_NONE)
1322
templ2.format = whandle->format;
1324
uint64_t modifier = DRM_FORMAT_MOD_INVALID;
1325
int modifier_count = 0;
1326
if (whandle->modifier != DRM_FORMAT_MOD_INVALID) {
1327
modifier = whandle->modifier;
1330
struct pipe_resource *pres = resource_create(pscreen, &templ2, whandle, usage, &modifier, modifier_count, NULL);
1332
/* remember the importer-visible format for NPLANES queries */
zink_resource(pres)->drm_format = whandle->format;
1339
/* CPU-side wrapper for an imported memory object (pipe_memory_object):
 * keeps a copy of the winsys_handle used to import the memory so later
 * resource_from_memobj calls can re-use it. */
struct zink_memory_object {
1340
struct pipe_memory_object b;
1341
struct winsys_handle whandle;
/* pipe_screen::memobj_create_from_handle hook: wrap an external memory
 * handle in a zink_memory_object.  The fd is duplicated (cloexec) so the
 * memobj owns its own copy independent of the caller's handle. */
static struct pipe_memory_object *
1345
zink_memobj_create_from_handle(struct pipe_screen *pscreen, struct winsys_handle *whandle, bool dedicated)
1347
struct zink_memory_object *memobj = CALLOC_STRUCT(zink_memory_object);
1350
memcpy(&memobj->whandle, whandle, sizeof(struct winsys_handle));
1351
memobj->whandle.type = ZINK_EXTERNAL_MEMORY_HANDLE;
1352
#ifdef ZINK_USE_DMABUF
1353
memobj->whandle.handle = os_dupfd_cloexec(whandle->handle);
1355
return (struct pipe_memory_object *)memobj;
1359
/* pipe_screen::memobj_destroy hook: close the duplicated fd (dmabuf
 * builds only) and free the wrapper.
 * NOTE(review): the FREE()/closing lines are missing from this view. */
zink_memobj_destroy(struct pipe_screen *pscreen, struct pipe_memory_object *pmemobj)
1361
#ifdef ZINK_USE_DMABUF
1362
struct zink_memory_object *memobj = (struct zink_memory_object *)pmemobj;
1363
close(memobj->whandle.handle);
1368
/* pipe_screen::resource_from_memobj hook: create a resource backed by a
 * previously imported memory object, re-using its stored winsys handle.
 * NOTE(review): one parameter line (after pmemobj) is missing from this
 * truncated view. */
static struct pipe_resource *
1369
zink_resource_from_memobj(struct pipe_screen *pscreen,
1370
const struct pipe_resource *templ,
1371
struct pipe_memory_object *pmemobj,
1374
struct zink_memory_object *memobj = (struct zink_memory_object *)pmemobj;
1376
return resource_create(pscreen, templ, &memobj->whandle, 0, NULL, 0, NULL);
1380
/* Discard a buffer's current contents by swapping in a freshly allocated
 * backing object (orphaning).  Bails out for sparse buffers and for
 * buffers with no valid range or no GPU usage.
 * NOTE(review): truncated extraction — the return-type line, early
 * returns and some closing braces are missing from this view. */
invalidate_buffer(struct zink_context *ctx, struct zink_resource *res)
1382
struct zink_screen *screen = zink_screen(ctx->base.screen);
1384
assert(res->base.b.target == PIPE_BUFFER);
1386
/* sparse buffers cannot be orphaned */
if (res->base.b.flags & PIPE_RESOURCE_FLAG_SPARSE)
1389
if (res->valid_buffer_range.start > res->valid_buffer_range.end)
1393
ctx->dirty_so_targets = true;
1394
/* force counter buffer reset */
1395
res->so_valid = false;
1397
util_range_set_empty(&res->valid_buffer_range);
1398
/* idle buffer: no need to replace the backing at all */
if (!zink_resource_has_usage(res))
1401
struct zink_resource_object *old_obj = res->obj;
1402
struct zink_resource_object *new_obj = resource_object_create(screen, &res->base.b, NULL, NULL, NULL, 0, NULL);
1404
debug_printf("new backing resource alloc failed!");
1407
/* this ref must be transferred before rebind or else BOOM */
1408
zink_batch_reference_resource_move(&ctx->batch, res);
1410
zink_resource_rebind(ctx, res);
1411
zink_descriptor_set_refs_clear(&old_obj->desc_set_refs, old_obj);
1417
zink_resource_invalidate(struct pipe_context *pctx, struct pipe_resource *pres)
1419
if (pres->target == PIPE_BUFFER)
1420
invalidate_buffer(zink_context(pctx), zink_resource(pres));
1424
/* Copy between a transfer's staging buffer and its image (direction given
 * by which side is the PIPE_BUFFER).  Uses the GPU copy path when the
 * destination supports transfer_dst, otherwise falls back to the blitter.
 * NOTE(review): truncated extraction — the 'x' computation and several
 * argument lines are missing from this view. */
zink_transfer_copy_bufimage(struct zink_context *ctx,
1425
struct zink_resource *dst,
1426
struct zink_resource *src,
1427
struct zink_transfer *trans)
1429
/* depth-only and stencil-only maps are mutually exclusive */
assert((trans->base.b.usage & (PIPE_MAP_DEPTH_ONLY | PIPE_MAP_STENCIL_ONLY)) !=
1430
(PIPE_MAP_DEPTH_ONLY | PIPE_MAP_STENCIL_ONLY));
1432
bool buf2img = src->base.b.target == PIPE_BUFFER;
1434
struct pipe_box box = trans->base.b.box;
1437
box.x = trans->offset;
1439
if (dst->obj->transfer_dst)
1440
zink_copy_image_buffer(ctx, dst, src, trans->base.b.level, buf2img ? x : 0,
1441
box.y, box.z, trans->base.b.level, &box, trans->base.b.usage);
1443
util_blitter_copy_texture(ctx->blitter, &dst->base.b, trans->base.b.level,
1444
x, box.y, box.z, &src->base.b,
1448
/* Expand [*offset, *offset + *size) so both ends are aligned to
 * `alignment` (nonCoherentAtomSize for flush/invalidate ranges), clamped
 * to the object's total size.
 * NOTE(review): truncated extraction — the else-branches of both
 * conditionals are missing from this view. */
ALWAYS_INLINE static void
1449
align_offset_size(const VkDeviceSize alignment, VkDeviceSize *offset, VkDeviceSize *size, VkDeviceSize obj_size)
1451
VkDeviceSize align = *offset % alignment;
1452
if (alignment - 1 > *offset)
1455
/* pull the start back to the alignment boundary, growing the size */
*offset -= align, *size += align;
1456
align = alignment - (*size % alignment);
1457
/* don't let the aligned end run past the end of the object */
if (*offset + *size + align > obj_size)
1458
*size = obj_size - *offset;
1464
/* Build a VkMappedMemoryRange covering [offset, offset+size) of the
 * object's memory, with both bounds aligned to nonCoherentAtomSize as
 * required for vkFlush/InvalidateMappedMemoryRanges.
 * NOTE(review): the return-type line and the tail of the designated
 * initializer (offset/size members, return) are missing from this view. */
zink_resource_init_mem_range(struct zink_screen *screen, struct zink_resource_object *obj, VkDeviceSize offset, VkDeviceSize size)
1467
align_offset_size(screen->info.props.limits.nonCoherentAtomSize, &offset, &size, obj->size);
1468
VkMappedMemoryRange range = {
1469
VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
1471
zink_bo_get_mem(obj->bo),
1480
map_resource(struct zink_screen *screen, struct zink_resource *res)
1482
assert(res->obj->host_visible);
1483
return zink_bo_map(screen, res->obj->bo);
1487
unmap_resource(struct zink_screen *screen, struct zink_resource *res)
1489
zink_bo_unmap(screen, res->obj->bo);
1492
/* Allocate and minimally initialize a zink_transfer for a map operation.
 * Thread-safe maps use plain calloc (freed into the driver-thread pool
 * later); threaded-unsync maps use the unsync slab; everything else the
 * regular slab.
 * NOTE(review): truncated view — NULL checks and closing braces missing. */
static struct zink_transfer *
1493
create_transfer(struct zink_context *ctx, struct pipe_resource *pres, unsigned usage, const struct pipe_box *box)
1495
struct zink_transfer *trans;
1497
if (usage & PIPE_MAP_THREAD_SAFE)
1498
trans = calloc(1, sizeof(*trans));
1499
else if (usage & TC_TRANSFER_MAP_THREADED_UNSYNC)
1500
trans = slab_zalloc(&ctx->transfer_pool_unsync);
1502
trans = slab_zalloc(&ctx->transfer_pool);
1506
/* the transfer holds a reference on the resource until unmapped */
pipe_resource_reference(&trans->base.b.resource, pres);
1508
trans->base.b.usage = usage;
1509
trans->base.b.box = *box;
1514
/* Free a zink_transfer back to the pool it (logically) came from.
 * NOTE(review): truncated view — the free() branch for THREAD_SAFE
 * transfers and the else-branch are missing. */
destroy_transfer(struct zink_context *ctx, struct zink_transfer *trans)
1516
if (trans->base.b.usage & PIPE_MAP_THREAD_SAFE) {
1519
/* Don't use pool_transfers_unsync. We are always in the driver
1520
* thread. Freeing an object into a different pool is allowed.
1522
slab_free(&ctx->transfer_pool, trans);
1527
/* pipe_context::buffer_map hook.  Chooses between direct mapping,
 * unsynchronized mapping, discard/orphaning, and staging-buffer copies
 * depending on usage flags, residency (host_visible) and GPU usage, then
 * returns a CPU pointer adjusted by map_offset.
 * NOTE(review): this block is a truncated extraction — the interleaved
 * numeric lines are artifacts, and various declarations (ptr/offset),
 * early returns, success/fail labels and closing braces are missing. */
zink_buffer_map(struct pipe_context *pctx,
1528
struct pipe_resource *pres,
1531
const struct pipe_box *box,
1532
struct pipe_transfer **transfer)
1534
struct zink_context *ctx = zink_context(pctx);
1535
struct zink_screen *screen = zink_screen(pctx->screen);
1536
struct zink_resource *res = zink_resource(pres);
1537
struct zink_transfer *trans = create_transfer(ctx, pres, usage, box);
1543
/* user-pointer buffers are always directly accessible */
if (res->base.is_user_ptr)
1544
usage |= PIPE_MAP_PERSISTENT;
1546
/* See if the buffer range being mapped has never been initialized,
1547
* in which case it can be mapped unsynchronized. */
1548
if (!(usage & (PIPE_MAP_UNSYNCHRONIZED | TC_TRANSFER_MAP_NO_INFER_UNSYNCHRONIZED)) &&
1549
usage & PIPE_MAP_WRITE && !res->base.is_shared &&
1550
!util_ranges_intersect(&res->valid_buffer_range, box->x, box->x + box->width)) {
1551
usage |= PIPE_MAP_UNSYNCHRONIZED;
1554
/* If discarding the entire range, discard the whole resource instead. */
1555
if (usage & PIPE_MAP_DISCARD_RANGE && box->x == 0 && box->width == res->base.b.width0) {
1556
usage |= PIPE_MAP_DISCARD_WHOLE_RESOURCE;
1559
/* If a buffer in VRAM is too large and the range is discarded, don't
1560
* map it directly. This makes sure that the buffer stays in VRAM.
1562
bool force_discard_range = false;
1563
if (usage & (PIPE_MAP_DISCARD_WHOLE_RESOURCE | PIPE_MAP_DISCARD_RANGE) &&
1564
!(usage & PIPE_MAP_PERSISTENT) &&
1565
res->base.b.flags & PIPE_RESOURCE_FLAG_DONT_MAP_DIRECTLY) {
1566
usage &= ~(PIPE_MAP_DISCARD_WHOLE_RESOURCE | PIPE_MAP_UNSYNCHRONIZED);
1567
usage |= PIPE_MAP_DISCARD_RANGE;
1568
force_discard_range = true;
1571
/* whole-resource discard: try to orphan the buffer */
if (usage & PIPE_MAP_DISCARD_WHOLE_RESOURCE &&
1572
!(usage & (PIPE_MAP_UNSYNCHRONIZED | TC_TRANSFER_MAP_NO_INVALIDATE))) {
1573
assert(usage & PIPE_MAP_WRITE);
1575
if (invalidate_buffer(ctx, res)) {
1576
/* At this point, the buffer is always idle. */
1577
usage |= PIPE_MAP_UNSYNCHRONIZED;
1579
/* Fall back to a temporary buffer. */
1580
usage |= PIPE_MAP_DISCARD_RANGE;
1584
unsigned map_offset = box->x;
1585
if (usage & PIPE_MAP_DISCARD_RANGE &&
1586
(!res->obj->host_visible ||
1587
!(usage & (PIPE_MAP_UNSYNCHRONIZED | PIPE_MAP_PERSISTENT)))) {
1589
/* Check if mapping this buffer would cause waiting for the GPU.
1592
if (!res->obj->host_visible || force_discard_range ||
1593
!zink_resource_usage_check_completion(screen, res, ZINK_RESOURCE_ACCESS_RW)) {
1594
/* Do a wait-free write-only transfer using a temporary buffer. */
1597
/* If we are not called from the driver thread, we have
1598
* to use the uploader from u_threaded_context, which is
1599
* local to the calling thread.
1601
struct u_upload_mgr *mgr;
1602
if (usage & TC_TRANSFER_MAP_THREADED_UNSYNC)
1603
mgr = ctx->tc->base.stream_uploader;
1605
mgr = ctx->base.stream_uploader;
1606
u_upload_alloc(mgr, 0, box->width,
1607
screen->info.props.limits.minMemoryMapAlignment, &offset,
1608
(struct pipe_resource **)&trans->staging_res, (void **)&ptr);
1609
/* from here on, the staging buffer stands in for the real resource */
res = zink_resource(trans->staging_res);
1610
trans->offset = offset;
1611
usage |= PIPE_MAP_UNSYNCHRONIZED;
1612
ptr = ((uint8_t *)ptr);
1614
/* At this point, the buffer is always idle (we checked it above). */
1615
usage |= PIPE_MAP_UNSYNCHRONIZED;
1617
} else if (usage & PIPE_MAP_DONTBLOCK) {
1618
/* sparse/device-local will always need to wait since it has to copy */
1619
if (!res->obj->host_visible)
1621
if (!zink_resource_usage_check_completion(screen, res, ZINK_RESOURCE_ACCESS_WRITE))
1623
usage |= PIPE_MAP_UNSYNCHRONIZED;
1624
} else if (!(usage & PIPE_MAP_UNSYNCHRONIZED) &&
1625
(((usage & PIPE_MAP_READ) && !(usage & PIPE_MAP_PERSISTENT) && res->base.b.usage != PIPE_USAGE_STAGING) || !res->obj->host_visible)) {
1626
assert(!(usage & (TC_TRANSFER_MAP_THREADED_UNSYNC | PIPE_MAP_THREAD_SAFE)));
1627
/* read path: copy through a host-visible staging buffer */
if (!res->obj->host_visible || !(usage & PIPE_MAP_ONCE)) {
1628
trans->offset = box->x % screen->info.props.limits.minMemoryMapAlignment;
1629
trans->staging_res = pipe_buffer_create(&screen->base, PIPE_BIND_LINEAR, PIPE_USAGE_STAGING, box->width + trans->offset);
1630
if (!trans->staging_res)
1632
struct zink_resource *staging_res = zink_resource(trans->staging_res);
1633
zink_copy_buffer(ctx, staging_res, res, trans->offset, box->x, box->width);
1635
usage &= ~PIPE_MAP_UNSYNCHRONIZED;
1636
map_offset = trans->offset;
1638
} else if ((usage & PIPE_MAP_UNSYNCHRONIZED) && !res->obj->host_visible) {
1639
/* unsync write to device-local memory: stage and copy back on unmap */
trans->offset = box->x % screen->info.props.limits.minMemoryMapAlignment;
1640
trans->staging_res = pipe_buffer_create(&screen->base, PIPE_BIND_LINEAR, PIPE_USAGE_STAGING, box->width + trans->offset);
1641
if (!trans->staging_res)
1643
struct zink_resource *staging_res = zink_resource(trans->staging_res);
1645
map_offset = trans->offset;
1648
/* synchronized map: wait for outstanding GPU access before touching */
if (!(usage & PIPE_MAP_UNSYNCHRONIZED)) {
1649
if (usage & PIPE_MAP_WRITE)
1650
zink_resource_usage_wait(ctx, res, ZINK_RESOURCE_ACCESS_RW);
1652
zink_resource_usage_wait(ctx, res, ZINK_RESOURCE_ACCESS_WRITE);
1653
res->obj->access = 0;
1654
res->obj->access_stage = 0;
1658
/* if writing to a streamout buffer, ensure synchronization next time it's used */
1659
if (usage & PIPE_MAP_WRITE && res->so_valid) {
1660
ctx->dirty_so_targets = true;
1661
/* force counter buffer reset */
1662
res->so_valid = false;
1664
ptr = map_resource(screen, res);
1667
ptr = ((uint8_t *)ptr) + map_offset;
1670
if (!res->obj->coherent
1671
#if defined(MVK_VERSION)
1672
// Work around for MoltenVk limitation specifically on coherent memory
1673
// MoltenVk returns blank memory ranges when there should be data present
1674
// This is a known limitation of MoltenVK.
1675
// See https://github.com/KhronosGroup/MoltenVK/blob/master/Docs/MoltenVK_Runtime_UserGuide.md#known-moltenvk-limitations
1677
|| screen->instance_info.have_MVK_moltenvk
1680
VkDeviceSize size = box->width;
1681
VkDeviceSize offset = res->obj->offset + trans->offset;
1682
VkMappedMemoryRange range = zink_resource_init_mem_range(screen, res->obj, offset, size);
1683
if (VKSCR(InvalidateMappedMemoryRanges)(screen->dev, 1, &range) != VK_SUCCESS) {
1684
mesa_loge("ZINK: vkInvalidateMappedMemoryRanges failed");
1685
zink_bo_unmap(screen, res->obj->bo);
1689
trans->base.b.usage = usage;
1690
if (usage & PIPE_MAP_WRITE)
1691
util_range_add(&res->base.b, &res->valid_buffer_range, box->x, box->x + box->width);
1692
/* persistent non-coherent maps must be flushed on batch submit */
if ((usage & PIPE_MAP_PERSISTENT) && !(usage & PIPE_MAP_COHERENT))
1693
res->obj->persistent_maps++;
1696
*transfer = &trans->base.b;
1700
destroy_transfer(ctx, trans);
1705
/* pipe_context::texture_map hook.  Tiled or non-host-visible images are
 * mapped through a linear staging buffer (with a GPU copy for reads);
 * linear host-visible images are mapped directly using the layout from
 * vkGetImageSubresourceLayout.
 * NOTE(review): this block is a truncated extraction — the interleaved
 * numeric lines are artifacts, and declarations (ptr), some struct
 * initializer members, else-branches and closing braces are missing. */
zink_image_map(struct pipe_context *pctx,
1706
struct pipe_resource *pres,
1709
const struct pipe_box *box,
1710
struct pipe_transfer **transfer)
1712
struct zink_context *ctx = zink_context(pctx);
1713
struct zink_screen *screen = zink_screen(pctx->screen);
1714
struct zink_resource *res = zink_resource(pres);
1715
struct zink_transfer *trans = create_transfer(ctx, pres, usage, box);
1719
trans->base.b.level = level;
1722
if (usage & PIPE_MAP_WRITE && !(usage & PIPE_MAP_READ))
1723
/* this is like a blit, so we can potentially dump some clears or maybe we have to */
1724
zink_fb_clears_apply_or_discard(ctx, pres, zink_rect_from_box(box), false);
1725
else if (usage & PIPE_MAP_READ)
1726
/* if the map region intersects with any clears then we have to apply them */
1727
zink_fb_clears_apply_region(ctx, pres, zink_rect_from_box(box));
1728
/* staging path: tiled or device-local images cannot be mapped directly */
if (res->optimal_tiling || !res->obj->host_visible) {
1729
enum pipe_format format = pres->format;
1730
if (usage & PIPE_MAP_DEPTH_ONLY)
1731
format = util_format_get_depth_only(pres->format);
1732
else if (usage & PIPE_MAP_STENCIL_ONLY)
1733
format = PIPE_FORMAT_S8_UINT;
1734
trans->base.b.stride = util_format_get_stride(format, box->width);
1735
trans->base.b.layer_stride = util_format_get_2d_size(format,
1736
trans->base.b.stride,
1739
/* linear staging buffer sized for the mapped box */
struct pipe_resource templ = *pres;
1741
templ.format = format;
1742
templ.usage = usage & PIPE_MAP_READ ? PIPE_USAGE_STAGING : PIPE_USAGE_STREAM;
1743
templ.target = PIPE_BUFFER;
1744
templ.bind = PIPE_BIND_LINEAR;
1745
templ.width0 = trans->base.b.layer_stride * box->depth;
1746
templ.height0 = templ.depth0 = 0;
1747
templ.last_level = 0;
1748
templ.array_size = 1;
1751
trans->staging_res = zink_resource_create(pctx->screen, &templ);
1752
if (!trans->staging_res)
1755
struct zink_resource *staging_res = zink_resource(trans->staging_res);
1757
if (usage & PIPE_MAP_READ) {
1758
/* force multi-context sync */
1759
if (zink_resource_usage_is_unflushed_write(res))
1760
zink_resource_usage_wait(ctx, res, ZINK_RESOURCE_ACCESS_WRITE);
1761
zink_transfer_copy_bufimage(ctx, staging_res, res, trans);
1762
/* need to wait for rendering to finish */
1763
zink_fence_wait(pctx);
1766
ptr = map_resource(screen, staging_res);
1768
/* direct-map path: linear, host-visible image */
assert(!res->optimal_tiling);
1769
ptr = map_resource(screen, res);
1772
if (zink_resource_has_usage(res)) {
1773
if (usage & PIPE_MAP_WRITE)
1774
zink_fence_wait(pctx);
1776
zink_resource_usage_wait(ctx, res, ZINK_RESOURCE_ACCESS_WRITE);
1778
VkImageSubresource isr = {
1779
res->modifiers ? res->obj->modifier_aspect : res->aspect,
1783
VkSubresourceLayout srl;
1784
VKSCR(GetImageSubresourceLayout)(screen->dev, res->obj->image, &isr, &srl);
1785
trans->base.b.stride = srl.rowPitch;
1786
if (res->base.b.target == PIPE_TEXTURE_3D)
1787
trans->base.b.layer_stride = srl.depthPitch;
1789
trans->base.b.layer_stride = srl.arrayPitch;
1790
trans->offset = srl.offset;
1791
trans->depthPitch = srl.depthPitch;
1792
const struct util_format_description *desc = util_format_description(res->base.b.format);
1793
/* byte offset of the box origin within the subresource, in block units */
unsigned offset = srl.offset +
1794
box->z * srl.depthPitch +
1795
(box->y / desc->block.height) * srl.rowPitch +
1796
(box->x / desc->block.width) * (desc->block.bits / 8);
1797
if (!res->obj->coherent) {
1798
VkDeviceSize size = (VkDeviceSize)box->width * box->height * desc->block.bits / 8;
1799
VkMappedMemoryRange range = zink_resource_init_mem_range(screen, res->obj, res->obj->offset + offset, size);
1800
if (VKSCR(FlushMappedMemoryRanges)(screen->dev, 1, &range) != VK_SUCCESS) {
1801
mesa_loge("ZINK: vkFlushMappedMemoryRanges failed");
1804
ptr = ((uint8_t *)ptr) + offset;
1809
/* 32-bit: address space is scarce, force the map to be temporary */
if (sizeof(void*) == 4)
1810
trans->base.b.usage |= ZINK_MAP_TEMPORARY;
1811
if ((usage & PIPE_MAP_PERSISTENT) && !(usage & PIPE_MAP_COHERENT))
1812
res->obj->persistent_maps++;
1814
*transfer = &trans->base.b;
1818
destroy_transfer(ctx, trans);
1823
/* pipe_context::transfer_flush_region hook: for write maps, flush the
 * CPU-written bytes (vkFlushMappedMemoryRanges for non-coherent memory)
 * and, when a staging resource was used, schedule the staging→real copy.
 * NOTE(review): truncated extraction — the else-arm of the staging
 * ternary, some braces and a return-type line are missing. */
zink_transfer_flush_region(struct pipe_context *pctx,
1824
struct pipe_transfer *ptrans,
1825
const struct pipe_box *box)
1827
struct zink_context *ctx = zink_context(pctx);
1828
struct zink_resource *res = zink_resource(ptrans->resource);
1829
struct zink_transfer *trans = (struct zink_transfer *)ptrans;
1831
if (trans->base.b.usage & PIPE_MAP_WRITE) {
1832
struct zink_screen *screen = zink_screen(pctx->screen);
1833
/* m = the mapped object: staging resource if present, else res itself */
struct zink_resource *m = trans->staging_res ? zink_resource(trans->staging_res) :
1835
ASSERTED VkDeviceSize size, src_offset, dst_offset = 0;
1836
if (m->obj->is_buffer) {
1838
src_offset = box->x + (trans->staging_res ? trans->offset : ptrans->box.x);
1839
dst_offset = box->x + ptrans->box.x;
1841
size = (VkDeviceSize)box->width * box->height * util_format_get_blocksize(m->base.b.format);
1842
src_offset = trans->offset +
1843
box->z * trans->depthPitch +
1844
util_format_get_2d_size(m->base.b.format, trans->base.b.stride, box->y) +
1845
util_format_get_stride(m->base.b.format, box->x);
1846
assert(src_offset + size <= res->obj->size);
1848
if (!m->obj->coherent) {
1849
VkMappedMemoryRange range = zink_resource_init_mem_range(screen, m->obj, m->obj->offset, m->obj->size);
1850
if (VKSCR(FlushMappedMemoryRanges)(screen->dev, 1, &range) != VK_SUCCESS) {
1851
mesa_loge("ZINK: vkFlushMappedMemoryRanges failed");
1854
if (trans->staging_res) {
1855
struct zink_resource *staging_res = zink_resource(trans->staging_res);
1857
if (ptrans->resource->target == PIPE_BUFFER)
1858
zink_copy_buffer(ctx, res, staging_res, dst_offset, src_offset, size);
1860
zink_transfer_copy_bufimage(ctx, res, staging_res, trans);
1866
/* Common unmap tail shared by buffer and image unmap: implicit flush of
 * the whole mapped box (unless FLUSH_EXPLICIT/COHERENT), persistent-map
 * accounting, dropping the resource refs, and freeing the transfer. */
transfer_unmap(struct pipe_context *pctx, struct pipe_transfer *ptrans)
1868
struct zink_context *ctx = zink_context(pctx);
1869
struct zink_resource *res = zink_resource(ptrans->resource);
1870
struct zink_transfer *trans = (struct zink_transfer *)ptrans;
1872
if (!(trans->base.b.usage & (PIPE_MAP_FLUSH_EXPLICIT | PIPE_MAP_COHERENT))) {
1873
/* flush_region is relative to the mapped region: use only the extents */
1874
struct pipe_box box = ptrans->box;
1875
box.x = box.y = box.z = 0;
1876
zink_transfer_flush_region(pctx, ptrans, &box);
1879
if ((trans->base.b.usage & PIPE_MAP_PERSISTENT) && !(trans->base.b.usage & PIPE_MAP_COHERENT))
1880
res->obj->persistent_maps--;
1882
if (trans->staging_res)
1883
pipe_resource_reference(&trans->staging_res, NULL);
1884
pipe_resource_reference(&trans->base.b.resource, NULL);
1886
destroy_transfer(ctx, trans);
1890
/* Unmap whichever resource the transfer actually mapped: the staging
 * resource when one exists, otherwise the real resource.
 * NOTE(review): the null-check line between the two assignments is
 * missing from this truncated view. */
do_transfer_unmap(struct zink_screen *screen, struct zink_transfer *trans)
1892
struct zink_resource *res = zink_resource(trans->staging_res);
1894
res = zink_resource(trans->base.b.resource);
1895
unmap_resource(screen, res);
1899
/* pipe_context::buffer_unmap hook: MAP_ONCE direct maps are actually
 * unmapped; everything else keeps the BO mapped and just runs the common
 * unmap tail (flush + refcount bookkeeping). */
zink_buffer_unmap(struct pipe_context *pctx, struct pipe_transfer *ptrans)
1901
struct zink_screen *screen = zink_screen(pctx->screen);
1902
struct zink_transfer *trans = (struct zink_transfer *)ptrans;
1903
if (trans->base.b.usage & PIPE_MAP_ONCE && !trans->staging_res)
1904
do_transfer_unmap(screen, trans);
1905
transfer_unmap(pctx, ptrans);
1909
/* pipe_context::texture_unmap hook: on 32-bit builds (where image maps
 * were marked temporary to conserve address space) really unmap, then run
 * the common unmap tail. */
zink_image_unmap(struct pipe_context *pctx, struct pipe_transfer *ptrans)
1911
struct zink_screen *screen = zink_screen(pctx->screen);
1912
struct zink_transfer *trans = (struct zink_transfer *)ptrans;
1913
if (sizeof(void*) == 4)
1914
do_transfer_unmap(screen, trans);
1915
transfer_unmap(pctx, ptrans);
1919
/* pipe_context::buffer_subdata hook: convenience write implemented as
 * map (forcing WRITE and, unless DIRECTLY, DISCARD_RANGE) + memcpy +
 * unmap.
 * NOTE(review): the map-failure early return is missing from this
 * truncated view. */
zink_buffer_subdata(struct pipe_context *ctx, struct pipe_resource *buffer,
1920
unsigned usage, unsigned offset, unsigned size, const void *data)
1922
struct pipe_transfer *transfer = NULL;
1923
struct pipe_box box;
1924
uint8_t *map = NULL;
1926
usage |= PIPE_MAP_WRITE;
1928
if (!(usage & PIPE_MAP_DIRECTLY))
1929
usage |= PIPE_MAP_DISCARD_RANGE;
1931
u_box_1d(offset, size, &box);
1932
map = zink_buffer_map(ctx, buffer, 0, usage, &box, &transfer);
1936
memcpy(map, data, size);
1937
zink_buffer_unmap(ctx, transfer);
1940
static struct pipe_resource *
1941
zink_resource_get_separate_stencil(struct pipe_resource *pres)
1943
/* For packed depth-stencil, we treat depth as the primary resource
1944
* and store S8 as the "second plane" resource.
1946
if (pres->next && pres->next->format == PIPE_FORMAT_S8_UINT)
1954
/* Ensure a resource can be used as a shader image (storage): if the
 * PIPE_BIND_SHADER_IMAGE cap is missing, rebuild the backing via
 * add_resource_bind and rebind the resource in the context.
 * NOTE(review): truncated extraction — the return-type line, early
 * return, and the failure check on `ret` are missing from this view. */
zink_resource_object_init_storage(struct zink_context *ctx, struct zink_resource *res)
1956
/* base resource already has the cap */
1957
if (res->base.b.bind & PIPE_BIND_SHADER_IMAGE)
1959
if (res->obj->is_buffer) {
1960
unreachable("zink: all buffers should have this bit");
1963
/* swapchain images can't be rebuilt like this */
assert(!res->obj->dt);
1964
/* pending clears must land before the old backing is copied out */
zink_fb_clears_apply_region(ctx, &res->base.b, (struct u_rect){0, res->base.b.width0, 0, res->base.b.height0});
1965
bool ret = add_resource_bind(ctx, res, PIPE_BIND_SHADER_IMAGE);
1967
zink_resource_rebind(ctx, res);
1973
zink_resource_setup_transfer_layouts(struct zink_context *ctx, struct zink_resource *src, struct zink_resource *dst)
1976
/* The Vulkan 1.1 specification says the following about valid usage
1977
* of vkCmdBlitImage:
1979
* "srcImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
1980
* VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL"
1984
* "dstImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
1985
* VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL"
1987
* Since we cant have the same image in two states at the same time,
1988
* we're effectively left with VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR or
1989
* VK_IMAGE_LAYOUT_GENERAL. And since this isn't a present-related
1990
* operation, VK_IMAGE_LAYOUT_GENERAL seems most appropriate.
1992
zink_resource_image_barrier(ctx, src,
1993
VK_IMAGE_LAYOUT_GENERAL,
1994
VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_TRANSFER_WRITE_BIT,
1995
VK_PIPELINE_STAGE_TRANSFER_BIT);
1997
zink_resource_image_barrier(ctx, src,
1998
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1999
VK_ACCESS_TRANSFER_READ_BIT,
2000
VK_PIPELINE_STAGE_TRANSFER_BIT);
2002
zink_resource_image_barrier(ctx, dst,
2003
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2004
VK_ACCESS_TRANSFER_WRITE_BIT,
2005
VK_PIPELINE_STAGE_TRANSFER_BIT);
2010
zink_get_depth_stencil_resources(struct pipe_resource *res,
2011
struct zink_resource **out_z,
2012
struct zink_resource **out_s)
2015
if (out_z) *out_z = NULL;
2016
if (out_s) *out_s = NULL;
2020
if (res->format != PIPE_FORMAT_S8_UINT) {
2021
if (out_z) *out_z = zink_resource(res);
2022
if (out_s) *out_s = zink_resource(zink_resource_get_separate_stencil(res));
2024
if (out_z) *out_z = NULL;
2025
if (out_s) *out_s = zink_resource(res);
2030
zink_resource_set_separate_stencil(struct pipe_resource *pres,
2031
struct pipe_resource *stencil)
2033
assert(util_format_has_depth(util_format_description(pres->format)));
2034
pipe_resource_reference(&pres->next, stencil);
2037
static enum pipe_format
2038
zink_resource_get_internal_format(struct pipe_resource *pres)
2040
struct zink_resource *res = zink_resource(pres);
2041
return res->internal_format;
2044
static const struct u_transfer_vtbl transfer_vtbl = {
2045
.resource_create = zink_resource_create,
2046
.resource_destroy = zink_resource_destroy,
2047
.transfer_map = zink_image_map,
2048
.transfer_unmap = zink_image_unmap,
2049
.transfer_flush_region = zink_transfer_flush_region,
2050
.get_internal_format = zink_resource_get_internal_format,
2051
.set_stencil = zink_resource_set_separate_stencil,
2052
.get_stencil = zink_resource_get_separate_stencil,
2056
zink_screen_resource_init(struct pipe_screen *pscreen)
2058
struct zink_screen *screen = zink_screen(pscreen);
2059
pscreen->resource_create = zink_resource_create;
2060
pscreen->resource_create_with_modifiers = zink_resource_create_with_modifiers;
2061
pscreen->resource_create_drawable = zink_resource_create_drawable;
2062
pscreen->resource_destroy = zink_resource_destroy;
2063
pscreen->transfer_helper = u_transfer_helper_create(&transfer_vtbl, true, true, false, false, !screen->have_D24_UNORM_S8_UINT);
2065
if (screen->info.have_KHR_external_memory_fd) {
2066
pscreen->resource_get_handle = zink_resource_get_handle;
2067
pscreen->resource_from_handle = zink_resource_from_handle;
2069
if (screen->instance_info.have_KHR_external_memory_capabilities) {
2070
pscreen->memobj_create_from_handle = zink_memobj_create_from_handle;
2071
pscreen->memobj_destroy = zink_memobj_destroy;
2072
pscreen->resource_from_memobj = zink_resource_from_memobj;
2074
pscreen->resource_get_param = zink_resource_get_param;
2079
zink_context_resource_init(struct pipe_context *pctx)
2081
pctx->buffer_map = zink_buffer_map;
2082
pctx->buffer_unmap = zink_buffer_unmap;
2083
pctx->texture_map = u_transfer_helper_deinterleave_transfer_map;
2084
pctx->texture_unmap = u_transfer_helper_deinterleave_transfer_unmap;
2086
pctx->transfer_flush_region = u_transfer_helper_transfer_flush_region;
2087
pctx->buffer_subdata = zink_buffer_subdata;
2088
pctx->texture_subdata = u_default_texture_subdata;
2089
pctx->invalidate_resource = zink_resource_invalidate;