2
* Copyright © 2019 Raspberry Pi Ltd
4
* Permission is hereby granted, free of charge, to any person obtaining a
5
* copy of this software and associated documentation files (the "Software"),
6
* to deal in the Software without restriction, including without limitation
7
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
8
* and/or sell copies of the Software, and to permit persons to whom the
9
* Software is furnished to do so, subject to the following conditions:
11
* The above copyright notice and this permission notice (including the next
12
* paragraph) shall be included in all copies or substantial portions of the
15
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
29
#include <sys/sysinfo.h>
34
#include <sys/mkdev.h>
36
#ifdef MAJOR_IN_SYSMACROS
37
#include <sys/sysmacros.h>
40
#include "v3dv_private.h"
42
#include "common/v3d_debug.h"
44
#include "compiler/v3d_compiler.h"
46
#include "drm-uapi/v3d_drm.h"
47
#include "format/u_format.h"
48
#include "vk_drm_syncobj.h"
52
#include "util/build_id.h"
53
#include "util/debug.h"
54
#include "util/u_cpu_detect.h"
56
#ifdef VK_USE_PLATFORM_XCB_KHR
59
#include <X11/Xlib-xcb.h>
62
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
63
#include <wayland-client.h>
64
#include "wayland-drm-client-protocol.h"
67
#ifdef USE_V3D_SIMULATOR
68
#include "drm-uapi/i915_drm.h"
71
#define V3DV_API_VERSION VK_MAKE_VERSION(1, 1, VK_HEADER_VERSION)
73
VKAPI_ATTR VkResult VKAPI_CALL
74
v3dv_EnumerateInstanceVersion(uint32_t *pApiVersion)
76
*pApiVersion = V3DV_API_VERSION;
80
#if defined(VK_USE_PLATFORM_WIN32_KHR) || \
81
defined(VK_USE_PLATFORM_WAYLAND_KHR) || \
82
defined(VK_USE_PLATFORM_XCB_KHR) || \
83
defined(VK_USE_PLATFORM_XLIB_KHR) || \
84
defined(VK_USE_PLATFORM_DISPLAY_KHR)
85
#define V3DV_USE_WSI_PLATFORM
88
static const struct vk_instance_extension_table instance_extensions = {
89
.KHR_device_group_creation = true,
90
#ifdef VK_USE_PLATFORM_DISPLAY_KHR
92
.KHR_get_display_properties2 = true,
94
.KHR_external_fence_capabilities = true,
95
.KHR_external_memory_capabilities = true,
96
.KHR_external_semaphore_capabilities = true,
97
.KHR_get_physical_device_properties2 = true,
98
#ifdef V3DV_USE_WSI_PLATFORM
99
.KHR_get_surface_capabilities2 = true,
101
.KHR_surface_protected_capabilities = true,
103
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
104
.KHR_wayland_surface = true,
106
#ifdef VK_USE_PLATFORM_XCB_KHR
107
.KHR_xcb_surface = true,
109
#ifdef VK_USE_PLATFORM_XLIB_KHR
110
.KHR_xlib_surface = true,
112
.EXT_debug_report = true,
113
.EXT_debug_utils = true,
117
get_device_extensions(const struct v3dv_physical_device *device,
118
struct vk_device_extension_table *ext)
120
*ext = (struct vk_device_extension_table) {
121
.KHR_8bit_storage = true,
122
.KHR_16bit_storage = true,
123
.KHR_bind_memory2 = true,
124
.KHR_copy_commands2 = true,
125
.KHR_create_renderpass2 = true,
126
.KHR_dedicated_allocation = true,
127
.KHR_device_group = true,
128
.KHR_driver_properties = true,
129
.KHR_descriptor_update_template = true,
130
.KHR_depth_stencil_resolve = true,
131
.KHR_external_fence = true,
132
.KHR_external_fence_fd = true,
133
.KHR_external_memory = true,
134
.KHR_external_memory_fd = true,
135
.KHR_external_semaphore = true,
136
.KHR_external_semaphore_fd = true,
137
.KHR_get_memory_requirements2 = true,
138
.KHR_image_format_list = true,
139
.KHR_imageless_framebuffer = true,
140
.KHR_relaxed_block_layout = true,
141
.KHR_maintenance1 = true,
142
.KHR_maintenance2 = true,
143
.KHR_maintenance3 = true,
144
.KHR_multiview = true,
145
.KHR_shader_non_semantic_info = true,
146
.KHR_sampler_mirror_clamp_to_edge = true,
147
.KHR_storage_buffer_storage_class = true,
148
.KHR_timeline_semaphore = true,
149
.KHR_uniform_buffer_standard_layout = true,
150
#ifdef V3DV_USE_WSI_PLATFORM
151
.KHR_swapchain = true,
152
.KHR_swapchain_mutable_format = true,
153
.KHR_incremental_present = true,
155
.KHR_variable_pointers = true,
156
.EXT_4444_formats = true,
157
.EXT_color_write_enable = true,
158
.EXT_custom_border_color = true,
159
.EXT_inline_uniform_block = true,
160
.EXT_external_memory_dma_buf = true,
161
.EXT_host_query_reset = true,
162
.EXT_image_drm_format_modifier = true,
163
.EXT_index_type_uint8 = true,
164
.EXT_line_rasterization = true,
165
.EXT_physical_device_drm = true,
166
.EXT_pipeline_creation_cache_control = true,
167
.EXT_pipeline_creation_feedback = true,
168
.EXT_private_data = true,
169
.EXT_provoking_vertex = true,
170
.EXT_vertex_attribute_divisor = true,
172
.ANDROID_native_buffer = true,
177
VKAPI_ATTR VkResult VKAPI_CALL
178
v3dv_EnumerateInstanceExtensionProperties(const char *pLayerName,
179
uint32_t *pPropertyCount,
180
VkExtensionProperties *pProperties)
182
/* We don't support any layers */
184
return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
186
return vk_enumerate_instance_extension_properties(
187
&instance_extensions, pPropertyCount, pProperties);
190
VKAPI_ATTR VkResult VKAPI_CALL
191
v3dv_CreateInstance(const VkInstanceCreateInfo *pCreateInfo,
192
const VkAllocationCallbacks *pAllocator,
193
VkInstance *pInstance)
195
struct v3dv_instance *instance;
198
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);
200
if (pAllocator == NULL)
201
pAllocator = vk_default_allocator();
203
instance = vk_alloc(pAllocator, sizeof(*instance), 8,
204
VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
206
return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);
208
struct vk_instance_dispatch_table dispatch_table;
209
vk_instance_dispatch_table_from_entrypoints(
210
&dispatch_table, &v3dv_instance_entrypoints, true);
211
vk_instance_dispatch_table_from_entrypoints(
212
&dispatch_table, &wsi_instance_entrypoints, false);
214
result = vk_instance_init(&instance->vk,
215
&instance_extensions,
217
pCreateInfo, pAllocator);
219
if (result != VK_SUCCESS) {
220
vk_free(pAllocator, instance);
221
return vk_error(NULL, result);
224
v3d_process_debug_variable();
226
instance->physicalDeviceCount = -1;
228
/* We start with the default values for the pipeline_cache envvars */
229
instance->pipeline_cache_enabled = true;
230
instance->default_pipeline_cache_enabled = true;
231
const char *pipeline_cache_str = getenv("V3DV_ENABLE_PIPELINE_CACHE");
232
if (pipeline_cache_str != NULL) {
233
if (strncmp(pipeline_cache_str, "full", 4) == 0) {
234
/* nothing to do, just to filter correct values */
235
} else if (strncmp(pipeline_cache_str, "no-default-cache", 16) == 0) {
236
instance->default_pipeline_cache_enabled = false;
237
} else if (strncmp(pipeline_cache_str, "off", 3) == 0) {
238
instance->pipeline_cache_enabled = false;
239
instance->default_pipeline_cache_enabled = false;
241
fprintf(stderr, "Wrong value for envvar V3DV_ENABLE_PIPELINE_CACHE. "
242
"Allowed values are: full, no-default-cache, off\n");
246
if (instance->pipeline_cache_enabled == false) {
247
fprintf(stderr, "WARNING: v3dv pipeline cache is disabled. Performance "
248
"can be affected negatively\n");
250
if (instance->default_pipeline_cache_enabled == false) {
251
fprintf(stderr, "WARNING: default v3dv pipeline cache is disabled. "
252
"Performance can be affected negatively\n");
258
VG(VALGRIND_CREATE_MEMPOOL(instance, 0, false));
260
*pInstance = v3dv_instance_to_handle(instance);
266
v3dv_physical_device_free_disk_cache(struct v3dv_physical_device *device)
268
#ifdef ENABLE_SHADER_CACHE
269
if (device->disk_cache)
270
disk_cache_destroy(device->disk_cache);
272
assert(device->disk_cache == NULL);
277
physical_device_finish(struct v3dv_physical_device *device)
279
v3dv_wsi_finish(device);
280
v3dv_physical_device_free_disk_cache(device);
281
v3d_compiler_free(device->compiler);
283
util_sparse_array_finish(&device->bo_map);
285
close(device->render_fd);
286
if (device->display_fd >= 0)
287
close(device->display_fd);
288
if (device->master_fd >= 0)
289
close(device->master_fd);
293
#if using_v3d_simulator
294
v3d_simulator_destroy(device->sim_file);
297
vk_physical_device_finish(&device->vk);
298
mtx_destroy(&device->mutex);
301
VKAPI_ATTR void VKAPI_CALL
302
v3dv_DestroyInstance(VkInstance _instance,
303
const VkAllocationCallbacks *pAllocator)
305
V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
310
if (instance->physicalDeviceCount > 0) {
311
/* We support at most one physical device. */
312
assert(instance->physicalDeviceCount == 1);
313
physical_device_finish(&instance->physicalDevice);
316
VG(VALGRIND_DESTROY_MEMPOOL(instance));
318
vk_instance_finish(&instance->vk);
319
vk_free(&instance->vk.alloc, instance);
325
#if !using_v3d_simulator
326
/* Query the total ram from the system */
330
uint64_t total_ram = (uint64_t)info.totalram * (uint64_t)info.mem_unit;
332
uint64_t total_ram = (uint64_t) v3d_simulator_get_mem_size();
335
/* We don't want to burn too much ram with the GPU. If the user has 4GiB
336
* or less, we use at most half. If they have more than 4GiB, we use 3/4.
338
uint64_t available_ram;
339
if (total_ram <= 4ull * 1024ull * 1024ull * 1024ull)
340
available_ram = total_ram / 2;
342
available_ram = total_ram * 3 / 4;
344
return available_ram;
347
#if !using_v3d_simulator
348
#ifdef VK_USE_PLATFORM_XCB_KHR
350
create_display_fd_xcb(VkIcdSurfaceBase *surface)
354
xcb_connection_t *conn;
355
xcb_dri3_open_reply_t *reply = NULL;
357
if (surface->platform == VK_ICD_WSI_PLATFORM_XLIB)
358
conn = XGetXCBConnection(((VkIcdSurfaceXlib *)surface)->dpy);
360
conn = ((VkIcdSurfaceXcb *)surface)->connection;
362
conn = xcb_connect(NULL, NULL);
365
if (xcb_connection_has_error(conn))
368
const xcb_setup_t *setup = xcb_get_setup(conn);
369
xcb_screen_iterator_t iter = xcb_setup_roots_iterator(setup);
370
xcb_screen_t *screen = iter.data;
372
xcb_dri3_open_cookie_t cookie;
373
cookie = xcb_dri3_open(conn, screen->root, None);
374
reply = xcb_dri3_open_reply(conn, cookie, NULL);
381
fd = xcb_dri3_open_reply_fds(conn, reply)[0];
382
fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);
386
xcb_disconnect(conn);
394
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
395
struct v3dv_wayland_info {
396
struct wl_drm *wl_drm;
403
v3dv_drm_handle_device(void *data, struct wl_drm *drm, const char *device)
405
struct v3dv_wayland_info *info = data;
406
info->fd = open(device, O_RDWR | O_CLOEXEC);
407
info->is_set = info->fd != -1;
409
fprintf(stderr, "v3dv_drm_handle_device: could not open %s (%s)\n",
410
device, strerror(errno));
415
if (drmGetMagic(info->fd, &magic)) {
416
fprintf(stderr, "v3dv_drm_handle_device: drmGetMagic failed\n");
419
info->is_set = false;
422
wl_drm_authenticate(info->wl_drm, magic);
426
v3dv_drm_handle_format(void *data, struct wl_drm *drm, uint32_t format)
431
v3dv_drm_handle_authenticated(void *data, struct wl_drm *drm)
433
struct v3dv_wayland_info *info = data;
434
info->authenticated = true;
438
v3dv_drm_handle_capabilities(void *data, struct wl_drm *drm, uint32_t value)
442
struct wl_drm_listener v3dv_drm_listener = {
443
.device = v3dv_drm_handle_device,
444
.format = v3dv_drm_handle_format,
445
.authenticated = v3dv_drm_handle_authenticated,
446
.capabilities = v3dv_drm_handle_capabilities
450
v3dv_registry_global(void *data,
451
struct wl_registry *registry,
453
const char *interface,
456
struct v3dv_wayland_info *info = data;
457
if (strcmp(interface, "wl_drm") == 0) {
458
info->wl_drm = wl_registry_bind(registry, name, &wl_drm_interface,
460
wl_drm_add_listener(info->wl_drm, &v3dv_drm_listener, data);
465
v3dv_registry_global_remove_cb(void *data,
466
struct wl_registry *registry,
472
create_display_fd_wayland(VkIcdSurfaceBase *surface)
474
struct wl_display *display;
475
struct wl_registry *registry = NULL;
477
struct v3dv_wayland_info info = {
481
.authenticated = false
485
display = ((VkIcdSurfaceWayland *) surface)->display;
487
display = wl_display_connect(NULL);
492
registry = wl_display_get_registry(display);
495
wl_display_disconnect(display);
499
static const struct wl_registry_listener registry_listener = {
500
v3dv_registry_global,
501
v3dv_registry_global_remove_cb
503
wl_registry_add_listener(registry, ®istry_listener, &info);
505
wl_display_roundtrip(display); /* For the registry advertisement */
506
wl_display_roundtrip(display); /* For the DRM device event */
507
wl_display_roundtrip(display); /* For the authentication event */
509
wl_drm_destroy(info.wl_drm);
510
wl_registry_destroy(registry);
513
wl_display_disconnect(display);
518
if (!info.authenticated)
525
/* Acquire an authenticated display fd without a surface reference. This is the
526
* case where the application is making WSI allocations outside the Vulkan
527
* swapchain context (only Zink, for now). Since we lack information about the
528
* underlying surface we just try our best to figure out the correct display
529
* and platform to use. It should work in most cases.
532
acquire_display_device_no_surface(struct v3dv_instance *instance,
533
struct v3dv_physical_device *pdevice)
535
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
536
pdevice->display_fd = create_display_fd_wayland(NULL);
539
#ifdef VK_USE_PLATFORM_XCB_KHR
540
if (pdevice->display_fd == -1)
541
pdevice->display_fd = create_display_fd_xcb(NULL);
544
#ifdef VK_USE_PLATFORM_DISPLAY_KHR
545
if (pdevice->display_fd == - 1 && pdevice->master_fd >= 0)
546
pdevice->display_fd = dup(pdevice->master_fd);
550
/* Acquire an authenticated display fd from the surface. This is the regular
551
* case where the application is using swapchains to create WSI allocations.
552
* In this case we use the surface information to figure out the correct
553
* display and platform combination.
556
acquire_display_device_surface(struct v3dv_instance *instance,
557
struct v3dv_physical_device *pdevice,
558
VkIcdSurfaceBase *surface)
560
/* Mesa will set both of VK_USE_PLATFORM_{XCB,XLIB} when building with
561
* platform X11, so only check for XCB and rely on XCB to get an
562
* authenticated device also for Xlib.
564
#ifdef VK_USE_PLATFORM_XCB_KHR
565
if (surface->platform == VK_ICD_WSI_PLATFORM_XCB ||
566
surface->platform == VK_ICD_WSI_PLATFORM_XLIB) {
567
pdevice->display_fd = create_display_fd_xcb(surface);
571
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
572
if (surface->platform == VK_ICD_WSI_PLATFORM_WAYLAND)
573
pdevice->display_fd = create_display_fd_wayland(surface);
576
#ifdef VK_USE_PLATFORM_DISPLAY_KHR
577
if (surface->platform == VK_ICD_WSI_PLATFORM_DISPLAY &&
578
pdevice->master_fd >= 0) {
579
pdevice->display_fd = dup(pdevice->master_fd);
583
#endif /* !using_v3d_simulator */
585
/* Attempts to get an authenticated display fd from the display server that
586
* we can use to allocate BOs for presentable images.
589
v3dv_physical_device_acquire_display(struct v3dv_instance *instance,
590
struct v3dv_physical_device *pdevice,
591
VkIcdSurfaceBase *surface)
593
VkResult result = VK_SUCCESS;
594
mtx_lock(&pdevice->mutex);
596
if (pdevice->display_fd != -1)
599
/* When running on the simulator we do everything on a single render node so
600
* we don't need to get an authenticated display fd from the display server.
602
#if !using_v3d_simulator
604
acquire_display_device_surface(instance, pdevice, surface);
606
acquire_display_device_no_surface(instance, pdevice);
608
if (pdevice->display_fd == -1)
609
result = VK_ERROR_INITIALIZATION_FAILED;
613
mtx_unlock(&pdevice->mutex);
618
v3d_has_feature(struct v3dv_physical_device *device, enum drm_v3d_param feature)
620
struct drm_v3d_get_param p = {
623
if (v3dv_ioctl(device->render_fd, DRM_IOCTL_V3D_GET_PARAM, &p) != 0)
629
device_has_expected_features(struct v3dv_physical_device *device)
631
return v3d_has_feature(device, DRM_V3D_PARAM_SUPPORTS_TFU) &&
632
v3d_has_feature(device, DRM_V3D_PARAM_SUPPORTS_CSD) &&
633
v3d_has_feature(device, DRM_V3D_PARAM_SUPPORTS_CACHE_FLUSH);
638
init_uuids(struct v3dv_physical_device *device)
640
const struct build_id_note *note =
641
build_id_find_nhdr_for_addr(init_uuids);
643
return vk_errorf(device->vk.instance,
644
VK_ERROR_INITIALIZATION_FAILED,
645
"Failed to find build-id");
648
unsigned build_id_len = build_id_length(note);
649
if (build_id_len < 20) {
650
return vk_errorf(device->vk.instance,
651
VK_ERROR_INITIALIZATION_FAILED,
652
"build-id too short. It needs to be a SHA");
655
memcpy(device->driver_build_sha1, build_id_data(note), 20);
657
uint32_t vendor_id = v3dv_physical_device_vendor_id(device);
658
uint32_t device_id = v3dv_physical_device_device_id(device);
660
struct mesa_sha1 sha1_ctx;
662
STATIC_ASSERT(VK_UUID_SIZE <= sizeof(sha1));
664
/* The pipeline cache UUID is used for determining when a pipeline cache is
665
* invalid. It needs both a driver build and the PCI ID of the device.
667
_mesa_sha1_init(&sha1_ctx);
668
_mesa_sha1_update(&sha1_ctx, build_id_data(note), build_id_len);
669
_mesa_sha1_update(&sha1_ctx, &device_id, sizeof(device_id));
670
_mesa_sha1_final(&sha1_ctx, sha1);
671
memcpy(device->pipeline_cache_uuid, sha1, VK_UUID_SIZE);
673
/* The driver UUID is used for determining sharability of images and memory
674
* between two Vulkan instances in separate processes. People who want to
675
* share memory need to also check the device UUID (below) so all this
676
* needs to be is the build-id.
678
memcpy(device->driver_uuid, build_id_data(note), VK_UUID_SIZE);
680
/* The device UUID uniquely identifies the given device within the machine.
681
* Since we never have more than one device, this doesn't need to be a real
684
_mesa_sha1_init(&sha1_ctx);
685
_mesa_sha1_update(&sha1_ctx, &vendor_id, sizeof(vendor_id));
686
_mesa_sha1_update(&sha1_ctx, &device_id, sizeof(device_id));
687
_mesa_sha1_final(&sha1_ctx, sha1);
688
memcpy(device->device_uuid, sha1, VK_UUID_SIZE);
694
v3dv_physical_device_init_disk_cache(struct v3dv_physical_device *device)
696
#ifdef ENABLE_SHADER_CACHE
698
_mesa_sha1_format(timestamp, device->driver_build_sha1);
700
assert(device->name);
701
device->disk_cache = disk_cache_create(device->name, timestamp, 0);
703
device->disk_cache = NULL;
708
physical_device_init(struct v3dv_physical_device *device,
709
struct v3dv_instance *instance,
710
drmDevicePtr drm_render_device,
711
drmDevicePtr drm_primary_device)
713
VkResult result = VK_SUCCESS;
714
int32_t master_fd = -1;
715
int32_t render_fd = -1;
717
struct vk_physical_device_dispatch_table dispatch_table;
718
vk_physical_device_dispatch_table_from_entrypoints
719
(&dispatch_table, &v3dv_physical_device_entrypoints, true);
720
vk_physical_device_dispatch_table_from_entrypoints(
721
&dispatch_table, &wsi_physical_device_entrypoints, false);
723
result = vk_physical_device_init(&device->vk, &instance->vk, NULL,
726
if (result != VK_SUCCESS)
729
assert(drm_render_device);
730
const char *path = drm_render_device->nodes[DRM_NODE_RENDER];
731
render_fd = open(path, O_RDWR | O_CLOEXEC);
733
fprintf(stderr, "Opening %s failed: %s\n", path, strerror(errno));
734
result = VK_ERROR_INCOMPATIBLE_DRIVER;
738
/* If we are running on VK_KHR_display we need to acquire the master
739
* display device now for the v3dv_wsi_init() call below. For anything else
740
* we postpone that until a swapchain is created.
743
const char *primary_path;
744
#if !using_v3d_simulator
745
if (drm_primary_device)
746
primary_path = drm_primary_device->nodes[DRM_NODE_PRIMARY];
750
primary_path = drm_render_device->nodes[DRM_NODE_PRIMARY];
753
struct stat primary_stat = {0}, render_stat = {0};
755
device->has_primary = primary_path;
756
if (device->has_primary) {
757
if (stat(primary_path, &primary_stat) != 0) {
758
result = vk_errorf(instance, VK_ERROR_INITIALIZATION_FAILED,
759
"failed to stat DRM primary node %s",
764
device->primary_devid = primary_stat.st_rdev;
767
if (fstat(render_fd, &render_stat) != 0) {
768
result = vk_errorf(instance, VK_ERROR_INITIALIZATION_FAILED,
769
"failed to stat DRM render node %s",
773
device->has_render = true;
774
device->render_devid = render_stat.st_rdev;
776
if (instance->vk.enabled_extensions.KHR_display) {
777
#if !using_v3d_simulator
778
/* Open the primary node on the vc4 display device */
779
assert(drm_primary_device);
780
master_fd = open(primary_path, O_RDWR | O_CLOEXEC);
782
/* There is only one device with primary and render nodes.
783
* Open its primary node.
785
master_fd = open(primary_path, O_RDWR | O_CLOEXEC);
789
#if using_v3d_simulator
790
device->sim_file = v3d_simulator_init(render_fd);
793
device->render_fd = render_fd; /* The v3d render node */
794
device->display_fd = -1; /* Authenticated vc4 primary node */
795
device->master_fd = master_fd; /* Master vc4 primary node */
797
if (!v3d_get_device_info(device->render_fd, &device->devinfo, &v3dv_ioctl)) {
798
result = VK_ERROR_INCOMPATIBLE_DRIVER;
802
if (device->devinfo.ver < 42) {
803
result = VK_ERROR_INCOMPATIBLE_DRIVER;
807
if (!device_has_expected_features(device)) {
808
result = VK_ERROR_INCOMPATIBLE_DRIVER;
812
device->caps.multisync =
813
v3d_has_feature(device, DRM_V3D_PARAM_SUPPORTS_MULTISYNC_EXT);
815
result = init_uuids(device);
816
if (result != VK_SUCCESS)
819
device->compiler = v3d_compiler_init(&device->devinfo,
820
MAX_INLINE_UNIFORM_BUFFERS);
821
device->next_program_id = 0;
824
asprintf(&device->name, "V3D %d.%d",
825
device->devinfo.ver / 10, device->devinfo.ver % 10);
828
v3dv_physical_device_init_disk_cache(device);
830
/* Setup available memory heaps and types */
831
VkPhysicalDeviceMemoryProperties *mem = &device->memory;
832
mem->memoryHeapCount = 1;
833
mem->memoryHeaps[0].size = compute_heap_size();
834
mem->memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
836
/* This is the only combination required by the spec */
837
mem->memoryTypeCount = 1;
838
mem->memoryTypes[0].propertyFlags =
839
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
840
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
841
VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
842
mem->memoryTypes[0].heapIndex = 0;
844
/* Initialize sparse array for refcounting imported BOs */
845
util_sparse_array_init(&device->bo_map, sizeof(struct v3dv_bo), 512);
847
device->options.merge_jobs = getenv("V3DV_NO_MERGE_JOBS") == NULL;
849
device->drm_syncobj_type = vk_drm_syncobj_get_type(device->render_fd);
851
/* We don't support timelines in the uAPI yet and we don't want it getting
852
* suddenly turned on by vk_drm_syncobj_get_type() without us adding v3dv
855
device->drm_syncobj_type.features &= ~VK_SYNC_FEATURE_TIMELINE;
857
/* Sync file export is incompatible with the current model of execution
858
* where some jobs may run on the CPU. There are CTS tests which do the
861
* 1. Create a command buffer with a vkCmdWaitEvents()
862
* 2. Submit the command buffer
863
* 3. vkGetSemaphoreFdKHR() to try to get a sync_file
866
* This deadlocks because we have to wait for the syncobj to get a real
867
* fence in vkGetSemaphoreFdKHR() which only happens after all the work
868
* from the command buffer is complete which only happens after
869
* vkSetEvent(). No amount of CPU threading in userspace will ever fix
870
* this. Sadly, this is pretty explicitly allowed by the Vulkan spec:
872
* VUID-vkCmdWaitEvents-pEvents-01163
874
* "If pEvents includes one or more events that will be signaled by
875
* vkSetEvent after commandBuffer has been submitted to a queue, then
876
* vkCmdWaitEvents must not be called inside a render pass instance"
878
* Disable sync file support for now.
880
device->drm_syncobj_type.import_sync_file = NULL;
881
device->drm_syncobj_type.export_sync_file = NULL;
883
/* Multiwait is required for emulated timeline semaphores and is supported
884
* by the v3d kernel interface.
886
device->drm_syncobj_type.features |= VK_SYNC_FEATURE_GPU_MULTI_WAIT;
888
device->sync_timeline_type =
889
vk_sync_timeline_get_type(&device->drm_syncobj_type);
891
device->sync_types[0] = &device->drm_syncobj_type;
892
device->sync_types[1] = &device->sync_timeline_type.sync;
893
device->sync_types[2] = NULL;
894
device->vk.supported_sync_types = device->sync_types;
896
result = v3dv_wsi_init(device);
897
if (result != VK_SUCCESS) {
898
vk_error(instance, result);
902
get_device_extensions(device, &device->vk.supported_extensions);
904
mtx_init(&device->mutex, mtx_plain);
909
vk_physical_device_finish(&device->vk);
920
enumerate_devices(struct v3dv_instance *instance)
922
/* TODO: Check for more devices? */
923
drmDevicePtr devices[8];
924
VkResult result = VK_ERROR_INCOMPATIBLE_DRIVER;
927
instance->physicalDeviceCount = 0;
929
max_devices = drmGetDevices2(0, devices, ARRAY_SIZE(devices));
931
return VK_ERROR_INCOMPATIBLE_DRIVER;
933
#if !using_v3d_simulator
934
int32_t v3d_idx = -1;
935
int32_t vc4_idx = -1;
937
for (unsigned i = 0; i < (unsigned)max_devices; i++) {
938
#if using_v3d_simulator
939
/* In the simulator, we look for an Intel render node */
940
const int required_nodes = (1 << DRM_NODE_RENDER) | (1 << DRM_NODE_PRIMARY);
941
if ((devices[i]->available_nodes & required_nodes) == required_nodes &&
942
devices[i]->bustype == DRM_BUS_PCI &&
943
devices[i]->deviceinfo.pci->vendor_id == 0x8086) {
944
result = physical_device_init(&instance->physicalDevice, instance,
946
if (result != VK_ERROR_INCOMPATIBLE_DRIVER)
950
/* On actual hardware, we should have a render node (v3d)
951
* and a primary node (vc4). We will need to use the primary
952
* to allocate WSI buffers and share them with the render node
953
* via prime, but that is a privileged operation so we need the
954
* primary node to be authenticated, and for that we need the
955
* display server to provide the device fd (with DRI3), so we
956
* here we only check that the device is present but we don't
959
if (devices[i]->bustype != DRM_BUS_PLATFORM)
962
if (devices[i]->available_nodes & 1 << DRM_NODE_RENDER) {
963
char **compat = devices[i]->deviceinfo.platform->compatible;
965
if (strncmp(*compat, "brcm,2711-v3d", 13) == 0) {
971
} else if (devices[i]->available_nodes & 1 << DRM_NODE_PRIMARY) {
972
char **compat = devices[i]->deviceinfo.platform->compatible;
974
if (strncmp(*compat, "brcm,bcm2711-vc5", 16) == 0 ||
975
strncmp(*compat, "brcm,bcm2835-vc4", 16) == 0 ) {
985
#if !using_v3d_simulator
986
if (v3d_idx == -1 || vc4_idx == -1)
987
result = VK_ERROR_INCOMPATIBLE_DRIVER;
989
result = physical_device_init(&instance->physicalDevice, instance,
990
devices[v3d_idx], devices[vc4_idx]);
993
drmFreeDevices(devices, max_devices);
995
if (result == VK_SUCCESS)
996
instance->physicalDeviceCount = 1;
1002
instance_ensure_physical_device(struct v3dv_instance *instance)
1004
if (instance->physicalDeviceCount < 0) {
1005
VkResult result = enumerate_devices(instance);
1006
if (result != VK_SUCCESS &&
1007
result != VK_ERROR_INCOMPATIBLE_DRIVER)
1014
VKAPI_ATTR VkResult VKAPI_CALL
1015
v3dv_EnumeratePhysicalDevices(VkInstance _instance,
1016
uint32_t *pPhysicalDeviceCount,
1017
VkPhysicalDevice *pPhysicalDevices)
1019
V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
1020
VK_OUTARRAY_MAKE_TYPED(VkPhysicalDevice, out,
1021
pPhysicalDevices, pPhysicalDeviceCount);
1023
VkResult result = instance_ensure_physical_device(instance);
1024
if (result != VK_SUCCESS)
1027
if (instance->physicalDeviceCount == 0)
1030
assert(instance->physicalDeviceCount == 1);
1031
vk_outarray_append_typed(VkPhysicalDevice, &out, i) {
1032
*i = v3dv_physical_device_to_handle(&instance->physicalDevice);
1035
return vk_outarray_status(&out);
1038
VKAPI_ATTR VkResult VKAPI_CALL
1039
v3dv_EnumeratePhysicalDeviceGroups(
1040
VkInstance _instance,
1041
uint32_t *pPhysicalDeviceGroupCount,
1042
VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties)
1044
V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
1045
VK_OUTARRAY_MAKE_TYPED(VkPhysicalDeviceGroupProperties, out,
1046
pPhysicalDeviceGroupProperties,
1047
pPhysicalDeviceGroupCount);
1049
VkResult result = instance_ensure_physical_device(instance);
1050
if (result != VK_SUCCESS)
1053
assert(instance->physicalDeviceCount == 1);
1055
vk_outarray_append_typed(VkPhysicalDeviceGroupProperties, &out, p) {
1056
p->physicalDeviceCount = 1;
1057
memset(p->physicalDevices, 0, sizeof(p->physicalDevices));
1058
p->physicalDevices[0] =
1059
v3dv_physical_device_to_handle(&instance->physicalDevice);
1060
p->subsetAllocation = false;
1062
vk_foreach_struct(ext, p->pNext)
1063
v3dv_debug_ignored_stype(ext->sType);
1066
return vk_outarray_status(&out);
1069
VKAPI_ATTR void VKAPI_CALL
1070
v3dv_GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
1071
VkPhysicalDeviceFeatures *pFeatures)
1073
memset(pFeatures, 0, sizeof(*pFeatures));
1075
*pFeatures = (VkPhysicalDeviceFeatures) {
1076
.robustBufferAccess = true, /* This feature is mandatory */
1077
.fullDrawIndexUint32 = false, /* Only available since V3D 4.4.9.1 */
1078
.imageCubeArray = true,
1079
.independentBlend = true,
1080
.geometryShader = true,
1081
.tessellationShader = false,
1082
.sampleRateShading = true,
1083
.dualSrcBlend = false,
1085
.multiDrawIndirect = false,
1086
.drawIndirectFirstInstance = true,
1087
.depthClamp = false,
1088
.depthBiasClamp = true,
1089
.fillModeNonSolid = true,
1090
.depthBounds = false, /* Only available since V3D 4.3.16.2 */
1092
.largePoints = true,
1094
.multiViewport = false,
1095
.samplerAnisotropy = true,
1096
.textureCompressionETC2 = true,
1097
.textureCompressionASTC_LDR = true,
1098
/* Note that textureCompressionBC requires that the driver support all
1099
* the BC formats. V3D 4.2 only support the BC1-3, so we can't claim
1100
* that we support it.
1102
.textureCompressionBC = false,
1103
.occlusionQueryPrecise = true,
1104
.pipelineStatisticsQuery = false,
1105
.vertexPipelineStoresAndAtomics = true,
1106
.fragmentStoresAndAtomics = true,
1107
.shaderTessellationAndGeometryPointSize = true,
1108
.shaderImageGatherExtended = false,
1109
.shaderStorageImageExtendedFormats = true,
1110
.shaderStorageImageMultisample = false,
1111
.shaderStorageImageReadWithoutFormat = false,
1112
.shaderStorageImageWriteWithoutFormat = false,
1113
.shaderUniformBufferArrayDynamicIndexing = false,
1114
.shaderSampledImageArrayDynamicIndexing = false,
1115
.shaderStorageBufferArrayDynamicIndexing = false,
1116
.shaderStorageImageArrayDynamicIndexing = false,
1117
.shaderClipDistance = true,
1118
.shaderCullDistance = false,
1119
.shaderFloat64 = false,
1120
.shaderInt64 = false,
1121
.shaderInt16 = false,
1122
.shaderResourceResidency = false,
1123
.shaderResourceMinLod = false,
1124
.sparseBinding = false,
1125
.sparseResidencyBuffer = false,
1126
.sparseResidencyImage2D = false,
1127
.sparseResidencyImage3D = false,
1128
.sparseResidency2Samples = false,
1129
.sparseResidency4Samples = false,
1130
.sparseResidency8Samples = false,
1131
.sparseResidency16Samples = false,
1132
.sparseResidencyAliased = false,
1133
.variableMultisampleRate = false,
1134
.inheritedQueries = true,
1138
VKAPI_ATTR void VKAPI_CALL
1139
v3dv_GetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
1140
VkPhysicalDeviceFeatures2 *pFeatures)
1142
v3dv_GetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
1144
VkPhysicalDeviceVulkan13Features vk13 = {
1145
.inlineUniformBlock = true,
1146
/* Inline buffers work like push constants, so after their are bound
1147
* some of their contents may be copied into the uniform stream as soon
1148
* as the next draw/dispatch is recorded in the command buffer. This means
1149
* that if the client updates the buffer contents after binding it to
1150
* a command buffer, the next queue submit of that command buffer may
1151
* not use the latest update to the buffer contents, but the data that
1152
* was present in the buffer at the time it was bound to the command
1155
.descriptorBindingInlineUniformBlockUpdateAfterBind = false,
1156
.pipelineCreationCacheControl = true,
1157
.privateData = true,
1160
VkPhysicalDeviceVulkan12Features vk12 = {
1161
.hostQueryReset = true,
1162
.uniformAndStorageBuffer8BitAccess = true,
1163
.uniformBufferStandardLayout = true,
1164
/* V3D 4.2 wraps TMU vector accesses to 16-byte boundaries, so loads and
1165
* stores of vectors that cross these boundaries would not work correcly
1166
* with scalarBlockLayout and would need to be split into smaller vectors
1167
* (and/or scalars) that don't cross these boundaries. For load/stores
1168
* with dynamic offsets where we can't identify if the offset is
1169
* problematic, we would always have to scalarize. Overall, this would
1170
* not lead to best performance so let's just not support it.
1172
.scalarBlockLayout = false,
1173
.storageBuffer8BitAccess = true,
1174
.storagePushConstant8 = true,
1175
.imagelessFramebuffer = true,
1176
.timelineSemaphore = true,
1179
VkPhysicalDeviceVulkan11Features vk11 = {
1180
.storageBuffer16BitAccess = true,
1181
.uniformAndStorageBuffer16BitAccess = true,
1182
.storagePushConstant16 = true,
1183
.storageInputOutput16 = false,
1185
.multiviewGeometryShader = false,
1186
.multiviewTessellationShader = false,
1187
.variablePointersStorageBuffer = true,
1188
/* FIXME: this needs support for non-constant index on UBO/SSBO */
1189
.variablePointers = false,
1190
.protectedMemory = false,
1191
.samplerYcbcrConversion = false,
1192
.shaderDrawParameters = false,
1195
vk_foreach_struct(ext, pFeatures->pNext) {
1196
if (vk_get_physical_device_core_1_1_feature_ext(ext, &vk11))
1198
if (vk_get_physical_device_core_1_2_feature_ext(ext, &vk12))
1200
if (vk_get_physical_device_core_1_3_feature_ext(ext, &vk13))
1203
switch (ext->sType) {
1204
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT: {
1205
VkPhysicalDevice4444FormatsFeaturesEXT *features =
1206
(VkPhysicalDevice4444FormatsFeaturesEXT *)ext;
1207
features->formatA4R4G4B4 = true;
1208
features->formatA4B4G4R4 = true;
1212
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT: {
1213
VkPhysicalDeviceCustomBorderColorFeaturesEXT *features =
1214
(VkPhysicalDeviceCustomBorderColorFeaturesEXT *)ext;
1215
features->customBorderColors = true;
1216
features->customBorderColorWithoutFormat = false;
1220
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT: {
1221
VkPhysicalDeviceIndexTypeUint8FeaturesEXT *features =
1222
(VkPhysicalDeviceIndexTypeUint8FeaturesEXT *)ext;
1223
features->indexTypeUint8 = true;
1227
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT: {
1228
VkPhysicalDeviceLineRasterizationFeaturesEXT *features =
1229
(VkPhysicalDeviceLineRasterizationFeaturesEXT *)ext;
1230
features->rectangularLines = true;
1231
features->bresenhamLines = true;
1232
features->smoothLines = false;
1233
features->stippledRectangularLines = false;
1234
features->stippledBresenhamLines = false;
1235
features->stippledSmoothLines = false;
1239
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT: {
1240
VkPhysicalDeviceColorWriteEnableFeaturesEXT *features = (void *) ext;
1241
features->colorWriteEnable = true;
1245
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT: {
1246
VkPhysicalDeviceProvokingVertexFeaturesEXT *features = (void *) ext;
1247
features->provokingVertexLast = true;
1248
/* FIXME: update when supporting EXT_transform_feedback */
1249
features->transformFeedbackPreservesProvokingVertex = false;
1253
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT: {
1254
VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *features =
1256
features->vertexAttributeInstanceRateDivisor = true;
1257
features->vertexAttributeInstanceRateZeroDivisor = false;
1262
v3dv_debug_ignored_stype(ext->sType);
1268
VKAPI_ATTR void VKAPI_CALL
1269
v3dv_GetDeviceGroupPeerMemoryFeatures(VkDevice device,
1271
uint32_t localDeviceIndex,
1272
uint32_t remoteDeviceIndex,
1273
VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
1275
assert(localDeviceIndex == 0 && remoteDeviceIndex == 0);
1276
*pPeerMemoryFeatures = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT |
1277
VK_PEER_MEMORY_FEATURE_COPY_DST_BIT |
1278
VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT |
1279
VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT;
1283
/* Returns the PCI-style vendor ID reported to applications (Broadcom). */
uint32_t
v3dv_physical_device_vendor_id(struct v3dv_physical_device *dev)
{
   return 0x14E4; /* Broadcom */
}
#if using_v3d_simulator
/* Simulator only: queries an i915 GETPARAM value from the host GPU.
 * Returns 0 on success, -1 on ioctl failure.
 * NOTE(review): interior lines were lost in extraction; body reconstructed
 * from the visible declarations — confirm against the original file.
 */
static int
get_i915_param(int fd, uint32_t param, int *value)
{
   struct drm_i915_getparam gp = {
      .param = param,
      .value = value,
   };

   int ret = drmIoctl(fd, DRM_IOCTL_I915_GETPARAM, &gp);
   if (ret != 0)
      return -1;

   return 0;
}
#endif
v3dv_physical_device_device_id(struct v3dv_physical_device *dev)
1312
#if using_v3d_simulator
1315
if (!get_i915_param(dev->render_fd, I915_PARAM_CHIPSET_ID, &devid))
1316
fprintf(stderr, "Error getting device_id\n");
1320
switch (dev->devinfo.ver) {
1322
return 0xBE485FD3; /* Broadcom deviceID for 2711 */
1324
unreachable("Unsupported V3D version");
1329
VKAPI_ATTR void VKAPI_CALL
1330
v3dv_GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
1331
VkPhysicalDeviceProperties *pProperties)
1333
V3DV_FROM_HANDLE(v3dv_physical_device, pdevice, physicalDevice);
1335
STATIC_ASSERT(MAX_SAMPLED_IMAGES + MAX_STORAGE_IMAGES + MAX_INPUT_ATTACHMENTS
1336
<= V3D_MAX_TEXTURE_SAMPLERS);
1337
STATIC_ASSERT(MAX_UNIFORM_BUFFERS >= MAX_DYNAMIC_UNIFORM_BUFFERS);
1338
STATIC_ASSERT(MAX_STORAGE_BUFFERS >= MAX_DYNAMIC_STORAGE_BUFFERS);
1340
const uint32_t page_size = 4096;
1341
const uint32_t mem_size = compute_heap_size();
1343
const uint32_t max_varying_components = 16 * 4;
1345
const float v3d_point_line_granularity = 2.0f / (1 << V3D_COORD_SHIFT);
1346
const uint32_t max_fb_size = 4096;
1348
const VkSampleCountFlags supported_sample_counts =
1349
VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
1351
struct timespec clock_res;
1352
clock_getres(CLOCK_MONOTONIC, &clock_res);
1353
const float timestamp_period =
1354
clock_res.tv_sec * 1000000000.0f + clock_res.tv_nsec;
1356
/* FIXME: this will probably require an in-depth review */
1357
VkPhysicalDeviceLimits limits = {
1358
.maxImageDimension1D = 4096,
1359
.maxImageDimension2D = 4096,
1360
.maxImageDimension3D = 4096,
1361
.maxImageDimensionCube = 4096,
1362
.maxImageArrayLayers = 2048,
1363
.maxTexelBufferElements = (1ul << 28),
1364
.maxUniformBufferRange = V3D_MAX_BUFFER_RANGE,
1365
.maxStorageBufferRange = V3D_MAX_BUFFER_RANGE,
1366
.maxPushConstantsSize = MAX_PUSH_CONSTANTS_SIZE,
1367
.maxMemoryAllocationCount = mem_size / page_size,
1368
.maxSamplerAllocationCount = 64 * 1024,
1369
.bufferImageGranularity = V3D_NON_COHERENT_ATOM_SIZE,
1370
.sparseAddressSpaceSize = 0,
1371
.maxBoundDescriptorSets = MAX_SETS,
1372
.maxPerStageDescriptorSamplers = V3D_MAX_TEXTURE_SAMPLERS,
1373
.maxPerStageDescriptorUniformBuffers = MAX_UNIFORM_BUFFERS,
1374
.maxPerStageDescriptorStorageBuffers = MAX_STORAGE_BUFFERS,
1375
.maxPerStageDescriptorSampledImages = MAX_SAMPLED_IMAGES,
1376
.maxPerStageDescriptorStorageImages = MAX_STORAGE_IMAGES,
1377
.maxPerStageDescriptorInputAttachments = MAX_INPUT_ATTACHMENTS,
1378
.maxPerStageResources = 128,
1380
/* Some of these limits are multiplied by 6 because they need to
1381
* include all possible shader stages (even if not supported). See
1382
* 'Required Limits' table in the Vulkan spec.
1384
.maxDescriptorSetSamplers = 6 * V3D_MAX_TEXTURE_SAMPLERS,
1385
.maxDescriptorSetUniformBuffers = 6 * MAX_UNIFORM_BUFFERS,
1386
.maxDescriptorSetUniformBuffersDynamic = MAX_DYNAMIC_UNIFORM_BUFFERS,
1387
.maxDescriptorSetStorageBuffers = 6 * MAX_STORAGE_BUFFERS,
1388
.maxDescriptorSetStorageBuffersDynamic = MAX_DYNAMIC_STORAGE_BUFFERS,
1389
.maxDescriptorSetSampledImages = 6 * MAX_SAMPLED_IMAGES,
1390
.maxDescriptorSetStorageImages = 6 * MAX_STORAGE_IMAGES,
1391
.maxDescriptorSetInputAttachments = MAX_INPUT_ATTACHMENTS,
1394
.maxVertexInputAttributes = MAX_VERTEX_ATTRIBS,
1395
.maxVertexInputBindings = MAX_VBS,
1396
.maxVertexInputAttributeOffset = 0xffffffff,
1397
.maxVertexInputBindingStride = 0xffffffff,
1398
.maxVertexOutputComponents = max_varying_components,
1400
/* Tessellation limits */
1401
.maxTessellationGenerationLevel = 0,
1402
.maxTessellationPatchSize = 0,
1403
.maxTessellationControlPerVertexInputComponents = 0,
1404
.maxTessellationControlPerVertexOutputComponents = 0,
1405
.maxTessellationControlPerPatchOutputComponents = 0,
1406
.maxTessellationControlTotalOutputComponents = 0,
1407
.maxTessellationEvaluationInputComponents = 0,
1408
.maxTessellationEvaluationOutputComponents = 0,
1410
/* Geometry limits */
1411
.maxGeometryShaderInvocations = 32,
1412
.maxGeometryInputComponents = 64,
1413
.maxGeometryOutputComponents = 64,
1414
.maxGeometryOutputVertices = 256,
1415
.maxGeometryTotalOutputComponents = 1024,
1417
/* Fragment limits */
1418
.maxFragmentInputComponents = max_varying_components,
1419
.maxFragmentOutputAttachments = 4,
1420
.maxFragmentDualSrcAttachments = 0,
1421
.maxFragmentCombinedOutputResources = MAX_RENDER_TARGETS +
1422
MAX_STORAGE_BUFFERS +
1425
/* Compute limits */
1426
.maxComputeSharedMemorySize = 16384,
1427
.maxComputeWorkGroupCount = { 65535, 65535, 65535 },
1428
.maxComputeWorkGroupInvocations = 256,
1429
.maxComputeWorkGroupSize = { 256, 256, 256 },
1431
.subPixelPrecisionBits = V3D_COORD_SHIFT,
1432
.subTexelPrecisionBits = 8,
1433
.mipmapPrecisionBits = 8,
1434
.maxDrawIndexedIndexValue = 0x00ffffff,
1435
.maxDrawIndirectCount = 0x7fffffff,
1436
.maxSamplerLodBias = 14.0f,
1437
.maxSamplerAnisotropy = 16.0f,
1438
.maxViewports = MAX_VIEWPORTS,
1439
.maxViewportDimensions = { max_fb_size, max_fb_size },
1440
.viewportBoundsRange = { -2.0 * max_fb_size,
1441
2.0 * max_fb_size - 1 },
1442
.viewportSubPixelBits = 0,
1443
.minMemoryMapAlignment = page_size,
1444
.minTexelBufferOffsetAlignment = V3D_UIFBLOCK_SIZE,
1445
.minUniformBufferOffsetAlignment = 32,
1446
.minStorageBufferOffsetAlignment = 32,
1447
.minTexelOffset = -8,
1448
.maxTexelOffset = 7,
1449
.minTexelGatherOffset = -8,
1450
.maxTexelGatherOffset = 7,
1451
.minInterpolationOffset = -0.5,
1452
.maxInterpolationOffset = 0.5,
1453
.subPixelInterpolationOffsetBits = V3D_COORD_SHIFT,
1454
.maxFramebufferWidth = max_fb_size,
1455
.maxFramebufferHeight = max_fb_size,
1456
.maxFramebufferLayers = 256,
1457
.framebufferColorSampleCounts = supported_sample_counts,
1458
.framebufferDepthSampleCounts = supported_sample_counts,
1459
.framebufferStencilSampleCounts = supported_sample_counts,
1460
.framebufferNoAttachmentsSampleCounts = supported_sample_counts,
1461
.maxColorAttachments = MAX_RENDER_TARGETS,
1462
.sampledImageColorSampleCounts = supported_sample_counts,
1463
.sampledImageIntegerSampleCounts = supported_sample_counts,
1464
.sampledImageDepthSampleCounts = supported_sample_counts,
1465
.sampledImageStencilSampleCounts = supported_sample_counts,
1466
.storageImageSampleCounts = VK_SAMPLE_COUNT_1_BIT,
1467
.maxSampleMaskWords = 1,
1468
.timestampComputeAndGraphics = true,
1469
.timestampPeriod = timestamp_period,
1470
.maxClipDistances = 8,
1471
.maxCullDistances = 0,
1472
.maxCombinedClipAndCullDistances = 8,
1473
.discreteQueuePriorities = 2,
1474
.pointSizeRange = { v3d_point_line_granularity,
1475
V3D_MAX_POINT_SIZE },
1476
.lineWidthRange = { 1.0f, V3D_MAX_LINE_WIDTH },
1477
.pointSizeGranularity = v3d_point_line_granularity,
1478
.lineWidthGranularity = v3d_point_line_granularity,
1479
.strictLines = true,
1480
.standardSampleLocations = false,
1481
.optimalBufferCopyOffsetAlignment = 32,
1482
.optimalBufferCopyRowPitchAlignment = 32,
1483
.nonCoherentAtomSize = V3D_NON_COHERENT_ATOM_SIZE,
1486
*pProperties = (VkPhysicalDeviceProperties) {
1487
.apiVersion = V3DV_API_VERSION,
1488
.driverVersion = vk_get_driver_version(),
1489
.vendorID = v3dv_physical_device_vendor_id(pdevice),
1490
.deviceID = v3dv_physical_device_device_id(pdevice),
1491
.deviceType = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
1493
.sparseProperties = { 0 },
1496
snprintf(pProperties->deviceName, sizeof(pProperties->deviceName),
1497
"%s", pdevice->name);
1498
memcpy(pProperties->pipelineCacheUUID,
1499
pdevice->pipeline_cache_uuid, VK_UUID_SIZE);
1502
VKAPI_ATTR void VKAPI_CALL
1503
v3dv_GetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,
1504
VkPhysicalDeviceProperties2 *pProperties)
1506
V3DV_FROM_HANDLE(v3dv_physical_device, pdevice, physicalDevice);
1508
v3dv_GetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
1510
/* We don't really have special restrictions for the maximum
1511
* descriptors per set, other than maybe not exceeding the limits
1512
* of addressable memory in a single allocation on either the host
1513
* or the GPU. This will be a much larger limit than any of the
1514
* per-stage limits already available in Vulkan though, so in practice,
1515
* it is not expected to limit anything beyond what is already
1516
* constrained through per-stage limits.
1518
const uint32_t max_host_descriptors =
1519
(UINT32_MAX - sizeof(struct v3dv_descriptor_set)) /
1520
sizeof(struct v3dv_descriptor);
1521
const uint32_t max_gpu_descriptors =
1522
(UINT32_MAX / v3dv_X(pdevice, max_descriptor_bo_size)());
1524
VkPhysicalDeviceVulkan13Properties vk13 = {
1525
.maxInlineUniformBlockSize = 4096,
1526
.maxPerStageDescriptorInlineUniformBlocks = MAX_INLINE_UNIFORM_BUFFERS,
1527
.maxDescriptorSetInlineUniformBlocks = MAX_INLINE_UNIFORM_BUFFERS,
1528
.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks =
1529
MAX_INLINE_UNIFORM_BUFFERS,
1530
.maxDescriptorSetUpdateAfterBindInlineUniformBlocks =
1531
MAX_INLINE_UNIFORM_BUFFERS,
1534
VkPhysicalDeviceVulkan12Properties vk12 = {
1535
.driverID = VK_DRIVER_ID_MESA_V3DV,
1536
.conformanceVersion = {
1543
.supportedDepthResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT,
1544
.supportedStencilResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT,
1545
/* FIXME: if we want to support independentResolveNone then we would
1546
* need to honor attachment load operations on resolve attachments,
1547
* which we currently ignore because the resolve makes them irrelevant,
1548
* as it unconditionally writes all pixels in the render area. However,
1549
* with independentResolveNone, it is possible to have one aspect of a
1550
* D/S resolve attachment stay unresolved, in which case the attachment
1551
* load operation is relevant.
1553
* NOTE: implementing attachment load for resolve attachments isn't
1554
* immediately trivial because these attachments are not part of the
1555
* framebuffer and therefore we can't use the same mechanism we use
1556
* for framebuffer attachments. Instead, we should probably have to
1557
* emit a meta operation for that right at the start of the render
1558
* pass (or subpass).
1560
.independentResolveNone = false,
1561
.independentResolve = false,
1562
.maxTimelineSemaphoreValueDifference = UINT64_MAX,
1564
memset(vk12.driverName, 0, VK_MAX_DRIVER_NAME_SIZE_KHR);
1565
snprintf(vk12.driverName, VK_MAX_DRIVER_NAME_SIZE_KHR, "V3DV Mesa");
1566
memset(vk12.driverInfo, 0, VK_MAX_DRIVER_INFO_SIZE_KHR);
1567
snprintf(vk12.driverInfo, VK_MAX_DRIVER_INFO_SIZE_KHR,
1568
"Mesa " PACKAGE_VERSION MESA_GIT_SHA1);
1570
VkPhysicalDeviceVulkan11Properties vk11 = {
1571
.deviceLUIDValid = false,
1572
.subgroupSize = V3D_CHANNELS,
1573
.subgroupSupportedStages = VK_SHADER_STAGE_COMPUTE_BIT,
1574
.subgroupSupportedOperations = VK_SUBGROUP_FEATURE_BASIC_BIT,
1575
.subgroupQuadOperationsInAllStages = false,
1576
.pointClippingBehavior = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
1577
.maxMultiviewViewCount = MAX_MULTIVIEW_VIEW_COUNT,
1578
.maxMultiviewInstanceIndex = UINT32_MAX - 1,
1579
.protectedNoFault = false,
1580
.maxPerSetDescriptors = MIN2(max_host_descriptors, max_gpu_descriptors),
1581
/* Minimum required by the spec */
1582
.maxMemoryAllocationSize = MAX_MEMORY_ALLOCATION_SIZE,
1584
memcpy(vk11.deviceUUID, pdevice->device_uuid, VK_UUID_SIZE);
1585
memcpy(vk11.driverUUID, pdevice->driver_uuid, VK_UUID_SIZE);
1588
vk_foreach_struct(ext, pProperties->pNext) {
1589
if (vk_get_physical_device_core_1_1_property_ext(ext, &vk11))
1591
if (vk_get_physical_device_core_1_2_property_ext(ext, &vk12))
1593
if (vk_get_physical_device_core_1_3_property_ext(ext, &vk13))
1596
switch (ext->sType) {
1597
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT: {
1598
VkPhysicalDeviceCustomBorderColorPropertiesEXT *props =
1599
(VkPhysicalDeviceCustomBorderColorPropertiesEXT *)ext;
1600
props->maxCustomBorderColorSamplers = V3D_MAX_TEXTURE_SAMPLERS;
1603
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT: {
1604
VkPhysicalDeviceProvokingVertexPropertiesEXT *props =
1605
(VkPhysicalDeviceProvokingVertexPropertiesEXT *)ext;
1606
props->provokingVertexModePerPipeline = true;
1607
/* FIXME: update when supporting EXT_transform_feedback */
1608
props->transformFeedbackPreservesTriangleFanProvokingVertex = false;
1611
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT: {
1612
VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *props =
1613
(VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *)ext;
1614
props->maxVertexAttribDivisor = 0xffff;
1617
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT: {
1618
VkPhysicalDeviceDrmPropertiesEXT *props =
1619
(VkPhysicalDeviceDrmPropertiesEXT *)ext;
1620
props->hasPrimary = pdevice->has_primary;
1621
if (props->hasPrimary) {
1622
props->primaryMajor = (int64_t) major(pdevice->primary_devid);
1623
props->primaryMinor = (int64_t) minor(pdevice->primary_devid);
1625
props->hasRender = pdevice->has_render;
1626
if (props->hasRender) {
1627
props->renderMajor = (int64_t) major(pdevice->render_devid);
1628
props->renderMinor = (int64_t) minor(pdevice->render_devid);
1632
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT: {
1633
VkPhysicalDeviceLineRasterizationPropertiesEXT *props =
1634
(VkPhysicalDeviceLineRasterizationPropertiesEXT *)ext;
1635
props->lineSubPixelPrecisionBits = V3D_COORD_SHIFT;
1638
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT:
1639
/* Do nothing, not even logging. This is a non-PCI device, so we will
1640
* never provide this extension.
1644
v3dv_debug_ignored_stype(ext->sType);
1650
/* We support exactly one queue family. */
1651
static const VkQueueFamilyProperties
1652
v3dv_queue_family_properties = {
1653
.queueFlags = VK_QUEUE_GRAPHICS_BIT |
1654
VK_QUEUE_COMPUTE_BIT |
1655
VK_QUEUE_TRANSFER_BIT,
1657
.timestampValidBits = 64,
1658
.minImageTransferGranularity = { 1, 1, 1 },
1661
VKAPI_ATTR void VKAPI_CALL
1662
v3dv_GetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,
1663
uint32_t *pQueueFamilyPropertyCount,
1664
VkQueueFamilyProperties2 *pQueueFamilyProperties)
1666
VK_OUTARRAY_MAKE_TYPED(VkQueueFamilyProperties2, out,
1667
pQueueFamilyProperties, pQueueFamilyPropertyCount);
1669
vk_outarray_append_typed(VkQueueFamilyProperties2, &out, p) {
1670
p->queueFamilyProperties = v3dv_queue_family_properties;
1672
vk_foreach_struct(s, p->pNext) {
1673
v3dv_debug_ignored_stype(s->sType);
1678
VKAPI_ATTR void VKAPI_CALL
1679
v3dv_GetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,
1680
VkPhysicalDeviceMemoryProperties *pMemoryProperties)
1682
V3DV_FROM_HANDLE(v3dv_physical_device, device, physicalDevice);
1683
*pMemoryProperties = device->memory;
1686
VKAPI_ATTR void VKAPI_CALL
1687
v3dv_GetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice,
1688
VkPhysicalDeviceMemoryProperties2 *pMemoryProperties)
1690
v3dv_GetPhysicalDeviceMemoryProperties(physicalDevice,
1691
&pMemoryProperties->memoryProperties);
1693
vk_foreach_struct(ext, pMemoryProperties->pNext) {
1694
switch (ext->sType) {
1696
v3dv_debug_ignored_stype(ext->sType);
1702
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
1703
v3dv_GetInstanceProcAddr(VkInstance _instance,
1706
V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
1707
return vk_instance_get_proc_addr(&instance->vk,
1708
&v3dv_instance_entrypoints,
1712
/* With version 1+ of the loader interface the ICD should expose
1713
* vk_icdGetInstanceProcAddr to work around certain LD_PRELOAD issues seen in apps.
1716
VKAPI_ATTR PFN_vkVoidFunction
1717
VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance,
1721
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
1722
vk_icdGetInstanceProcAddr(VkInstance instance,
1725
return v3dv_GetInstanceProcAddr(instance, pName);
1728
/* With version 4+ of the loader interface the ICD should expose
1729
* vk_icdGetPhysicalDeviceProcAddr()
1732
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
1733
vk_icdGetPhysicalDeviceProcAddr(VkInstance _instance,
1737
vk_icdGetPhysicalDeviceProcAddr(VkInstance _instance,
1740
V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
1742
return vk_instance_get_physical_device_proc_addr(&instance->vk, pName);
1745
VKAPI_ATTR VkResult VKAPI_CALL
1746
v3dv_EnumerateInstanceLayerProperties(uint32_t *pPropertyCount,
1747
VkLayerProperties *pProperties)
1749
if (pProperties == NULL) {
1750
*pPropertyCount = 0;
1754
return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
1757
VKAPI_ATTR VkResult VKAPI_CALL
1758
v3dv_EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,
1759
uint32_t *pPropertyCount,
1760
VkLayerProperties *pProperties)
1762
V3DV_FROM_HANDLE(v3dv_physical_device, physical_device, physicalDevice);
1764
if (pProperties == NULL) {
1765
*pPropertyCount = 0;
1769
return vk_error(physical_device, VK_ERROR_LAYER_NOT_PRESENT);
1773
destroy_queue_syncs(struct v3dv_queue *queue)
1775
for (int i = 0; i < V3DV_QUEUE_COUNT; i++) {
1776
if (queue->last_job_syncs.syncs[i]) {
1777
drmSyncobjDestroy(queue->device->pdevice->render_fd,
1778
queue->last_job_syncs.syncs[i]);
1784
queue_init(struct v3dv_device *device, struct v3dv_queue *queue,
1785
const VkDeviceQueueCreateInfo *create_info,
1786
uint32_t index_in_family)
1788
VkResult result = vk_queue_init(&queue->vk, &device->vk, create_info,
1790
if (result != VK_SUCCESS)
1793
result = vk_queue_enable_submit_thread(&queue->vk);
1794
if (result != VK_SUCCESS)
1795
goto fail_submit_thread;
1797
queue->device = device;
1798
queue->vk.driver_submit = v3dv_queue_driver_submit;
1800
for (int i = 0; i < V3DV_QUEUE_COUNT; i++) {
1801
queue->last_job_syncs.first[i] = true;
1802
int ret = drmSyncobjCreate(device->pdevice->render_fd,
1803
DRM_SYNCOBJ_CREATE_SIGNALED,
1804
&queue->last_job_syncs.syncs[i]);
1806
result = vk_errorf(device, VK_ERROR_INITIALIZATION_FAILED,
1807
"syncobj create failed: %m");
1808
goto fail_last_job_syncs;
1812
queue->noop_job = NULL;
1815
fail_last_job_syncs:
1816
destroy_queue_syncs(queue);
1818
vk_queue_finish(&queue->vk);
1823
queue_finish(struct v3dv_queue *queue)
1825
if (queue->noop_job)
1826
v3dv_job_destroy(queue->noop_job);
1827
destroy_queue_syncs(queue);
1828
vk_queue_finish(&queue->vk);
1832
init_device_meta(struct v3dv_device *device)
1834
mtx_init(&device->meta.mtx, mtx_plain);
1835
v3dv_meta_clear_init(device);
1836
v3dv_meta_blit_init(device);
1837
v3dv_meta_texel_buffer_copy_init(device);
1841
destroy_device_meta(struct v3dv_device *device)
1843
mtx_destroy(&device->meta.mtx);
1844
v3dv_meta_clear_finish(device);
1845
v3dv_meta_blit_finish(device);
1846
v3dv_meta_texel_buffer_copy_finish(device);
1849
VKAPI_ATTR VkResult VKAPI_CALL
1850
v3dv_CreateDevice(VkPhysicalDevice physicalDevice,
1851
const VkDeviceCreateInfo *pCreateInfo,
1852
const VkAllocationCallbacks *pAllocator,
1855
V3DV_FROM_HANDLE(v3dv_physical_device, physical_device, physicalDevice);
1856
struct v3dv_instance *instance = (struct v3dv_instance*) physical_device->vk.instance;
1858
struct v3dv_device *device;
1860
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO);
1862
/* Check requested queues (we only expose one queue ) */
1863
assert(pCreateInfo->queueCreateInfoCount == 1);
1864
for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; i++) {
1865
assert(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex == 0);
1866
assert(pCreateInfo->pQueueCreateInfos[i].queueCount == 1);
1867
if (pCreateInfo->pQueueCreateInfos[i].flags != 0)
1868
return vk_error(instance, VK_ERROR_INITIALIZATION_FAILED);
1871
device = vk_zalloc2(&physical_device->vk.instance->alloc, pAllocator,
1873
VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
1875
return vk_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
1877
struct vk_device_dispatch_table dispatch_table;
1878
vk_device_dispatch_table_from_entrypoints(&dispatch_table,
1879
&v3dv_device_entrypoints, true);
1880
vk_device_dispatch_table_from_entrypoints(&dispatch_table,
1881
&wsi_device_entrypoints, false);
1882
result = vk_device_init(&device->vk, &physical_device->vk,
1883
&dispatch_table, pCreateInfo, pAllocator);
1884
if (result != VK_SUCCESS) {
1885
vk_free(&device->vk.alloc, device);
1886
return vk_error(NULL, result);
1889
device->instance = instance;
1890
device->pdevice = physical_device;
1892
mtx_init(&device->query_mutex, mtx_plain);
1893
cnd_init(&device->query_ended);
1895
vk_device_set_drm_fd(&device->vk, physical_device->render_fd);
1896
vk_device_enable_threaded_submit(&device->vk);
1898
result = queue_init(device, &device->queue,
1899
pCreateInfo->pQueueCreateInfos, 0);
1900
if (result != VK_SUCCESS)
1903
device->devinfo = physical_device->devinfo;
1905
/* Vulkan 1.1 and VK_KHR_get_physical_device_properties2 added
1906
* VkPhysicalDeviceFeatures2 which can be used in the pNext chain of
1907
* vkDeviceCreateInfo, in which case it should be used instead of
1910
const VkPhysicalDeviceFeatures2 *features2 =
1911
vk_find_struct_const(pCreateInfo->pNext, PHYSICAL_DEVICE_FEATURES_2);
1913
memcpy(&device->features, &features2->features,
1914
sizeof(device->features));
1915
} else if (pCreateInfo->pEnabledFeatures) {
1916
memcpy(&device->features, pCreateInfo->pEnabledFeatures,
1917
sizeof(device->features));
1920
if (device->features.robustBufferAccess)
1921
perf_debug("Device created with Robust Buffer Access enabled.\n");
1924
v3dv_X(device, device_check_prepacked_sizes)();
1926
init_device_meta(device);
1927
v3dv_bo_cache_init(device);
1928
v3dv_pipeline_cache_init(&device->default_pipeline_cache, device, 0,
1929
device->instance->default_pipeline_cache_enabled);
1930
device->default_attribute_float =
1931
v3dv_pipeline_create_default_attribute_values(device, NULL);
1933
*pDevice = v3dv_device_to_handle(device);
1938
cnd_destroy(&device->query_ended);
1939
mtx_destroy(&device->query_mutex);
1940
vk_device_finish(&device->vk);
1941
vk_free(&device->vk.alloc, device);
1946
VKAPI_ATTR void VKAPI_CALL
1947
v3dv_DestroyDevice(VkDevice _device,
1948
const VkAllocationCallbacks *pAllocator)
1950
V3DV_FROM_HANDLE(v3dv_device, device, _device);
1952
device->vk.dispatch_table.DeviceWaitIdle(_device);
1953
queue_finish(&device->queue);
1954
destroy_device_meta(device);
1955
v3dv_pipeline_cache_finish(&device->default_pipeline_cache);
1957
if (device->default_attribute_float) {
1958
v3dv_bo_free(device, device->default_attribute_float);
1959
device->default_attribute_float = NULL;
1962
/* Bo cache should be removed the last, as any other object could be
1963
* freeing their private bos
1965
v3dv_bo_cache_destroy(device);
1967
cnd_destroy(&device->query_ended);
1968
mtx_destroy(&device->query_mutex);
1970
vk_device_finish(&device->vk);
1971
vk_free2(&device->vk.alloc, pAllocator, device);
1975
device_alloc(struct v3dv_device *device,
1976
struct v3dv_device_memory *mem,
1979
/* Our kernel interface is 32-bit */
1980
assert(size <= UINT32_MAX);
1982
mem->bo = v3dv_bo_alloc(device, size, "device_alloc", false);
1984
return VK_ERROR_OUT_OF_DEVICE_MEMORY;
1990
device_free_wsi_dumb(int32_t display_fd, int32_t dumb_handle)
1992
assert(display_fd != -1);
1993
if (dumb_handle < 0)
1996
struct drm_mode_destroy_dumb destroy_dumb = {
1997
.handle = dumb_handle,
1999
if (v3dv_ioctl(display_fd, DRM_IOCTL_MODE_DESTROY_DUMB, &destroy_dumb)) {
2000
fprintf(stderr, "destroy dumb object %d: %s\n", dumb_handle, strerror(errno));
2005
device_free(struct v3dv_device *device, struct v3dv_device_memory *mem)
2007
/* If this memory allocation was for WSI, then we need to use the
2008
* display device to free the allocated dumb BO.
2010
if (mem->is_for_wsi) {
2011
device_free_wsi_dumb(device->instance->physicalDevice.display_fd,
2012
mem->bo->dumb_handle);
2015
v3dv_bo_free(device, mem->bo);
2019
device_unmap(struct v3dv_device *device, struct v3dv_device_memory *mem)
2021
assert(mem && mem->bo->map && mem->bo->map_size > 0);
2022
v3dv_bo_unmap(device, mem->bo);
2026
device_map(struct v3dv_device *device, struct v3dv_device_memory *mem)
2028
assert(mem && mem->bo);
2032
* "After a successful call to vkMapMemory the memory object memory is
2033
* considered to be currently host mapped. It is an application error to
2034
* call vkMapMemory on a memory object that is already host mapped."
2036
* We are not concerned with this ourselves (validation layers should
2037
* catch these errors and warn users), however, the driver may internally
2038
* map things (for example for debug CLIF dumps or some CPU-side operations)
2039
* so by the time the user calls here the buffer might already been mapped
2040
* internally by the driver.
2043
assert(mem->bo->map_size == mem->bo->size);
2047
bool ok = v3dv_bo_map(device, mem->bo, mem->bo->size);
2049
return VK_ERROR_MEMORY_MAP_FAILED;
2055
device_import_bo(struct v3dv_device *device,
2056
const VkAllocationCallbacks *pAllocator,
2057
int fd, uint64_t size,
2058
struct v3dv_bo **bo)
2062
off_t real_size = lseek(fd, 0, SEEK_END);
2063
lseek(fd, 0, SEEK_SET);
2064
if (real_size < 0 || (uint64_t) real_size < size)
2065
return VK_ERROR_INVALID_EXTERNAL_HANDLE;
2067
int render_fd = device->pdevice->render_fd;
2068
assert(render_fd >= 0);
2072
ret = drmPrimeFDToHandle(render_fd, fd, &handle);
2074
return VK_ERROR_INVALID_EXTERNAL_HANDLE;
2076
struct drm_v3d_get_bo_offset get_offset = {
2079
ret = v3dv_ioctl(render_fd, DRM_IOCTL_V3D_GET_BO_OFFSET, &get_offset);
2081
return VK_ERROR_INVALID_EXTERNAL_HANDLE;
2082
assert(get_offset.offset != 0);
2084
*bo = v3dv_device_lookup_bo(device->pdevice, handle);
2087
if ((*bo)->refcnt == 0)
2088
v3dv_bo_init(*bo, handle, size, get_offset.offset, "import", false);
2090
p_atomic_inc(&(*bo)->refcnt);
2096
device_alloc_for_wsi(struct v3dv_device *device,
2097
const VkAllocationCallbacks *pAllocator,
2098
struct v3dv_device_memory *mem,
2101
/* In the simulator we can get away with a regular allocation since both
2102
* allocation and rendering happen in the same DRM render node. On actual
2103
* hardware we need to allocate our winsys BOs on the vc4 display device
2104
* and import them into v3d.
2106
#if using_v3d_simulator
2107
return device_alloc(device, mem, size);
2109
/* If we are allocating for WSI we should have a swapchain and thus,
2110
* we should've initialized the display device. However, Zink doesn't
2111
* use swapchains, so in that case we can get here without acquiring the
2112
* display device and we need to do it now.
2115
struct v3dv_instance *instance = device->instance;
2116
struct v3dv_physical_device *pdevice = &device->instance->physicalDevice;
2117
if (unlikely(pdevice->display_fd < 0)) {
2118
result = v3dv_physical_device_acquire_display(instance, pdevice, NULL);
2119
if (result != VK_SUCCESS)
2122
assert(pdevice->display_fd != -1);
2124
mem->is_for_wsi = true;
2126
int display_fd = pdevice->display_fd;
2127
struct drm_mode_create_dumb create_dumb = {
2128
.width = 1024, /* one page */
2129
.height = align(size, 4096) / 4096,
2130
.bpp = util_format_get_blocksizebits(PIPE_FORMAT_RGBA8888_UNORM),
2134
err = v3dv_ioctl(display_fd, DRM_IOCTL_MODE_CREATE_DUMB, &create_dumb);
2140
drmPrimeHandleToFD(display_fd, create_dumb.handle, O_CLOEXEC, &fd);
2144
result = device_import_bo(device, pAllocator, fd, size, &mem->bo);
2146
if (result != VK_SUCCESS)
2149
mem->bo->dumb_handle = create_dumb.handle;
2154
device_free_wsi_dumb(display_fd, create_dumb.handle);
2157
return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2161
VKAPI_ATTR VkResult VKAPI_CALL
2162
v3dv_AllocateMemory(VkDevice _device,
2163
const VkMemoryAllocateInfo *pAllocateInfo,
2164
const VkAllocationCallbacks *pAllocator,
2165
VkDeviceMemory *pMem)
2167
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2168
struct v3dv_device_memory *mem;
2169
struct v3dv_physical_device *pdevice = &device->instance->physicalDevice;
2171
assert(pAllocateInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO);
2173
/* The Vulkan 1.0.33 spec says "allocationSize must be greater than 0". */
2174
assert(pAllocateInfo->allocationSize > 0);
2176
mem = vk_object_zalloc(&device->vk, pAllocator, sizeof(*mem),
2177
VK_OBJECT_TYPE_DEVICE_MEMORY);
2179
return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);
2181
assert(pAllocateInfo->memoryTypeIndex < pdevice->memory.memoryTypeCount);
2182
mem->type = &pdevice->memory.memoryTypes[pAllocateInfo->memoryTypeIndex];
2183
mem->is_for_wsi = false;
2185
const struct wsi_memory_allocate_info *wsi_info = NULL;
2186
const VkImportMemoryFdInfoKHR *fd_info = NULL;
2187
vk_foreach_struct_const(ext, pAllocateInfo->pNext) {
2188
switch ((unsigned)ext->sType) {
2189
case VK_STRUCTURE_TYPE_WSI_MEMORY_ALLOCATE_INFO_MESA:
2190
wsi_info = (void *)ext;
2192
case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR:
2193
fd_info = (void *)ext;
2195
case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO:
2196
/* We don't support VK_KHR_buffer_device_address or multiple
2197
* devices per device group, so we can ignore this.
2200
case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR:
2201
/* We don't have particular optimizations associated with memory
2202
* allocations that won't be suballocated to multiple resources.
2205
case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR:
2206
/* The mask of handle types specified here must be supported
2207
* according to VkExternalImageFormatProperties, so it must be
2208
* fd or dmabuf, which don't have special requirements for us.
2212
v3dv_debug_ignored_stype(ext->sType);
2217
VkResult result = VK_SUCCESS;
2219
/* We always allocate device memory in multiples of a page, so round up
2220
* requested size to that.
2222
VkDeviceSize alloc_size = ALIGN(pAllocateInfo->allocationSize, 4096);
2224
if (unlikely(alloc_size > MAX_MEMORY_ALLOCATION_SIZE)) {
2225
result = VK_ERROR_OUT_OF_DEVICE_MEMORY;
2228
result = device_alloc_for_wsi(device, pAllocator, mem, alloc_size);
2229
} else if (fd_info && fd_info->handleType) {
2230
assert(fd_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
2231
fd_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
2232
result = device_import_bo(device, pAllocator,
2233
fd_info->fd, alloc_size, &mem->bo);
2234
if (result == VK_SUCCESS)
2237
result = device_alloc(device, mem, alloc_size);
2241
if (result != VK_SUCCESS) {
2242
vk_object_free(&device->vk, pAllocator, mem);
2243
return vk_error(device, result);
2246
*pMem = v3dv_device_memory_to_handle(mem);
2250
VKAPI_ATTR void VKAPI_CALL
2251
v3dv_FreeMemory(VkDevice _device,
2252
VkDeviceMemory _mem,
2253
const VkAllocationCallbacks *pAllocator)
2255
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2256
V3DV_FROM_HANDLE(v3dv_device_memory, mem, _mem);
2262
v3dv_UnmapMemory(_device, _mem);
2264
device_free(device, mem);
2266
vk_object_free(&device->vk, pAllocator, mem);
2269
VKAPI_ATTR VkResult VKAPI_CALL
2270
v3dv_MapMemory(VkDevice _device,
2271
VkDeviceMemory _memory,
2272
VkDeviceSize offset,
2274
VkMemoryMapFlags flags,
2277
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2278
V3DV_FROM_HANDLE(v3dv_device_memory, mem, _memory);
2285
assert(offset < mem->bo->size);
2287
/* Since the driver can map BOs internally as well and the mapped range
2288
* required by the user or the driver might not be the same, we always map
2289
* the entire BO and then add the requested offset to the start address
2290
* of the mapped region.
2292
VkResult result = device_map(device, mem);
2293
if (result != VK_SUCCESS)
2294
return vk_error(device, result);
2296
*ppData = ((uint8_t *) mem->bo->map) + offset;
2300
VKAPI_ATTR void VKAPI_CALL
2301
v3dv_UnmapMemory(VkDevice _device,
2302
VkDeviceMemory _memory)
2304
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2305
V3DV_FROM_HANDLE(v3dv_device_memory, mem, _memory);
2310
device_unmap(device, mem);
2313
VKAPI_ATTR VkResult VKAPI_CALL
2314
v3dv_FlushMappedMemoryRanges(VkDevice _device,
2315
uint32_t memoryRangeCount,
2316
const VkMappedMemoryRange *pMemoryRanges)
2321
VKAPI_ATTR VkResult VKAPI_CALL
2322
v3dv_InvalidateMappedMemoryRanges(VkDevice _device,
2323
uint32_t memoryRangeCount,
2324
const VkMappedMemoryRange *pMemoryRanges)
2329
VKAPI_ATTR void VKAPI_CALL
2330
v3dv_GetImageMemoryRequirements2(VkDevice device,
2331
const VkImageMemoryRequirementsInfo2 *pInfo,
2332
VkMemoryRequirements2 *pMemoryRequirements)
2334
V3DV_FROM_HANDLE(v3dv_image, image, pInfo->image);
2336
pMemoryRequirements->memoryRequirements = (VkMemoryRequirements) {
2337
.memoryTypeBits = 0x1,
2338
.alignment = image->alignment,
2342
vk_foreach_struct(ext, pMemoryRequirements->pNext) {
2343
switch (ext->sType) {
2344
case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS: {
2345
VkMemoryDedicatedRequirements *req =
2346
(VkMemoryDedicatedRequirements *) ext;
2347
req->requiresDedicatedAllocation = image->vk.external_handle_types != 0;
2348
req->prefersDedicatedAllocation = image->vk.external_handle_types != 0;
2352
v3dv_debug_ignored_stype(ext->sType);
2359
bind_image_memory(const VkBindImageMemoryInfo *info)
2361
V3DV_FROM_HANDLE(v3dv_image, image, info->image);
2362
V3DV_FROM_HANDLE(v3dv_device_memory, mem, info->memory);
2366
* "memoryOffset must be an integer multiple of the alignment member of
2367
* the VkMemoryRequirements structure returned from a call to
2368
* vkGetImageMemoryRequirements with image"
2370
assert(info->memoryOffset % image->alignment == 0);
2371
assert(info->memoryOffset < mem->bo->size);
2374
image->mem_offset = info->memoryOffset;
2377
VKAPI_ATTR VkResult VKAPI_CALL
2378
v3dv_BindImageMemory2(VkDevice _device,
2379
uint32_t bindInfoCount,
2380
const VkBindImageMemoryInfo *pBindInfos)
2382
for (uint32_t i = 0; i < bindInfoCount; i++) {
2383
const VkBindImageMemorySwapchainInfoKHR *swapchain_info =
2384
vk_find_struct_const(pBindInfos->pNext,
2385
BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR);
2386
if (swapchain_info && swapchain_info->swapchain) {
2387
struct v3dv_image *swapchain_image =
2388
v3dv_wsi_get_image_from_swapchain(swapchain_info->swapchain,
2389
swapchain_info->imageIndex);
2390
VkBindImageMemoryInfo swapchain_bind = {
2391
.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
2392
.image = pBindInfos[i].image,
2393
.memory = v3dv_device_memory_to_handle(swapchain_image->mem),
2394
.memoryOffset = swapchain_image->mem_offset,
2396
bind_image_memory(&swapchain_bind);
2398
bind_image_memory(&pBindInfos[i]);
2405
VKAPI_ATTR void VKAPI_CALL
2406
v3dv_GetBufferMemoryRequirements2(VkDevice device,
2407
const VkBufferMemoryRequirementsInfo2 *pInfo,
2408
VkMemoryRequirements2 *pMemoryRequirements)
2410
V3DV_FROM_HANDLE(v3dv_buffer, buffer, pInfo->buffer);
2412
pMemoryRequirements->memoryRequirements = (VkMemoryRequirements) {
2413
.memoryTypeBits = 0x1,
2414
.alignment = buffer->alignment,
2415
.size = align64(buffer->size, buffer->alignment),
2418
vk_foreach_struct(ext, pMemoryRequirements->pNext) {
2419
switch (ext->sType) {
2420
case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS: {
2421
VkMemoryDedicatedRequirements *req =
2422
(VkMemoryDedicatedRequirements *) ext;
2423
req->requiresDedicatedAllocation = false;
2424
req->prefersDedicatedAllocation = false;
2428
v3dv_debug_ignored_stype(ext->sType);
2435
bind_buffer_memory(const VkBindBufferMemoryInfo *info)
2437
V3DV_FROM_HANDLE(v3dv_buffer, buffer, info->buffer);
2438
V3DV_FROM_HANDLE(v3dv_device_memory, mem, info->memory);
2442
* "memoryOffset must be an integer multiple of the alignment member of
2443
* the VkMemoryRequirements structure returned from a call to
2444
* vkGetBufferMemoryRequirements with buffer"
2446
assert(info->memoryOffset % buffer->alignment == 0);
2447
assert(info->memoryOffset < mem->bo->size);
2450
buffer->mem_offset = info->memoryOffset;
2454
VKAPI_ATTR VkResult VKAPI_CALL
2455
v3dv_BindBufferMemory2(VkDevice device,
2456
uint32_t bindInfoCount,
2457
const VkBindBufferMemoryInfo *pBindInfos)
2459
for (uint32_t i = 0; i < bindInfoCount; i++)
2460
bind_buffer_memory(&pBindInfos[i]);
2465
VKAPI_ATTR VkResult VKAPI_CALL
2466
v3dv_CreateBuffer(VkDevice _device,
2467
const VkBufferCreateInfo *pCreateInfo,
2468
const VkAllocationCallbacks *pAllocator,
2471
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2472
struct v3dv_buffer *buffer;
2474
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);
2475
assert(pCreateInfo->usage != 0);
2477
/* We don't support any flags for now */
2478
assert(pCreateInfo->flags == 0);
2480
buffer = vk_object_zalloc(&device->vk, pAllocator, sizeof(*buffer),
2481
VK_OBJECT_TYPE_BUFFER);
2483
return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2485
buffer->size = pCreateInfo->size;
2486
buffer->usage = pCreateInfo->usage;
2487
buffer->alignment = V3D_NON_COHERENT_ATOM_SIZE;
2489
/* Limit allocations to 32-bit */
2490
const VkDeviceSize aligned_size = align64(buffer->size, buffer->alignment);
2491
if (aligned_size > UINT32_MAX || aligned_size < buffer->size) {
2492
vk_free(&device->vk.alloc, buffer);
2493
return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2496
*pBuffer = v3dv_buffer_to_handle(buffer);
2501
VKAPI_ATTR void VKAPI_CALL
2502
v3dv_DestroyBuffer(VkDevice _device,
2504
const VkAllocationCallbacks *pAllocator)
2506
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2507
V3DV_FROM_HANDLE(v3dv_buffer, buffer, _buffer);
2512
vk_object_free(&device->vk, pAllocator, buffer);
2515
VKAPI_ATTR VkResult VKAPI_CALL
2516
v3dv_CreateFramebuffer(VkDevice _device,
2517
const VkFramebufferCreateInfo *pCreateInfo,
2518
const VkAllocationCallbacks *pAllocator,
2519
VkFramebuffer *pFramebuffer)
2521
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2522
struct v3dv_framebuffer *framebuffer;
2524
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);
2526
size_t size = sizeof(*framebuffer) +
2527
sizeof(struct v3dv_image_view *) * pCreateInfo->attachmentCount;
2528
framebuffer = vk_object_zalloc(&device->vk, pAllocator, size,
2529
VK_OBJECT_TYPE_FRAMEBUFFER);
2530
if (framebuffer == NULL)
2531
return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2533
framebuffer->width = pCreateInfo->width;
2534
framebuffer->height = pCreateInfo->height;
2535
framebuffer->layers = pCreateInfo->layers;
2536
framebuffer->has_edge_padding = true;
2538
const VkFramebufferAttachmentsCreateInfo *imageless =
2539
vk_find_struct_const(pCreateInfo->pNext,
2540
FRAMEBUFFER_ATTACHMENTS_CREATE_INFO);
2542
framebuffer->attachment_count = pCreateInfo->attachmentCount;
2543
framebuffer->color_attachment_count = 0;
2544
for (uint32_t i = 0; i < framebuffer->attachment_count; i++) {
2546
framebuffer->attachments[i] =
2547
v3dv_image_view_from_handle(pCreateInfo->pAttachments[i]);
2548
if (framebuffer->attachments[i]->vk.aspects & VK_IMAGE_ASPECT_COLOR_BIT)
2549
framebuffer->color_attachment_count++;
2551
assert(i < imageless->attachmentImageInfoCount);
2552
if (imageless->pAttachmentImageInfos[i].usage &
2553
VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) {
2554
framebuffer->color_attachment_count++;
2559
*pFramebuffer = v3dv_framebuffer_to_handle(framebuffer);
2564
VKAPI_ATTR void VKAPI_CALL
2565
v3dv_DestroyFramebuffer(VkDevice _device,
2567
const VkAllocationCallbacks *pAllocator)
2569
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2570
V3DV_FROM_HANDLE(v3dv_framebuffer, fb, _fb);
2575
vk_object_free(&device->vk, pAllocator, fb);
2578
VKAPI_ATTR VkResult VKAPI_CALL
2579
v3dv_GetMemoryFdPropertiesKHR(VkDevice _device,
2580
VkExternalMemoryHandleTypeFlagBits handleType,
2582
VkMemoryFdPropertiesKHR *pMemoryFdProperties)
2584
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2585
struct v3dv_physical_device *pdevice = &device->instance->physicalDevice;
2587
switch (handleType) {
2588
case VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT:
2589
pMemoryFdProperties->memoryTypeBits =
2590
(1 << pdevice->memory.memoryTypeCount) - 1;
2593
return vk_error(device, VK_ERROR_INVALID_EXTERNAL_HANDLE);
2597
VKAPI_ATTR VkResult VKAPI_CALL
2598
v3dv_GetMemoryFdKHR(VkDevice _device,
2599
const VkMemoryGetFdInfoKHR *pGetFdInfo,
2602
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2603
V3DV_FROM_HANDLE(v3dv_device_memory, mem, pGetFdInfo->memory);
2605
assert(pGetFdInfo->sType == VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR);
2606
assert(pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
2607
pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
2610
ret = drmPrimeHandleToFD(device->pdevice->render_fd,
2614
return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2621
VKAPI_ATTR VkResult VKAPI_CALL
2622
v3dv_CreateEvent(VkDevice _device,
2623
const VkEventCreateInfo *pCreateInfo,
2624
const VkAllocationCallbacks *pAllocator,
2627
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2628
struct v3dv_event *event =
2629
vk_object_zalloc(&device->vk, pAllocator, sizeof(*event),
2630
VK_OBJECT_TYPE_EVENT);
2632
return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2634
/* Events are created in the unsignaled state */
2635
event->state = false;
2636
*pEvent = v3dv_event_to_handle(event);
2641
VKAPI_ATTR void VKAPI_CALL
2642
v3dv_DestroyEvent(VkDevice _device,
2644
const VkAllocationCallbacks *pAllocator)
2646
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2647
V3DV_FROM_HANDLE(v3dv_event, event, _event);
2652
vk_object_free(&device->vk, pAllocator, event);
2655
VKAPI_ATTR VkResult VKAPI_CALL
2656
v3dv_GetEventStatus(VkDevice _device, VkEvent _event)
2658
V3DV_FROM_HANDLE(v3dv_event, event, _event);
2659
return p_atomic_read(&event->state) ? VK_EVENT_SET : VK_EVENT_RESET;
2662
VKAPI_ATTR VkResult VKAPI_CALL
2663
v3dv_SetEvent(VkDevice _device, VkEvent _event)
2665
V3DV_FROM_HANDLE(v3dv_event, event, _event);
2666
p_atomic_set(&event->state, 1);
2670
VKAPI_ATTR VkResult VKAPI_CALL
2671
v3dv_ResetEvent(VkDevice _device, VkEvent _event)
2673
V3DV_FROM_HANDLE(v3dv_event, event, _event);
2674
p_atomic_set(&event->state, 0);
2678
VKAPI_ATTR VkResult VKAPI_CALL
2679
v3dv_CreateSampler(VkDevice _device,
2680
const VkSamplerCreateInfo *pCreateInfo,
2681
const VkAllocationCallbacks *pAllocator,
2682
VkSampler *pSampler)
2684
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2685
struct v3dv_sampler *sampler;
2687
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);
2689
sampler = vk_object_zalloc(&device->vk, pAllocator, sizeof(*sampler),
2690
VK_OBJECT_TYPE_SAMPLER);
2692
return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2694
sampler->compare_enable = pCreateInfo->compareEnable;
2695
sampler->unnormalized_coordinates = pCreateInfo->unnormalizedCoordinates;
2697
const VkSamplerCustomBorderColorCreateInfoEXT *bc_info =
2698
vk_find_struct_const(pCreateInfo->pNext,
2699
SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT);
2701
v3dv_X(device, pack_sampler_state)(sampler, pCreateInfo, bc_info);
2703
*pSampler = v3dv_sampler_to_handle(sampler);
2708
VKAPI_ATTR void VKAPI_CALL
2709
v3dv_DestroySampler(VkDevice _device,
2711
const VkAllocationCallbacks *pAllocator)
2713
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2714
V3DV_FROM_HANDLE(v3dv_sampler, sampler, _sampler);
2719
vk_object_free(&device->vk, pAllocator, sampler);
2722
VKAPI_ATTR void VKAPI_CALL
2723
v3dv_GetDeviceMemoryCommitment(VkDevice device,
2724
VkDeviceMemory memory,
2725
VkDeviceSize *pCommittedMemoryInBytes)
2727
*pCommittedMemoryInBytes = 0;
2730
VKAPI_ATTR void VKAPI_CALL
2731
v3dv_GetImageSparseMemoryRequirements(
2734
uint32_t *pSparseMemoryRequirementCount,
2735
VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
2737
*pSparseMemoryRequirementCount = 0;
2740
VKAPI_ATTR void VKAPI_CALL
2741
v3dv_GetImageSparseMemoryRequirements2(
2743
const VkImageSparseMemoryRequirementsInfo2 *pInfo,
2744
uint32_t *pSparseMemoryRequirementCount,
2745
VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
2747
*pSparseMemoryRequirementCount = 0;
2750
/* vk_icd.h does not declare this function, so we declare it here to
2751
* suppress Wmissing-prototypes.
2753
PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
2754
vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion);
2756
PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
2757
vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion)
2759
/* For the full details on loader interface versioning, see
2760
* <https://github.com/KhronosGroup/Vulkan-LoaderAndValidationLayers/blob/master/loader/LoaderAndLayerInterface.md>.
2761
* What follows is a condensed summary, to help you navigate the large and
2762
* confusing official doc.
2764
* - Loader interface v0 is incompatible with later versions. We don't
2767
* - In loader interface v1:
2768
* - The first ICD entrypoint called by the loader is
2769
* vk_icdGetInstanceProcAddr(). The ICD must statically expose this
2771
* - The ICD must statically expose no other Vulkan symbol unless it is
2772
* linked with -Bsymbolic.
2773
* - Each dispatchable Vulkan handle created by the ICD must be
2774
* a pointer to a struct whose first member is VK_LOADER_DATA. The
2775
* ICD must initialize VK_LOADER_DATA.loadMagic to ICD_LOADER_MAGIC.
2776
* - The loader implements vkCreate{PLATFORM}SurfaceKHR() and
2777
* vkDestroySurfaceKHR(). The ICD must be capable of working with
2778
* such loader-managed surfaces.
2780
* - Loader interface v2 differs from v1 in:
2781
* - The first ICD entrypoint called by the loader is
2782
* vk_icdNegotiateLoaderICDInterfaceVersion(). The ICD must
2783
* statically expose this entrypoint.
2785
* - Loader interface v3 differs from v2 in:
2786
* - The ICD must implement vkCreate{PLATFORM}SurfaceKHR(),
2787
* vkDestroySurfaceKHR(), and other API which uses VKSurfaceKHR,
2788
* because the loader no longer does so.
2790
* - Loader interface v4 differs from v3 in:
2791
* - The ICD must implement vk_icdGetPhysicalDeviceProcAddr().
2793
* - Loader interface v5 differs from v4 in:
2794
* - The ICD must support Vulkan API version 1.1 and must not return
2795
* VK_ERROR_INCOMPATIBLE_DRIVER from vkCreateInstance() unless a
2796
* Vulkan Loader with interface v4 or smaller is being used and the
2797
* application provides an API version that is greater than 1.0.
2799
*pSupportedVersion = MIN2(*pSupportedVersion, 5u);