/*
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: MIT
 *
 * based in part on anv and radv which are:
 * Copyright © 2015 Intel Corporation
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 */
#include "vn_pipeline.h"

#include <string.h>

#include "venus-protocol/vn_protocol_driver_pipeline.h"
#include "venus-protocol/vn_protocol_driver_pipeline_cache.h"
#include "venus-protocol/vn_protocol_driver_pipeline_layout.h"
#include "venus-protocol/vn_protocol_driver_shader_module.h"

#include "vn_device.h"
#include "vn_physical_device.h"
/* shader module commands */
24
vn_CreateShaderModule(VkDevice device,
25
const VkShaderModuleCreateInfo *pCreateInfo,
26
const VkAllocationCallbacks *pAllocator,
27
VkShaderModule *pShaderModule)
29
struct vn_device *dev = vn_device_from_handle(device);
30
const VkAllocationCallbacks *alloc =
31
pAllocator ? pAllocator : &dev->base.base.alloc;
33
struct vn_shader_module *mod =
34
vk_zalloc(alloc, sizeof(*mod), VN_DEFAULT_ALIGN,
35
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
37
return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
39
vn_object_base_init(&mod->base, VK_OBJECT_TYPE_SHADER_MODULE, &dev->base);
41
VkShaderModule mod_handle = vn_shader_module_to_handle(mod);
42
vn_async_vkCreateShaderModule(dev->instance, device, pCreateInfo, NULL,
45
*pShaderModule = mod_handle;
51
vn_DestroyShaderModule(VkDevice device,
52
VkShaderModule shaderModule,
53
const VkAllocationCallbacks *pAllocator)
55
struct vn_device *dev = vn_device_from_handle(device);
56
struct vn_shader_module *mod = vn_shader_module_from_handle(shaderModule);
57
const VkAllocationCallbacks *alloc =
58
pAllocator ? pAllocator : &dev->base.base.alloc;
63
vn_async_vkDestroyShaderModule(dev->instance, device, shaderModule, NULL);
65
vn_object_base_fini(&mod->base);
69
/* pipeline layout commands */
72
vn_CreatePipelineLayout(VkDevice device,
73
const VkPipelineLayoutCreateInfo *pCreateInfo,
74
const VkAllocationCallbacks *pAllocator,
75
VkPipelineLayout *pPipelineLayout)
77
struct vn_device *dev = vn_device_from_handle(device);
78
const VkAllocationCallbacks *alloc =
79
pAllocator ? pAllocator : &dev->base.base.alloc;
81
struct vn_pipeline_layout *layout =
82
vk_zalloc(alloc, sizeof(*layout), VN_DEFAULT_ALIGN,
83
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
85
return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
87
vn_object_base_init(&layout->base, VK_OBJECT_TYPE_PIPELINE_LAYOUT,
90
VkPipelineLayout layout_handle = vn_pipeline_layout_to_handle(layout);
91
vn_async_vkCreatePipelineLayout(dev->instance, device, pCreateInfo, NULL,
94
*pPipelineLayout = layout_handle;
100
vn_DestroyPipelineLayout(VkDevice device,
101
VkPipelineLayout pipelineLayout,
102
const VkAllocationCallbacks *pAllocator)
104
struct vn_device *dev = vn_device_from_handle(device);
105
struct vn_pipeline_layout *layout =
106
vn_pipeline_layout_from_handle(pipelineLayout);
107
const VkAllocationCallbacks *alloc =
108
pAllocator ? pAllocator : &dev->base.base.alloc;
113
vn_async_vkDestroyPipelineLayout(dev->instance, device, pipelineLayout,
116
vn_object_base_fini(&layout->base);
117
vk_free(alloc, layout);
120
/* pipeline cache commands */
123
vn_CreatePipelineCache(VkDevice device,
124
const VkPipelineCacheCreateInfo *pCreateInfo,
125
const VkAllocationCallbacks *pAllocator,
126
VkPipelineCache *pPipelineCache)
128
struct vn_device *dev = vn_device_from_handle(device);
129
const VkAllocationCallbacks *alloc =
130
pAllocator ? pAllocator : &dev->base.base.alloc;
132
struct vn_pipeline_cache *cache =
133
vk_zalloc(alloc, sizeof(*cache), VN_DEFAULT_ALIGN,
134
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
136
return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
138
vn_object_base_init(&cache->base, VK_OBJECT_TYPE_PIPELINE_CACHE,
141
VkPipelineCacheCreateInfo local_create_info;
142
if (pCreateInfo->initialDataSize) {
143
const struct vk_pipeline_cache_header *header =
144
pCreateInfo->pInitialData;
146
local_create_info = *pCreateInfo;
147
local_create_info.initialDataSize -= header->header_size;
148
local_create_info.pInitialData += header->header_size;
149
pCreateInfo = &local_create_info;
152
VkPipelineCache cache_handle = vn_pipeline_cache_to_handle(cache);
153
vn_async_vkCreatePipelineCache(dev->instance, device, pCreateInfo, NULL,
156
*pPipelineCache = cache_handle;
162
vn_DestroyPipelineCache(VkDevice device,
163
VkPipelineCache pipelineCache,
164
const VkAllocationCallbacks *pAllocator)
166
struct vn_device *dev = vn_device_from_handle(device);
167
struct vn_pipeline_cache *cache =
168
vn_pipeline_cache_from_handle(pipelineCache);
169
const VkAllocationCallbacks *alloc =
170
pAllocator ? pAllocator : &dev->base.base.alloc;
175
vn_async_vkDestroyPipelineCache(dev->instance, device, pipelineCache,
178
vn_object_base_fini(&cache->base);
179
vk_free(alloc, cache);
183
vn_GetPipelineCacheData(VkDevice device,
184
VkPipelineCache pipelineCache,
188
struct vn_device *dev = vn_device_from_handle(device);
189
struct vn_physical_device *physical_dev = dev->physical_device;
191
struct vk_pipeline_cache_header *header = pData;
194
result = vn_call_vkGetPipelineCacheData(dev->instance, device,
195
pipelineCache, pDataSize, NULL);
196
if (result != VK_SUCCESS)
197
return vn_error(dev->instance, result);
199
*pDataSize += sizeof(*header);
203
if (*pDataSize <= sizeof(*header)) {
205
return VK_INCOMPLETE;
208
const VkPhysicalDeviceProperties *props =
209
&physical_dev->properties.vulkan_1_0;
210
header->header_size = sizeof(*header);
211
header->header_version = VK_PIPELINE_CACHE_HEADER_VERSION_ONE;
212
header->vendor_id = props->vendorID;
213
header->device_id = props->deviceID;
214
memcpy(header->uuid, props->pipelineCacheUUID, VK_UUID_SIZE);
216
*pDataSize -= header->header_size;
218
vn_call_vkGetPipelineCacheData(dev->instance, device, pipelineCache,
219
pDataSize, pData + header->header_size);
220
if (result < VK_SUCCESS)
221
return vn_error(dev->instance, result);
223
*pDataSize += header->header_size;
229
vn_MergePipelineCaches(VkDevice device,
230
VkPipelineCache dstCache,
231
uint32_t srcCacheCount,
232
const VkPipelineCache *pSrcCaches)
234
struct vn_device *dev = vn_device_from_handle(device);
236
vn_async_vkMergePipelineCaches(dev->instance, device, dstCache,
237
srcCacheCount, pSrcCaches);
242
/* pipeline commands */
244
static const VkGraphicsPipelineCreateInfo *
245
vn_fix_graphics_pipeline_create_info(
246
struct vn_device *dev,
247
uint32_t create_info_count,
248
const VkGraphicsPipelineCreateInfo *create_infos,
249
const VkAllocationCallbacks *alloc,
250
VkGraphicsPipelineCreateInfo **out)
252
VkGraphicsPipelineCreateInfo *infos = NULL;
253
bool has_ignored_state = false;
255
for (uint32_t i = 0; i < create_info_count; i++) {
256
if (create_infos[i].pRasterizationState->rasterizerDiscardEnable ==
260
if (create_infos[i].pViewportState ||
261
create_infos[i].pMultisampleState ||
262
create_infos[i].pDepthStencilState ||
263
create_infos[i].pColorBlendState) {
264
has_ignored_state = true;
269
if (!has_ignored_state)
272
infos = vk_alloc(alloc, sizeof(*infos) * create_info_count,
273
VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
277
memcpy(infos, create_infos, sizeof(*infos) * create_info_count);
279
for (uint32_t i = 0; i < create_info_count; i++) {
280
if (infos[i].pRasterizationState->rasterizerDiscardEnable == VK_FALSE)
283
infos[i].pViewportState = NULL;
284
infos[i].pMultisampleState = NULL;
285
infos[i].pDepthStencilState = NULL;
286
infos[i].pColorBlendState = NULL;
294
vn_CreateGraphicsPipelines(VkDevice device,
295
VkPipelineCache pipelineCache,
296
uint32_t createInfoCount,
297
const VkGraphicsPipelineCreateInfo *pCreateInfos,
298
const VkAllocationCallbacks *pAllocator,
299
VkPipeline *pPipelines)
301
struct vn_device *dev = vn_device_from_handle(device);
302
const VkAllocationCallbacks *alloc =
303
pAllocator ? pAllocator : &dev->base.base.alloc;
304
VkGraphicsPipelineCreateInfo *local_infos = NULL;
306
pCreateInfos = vn_fix_graphics_pipeline_create_info(
307
dev, createInfoCount, pCreateInfos, alloc, &local_infos);
309
return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
311
for (uint32_t i = 0; i < createInfoCount; i++) {
312
struct vn_pipeline *pipeline =
313
vk_zalloc(alloc, sizeof(*pipeline), VN_DEFAULT_ALIGN,
314
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
316
for (uint32_t j = 0; j < i; j++)
317
vk_free(alloc, vn_pipeline_from_handle(pPipelines[j]));
320
vk_free(alloc, local_infos);
322
memset(pPipelines, 0, sizeof(*pPipelines) * createInfoCount);
323
return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
326
vn_object_base_init(&pipeline->base, VK_OBJECT_TYPE_PIPELINE,
329
VkPipeline pipeline_handle = vn_pipeline_to_handle(pipeline);
330
pPipelines[i] = pipeline_handle;
333
vn_async_vkCreateGraphicsPipelines(dev->instance, device, pipelineCache,
334
createInfoCount, pCreateInfos, NULL,
338
vk_free(alloc, local_infos);
344
vn_CreateComputePipelines(VkDevice device,
345
VkPipelineCache pipelineCache,
346
uint32_t createInfoCount,
347
const VkComputePipelineCreateInfo *pCreateInfos,
348
const VkAllocationCallbacks *pAllocator,
349
VkPipeline *pPipelines)
351
struct vn_device *dev = vn_device_from_handle(device);
352
const VkAllocationCallbacks *alloc =
353
pAllocator ? pAllocator : &dev->base.base.alloc;
355
for (uint32_t i = 0; i < createInfoCount; i++) {
356
struct vn_pipeline *pipeline =
357
vk_zalloc(alloc, sizeof(*pipeline), VN_DEFAULT_ALIGN,
358
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
360
for (uint32_t j = 0; j < i; j++)
361
vk_free(alloc, vn_pipeline_from_handle(pPipelines[j]));
362
memset(pPipelines, 0, sizeof(*pPipelines) * createInfoCount);
363
return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
366
vn_object_base_init(&pipeline->base, VK_OBJECT_TYPE_PIPELINE,
369
VkPipeline pipeline_handle = vn_pipeline_to_handle(pipeline);
370
pPipelines[i] = pipeline_handle;
373
vn_async_vkCreateComputePipelines(dev->instance, device, pipelineCache,
374
createInfoCount, pCreateInfos, NULL,
381
vn_DestroyPipeline(VkDevice device,
382
VkPipeline _pipeline,
383
const VkAllocationCallbacks *pAllocator)
385
struct vn_device *dev = vn_device_from_handle(device);
386
struct vn_pipeline *pipeline = vn_pipeline_from_handle(_pipeline);
387
const VkAllocationCallbacks *alloc =
388
pAllocator ? pAllocator : &dev->base.base.alloc;
393
vn_async_vkDestroyPipeline(dev->instance, device, _pipeline, NULL);
395
vn_object_base_fini(&pipeline->base);
396
vk_free(alloc, pipeline);