  * Descriptor set layouts.

+/* RENDER_SURFACE_STATE is a bit smaller (48b) but since it is aligned to 64
+ * and we can't put anything else there we use 64b.
+ */
+#define ANV_SURFACE_STATE_SIZE (64)
+
+static unsigned
+anv_descriptor_data_alignment(enum anv_descriptor_data data)
+{
+   unsigned alignment = 1;
+
+   if (data & (ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE |
+               ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE |
+               ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE))
+      alignment = MAX2(alignment, 8);
+
+   if (data & (ANV_DESCRIPTOR_SURFACE |
+               ANV_DESCRIPTOR_SURFACE_SAMPLER))
+      alignment = MAX2(alignment, ANV_SURFACE_STATE_SIZE);
+
+   if (data & ANV_DESCRIPTOR_SAMPLER)
+      alignment = MAX2(alignment, ANV_SAMPLER_STATE_SIZE);
+
+   if (data & ANV_DESCRIPTOR_INLINE_UNIFORM)
+      alignment = MAX2(alignment, ANV_UBO_ALIGNMENT);
+
+   return alignment;
+}

 static enum anv_descriptor_data
-anv_descriptor_data_for_type(const struct anv_physical_device *device,
-                             VkDescriptorType type)
+anv_indirect_descriptor_data_for_type(VkDescriptorType type)
 {
    enum anv_descriptor_data data = 0;

    switch (type) {
    case VK_DESCRIPTOR_TYPE_SAMPLER:
-      data = ANV_DESCRIPTOR_SAMPLER_STATE |
-             ANV_DESCRIPTOR_SAMPLED_IMAGE;
+      data = ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
+             ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE;
       break;

    case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
-      data = ANV_DESCRIPTOR_SURFACE_STATE |
-             ANV_DESCRIPTOR_SAMPLER_STATE |
-             ANV_DESCRIPTOR_SAMPLED_IMAGE;
+      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
+             ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
+             ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE;
       break;

    case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
    case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
-      data = ANV_DESCRIPTOR_SURFACE_STATE |
-             ANV_DESCRIPTOR_SAMPLED_IMAGE;
+      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
+             ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE;
       break;

    case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
-      data = ANV_DESCRIPTOR_SURFACE_STATE;
+      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
+             ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE;
       break;

    case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
    case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
-      data = ANV_DESCRIPTOR_SURFACE_STATE |
-             ANV_DESCRIPTOR_STORAGE_IMAGE;
+      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
+             ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE;
       break;

    case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
    case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
-      data = ANV_DESCRIPTOR_SURFACE_STATE |
+      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
              ANV_DESCRIPTOR_BUFFER_VIEW;
       break;

    case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
    case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
-      data = ANV_DESCRIPTOR_SURFACE_STATE;
+      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE;
       break;

    case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
...
        type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
        type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
        type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)
-      data |= ANV_DESCRIPTOR_ADDRESS_RANGE;
+      data |= ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE;

+static enum anv_descriptor_data
+anv_direct_descriptor_data_for_type(VkDescriptorType type)
+{
+   enum anv_descriptor_data data = 0;
+
+   switch (type) {
+   case VK_DESCRIPTOR_TYPE_SAMPLER:
+      data = ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
+             ANV_DESCRIPTOR_SAMPLER;
+      break;
+
+   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
+             ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
+             ANV_DESCRIPTOR_SURFACE_SAMPLER;
+      break;
+
+   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
+             ANV_DESCRIPTOR_SURFACE;
+      break;
+
+   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
+             ANV_DESCRIPTOR_SURFACE;
+      break;
+
+   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
+             ANV_DESCRIPTOR_SURFACE;
+      break;
+
+   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
+             ANV_DESCRIPTOR_SURFACE;
+      break;
+
+   case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
+      data = ANV_DESCRIPTOR_INLINE_UNIFORM;
+      break;
+
+   case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
+      data = ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE;
+      break;
+
+   default:
+      unreachable("Unsupported descriptor type");
+   }
+
+   return data;
+}

+static enum anv_descriptor_data
+anv_descriptor_data_for_type(const struct anv_physical_device *device,
+                             VkDescriptorType type)
+{
+   if (device->indirect_descriptors)
+      return anv_indirect_descriptor_data_for_type(type);
+
+   return anv_direct_descriptor_data_for_type(type);
+}

 static unsigned
 anv_descriptor_data_size(enum anv_descriptor_data data)
 {
    unsigned size = 0;

-   if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE)
+   if (data & ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE)
       size += sizeof(struct anv_sampled_image_descriptor);

-   if (data & ANV_DESCRIPTOR_STORAGE_IMAGE)
+   if (data & ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE)
       size += sizeof(struct anv_storage_image_descriptor);

-   if (data & ANV_DESCRIPTOR_ADDRESS_RANGE)
+   if (data & ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE)
       size += sizeof(struct anv_address_range_descriptor);

+   if (data & ANV_DESCRIPTOR_SURFACE)
+      size += ANV_SURFACE_STATE_SIZE;
+
+   if (data & ANV_DESCRIPTOR_SAMPLER)
+      size += ANV_SAMPLER_STATE_SIZE;
+
+   if (data & ANV_DESCRIPTOR_SURFACE_SAMPLER) {
+      size += ALIGN(ANV_SURFACE_STATE_SIZE + ANV_SAMPLER_STATE_SIZE,
+                    ANV_SURFACE_STATE_SIZE);
+   }
+
+   return size;
+}

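To make the arithmetic concrete, here is a small worked sketch (not part of the change itself): for a direct-mode combined image/sampler binding, the flag combination produced by anv_direct_descriptor_data_for_type() above yields a descriptor that is sized and aligned in units of the 64-byte RENDER_SURFACE_STATE. ANV_SAMPLER_STATE_SIZE is defined elsewhere in the driver; the byte values noted below assume it is at most 64.

/* Sketch only: size/alignment of one direct combined image+sampler descriptor. */
enum anv_descriptor_data data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
                                ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
                                ANV_DESCRIPTOR_SURFACE_SAMPLER;

unsigned alignment = anv_descriptor_data_alignment(data); /* 64 (ANV_SURFACE_STATE_SIZE) */
unsigned size      = anv_descriptor_data_size(data);      /* ALIGN(64 + sampler state, 64) */
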
+      set_layout->binding[b].descriptor_data_size =
+         anv_descriptor_data_size(set_layout->binding[b].data);
       set_layout->binding[b].descriptor_stride =
          binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
          anv_descriptor_size_for_mutable_type(device->physical, mutable_info, b) :
          anv_descriptor_size(&set_layout->binding[b]);

-      if (binding->descriptorType ==
-          VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
-         /* Inline uniform blocks are specified to use the descriptor array
-          * size as the size in bytes of the block.
-          */
-         descriptor_buffer_size = align(descriptor_buffer_size,
-                                        ANV_UBO_ALIGNMENT);
+      descriptor_buffer_size =
+         align(descriptor_buffer_size,
+               anv_descriptor_data_alignment(set_layout->binding[b].data));
+
+      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
          set_layout->binding[b].descriptor_offset = descriptor_buffer_size;
          descriptor_buffer_size += binding->descriptorCount;

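With each binding's descriptor_offset aligned as above and its descriptor_stride fixed at layout-creation time, locating one element of a binding inside the set's descriptor buffer is plain pointer arithmetic. The write helpers further down use exactly this expression; it is repeated here as a standalone sketch:

/* Sketch: CPU address of element `element` of a binding inside the set's
 * host-mapped descriptor memory (same expression as in the write helpers). */
void *desc_map = set->desc_mem.map +
                 bind_layout->descriptor_offset +
                 element * bind_layout->descriptor_stride;
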
  * just multiple descriptor set layouts pasted together
  */

+void
+anv_pipeline_sets_layout_init(struct anv_pipeline_sets_layout *layout,
+                              struct anv_device *device,
+                              bool independent_sets)
+{
+   memset(layout, 0, sizeof(*layout));
+
+   layout->device = device;
+   layout->independent_sets = independent_sets;
+}
+
+void
+anv_pipeline_sets_layout_add(struct anv_pipeline_sets_layout *layout,
+                             uint32_t set_idx,
+                             struct anv_descriptor_set_layout *set_layout)
+{
+   if (layout->set[set_idx].layout)
+      return;
+
+   /* Workaround CTS : Internal CTS issue 3584 */
+   if (layout->independent_sets && anv_descriptor_set_layout_empty(set_layout))
+      return;
+
+   if (layout->type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_UNKNOWN)
+      layout->type = set_layout->type;
+   else
+      assert(layout->type == set_layout->type);
+
+   layout->num_sets = MAX2(set_idx + 1, layout->num_sets);
+
+   layout->set[set_idx].layout =
+      anv_descriptor_set_layout_ref(set_layout);
+
+   layout->set[set_idx].dynamic_offset_start = layout->num_dynamic_buffers;
+   layout->num_dynamic_buffers += set_layout->dynamic_offset_count;
+
+   assert(layout->num_dynamic_buffers < MAX_DYNAMIC_BUFFERS);
+}
+
+void
+anv_pipeline_sets_layout_hash(struct anv_pipeline_sets_layout *layout)
+{
+   struct mesa_sha1 ctx;
+   _mesa_sha1_init(&ctx);
+   for (unsigned s = 0; s < layout->num_sets; s++) {
+      if (!layout->set[s].layout)
+         continue;
+      sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
+      _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
+                        sizeof(layout->set[s].dynamic_offset_start));
+   }
+   _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
+   _mesa_sha1_final(&ctx, layout->sha1);
+}
+
+void
+anv_pipeline_sets_layout_fini(struct anv_pipeline_sets_layout *layout)
+{
+   for (unsigned s = 0; s < layout->num_sets; s++) {
+      if (!layout->set[s].layout)
+         continue;
+
+      anv_descriptor_set_layout_unref(layout->device, layout->set[s].layout);
+   }
+}
+
+void
+anv_pipeline_sets_layout_print(const struct anv_pipeline_sets_layout *layout)
+{
+   fprintf(stderr, "layout: dyn_count=%u sets=%u ind=%u\n",
+           layout->num_dynamic_buffers,
+           layout->num_sets,
+           layout->independent_sets);
+   for (unsigned s = 0; s < layout->num_sets; s++) {
+      if (!layout->set[s].layout)
+         continue;
+
+      fprintf(stderr, "  set%i: dyn_start=%u flags=0x%x\n",
+              s, layout->set[s].dynamic_offset_start, layout->set[s].layout->flags);
+   }
+}

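For reference, the expected lifecycle of these helpers, condensed from the anv_CreatePipelineLayout / anv_DestroyPipelineLayout hunks below (a sketch only; device, set_layouts, set_layout_count and independent_sets stand in for whatever the caller has at hand):

/* Sketch of the anv_pipeline_sets_layout lifecycle. */
struct anv_pipeline_sets_layout sets_layout;

anv_pipeline_sets_layout_init(&sets_layout, device, independent_sets);

for (uint32_t s = 0; s < set_layout_count; s++) {
   if (set_layouts[s] == NULL)   /* allowed when graphicsPipelineLibrary is enabled */
      continue;
   anv_pipeline_sets_layout_add(&sets_layout, s, set_layouts[s]);
}

anv_pipeline_sets_layout_hash(&sets_layout);   /* fills sets_layout.sha1 */

/* ... use the layout ... */

anv_pipeline_sets_layout_fini(&sets_layout);   /* drops the set layout references */
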
 VkResult anv_CreatePipelineLayout(
     VkDevice _device,
     const VkPipelineLayoutCreateInfo* pCreateInfo,
...
    if (layout == NULL)
       return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

-   layout->num_sets = pCreateInfo->setLayoutCount;
-
-   unsigned dynamic_offset_count = 0;
+   anv_pipeline_sets_layout_init(&layout->sets_layout, device,
+                                 pCreateInfo->flags & VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT);

    for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
       ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,
                       pCreateInfo->pSetLayouts[set]);
-      layout->set[set].layout = set_layout;
-      anv_descriptor_set_layout_ref(set_layout);
-
-      layout->set[set].dynamic_offset_start = dynamic_offset_count;
-      dynamic_offset_count += set_layout->dynamic_offset_count;
-   }
-   assert(dynamic_offset_count < MAX_DYNAMIC_BUFFERS);
-
-   struct mesa_sha1 ctx;
-   _mesa_sha1_init(&ctx);
-   for (unsigned s = 0; s < layout->num_sets; s++) {
-      sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
-      _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
-                        sizeof(layout->set[s].dynamic_offset_start));
-   }
-   _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
-   _mesa_sha1_final(&ctx, layout->sha1);
+
+      /* VUID-VkPipelineLayoutCreateInfo-graphicsPipelineLibrary-06753
+       *
+       *    "If graphicsPipelineLibrary is not enabled, elements of
+       *     pSetLayouts must be valid VkDescriptorSetLayout objects"
+       *
+       * As a result of supporting graphicsPipelineLibrary, we need to allow
+       * null descriptor set layouts.
+       */
+      if (set_layout == NULL)
+         continue;
+
+      anv_pipeline_sets_layout_add(&layout->sets_layout, set, set_layout);
+   }
+
+   anv_pipeline_sets_layout_hash(&layout->sets_layout);

    *pPipelineLayout = anv_pipeline_layout_to_handle(layout);

     const VkAllocationCallbacks* pAllocator)
 {
    ANV_FROM_HANDLE(anv_device, device, _device);
-   ANV_FROM_HANDLE(anv_pipeline_layout, pipeline_layout, _pipelineLayout);
+   ANV_FROM_HANDLE(anv_pipeline_layout, layout, _pipelineLayout);

-   if (!pipeline_layout)
-      return;
+   if (!layout)
+      return;

-   for (uint32_t i = 0; i < pipeline_layout->num_sets; i++)
-      anv_descriptor_set_layout_unref(device, pipeline_layout->set[i].layout);
+   anv_pipeline_sets_layout_fini(&layout->sets_layout);

-   vk_object_free(&device->vk, pAllocator, pipeline_layout);
+   vk_object_free(&device->vk, pAllocator, layout);

-   /* Allocate surface states for real descriptor sets. For host only sets, we
-    * just store the surface state data in malloc memory.
+   /* Allocate surface states for real descriptor sets if we're using indirect
+    * descriptors. For host only sets, we just store the surface state data in
+    * malloc memory.
     */
-   if (!pool->host_only) {
-      for (uint32_t b = 0; b < set->buffer_view_count; b++) {
-         set->buffer_views[b].surface_state =
-            anv_descriptor_pool_alloc_state(pool);
-      }
-   } else {
-      void *host_surface_states =
-         set->buffer_views + set->buffer_view_count;
-      memset(host_surface_states, 0,
-             set->buffer_view_count * ANV_SURFACE_STATE_SIZE);
-      for (uint32_t b = 0; b < set->buffer_view_count; b++) {
-         set->buffer_views[b].surface_state = (struct anv_state) {
-            .alloc_size = ANV_SURFACE_STATE_SIZE,
-            .map = host_surface_states + b * ANV_SURFACE_STATE_SIZE,
-         };
-      }
-   }
+   if (device->physical->indirect_descriptors) {
+      if (!pool->host_only) {
+         for (uint32_t b = 0; b < set->buffer_view_count; b++) {
+            set->buffer_views[b].general.state =
+               anv_descriptor_pool_alloc_state(pool);
+         }
+      } else {
+         void *host_surface_states =
+            set->buffer_views + set->buffer_view_count;
+         memset(host_surface_states, 0,
+                set->buffer_view_count * ANV_SURFACE_STATE_SIZE);
+         for (uint32_t b = 0; b < set->buffer_view_count; b++) {
+            set->buffer_views[b].general.state = (struct anv_state) {
+               .alloc_size = ANV_SURFACE_STATE_SIZE,
+               .map = host_surface_states + b * ANV_SURFACE_STATE_SIZE,
+            };
+         }
+      }
+   }

 static uint32_t
-anv_surface_state_to_handle(struct anv_state state)
+anv_surface_state_to_handle(struct anv_physical_device *device,
+                            struct anv_state state)
 {
    /* Bits 31:12 of the bindless surface offset in the extended message
     * descriptor is bits 25:6 of the byte-based address.
     */
    assert(state.offset >= 0);
    uint32_t offset = state.offset;
-   assert((offset & 0x3f) == 0 && offset < (1 << 26));
-   return offset << 6;
+   if (device->uses_ex_bso) {
+      assert((offset & 0x3f) == 0);
+      return offset;
+   } else {
+      assert((offset & 0x3f) == 0 && offset < (1 << 26));
+      return offset << 6;
+   }
 }

+static const void *
+anv_image_view_surface_data_for_plane_layout(struct anv_image_view *image_view,
+                                             VkDescriptorType desc_type,
+                                             unsigned plane,
+                                             VkImageLayout layout)
+{
+   if (desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
+       desc_type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
+       desc_type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) {
+      return layout == VK_IMAGE_LAYOUT_GENERAL ?
+         &image_view->planes[plane].general_sampler.state_data :
+         &image_view->planes[plane].optimal_sampler.state_data;
+   }
+
+   if (desc_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
+      return &image_view->planes[plane].storage.state_data;
+
+   unreachable("Invalid descriptor type");
+}

    void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
                     element * bind_layout->descriptor_stride;
-   memset(desc_map, 0, bind_layout->descriptor_stride);
-
-   if (image_view == NULL && sampler == NULL)
-      return;

    enum anv_descriptor_data data =
       bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
       anv_descriptor_data_for_type(device->physical, type) :
       bind_layout->data;

-   if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {
+   if (data & ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE) {
       struct anv_sampled_image_descriptor desc_data[3];
       memset(desc_data, 0, sizeof(desc_data));

       if (image_view) {
          for (unsigned p = 0; p < image_view->n_planes; p++) {
-            struct anv_surface_state sstate =
+            const struct anv_surface_state *sstate =
                (desc->layout == VK_IMAGE_LAYOUT_GENERAL) ?
-               image_view->planes[p].general_sampler_surface_state :
-               image_view->planes[p].optimal_sampler_surface_state;
-            desc_data[p].image = anv_surface_state_to_handle(sstate.state);
+               &image_view->planes[p].general_sampler :
+               &image_view->planes[p].optimal_sampler;
+            desc_data[p].image =
+               anv_surface_state_to_handle(device->physical, sstate->state);
          }
       }
...
              MAX2(1, bind_layout->max_plane_count) * sizeof(desc_data[0]));

-   if (image_view == NULL)
-      return;
-
-   if (data & ANV_DESCRIPTOR_STORAGE_IMAGE) {
-      assert(image_view->n_planes == 1);
-      struct anv_storage_image_descriptor desc_data = {
-         .vanilla = anv_surface_state_to_handle(
-            image_view->planes[0].storage_surface_state.state),
-         .lowered = anv_surface_state_to_handle(
-            image_view->planes[0].lowered_storage_surface_state.state),
-      };
-      memcpy(desc_map, &desc_data, sizeof(desc_data));
-   }
+   if (data & ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE) {
+      if (image_view) {
+         assert(image_view->n_planes == 1);
+         struct anv_storage_image_descriptor desc_data = {
+            .vanilla = anv_surface_state_to_handle(
+               device->physical,
+               image_view->planes[0].storage.state),
+            .image_depth = image_view->vk.storage.z_slice_count,
+         };
+         memcpy(desc_map, &desc_data, sizeof(desc_data));
+      } else {
+         memset(desc_map, 0, bind_layout->descriptor_stride);
+      }
+   }

+   if (data & ANV_DESCRIPTOR_SAMPLER) {
+      if (sampler) {
+         for (unsigned p = 0; p < sampler->n_planes; p++) {
+            memcpy(desc_map + p * ANV_SAMPLER_STATE_SIZE,
+                   sampler->state[p], ANV_SAMPLER_STATE_SIZE);
+         }
+      } else {
+         memset(desc_map, 0, bind_layout->descriptor_stride);
+      }
+   }
+
+   if (data & ANV_DESCRIPTOR_SURFACE) {
+      unsigned max_plane_count = image_view ? image_view->n_planes : 1;
+
+      for (unsigned p = 0; p < max_plane_count; p++) {
+         void *plane_map = desc_map + p * ANV_SURFACE_STATE_SIZE;
+
+         if (image_view) {
+            memcpy(plane_map,
+                   anv_image_view_surface_data_for_plane_layout(image_view, type,
+                                                                p, desc->layout),
+                   ANV_SURFACE_STATE_SIZE);
+         } else {
+            memcpy(plane_map, device->null_surface_state.map, ANV_SURFACE_STATE_SIZE);
+         }
+      }
+   }
+
+   if (data & ANV_DESCRIPTOR_SURFACE_SAMPLER) {
+      unsigned max_plane_count =
+         MAX2(image_view ? image_view->n_planes : 1,
+              sampler ? sampler->n_planes : 1);
+
+      for (unsigned p = 0; p < max_plane_count; p++) {
+         void *plane_map = desc_map + p * 2 * ANV_SURFACE_STATE_SIZE;
+
+         if (image_view) {
+            memcpy(plane_map,
+                   anv_image_view_surface_data_for_plane_layout(image_view, type,
+                                                                p, desc->layout),
+                   ANV_SURFACE_STATE_SIZE);
+         } else {
+            memcpy(plane_map, device->null_surface_state.map, ANV_SURFACE_STATE_SIZE);
+         }
+
+         if (sampler) {
+            memcpy(plane_map + ANV_SURFACE_STATE_SIZE,
+                   sampler->state[p], ANV_SAMPLER_STATE_SIZE);
+         } else {
+            memset(plane_map + ANV_SURFACE_STATE_SIZE, 0,
+                   ANV_SAMPLER_STATE_SIZE);
+         }
+      }
+   }

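Putting the copies above together, a direct combined image+sampler descriptor occupies two 64-byte slots per plane: the surface state first, then the sampler state. A sketch of the implied offsets (ANV_SAMPLER_STATE_SIZE itself is defined elsewhere in the driver):

/* Sketch: byte offsets within plane p of a direct combined image+sampler
 * descriptor, as implied by the copies above. */
void *plane_map   = desc_map + p * 2 * ANV_SURFACE_STATE_SIZE;  /* 128 bytes per plane */
void *surface_map = plane_map;                                   /* RENDER_SURFACE_STATE */
void *sampler_map = plane_map + ANV_SURFACE_STATE_SIZE;          /* SAMPLER_STATE */
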
+static const void *
+anv_buffer_view_surface_data(struct anv_buffer_view *buffer_view,
+                             VkDescriptorType desc_type)
+{
+   if (desc_type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER)
+      return &buffer_view->general.state_data;
+
+   if (desc_type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)
+      return &buffer_view->storage.state_data;
+
+   unreachable("Invalid descriptor type");
+}

    void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
                     element * bind_layout->descriptor_stride;

    if (buffer_view == NULL) {
-      memset(desc_map, 0, bind_layout->descriptor_stride);
+      if (data & ANV_DESCRIPTOR_SURFACE)
+         memcpy(desc_map, device->null_surface_state.map, ANV_SURFACE_STATE_SIZE);
+      else
+         memset(desc_map, 0, bind_layout->descriptor_stride);
       return;
    }

-   if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {
+   if (data & ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE) {
       struct anv_sampled_image_descriptor desc_data = {
-         .image = anv_surface_state_to_handle(buffer_view->surface_state),
+         .image = anv_surface_state_to_handle(
+            device->physical, buffer_view->general.state),
       };
       memcpy(desc_map, &desc_data, sizeof(desc_data));
    }

-   if (data & ANV_DESCRIPTOR_STORAGE_IMAGE) {
+   if (data & ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE) {
       struct anv_storage_image_descriptor desc_data = {
          .vanilla = anv_surface_state_to_handle(
-            buffer_view->storage_surface_state),
-         .lowered = anv_surface_state_to_handle(
-            buffer_view->lowered_storage_surface_state),
+            device->physical, buffer_view->storage.state),
       };
       memcpy(desc_map, &desc_data, sizeof(desc_data));
    }

+   if (data & ANV_DESCRIPTOR_SURFACE) {
+      memcpy(desc_map,
+             anv_buffer_view_surface_data(buffer_view, type),
+             ANV_SURFACE_STATE_SIZE);
+   }

       .buffer = buffer,
    };

+   enum anv_descriptor_data data =
+      bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
+      anv_descriptor_data_for_type(device->physical, type) :
+      bind_layout->data;
+
    void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
                     element * bind_layout->descriptor_stride;

    if (buffer == NULL) {
-      memset(desc_map, 0, bind_layout->descriptor_stride);
+      if (data & ANV_DESCRIPTOR_SURFACE)
+         memcpy(desc_map, device->null_surface_state.map, ANV_SURFACE_STATE_SIZE);
+      else
+         memset(desc_map, 0, bind_layout->descriptor_stride);
       return;
    }

    struct anv_address bind_addr = anv_address_add(buffer->address, offset);
-   uint64_t bind_range = vk_buffer_range(&buffer->vk, offset, range);
-   enum anv_descriptor_data data =
-      bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
-      anv_descriptor_data_for_type(device->physical, type) :
-      bind_layout->data;
+   desc->bind_range = vk_buffer_range(&buffer->vk, offset, range);

    /* We report a bounds checking alignment of 32B for the sake of block
     * messages which read an entire register worth at a time.
     */
    if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
        type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)
-      bind_range = align64(bind_range, ANV_UBO_ALIGNMENT);
+      desc->bind_range = align64(desc->bind_range, ANV_UBO_ALIGNMENT);

-   if (data & ANV_DESCRIPTOR_ADDRESS_RANGE) {
+   if (data & ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE) {
       struct anv_address_range_descriptor desc_data = {
          .address = anv_address_physical(bind_addr),
-         .range = bind_range,
+         .range = desc->bind_range,
       };
       memcpy(desc_map, &desc_data, sizeof(desc_data));
    }

+   if (data & ANV_DESCRIPTOR_SURFACE) {
+      isl_surf_usage_flags_t usage =
+         desc->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ?
+         ISL_SURF_USAGE_CONSTANT_BUFFER_BIT :
+         ISL_SURF_USAGE_STORAGE_BIT;
+
+      enum isl_format format =
+         anv_isl_format_for_descriptor_type(device, desc->type);
+
+      isl_buffer_fill_state(&device->isl_dev, desc_map,
+                            .address = anv_address_physical(bind_addr),
+                            .mocs = isl_mocs(&device->isl_dev, usage,
+                                             bind_addr.bo && bind_addr.bo->is_external),
+                            .size_B = desc->bind_range,
+                            .format = format,
+                            .swizzle = ISL_SWIZZLE_IDENTITY,
+                            .stride_B = 1);
+   }

    if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
        type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)
       return;

-   assert(data & ANV_DESCRIPTOR_BUFFER_VIEW);
-   struct anv_buffer_view *bview =
-      &set->buffer_views[bind_layout->buffer_view_index + element];
-
-   desc->set_buffer_view = bview;
-
-   bview->range = bind_range;
-   bview->address = bind_addr;
-
-   set->generate_surface_states |= BITFIELD_BIT(descriptor_index);
-
-   anv_descriptor_write_surface_state(device, desc, bview->surface_state);
+   if (data & ANV_DESCRIPTOR_BUFFER_VIEW) {
+      struct anv_buffer_view *bview =
+         &set->buffer_views[bind_layout->buffer_view_index + element];
+
+      desc->set_buffer_view = bview;
+
+      bview->range = desc->bind_range;
+      bview->address = bind_addr;
+
+      set->generate_surface_states |= BITFIELD_BIT(descriptor_index);
+
+      anv_descriptor_write_surface_state(device, desc, bview->general.state);
+   }