1 /*
2 * Copyright © 2022 Collabora Ltd. and Red Hat Inc.
3 * SPDX-License-Identifier: MIT
4 */
5 #include "nvk_descriptor_set_layout.h"
6
7 #include "nvk_descriptor_set.h"
8 #include "nvk_descriptor_types.h"
9 #include "nvk_device.h"
10 #include "nvk_entrypoints.h"
11 #include "nvk_physical_device.h"
12 #include "nvk_sampler.h"
13
14 #include "vk_pipeline_layout.h"
15
16 static bool
binding_has_immutable_samplers(const VkDescriptorSetLayoutBinding * binding)17 binding_has_immutable_samplers(const VkDescriptorSetLayoutBinding *binding)
18 {
19 switch (binding->descriptorType) {
20 case VK_DESCRIPTOR_TYPE_SAMPLER:
21 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
22 return binding->pImmutableSamplers != NULL;
23
24 default:
25 return false;
26 }
27 }
28
29 void
nvk_descriptor_stride_align_for_type(const struct nvk_physical_device * pdev,VkPipelineLayoutCreateFlags layout_flags,VkDescriptorType type,const VkMutableDescriptorTypeListEXT * type_list,uint32_t * stride,uint32_t * alignment)30 nvk_descriptor_stride_align_for_type(const struct nvk_physical_device *pdev,
31 VkPipelineLayoutCreateFlags layout_flags,
32 VkDescriptorType type,
33 const VkMutableDescriptorTypeListEXT *type_list,
34 uint32_t *stride, uint32_t *alignment)
35 {
36 switch (type) {
37 case VK_DESCRIPTOR_TYPE_SAMPLER:
38 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
39 /* TODO: How do samplers work? */
40 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
41 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
42 *stride = *alignment = sizeof(struct nvk_sampled_image_descriptor);
43 break;
44
45 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
46 *stride = *alignment = sizeof(struct nvk_storage_image_descriptor);
47 break;
48
49 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
50 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
51 if ((layout_flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT) ||
52 nvk_use_edb_buffer_views(pdev)) {
53 *stride = *alignment = sizeof(struct nvk_edb_buffer_view_descriptor);
54 } else {
55 *stride = *alignment = sizeof(struct nvk_buffer_view_descriptor);
56 }
57 break;
58
59 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
60 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
61 *stride = *alignment = sizeof(union nvk_buffer_descriptor);
62 break;
63
64 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
65 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
66 *stride = *alignment = 0; /* These don't take up buffer space */
67 break;
68
69 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
70 *stride = 1; /* Array size is bytes */
71 *alignment = nvk_min_cbuf_alignment(&pdev->info);
72 break;
73
74 case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
75 *stride = *alignment = 0;
76 if (type_list == NULL)
77 *stride = *alignment = NVK_MAX_DESCRIPTOR_SIZE;
78 for (unsigned i = 0; type_list && i < type_list->descriptorTypeCount; i++) {
79 /* This shouldn't recurse */
80 assert(type_list->pDescriptorTypes[i] !=
81 VK_DESCRIPTOR_TYPE_MUTABLE_EXT);
82 uint32_t desc_stride, desc_align;
83 nvk_descriptor_stride_align_for_type(pdev, layout_flags,
84 type_list->pDescriptorTypes[i],
85 NULL, &desc_stride, &desc_align);
86 *stride = MAX2(*stride, desc_stride);
87 *alignment = MAX2(*alignment, desc_align);
88 }
89 *stride = ALIGN(*stride, *alignment);
90 break;
91
92 default:
93 unreachable("Invalid descriptor type");
94 }
95
96 assert(*stride <= NVK_MAX_DESCRIPTOR_SIZE);
97 }
98
99 static const VkMutableDescriptorTypeListEXT *
nvk_descriptor_get_type_list(VkDescriptorType type,const VkMutableDescriptorTypeCreateInfoEXT * info,const uint32_t info_idx)100 nvk_descriptor_get_type_list(VkDescriptorType type,
101 const VkMutableDescriptorTypeCreateInfoEXT *info,
102 const uint32_t info_idx)
103 {
104 const VkMutableDescriptorTypeListEXT *type_list = NULL;
105 if (type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT) {
106 assert(info != NULL);
107 assert(info_idx < info->mutableDescriptorTypeListCount);
108 type_list = &info->pMutableDescriptorTypeLists[info_idx];
109 }
110 return type_list;
111 }
112
/* vk_descriptor_set_layout::destroy callback.
 *
 * Frees the embedded-sampler descriptor data from the shader heap (if this
 * layout uploaded any at create time) and then frees the layout object.
 */
static void
nvk_descriptor_set_layout_destroy(struct vk_device *vk_dev,
                                  struct vk_descriptor_set_layout *vk_layout)
{
   struct nvk_device *dev = container_of(vk_dev, struct nvk_device, vk);
   struct nvk_descriptor_set_layout *layout =
      vk_to_nvk_descriptor_set_layout(vk_layout);

   /* The embedded-samplers upload in nvk_CreateDescriptorSetLayout was
    * exactly non_variable_descriptor_buffer_size bytes, so that is the size
    * we hand back to the heap.
    */
   if (layout->embedded_samplers_addr != 0) {
      nvk_heap_free(dev, &dev->shader_heap,
                    layout->embedded_samplers_addr,
                    layout->non_variable_descriptor_buffer_size);
   }

   vk_object_free(&dev->vk, NULL, layout);
}
129
/* Creates a descriptor set layout.
 *
 * Computes the descriptor-buffer layout (per-binding offset and stride),
 * records dynamic-buffer bookkeeping, hashes the layout for pipeline cache
 * keys, and — for EMBEDDED_IMMUTABLE_SAMPLERS layouts — uploads the sampler
 * descriptors to the shader heap up front.
 */
VKAPI_ATTR VkResult VKAPI_CALL
nvk_CreateDescriptorSetLayout(VkDevice device,
                              const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                              const VkAllocationCallbacks *pAllocator,
                              VkDescriptorSetLayout *pSetLayout)
{
   VK_FROM_HANDLE(nvk_device, dev, device);
   struct nvk_physical_device *pdev = nvk_device_physical(dev);

   /* First pass: binding numbers may be sparse and out of order, so find
    * the highest binding number and count immutable samplers to size the
    * allocation below.
    */
   uint32_t num_bindings = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      num_bindings = MAX2(num_bindings, binding->binding + 1);

      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
       *
       *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
       *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
       *    pImmutableSamplers can be used to initialize a set of immutable
       *    samplers. [...] If descriptorType is not one of these descriptor
       *    types, then pImmutableSamplers is ignored.
       *
       * We need to be careful here and only parse pImmutableSamplers if we
       * have one of the right descriptor types.
       */
      if (binding_has_immutable_samplers(binding))
         immutable_sampler_count += binding->descriptorCount;
   }

   /* The layout, its binding array, and the immutable sampler pointers all
    * come from one zeroed multialloc, so everything is freed together.
    */
   VK_MULTIALLOC(ma);
   VK_MULTIALLOC_DECL(&ma, struct nvk_descriptor_set_layout, layout, 1);
   VK_MULTIALLOC_DECL(&ma, struct nvk_descriptor_set_binding_layout, bindings,
                      num_bindings);
   VK_MULTIALLOC_DECL(&ma, struct nvk_sampler *, samplers,
                      immutable_sampler_count);

   if (!vk_descriptor_set_layout_multizalloc(&dev->vk, &ma))
      return vk_error(dev, VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->vk.destroy = nvk_descriptor_set_layout_destroy;
   layout->flags = pCreateInfo->flags;
   layout->binding_count = num_bindings;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      uint32_t b = binding->binding;
      /* We temporarily store pCreateInfo->pBindings[] index (plus one) in the
       * immutable_samplers pointer.  This provides us with a quick-and-dirty
       * way to sort the bindings by binding number.
       */
      layout->binding[b].immutable_samplers = (void *)(uintptr_t)(j + 1);
   }

   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);
   const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);

   /* Second pass, in binding-number order: assign descriptor buffer offsets
    * and dynamic buffer indices.
    */
   uint32_t buffer_size = 0;
   uint32_t max_variable_descriptor_size = 0;
   uint8_t dynamic_buffer_count = 0;
   for (uint32_t b = 0; b < num_bindings; b++) {
      /* We stashed the pCreateInfo->pBindings[] index (plus one) in the
       * immutable_samplers pointer.  Check for NULL (empty binding) and then
       * reset it and compute the index.
       */
      if (layout->binding[b].immutable_samplers == NULL)
         continue;
      const uint32_t info_idx =
         (uintptr_t)(void *)layout->binding[b].immutable_samplers - 1;
      layout->binding[b].immutable_samplers = NULL;

      const VkDescriptorSetLayoutBinding *binding =
         &pCreateInfo->pBindings[info_idx];

      if (binding->descriptorCount == 0)
         continue;

      layout->binding[b].type = binding->descriptorType;

      /* Per VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-bindingCount,
       * bindingCount is either zero or equal to pCreateInfo->bindingCount.
       */
      if (binding_flags_info && binding_flags_info->bindingCount > 0) {
         assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
         layout->binding[b].flags = binding_flags_info->pBindingFlags[info_idx];
      }

      layout->binding[b].array_size = binding->descriptorCount;

      /* Dynamic buffers don't live in the descriptor buffer; they get a
       * contiguous range of dynamic buffer slots instead.  Track which
       * slots are UBOs so shaders can tell them apart from SSBOs.
       */
      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         layout->binding[b].dynamic_buffer_index = dynamic_buffer_count;
         BITSET_SET_RANGE(layout->dynamic_ubos, dynamic_buffer_count,
                          dynamic_buffer_count + binding->descriptorCount - 1);
         dynamic_buffer_count += binding->descriptorCount;
         break;

      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         layout->binding[b].dynamic_buffer_index = dynamic_buffer_count;
         dynamic_buffer_count += binding->descriptorCount;
         break;

      default:
         break;
      }

      const VkMutableDescriptorTypeListEXT *type_list =
         nvk_descriptor_get_type_list(binding->descriptorType,
                                      mutable_info, info_idx);

      uint32_t stride, alignment;
      nvk_descriptor_stride_align_for_type(pdev, pCreateInfo->flags,
                                           binding->descriptorType,
                                           type_list, &stride, &alignment);

      uint8_t max_plane_count = 1;

      if (binding_has_immutable_samplers(binding)) {
         /* Carve this binding's slice out of the samplers array allocated
          * above and record the per-sampler plane count (YCbCr samplers can
          * be multi-planar).
          */
         layout->binding[b].immutable_samplers = samplers;
         samplers += binding->descriptorCount;
         for (uint32_t i = 0; i < binding->descriptorCount; i++) {
            VK_FROM_HANDLE(nvk_sampler, sampler, binding->pImmutableSamplers[i]);
            layout->binding[b].immutable_samplers[i] = sampler;
            const uint8_t sampler_plane_count = sampler->vk.ycbcr_conversion ?
               vk_format_get_plane_count(sampler->vk.ycbcr_conversion->state.format) : 1;
            if (max_plane_count < sampler_plane_count)
               max_plane_count = sampler_plane_count;
         }
      }

      /* Multi-planar descriptors take one descriptor slot per plane */
      stride *= max_plane_count;

      if (stride > 0) {
         assert(stride <= UINT8_MAX);
         assert(util_is_power_of_two_nonzero(alignment));

         buffer_size = align64(buffer_size, alignment);
         layout->binding[b].offset = buffer_size;
         layout->binding[b].stride = stride;

         if (layout->binding[b].flags &
             VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT) {
            /* From the Vulkan 1.3.256 spec:
             *
             *    VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-pBindingFlags-03004
             *    "If an element of pBindingFlags includes
             *    VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT, then
             *    all other elements of
             *    VkDescriptorSetLayoutCreateInfo::pBindings must have a
             *    smaller value of binding"
             *
             * In other words, it has to be the last binding.
             */
            assert(b == num_bindings - 1);
            assert(max_variable_descriptor_size == 0);
            max_variable_descriptor_size = stride * binding->descriptorCount;
         } else {
            /* the allocation size will be computed at descriptor allocation,
             * but the buffer size will be already aligned as this binding will
             * be the last
             */
            buffer_size += stride * binding->descriptorCount;
         }
      }

   }

   layout->non_variable_descriptor_buffer_size = buffer_size;
   layout->max_buffer_size = buffer_size + max_variable_descriptor_size;
   layout->dynamic_buffer_count = dynamic_buffer_count;

   /* Hash everything that affects descriptor placement or shader lowering
    * into vk.blake3 so identical layouts compare equal.
    */
   struct mesa_blake3 blake3_ctx;
   _mesa_blake3_init(&blake3_ctx);

#define BLAKE3_UPDATE_VALUE(x) _mesa_blake3_update(&blake3_ctx, &(x), sizeof(x));
   BLAKE3_UPDATE_VALUE(layout->non_variable_descriptor_buffer_size);
   BLAKE3_UPDATE_VALUE(layout->dynamic_buffer_count);
   BLAKE3_UPDATE_VALUE(layout->binding_count);

   for (uint32_t b = 0; b < num_bindings; b++) {
      BLAKE3_UPDATE_VALUE(layout->binding[b].type);
      BLAKE3_UPDATE_VALUE(layout->binding[b].flags);
      BLAKE3_UPDATE_VALUE(layout->binding[b].array_size);
      BLAKE3_UPDATE_VALUE(layout->binding[b].offset);
      BLAKE3_UPDATE_VALUE(layout->binding[b].stride);
      BLAKE3_UPDATE_VALUE(layout->binding[b].dynamic_buffer_index);

      if (layout->binding[b].immutable_samplers != NULL) {
         for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
            const struct nvk_sampler *sampler =
               layout->binding[b].immutable_samplers[i];

            /* We zalloc the object, so it's safe to hash the whole thing */
            if (sampler != NULL && sampler->vk.ycbcr_conversion != NULL)
               BLAKE3_UPDATE_VALUE(sampler->vk.ycbcr_conversion->state);
         }
      }
   }
#undef BLAKE3_UPDATE_VALUE

   _mesa_blake3_final(&blake3_ctx, layout->vk.blake3);

   /* For embedded immutable samplers, build the sampler descriptor data on
    * the CPU and upload it to the shader heap now; the resulting GPU address
    * lives for the lifetime of the layout (freed in the destroy callback).
    */
   if (pCreateInfo->flags &
       VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT) {
      void *sampler_desc_data =
         vk_alloc2(&dev->vk.alloc, pAllocator, buffer_size, 4,
                   VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
      if (sampler_desc_data == NULL) {
         nvk_descriptor_set_layout_destroy(&dev->vk, &layout->vk);
         return vk_error(dev, VK_ERROR_OUT_OF_HOST_MEMORY);
      }

      for (uint32_t b = 0; b < num_bindings; b++) {
         /* Embedded-sampler layouts may only contain single immutable
          * samplers (per VK_EXT_descriptor_buffer).
          */
         assert(layout->binding[b].type == VK_DESCRIPTOR_TYPE_SAMPLER);
         assert(layout->binding[b].array_size == 1);
         assert(layout->binding[b].immutable_samplers != NULL);
         assert(!(layout->binding[b].flags &
                  VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT));

         /* I'm paranoid */
         if (layout->binding[b].immutable_samplers == NULL)
            continue;

         struct nvk_sampler *sampler = layout->binding[b].immutable_samplers[0];

         /* YCbCr has to come in through a combined image/sampler */
         assert(sampler->plane_count == 1);

         assert(sampler->planes[0].desc_index < (1 << 12));
         struct nvk_sampled_image_descriptor desc = {
            .sampler_index = sampler->planes[0].desc_index,
         };
         memcpy(sampler_desc_data + layout->binding[b].offset,
                &desc, sizeof(desc));
      }

      VkResult result = nvk_heap_upload(dev, &dev->shader_heap,
                                        sampler_desc_data, buffer_size,
                                        nvk_min_cbuf_alignment(&pdev->info),
                                        &layout->embedded_samplers_addr);
      vk_free2(&dev->vk.alloc, pAllocator, sampler_desc_data);
      if (result != VK_SUCCESS) {
         nvk_descriptor_set_layout_destroy(&dev->vk, &layout->vk);
         return result;
      }
   }

   *pSetLayout = nvk_descriptor_set_layout_to_handle(layout);

   return VK_SUCCESS;
}
382
/* vkGetDescriptorSetLayoutSupport implementation.
 *
 * Conservatively computes the descriptor buffer size the layout would need
 * and reports whether it fits in the per-set limit, along with the maximum
 * variable descriptor count when the client asks for it.  This runs without
 * creating a layout, so it works on unsorted bindings (see max_align below).
 */
VKAPI_ATTR void VKAPI_CALL
nvk_GetDescriptorSetLayoutSupport(VkDevice device,
                                  const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                                  VkDescriptorSetLayoutSupport *pSupport)
{
   VK_FROM_HANDLE(nvk_device, dev, device);
   struct nvk_physical_device *pdev = nvk_device_physical(dev);

   const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);
   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);

   /* Figure out the maximum alignment up-front.  Otherwise, we need to sort
    * the list of descriptors by binding number in order to get the size
    * accumulation right.
    */
   uint32_t max_align = 0;
   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[i];
      const VkMutableDescriptorTypeListEXT *type_list =
         nvk_descriptor_get_type_list(binding->descriptorType,
                                      mutable_info, i);

      uint32_t stride, alignment;
      nvk_descriptor_stride_align_for_type(pdev, pCreateInfo->flags,
                                           binding->descriptorType,
                                           type_list, &stride, &alignment);
      max_align = MAX2(max_align, alignment);
   }

   uint64_t non_variable_size = 0;
   uint32_t variable_stride = 0;
   uint32_t variable_count = 0;
   uint8_t dynamic_buffer_count = 0;

   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[i];

      VkDescriptorBindingFlags flags = 0;
      if (binding_flags != NULL && binding_flags->bindingCount > 0)
         flags = binding_flags->pBindingFlags[i];

      /* Dynamic buffers don't consume descriptor buffer space but are
       * limited in number; count them for the support check below.
       */
      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         dynamic_buffer_count += binding->descriptorCount;
         break;
      default:
         break;
      }

      const VkMutableDescriptorTypeListEXT *type_list =
         nvk_descriptor_get_type_list(binding->descriptorType,
                                      mutable_info, i);

      uint32_t stride, alignment;
      nvk_descriptor_stride_align_for_type(pdev, pCreateInfo->flags,
                                           binding->descriptorType,
                                           type_list, &stride, &alignment);

      if (stride > 0) {
         assert(stride <= UINT8_MAX);
         assert(util_is_power_of_two_nonzero(alignment));

         if (flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT) {
            /* From the Vulkan 1.3.256 spec:
             *
             *    "For the purposes of this command, a variable-sized
             *    descriptor binding with a descriptorCount of zero is treated
             *    as if the descriptorCount is one"
             */
            variable_count = MAX2(1, binding->descriptorCount);
            variable_stride = stride;
         } else {
            /* Since we're aligning to the maximum and since this is just a
             * check for whether or not the max buffer size is big enough, we
             * keep non_variable_size aligned to max_align.
             */
            non_variable_size += stride * binding->descriptorCount;
            non_variable_size = align64(non_variable_size, max_align);
         }
      }
   }

   uint64_t buffer_size = non_variable_size;
   if (variable_stride > 0) {
      buffer_size += variable_stride * variable_count;
      buffer_size = align64(buffer_size, max_align);
   }

   /* Push descriptor sets have a smaller size limit */
   uint32_t max_buffer_size;
   if (pCreateInfo->flags &
       VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR)
      max_buffer_size = NVK_PUSH_DESCRIPTOR_SET_SIZE;
   else
      max_buffer_size = NVK_MAX_DESCRIPTOR_SET_SIZE;

   pSupport->supported = dynamic_buffer_count <= NVK_MAX_DYNAMIC_BUFFERS &&
                         buffer_size <= max_buffer_size;

   vk_foreach_struct(ext, pSupport->pNext) {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT: {
         /* The variable-count binding gets whatever space is left after the
          * non-variable bindings.
          */
         VkDescriptorSetVariableDescriptorCountLayoutSupport *vs = (void *)ext;
         if (variable_stride > 0) {
            vs->maxVariableDescriptorCount =
               (max_buffer_size - non_variable_size) / variable_stride;
         } else {
            vs->maxVariableDescriptorCount = 0;
         }
         break;
      }

      default:
         vk_debug_ignored_stype(ext->sType);
         break;
      }
   }
}
505
506 VKAPI_ATTR void VKAPI_CALL
nvk_GetDescriptorSetLayoutSizeEXT(VkDevice device,VkDescriptorSetLayout _layout,VkDeviceSize * pLayoutSizeInBytes)507 nvk_GetDescriptorSetLayoutSizeEXT(VkDevice device,
508 VkDescriptorSetLayout _layout,
509 VkDeviceSize *pLayoutSizeInBytes)
510 {
511 VK_FROM_HANDLE(nvk_descriptor_set_layout, layout, _layout);
512
513 *pLayoutSizeInBytes = layout->max_buffer_size;
514 }
515
516 VKAPI_ATTR void VKAPI_CALL
nvk_GetDescriptorSetLayoutBindingOffsetEXT(VkDevice device,VkDescriptorSetLayout _layout,uint32_t binding,VkDeviceSize * pOffset)517 nvk_GetDescriptorSetLayoutBindingOffsetEXT(VkDevice device,
518 VkDescriptorSetLayout _layout,
519 uint32_t binding,
520 VkDeviceSize *pOffset)
521 {
522 VK_FROM_HANDLE(nvk_descriptor_set_layout, layout, _layout);
523
524 *pOffset = layout->binding[binding].offset;
525 }
526