/*
 * Copyright © 2019 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "lvp_private.h"
#include "vk_acceleration_structure.h"
#include "vk_descriptor_update_template.h"
#include "vk_descriptors.h"
#include "vk_util.h"
#include "util/u_math.h"
#include "util/u_inlines.h"
#include "lp_texture.h"

static bool
binding_has_immutable_samplers(const VkDescriptorSetLayoutBinding *binding)
{
   switch (binding->descriptorType) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      return binding->pImmutableSamplers != NULL;

   default:
      return false;
   }
}

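/* Destroy hook for layouts that own an immutable-sampler set: that set holds
 * a reference back to this layout, so the refcount is pinned to UINT32_MAX
 * before destroying it to keep the unref from re-entering this destructor. */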
static void
lvp_descriptor_set_layout_destroy(struct vk_device *_device, struct vk_descriptor_set_layout *_layout)
{
   struct lvp_device *device = container_of(_device, struct lvp_device, vk);
   struct lvp_descriptor_set_layout *set_layout = (void*)vk_to_lvp_descriptor_set_layout(_layout);

   _layout->ref_cnt = UINT32_MAX;
   lvp_descriptor_set_destroy(device, set_layout->immutable_set);

   vk_descriptor_set_layout_destroy(_device, _layout);
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateDescriptorSetLayout(
   VkDevice _device,
   const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
   const VkAllocationCallbacks* pAllocator,
   VkDescriptorSetLayout* pSetLayout)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
   uint32_t num_bindings = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);
      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
       *
       *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
       *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
       *    pImmutableSamplers can be used to initialize a set of immutable
       *    samplers. [...] If descriptorType is not one of these descriptor
       *    types, then pImmutableSamplers is ignored."
       *
       * We need to be careful here and only parse pImmutableSamplers if we
       * have one of the right descriptor types.
       */
      if (binding_has_immutable_samplers(&pCreateInfo->pBindings[j]))
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

   size_t size = sizeof(struct lvp_descriptor_set_layout) +
                 num_bindings * sizeof(set_layout->binding[0]) +
                 immutable_sampler_count * sizeof(struct lvp_sampler *);

   set_layout = vk_descriptor_set_layout_zalloc(&device->vk, size);
   if (!set_layout)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->immutable_sampler_count = immutable_sampler_count;
   /* We just allocate all the samplers at the end of the struct */
   struct lvp_sampler **samplers =
      (struct lvp_sampler **)&set_layout->binding[num_bindings];

   set_layout->binding_count = num_bindings;
   set_layout->shader_stages = 0;
   set_layout->size = 0;

   VkDescriptorSetLayoutBinding *bindings = NULL;
   VkResult result = vk_create_sorted_bindings(pCreateInfo->pBindings,
                                               pCreateInfo->bindingCount,
                                               &bindings);
   if (result != VK_SUCCESS) {
      vk_descriptor_set_layout_unref(&device->vk, &set_layout->vk);
      return vk_error(device, result);
   }

   uint32_t uniform_block_size = 0;

   uint32_t dynamic_offset_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + j;
      uint32_t b = binding->binding;

      uint32_t descriptor_count = binding->descriptorCount;
      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
         descriptor_count = 1;

      set_layout->binding[b].array_size = descriptor_count;
      set_layout->binding[b].descriptor_index = set_layout->size;
      set_layout->binding[b].type = binding->descriptorType;
      set_layout->binding[b].valid = true;
      set_layout->binding[b].uniform_block_offset = 0;
      set_layout->binding[b].uniform_block_size = 0;

      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
          binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
         set_layout->binding[b].dynamic_index = dynamic_offset_count;
         dynamic_offset_count += binding->descriptorCount;
      }

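      /* YCbCr samplers may be multi-planar; each plane needs its own
       * lp_descriptor, so the binding stride is the widest plane count
       * among the binding's immutable samplers. */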
      uint8_t max_plane_count = 1;
      if (binding_has_immutable_samplers(binding)) {
         set_layout->binding[b].immutable_samplers = samplers;
         samplers += binding->descriptorCount;

         for (uint32_t i = 0; i < binding->descriptorCount; i++) {
            VK_FROM_HANDLE(lvp_sampler, sampler, binding->pImmutableSamplers[i]);
            set_layout->binding[b].immutable_samplers[i] = sampler;
            const uint8_t sampler_plane_count = sampler->vk.ycbcr_conversion ?
               vk_format_get_plane_count(sampler->vk.ycbcr_conversion->state.format) : 1;
            if (max_plane_count < sampler_plane_count)
               max_plane_count = sampler_plane_count;
         }
      }

      set_layout->binding[b].stride = max_plane_count;
      set_layout->size += descriptor_count * max_plane_count;

      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         set_layout->binding[b].uniform_block_offset = uniform_block_size;
         set_layout->binding[b].uniform_block_size = binding->descriptorCount;
         uniform_block_size += binding->descriptorCount;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

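   /* Inline uniform block data is stored after the lp_descriptor array in a
    * set's backing storage, so rebase each block's offset past the
    * descriptors. */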
   for (uint32_t i = 0; i < set_layout->binding_count; i++)
      set_layout->binding[i].uniform_block_offset += set_layout->size * sizeof(struct lp_descriptor);

   free(bindings);

   set_layout->dynamic_offset_count = dynamic_offset_count;

   if (set_layout->binding_count == set_layout->immutable_sampler_count) {
      /* create a bindable set with all the immutable samplers */
      lvp_descriptor_set_create(device, set_layout, &set_layout->immutable_set);
      vk_descriptor_set_layout_unref(&device->vk, &set_layout->vk);
      set_layout->vk.destroy = lvp_descriptor_set_layout_destroy;
   }

   *pSetLayout = lvp_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

struct lvp_pipeline_layout *
lvp_pipeline_layout_create(struct lvp_device *device,
                           const VkPipelineLayoutCreateInfo* pCreateInfo,
                           const VkAllocationCallbacks* pAllocator)
{
   struct lvp_pipeline_layout *layout = vk_pipeline_layout_zalloc(&device->vk, sizeof(*layout),
                                                                  pCreateInfo);

   layout->push_constant_size = 0;
   for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
      const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
      layout->push_constant_size = MAX2(layout->push_constant_size,
                                        range->offset + range->size);
      layout->push_constant_stages |= (range->stageFlags & LVP_STAGE_MASK);
   }
   layout->push_constant_size = align(layout->push_constant_size, 16);
   return layout;
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_CreatePipelineLayout(
   VkDevice _device,
   const VkPipelineLayoutCreateInfo* pCreateInfo,
   const VkAllocationCallbacks* pAllocator,
   VkPipelineLayout* pPipelineLayout)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_pipeline_layout *layout = lvp_pipeline_layout_create(device, pCreateInfo, pAllocator);
   *pPipelineLayout = lvp_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

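/* Wrap a raw buffer device address in a transient, unbacked pipe_resource
 * whose backing points directly at the client memory (used for
 * VK_EXT_descriptor_buffer descriptors). */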
static struct pipe_resource *
get_buffer_resource(struct pipe_context *ctx, const VkDescriptorAddressInfoEXT *bda)
{
   struct pipe_screen *pscreen = ctx->screen;
   struct pipe_resource templ = {0};

   templ.screen = pscreen;
   templ.target = PIPE_BUFFER;
   templ.format = PIPE_FORMAT_R8_UNORM;
   templ.width0 = bda->range;
   templ.height0 = 1;
   templ.depth0 = 1;
   templ.array_size = 1;
   templ.bind |= PIPE_BIND_SAMPLER_VIEW;
   templ.bind |= PIPE_BIND_SHADER_IMAGE;
   templ.flags = PIPE_RESOURCE_FLAG_DONT_OVER_ALLOCATE;

   uint64_t size;
   struct pipe_resource *pres = pscreen->resource_create_unbacked(pscreen, &templ, &size);
   assert(size == bda->range);

   struct llvmpipe_memory_allocation alloc = {
      .cpu_addr = (void *)(uintptr_t)bda->address,
   };

   pscreen->resource_bind_backing(pscreen, pres, (void *)&alloc, 0, 0, 0);
   return pres;
}

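/* Create a transient sampler view over a buffer device address and return its
 * bindless texture handle. Handles are appended to device->bda_texture_handles
 * under the queue lock so they can be cleaned up with the device. */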
static struct lp_texture_handle
get_texture_handle_bda(struct lvp_device *device, const VkDescriptorAddressInfoEXT *bda, enum pipe_format format)
{
   struct pipe_context *ctx = device->queue.ctx;

   struct pipe_resource *pres = get_buffer_resource(ctx, bda);

   struct pipe_sampler_view templ;
   memset(&templ, 0, sizeof(templ));
   templ.target = PIPE_BUFFER;
   templ.swizzle_r = PIPE_SWIZZLE_X;
   templ.swizzle_g = PIPE_SWIZZLE_Y;
   templ.swizzle_b = PIPE_SWIZZLE_Z;
   templ.swizzle_a = PIPE_SWIZZLE_W;
   templ.format = format;
   templ.u.buf.size = bda->range;
   templ.texture = pres;
   templ.context = ctx;
   struct pipe_sampler_view *view = ctx->create_sampler_view(ctx, pres, &templ);

   simple_mtx_lock(&device->queue.lock);

   struct lp_texture_handle *handle = (void *)(uintptr_t)ctx->create_texture_handle(ctx, view, NULL);
   util_dynarray_append(&device->bda_texture_handles, struct lp_texture_handle *, handle);

   simple_mtx_unlock(&device->queue.lock);

   ctx->sampler_view_destroy(ctx, view);
   pipe_resource_reference(&pres, NULL);

   return *handle;
}

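/* Same as get_texture_handle_bda(), but returns a bindless image handle for
 * storage access, tracked in device->bda_image_handles. */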
static struct lp_texture_handle
get_image_handle_bda(struct lvp_device *device, const VkDescriptorAddressInfoEXT *bda, enum pipe_format format)
{
   struct pipe_context *ctx = device->queue.ctx;

   struct pipe_resource *pres = get_buffer_resource(ctx, bda);
   struct pipe_image_view view = {0};
   view.resource = pres;
   view.format = format;
   view.u.buf.size = bda->range;

   simple_mtx_lock(&device->queue.lock);

   struct lp_texture_handle *handle = (void *)(uintptr_t)ctx->create_image_handle(ctx, &view);
   util_dynarray_append(&device->bda_image_handles, struct lp_texture_handle *, handle);

   simple_mtx_unlock(&device->queue.lock);

   pipe_resource_reference(&pres, NULL);

   return *handle;
}

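/* A descriptor set is backed by a CPU-mapped pipe buffer laid out as an array
 * of lp_descriptor entries followed by any inline uniform block data;
 * immutable samplers are written into the mapping at creation time. */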
VkResult
lvp_descriptor_set_create(struct lvp_device *device,
                          struct lvp_descriptor_set_layout *layout,
                          struct lvp_descriptor_set **out_set)
{
   struct lvp_descriptor_set *set = vk_zalloc(&device->vk.alloc /* XXX: Use the pool */,
      sizeof(struct lvp_descriptor_set), 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!set)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &set->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_SET);
   set->layout = layout;
   vk_descriptor_set_layout_ref(&layout->vk);

   uint64_t bo_size = layout->size * sizeof(struct lp_descriptor);

   for (unsigned i = 0; i < layout->binding_count; i++)
      bo_size += layout->binding[i].uniform_block_size;

   bo_size = MAX2(bo_size, 64);

   struct pipe_resource template = {
      .bind = PIPE_BIND_CONSTANT_BUFFER,
      .screen = device->pscreen,
      .target = PIPE_BUFFER,
      .format = PIPE_FORMAT_R8_UNORM,
      .width0 = bo_size,
      .height0 = 1,
      .depth0 = 1,
      .array_size = 1,
      .flags = PIPE_RESOURCE_FLAG_DONT_OVER_ALLOCATE,
   };

   set->bo = device->pscreen->resource_create_unbacked(device->pscreen, &template, &bo_size);
   set->pmem = device->pscreen->allocate_memory(device->pscreen, bo_size);

   set->map = device->pscreen->map_memory(device->pscreen, set->pmem);
   memset(set->map, 0, bo_size);

   device->pscreen->resource_bind_backing(device->pscreen, set->bo, set->pmem, 0, 0, 0);

   for (uint32_t binding_index = 0; binding_index < layout->binding_count; binding_index++) {
      const struct lvp_descriptor_set_binding_layout *bind_layout = &set->layout->binding[binding_index];
      if (!bind_layout->immutable_samplers)
         continue;

      struct lp_descriptor *desc = set->map;
      desc += bind_layout->descriptor_index;

      for (uint32_t sampler_index = 0; sampler_index < bind_layout->array_size; sampler_index++) {
         if (bind_layout->immutable_samplers[sampler_index]) {
            for (uint32_t s = 0; s < bind_layout->stride; s++) {
               int idx = sampler_index * bind_layout->stride + s;
               desc[idx] = bind_layout->immutable_samplers[sampler_index]->desc;
            }
         }
      }
   }

   *out_set = set;

   return VK_SUCCESS;
}

void
lvp_descriptor_set_destroy(struct lvp_device *device,
                           struct lvp_descriptor_set *set)
{
   pipe_resource_reference(&set->bo, NULL);
   device->pscreen->unmap_memory(device->pscreen, set->pmem);
   device->pscreen->free_memory(device->pscreen, set->pmem);

   vk_descriptor_set_layout_unref(&device->vk, &set->layout->vk);
   vk_object_base_finish(&set->base);
   vk_free(&device->vk.alloc, set);
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_AllocateDescriptorSets(
   VkDevice _device,
   const VkDescriptorSetAllocateInfo* pAllocateInfo,
   VkDescriptorSet* pDescriptorSets)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_pool, pool, pAllocateInfo->descriptorPool);
   VkResult result = VK_SUCCESS;
   struct lvp_descriptor_set *set;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      LVP_FROM_HANDLE(lvp_descriptor_set_layout, layout,
                      pAllocateInfo->pSetLayouts[i]);

      result = lvp_descriptor_set_create(device, layout, &set);
      if (result != VK_SUCCESS)
         break;

      list_addtail(&set->link, &pool->sets);
      pDescriptorSets[i] = lvp_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS)
      lvp_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);

   return result;
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_FreeDescriptorSets(
   VkDevice _device,
   VkDescriptorPool descriptorPool,
   uint32_t count,
   const VkDescriptorSet* pDescriptorSets)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   for (uint32_t i = 0; i < count; i++) {
      LVP_FROM_HANDLE(lvp_descriptor_set, set, pDescriptorSets[i]);

      if (!set)
         continue;
      list_del(&set->link);
      lvp_descriptor_set_destroy(device, set);
   }
   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL lvp_UpdateDescriptorSets(
   VkDevice _device,
   uint32_t descriptorWriteCount,
   const VkWriteDescriptorSet* pDescriptorWrites,
   uint32_t descriptorCopyCount,
   const VkCopyDescriptorSet* pDescriptorCopies)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);

   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      LVP_FROM_HANDLE(lvp_descriptor_set, set, write->dstSet);
      const struct lvp_descriptor_set_binding_layout *bind_layout =
         &set->layout->binding[write->dstBinding];

      if (write->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         const VkWriteDescriptorSetInlineUniformBlock *uniform_data =
            vk_find_struct_const(write->pNext, WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK);
         assert(uniform_data);
         memcpy((uint8_t *)set->map + bind_layout->uniform_block_offset + write->dstArrayElement,
                uniform_data->pData, uniform_data->dataSize);
         continue;
      }

      struct lp_descriptor *desc = set->map;
      desc += bind_layout->descriptor_index + (write->dstArrayElement * bind_layout->stride);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         if (!bind_layout->immutable_samplers) {
            for (uint32_t j = 0; j < write->descriptorCount; j++) {
               LVP_FROM_HANDLE(lvp_sampler, sampler, write->pImageInfo[j].sampler);
               uint32_t didx = j * bind_layout->stride;

               for (unsigned k = 0; k < bind_layout->stride; k++) {
                  desc[didx + k].sampler = sampler->desc.sampler;
                  desc[didx + k].texture.sampler_index = sampler->desc.texture.sampler_index;
               }
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_image_view, iview,
                            write->pImageInfo[j].imageView);
            uint32_t didx = j * bind_layout->stride;
            if (iview) {
               unsigned plane_count = iview->plane_count;

               for (unsigned p = 0; p < plane_count; p++) {
                  lp_jit_texture_from_pipe(&desc[didx + p].texture, iview->planes[p].sv);
                  desc[didx + p].functions = iview->planes[p].texture_handle->functions;
               }

               if (!bind_layout->immutable_samplers) {
                  LVP_FROM_HANDLE(lvp_sampler, sampler,
                                  write->pImageInfo[j].sampler);

                  for (unsigned p = 0; p < plane_count; p++) {
                     desc[didx + p].sampler = sampler->desc.sampler;
                     desc[didx + p].texture.sampler_index = sampler->desc.texture.sampler_index;
                  }
               }
            } else {
               for (unsigned k = 0; k < bind_layout->stride; k++) {
                  desc[didx + k].functions = device->null_texture_handle->functions;
                  desc[didx + k].texture.sampler_index = 0;
               }
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_image_view, iview,
                            write->pImageInfo[j].imageView);
            uint32_t didx = j * bind_layout->stride;
            if (iview) {
               unsigned plane_count = iview->plane_count;

               for (unsigned p = 0; p < plane_count; p++) {
                  lp_jit_texture_from_pipe(&desc[didx + p].texture, iview->planes[p].sv);
                  desc[didx + p].functions = iview->planes[p].texture_handle->functions;
               }
            } else {
               for (unsigned k = 0; k < bind_layout->stride; k++) {
                  desc[didx + k].functions = device->null_texture_handle->functions;
                  desc[didx + k].texture.sampler_index = 0;
               }
            }
         }
         break;
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_image_view, iview,
                            write->pImageInfo[j].imageView);
            uint32_t didx = j * bind_layout->stride;
            if (iview) {
               unsigned plane_count = iview->plane_count;

               for (unsigned p = 0; p < plane_count; p++) {
                  lp_jit_image_from_pipe(&desc[didx + p].image, &iview->planes[p].iv);
                  desc[didx + p].functions = iview->planes[p].image_handle->functions;
               }
            } else {
               for (unsigned k = 0; k < bind_layout->stride; k++)
                  desc[didx + k].functions = device->null_image_handle->functions;
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_buffer_view, bview,
                            write->pTexelBufferView[j]);
            assert(bind_layout->stride == 1);
            if (bview) {
               lp_jit_texture_from_pipe(&desc[j].texture, bview->sv);
               desc[j].functions = bview->texture_handle->functions;
            } else {
               desc[j].functions = device->null_texture_handle->functions;
               desc[j].texture.sampler_index = 0;
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_buffer_view, bview,
                            write->pTexelBufferView[j]);
            assert(bind_layout->stride == 1);
            if (bview) {
               lp_jit_image_from_pipe(&desc[j].image, &bview->iv);
               desc[j].functions = bview->image_handle->functions;
            } else {
               desc[j].functions = device->null_image_handle->functions;
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_buffer, buffer, write->pBufferInfo[j].buffer);
            assert(bind_layout->stride == 1);
            if (buffer) {
               struct pipe_constant_buffer ubo = {
                  .buffer = buffer->bo,
                  .buffer_offset = write->pBufferInfo[j].offset,
                  .buffer_size = write->pBufferInfo[j].range,
               };

               if (write->pBufferInfo[j].range == VK_WHOLE_SIZE)
                  ubo.buffer_size = buffer->bo->width0 - ubo.buffer_offset;

               lp_jit_buffer_from_pipe_const(&desc[j].buffer, &ubo, device->pscreen);
            } else {
               lp_jit_buffer_from_pipe_const(&desc[j].buffer, &((struct pipe_constant_buffer){0}), device->pscreen);
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_buffer, buffer, write->pBufferInfo[j].buffer);
            assert(bind_layout->stride == 1);
            if (buffer) {
               struct pipe_shader_buffer ssbo = {
                  .buffer = buffer->bo,
                  .buffer_offset = write->pBufferInfo[j].offset,
                  .buffer_size = write->pBufferInfo[j].range,
               };

               if (write->pBufferInfo[j].range == VK_WHOLE_SIZE)
                  ssbo.buffer_size = buffer->bo->width0 - ssbo.buffer_offset;

               lp_jit_buffer_from_pipe(&desc[j].buffer, &ssbo);
            } else {
               lp_jit_buffer_from_pipe(&desc[j].buffer, &((struct pipe_shader_buffer){0}));
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: {
         const VkWriteDescriptorSetAccelerationStructureKHR *accel_structs =
            vk_find_struct_const(write->pNext, WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR);

         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            VK_FROM_HANDLE(vk_acceleration_structure, accel_struct,
                           accel_structs->pAccelerationStructures[j]);

            desc[j].accel_struct = accel_struct ? vk_acceleration_structure_get_va(accel_struct) : 0;
         }
         break;
      }

      default:
         unreachable("Unsupported descriptor type");
         break;
      }
   }

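   /* Descriptor copies are plain lp_descriptor assignments, except for inline
    * uniform blocks, where descriptorCount counts raw bytes. */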
   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      LVP_FROM_HANDLE(lvp_descriptor_set, src, copy->srcSet);
      LVP_FROM_HANDLE(lvp_descriptor_set, dst, copy->dstSet);

      const struct lvp_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct lp_descriptor *src_desc = src->map;
      src_desc += src_layout->descriptor_index;

      const struct lvp_descriptor_set_binding_layout *dst_layout =
         &dst->layout->binding[copy->dstBinding];
      struct lp_descriptor *dst_desc = dst->map;
      dst_desc += dst_layout->descriptor_index;

      if (src_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         memcpy((uint8_t *)dst->map + dst_layout->uniform_block_offset + copy->dstArrayElement,
                (uint8_t *)src->map + src_layout->uniform_block_offset + copy->srcArrayElement,
                copy->descriptorCount);
      } else {
         src_desc += copy->srcArrayElement;
         dst_desc += copy->dstArrayElement;

         for (uint32_t j = 0; j < copy->descriptorCount; j++)
            dst_desc[j] = src_desc[j];
      }
   }
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateDescriptorPool(
   VkDevice _device,
   const VkDescriptorPoolCreateInfo* pCreateInfo,
   const VkAllocationCallbacks* pAllocator,
   VkDescriptorPool* pDescriptorPool)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_descriptor_pool *pool;
   size_t size = sizeof(struct lvp_descriptor_pool);
   pool = vk_zalloc2(&device->vk.alloc, pAllocator, size, 8,
                     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &pool->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_POOL);
   pool->flags = pCreateInfo->flags;
   list_inithead(&pool->sets);
   *pDescriptorPool = lvp_descriptor_pool_to_handle(pool);
   return VK_SUCCESS;
}

static void lvp_reset_descriptor_pool(struct lvp_device *device,
                                      struct lvp_descriptor_pool *pool)
{
   struct lvp_descriptor_set *set, *tmp;
   LIST_FOR_EACH_ENTRY_SAFE(set, tmp, &pool->sets, link) {
      list_del(&set->link);
      lvp_descriptor_set_destroy(device, set);
   }
}

VKAPI_ATTR void VKAPI_CALL lvp_DestroyDescriptorPool(
   VkDevice _device,
   VkDescriptorPool _pool,
   const VkAllocationCallbacks* pAllocator)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_pool, pool, _pool);

   if (!_pool)
      return;

   lvp_reset_descriptor_pool(device, pool);
   vk_object_base_finish(&pool->base);
   vk_free2(&device->vk.alloc, pAllocator, pool);
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_ResetDescriptorPool(
   VkDevice _device,
   VkDescriptorPool _pool,
   VkDescriptorPoolResetFlags flags)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_pool, pool, _pool);

   lvp_reset_descriptor_pool(device, pool);
   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL lvp_GetDescriptorSetLayoutSupport(VkDevice device,
                                                             const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
                                                             VkDescriptorSetLayoutSupport* pSupport)
{
   const VkDescriptorSetLayoutBindingFlagsCreateInfo *variable_flags =
      vk_find_struct_const(pCreateInfo->pNext, DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);
   VkDescriptorSetVariableDescriptorCountLayoutSupport *variable_count =
      vk_find_struct(pSupport->pNext, DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT);
   if (variable_count) {
      variable_count->maxVariableDescriptorCount = 0;
      if (variable_flags) {
         for (unsigned i = 0; i < variable_flags->bindingCount; i++) {
            if (variable_flags->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)
               variable_count->maxVariableDescriptorCount = MAX_DESCRIPTORS;
         }
      }
   }
   pSupport->supported = true;
}

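/* Byte size of one source element in template data (pData) for the given
 * descriptor type, i.e. the stride at which entries of that type are read. */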
uint32_t
lvp_descriptor_update_template_entry_size(VkDescriptorType type)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      return sizeof(VkDescriptorImageInfo);
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      return sizeof(VkBufferView);
   case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
      return sizeof(VkAccelerationStructureKHR);
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
   default:
      return sizeof(VkDescriptorBufferInfo);
   }
}

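/* Non-static so template updates can also be applied outside
 * vkUpdateDescriptorSetWithTemplate(); mirrors the write path of
 * lvp_UpdateDescriptorSets(). */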
void
lvp_descriptor_set_update_with_template(VkDevice _device, VkDescriptorSet descriptorSet,
                                        VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                        const void *pData)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_set, set, descriptorSet);
   LVP_FROM_HANDLE(vk_descriptor_update_template, templ, descriptorUpdateTemplate);
   uint32_t i, j;

   for (i = 0; i < templ->entry_count; ++i) {
      struct vk_descriptor_template_entry *entry = &templ->entries[i];

      const uint8_t *pSrc = ((const uint8_t *) pData) + entry->offset;

      const struct lvp_descriptor_set_binding_layout *bind_layout =
         &set->layout->binding[entry->binding];

      if (entry->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         memcpy((uint8_t *)set->map + bind_layout->uniform_block_offset + entry->array_element,
                pSrc, entry->array_count);
         continue;
      }

      struct lp_descriptor *desc = set->map;
      desc += bind_layout->descriptor_index;

      for (j = 0; j < entry->array_count; ++j) {
         unsigned idx = j + entry->array_element;

         idx *= bind_layout->stride;
         switch (entry->type) {
         case VK_DESCRIPTOR_TYPE_SAMPLER: {
            VkDescriptorImageInfo *info = (VkDescriptorImageInfo *)pSrc;
            LVP_FROM_HANDLE(lvp_sampler, sampler, info->sampler);

            for (unsigned k = 0; k < bind_layout->stride; k++) {
               desc[idx + k].sampler = sampler->desc.sampler;
               desc[idx + k].texture.sampler_index = sampler->desc.texture.sampler_index;
            }
            break;
         }
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
            VkDescriptorImageInfo *info = (VkDescriptorImageInfo *)pSrc;
            LVP_FROM_HANDLE(lvp_image_view, iview, info->imageView);

            if (iview) {
               for (unsigned p = 0; p < iview->plane_count; p++) {
                  lp_jit_texture_from_pipe(&desc[idx + p].texture, iview->planes[p].sv);
                  desc[idx + p].functions = iview->planes[p].texture_handle->functions;
               }

               if (!bind_layout->immutable_samplers) {
                  LVP_FROM_HANDLE(lvp_sampler, sampler, info->sampler);

                  for (unsigned p = 0; p < iview->plane_count; p++) {
                     desc[idx + p].sampler = sampler->desc.sampler;
                     desc[idx + p].texture.sampler_index = sampler->desc.texture.sampler_index;
                  }
               }
            } else {
               for (unsigned k = 0; k < bind_layout->stride; k++) {
                  desc[idx + k].functions = device->null_texture_handle->functions;
                  desc[idx + k].texture.sampler_index = 0;
               }
            }
            break;
         }
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: {
            VkDescriptorImageInfo *info = (VkDescriptorImageInfo *)pSrc;
            LVP_FROM_HANDLE(lvp_image_view, iview, info->imageView);

            if (iview) {
               for (unsigned p = 0; p < iview->plane_count; p++) {
                  lp_jit_texture_from_pipe(&desc[idx + p].texture, iview->planes[p].sv);
                  desc[idx + p].functions = iview->planes[p].texture_handle->functions;
               }
            } else {
               for (unsigned k = 0; k < bind_layout->stride; k++) {
                  desc[idx + k].functions = device->null_texture_handle->functions;
                  desc[idx + k].texture.sampler_index = 0;
               }
            }
            break;
         }
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
            LVP_FROM_HANDLE(lvp_image_view, iview,
                            ((VkDescriptorImageInfo *)pSrc)->imageView);

            if (iview) {
               for (unsigned p = 0; p < iview->plane_count; p++) {
                  lp_jit_image_from_pipe(&desc[idx + p].image, &iview->planes[p].iv);
                  desc[idx + p].functions = iview->planes[p].image_handle->functions;
               }
            } else {
               for (unsigned k = 0; k < bind_layout->stride; k++)
                  desc[idx + k].functions = device->null_image_handle->functions;
            }
            break;
         }
         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: {
            LVP_FROM_HANDLE(lvp_buffer_view, bview,
                            *(VkBufferView *)pSrc);
            assert(bind_layout->stride == 1);
            if (bview) {
               lp_jit_texture_from_pipe(&desc[idx].texture, bview->sv);
               desc[idx].functions = bview->texture_handle->functions;
            } else {
               desc[idx].functions = device->null_texture_handle->functions;
               desc[idx].texture.sampler_index = 0;
            }
            break;
         }
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
            LVP_FROM_HANDLE(lvp_buffer_view, bview,
                            *(VkBufferView *)pSrc);
            assert(bind_layout->stride == 1);
            if (bview) {
               lp_jit_image_from_pipe(&desc[idx].image, &bview->iv);
               desc[idx].functions = bview->image_handle->functions;
            } else {
               desc[idx].functions = device->null_image_handle->functions;
            }
            break;
         }

         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: {
            VkDescriptorBufferInfo *info = (VkDescriptorBufferInfo *)pSrc;
            LVP_FROM_HANDLE(lvp_buffer, buffer, info->buffer);
            assert(bind_layout->stride == 1);
            if (buffer) {
               struct pipe_constant_buffer ubo = {
                  .buffer = buffer->bo,
                  .buffer_offset = info->offset,
                  .buffer_size = info->range,
               };

               if (info->range == VK_WHOLE_SIZE)
                  ubo.buffer_size = buffer->bo->width0 - ubo.buffer_offset;

               lp_jit_buffer_from_pipe_const(&desc[idx].buffer, &ubo, device->pscreen);
            } else {
               lp_jit_buffer_from_pipe_const(&desc[idx].buffer, &((struct pipe_constant_buffer){0}), device->pscreen);
            }
            break;
         }

         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            VkDescriptorBufferInfo *info = (VkDescriptorBufferInfo *)pSrc;
            LVP_FROM_HANDLE(lvp_buffer, buffer, info->buffer);
            assert(bind_layout->stride == 1);

            if (buffer) {
               struct pipe_shader_buffer ssbo = {
                  .buffer = buffer->bo,
                  .buffer_offset = info->offset,
                  .buffer_size = info->range,
               };

               if (info->range == VK_WHOLE_SIZE)
                  ssbo.buffer_size = buffer->bo->width0 - ssbo.buffer_offset;

               lp_jit_buffer_from_pipe(&desc[idx].buffer, &ssbo);
            } else {
               lp_jit_buffer_from_pipe(&desc[idx].buffer, &((struct pipe_shader_buffer){0}));
            }
            break;
         }

         case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: {
            VK_FROM_HANDLE(vk_acceleration_structure, accel_struct, *(VkAccelerationStructureKHR *)pSrc);
            desc[idx].accel_struct = accel_struct ? vk_acceleration_structure_get_va(accel_struct) : 0;
            break;
         }

         default:
            unreachable("Unsupported descriptor type");
            break;
         }

         pSrc += entry->stride;
      }
   }
}

VKAPI_ATTR void VKAPI_CALL
lvp_UpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                    VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                    const void *pData)
{
   lvp_descriptor_set_update_with_template(device, descriptorSet, descriptorUpdateTemplate, pData);
}

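/* VK_EXT_descriptor_buffer: a layout's size is the lp_descriptor array plus
 * any inline uniform block bytes, mirroring the backing-store size computed
 * in lvp_descriptor_set_create(). */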
VKAPI_ATTR void VKAPI_CALL lvp_GetDescriptorSetLayoutSizeEXT(
   VkDevice _device,
   VkDescriptorSetLayout _layout,
   VkDeviceSize* pSize)
{
   LVP_FROM_HANDLE(lvp_descriptor_set_layout, layout, _layout);

   *pSize = layout->size * sizeof(struct lp_descriptor);

   for (unsigned i = 0; i < layout->binding_count; i++)
      *pSize += layout->binding[i].uniform_block_size;
}

VKAPI_ATTR void VKAPI_CALL lvp_GetDescriptorSetLayoutBindingOffsetEXT(
   VkDevice _device,
   VkDescriptorSetLayout _layout,
   uint32_t binding,
   VkDeviceSize* pOffset)
{
   LVP_FROM_HANDLE(lvp_descriptor_set_layout, layout, _layout);
   assert(binding < layout->binding_count);

   const struct lvp_descriptor_set_binding_layout *bind_layout = &layout->binding[binding];
   if (bind_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
      *pOffset = bind_layout->uniform_block_offset;
   else
      *pOffset = bind_layout->descriptor_index * sizeof(struct lp_descriptor);
}

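/* VK_EXT_descriptor_buffer: write a descriptor of the requested type directly
 * into client memory. For image types, `size` may span several lp_descriptor
 * entries (one per plane). */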
VKAPI_ATTR void VKAPI_CALL lvp_GetDescriptorEXT(
   VkDevice _device,
   const VkDescriptorGetInfoEXT* pCreateInfo,
   size_t size,
   void* pDescriptor)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);

   struct lp_descriptor *desc = pDescriptor;

   struct pipe_sampler_state sampler = {
      .seamless_cube_map = 1,
      .max_lod = 0.25,
   };

   switch (pCreateInfo->type) {
   case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK: {
      unreachable("this is a spec violation");
      break;
   }
   case VK_DESCRIPTOR_TYPE_SAMPLER: {
      if (pCreateInfo->data.pSampler) {
         LVP_FROM_HANDLE(lvp_sampler, sampler, pCreateInfo->data.pSampler[0]);
         desc->sampler = sampler->desc.sampler;
         desc->texture.sampler_index = sampler->desc.texture.sampler_index;
      } else {
         lp_jit_sampler_from_pipe(&desc->sampler, &sampler);
         desc->texture.sampler_index = 0;
      }
      break;
   }

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
      const VkDescriptorImageInfo *info = pCreateInfo->data.pCombinedImageSampler;
      if (info && info->imageView) {
         LVP_FROM_HANDLE(lvp_image_view, iview, info->imageView);

         unsigned plane_count = iview->plane_count;

         for (unsigned p = 0; p < plane_count; p++) {
            lp_jit_texture_from_pipe(&desc[p].texture, iview->planes[p].sv);
            desc[p].functions = iview->planes[p].texture_handle->functions;

            if (info->sampler) {
               LVP_FROM_HANDLE(lvp_sampler, sampler, info->sampler);
               desc[p].sampler = sampler->desc.sampler;
               desc[p].texture.sampler_index = sampler->desc.texture.sampler_index;
            } else {
               lp_jit_sampler_from_pipe(&desc[p].sampler, &sampler);
               desc[p].texture.sampler_index = 0;
            }
         }
      } else {
         unsigned plane_count = size / sizeof(struct lp_descriptor);

         for (unsigned p = 0; p < plane_count; p++) {
            desc[p].functions = device->null_texture_handle->functions;
            desc[p].texture.sampler_index = 0;
         }
      }

      break;
   }

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: {
      if (pCreateInfo->data.pSampledImage && pCreateInfo->data.pSampledImage->imageView) {
         LVP_FROM_HANDLE(lvp_image_view, iview, pCreateInfo->data.pSampledImage->imageView);

         unsigned plane_count = iview->plane_count;

         for (unsigned p = 0; p < plane_count; p++) {
            lp_jit_texture_from_pipe(&desc[p].texture, iview->planes[p].sv);
            desc[p].functions = iview->planes[p].texture_handle->functions;
         }
      } else {
         unsigned plane_count = size / sizeof(struct lp_descriptor);

         for (unsigned p = 0; p < plane_count; p++) {
            desc[p].functions = device->null_texture_handle->functions;
            desc[p].texture.sampler_index = 0;
         }
      }
      break;
   }

   /* technically these use different pointers, but it's a union, so they're all the same */
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
      if (pCreateInfo->data.pStorageImage && pCreateInfo->data.pStorageImage->imageView) {
         LVP_FROM_HANDLE(lvp_image_view, iview, pCreateInfo->data.pStorageImage->imageView);

         unsigned plane_count = iview->plane_count;

         for (unsigned p = 0; p < plane_count; p++) {
            lp_jit_image_from_pipe(&desc[p].image, &iview->planes[p].iv);
            desc[p].functions = iview->planes[p].image_handle->functions;
         }
      } else {
         unsigned plane_count = size / sizeof(struct lp_descriptor);

         for (unsigned p = 0; p < plane_count; p++)
            desc[p].functions = device->null_image_handle->functions;
      }
      break;
   }
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: {
      const VkDescriptorAddressInfoEXT *bda = pCreateInfo->data.pUniformTexelBuffer;
      if (bda && bda->address) {
         enum pipe_format pformat = vk_format_to_pipe_format(bda->format);
         lp_jit_texture_buffer_from_bda(&desc->texture, (void*)(uintptr_t)bda->address, bda->range, pformat);
         desc->functions = get_texture_handle_bda(device, bda, pformat).functions;
      } else {
         desc->functions = device->null_texture_handle->functions;
         desc->texture.sampler_index = 0;
      }
      break;
   }
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
      const VkDescriptorAddressInfoEXT *bda = pCreateInfo->data.pStorageTexelBuffer;
      if (bda && bda->address) {
         enum pipe_format pformat = vk_format_to_pipe_format(bda->format);
         lp_jit_image_buffer_from_bda(&desc->image, (void *)(uintptr_t)bda->address, bda->range, pformat);
         desc->functions = get_image_handle_bda(device, bda, pformat).functions;
      } else {
         desc->functions = device->null_image_handle->functions;
      }
      break;
   }
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: {
      const VkDescriptorAddressInfoEXT *bda = pCreateInfo->data.pUniformBuffer;
      if (bda && bda->address) {
         struct pipe_constant_buffer ubo = {
            .user_buffer = (void *)(uintptr_t)bda->address,
            .buffer_size = bda->range,
         };

         lp_jit_buffer_from_pipe_const(&desc->buffer, &ubo, device->pscreen);
      } else {
         lp_jit_buffer_from_pipe_const(&desc->buffer, &((struct pipe_constant_buffer){0}), device->pscreen);
      }
      break;
   }
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: {
      const VkDescriptorAddressInfoEXT *bda = pCreateInfo->data.pStorageBuffer;
      if (bda && bda->address) {
         lp_jit_buffer_from_bda(&desc->buffer, (void *)(uintptr_t)bda->address, bda->range);
      } else {
         lp_jit_buffer_from_pipe(&desc->buffer, &((struct pipe_shader_buffer){0}));
      }
      break;
   }
   case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: {
      desc->accel_struct = pCreateInfo->data.accelerationStructure;
      break;
   }
   default:
      unreachable("Unsupported descriptor type");
      break;
   }
}