xref: /aosp_15_r20/external/mesa3d/src/gallium/drivers/zink/zink_resource.c (revision 6104692788411f58d303aa86923a9ff6ecaded22)
1 /*
2  * Copyright 2018 Collabora Ltd.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * on the rights to use, copy, modify, merge, publish, distribute, sub
8  * license, and/or sell copies of the Software, and to permit persons to whom
9  * the Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
18  * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
19  * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
20  * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
21  * USE OR OTHER DEALINGS IN THE SOFTWARE.
22  */
23 
24 #include "zink_resource.h"
25 
26 #include "zink_batch.h"
27 #include "zink_clear.h"
28 #include "zink_context.h"
29 #include "zink_fence.h"
30 #include "zink_format.h"
31 #include "zink_program.h"
32 #include "zink_screen.h"
33 #include "zink_kopper.h"
34 
35 #ifdef VK_USE_PLATFORM_METAL_EXT
36 #include "QuartzCore/CAMetalLayer.h"
37 #endif
38 
39 #include "vk_format.h"
40 #include "util/u_blitter.h"
41 #include "util/u_debug.h"
42 #include "util/format/u_format.h"
43 #include "util/u_transfer_helper.h"
44 #include "util/u_inlines.h"
45 #include "util/u_memory.h"
46 #include "util/u_upload_mgr.h"
47 #include "util/os_file.h"
48 #include "frontend/winsys_handle.h"
49 
50 #if !defined(__APPLE__)
51 #define ZINK_USE_DMABUF
52 #endif
53 
54 #if defined(ZINK_USE_DMABUF) && !defined(_WIN32)
55 #include "drm-uapi/drm_fourcc.h"
56 #else
57 /* these won't actually be used */
58 #define DRM_FORMAT_MOD_INVALID 0
59 #define DRM_FORMAT_MOD_LINEAR 0
60 #endif
61 
62 #ifdef __APPLE__
63 #include "MoltenVK/mvk_vulkan.h"
64 // Source of MVK_VERSION
65 #include "MoltenVK/mvk_config.h"
66 #endif /* __APPLE__ */
67 
68 #define ZINK_EXTERNAL_MEMORY_HANDLE 999
69 
70 
71 
72 struct zink_debug_mem_entry {
73    uint32_t count;
74    uint64_t size;
75    const char *name;
76 };
77 
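/* ZINK_DEBUG=mem bookkeeping: bucket allocations by name, tracking a running
 * count and total size (rounded up to 4KiB) per bucket, and return the
 * interned name so the BO can later be removed from the same bucket.
 */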
78 static const char *
79 zink_debug_mem_add(struct zink_screen *screen, uint64_t size, const char *name)
80 {
81    assert(name);
82 
83    simple_mtx_lock(&screen->debug_mem_lock);
84    struct hash_entry *entry = _mesa_hash_table_search(screen->debug_mem_sizes, name);
85    struct zink_debug_mem_entry *debug_bos;
86 
87    if (!entry) {
88       debug_bos = calloc(1, sizeof(struct zink_debug_mem_entry));
89       debug_bos->name = strdup(name);
90       _mesa_hash_table_insert(screen->debug_mem_sizes, debug_bos->name, debug_bos);
91    } else {
92       debug_bos = (struct zink_debug_mem_entry *) entry->data;
93    }
94 
95    debug_bos->count++;
96    debug_bos->size += align(size, 4096);
97    simple_mtx_unlock(&screen->debug_mem_lock);
98 
99    return debug_bos->name;
100 }
101 
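/* Inverse of zink_debug_mem_add(): subtract this BO from its name bucket and
 * drop the bucket once its count reaches zero.
 */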
102 static void
103 zink_debug_mem_del(struct zink_screen *screen, struct zink_bo *bo)
104 {
105    simple_mtx_lock(&screen->debug_mem_lock);
106    struct hash_entry *entry = _mesa_hash_table_search(screen->debug_mem_sizes, bo->name);
107    /* If we're finishing the BO, it should have been added already */
108    assert(entry);
109 
110    struct zink_debug_mem_entry *debug_bos = entry->data;
111    debug_bos->count--;
112    debug_bos->size -= align(zink_bo_get_size(bo), 4096);
113    if (!debug_bos->count) {
114       _mesa_hash_table_remove(screen->debug_mem_sizes, entry);
115       free((void*)debug_bos->name);
116       free(debug_bos);
117    }
118    simple_mtx_unlock(&screen->debug_mem_lock);
119 }
120 
121 static int
122 debug_bos_count_compare(const void *in_a, const void *in_b)
123 {
124    struct zink_debug_mem_entry *a = *(struct zink_debug_mem_entry **)in_a;
125    struct zink_debug_mem_entry *b = *(struct zink_debug_mem_entry **)in_b;
126    return a->count - b->count;
127 }
128 
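/* Dump the ZINK_DEBUG=mem table sorted by allocation count, followed by the
 * total number of BOs and memory submitted.
 */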
129 void
130 zink_debug_mem_print_stats(struct zink_screen *screen)
131 {
132    simple_mtx_lock(&screen->debug_mem_lock);
133 
134    /* Put the HT's sizes data in an array so we can sort by number of allocations. */
135    struct util_dynarray dyn;
136    util_dynarray_init(&dyn, NULL);
137 
138    uint32_t size = 0;
139    uint32_t count = 0;
140    hash_table_foreach(screen->debug_mem_sizes, entry)
141    {
142       struct zink_debug_mem_entry *debug_bos = entry->data;
143       util_dynarray_append(&dyn, struct zink_debug_mem_entry *, debug_bos);
144       size += debug_bos->size / 1024;
145       count += debug_bos->count;
146    }
147 
148    qsort(dyn.data,
149          util_dynarray_num_elements(&dyn, struct zink_debug_mem_entry *),
150          sizeof(struct zink_debug_mem_entry *), debug_bos_count_compare);
151 
152    util_dynarray_foreach(&dyn, struct zink_debug_mem_entry *, entryp)
153    {
154       struct zink_debug_mem_entry *debug_bos = *entryp;
155       mesa_logi("%30s: %4d bos, %lld kb\n", debug_bos->name, debug_bos->count,
156                 (long long) (debug_bos->size / 1024));
157    }
158 
159    mesa_logi("submitted %d bos (%d MB)\n", count, DIV_ROUND_UP(size, 1024));
160 
161    util_dynarray_fini(&dyn);
162 
163    simple_mtx_unlock(&screen->debug_mem_lock);
164 }
165 
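/* Equality callbacks for the cached buffer/image view hash tables: compare
 * the create-info structs starting at the 'flags' member so sType/pNext are
 * ignored.
 */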
166 static bool
167 equals_ivci(const void *a, const void *b)
168 {
169    const uint8_t *pa = a;
170    const uint8_t *pb = b;
171    size_t offset = offsetof(VkImageViewCreateInfo, flags);
172    return memcmp(pa + offset, pb + offset, sizeof(VkImageViewCreateInfo) - offset) == 0;
173 }
174 
175 static bool
176 equals_bvci(const void *a, const void *b)
177 {
178    const uint8_t *pa = a;
179    const uint8_t *pb = b;
180    size_t offset = offsetof(VkBufferViewCreateInfo, flags);
181    return memcmp(pa + offset, pb + offset, sizeof(VkBufferViewCreateInfo) - offset) == 0;
182 }
183 
184 static void
185 zink_transfer_flush_region(struct pipe_context *pctx,
186                            struct pipe_transfer *ptrans,
187                            const struct pipe_box *box);
188 
189 void
190 debug_describe_zink_resource_object(char *buf, const struct zink_resource_object *ptr)
191 {
192    sprintf(buf, "zink_resource_object");
193 }
194 
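/* Final teardown of a resource object: destroy cached buffer/image views,
 * release the Vulkan buffer/image (or kopper displaytarget / imported aux
 * handle), and drop the reference on the backing BO.
 */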
195 void
196 zink_destroy_resource_object(struct zink_screen *screen, struct zink_resource_object *obj)
197 {
198    if (obj->is_buffer) {
199       while (util_dynarray_contains(&obj->views, VkBufferView))
200          VKSCR(DestroyBufferView)(screen->dev, util_dynarray_pop(&obj->views, VkBufferView), NULL);
201    } else {
202       while (util_dynarray_contains(&obj->views, VkImageView))
203          VKSCR(DestroyImageView)(screen->dev, util_dynarray_pop(&obj->views, VkImageView), NULL);
204    }
205    if (!obj->dt && zink_debug & ZINK_DEBUG_MEM)
206       zink_debug_mem_del(screen, obj->bo);
207    util_dynarray_fini(&obj->views);
208    for (unsigned i = 0; i < ARRAY_SIZE(obj->copies); i++)
209       util_dynarray_fini(&obj->copies[i]);
210    if (obj->is_buffer) {
211       VKSCR(DestroyBuffer)(screen->dev, obj->buffer, NULL);
212       VKSCR(DestroyBuffer)(screen->dev, obj->storage_buffer, NULL);
213    } else if (obj->dt) {
214       zink_kopper_displaytarget_destroy(screen, obj->dt);
215    } else if (!obj->is_aux) {
216       VKSCR(DestroyImage)(screen->dev, obj->image, NULL);
217    } else {
218 #if defined(ZINK_USE_DMABUF) && !defined(_WIN32)
219       close(obj->handle);
220 #endif
221    }
222 
223    simple_mtx_destroy(&obj->view_lock);
224    if (obj->dt) {
225       FREE(obj->bo); //this is a dummy struct
226    } else
227       zink_bo_unref(screen, obj->bo);
228    FREE(obj);
229 }
230 
231 static void
232 zink_resource_destroy(struct pipe_screen *pscreen,
233                       struct pipe_resource *pres)
234 {
235    struct zink_screen *screen = zink_screen(pscreen);
236    struct zink_resource *res = zink_resource(pres);
237    /* prevent double-free when unrefing internal surfaces */
238    res->base.b.reference.count = 999;
239    if (pres->target == PIPE_BUFFER) {
240       util_range_destroy(&res->valid_buffer_range);
241       util_idalloc_mt_free(&screen->buffer_ids, res->base.buffer_id_unique);
242       assert(!_mesa_hash_table_num_entries(&res->bufferview_cache));
243       simple_mtx_destroy(&res->bufferview_mtx);
244       ralloc_free(res->bufferview_cache.table);
245    } else {
246       pipe_surface_reference(&res->surface, NULL);
247       assert(!_mesa_hash_table_num_entries(&res->surface_cache));
248       simple_mtx_destroy(&res->surface_mtx);
249       ralloc_free(res->surface_cache.table);
250    }
251    /* no need to do anything for the caches, these objects own the resource lifetimes */
252 
253    zink_resource_object_reference(screen, &res->obj, NULL);
254    threaded_resource_deinit(pres);
255    FREE_CL(res);
256 }
257 
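/* Map a gallium format to the image aspects it contains: depth and/or
 * stencil for ZS formats, color for everything else.
 */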
258 static VkImageAspectFlags
259 aspect_from_format(enum pipe_format fmt)
260 {
261    if (util_format_is_depth_or_stencil(fmt)) {
262       VkImageAspectFlags aspect = 0;
263       const struct util_format_description *desc = util_format_description(fmt);
264       if (util_format_has_depth(desc))
265          aspect |= VK_IMAGE_ASPECT_DEPTH_BIT;
266       if (util_format_has_stencil(desc))
267          aspect |= VK_IMAGE_ASPECT_STENCIL_BIT;
268       return aspect;
269    } else
270      return VK_IMAGE_ASPECT_COLOR_BIT;
271 }
272 
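/* Build a VkBufferCreateInfo for a gallium buffer template. Descriptor
 * buffers get only descriptor-buffer usage; all other buffers get the broad
 * set of usages gallium might later require, since the exact usage isn't
 * known at creation time.
 */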
273 static VkBufferCreateInfo
274 create_bci(struct zink_screen *screen, const struct pipe_resource *templ, unsigned bind)
275 {
276    VkBufferCreateInfo bci;
277    bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
278    bci.pNext = NULL;
279    bci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
280    bci.queueFamilyIndexCount = 0;
281    bci.pQueueFamilyIndices = NULL;
282    bci.size = templ->width0;
283    bci.flags = 0;
284    assert(bci.size > 0);
285 
286    if (bind & ZINK_BIND_DESCRIPTOR) {
287       /* gallium sizes are all uint32_t, while the total size of this buffer may exceed that limit */
288       bci.usage = 0;
289       bci.usage |= VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT |
290                    VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT;
291    } else {
292       bci.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
293                   VK_BUFFER_USAGE_TRANSFER_DST_BIT |
294                   VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
295 
296       bci.usage |= VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT |
297                   VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT |
298                   VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
299                   VK_BUFFER_USAGE_INDEX_BUFFER_BIT |
300                   VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
301                   VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT |
302                   VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT;
303    }
304    if (screen->info.have_KHR_buffer_device_address)
305       bci.usage |= VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT;
306 
307    if (bind & PIPE_BIND_SHADER_IMAGE)
308       bci.usage |= VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
309 
310    if (bind & PIPE_BIND_QUERY_BUFFER)
311       bci.usage |= VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT;
312 
313    if (templ->flags & PIPE_RESOURCE_FLAG_SPARSE)
314       bci.flags |= VK_BUFFER_CREATE_SPARSE_BINDING_BIT | VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT;
315    return bci;
316 }
317 
318 typedef enum {
319    USAGE_FAIL_NONE,
320    USAGE_FAIL_ERROR,
321    USAGE_FAIL_SUBOPTIMAL,
322 } usage_fail;
323 
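/* Validate an image create-info (optionally with a DRM modifier) against
 * vkGetPhysicalDeviceImageFormatProperties(2). Returns USAGE_FAIL_SUBOPTIMAL
 * when only host-image-copy usage would cost optimal device access, and
 * USAGE_FAIL_ERROR for a hard limit violation.
 */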
324 static usage_fail
325 check_ici(struct zink_screen *screen, VkImageCreateInfo *ici, uint64_t modifier)
326 {
327    VkImageFormatProperties image_props;
328    VkResult ret;
329    bool optimalDeviceAccess = true;
330    assert(modifier == DRM_FORMAT_MOD_INVALID ||
331           (VKSCR(GetPhysicalDeviceImageFormatProperties2) && screen->info.have_EXT_image_drm_format_modifier));
332    if (VKSCR(GetPhysicalDeviceImageFormatProperties2)) {
333       VkImageFormatProperties2 props2;
334       props2.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
335       props2.pNext = NULL;
336       VkSamplerYcbcrConversionImageFormatProperties ycbcr_props;
337       ycbcr_props.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES;
338       ycbcr_props.pNext = NULL;
339       if (screen->info.have_KHR_sampler_ycbcr_conversion)
340          props2.pNext = &ycbcr_props;
341       VkHostImageCopyDevicePerformanceQueryEXT hic = {
342          VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT,
343          props2.pNext,
344       };
345       if (screen->info.have_EXT_host_image_copy && ici->usage & VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT)
346          props2.pNext = &hic;
347       VkPhysicalDeviceImageFormatInfo2 info;
348       info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
349       /* possibly VkImageFormatListCreateInfo */
350       info.pNext = ici->pNext;
351       info.format = ici->format;
352       info.type = ici->imageType;
353       info.tiling = ici->tiling;
354       info.usage = ici->usage;
355       info.flags = ici->flags;
356 
357       VkPhysicalDeviceImageDrmFormatModifierInfoEXT mod_info;
358       if (modifier != DRM_FORMAT_MOD_INVALID) {
359          mod_info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT;
360          mod_info.pNext = info.pNext;
361          mod_info.drmFormatModifier = modifier;
362          mod_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
363          mod_info.queueFamilyIndexCount = 0;
364          mod_info.pQueueFamilyIndices = NULL;
365          info.pNext = &mod_info;
366       }
367 
368       ret = VKSCR(GetPhysicalDeviceImageFormatProperties2)(screen->pdev, &info, &props2);
369       /* this is using VK_IMAGE_CREATE_EXTENDED_USAGE_BIT and can't be validated */
370       if (vk_format_aspects(ici->format) & VK_IMAGE_ASPECT_PLANE_1_BIT)
371          ret = VK_SUCCESS;
372       image_props = props2.imageFormatProperties;
373       if (screen->info.have_EXT_host_image_copy && ici->usage & VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT)
374          optimalDeviceAccess = hic.optimalDeviceAccess;
375    } else
376       ret = VKSCR(GetPhysicalDeviceImageFormatProperties)(screen->pdev, ici->format, ici->imageType,
377                                                    ici->tiling, ici->usage, ici->flags, &image_props);
378    if (ret != VK_SUCCESS)
379       return USAGE_FAIL_ERROR;
380    if (ici->extent.depth > image_props.maxExtent.depth ||
381        ici->extent.height > image_props.maxExtent.height ||
382        ici->extent.width > image_props.maxExtent.width)
383       return USAGE_FAIL_ERROR;
384    if (ici->mipLevels > image_props.maxMipLevels)
385       return USAGE_FAIL_ERROR;
386    if (ici->arrayLayers > image_props.maxArrayLayers)
387       return USAGE_FAIL_ERROR;
388    if (!(ici->samples & image_props.sampleCounts))
389       return USAGE_FAIL_ERROR;
390    if (!optimalDeviceAccess)
391       return USAGE_FAIL_SUBOPTIMAL;
392    return USAGE_FAIL_NONE;
393 }
394 
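/* Translate gallium bind flags plus the format's feature bits into
 * VkImageUsageFlags. Sets *need_extended when an attachment usage is
 * required but unsupported, signalling the caller to retry with
 * VK_IMAGE_CREATE_EXTENDED_USAGE_BIT.
 */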
395 static VkImageUsageFlags
396 get_image_usage_for_feats(struct zink_screen *screen, VkFormatFeatureFlags2 feats, const struct pipe_resource *templ, unsigned bind, bool *need_extended)
397 {
398    VkImageUsageFlags usage = 0;
399    bool is_planar = util_format_get_num_planes(templ->format) > 1;
400    *need_extended = false;
401 
402    if (bind & ZINK_BIND_TRANSIENT)
403       usage |= VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
404    else {
405       /* sadly, gallium doesn't let us know if it'll ever need this, so we have to assume */
406       if (is_planar || (feats & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT))
407          usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
408       if (is_planar || (feats & VK_FORMAT_FEATURE_TRANSFER_DST_BIT))
409          usage |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;
410       if (feats & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)
411          usage |= VK_IMAGE_USAGE_SAMPLED_BIT;
412 
413       if ((is_planar || (feats & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) && (bind & PIPE_BIND_SHADER_IMAGE)) {
414          assert(templ->nr_samples <= 1 || screen->info.feats.features.shaderStorageImageMultisample);
415          usage |= VK_IMAGE_USAGE_STORAGE_BIT;
416       }
417    }
418 
419    if (bind & PIPE_BIND_RENDER_TARGET) {
420       if (feats & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) {
421          usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
422          if (!(bind & ZINK_BIND_TRANSIENT) && (bind & (PIPE_BIND_LINEAR | PIPE_BIND_SHARED)) != (PIPE_BIND_LINEAR | PIPE_BIND_SHARED))
423             usage |= VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
424          if (!(bind & ZINK_BIND_TRANSIENT) && screen->info.have_EXT_attachment_feedback_loop_layout)
425             usage |= VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT;
426       } else {
427          /* trust that gallium isn't going to give us anything wild */
428          *need_extended = true;
429          return 0;
430       }
431    } else if ((bind & PIPE_BIND_SAMPLER_VIEW) && !util_format_is_depth_or_stencil(templ->format)) {
432       if (!(feats & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) {
433          /* ensure we can u_blitter this later */
434          *need_extended = true;
435          return 0;
436       }
437       usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
438    }
439 
440    if (bind & PIPE_BIND_DEPTH_STENCIL) {
441       if (feats & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
442          usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
443       else
444          return 0;
445       if (screen->info.have_EXT_attachment_feedback_loop_layout && !(bind & ZINK_BIND_TRANSIENT))
446          usage |= VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT;
447    /* this is unlikely to occur and has been included for completeness */
448    } else if (bind & PIPE_BIND_SAMPLER_VIEW && !(usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT)) {
449       if (feats & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)
450          usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
451       else
452          return 0;
453    }
454 
455    if (bind & PIPE_BIND_STREAM_OUTPUT)
456       usage |= VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
457 
458    if (screen->info.have_EXT_host_image_copy && feats & VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT)
459       usage |= VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT;
460 
461    return usage;
462 }
463 
464 static VkFormatFeatureFlags
465 find_modifier_feats(const struct zink_modifier_prop *prop, uint64_t modifier, uint64_t *mod)
466 {
467    for (unsigned j = 0; j < prop->drmFormatModifierCount; j++) {
468       if (prop->pDrmFormatModifierProperties[j].drmFormatModifier == modifier) {
469          *mod = modifier;
470          return prop->pDrmFormatModifierProperties[j].drmFormatModifierTilingFeatures;
471       }
472    }
473    return 0;
474 }
475 
476 /* check HIC optimalness */
477 static bool
478 suboptimal_check_ici(struct zink_screen *screen, VkImageCreateInfo *ici, uint64_t *mod)
479 {
480    usage_fail fail = check_ici(screen, ici, *mod);
481    if (!fail)
482       return true;
483    if (fail == USAGE_FAIL_SUBOPTIMAL) {
484       ici->usage &= ~VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT;
485       fail = check_ici(screen, ici, *mod);
486       if (!fail)
487          return true;
488    }
489    return false;
490 }
491 
492 /* If the driver can't do mutable with this ICI, then try again after removing mutable (and
493  * thus also the list of formats we might mutate to)
494  */
495 static bool
496 double_check_ici(struct zink_screen *screen, VkImageCreateInfo *ici, VkImageUsageFlags usage, uint64_t *mod)
497 {
498    if (!usage)
499       return false;
500 
501    ici->usage = usage;
502 
503    if (suboptimal_check_ici(screen, ici, mod))
504       return true;
505    usage_fail fail = check_ici(screen, ici, *mod);
506    if (!fail)
507       return true;
508    if (fail == USAGE_FAIL_SUBOPTIMAL) {
509       ici->usage &= ~VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT;
510       fail = check_ici(screen, ici, *mod);
511       if (!fail)
512          return true;
513    }
514    const void *pNext = ici->pNext;
515    if (pNext) {
516       VkBaseOutStructure *prev = NULL;
517       VkBaseOutStructure *fmt_list = NULL;
518       vk_foreach_struct(strct, (void*)ici->pNext) {
519          if (strct->sType == VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO) {
520             fmt_list = strct;
521             if (prev) {
522                prev->pNext = strct->pNext;
523             } else {
524                ici->pNext = strct->pNext;
525             }
526             fmt_list->pNext = NULL;
527             break;
528          }
529          prev = strct;
530       }
531       ici->flags &= ~VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
532       if (suboptimal_check_ici(screen, ici, mod))
533          return true;
534       fmt_list->pNext = (void*)ici->pNext;
535       ici->pNext = fmt_list;
536       ici->flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
537    }
538    return false;
539 }
540 
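/* Choose usage flags (and, for dmabuf images, a DRM modifier) that the
 * driver can actually create: walk the supplied modifier list preferring
 * non-linear modifiers, falling back to linear only when nothing else
 * works, or use the plain tiling feature bits and relax attachment usage
 * as needed.
 */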
541 static VkImageUsageFlags
542 get_image_usage(struct zink_screen *screen, VkImageCreateInfo *ici, const struct pipe_resource *templ, unsigned bind, unsigned modifiers_count, uint64_t *modifiers, uint64_t *mod)
543 {
544    VkImageTiling tiling = ici->tiling;
545    bool need_extended = false;
546    *mod = DRM_FORMAT_MOD_INVALID;
547    if (modifiers_count) {
548       bool have_linear = false;
549       const struct zink_modifier_prop *prop = &screen->modifier_props[templ->format];
550       assert(tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT);
551       bool found = false;
552       uint64_t good_mod = 0;
553       VkImageUsageFlags good_usage = 0;
554       for (unsigned i = 0; i < modifiers_count; i++) {
555          if (modifiers[i] == DRM_FORMAT_MOD_LINEAR) {
556             have_linear = true;
557             if (!screen->info.have_EXT_image_drm_format_modifier)
558                break;
559             continue;
560          }
561          VkFormatFeatureFlags feats = find_modifier_feats(prop, modifiers[i], mod);
562          if (feats) {
563             VkImageUsageFlags usage = get_image_usage_for_feats(screen, feats, templ, bind, &need_extended);
564             assert(!need_extended);
565             if (double_check_ici(screen, ici, usage, mod)) {
566                if (!found) {
567                   found = true;
568                   good_mod = modifiers[i];
569                   good_usage = usage;
570                }
571             } else {
572                modifiers[i] = DRM_FORMAT_MOD_LINEAR;
573             }
574          }
575       }
576       if (found) {
577          *mod = good_mod;
578          return good_usage;
579       }
580       /* only try linear if no other options available */
581       if (have_linear) {
582          VkFormatFeatureFlags feats = find_modifier_feats(prop, DRM_FORMAT_MOD_LINEAR, mod);
583          if (feats) {
584             VkImageUsageFlags usage = get_image_usage_for_feats(screen, feats, templ, bind, &need_extended);
585             assert(!need_extended);
586             if (double_check_ici(screen, ici, usage, mod))
587                return usage;
588          }
589       }
590    } else {
591       struct zink_format_props props = screen->format_props[templ->format];
592       VkFormatFeatureFlags2 feats = tiling == VK_IMAGE_TILING_LINEAR ? props.linearTilingFeatures : props.optimalTilingFeatures;
593       if (ici->flags & VK_IMAGE_CREATE_EXTENDED_USAGE_BIT)
594          feats = UINT32_MAX;
595       VkImageUsageFlags usage = get_image_usage_for_feats(screen, feats, templ, bind, &need_extended);
596       if (need_extended) {
597          ici->flags |= VK_IMAGE_CREATE_EXTENDED_USAGE_BIT | VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
598          feats = UINT32_MAX;
599          usage = get_image_usage_for_feats(screen, feats, templ, bind, &need_extended);
600       }
601       if (double_check_ici(screen, ici, usage, mod))
602          return usage;
603       if (util_format_is_depth_or_stencil(templ->format)) {
604          if (!(templ->bind & PIPE_BIND_DEPTH_STENCIL)) {
605             usage &= ~VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
606             if (double_check_ici(screen, ici, usage, mod))
607                return usage;
608          }
609       } else if (!(templ->bind & PIPE_BIND_RENDER_TARGET)) {
610          usage &= ~VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
611          if (double_check_ici(screen, ici, usage, mod))
612             return usage;
613       }
614    }
615    *mod = DRM_FORMAT_MOD_INVALID;
616    return 0;
617 }
618 
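/* Iterate until a usable VkImageCreateInfo is found: try the requested
 * tiling first, then fall back through modifier -> optimal -> linear
 * tiling, and finally extended/mutable usage, before giving up. Also adds
 * CUBE_COMPATIBLE when the layout allows it. Returns the selected modifier.
 */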
619 static uint64_t
620 eval_ici(struct zink_screen *screen, VkImageCreateInfo *ici, const struct pipe_resource *templ, unsigned bind, unsigned modifiers_count, uint64_t *modifiers, bool *success)
621 {
622    /* sampleCounts will be set to VK_SAMPLE_COUNT_1_BIT if at least one of the following conditions is true:
623     * - flags contains VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT
624     *
625     * 44.1.1. Supported Sample Counts
626     */
627    bool want_cube = ici->samples == 1 &&
628                     (templ->target == PIPE_TEXTURE_CUBE ||
629                     templ->target == PIPE_TEXTURE_CUBE_ARRAY ||
630                     (templ->target == PIPE_TEXTURE_2D_ARRAY && ici->extent.width == ici->extent.height && ici->arrayLayers >= 6));
631 
632    if (ici->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT)
633       modifiers_count = 0;
634 
635    bool first = true;
636    bool tried[2] = {0};
637    uint64_t mod = DRM_FORMAT_MOD_INVALID;
638 retry:
639    while (!ici->usage) {
640       if (!first) {
641          switch (ici->tiling) {
642          case VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT:
643             ici->tiling = VK_IMAGE_TILING_OPTIMAL;
644             modifiers_count = 0;
645             break;
646          case VK_IMAGE_TILING_OPTIMAL:
647             ici->tiling = VK_IMAGE_TILING_LINEAR;
648             break;
649          case VK_IMAGE_TILING_LINEAR:
650             if (bind & PIPE_BIND_LINEAR) {
651                *success = false;
652                return DRM_FORMAT_MOD_INVALID;
653             }
654             ici->tiling = VK_IMAGE_TILING_OPTIMAL;
655             break;
656          default:
657             unreachable("unhandled tiling mode");
658          }
659          if (tried[ici->tiling]) {
660             if (ici->flags & VK_IMAGE_CREATE_EXTENDED_USAGE_BIT) {
661                *success = false;
662                return DRM_FORMAT_MOD_INVALID;
663             }
664             ici->flags |= VK_IMAGE_CREATE_EXTENDED_USAGE_BIT | VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
665             tried[0] = false;
666             tried[1] = false;
667             first = true;
668             goto retry;
669          }
670       }
671       ici->usage = get_image_usage(screen, ici, templ, bind, modifiers_count, modifiers, &mod);
672       first = false;
673       if (ici->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT)
674          tried[ici->tiling] = true;
675    }
676    if (want_cube) {
677       ici->flags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
678       if ((get_image_usage(screen, ici, templ, bind, modifiers_count, modifiers, &mod) & ici->usage) != ici->usage)
679          ici->flags &= ~VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
680    }
681 
682    *success = true;
683    return mod;
684 }
685 
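/* Fill in the parts of the VkImageCreateInfo that follow directly from the
 * gallium template: create flags, image type, format, extent, mip/layer
 * counts, sample count, and the initial tiling choice.
 */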
686 static void
687 init_ici(struct zink_screen *screen, VkImageCreateInfo *ici, const struct pipe_resource *templ, unsigned bind, unsigned modifiers_count)
688 {
689    ici->sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
690    /* pNext may already be set */
691    if (util_format_get_num_planes(templ->format) > 1)
692       ici->flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT;
693    else if (bind & ZINK_BIND_MUTABLE)
694       ici->flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
695    else
696       ici->flags = 0;
697    if (ici->flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT)
698       /* unset VkImageFormatListCreateInfo if mutable */
699       ici->pNext = NULL;
700    else if (ici->pNext)
701       /* add mutable if VkImageFormatListCreateInfo */
702       ici->flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
703    ici->usage = 0;
704    ici->queueFamilyIndexCount = 0;
705    ici->pQueueFamilyIndices = NULL;
706 
707    /* assume we're going to be doing some CompressedTexSubImage */
708    if (util_format_is_compressed(templ->format) && (ici->flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT) &&
709        !vk_find_struct_const(ici->pNext, IMAGE_FORMAT_LIST_CREATE_INFO))
710       ici->flags |= VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT;
711 
712    if (templ->flags & PIPE_RESOURCE_FLAG_SPARSE)
713       ici->flags |= VK_IMAGE_CREATE_SPARSE_BINDING_BIT | VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT;
714 
715    bool need_2D = false;
716    switch (templ->target) {
717    case PIPE_TEXTURE_1D:
718    case PIPE_TEXTURE_1D_ARRAY:
719       if (templ->flags & PIPE_RESOURCE_FLAG_SPARSE)
720          need_2D |= screen->need_2D_sparse;
721       if (util_format_is_depth_or_stencil(templ->format))
722          need_2D |= screen->need_2D_zs;
723       ici->imageType = need_2D ? VK_IMAGE_TYPE_2D : VK_IMAGE_TYPE_1D;
724       break;
725 
726    case PIPE_TEXTURE_CUBE:
727    case PIPE_TEXTURE_CUBE_ARRAY:
728    case PIPE_TEXTURE_2D:
729    case PIPE_TEXTURE_2D_ARRAY:
730    case PIPE_TEXTURE_RECT:
731       ici->imageType = VK_IMAGE_TYPE_2D;
732       break;
733 
734    case PIPE_TEXTURE_3D:
735       ici->imageType = VK_IMAGE_TYPE_3D;
736       if (!(templ->flags & PIPE_RESOURCE_FLAG_SPARSE))
737          ici->flags |= VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT;
738       if (screen->info.have_EXT_image_2d_view_of_3d &&
739           (screen->driver_workarounds.can_2d_view_sparse || !(templ->flags & PIPE_RESOURCE_FLAG_SPARSE)))
740          ici->flags |= VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT;
741       break;
742 
743    case PIPE_BUFFER:
744       unreachable("PIPE_BUFFER should already be handled");
745 
746    default:
747       unreachable("Unknown target");
748    }
749 
750    if (screen->info.have_EXT_sample_locations &&
751        bind & PIPE_BIND_DEPTH_STENCIL &&
752        util_format_has_depth(util_format_description(templ->format)))
753       ici->flags |= VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT;
754 
755    ici->format = zink_get_format(screen, templ->format);
756    ici->extent.width = templ->width0;
757    ici->extent.height = templ->height0;
758    ici->extent.depth = templ->depth0;
759    ici->mipLevels = templ->last_level + 1;
760    ici->arrayLayers = MAX2(templ->array_size, 1);
761    ici->samples = templ->nr_samples ? templ->nr_samples : VK_SAMPLE_COUNT_1_BIT;
762    ici->tiling = screen->info.have_EXT_image_drm_format_modifier && modifiers_count ?
763                  VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT :
764                  bind & (PIPE_BIND_LINEAR | ZINK_BIND_DMABUF) ? VK_IMAGE_TILING_LINEAR : VK_IMAGE_TILING_OPTIMAL;
765    /* XXX: does this have perf implications anywhere? hopefully not */
766    if (ici->samples == VK_SAMPLE_COUNT_1_BIT &&
767       screen->info.have_EXT_multisampled_render_to_single_sampled &&
768       ici->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT)
769       ici->flags |= VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT;
770    ici->sharingMode = VK_SHARING_MODE_EXCLUSIVE;
771    ici->initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
772 
773    if (templ->target == PIPE_TEXTURE_CUBE)
774       ici->arrayLayers *= 6;
775 }
776 
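/* Create the VkSamplerYcbcrConversion used to sample multi-planar (YCbCr)
 * images; chroma siting is chosen from the format's feature bits.
 */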
777 static inline bool
778 create_sampler_conversion(VkImageCreateInfo ici, struct zink_screen *screen,
779                           struct zink_resource_object *obj)
780 {
781    if (obj->vkfeats & VK_FORMAT_FEATURE_DISJOINT_BIT)
782       ici.flags |= VK_IMAGE_CREATE_DISJOINT_BIT;
783    VkSamplerYcbcrConversionCreateInfo sycci = {0};
784    sycci.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
785    sycci.pNext = NULL;
786    sycci.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
787    sycci.ycbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709;
788    sycci.ycbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL;
789    sycci.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
790    sycci.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
791    sycci.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
792    sycci.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
793    if (!obj->vkfeats || (obj->vkfeats & VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT)) {
794       sycci.xChromaOffset = VK_CHROMA_LOCATION_COSITED_EVEN;
795       sycci.yChromaOffset = VK_CHROMA_LOCATION_COSITED_EVEN;
796    } else {
797       assert(obj->vkfeats & VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT);
798       sycci.xChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
799       sycci.yChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
800    }
801    sycci.chromaFilter = VK_FILTER_LINEAR;
802    sycci.forceExplicitReconstruction = VK_FALSE;
803    VkResult res = VKSCR(CreateSamplerYcbcrConversion)(screen->dev, &sycci, NULL, &obj->sampler_conversion);
804    if (res != VK_SUCCESS) {
805       mesa_loge("ZINK: vkCreateSamplerYcbcrConversion failed");
806       return false;
807    }
808    return true;
809 }
810 
811 static const VkImageAspectFlags plane_aspects[] = {
812    VK_IMAGE_ASPECT_PLANE_0_BIT,
813    VK_IMAGE_ASPECT_PLANE_1_BIT,
814    VK_IMAGE_ASPECT_PLANE_2_BIT,
815 };
816 
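/* Query the image's memory requirements, summing per-plane sizes and
 * recording plane offsets for multi-planar formats. Returns whether a
 * dedicated allocation is preferred or required.
 */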
817 static inline bool
818 get_image_memory_requirement(struct zink_screen *screen, struct zink_resource_object *obj,
819                              unsigned num_planes, VkMemoryRequirements *reqs)
820 {
821    bool need_dedicated = false;
822    if (VKSCR(GetImageMemoryRequirements2)) {
823       VkMemoryRequirements2 req2;
824       req2.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
825       VkImageMemoryRequirementsInfo2 info2;
826       info2.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
827       info2.pNext = NULL;
828       info2.image = obj->image;
829       VkMemoryDedicatedRequirements ded;
830       ded.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
831       ded.pNext = NULL;
832       req2.pNext = &ded;
833       VkImagePlaneMemoryRequirementsInfo plane;
834       plane.sType = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO;
835       plane.pNext = NULL;
836       if (num_planes > 1)
837          info2.pNext = &plane;
838       unsigned offset = 0;
839       for (unsigned i = 0; i < num_planes; i++) {
840          assert(i < ARRAY_SIZE(plane_aspects));
841          plane.planeAspect = plane_aspects[i];
842          VKSCR(GetImageMemoryRequirements2)(screen->dev, &info2, &req2);
843          if (!i)
844             reqs->alignment = req2.memoryRequirements.alignment;
845          obj->plane_offsets[i] = offset;
846          offset += req2.memoryRequirements.size;
847          reqs->size += req2.memoryRequirements.size;
848          reqs->memoryTypeBits |= req2.memoryRequirements.memoryTypeBits;
849          need_dedicated |= ded.prefersDedicatedAllocation || ded.requiresDedicatedAllocation;
850       }
851    } else {
852       VKSCR(GetImageMemoryRequirements)(screen->dev, obj->image, reqs);
853    }
854    return need_dedicated;
855 }
856 
857 static inline VkFormatFeatureFlags
858 get_format_feature_flags(VkImageCreateInfo ici, struct zink_screen *screen, const struct pipe_resource *templ)
859 {
860    VkFormatFeatureFlags feats = 0;
861    switch (ici.tiling) {
862    case VK_IMAGE_TILING_LINEAR:
863       feats = screen->format_props[templ->format].linearTilingFeatures;
864       break;
865    case VK_IMAGE_TILING_OPTIMAL:
866       feats = screen->format_props[templ->format].optimalTilingFeatures;
867       break;
868    case VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT:
869       feats = VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM;
870       /*
871          If tiling is VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT, the value of
872          imageCreateFormatFeatures is found by calling vkGetPhysicalDeviceFormatProperties2
873          with VkImageFormatProperties::format equal to VkImageCreateInfo::format and with
874          VkDrmFormatModifierPropertiesListEXT chained into VkImageFormatProperties2; by
875          collecting all members of the returned array
876          VkDrmFormatModifierPropertiesListEXT::pDrmFormatModifierProperties
877          whose drmFormatModifier belongs to imageCreateDrmFormatModifiers; and by taking the bitwise
878          intersection, over the collected array members, of drmFormatModifierTilingFeatures.
879          (The resultant imageCreateFormatFeatures may be empty).
880          * -Chapter 12. Resource Creation
881        */
882       for (unsigned i = 0; i < screen->modifier_props[templ->format].drmFormatModifierCount; i++)
883          feats &= screen->modifier_props[templ->format].pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
884       break;
885    default:
886          unreachable("unknown tiling");
887    }
888    return feats;
889 }
890 
891 #if !defined(_WIN32)
892    #define ZINK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_BIT VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT
893 #else
894    #define ZINK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_BIT VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT
895 #endif
896 
897 
898 struct mem_alloc_info {
899    struct winsys_handle *whandle;
900    VkMemoryPropertyFlags flags;
901    enum zink_alloc_flag aflags;
902    bool need_dedicated;
903    bool shared;
904    const void *user_mem;
905    VkExternalMemoryHandleTypeFlags external;
906    VkExternalMemoryHandleTypeFlags export_types;
907 };
908 
909 static inline bool
910 get_export_flags(struct zink_screen *screen, const struct pipe_resource *templ, struct mem_alloc_info *alloc_info)
911 {
912    bool needs_export = (templ->bind & (ZINK_BIND_VIDEO | ZINK_BIND_DMABUF)) != 0;
913    if (alloc_info->whandle) {
914       if (alloc_info->whandle->type == WINSYS_HANDLE_TYPE_FD ||
915           alloc_info->whandle->type == ZINK_EXTERNAL_MEMORY_HANDLE)
916          needs_export |= true;
917       else
918          unreachable("unknown handle type");
919    }
920    if (needs_export) {
921       if (alloc_info->whandle && alloc_info->whandle->type == ZINK_EXTERNAL_MEMORY_HANDLE) {
922          alloc_info->external = ZINK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_BIT;
923       } else if (screen->info.have_EXT_external_memory_dma_buf) {
924          alloc_info->external = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
925          alloc_info->export_types |= VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
926       } else {
927          return false;
928       }
929    }
930    if (alloc_info->user_mem) {
931       assert(!alloc_info->whandle);
932       alloc_info->external = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT;
933       alloc_info->export_types = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT;
934    }
935    /* we may export WINSYS_HANDLE_TYPE_FD handle which is dma-buf */
936    if (templ->bind & PIPE_BIND_SHARED && screen->info.have_EXT_external_memory_dma_buf)
937       alloc_info->export_types |= VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
938    return true;
939 }
940 
941 enum resource_object_create_result {
942    roc_success,
943    roc_success_early_return,
944    roc_fail_and_free_object,
945    roc_fail_and_cleanup_object,
946    roc_fail_and_cleanup_all
947 };
948 
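/* Allocate (or import) the backing memory for a resource: chain any
 * dedicated/export/import structs onto the VkMemoryAllocateInfo, pick a
 * heap from the requested property flags, then try every compatible memory
 * type, demoting BAR allocations to more forgiving heaps if allocation
 * fails.
 */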
949 static inline enum resource_object_create_result
950 allocate_bo(struct zink_screen *screen, const struct pipe_resource *templ,
951             VkMemoryRequirements *reqs, struct zink_resource_object *obj,
952             struct mem_alloc_info *alloc_info)
953 {
954    VkMemoryAllocateInfo mai;
955    mai.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
956    mai.pNext = NULL;
957    mai.allocationSize = reqs->size;
958    enum zink_heap heap = zink_heap_from_domain_flags(alloc_info->flags, alloc_info->aflags);
959    if (templ->flags & PIPE_RESOURCE_FLAG_MAP_COHERENT) {
960       if (!(vk_domain_from_heap(heap) & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT))
961          heap = zink_heap_from_domain_flags(alloc_info->flags & ~VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, alloc_info->aflags);
962    }
963 
964    VkMemoryDedicatedAllocateInfo ded_alloc_info = {
965       .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
966       .pNext = mai.pNext,
967       .image = obj->image,
968       .buffer = VK_NULL_HANDLE,
969    };
970 
971    if (screen->info.have_KHR_dedicated_allocation && alloc_info->need_dedicated) {
972       ded_alloc_info.pNext = mai.pNext;
973       mai.pNext = &ded_alloc_info;
974    }
975 
976    VkExportMemoryAllocateInfo emai;
977    if ((templ->bind & ZINK_BIND_VIDEO) || ((templ->bind & PIPE_BIND_SHARED) && alloc_info->shared) || (templ->bind & ZINK_BIND_DMABUF)) {
978       emai.sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
979       emai.handleTypes = alloc_info->export_types;
980 
981       emai.pNext = mai.pNext;
982       mai.pNext = &emai;
983       obj->exportable = true;
984    }
985 
986 #ifdef ZINK_USE_DMABUF
987 
988 #if !defined(_WIN32)
989    VkImportMemoryFdInfoKHR imfi = {
990       VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
991       NULL,
992    };
993 
994    if (alloc_info->whandle) {
995       imfi.pNext = NULL;
996       imfi.handleType = alloc_info->external;
997       imfi.fd = os_dupfd_cloexec(alloc_info->whandle->handle);
998       if (imfi.fd < 0) {
999          mesa_loge("ZINK: failed to dup dmabuf fd: %s\n", strerror(errno));
1000          return roc_fail_and_cleanup_object;
1001       }
1002 
1003       imfi.pNext = mai.pNext;
1004       mai.pNext = &imfi;
1005    }
1006 #else
1007    VkImportMemoryWin32HandleInfoKHR imfi = {
1008       VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
1009       NULL,
1010    };
1011 
1012    if (alloc_info->whandle) {
1013       HANDLE source_target = GetCurrentProcess();
1014       HANDLE out_handle;
1015 
1016       bool result = DuplicateHandle(source_target, alloc_info->whandle->handle, source_target, &out_handle, 0, false, DUPLICATE_SAME_ACCESS);
1017 
1018       if (!result || !out_handle) {
1019          mesa_loge("ZINK: failed to DuplicateHandle with winerr: %08x\n", (int)GetLastError());
1020          return roc_fail_and_cleanup_object;
1021       }
1022 
1023       imfi.pNext = NULL;
1024       imfi.handleType = alloc_info->external;
1025       imfi.handle = out_handle;
1026 
1027       imfi.pNext = mai.pNext;
1028       mai.pNext = &imfi;
1029    }
1030 #endif
1031 
1032 #endif
1033 
1034    VkImportMemoryHostPointerInfoEXT imhpi = {
1035       VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
1036       NULL,
1037    };
1038    if (alloc_info->user_mem) {
1039       imhpi.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT;
1040       imhpi.pHostPointer = (void*)alloc_info->user_mem;
1041       imhpi.pNext = mai.pNext;
1042       mai.pNext = &imhpi;
1043    }
1044 
1045    unsigned alignment = MAX2(reqs->alignment, 256);
1046    if (templ->usage == PIPE_USAGE_STAGING && obj->is_buffer)
1047       alignment = MAX2(alignment, screen->info.props.limits.minMemoryMapAlignment);
1048    obj->alignment = alignment;
1049 
1050    if (zink_mem_type_idx_from_types(screen, heap, reqs->memoryTypeBits) == UINT32_MAX) {
1051       /* not valid based on reqs; demote to more compatible type */
1052       switch (heap) {
1053       case ZINK_HEAP_DEVICE_LOCAL_VISIBLE:
1054          heap = ZINK_HEAP_DEVICE_LOCAL;
1055          break;
1056       case ZINK_HEAP_HOST_VISIBLE_COHERENT_CACHED:
1057          heap = ZINK_HEAP_HOST_VISIBLE_COHERENT;
1058          break;
1059       default:
1060          break;
1061       }
1062       assert(zink_mem_type_idx_from_types(screen, heap, reqs->memoryTypeBits) != UINT32_MAX);
1063    }
1064 
1065    while (1) {
1066       /* iterate over all available memory types to reduce chance of oom */
1067       for (unsigned i = 0; !obj->bo && i < screen->heap_count[heap]; i++) {
1068          if (!(reqs->memoryTypeBits & BITFIELD_BIT(screen->heap_map[heap][i])))
1069             continue;
1070 
1071          mai.memoryTypeIndex = screen->heap_map[heap][i];
1072          obj->bo = zink_bo(zink_bo_create(screen, reqs->size, alignment, heap, mai.pNext ? ZINK_ALLOC_NO_SUBALLOC : 0, mai.memoryTypeIndex, mai.pNext));
1073       }
1074 
1075       if (obj->bo || heap != ZINK_HEAP_DEVICE_LOCAL_VISIBLE)
1076          break;
1077 
1078       /* demote BAR allocations to a different heap on failure to avoid oom */
1079       if (templ->flags & PIPE_RESOURCE_FLAG_MAP_COHERENT || templ->usage == PIPE_USAGE_DYNAMIC)
1080           heap = ZINK_HEAP_HOST_VISIBLE_COHERENT;
1081       else
1082           heap = ZINK_HEAP_DEVICE_LOCAL;
1083    };
1084 
1085    return obj->bo ? roc_success : roc_fail_and_cleanup_object;
1086 }
1087 
1088 static inline bool
1089 update_alloc_info_flags(struct zink_screen *screen, const struct pipe_resource *templ,
1090                         VkMemoryRequirements *reqs, struct mem_alloc_info *alloc_info)
1091 {
1092    if (templ->flags & PIPE_RESOURCE_FLAG_MAP_COHERENT || templ->usage == PIPE_USAGE_DYNAMIC)
1093       alloc_info->flags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1094    else if (!(alloc_info->flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) &&
1095             templ->usage == PIPE_USAGE_STAGING)
1096       alloc_info->flags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1097 
1098    if (templ->bind & ZINK_BIND_TRANSIENT)
1099       alloc_info->flags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
1100 
1101    if (alloc_info->user_mem) {
1102       VkExternalMemoryHandleTypeFlagBits handle_type = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT;
1103       VkMemoryHostPointerPropertiesEXT memory_host_pointer_properties = {0};
1104       memory_host_pointer_properties.sType = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT;
1105       memory_host_pointer_properties.pNext = NULL;
1106       VkResult res = VKSCR(GetMemoryHostPointerPropertiesEXT)(screen->dev, handle_type, alloc_info->user_mem, &memory_host_pointer_properties);
1107       if (res != VK_SUCCESS) {
1108          mesa_loge("ZINK: vkGetMemoryHostPointerPropertiesEXT failed");
1109          return false;
1110       }
1111       reqs->memoryTypeBits &= memory_host_pointer_properties.memoryTypeBits;
1112       alloc_info->flags &= ~VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
1113    }
1114 
1115    alloc_info->aflags = templ->flags & PIPE_RESOURCE_FLAG_SPARSE ? ZINK_ALLOC_SPARSE : 0;
1116    return true;
1117 }
1118 
1119 static inline void
1120 update_obj_info(struct zink_screen *screen, struct zink_resource_object *obj,
1121                 const struct pipe_resource *templ, struct mem_alloc_info *alloc_info)
1122 {
1123    if (alloc_info->aflags == ZINK_ALLOC_SPARSE) {
1124       obj->size = templ->width0;
1125    } else {
1126       obj->offset = zink_bo_get_offset(obj->bo);
1127       obj->size = zink_bo_get_size(obj->bo);
1128    }
1129 
1130    obj->coherent = screen->info.mem_props.memoryTypes[obj->bo->base.base.placement].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1131    if (!(templ->flags & PIPE_RESOURCE_FLAG_SPARSE)) {
1132       obj->host_visible = screen->info.mem_props.memoryTypes[obj->bo->base.base.placement].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
1133    }
1134 }
1135 
1136 static inline void
1137 debug_resource_mem(struct zink_resource_object *obj, const struct pipe_resource *templ, struct zink_screen *screen)
1138 {
1139       char buf[4096];
1140       unsigned idx = 0;
1141       if (obj->is_buffer) {
1142          size_t size = (size_t)DIV_ROUND_UP(obj->size, 1024);
1143          if (templ->bind == PIPE_BIND_QUERY_BUFFER && templ->usage == PIPE_USAGE_STAGING) //internal qbo
1144             idx += snprintf(buf, sizeof(buf), "QBO(%zu)", size);
1145          else
1146             idx += snprintf(buf, sizeof(buf), "BUF(%zu)", size);
1147       } else {
1148          idx += snprintf(buf, sizeof(buf), "IMG(%s:%ux%ux%u)", util_format_short_name(templ->format), templ->width0, templ->height0, templ->depth0);
1149       }
1150       /*
1151       zink_vkflags_func flag_func = obj->is_buffer ? (zink_vkflags_func)vk_BufferCreateFlagBits_to_str : (zink_vkflags_func)vk_ImageCreateFlagBits_to_str;
1152       zink_vkflags_func usage_func = obj->is_buffer ? (zink_vkflags_func)vk_BufferUsageFlagBits_to_str : (zink_vkflags_func)vk_ImageUsageFlagBits_to_str;
1153       if (obj->vkflags) {
1154          buf[idx++] = '[';
1155          idx += zink_string_vkflags_unroll(&buf[idx], sizeof(buf) - idx, obj->vkflags, flag_func);
1156          buf[idx++] = ']';
1157       }
1158       if (obj->vkusage) {
1159          buf[idx++] = '[';
1160          idx += zink_string_vkflags_unroll(&buf[idx], sizeof(buf) - idx, obj->vkusage, usage_func);
1161          buf[idx++] = ']';
1162       }
1163       */
1164       buf[idx] = 0;
1165       obj->bo->name = zink_debug_mem_add(screen, obj->size, buf);
1166 }
1167 
1168 static inline enum resource_object_create_result
1169 allocate_bo_and_update_obj(struct zink_screen *screen, const struct pipe_resource *templ,
1170                            VkMemoryRequirements *reqs, struct zink_resource_object *obj,
1171                            struct mem_alloc_info *alloc_info)
1172 {
1173    if (!update_alloc_info_flags(screen, templ, reqs, alloc_info))
1174       return roc_fail_and_free_object;
1175 
1176    enum resource_object_create_result retval = allocate_bo(screen, templ, reqs, obj, alloc_info);
1177    assert(retval != roc_success_early_return);
1178    if (retval != roc_success)
1179       return retval;
1180 
1181    update_obj_info(screen, obj, templ, alloc_info);
1182 
1183    if (zink_debug & ZINK_DEBUG_MEM)
1184       debug_resource_mem(obj, templ, screen);
1185    return roc_success;
1186 }
1187 
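/* Buffer path of resource creation: create the VkBuffer (plus a second
 * buffer with storage-texel usage when that may be needed later), gather
 * memory requirements, allocate the BO, and bind it.
 */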
1188 static inline enum resource_object_create_result
1189 create_buffer(struct zink_screen *screen, struct zink_resource_object *obj,
1190               const struct pipe_resource *templ, uint64_t *modifiers,
1191               int modifiers_count, struct mem_alloc_info *alloc_info)
1192 {
1193    VkBufferCreateInfo bci = create_bci(screen, templ, templ->bind);
1194    VkExternalMemoryBufferCreateInfo embci;
1195    VkMemoryRequirements reqs = {0};
1196 
1197    embci.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO;
1198    if (alloc_info->external) {
1199       embci.pNext = bci.pNext;
1200       embci.handleTypes = alloc_info->export_types;
1201       bci.pNext = &embci;
1202    }
1203 
1204    if (VKSCR(CreateBuffer)(screen->dev, &bci, NULL, &obj->buffer) != VK_SUCCESS) {
1205       mesa_loge("ZINK: vkCreateBuffer failed");
1206       return roc_fail_and_free_object;
1207    }
1208 
1209    if (!(templ->bind & (PIPE_BIND_SHADER_IMAGE | ZINK_BIND_DESCRIPTOR))) {
1210        bci.usage |= VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
1211      if (VKSCR(CreateBuffer)(screen->dev, &bci, NULL, &obj->storage_buffer) != VK_SUCCESS) {
1212         mesa_loge("ZINK: vkCreateBuffer failed");
1213         VKSCR(DestroyBuffer)(screen->dev, obj->buffer, NULL);
1214         return roc_fail_and_free_object;
1215      }
1216    }
1217 
1218    if (modifiers_count) {
1219       assert(modifiers_count == 3);
1220       /* this is the DGC path because there's no other way to pass mem bits and I don't wanna copy/paste everything around */
1221       reqs.size = modifiers[0];
1222       reqs.alignment = modifiers[1];
1223       reqs.memoryTypeBits = modifiers[2];
1224    } else {
1225       VKSCR(GetBufferMemoryRequirements)(screen->dev, obj->buffer, &reqs);
1226    }
1227 
1228    if (templ->usage == PIPE_USAGE_STAGING)
1229       alloc_info->flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1230    else if (templ->usage == PIPE_USAGE_STREAM)
1231       alloc_info->flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
1232    else if (templ->usage == PIPE_USAGE_IMMUTABLE)
1233       alloc_info->flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
1234    else
1235       alloc_info->flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
1236 
1237    obj->is_buffer = true;
1238    obj->transfer_dst = true;
1239    obj->vkflags = bci.flags;
1240    obj->vkusage = bci.usage;
1241 
1242    enum resource_object_create_result retval = allocate_bo_and_update_obj(screen, templ, &reqs, obj,  alloc_info);
1243    assert(retval != roc_success_early_return);
1244    if (retval != roc_success)
1245       return retval;
1246 
1247    if (!(templ->flags & PIPE_RESOURCE_FLAG_SPARSE)) {
1248       if (VKSCR(BindBufferMemory)(screen->dev, obj->buffer, zink_bo_get_mem(obj->bo), obj->offset) != VK_SUCCESS) {
1249          mesa_loge("ZINK: vkBindBufferMemory failed");
1250          return roc_fail_and_cleanup_all;
1251       }
1252       if (obj->storage_buffer && VKSCR(BindBufferMemory)(screen->dev, obj->storage_buffer, zink_bo_get_mem(obj->bo), obj->offset) != VK_SUCCESS) {
1253          mesa_loge("ZINK: vkBindBufferMemory failed");
1254          return roc_fail_and_cleanup_all;
1255       }
1256    }
1257    return roc_success;
1258 }
1259 
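/* create_image: builds the VkImageCreateInfo (including an optional VkImageFormatListCreateInfo
 * for srgb<->linear mutation), resolves tiling and DRM format modifiers, creates the VkImage,
 * queries memory requirements, and allocates and binds memory (per-plane via
 * vkBindImageMemory2 for multi-planar formats).
 */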
1260 static inline enum resource_object_create_result
1261 create_image(struct zink_screen *screen, struct zink_resource_object *obj,
1262              const struct pipe_resource *templ, bool *linear,
1263              uint64_t *modifiers, int modifiers_count,
1264              struct mem_alloc_info *alloc_info)
1265 {
1266    VkMemoryRequirements reqs = {0};
1267    bool winsys_modifier = (alloc_info->export_types & VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT) &&
1268                           alloc_info->whandle &&
1269                           alloc_info->whandle->modifier != DRM_FORMAT_MOD_INVALID;
1270    uint64_t *ici_modifiers = winsys_modifier ? &alloc_info->whandle->modifier : modifiers;
1271    unsigned ici_modifier_count = winsys_modifier ? 1 : modifiers_count;
1272    VkImageCreateInfo ici;
1273    enum pipe_format srgb = PIPE_FORMAT_NONE;
1274    /* we often need to be able to mutate between srgb and linear, but we don't need general
1275     * image view/shader image format compatibility (that path means losing fast clears or compression on some hardware).
1276     */
1277    if (!(templ->bind & ZINK_BIND_MUTABLE)) {
1278       srgb = util_format_is_srgb(templ->format) ? util_format_linear(templ->format) : util_format_srgb(templ->format);
1279       /* why do these helpers have different default return values? */
1280       if (srgb == templ->format)
1281          srgb = PIPE_FORMAT_NONE;
1282    }
1283    VkFormat formats[2];
1284    VkImageFormatListCreateInfo format_list;
1285    if (srgb) {
1286       formats[0] = zink_get_format(screen, templ->format);
1287       formats[1] = zink_get_format(screen, srgb);
1288       /* only use format list if both formats have supported vk equivalents */
1289       if (formats[0] && formats[1]) {
1290          format_list.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO;
1291          format_list.pNext = NULL;
1292          format_list.viewFormatCount = 2;
1293          format_list.pViewFormats = formats;
1294          ici.pNext = &format_list;
1295       } else {
1296          ici.pNext = NULL;
1297       }
1298    } else {
1299       ici.pNext = NULL;
1300    }
1301    init_ici(screen, &ici, templ, templ->bind, ici_modifier_count);
1302 
1303    bool success = false;
1304    uint64_t mod = eval_ici(screen, &ici, templ, templ->bind, ici_modifier_count, ici_modifiers, &success);
1305    if (ici.format == VK_FORMAT_A8_UNORM_KHR && !success) {
1306       ici.format = zink_get_format(screen, zink_format_get_emulated_alpha(templ->format));
1307       mod = eval_ici(screen, &ici, templ, templ->bind, ici_modifier_count, ici_modifiers, &success);
1308    }
1309    if (!success)
1310       return roc_fail_and_free_object;
1311 
1312    if (ici.tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT && srgb &&
1313       util_format_get_nr_components(srgb) == 4 &&
1314       !(ici.flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT)) {
1315          mesa_loge("zink: refusing to create possibly-srgb dmabuf due to missing driver support: %s not supported!", util_format_name(srgb));
1316          return roc_fail_and_free_object;
1317    }
1318    VkExternalMemoryImageCreateInfo emici;
1319    VkImageDrmFormatModifierExplicitCreateInfoEXT idfmeci;
1320    VkImageDrmFormatModifierListCreateInfoEXT idfmlci;
1321    VkSubresourceLayout plane_layouts[4];
1322    VkSubresourceLayout plane_layout = {
1323       .offset = alloc_info->whandle ? alloc_info->whandle->offset : 0,
1324       .size = 0,
1325       .rowPitch = alloc_info->whandle ? alloc_info->whandle->stride : 0,
1326       .arrayPitch = 0,
1327       .depthPitch = 0,
1328    };
1329 
1330    obj->render_target = (ici.usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) != 0;
1331 
1332    if (alloc_info->shared || alloc_info->external) {
1333       emici.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO;
1334       emici.pNext = ici.pNext;
1335       emici.handleTypes = alloc_info->export_types;
1336       ici.pNext = &emici;
1337 
1338       assert(ici.tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT || mod != DRM_FORMAT_MOD_INVALID);
1339       if (alloc_info->whandle && ici.tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
1340          assert(mod == alloc_info->whandle->modifier || !winsys_modifier);
1341          idfmeci.sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT;
1342          idfmeci.pNext = ici.pNext;
1343          idfmeci.drmFormatModifier = mod;
1344          idfmeci.drmFormatModifierPlaneCount = obj->plane_count;
1345 
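         /* Explicit-modifier dmabuf import: every memory plane needs its offset and rowPitch.
          * Plane 0 comes from the winsys handle; the remaining planes are pulled from the aux
          * resources chained through templ->next for this import.
          */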
1346          plane_layouts[0] = plane_layout;
1347          struct pipe_resource *pnext = templ->next;
1348          for (unsigned i = 1; i < obj->plane_count; i++, pnext = pnext->next) {
1349             struct zink_resource *next = zink_resource(pnext);
1350             obj->plane_offsets[i] = plane_layouts[i].offset = next->obj->plane_offsets[i];
1351             obj->plane_strides[i] = plane_layouts[i].rowPitch = next->obj->plane_strides[i];
1352             plane_layouts[i].size = 0;
1353             plane_layouts[i].arrayPitch = 0;
1354             plane_layouts[i].depthPitch = 0;
1355          }
1356          idfmeci.pPlaneLayouts = plane_layouts;
1357 
1358          ici.pNext = &idfmeci;
1359       } else if (ici.tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
1360          idfmlci.sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT;
1361          idfmlci.pNext = ici.pNext;
1362          idfmlci.drmFormatModifierCount = modifiers_count;
1363          idfmlci.pDrmFormatModifiers = modifiers;
1364          ici.pNext = &idfmlci;
1365       } else if (ici.tiling == VK_IMAGE_TILING_OPTIMAL) {
1366          alloc_info->shared = false;
1367       }
1368    } else if (alloc_info->user_mem) {
1369       emici.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO;
1370       emici.pNext = ici.pNext;
1371       emici.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT;
1372       ici.pNext = &emici;
1373    }
1374 
1375    if (linear)
1376       *linear = ici.tiling == VK_IMAGE_TILING_LINEAR;
1377 
1378    if (ici.usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT)
1379       obj->transfer_dst = true;
1380 
1381 #if defined(ZINK_USE_DMABUF) && !defined(_WIN32)
1382    if (obj->is_aux) {
1383       obj->modifier = mod;
1384       obj->modifier_aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT << alloc_info->whandle->plane;
1385       obj->plane_offsets[alloc_info->whandle->plane] = alloc_info->whandle->offset;
1386       obj->plane_strides[alloc_info->whandle->plane] = alloc_info->whandle->stride;
1387       obj->handle = os_dupfd_cloexec(alloc_info->whandle->handle);
1388       if (obj->handle < 0) {
1389          mesa_loge("ZINK: failed to dup dmabuf fd: %s\n", strerror(errno));
1390          return roc_fail_and_free_object;
1391       }
1392       return roc_success_early_return;
1393    }
1394 #endif
1395 
1396    obj->vkfeats = get_format_feature_flags(ici, screen, templ);
1397    if (util_format_is_yuv(templ->format)) {
1398       if (!create_sampler_conversion(ici, screen, obj))
1399          return roc_fail_and_free_object;
1400    } else if (alloc_info->whandle) {
1401       obj->plane_strides[alloc_info->whandle->plane] = alloc_info->whandle->stride;
1402    }
1403 
1404    VkResult result = VKSCR(CreateImage)(screen->dev, &ici, NULL, &obj->image);
1405    if (result != VK_SUCCESS) {
1406       mesa_loge("ZINK: vkCreateImage failed (%s)", vk_Result_to_str(result));
1407       return roc_fail_and_free_object;
1408    }
1409 
1410    if (ici.tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
1411       VkImageDrmFormatModifierPropertiesEXT modprops = {0};
1412       modprops.sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT;
1413       result = VKSCR(GetImageDrmFormatModifierPropertiesEXT)(screen->dev, obj->image, &modprops);
1414       if (result != VK_SUCCESS) {
1415          mesa_loge("ZINK: vkGetImageDrmFormatModifierPropertiesEXT failed");
1416          return roc_fail_and_free_object;
1417       }
1418       obj->modifier = modprops.drmFormatModifier;
1419       unsigned num_dmabuf_planes = screen->base.get_dmabuf_modifier_planes(&screen->base, obj->modifier, templ->format);
1420       obj->modifier_aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT;
1421       if (num_dmabuf_planes > 1)
1422          obj->modifier_aspect |= VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT;
1423       if (num_dmabuf_planes > 2)
1424          obj->modifier_aspect |= VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT;
1425       if (num_dmabuf_planes > 3)
1426          obj->modifier_aspect |= VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT;
1427       assert(num_dmabuf_planes <= 4);
1428    }
1429 
1430    unsigned num_planes = util_format_get_num_planes(templ->format);
1431    alloc_info->need_dedicated = get_image_memory_requirement(screen, obj, num_planes, &reqs);
1432    if (templ->usage == PIPE_USAGE_STAGING && ici.tiling == VK_IMAGE_TILING_LINEAR)
1433       alloc_info->flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
1434    else
1435       alloc_info->flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
1436 
1437    obj->vkflags = ici.flags;
1438    obj->vkusage = ici.usage;
1439 
1440    enum resource_object_create_result retval = allocate_bo_and_update_obj(screen, templ, &reqs, obj, alloc_info);
1441    assert(retval != roc_success_early_return);
1442    if (retval != roc_success)
1443       return retval;
1444 
1445    if (num_planes > 1) {
1446       VkBindImageMemoryInfo infos[3];
1447       VkBindImagePlaneMemoryInfo planes[3];
1448       for (unsigned i = 0; i < num_planes; i++) {
1449          infos[i].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
1450          infos[i].image = obj->image;
1451          infos[i].memory = zink_bo_get_mem(obj->bo);
1452          infos[i].memoryOffset = obj->plane_offsets[i];
1453          if (templ->bind & ZINK_BIND_VIDEO) {
1454             infos[i].pNext = &planes[i];
1455             planes[i].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO;
1456             planes[i].pNext = NULL;
1457             planes[i].planeAspect = plane_aspects[i];
1458          }
1459       }
1460       if (VKSCR(BindImageMemory2)(screen->dev, num_planes, infos) != VK_SUCCESS) {
1461          mesa_loge("ZINK: vkBindImageMemory2 failed");
1462          return roc_fail_and_cleanup_all;
1463       }
1464    } else {
1465       if (!(templ->flags & PIPE_RESOURCE_FLAG_SPARSE))
1466          if (VKSCR(BindImageMemory)(screen->dev, obj->image, zink_bo_get_mem(obj->bo), obj->offset) != VK_SUCCESS) {
1467             mesa_loge("ZINK: vkBindImageMemory failed");
1468             return roc_fail_and_cleanup_all;
1469          }
1470    }
1471 
1472    return roc_success;
1473 }
1474 
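/* resource_object_create: common entry point for allocating a resource's backing object.
 * It sets up mem_alloc_info (export flags, winsys handle, user memory), counts aux planes
 * chained through templ->next, returns a bare object for loader-owned (kopper) swapchain
 * resources, and otherwise dispatches to create_buffer()/create_image(), unwinding any
 * partially-created Vulkan objects according to the roc_* result on failure.
 */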
1475 static struct zink_resource_object *
1476 resource_object_create(struct zink_screen *screen, const struct pipe_resource *templ, struct winsys_handle *whandle, bool *linear,
1477                        uint64_t *modifiers, int modifiers_count, const void *loader_private, const void *user_mem)
1478 {
1479    struct zink_resource_object *obj = CALLOC_STRUCT(zink_resource_object);
1480    unsigned max_level = 0;
1481    if (!obj)
1482       return NULL;
1483    simple_mtx_init(&obj->view_lock, mtx_plain);
1484    util_dynarray_init(&obj->views, NULL);
1485    u_rwlock_init(&obj->copy_lock);
1486    obj->unordered_read = true;
1487    obj->unordered_write = true;
1488    obj->unsync_access = true;
1489    obj->last_dt_idx = obj->dt_idx = UINT32_MAX; //TODO: unionize
1490 
1491    struct mem_alloc_info alloc_info = {
1492       .whandle = whandle,
1493       .need_dedicated = false,
1494       .export_types = ZINK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_BIT,
1495       .shared = templ->bind & PIPE_BIND_SHARED,
1496       .user_mem = user_mem
1497    };
1498 
1499    /* figure out aux plane count */
1500    if (whandle && whandle->plane >= util_format_get_num_planes(whandle->format))
1501       obj->is_aux = true;
1502    struct pipe_resource *pnext = templ->next;
1503    for (obj->plane_count = 1; pnext; obj->plane_count++, pnext = pnext->next) {
1504       struct zink_resource *next = zink_resource(pnext);
1505       if (!next->obj->is_aux)
1506          break;
1507    }
1508 
1509    if (!get_export_flags(screen, templ, &alloc_info)) {
1510       /* can't export anything, fail early */
1511       return NULL;
1512    }
1513 
1514    pipe_reference_init(&obj->reference, 1);
1515    if (loader_private) {
1516       obj->bo = CALLOC_STRUCT(zink_bo);
1517       if (!obj->bo) {
1518          mesa_loge("ZINK: failed to allocate obj->bo!");
1519          return NULL;
1520       }
1521 
1522       obj->transfer_dst = true;
1523       return obj;
1524    }
1525 
1526    enum resource_object_create_result create_result;
1527    if (templ->target == PIPE_BUFFER) {
1528       max_level = 1;
1529       create_result = create_buffer(screen, obj, templ, modifiers, modifiers_count, &alloc_info);
1530    } else {
1531       max_level = templ->last_level + 1;
1532       create_result = create_image(screen, obj, templ, linear, modifiers, modifiers_count,
1533                                    &alloc_info);
1534    }
1535 
1536    switch (create_result) {
1537    case roc_success:
1538       for (unsigned i = 0; i < max_level; i++)
1539          util_dynarray_init(&obj->copies[i], NULL);
1540       FALLTHROUGH;
1541    case roc_success_early_return:
1542       return obj;
1543 
1544    case roc_fail_and_cleanup_all:
1545       zink_bo_unref(screen, obj->bo);
1546       FALLTHROUGH;
1547    case roc_fail_and_cleanup_object:
1548       if (templ->target == PIPE_BUFFER) {
1549          VKSCR(DestroyBuffer)(screen->dev, obj->buffer, NULL);
1550          VKSCR(DestroyBuffer)(screen->dev, obj->storage_buffer, NULL);
1551       } else
1552          VKSCR(DestroyImage)(screen->dev, obj->image, NULL);
1553       FALLTHROUGH;
1554    case roc_fail_and_free_object:
1555       FREE(obj);
1556       return NULL;
1557    default:
1558       unreachable("Invalid create object result code");
1559    }
1560 }
1561 
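/* resource_create: the screen-level constructor shared by the zink_resource_create_* and
 * import paths below. It copies the template, records DRM modifiers for later rebinds,
 * initializes the threaded-context resource, creates the backing object, then finishes
 * buffer- or image-specific state (valid ranges, sparse info, layout/aspect) and, for
 * drawables, the kopper displaytarget/swapchain hookup.
 */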
1562 static struct pipe_resource *
1563 resource_create(struct pipe_screen *pscreen,
1564                 const struct pipe_resource *templ,
1565                 struct winsys_handle *whandle,
1566                 unsigned external_usage,
1567                 const uint64_t *modifiers, int modifiers_count,
1568                 const void *loader_private, const void *user_mem)
1569 {
1570    struct zink_screen *screen = zink_screen(pscreen);
1571    struct zink_resource *res = CALLOC_STRUCT_CL(zink_resource);
1572 
1573    if (!res) {
1574       mesa_loge("ZINK: failed to allocate res!");
1575       return NULL;
1576    }
1577 
1578    if (modifiers_count > 0 && screen->info.have_EXT_image_drm_format_modifier) {
1579       /* for rebinds */
1580       res->modifiers_count = modifiers_count;
1581       res->modifiers = mem_dup(modifiers, modifiers_count * sizeof(uint64_t));
1582       if (!res->modifiers) {
1583          FREE_CL(res);
1584          return NULL;
1585       }
1586    }
1587 
1588    res->base.b = *templ;
1589 
1590    bool allow_cpu_storage = (templ->target == PIPE_BUFFER) &&
1591                             (templ->usage != PIPE_USAGE_STREAM) &&
1592                             (templ->width0 < 0x1000);
1593    threaded_resource_init(&res->base.b, allow_cpu_storage);
1594    pipe_reference_init(&res->base.b.reference, 1);
1595    res->base.b.screen = pscreen;
1596 
1597    bool linear = false;
1598    struct pipe_resource templ2 = *templ;
1599    if (templ2.flags & PIPE_RESOURCE_FLAG_SPARSE &&
1600        (util_res_sample_count(templ) == 1 || screen->info.feats.features.shaderStorageImageMultisample))
1601       templ2.bind |= PIPE_BIND_SHADER_IMAGE;
1602    res->obj = resource_object_create(screen, &templ2, whandle, &linear, res->modifiers, res->modifiers_count, loader_private, user_mem);
1603    if (!res->obj) {
1604       free(res->modifiers);
1605       FREE_CL(res);
1606       return NULL;
1607    }
1608 
1609    res->queue = VK_QUEUE_FAMILY_IGNORED;
1610    res->internal_format = templ->format;
1611    if (templ->target == PIPE_BUFFER) {
1612       util_range_init(&res->valid_buffer_range);
1613       res->base.b.bind |= PIPE_BIND_SHADER_IMAGE;
1614       if (!screen->resizable_bar && templ->width0 >= 8196) {
1615          /* We don't want to evict buffers from VRAM by mapping them for CPU access,
1616           * because they might never be moved back again. If a buffer is large enough,
1617           * upload data by copying from a temporary GTT buffer. 8K might not seem much,
1618           * but there can be 100000 buffers.
1619           *
1620           * This tweak improves performance for viewperf.
1621           */
1622          res->base.b.flags |= PIPE_RESOURCE_FLAG_DONT_MAP_DIRECTLY;
1623       }
1624       if (zink_descriptor_mode == ZINK_DESCRIPTOR_MODE_DB)
1625          zink_resource_get_address(screen, res);
1626    } else {
1627       if (templ->flags & PIPE_RESOURCE_FLAG_SPARSE)
1628          res->base.b.bind |= PIPE_BIND_SHADER_IMAGE;
1629       if (templ->flags & PIPE_RESOURCE_FLAG_SPARSE) {
1630          uint32_t count = 1;
1631          VKSCR(GetImageSparseMemoryRequirements)(screen->dev, res->obj->image, &count, &res->sparse);
1632          res->base.b.nr_sparse_levels = res->sparse.imageMipTailFirstLod;
1633       }
1634       res->format = zink_get_format(screen, templ->format);
1635       if (templ->target == PIPE_TEXTURE_1D || templ->target == PIPE_TEXTURE_1D_ARRAY) {
1636          res->need_2D = (screen->need_2D_zs && util_format_is_depth_or_stencil(templ->format)) ||
1637                         (screen->need_2D_sparse && (templ->flags & PIPE_RESOURCE_FLAG_SPARSE));
1638       }
1639       res->dmabuf = whandle && whandle->type == WINSYS_HANDLE_TYPE_FD;
1640       if (res->dmabuf)
1641          res->queue = VK_QUEUE_FAMILY_FOREIGN_EXT;
1642       res->layout = res->dmabuf ? VK_IMAGE_LAYOUT_PREINITIALIZED : VK_IMAGE_LAYOUT_UNDEFINED;
1643       res->linear = linear;
1644       res->aspect = aspect_from_format(templ->format);
1645    }
1646 
1647    if (loader_private) {
1648       if (templ->bind & PIPE_BIND_DISPLAY_TARGET) {
1649          /* backbuffer */
1650          res->obj->dt = zink_kopper_displaytarget_create(screen,
1651                                                          res->base.b.bind,
1652                                                          res->base.b.format,
1653                                                          templ->width0,
1654                                                          templ->height0,
1655                                                          64, loader_private,
1656                                                          &res->dt_stride);
1657          if (!res->obj->dt) {
1658             mesa_loge("zink: could not create swapchain");
1659             FREE(res->obj);
1660             free(res->modifiers);
1661             FREE_CL(res);
1662             return NULL;
1663          }
1664          struct kopper_displaytarget *cdt = res->obj->dt;
1665          if (cdt->swapchain->num_acquires) {
1666             /* this should be a reused swapchain after a MakeCurrent dance that deleted the original resource */
1667             for (unsigned i = 0; i < cdt->swapchain->num_images; i++) {
1668                if (!cdt->swapchain->images[i].acquired)
1669                   continue;
1670                res->obj->dt_idx = i;
1671                res->obj->image = cdt->swapchain->images[i].image;
1672                res->layout = cdt->swapchain->images[i].layout;
1673             }
1674          }
1675       } else {
1676          /* frontbuffer */
1677          struct zink_resource *back = (void*)loader_private;
1678          struct kopper_displaytarget *cdt = back->obj->dt;
1679          cdt->refcount++;
1680          assert(back->obj->dt);
1681          res->obj->dt = back->obj->dt;
1682       }
1683       struct kopper_displaytarget *cdt = res->obj->dt;
1684       if (zink_kopper_has_srgb(cdt))
1685          res->obj->vkflags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
1686       if (cdt->swapchain->scci.flags == VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
1687          res->obj->vkflags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT;
1688       res->obj->vkusage = cdt->swapchain->scci.imageUsage;
1689       res->base.b.bind |= PIPE_BIND_DISPLAY_TARGET;
1690       res->linear = false;
1691       res->swapchain = true;
1692    }
1693 
1694    if (!res->obj->host_visible) {
1695       res->base.b.flags |= PIPE_RESOURCE_FLAG_DONT_MAP_DIRECTLY;
1696       res->base.allow_cpu_storage = false;
1697    }
1698    if (res->obj->is_buffer) {
1699       res->base.buffer_id_unique = util_idalloc_mt_alloc(&screen->buffer_ids);
1700       _mesa_hash_table_init(&res->bufferview_cache, NULL, NULL, equals_bvci);
1701       simple_mtx_init(&res->bufferview_mtx, mtx_plain);
1702    } else {
1703       _mesa_hash_table_init(&res->surface_cache, NULL, NULL, equals_ivci);
1704       simple_mtx_init(&res->surface_mtx, mtx_plain);
1705    }
1706    if (res->obj->exportable)
1707       res->base.b.bind |= ZINK_BIND_DMABUF;
1708    return &res->base.b;
1709 }
1710 
1711 static struct pipe_resource *
1712 zink_resource_create(struct pipe_screen *pscreen,
1713                      const struct pipe_resource *templ)
1714 {
1715    return resource_create(pscreen, templ, NULL, 0, NULL, 0, NULL, NULL);
1716 }
1717 
1718 static struct pipe_resource *
1719 zink_resource_create_with_modifiers(struct pipe_screen *pscreen, const struct pipe_resource *templ,
1720                                     const uint64_t *modifiers, int modifiers_count)
1721 {
1722    return resource_create(pscreen, templ, NULL, 0, modifiers, modifiers_count, NULL, NULL);
1723 }
1724 
1725 static struct pipe_resource *
1726 zink_resource_create_drawable(struct pipe_screen *pscreen,
1727                               const struct pipe_resource *templ,
1728                               const void *loader_private)
1729 {
1730    return resource_create(pscreen, templ, NULL, 0, NULL, 0, loader_private, NULL);
1731 }
1732 
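/* add_resource_bind: retroactively adds bind flags (e.g. ZINK_BIND_DMABUF) by allocating a
 * replacement backing object with the new binds, copying every mip level from the old
 * object via resource_copy_region, and then dropping the old object. A LINEAR modifier is
 * synthesized when exporting a resource that was created without a modifier list.
 */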
1733 static bool
1734 add_resource_bind(struct zink_context *ctx, struct zink_resource *res, unsigned bind)
1735 {
1736    struct zink_screen *screen = zink_screen(ctx->base.screen);
1737    assert((res->base.b.bind & bind) == 0);
1738    res->base.b.bind |= bind;
1739    struct zink_resource_object *old_obj = res->obj;
1740    if (bind & ZINK_BIND_DMABUF && !res->modifiers_count && screen->info.have_EXT_image_drm_format_modifier) {
1741       res->modifiers_count = 1;
1742       res->modifiers = malloc(res->modifiers_count * sizeof(uint64_t));
1743       if (!res->modifiers) {
1744          mesa_loge("ZINK: failed to allocate res->modifiers!");
1745          return false;
1746       }
1747 
1748       res->modifiers[0] = DRM_FORMAT_MOD_LINEAR;
1749    }
1750    struct zink_resource_object *new_obj = resource_object_create(screen, &res->base.b, NULL, &res->linear, res->modifiers, res->modifiers_count, NULL, NULL);
1751    if (!new_obj) {
1752       debug_printf("new backing resource alloc failed!\n");
1753       res->base.b.bind &= ~bind;
1754       return false;
1755    }
1756    struct zink_resource staging = *res;
1757    staging.obj = old_obj;
1758    staging.all_binds = 0;
1759    res->layout = VK_IMAGE_LAYOUT_UNDEFINED;
1760    res->obj = new_obj;
1761    res->queue = VK_QUEUE_FAMILY_IGNORED;
1762    for (unsigned i = 0; i <= res->base.b.last_level; i++) {
1763       struct pipe_box box;
1764       u_box_3d(0, 0, 0,
1765                u_minify(res->base.b.width0, i),
1766                u_minify(res->base.b.height0, i), res->base.b.array_size, &box);
1767       box.depth = util_num_layers(&res->base.b, i);
1768       ctx->base.resource_copy_region(&ctx->base, &res->base.b, i, 0, 0, 0, &staging.base.b, i, &box);
1769    }
1770    if (old_obj->exportable) {
1771       simple_mtx_lock(&ctx->bs->exportable_lock);
1772       _mesa_set_remove_key(&ctx->bs->dmabuf_exports, &staging);
1773       simple_mtx_unlock(&ctx->bs->exportable_lock);
1774    }
1775    zink_resource_object_reference(screen, &old_obj, NULL);
1776    return true;
1777 }
1778 
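/* zink_resource_get_param: answers frontend queries (plane count, stride, offset, modifier,
 * layer stride, handle export) using vkGetImageSubresourceLayout on the memory-plane or
 * format-plane aspect that corresponds to the requested plane.
 */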
1779 static bool
1780 zink_resource_get_param(struct pipe_screen *pscreen, struct pipe_context *pctx,
1781                         struct pipe_resource *pres,
1782                         unsigned plane,
1783                         unsigned layer,
1784                         unsigned level,
1785                         enum pipe_resource_param param,
1786                         unsigned handle_usage,
1787                         uint64_t *value)
1788 {
1789    struct zink_screen *screen = zink_screen(pscreen);
1790    struct zink_resource *res = zink_resource(pres);
1791    struct zink_resource_object *obj = res->obj;
1792    struct winsys_handle whandle;
1793    VkImageAspectFlags aspect;
1794    if (obj->modifier_aspect) {
1795       switch (plane) {
1796       case 0:
1797          aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT;
1798          break;
1799       case 1:
1800          aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT;
1801          break;
1802       case 2:
1803          aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT;
1804          break;
1805       case 3:
1806          aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT;
1807          break;
1808       default:
1809          unreachable("how many planes you got in this thing?");
1810       }
1811    } else if (res->obj->sampler_conversion) {
1812       aspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
1813    } else {
1814       aspect = res->aspect;
1815    }
1816    switch (param) {
1817    case PIPE_RESOURCE_PARAM_NPLANES:
1818       if (screen->info.have_EXT_image_drm_format_modifier)
1819          *value = screen->base.get_dmabuf_modifier_planes(&screen->base, obj->modifier, res->internal_format);
1820       else
1821          *value = 1;
1822       break;
1823 
1824    case PIPE_RESOURCE_PARAM_STRIDE: {
1825       VkImageSubresource sub_res = {0};
1826       VkSubresourceLayout sub_res_layout = {0};
1827 
1828       sub_res.aspectMask = aspect;
1829 
1830       VKSCR(GetImageSubresourceLayout)(screen->dev, obj->image, &sub_res, &sub_res_layout);
1831 
1832       *value = sub_res_layout.rowPitch;
1833       break;
1834    }
1835 
1836    case PIPE_RESOURCE_PARAM_OFFSET: {
1837          VkImageSubresource isr = {
1838             aspect,
1839             level,
1840             layer
1841          };
1842          VkSubresourceLayout srl;
1843          VKSCR(GetImageSubresourceLayout)(screen->dev, obj->image, &isr, &srl);
1844          *value = srl.offset;
1845          break;
1846    }
1847 
1848    case PIPE_RESOURCE_PARAM_MODIFIER: {
1849       *value = obj->modifier;
1850       break;
1851    }
1852 
1853    case PIPE_RESOURCE_PARAM_LAYER_STRIDE: {
1854          VkImageSubresource isr = {
1855             aspect,
1856             level,
1857             layer
1858          };
1859          VkSubresourceLayout srl;
1860          VKSCR(GetImageSubresourceLayout)(screen->dev, obj->image, &isr, &srl);
1861          if (res->base.b.target == PIPE_TEXTURE_3D)
1862             *value = srl.depthPitch;
1863          else
1864             *value = srl.arrayPitch;
1865          break;
1866    }
1867 
1868       return false;
1869    case PIPE_RESOURCE_PARAM_HANDLE_TYPE_KMS:
1870    case PIPE_RESOURCE_PARAM_HANDLE_TYPE_SHARED:
1871    case PIPE_RESOURCE_PARAM_HANDLE_TYPE_FD: {
1872 #ifdef ZINK_USE_DMABUF
1873       memset(&whandle, 0, sizeof(whandle));
1874       if (param == PIPE_RESOURCE_PARAM_HANDLE_TYPE_SHARED)
1875          whandle.type = WINSYS_HANDLE_TYPE_SHARED;
1876       if (param == PIPE_RESOURCE_PARAM_HANDLE_TYPE_KMS)
1877          whandle.type = WINSYS_HANDLE_TYPE_KMS;
1878       else if (param == PIPE_RESOURCE_PARAM_HANDLE_TYPE_FD)
1879          whandle.type = WINSYS_HANDLE_TYPE_FD;
1880 
1881       if (!pscreen->resource_get_handle(pscreen, pctx, pres, &whandle, handle_usage))
1882          return false;
1883 
1884 #ifdef _WIN32
1885       *value = (uintptr_t)whandle.handle;
1886 #else
1887       *value = whandle.handle;
1888 #endif
1889       break;
1890 #else
1891       (void)whandle;
1892       return false;
1893 #endif
1894    }
1895    }
1896    return true;
1897 }
1898 
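/* zink_resource_get_handle: exports the backing memory as an fd / KMS / win32 handle.
 * Non-exportable images are first rebound with DMABUF|SHARED binds via add_resource_bind()
 * on the screen's copy context; the fd is then obtained with vkGetMemoryFdKHR (converted to
 * a KMS handle through the DRM fd when requested) and modifier/offset/stride are filled in
 * from zink_resource_get_param().
 */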
1899 static bool
1900 zink_resource_get_handle(struct pipe_screen *pscreen,
1901                          struct pipe_context *context,
1902                          struct pipe_resource *tex,
1903                          struct winsys_handle *whandle,
1904                          unsigned usage)
1905 {
1906    if (tex->target == PIPE_BUFFER)
1907       tc_buffer_disable_cpu_storage(tex);
1908    if (whandle->type == WINSYS_HANDLE_TYPE_FD || whandle->type == WINSYS_HANDLE_TYPE_KMS) {
1909 #ifdef ZINK_USE_DMABUF
1910       struct zink_resource *res = zink_resource(tex);
1911       struct zink_screen *screen = zink_screen(pscreen);
1912       struct zink_resource_object *obj = res->obj;
1913 
1914 #if !defined(_WIN32)
1915       if (whandle->type == WINSYS_HANDLE_TYPE_KMS && screen->drm_fd == -1) {
1916          whandle->handle = -1;
1917       } else {
1918          if (!res->obj->exportable) {
1919             assert(!zink_resource_usage_is_unflushed(res));
1920             if (!screen->info.have_EXT_image_drm_format_modifier) {
1921                static bool warned = false;
1922                warn_missing_feature(warned, "EXT_image_drm_format_modifier");
1923                return false;
1924             }
1925             unsigned bind = ZINK_BIND_DMABUF;
1926             if (!(res->base.b.bind & PIPE_BIND_SHARED))
1927                bind |= PIPE_BIND_SHARED;
1928             zink_screen_lock_context(screen);
1929             if (!add_resource_bind(screen->copy_context, res, bind)) {
1930                zink_screen_unlock_context(screen);
1931                return false;
1932             }
1933             if (res->all_binds)
1934                p_atomic_inc(&screen->image_rebind_counter);
1935             screen->copy_context->base.flush(&screen->copy_context->base, NULL, 0);
1936             zink_screen_unlock_context(screen);
1937             obj = res->obj;
1938          }
1939 
1940          VkMemoryGetFdInfoKHR fd_info = {0};
1941          int fd;
1942          fd_info.sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR;
1943          fd_info.memory = zink_bo_get_mem(obj->bo);
1944          if (whandle->type == WINSYS_HANDLE_TYPE_FD)
1945             fd_info.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
1946          else
1947             fd_info.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
1948          VkResult result = VKSCR(GetMemoryFdKHR)(screen->dev, &fd_info, &fd);
1949          if (result != VK_SUCCESS) {
1950             mesa_loge("ZINK: vkGetMemoryFdKHR failed");
1951             return false;
1952          }
1953          if (whandle->type == WINSYS_HANDLE_TYPE_KMS) {
1954             uint32_t h;
1955             bool ret = zink_bo_get_kms_handle(screen, obj->bo, fd, &h);
1956             close(fd);
1957             if (!ret)
1958                return false;
1959             fd = h;
1960          }
1961 
1962          whandle->handle = fd;
1963       }
1964 #else
1965       VkMemoryGetWin32HandleInfoKHR handle_info = {0};
1966       HANDLE handle;
1967       handle_info.sType = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR;
1968       //TODO: remove for wsi
1969       handle_info.memory = zink_bo_get_mem(obj->bo);
1970       handle_info.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT;
1971       VkResult result = VKSCR(GetMemoryWin32HandleKHR)(screen->dev, &handle_info, &handle);
1972       if (result != VK_SUCCESS)
1973          return false;
1974       whandle->handle = handle;
1975 #endif
1976       uint64_t value;
1977       zink_resource_get_param(pscreen, context, tex, 0, 0, 0, PIPE_RESOURCE_PARAM_MODIFIER, 0, &value);
1978       whandle->modifier = value;
1979       zink_resource_get_param(pscreen, context, tex, 0, 0, 0, PIPE_RESOURCE_PARAM_OFFSET, 0, &value);
1980       whandle->offset = value;
1981       zink_resource_get_param(pscreen, context, tex, 0, 0, 0, PIPE_RESOURCE_PARAM_STRIDE, 0, &value);
1982       whandle->stride = value;
1983 #else
1984       return false;
1985 #endif
1986    }
1987    return true;
1988 }
1989 
1990 static struct pipe_resource *
1991 zink_resource_from_handle(struct pipe_screen *pscreen,
1992                  const struct pipe_resource *templ,
1993                  struct winsys_handle *whandle,
1994                  unsigned usage)
1995 {
1996 #ifdef ZINK_USE_DMABUF
1997    if (whandle->modifier != DRM_FORMAT_MOD_INVALID &&
1998        !zink_screen(pscreen)->info.have_EXT_image_drm_format_modifier)
1999       return NULL;
2000 
2001    struct pipe_resource templ2 = *templ;
2002    if (templ->format == PIPE_FORMAT_NONE)
2003       templ2.format = whandle->format;
2004 
2005    uint64_t modifier = DRM_FORMAT_MOD_LINEAR;
2006    int modifier_count = 1;
2007    if (whandle->modifier != DRM_FORMAT_MOD_INVALID)
2008       modifier = whandle->modifier;
2009    else {
2010       if (!zink_screen(pscreen)->driver_workarounds.can_do_invalid_linear_modifier) {
2011          mesa_loge("zink: display server doesn't support DRI3 modifiers and driver can't handle INVALID<->LINEAR!");
2012          return NULL;
2013       }
2014       whandle->modifier = modifier;
2015    }
2016    templ2.bind |= ZINK_BIND_DMABUF;
2017    struct pipe_resource *pres = resource_create(pscreen, &templ2, whandle, usage, &modifier, modifier_count, NULL, NULL);
2018    if (pres) {
2019       struct zink_resource *res = zink_resource(pres);
2020       if (pres->target != PIPE_BUFFER)
2021          res->valid = true;
2022       else
2023          tc_buffer_disable_cpu_storage(pres);
2024       res->internal_format = whandle->format;
2025    }
2026    return pres;
2027 #else
2028    return NULL;
2029 #endif
2030 }
2031 
2032 static struct pipe_resource *
2033 zink_resource_from_user_memory(struct pipe_screen *pscreen,
2034                  const struct pipe_resource *templ,
2035                  void *user_memory)
2036 {
2037    struct zink_screen *screen = zink_screen(pscreen);
2038    VkDeviceSize alignMask = screen->info.ext_host_mem_props.minImportedHostPointerAlignment - 1;
2039 
2040    /* Validate the user_memory pointer and fail early.
2041     * minImportedHostPointerAlignment is required to be POT */
2042    if (((uintptr_t)user_memory) & alignMask)
2043       return NULL;
2044 
2045    return resource_create(pscreen, templ, NULL, 0, NULL, 0, NULL, user_memory);
2046 }
2047 
2048 struct zink_memory_object {
2049    struct pipe_memory_object b;
2050    struct winsys_handle whandle;
2051 };
2052 
2053 static struct pipe_memory_object *
2054 zink_memobj_create_from_handle(struct pipe_screen *pscreen, struct winsys_handle *whandle, bool dedicated)
2055 {
2056    struct zink_memory_object *memobj = CALLOC_STRUCT(zink_memory_object);
2057    if (!memobj)
2058       return NULL;
2059    memcpy(&memobj->whandle, whandle, sizeof(struct winsys_handle));
2060    memobj->whandle.type = ZINK_EXTERNAL_MEMORY_HANDLE;
2061 
2062 #ifdef ZINK_USE_DMABUF
2063 
2064 #if !defined(_WIN32)
2065    memobj->whandle.handle = os_dupfd_cloexec(whandle->handle);
2066 #else
2067    HANDLE source_target = GetCurrentProcess();
2068    HANDLE out_handle;
2069 
2070    DuplicateHandle(source_target, whandle->handle, source_target, &out_handle, 0, false, DUPLICATE_SAME_ACCESS);
2071    memobj->whandle.handle = out_handle;
2072 
2073 #endif /* _WIN32 */
2074 #endif /* ZINK_USE_DMABUF */
2075 
2076    return (struct pipe_memory_object *)memobj;
2077 }
2078 
2079 static void
2080 zink_memobj_destroy(struct pipe_screen *pscreen, struct pipe_memory_object *pmemobj)
2081 {
2082 #ifdef ZINK_USE_DMABUF
2083    struct zink_memory_object *memobj = (struct zink_memory_object *)pmemobj;
2084 
2085 #if !defined(_WIN32)
2086    close(memobj->whandle.handle);
2087 #else
2088    CloseHandle(memobj->whandle.handle);
2089 #endif /* _WIN32 */
2090 #endif /* ZINK_USE_DMABUF */
2091 
2092    FREE(pmemobj);
2093 }
2094 
2095 static struct pipe_resource *
2096 zink_resource_from_memobj(struct pipe_screen *pscreen,
2097                           const struct pipe_resource *templ,
2098                           struct pipe_memory_object *pmemobj,
2099                           uint64_t offset)
2100 {
2101    struct zink_memory_object *memobj = (struct zink_memory_object *)pmemobj;
2102 
2103    struct pipe_resource *pres = resource_create(pscreen, templ, &memobj->whandle, 0, NULL, 0, NULL, NULL);
2104    if (pres) {
2105       if (pres->target != PIPE_BUFFER)
2106          zink_resource(pres)->valid = true;
2107       else
2108          tc_buffer_disable_cpu_storage(pres);
2109    }
2110    return pres;
2111 }
2112 
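/* invalidate_buffer: implements buffer orphaning. If the buffer has GPU usage and its
 * contents can be discarded, a fresh backing object is allocated, the old one is handed to
 * the current batch for deferred destruction, and all bindings are rebound to the new
 * object so subsequent writes never stall on prior GPU work.
 */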
2113 static bool
2114 invalidate_buffer(struct zink_context *ctx, struct zink_resource *res)
2115 {
2116    struct zink_screen *screen = zink_screen(ctx->base.screen);
2117 
2118    assert(res->base.b.target == PIPE_BUFFER);
2119 
2120    if (res->base.b.flags & PIPE_RESOURCE_FLAG_SPARSE)
2121       return false;
2122 
2123    struct pipe_box box;
2124    u_box_3d(0, 0, 0, res->base.b.width0, 0, 0, &box);
2125    if (res->valid_buffer_range.start > res->valid_buffer_range.end &&
2126        !zink_resource_copy_box_intersects(res, 0, &box))
2127       return false;
2128 
2129    if (res->so_valid)
2130       ctx->dirty_so_targets = true;
2131    /* force counter buffer reset */
2132    res->so_valid = false;
2133 
2134    util_range_set_empty(&res->valid_buffer_range);
2135    if (!zink_resource_has_usage(res))
2136       return false;
2137 
2138    struct zink_resource_object *new_obj = resource_object_create(screen, &res->base.b, NULL, NULL, NULL, 0, NULL, 0);
2139    if (!new_obj) {
2140       debug_printf("new backing resource alloc failed!\n");
2141       return false;
2142    }
2143    bool needs_bda = !!res->obj->bda;
2144    /* this ref must be transferred before rebind or else BOOM */
2145    zink_batch_reference_resource_move(ctx, res);
2146    res->obj = new_obj;
2147    res->queue = VK_QUEUE_FAMILY_IGNORED;
2148    if (needs_bda)
2149       zink_resource_get_address(screen, res);
2150    zink_resource_rebind(ctx, res);
2151    return true;
2152 }
2153 
2154 
2155 static void
2156 zink_resource_invalidate(struct pipe_context *pctx, struct pipe_resource *pres)
2157 {
2158    if (pres->target == PIPE_BUFFER)
2159       invalidate_buffer(zink_context(pctx), zink_resource(pres));
2160    else {
2161       struct zink_resource *res = zink_resource(pres);
2162       if (res->valid && res->fb_bind_count)
2163          zink_context(pctx)->rp_loadop_changed = true;
2164       res->valid = false;
2165    }
2166 }
2167 
2168 static void
2169 zink_transfer_copy_bufimage(struct zink_context *ctx,
2170                             struct zink_resource *dst,
2171                             struct zink_resource *src,
2172                             struct zink_transfer *trans)
2173 {
2174    assert((trans->base.b.usage & (PIPE_MAP_DEPTH_ONLY | PIPE_MAP_STENCIL_ONLY)) !=
2175           (PIPE_MAP_DEPTH_ONLY | PIPE_MAP_STENCIL_ONLY));
2176 
2177    bool buf2img = src->base.b.target == PIPE_BUFFER;
2178 
2179    struct pipe_box box = trans->base.b.box;
2180    int x = box.x;
2181    if (buf2img)
2182       box.x = trans->offset;
2183 
2184    assert(dst->obj->transfer_dst);
2185    zink_copy_image_buffer(ctx, dst, src, trans->base.b.level, buf2img ? x : 0,
2186                            box.y, box.z, trans->base.b.level, &box, trans->base.b.usage);
2187 }
2188 
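/* align_offset_size: expands [offset, offset+size) so that both ends are aligned to the
 * given alignment (nonCoherentAtomSize for flush/invalidate ranges), clamped to the object
 * size. Illustrative example with an assumed alignment of 128 and obj_size of 1024:
 *   offset=200, size=60  ->  offset=128, size=256   (covers the original 200..260 range)
 * Offsets smaller than the alignment are rounded down to 0.
 */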
2189 ALWAYS_INLINE static void
2190 align_offset_size(const VkDeviceSize alignment, VkDeviceSize *offset, VkDeviceSize *size, VkDeviceSize obj_size)
2191 {
2192    VkDeviceSize align = *offset % alignment;
2193    if (alignment - 1 > *offset)
2194       *offset = 0;
2195    else
2196       *offset -= align, *size += align;
2197    align = alignment - (*size % alignment);
2198    if (*offset + *size + align > obj_size)
2199       *size = obj_size - *offset;
2200    else
2201       *size += align;
2202 }
2203 
2204 VkMappedMemoryRange
2205 zink_resource_init_mem_range(struct zink_screen *screen, struct zink_resource_object *obj, VkDeviceSize offset, VkDeviceSize size)
2206 {
2207    assert(obj->size);
2208    align_offset_size(screen->info.props.limits.nonCoherentAtomSize, &offset, &size, obj->size);
2209    VkMappedMemoryRange range = {
2210       VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
2211       NULL,
2212       zink_bo_get_mem(obj->bo),
2213       offset,
2214       size
2215    };
2216    assert(range.size);
2217    return range;
2218 }
2219 
2220 static void *
2221 map_resource(struct zink_screen *screen, struct zink_resource *res)
2222 {
2223    assert(res->obj->host_visible);
2224    return zink_bo_map(screen, res->obj->bo);
2225 }
2226 
2227 static void
2228 unmap_resource(struct zink_screen *screen, struct zink_resource *res)
2229 {
2230    zink_bo_unmap(screen, res->obj->bo);
2231 }
2232 
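/* create_transfer: transfers mapped with PIPE_MAP_THREAD_SAFE are heap-allocated so any
 * thread may free them; unsynchronized threaded-context maps come from the unsync slab,
 * and everything else from the context's regular transfer slab (see destroy_transfer()).
 */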
2233 static struct zink_transfer *
2234 create_transfer(struct zink_context *ctx, struct pipe_resource *pres, unsigned usage, const struct pipe_box *box)
2235 {
2236    struct zink_transfer *trans;
2237 
2238    if (usage & PIPE_MAP_THREAD_SAFE)
2239       trans = calloc(1, sizeof(*trans));
2240    else if (usage & TC_TRANSFER_MAP_THREADED_UNSYNC)
2241       trans = slab_zalloc(&ctx->transfer_pool_unsync);
2242    else
2243       trans = slab_zalloc(&ctx->transfer_pool);
2244    if (!trans)
2245       return NULL;
2246 
2247    pipe_resource_reference(&trans->base.b.resource, pres);
2248 
2249    trans->base.b.usage = usage;
2250    trans->base.b.box = *box;
2251    return trans;
2252 }
2253 
2254 static void
2255 destroy_transfer(struct zink_context *ctx, struct zink_transfer *trans)
2256 {
2257    if (trans->base.b.usage & PIPE_MAP_THREAD_SAFE) {
2258       free(trans);
2259    } else {
2260       /* Don't use pool_transfers_unsync. We are always in the driver
2261        * thread. Freeing an object into a different pool is allowed.
2262        */
2263       slab_free(&ctx->transfer_pool, trans);
2264    }
2265 }
2266 
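/* zink_buffer_map: decides how to expose buffer memory to the CPU. In rough order it
 * promotes maps of never-written ranges to unsynchronized maps, turns whole-range discards
 * into buffer invalidation (orphaning), falls back to u_upload_mgr or a PIPE_USAGE_STAGING
 * copy buffer when a direct map would stall or the memory is not host-visible, waits for
 * GPU usage otherwise, and finally invalidates non-coherent memory before returning a
 * pointer.
 */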
2267 static void *
2268 zink_buffer_map(struct pipe_context *pctx,
2269                     struct pipe_resource *pres,
2270                     unsigned level,
2271                     unsigned usage,
2272                     const struct pipe_box *box,
2273                     struct pipe_transfer **transfer)
2274 {
2275    struct zink_context *ctx = zink_context(pctx);
2276    struct zink_screen *screen = zink_screen(pctx->screen);
2277    struct zink_resource *res = zink_resource(pres);
2278    struct zink_transfer *trans = create_transfer(ctx, pres, usage, box);
2279    if (!trans)
2280       return NULL;
2281 
2282    void *ptr = NULL;
2283 
2284    if (res->base.is_user_ptr)
2285       usage |= PIPE_MAP_PERSISTENT;
2286 
2287    /* See if the buffer range being mapped has never been initialized,
2288     * in which case it can be mapped unsynchronized. */
2289    if (!(usage & (PIPE_MAP_UNSYNCHRONIZED | TC_TRANSFER_MAP_NO_INFER_UNSYNCHRONIZED)) &&
2290        usage & PIPE_MAP_WRITE && !res->base.is_shared &&
2291        !util_ranges_intersect(&res->valid_buffer_range, box->x, box->x + box->width) &&
2292        !zink_resource_copy_box_intersects(res, 0, box)) {
2293       usage |= PIPE_MAP_UNSYNCHRONIZED;
2294    }
2295 
2296    /* If discarding the entire range, discard the whole resource instead. */
2297    if (usage & PIPE_MAP_DISCARD_RANGE && box->x == 0 && box->width == res->base.b.width0) {
2298       usage |= PIPE_MAP_DISCARD_WHOLE_RESOURCE;
2299    }
2300 
2301    /* If a buffer in VRAM is too large and the range is discarded, don't
2302     * map it directly. This makes sure that the buffer stays in VRAM.
2303     */
2304    bool force_discard_range = false;
2305    if (usage & (PIPE_MAP_DISCARD_WHOLE_RESOURCE | PIPE_MAP_DISCARD_RANGE) &&
2306        !(usage & PIPE_MAP_PERSISTENT) &&
2307        res->base.b.flags & PIPE_RESOURCE_FLAG_DONT_MAP_DIRECTLY) {
2308       usage &= ~(PIPE_MAP_DISCARD_WHOLE_RESOURCE | PIPE_MAP_UNSYNCHRONIZED);
2309       usage |= PIPE_MAP_DISCARD_RANGE;
2310       force_discard_range = true;
2311    }
2312 
2313    if (usage & PIPE_MAP_DISCARD_WHOLE_RESOURCE &&
2314        !(usage & (PIPE_MAP_UNSYNCHRONIZED | TC_TRANSFER_MAP_NO_INVALIDATE))) {
2315       assert(usage & PIPE_MAP_WRITE);
2316 
2317       if (invalidate_buffer(ctx, res)) {
2318          /* At this point, the buffer is always idle. */
2319          usage |= PIPE_MAP_UNSYNCHRONIZED;
2320       } else {
2321          /* Fall back to a temporary buffer. */
2322          usage |= PIPE_MAP_DISCARD_RANGE;
2323       }
2324    }
2325 
2326    unsigned map_offset = box->x;
2327    if (usage & PIPE_MAP_DISCARD_RANGE &&
2328         (!res->obj->host_visible ||
2329         !(usage & (PIPE_MAP_UNSYNCHRONIZED | PIPE_MAP_PERSISTENT)))) {
2330 
2331       /* Check if mapping this buffer would cause waiting for the GPU.
2332        */
2333 
2334       if (!res->obj->host_visible || force_discard_range ||
2335           !zink_resource_usage_check_completion(screen, res, ZINK_RESOURCE_ACCESS_RW)) {
2336          /* Do a wait-free write-only transfer using a temporary buffer. */
2337          unsigned offset;
2338 
2339          /* If we are not called from the driver thread, we have
2340           * to use the uploader from u_threaded_context, which is
2341           * local to the calling thread.
2342           */
2343          struct u_upload_mgr *mgr;
2344          if (usage & TC_TRANSFER_MAP_THREADED_UNSYNC)
2345             mgr = ctx->tc->base.stream_uploader;
2346          else
2347             mgr = ctx->base.stream_uploader;
2348          u_upload_alloc(mgr, 0, box->width,
2349                      screen->info.props.limits.minMemoryMapAlignment, &offset,
2350                      (struct pipe_resource **)&trans->staging_res, (void **)&ptr);
2351          res = zink_resource(trans->staging_res);
2352          trans->offset = offset;
2353          usage |= PIPE_MAP_UNSYNCHRONIZED;
2354          ptr = ((uint8_t *)ptr);
2355       } else {
2356          /* At this point, the buffer is always idle (we checked it above). */
2357          usage |= PIPE_MAP_UNSYNCHRONIZED;
2358       }
2359    } else if (usage & PIPE_MAP_DONTBLOCK) {
2360       /* sparse/device-local will always need to wait since it has to copy */
2361       if (!res->obj->host_visible)
2362          goto success;
2363       if (!zink_resource_usage_check_completion(screen, res, ZINK_RESOURCE_ACCESS_WRITE))
2364          goto success;
2365       usage |= PIPE_MAP_UNSYNCHRONIZED;
2366    } else if (((usage & PIPE_MAP_READ) && !(usage & PIPE_MAP_PERSISTENT) &&
2367                ((screen->info.mem_props.memoryTypes[res->obj->bo->base.base.placement].propertyFlags & VK_STAGING_RAM) != VK_STAGING_RAM)) ||
2368               !res->obj->host_visible) {
2369       /* any read, non-HV write, or unmappable that reaches this point needs staging */
2370       if ((usage & PIPE_MAP_READ) || !res->obj->host_visible || res->base.b.flags & PIPE_RESOURCE_FLAG_DONT_MAP_DIRECTLY) {
2371 overwrite:
2372          trans->offset = box->x % MAX2(screen->info.props.limits.minMemoryMapAlignment, 1 << MIN_SLAB_ORDER);
2373          trans->staging_res = pipe_buffer_create(&screen->base, PIPE_BIND_LINEAR, PIPE_USAGE_STAGING, box->width + trans->offset);
2374          if (!trans->staging_res)
2375             goto fail;
2376          struct zink_resource *staging_res = zink_resource(trans->staging_res);
2377          if (usage & (PIPE_MAP_THREAD_SAFE | PIPE_MAP_UNSYNCHRONIZED | TC_TRANSFER_MAP_THREADED_UNSYNC)) {
2378             assert(ctx != screen->copy_context);
2379             /* this map can't access the passed context: use the copy context */
2380             zink_screen_lock_context(screen);
2381             ctx = screen->copy_context;
2382          }
2383          if (usage & PIPE_MAP_READ)
2384             zink_copy_buffer(ctx, staging_res, res, trans->offset, box->x, box->width);
2385          res = staging_res;
2386          usage &= ~PIPE_MAP_UNSYNCHRONIZED;
2387          map_offset = trans->offset;
2388       }
2389    }
2390 
2391    if (!(usage & PIPE_MAP_UNSYNCHRONIZED)) {
2392       if (usage & PIPE_MAP_WRITE) {
2393          if (!(usage & PIPE_MAP_READ)) {
2394             zink_resource_usage_try_wait(ctx, res, ZINK_RESOURCE_ACCESS_RW);
2395             if (zink_resource_has_unflushed_usage(res))
2396                goto overwrite;
2397          }
2398          zink_resource_usage_wait(ctx, res, ZINK_RESOURCE_ACCESS_RW);
2399       } else
2400          zink_resource_usage_wait(ctx, res, ZINK_RESOURCE_ACCESS_WRITE);
2401       if (!res->real_buffer_range) {
2402          res->obj->access = 0;
2403          res->obj->access_stage = 0;
2404          res->obj->last_write = 0;
2405          zink_resource_copies_reset(res);
2406       }
2407    }
2408 
2409    if (!ptr) {
2410       /* if writing to a streamout buffer, ensure synchronization next time it's used */
2411       if (usage & PIPE_MAP_WRITE && res->so_valid) {
2412          ctx->dirty_so_targets = true;
2413          /* force counter buffer reset */
2414          res->so_valid = false;
2415       }
2416       ptr = map_resource(screen, res);
2417       if (!ptr)
2418          goto fail;
2419       ptr = ((uint8_t *)ptr) + map_offset;
2420    }
2421 
2422    if (!res->obj->coherent
2423 #if defined(MVK_VERSION)
2424       // Workaround for a MoltenVK limitation specific to coherent memory:
2425       // MoltenVK can return blank memory ranges when data should be present.
2426       // This is a known MoltenVK limitation.
2427       // See https://github.com/KhronosGroup/MoltenVK/blob/master/Docs/MoltenVK_Runtime_UserGuide.md#known-moltenvk-limitations
2428 
2429        || screen->instance_info.have_MVK_moltenvk
2430 #endif
2431       ) {
2432       VkDeviceSize size = box->width;
2433       VkDeviceSize offset = res->obj->offset + trans->offset;
2434       VkMappedMemoryRange range = zink_resource_init_mem_range(screen, res->obj, offset, size);
2435       if (VKSCR(InvalidateMappedMemoryRanges)(screen->dev, 1, &range) != VK_SUCCESS) {
2436          mesa_loge("ZINK: vkInvalidateMappedMemoryRanges failed");
2437          zink_bo_unmap(screen, res->obj->bo);
2438          goto fail;
2439       }
2440    }
2441    trans->base.b.usage = usage;
2442    if (usage & PIPE_MAP_WRITE) {
2443       util_range_add(&res->base.b, &res->valid_buffer_range, box->x, box->x + box->width);
2444 
2445       struct zink_resource *orig_res = zink_resource(trans->base.b.resource);
2446       util_range_add(&orig_res->base.b, &orig_res->valid_buffer_range, box->x, box->x + box->width);
2447       if (orig_res->real_buffer_range)
2448          util_range_add(&orig_res->base.b, orig_res->real_buffer_range, box->x, box->x + box->width);
2449    }
2450 
2451 success:
2452    /* ensure the copy context gets unlocked */
2453    if (ctx == screen->copy_context)
2454       zink_screen_unlock_context(screen);
2455    *transfer = &trans->base.b;
2456    return ptr;
2457 
2458 fail:
2459    if (ctx == screen->copy_context)
2460       zink_screen_unlock_context(screen);
2461    destroy_transfer(ctx, trans);
2462    return NULL;
2463 }
2464 
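/* zink_image_map: linear, host-visible images are mapped directly using
 * vkGetImageSubresourceLayout to compute the per-texel offset and strides; anything else
 * goes through a staging PIPE_BUFFER that is copied from the image (and fenced) for reads,
 * with writes flushed back from the staging buffer later.
 */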
2465 static void *
2466 zink_image_map(struct pipe_context *pctx,
2467                   struct pipe_resource *pres,
2468                   unsigned level,
2469                   unsigned usage,
2470                   const struct pipe_box *box,
2471                   struct pipe_transfer **transfer)
2472 {
2473    struct zink_context *ctx = zink_context(pctx);
2474    struct zink_screen *screen = zink_screen(pctx->screen);
2475    struct zink_resource *res = zink_resource(pres);
2476    struct zink_transfer *trans = create_transfer(ctx, pres, usage, box);
2477    if (!trans)
2478       return NULL;
2479 
2480    trans->base.b.level = level;
2481    if (zink_is_swapchain(res))
2482       /* this is probably a multi-chain which has already been acquired */
2483       zink_kopper_acquire(ctx, res, 0);
2484 
2485    void *ptr;
2486    if (!(usage & PIPE_MAP_UNSYNCHRONIZED)) {
2487       if (usage & PIPE_MAP_WRITE && !(usage & PIPE_MAP_READ))
2488          /* this is like a blit, so we can potentially discard pending clears, or we may have to apply them */
2489          zink_fb_clears_apply_or_discard(ctx, pres, zink_rect_from_box(box), false);
2490       else if (usage & PIPE_MAP_READ)
2491          /* if the map region intersects with any clears then we have to apply them */
2492          zink_fb_clears_apply_region(ctx, pres, zink_rect_from_box(box));
2493    }
2494    if (!res->linear || !res->obj->host_visible) {
2495       enum pipe_format format = pres->format;
2496       if (usage & PIPE_MAP_DEPTH_ONLY)
2497          format = util_format_get_depth_only(pres->format);
2498       else if (usage & PIPE_MAP_STENCIL_ONLY)
2499          format = PIPE_FORMAT_S8_UINT;
2500       trans->base.b.stride = util_format_get_stride(format, box->width);
2501       trans->base.b.layer_stride = util_format_get_2d_size(format,
2502                                                          trans->base.b.stride,
2503                                                          box->height);
2504 
2505       struct pipe_resource templ = *pres;
2506       templ.next = NULL;
2507       templ.format = format;
2508       templ.usage = usage & PIPE_MAP_READ ? PIPE_USAGE_STAGING : PIPE_USAGE_STREAM;
2509       templ.target = PIPE_BUFFER;
2510       templ.bind = PIPE_BIND_LINEAR;
2511       templ.width0 = trans->base.b.layer_stride * box->depth;
2512       templ.height0 = templ.depth0 = 0;
2513       templ.last_level = 0;
2514       templ.array_size = 1;
2515       templ.flags = 0;
2516 
2517       trans->staging_res = zink_resource_create(pctx->screen, &templ);
2518       if (!trans->staging_res)
2519          goto fail;
2520 
2521       struct zink_resource *staging_res = zink_resource(trans->staging_res);
2522 
2523       if (usage & PIPE_MAP_READ) {
2524          assert(!(usage & TC_TRANSFER_MAP_THREADED_UNSYNC));
2525          /* force multi-context sync */
2526          if (zink_resource_usage_is_unflushed_write(res))
2527             zink_resource_usage_wait(ctx, res, ZINK_RESOURCE_ACCESS_WRITE);
2528          zink_transfer_copy_bufimage(ctx, staging_res, res, trans);
2529          /* need to wait for rendering to finish */
2530          zink_fence_wait(pctx);
2531       }
2532 
2533       ptr = map_resource(screen, staging_res);
2534    } else {
2535       assert(res->linear);
2536       ptr = map_resource(screen, res);
2537       if (!ptr)
2538          goto fail;
2539       if (zink_resource_has_usage(res)) {
2540          assert(!(usage & PIPE_MAP_UNSYNCHRONIZED));
2541          if (usage & PIPE_MAP_WRITE)
2542             zink_fence_wait(pctx);
2543          else
2544             zink_resource_usage_wait(ctx, res, ZINK_RESOURCE_ACCESS_WRITE);
2545       }
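      /* direct-map path: query the driver-reported subresource layout so stride, layer
       * pitch, and starting offset match the image's actual memory layout
       */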
2546       VkImageSubresource isr = {
2547          res->modifiers ? res->obj->modifier_aspect : res->aspect,
2548          level,
2549          0
2550       };
2551       VkSubresourceLayout srl;
2552       VKSCR(GetImageSubresourceLayout)(screen->dev, res->obj->image, &isr, &srl);
2553       trans->base.b.stride = srl.rowPitch;
2554       if (res->base.b.target == PIPE_TEXTURE_3D)
2555          trans->base.b.layer_stride = srl.depthPitch;
2556       else
2557          trans->base.b.layer_stride = srl.arrayPitch;
2558       trans->offset = srl.offset;
2559       trans->depthPitch = srl.depthPitch;
2560       const struct util_format_description *desc = util_format_description(res->base.b.format);
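      /* convert the box origin into a byte offset; x/y are divided into block units so
       * compressed formats are addressed correctly
       */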
2561       unsigned offset = srl.offset +
2562                         box->z * srl.depthPitch +
2563                         (box->y / desc->block.height) * srl.rowPitch +
2564                         (box->x / desc->block.width) * (desc->block.bits / 8);
2565       if (!res->obj->coherent) {
2566          VkDeviceSize size = (VkDeviceSize)box->width * box->height * desc->block.bits / 8;
2567          VkMappedMemoryRange range = zink_resource_init_mem_range(screen, res->obj, res->obj->offset + offset, size);
2568          if (VKSCR(FlushMappedMemoryRanges)(screen->dev, 1, &range) != VK_SUCCESS) {
2569             mesa_loge("ZINK: vkFlushMappedMemoryRanges failed");
2570          }
2571       }
2572       ptr = ((uint8_t *)ptr) + offset;
2573    }
2574    if (!ptr)
2575       goto fail;
2576    if (usage & PIPE_MAP_WRITE) {
2577       if (!res->valid && res->fb_bind_count) {
2578          assert(!(usage & PIPE_MAP_UNSYNCHRONIZED));
2579          ctx->rp_loadop_changed = true;
2580       }
2581       res->valid = true;
2582    }
2583 
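   /* 32-bit builds flag the map as temporary, presumably to avoid exhausting the limited
    * CPU address space with long-lived mappings (see zink_image_unmap)
    */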
2584    if (sizeof(void*) == 4)
2585       trans->base.b.usage |= ZINK_MAP_TEMPORARY;
2586 
2587    *transfer = &trans->base.b;
2588    return ptr;
2589 
2590 fail:
2591    destroy_transfer(ctx, trans);
2592    return NULL;
2593 }
2594 
2595 static void
2596 zink_image_subdata(struct pipe_context *pctx,
2597                   struct pipe_resource *pres,
2598                   unsigned level,
2599                   unsigned usage,
2600                   const struct pipe_box *box,
2601                   const void *data,
2602                   unsigned stride,
2603                   uintptr_t layer_stride)
2604 {
2605    struct zink_screen *screen = zink_screen(pctx->screen);
2606    struct zink_context *ctx = zink_context(pctx);
2607    struct zink_resource *res = zink_resource(pres);
2608 
2609    /* flush clears to avoid subdata conflict */
2610    if (!(usage & TC_TRANSFER_MAP_THREADED_UNSYNC) &&
2611        (res->obj->vkusage & VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT))
2612       zink_fb_clears_apply_or_discard(ctx, pres, zink_rect_from_box(box), false);
2613    /* only use HIC if supported on image and no pending usage */
2614    while (res->obj->vkusage & VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT &&
2615           zink_resource_usage_check_completion(screen, res, ZINK_RESOURCE_ACCESS_RW)) {
2616       /* uninit images are always supported */
2617       bool change_layout = res->layout == VK_IMAGE_LAYOUT_UNDEFINED || res->layout == VK_IMAGE_LAYOUT_PREINITIALIZED;
2618       if (!change_layout) {
2619          /* image in some other layout: test for support */
2620          bool can_copy_layout = false;
2621          for (unsigned i = 0; i < screen->info.hic_props.copyDstLayoutCount; i++) {
2622             if (screen->info.hic_props.pCopyDstLayouts[i] == res->layout) {
2623                can_copy_layout = true;
2624                break;
2625             }
2626          }
2627          /* some layouts don't permit HIC copies */
2628          if (!can_copy_layout)
2629             break;
2630       }
2631       bool is_arrayed = false;
2632       switch (pres->target) {
2633       case PIPE_TEXTURE_1D_ARRAY:
2634       case PIPE_TEXTURE_2D_ARRAY:
2635       case PIPE_TEXTURE_CUBE:
2636       case PIPE_TEXTURE_CUBE_ARRAY:
2637          is_arrayed = true;
2638          break;
2639       default: break;
2640       }
2641       /* recalc byte strides into texel strides, since the host image copy API takes strides in texels */
2642       unsigned vk_stride = util_format_get_stride(pres->format, 1);
2643       stride /= vk_stride;
2644       unsigned vk_layer_stride = util_format_get_2d_size(pres->format, stride, 1) * vk_stride;
2645       layer_stride /= vk_layer_stride;
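      /* e.g. for a 4 bytes-per-texel format, a 256-byte row stride becomes a 64-texel stride */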
2646 
2647       VkHostImageLayoutTransitionInfoEXT t = {
2648          VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT,
2649          NULL,
2650          res->obj->image,
2651          res->layout,
2652          /* GENERAL support is guaranteed */
2653          VK_IMAGE_LAYOUT_GENERAL,
2654          {res->aspect, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS}
2655       };
2656       /* only pre-transition uninit images to avoid thrashing */
2657       if (change_layout) {
2658          VKSCR(TransitionImageLayoutEXT)(screen->dev, 1, &t);
2659          res->layout = VK_IMAGE_LAYOUT_GENERAL;
2660       }
2661       VkMemoryToImageCopyEXT region = {
2662          VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT,
2663          NULL,
2664          data,
2665          stride,
2666          layer_stride,
2667          {res->aspect, level, is_arrayed ? box->z : 0, is_arrayed ? box->depth : 1},
2668          {box->x, box->y, is_arrayed ? 0 : box->z},
2669          {box->width, box->height, is_arrayed ? 1 : box->depth}
2670       };
2671       VkCopyMemoryToImageInfoEXT copy = {
2672          VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT,
2673          NULL,
2674          0,
2675          res->obj->image,
2676          res->layout,
2677          1,
2678          &region
2679       };
2680       VKSCR(CopyMemoryToImageEXT)(screen->dev, &copy);
2681       if (change_layout && screen->can_hic_shader_read && !pres->last_level && !box->x && !box->y && !box->z &&
2682           box->width == pres->width0 && box->height == pres->height0 &&
2683           ((is_arrayed && box->depth == pres->array_size) || (!is_arrayed && box->depth == pres->depth0))) {
2684          /* assume full copy single-mip images use shader read access */
2685          t.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
2686          t.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
2687          VKSCR(TransitionImageLayoutEXT)(screen->dev, 1, &t);
2688          res->layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
2689          /* assume multi-mip where further subdata calls may happen */
2690       }
2691       /* make sure image is marked as having data */
2692       res->valid = true;
2693       return;
2694    }
2695    /* fallback when host image copy can't be used: unsupported on this resource or device, or blocked by pending usage/layout */
2696    u_default_texture_subdata(pctx, pres, level, usage, box, data, stride, layer_stride);
2697 }
2698 
2699 static void
2700 zink_transfer_flush_region(struct pipe_context *pctx,
2701                            struct pipe_transfer *ptrans,
2702                            const struct pipe_box *box)
2703 {
2704    struct zink_context *ctx = zink_context(pctx);
2705    struct zink_resource *res = zink_resource(ptrans->resource);
2706    struct zink_transfer *trans = (struct zink_transfer *)ptrans;
2707 
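   /* only writes need flushing back: non-coherent memory gets an explicit flush, and
    * staging maps are copied back to the real resource
    */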
2708    if (trans->base.b.usage & PIPE_MAP_WRITE) {
2709       struct zink_screen *screen = zink_screen(pctx->screen);
2710       struct zink_resource *m = trans->staging_res ? zink_resource(trans->staging_res) :
2711                                                      res;
2712       ASSERTED VkDeviceSize size, src_offset, dst_offset = 0;
2713       if (m->obj->is_buffer) {
2714          size = box->width;
2715          src_offset = box->x + (trans->staging_res ? trans->offset : ptrans->box.x);
2716          dst_offset = box->x + ptrans->box.x;
2717       } else {
2718          size = (VkDeviceSize)box->width * box->height * util_format_get_blocksize(m->base.b.format);
2719          src_offset = trans->offset +
2720                   box->z * trans->depthPitch +
2721                   util_format_get_2d_size(m->base.b.format, trans->base.b.stride, box->y) +
2722                   util_format_get_stride(m->base.b.format, box->x);
2723          assert(src_offset + size <= res->obj->size);
2724       }
2725       if (!m->obj->coherent) {
2726          VkMappedMemoryRange range = zink_resource_init_mem_range(screen, m->obj, m->obj->offset, m->obj->size);
2727          if (VKSCR(FlushMappedMemoryRanges)(screen->dev, 1, &range) != VK_SUCCESS) {
2728             mesa_loge("ZINK: vkFlushMappedMemoryRanges failed");
2729          }
2730       }
2731       if (trans->staging_res) {
2732          struct zink_resource *staging_res = zink_resource(trans->staging_res);
2733 
2734          if (ptrans->resource->target == PIPE_BUFFER)
2735             zink_copy_buffer(ctx, res, staging_res, dst_offset, src_offset, size);
2736          else
2737             zink_transfer_copy_bufimage(ctx, res, staging_res, trans);
2738       }
2739    }
2740 }
2741 
2742 /* used to determine whether to emit a TRANSFER_DST barrier on copies */
2743 bool
2744 zink_resource_copy_box_intersects(struct zink_resource *res, unsigned level, const struct pipe_box *box)
2745 {
2746    /* if there are no valid copy rects tracked, this needs a barrier */
2747    if (!res->obj->copies_valid)
2748       return true;
2749    /* untracked huge miplevel */
2750    if (level >= ARRAY_SIZE(res->obj->copies))
2751       return true;
2752    u_rwlock_rdlock(&res->obj->copy_lock);
2753    struct pipe_box *b = res->obj->copies[level].data;
2754    unsigned num_boxes = util_dynarray_num_elements(&res->obj->copies[level], struct pipe_box);
2755    bool (*intersect)(const struct pipe_box *, const struct pipe_box *);
2756    /* determine intersection function based on dimensionality */
2757    switch (res->base.b.target) {
2758    case PIPE_BUFFER:
2759    case PIPE_TEXTURE_1D:
2760       intersect = u_box_test_intersection_1d;
2761       break;
2762 
2763    case PIPE_TEXTURE_1D_ARRAY:
2764    case PIPE_TEXTURE_2D:
2765       intersect = u_box_test_intersection_2d;
2766       break;
2767 
2768    default:
2769       intersect = u_box_test_intersection_3d;
2770       break;
2771    }
2772    /* if any of the tracked boxes intersect with this one, a barrier is needed */
2773    bool ret = false;
2774    for (unsigned i = 0; i < num_boxes; i++) {
2775       if (intersect(box, b + i)) {
2776          ret = true;
2777          break;
2778       }
2779    }
2780    u_rwlock_rdunlock(&res->obj->copy_lock);
2781    /* no intersection = no barrier */
2782    return ret;
2783 }
2784 
2785 /* track a new region for TRANSFER_DST barrier emission */
2786 void
2787 zink_resource_copy_box_add(struct zink_context *ctx, struct zink_resource *res, unsigned level, const struct pipe_box *box)
2788 {
2789    u_rwlock_wrlock(&res->obj->copy_lock);
2790    if (res->obj->copies_valid) {
2791       struct pipe_box *b = res->obj->copies[level].data;
2792       unsigned num_boxes = util_dynarray_num_elements(&res->obj->copies[level], struct pipe_box);
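      /* walk the tracked boxes and try to subsume or merge the new region; e.g. a tracked
       * 1D box [0, 64) followed by a new box [64, 96) is extended to [0, 96)
       */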
2793       for (unsigned i = 0; i < num_boxes; i++) {
2794          switch (res->base.b.target) {
2795          case PIPE_BUFFER:
2796          case PIPE_TEXTURE_1D:
2797             /* no-op included region */
2798             if (b[i].x <= box->x && b[i].x + b[i].width >= box->x + box->width)
2799                goto out;
2800 
2801             /* try to merge adjacent regions */
2802             if (b[i].x == box->x + box->width) {
2803                b[i].x -= box->width;
2804                b[i].width += box->width;
2805                goto out;
2806             }
2807             if (b[i].x + b[i].width == box->x) {
2808                b[i].width += box->width;
2809                goto out;
2810             }
2811 
2812             /* try to merge into region */
2813             if (box->x <= b[i].x && box->x + box->width >= b[i].x + b[i].width) {
2814                *b = *box;
2815                goto out;
2816             }
2817             break;
2818 
2819          case PIPE_TEXTURE_1D_ARRAY:
2820          case PIPE_TEXTURE_2D:
2821             /* no-op included region */
2822             if (b[i].x <= box->x && b[i].x + b[i].width >= box->x + box->width &&
2823                 b[i].y <= box->y && b[i].y + b[i].height >= box->y + box->height)
2824                goto out;
2825 
2826             /* try to merge adjacent regions */
2827             if (b[i].y == box->y && b[i].height == box->height) {
2828                if (b[i].x == box->x + box->width) {
2829                   b[i].x -= box->width;
2830                   b[i].width += box->width;
2831                   goto out;
2832                }
2833                if (b[i].x + b[i].width == box->x) {
2834                   b[i].width += box->width;
2835                   goto out;
2836                }
2837             } else if (b[i].x == box->x && b[i].width == box->width) {
2838                if (b[i].y == box->y + box->height) {
2839                   b[i].y -= box->height;
2840                   b[i].height += box->height;
2841                   goto out;
2842                }
2843                if (b[i].y + b[i].height == box->y) {
2844                   b[i].height += box->height;
2845                   goto out;
2846                }
2847             }
2848 
2849             /* try to merge into region */
2850             if (box->x <= b[i].x && box->x + box->width >= b[i].x + b[i].width &&
2851                 box->y <= b[i].y && box->y + box->height >= b[i].y + b[i].height) {
2852                *b = *box;
2853                goto out;
2854             }
2855             break;
2856 
2857          default:
2858             /* no-op included region */
2859             if (b[i].x <= box->x && b[i].x + b[i].width >= box->x + box->width &&
2860                 b[i].y <= box->y && b[i].y + b[i].height >= box->y + box->height &&
2861                 b[i].z <= box->z && b[i].z + b[i].depth >= box->z + box->depth)
2862                goto out;
2863 
2864             /* try to merge adjacent regions */
2865             if (b[i].z == box->z && b[i].depth == box->depth) {
2866                if (b[i].y == box->y && b[i].height == box->height) {
2867                   if (b[i].x == box->x + box->width) {
2868                      b[i].x -= box->width;
2869                      b[i].width += box->width;
2870                      goto out;
2871                   }
2872                   if (b[i].x + b[i].width == box->x) {
2873                      b[i].width += box->width;
2874                      goto out;
2875                   }
2876                } else if (b[i].x == box->x && b[i].width == box->width) {
2877                   if (b[i].y == box->y + box->height) {
2878                      b[i].y -= box->height;
2879                      b[i].height += box->height;
2880                      goto out;
2881                   }
2882                   if (b[i].y + b[i].height == box->y) {
2883                      b[i].height += box->height;
2884                      goto out;
2885                   }
2886                }
2887             } else if (b[i].x == box->x && b[i].width == box->width) {
2888                if (b[i].y == box->y && b[i].height == box->height) {
2889                   if (b[i].z == box->z + box->depth) {
2890                      b[i].z -= box->depth;
2891                      b[i].depth += box->depth;
2892                      goto out;
2893                   }
2894                   if (b[i].z + b[i].depth == box->z) {
2895                      b[i].depth += box->depth;
2896                      goto out;
2897                   }
2898                } else if (b[i].z == box->z && b[i].depth == box->depth) {
2899                   if (b[i].y == box->y + box->height) {
2900                      b[i].y -= box->height;
2901                      b[i].height += box->height;
2902                      goto out;
2903                   }
2904                   if (b[i].y + b[i].height == box->y) {
2905                      b[i].height += box->height;
2906                      goto out;
2907                   }
2908                }
2909             } else if (b[i].y == box->y && b[i].height == box->height) {
2910                if (b[i].z == box->z && b[i].depth == box->depth) {
2911                   if (b[i].x == box->x + box->width) {
2912                      b[i].x -= box->width;
2913                      b[i].width += box->width;
2914                      goto out;
2915                   }
2916                   if (b[i].x + b[i].width == box->x) {
2917                      b[i].width += box->width;
2918                      goto out;
2919                   }
2920                } else if (b[i].x == box->x && b[i].width == box->width) {
2921                   if (b[i].z == box->z + box->depth) {
2922                      b[i].z -= box->depth;
2923                      b[i].depth += box->depth;
2924                      goto out;
2925                   }
2926                   if (b[i].z + b[i].depth == box->z) {
2927                      b[i].depth += box->depth;
2928                      goto out;
2929                   }
2930                }
2931             }
2932 
2933             /* try to merge into region */
2934             if (box->x <= b[i].x && box->x + box->width >= b[i].x + b[i].width &&
2935                 box->y <= b[i].y && box->y + box->height >= b[i].y + b[i].height &&
2936                 box->z <= b[i].z && box->z + box->depth >= b[i].z + b[i].depth)
2937                goto out;
2938 
2939             break;
2940          }
2941       }
2942    }
2943    util_dynarray_append(&res->obj->copies[level], struct pipe_box, *box);
2944    if (!res->copies_warned && util_dynarray_num_elements(&res->obj->copies[level], struct pipe_box) > 100) {
2945       perf_debug(ctx, "zink: PERF WARNING! > 100 copy boxes detected for %p\n", res);
2946       mesa_logw("zink: PERF WARNING! > 100 copy boxes detected for %p\n", res);
2947       res->copies_warned = true;
2948    }
2949    res->obj->copies_valid = true;
2950 out:
2951    u_rwlock_wrunlock(&res->obj->copy_lock);
2952 }
2953 
2954 void
2955 zink_resource_copies_reset(struct zink_resource *res)
2956 {
2957    if (!res->obj->copies_valid)
2958       return;
2959    u_rwlock_wrlock(&res->obj->copy_lock);
2960    unsigned max_level = res->base.b.target == PIPE_BUFFER ? 1 : (res->base.b.last_level + 1);
2961    if (res->base.b.target == PIPE_BUFFER) {
2962       /* on reset, fold the tracked transfer regions back into the buffer's valid range */
2963       struct pipe_box *b = res->obj->copies[0].data;
2964       unsigned num_boxes = util_dynarray_num_elements(&res->obj->copies[0], struct pipe_box);
2965       for (unsigned i = 0; i < num_boxes; i++)
2966          util_range_add(&res->base.b, &res->valid_buffer_range, b[i].x, b[i].x + b[i].width);
2967    }
2968    for (unsigned i = 0; i < max_level; i++)
2969       util_dynarray_clear(&res->obj->copies[i]);
2970    res->obj->copies_valid = false;
2971    res->obj->copies_need_reset = false;
2972    u_rwlock_wrunlock(&res->obj->copy_lock);
2973 }
2974 
2975 static void
2976 transfer_unmap(struct pipe_context *pctx, struct pipe_transfer *ptrans)
2977 {
2978    struct zink_context *ctx = zink_context(pctx);
2979    struct zink_transfer *trans = (struct zink_transfer *)ptrans;
2980 
2981    if (!(trans->base.b.usage & (PIPE_MAP_FLUSH_EXPLICIT | PIPE_MAP_COHERENT))) {
2982       /* flush_region is relative to the mapped region: use only the extents */
2983       struct pipe_box box = ptrans->box;
2984       box.x = box.y = box.z = 0;
2985       zink_transfer_flush_region(pctx, ptrans, &box);
2986    }
2987 
2988    if (trans->staging_res)
2989       pipe_resource_reference(&trans->staging_res, NULL);
2990    pipe_resource_reference(&trans->base.b.resource, NULL);
2991 
2992    destroy_transfer(ctx, trans);
2993 }
2994 
2995 static void
2996 do_transfer_unmap(struct zink_screen *screen, struct zink_transfer *trans)
2997 {
2998    struct zink_resource *res = zink_resource(trans->staging_res);
2999    if (!res)
3000       res = zink_resource(trans->base.b.resource);
3001    unmap_resource(screen, res);
3002 }
3003 
3004 void
3005 zink_screen_buffer_unmap(struct pipe_screen *pscreen, struct pipe_transfer *ptrans)
3006 {
3007    struct zink_screen *screen = zink_screen(pscreen);
3008    struct zink_transfer *trans = (struct zink_transfer *)ptrans;
3009    if (trans->base.b.usage & PIPE_MAP_ONCE && !trans->staging_res)
3010       do_transfer_unmap(screen, trans);
3011    transfer_unmap(NULL, ptrans);
3012 }
3013 
3014 static void
3015 zink_buffer_unmap(struct pipe_context *pctx, struct pipe_transfer *ptrans)
3016 {
3017    struct zink_screen *screen = zink_screen(pctx->screen);
3018    struct zink_transfer *trans = (struct zink_transfer *)ptrans;
3019    if (trans->base.b.usage & PIPE_MAP_ONCE && !trans->staging_res)
3020       do_transfer_unmap(screen, trans);
3021    transfer_unmap(pctx, ptrans);
3022 }
3023 
3024 static void
3025 zink_image_unmap(struct pipe_context *pctx, struct pipe_transfer *ptrans)
3026 {
3027    struct zink_screen *screen = zink_screen(pctx->screen);
3028    struct zink_transfer *trans = (struct zink_transfer *)ptrans;
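   /* mirrors the 32-bit ZINK_MAP_TEMPORARY handling in zink_image_map: drop the mapping
    * immediately on 32-bit builds
    */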
3029    if (sizeof(void*) == 4)
3030       do_transfer_unmap(screen, trans);
3031    transfer_unmap(pctx, ptrans);
3032 }
3033 
3034 static void
3035 zink_buffer_subdata(struct pipe_context *ctx, struct pipe_resource *buffer,
3036                     unsigned usage, unsigned offset, unsigned size, const void *data)
3037 {
3038    struct pipe_transfer *transfer = NULL;
3039    struct pipe_box box;
3040    uint8_t *map = NULL;
3041 
3042    usage |= PIPE_MAP_WRITE;
3043 
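   /* unless the caller requires mapping the buffer directly, discard the range so the
    * driver is free to avoid stalling on prior GPU use of that range
    */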
3044    if (!(usage & PIPE_MAP_DIRECTLY))
3045       usage |= PIPE_MAP_DISCARD_RANGE;
3046 
3047    u_box_1d(offset, size, &box);
3048    map = zink_buffer_map(ctx, buffer, 0, usage, &box, &transfer);
3049    if (!map)
3050       return;
3051 
3052    memcpy(map, data, size);
3053    zink_buffer_unmap(ctx, transfer);
3054 }
3055 
3056 static struct pipe_resource *
3057 zink_resource_get_separate_stencil(struct pipe_resource *pres)
3058 {
3059    /* For packed depth-stencil, we treat depth as the primary resource
3060     * and store S8 as the "second plane" resource.
3061     */
3062    if (pres->next && pres->next->format == PIPE_FORMAT_S8_UINT)
3063       return pres->next;
3064 
3065    return NULL;
3066 
3067 }
3068 
3069 static bool
3070 resource_object_add_bind(struct zink_context *ctx, struct zink_resource *res, unsigned bind)
3071 {
3072    /* base resource already has the cap */
3073    if (res->base.b.bind & bind)
3074       return true;
3075    if (res->obj->is_buffer) {
3076       unreachable("zink: all buffers should have this bit");
3077       return true;
3078    }
3079    assert(!res->obj->dt);
3080    zink_fb_clears_apply_region(ctx, &res->base.b, (struct u_rect){0, res->base.b.width0, 0, res->base.b.height0});
3081    bool ret = add_resource_bind(ctx, res, bind);
3082    if (ret)
3083       zink_resource_rebind(ctx, res);
3084 
3085    return ret;
3086 }
3087 
3088 bool
3089 zink_resource_object_init_storage(struct zink_context *ctx, struct zink_resource *res)
3090 {
3091    return resource_object_add_bind(ctx, res, PIPE_BIND_SHADER_IMAGE);
3092 }
3093 
3094 bool
3095 zink_resource_object_init_mutable(struct zink_context *ctx, struct zink_resource *res)
3096 {
3097    return resource_object_add_bind(ctx, res, ZINK_BIND_MUTABLE);
3098 }
3099 
3100 VkDeviceAddress
3101 zink_resource_get_address(struct zink_screen *screen, struct zink_resource *res)
3102 {
3103    assert(res->obj->is_buffer);
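   /* lazily query and cache the buffer device address the first time it is needed */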
3104    if (!res->obj->bda) {
3105       VkBufferDeviceAddressInfo info = {
3106          VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
3107          NULL,
3108          res->obj->buffer
3109       };
3110       res->obj->bda = VKSCR(GetBufferDeviceAddress)(screen->dev, &info);
3111    }
3112    return res->obj->bda;
3113 }
3114 
3115 void
3116 zink_resource_setup_transfer_layouts(struct zink_context *ctx, struct zink_resource *src, struct zink_resource *dst)
3117 {
3118    if (src == dst) {
3119       /* The Vulkan 1.1 specification says the following about valid usage
3120        * of vkCmdBlitImage:
3121        *
3122        * "srcImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
3123        *  VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL"
3124        *
3125        * and:
3126        *
3127        * "dstImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
3128        *  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL"
3129        *
3130        * Since we can't have the same image in two states at the same time,
3131        * we're effectively left with VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR or
3132        * VK_IMAGE_LAYOUT_GENERAL. And since this isn't a present-related
3133        * operation, VK_IMAGE_LAYOUT_GENERAL seems most appropriate.
3134        */
3135       zink_screen(ctx->base.screen)->image_barrier(ctx, src,
3136                                   VK_IMAGE_LAYOUT_GENERAL,
3137                                   VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_TRANSFER_WRITE_BIT,
3138                                   VK_PIPELINE_STAGE_TRANSFER_BIT);
3139    } else {
3140       zink_screen(ctx->base.screen)->image_barrier(ctx, src,
3141                                   VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3142                                   VK_ACCESS_TRANSFER_READ_BIT,
3143                                   VK_PIPELINE_STAGE_TRANSFER_BIT);
3144 
3145       zink_screen(ctx->base.screen)->image_barrier(ctx, dst,
3146                                   VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3147                                   VK_ACCESS_TRANSFER_WRITE_BIT,
3148                                   VK_PIPELINE_STAGE_TRANSFER_BIT);
3149    }
3150 }
3151 
3152 void
3153 zink_get_depth_stencil_resources(struct pipe_resource *res,
3154                                  struct zink_resource **out_z,
3155                                  struct zink_resource **out_s)
3156 {
3157    if (!res) {
3158       if (out_z) *out_z = NULL;
3159       if (out_s) *out_s = NULL;
3160       return;
3161    }
3162 
3163    if (res->format != PIPE_FORMAT_S8_UINT) {
3164       if (out_z) *out_z = zink_resource(res);
3165       if (out_s) *out_s = zink_resource(zink_resource_get_separate_stencil(res));
3166    } else {
3167       if (out_z) *out_z = NULL;
3168       if (out_s) *out_s = zink_resource(res);
3169    }
3170 }
3171 
3172 static void
3173 zink_resource_set_separate_stencil(struct pipe_resource *pres,
3174                                    struct pipe_resource *stencil)
3175 {
3176    assert(util_format_has_depth(util_format_description(pres->format)));
3177    pipe_resource_reference(&pres->next, stencil);
3178 }
3179 
3180 static enum pipe_format
3181 zink_resource_get_internal_format(struct pipe_resource *pres)
3182 {
3183    struct zink_resource *res = zink_resource(pres);
3184    return res->internal_format;
3185 }
3186 
3187 static const struct u_transfer_vtbl transfer_vtbl = {
3188    .resource_create       = zink_resource_create,
3189    .resource_destroy      = zink_resource_destroy,
3190    .transfer_map          = zink_image_map,
3191    .transfer_unmap        = zink_image_unmap,
3192    .transfer_flush_region = zink_transfer_flush_region,
3193    .get_internal_format   = zink_resource_get_internal_format,
3194    .set_stencil           = zink_resource_set_separate_stencil,
3195    .get_stencil           = zink_resource_get_separate_stencil,
3196 };
3197 
3198 bool
3199 zink_screen_resource_init(struct pipe_screen *pscreen)
3200 {
3201    struct zink_screen *screen = zink_screen(pscreen);
3202    pscreen->resource_create = u_transfer_helper_resource_create;
3203    pscreen->resource_create_with_modifiers = zink_resource_create_with_modifiers;
3204    pscreen->resource_create_drawable = zink_resource_create_drawable;
3205    pscreen->resource_destroy = u_transfer_helper_resource_destroy;
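   /* u_transfer_helper handles depth/stencil splitting and interleaving, MSAA maps, and
    * Z24 emulation in Z32F when D24_UNORM_S8_UINT isn't supported
    */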
3206    pscreen->transfer_helper = u_transfer_helper_create(&transfer_vtbl,
3207       U_TRANSFER_HELPER_SEPARATE_Z32S8 | U_TRANSFER_HELPER_SEPARATE_STENCIL |
3208       U_TRANSFER_HELPER_INTERLEAVE_IN_PLACE |
3209       U_TRANSFER_HELPER_MSAA_MAP |
3210       (!screen->have_D24_UNORM_S8_UINT ? U_TRANSFER_HELPER_Z24_IN_Z32F : 0));
3211 
3212    if (screen->info.have_KHR_external_memory_fd || screen->info.have_KHR_external_memory_win32) {
3213       pscreen->resource_get_handle = zink_resource_get_handle;
3214       pscreen->resource_from_handle = zink_resource_from_handle;
3215    }
3216    if (screen->info.have_EXT_external_memory_host) {
3217       pscreen->resource_from_user_memory = zink_resource_from_user_memory;
3218    }
3219    if (screen->instance_info.have_KHR_external_memory_capabilities) {
3220       pscreen->memobj_create_from_handle = zink_memobj_create_from_handle;
3221       pscreen->memobj_destroy = zink_memobj_destroy;
3222       pscreen->resource_from_memobj = zink_resource_from_memobj;
3223    }
3224    pscreen->resource_get_param = zink_resource_get_param;
3225    return true;
3226 }
3227 
3228 void
3229 zink_context_resource_init(struct pipe_context *pctx)
3230 {
3231    pctx->buffer_map = zink_buffer_map;
3232    pctx->buffer_unmap = zink_buffer_unmap;
3233    pctx->texture_map = u_transfer_helper_transfer_map;
3234    pctx->texture_unmap = u_transfer_helper_transfer_unmap;
3235 
3236    pctx->transfer_flush_region = u_transfer_helper_transfer_flush_region;
3237    pctx->buffer_subdata = zink_buffer_subdata;
3238    pctx->texture_subdata = zink_image_subdata;
3239    pctx->invalidate_resource = zink_resource_invalidate;
3240 }
3241