/*-------------------------------------------------------------------------
 * Vulkan CTS Framework
 * --------------------
 *
 * Copyright (c) 2015 Google Inc.
 * Copyright (c) 2023 LunarG, Inc.
 * Copyright (c) 2023 Nintendo
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *//*!
 * \file
 * \brief Null (do-nothing) Vulkan implementation.
 *//*--------------------------------------------------------------------*/

#include "vkNullDriver.hpp"
#include "vkPlatform.hpp"
#include "vkImageUtil.hpp"
#include "vkQueryUtil.hpp"
#include "tcuFunctionLibrary.hpp"
#include "deMemory.h"

#if (DE_OS == DE_OS_ANDROID) && defined(__ANDROID_API_O__) && \
    (DE_ANDROID_API >= __ANDROID_API_O__ /* __ANDROID_API_O__ */)
#define USE_ANDROID_O_HARDWARE_BUFFER
#endif
#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
#include <android/hardware_buffer.h>
#endif

#include <stdexcept>
#include <algorithm>

namespace vk
{

namespace
{

using std::vector;

// Memory management

template <typename T>
void *allocateSystemMem(const VkAllocationCallbacks *pAllocator, VkSystemAllocationScope scope)
{
    void *ptr = pAllocator->pfnAllocation(pAllocator->pUserData, sizeof(T), sizeof(void *), scope);
    if (!ptr)
        throw std::bad_alloc();
    return ptr;
}

void freeSystemMem(const VkAllocationCallbacks *pAllocator, void *mem)
{
    pAllocator->pfnFree(pAllocator->pUserData, mem);
}
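
// allocateSystemMem()/freeSystemMem() route object allocations through the
// application-provided VkAllocationCallbacks: sizeof(T) bytes are requested with
// pointer alignment, and a null return from pfnAllocation is reported as
// std::bad_alloc so the entry-point wrappers can translate it into
// VK_ERROR_OUT_OF_HOST_MEMORY (see VK_NULL_RETURN below).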

template <typename Object, typename Handle, typename Parent, typename CreateInfo>
Handle allocateHandle(Parent parent, const CreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator)
{
    Object *obj = DE_NULL;

    if (pAllocator)
    {
        void *mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
        try
        {
            obj = new (mem) Object(parent, pCreateInfo);
            DE_ASSERT(obj == mem);
        }
        catch (...)
        {
            pAllocator->pfnFree(pAllocator->pUserData, mem);
            throw;
        }
    }
    else
        obj = new Object(parent, pCreateInfo);

    return reinterpret_cast<Handle>(obj);
}

template <typename Object, typename Handle, typename CreateInfo>
Handle allocateHandle(const CreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator)
{
    Object *obj = DE_NULL;

    if (pAllocator)
    {
        void *mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
        try
        {
            obj = new (mem) Object(pCreateInfo);
            DE_ASSERT(obj == mem);
        }
        catch (...)
        {
            pAllocator->pfnFree(pAllocator->pUserData, mem);
            throw;
        }
    }
    else
        obj = new Object(pCreateInfo);

    return reinterpret_cast<Handle>(obj);
}

template <typename Object, typename Handle, typename Parent>
Handle allocateHandle(Parent parent, const VkAllocationCallbacks *pAllocator)
{
    Object *obj = DE_NULL;

    if (pAllocator)
    {
        void *mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
        try
        {
            obj = new (mem) Object(parent);
            DE_ASSERT(obj == mem);
        }
        catch (...)
        {
            pAllocator->pfnFree(pAllocator->pUserData, mem);
            throw;
        }
    }
    else
        obj = new Object(parent);

    return reinterpret_cast<Handle>(obj);
}

template <typename Object, typename Handle>
void freeHandle(Handle handle, const VkAllocationCallbacks *pAllocator)
{
    Object *obj = reinterpret_cast<Object *>(handle);

    if (pAllocator)
    {
        obj->~Object();
        freeSystemMem(pAllocator, reinterpret_cast<void *>(obj));
    }
    else
        delete obj;
}

template <typename Object, typename BaseObject, typename Handle, typename Parent, typename CreateInfo>
void allocateNonDispHandleArray(Parent parent, VkPipelineCache pipelineCache, uint32_t createInfoCount,
                                const CreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator,
                                Handle *pHandles)
{
    (void)pipelineCache;
    for (uint32_t i = 0; i < createInfoCount; i++)
    {
        Object *const obj = allocateHandle<Object, Object *>(parent, &pCreateInfos[i], pAllocator);
        pHandles[i]       = Handle((uint64_t)(uintptr_t)obj);
    }
}

template <typename Object, typename BaseObject, typename Handle, typename Parent, typename CreateInfo>
Handle allocateNonDispHandle(Parent parent, const CreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator)
{
    Object *const obj = allocateHandle<Object, Object *>(parent, pCreateInfo, pAllocator);
    return Handle((uint64_t)(uintptr_t) static_cast<BaseObject *>(obj));
}

template <typename Object, typename Handle, typename Parent, typename CreateInfo>
Handle allocateNonDispHandle(Parent parent, const CreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator)
{
    return allocateNonDispHandle<Object, Object, Handle, Parent, CreateInfo>(parent, pCreateInfo, pAllocator);
}

template <typename Object, typename Handle, typename Parent>
Handle allocateNonDispHandle(Parent parent, const VkAllocationCallbacks *pAllocator)
{
    Object *const obj = allocateHandle<Object, Object *>(parent, pAllocator);
    return Handle((uint64_t)(uintptr_t)obj);
}

template <typename Object, typename Handle>
void freeNonDispHandle(Handle handle, const VkAllocationCallbacks *pAllocator)
{
    freeHandle<Object>(reinterpret_cast<Object *>((uintptr_t)handle.getInternal()), pAllocator);
}
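
// Handle conventions used throughout this null driver: dispatchable handles
// (VkInstance, VkDevice, VkCommandBuffer, ...) are plain object pointers, while
// non-dispatchable handles carry the object pointer packed into their 64-bit
// value and recover it via getInternal(). As a rough illustration (hypothetical
// call, not part of this file):
//
//   VkBuffer buffer = allocateNonDispHandle<Buffer, VkBuffer>(device, &createInfo, pAllocator);
//   freeNonDispHandle<Buffer, VkBuffer>(buffer, pAllocator); // runs ~Buffer() and releases the memory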

// Object definitions

#define VK_NULL_RETURN(STMT)                    \
    do                                          \
    {                                           \
        try                                     \
        {                                       \
            STMT;                               \
            return VK_SUCCESS;                  \
        }                                       \
        catch (const std::bad_alloc &)          \
        {                                       \
            return VK_ERROR_OUT_OF_HOST_MEMORY; \
        }                                       \
        catch (VkResult res)                    \
        {                                       \
            return res;                         \
        }                                       \
    } while (false)
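
// VK_NULL_RETURN wraps an object-creation statement and maps the exceptions used
// internally (std::bad_alloc from the allocators above, or a thrown VkResult) onto
// the VkResult codes a Vulkan entry point is expected to return.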

// \todo [2015-07-14 pyry] Check FUNC type by checkedCastToPtr<T>() or similar
#define VK_NULL_FUNC_ENTRY(NAME, FUNC) \
    {                                  \
        #NAME, (deFunctionPtr)FUNC     \
    } // NOLINT(FUNC)

#define VK_NULL_DEFINE_DEVICE_OBJ(NAME)              \
    struct NAME                                      \
    {                                                \
        NAME(VkDevice, const Vk##NAME##CreateInfo *) \
        {                                            \
        }                                            \
    }

#define VK_NULL_DEFINE_OBJ_WITH_POSTFIX(DEVICE_OR_INSTANCE, NAME, POSTFIX)       \
    struct NAME##POSTFIX                                                         \
    {                                                                            \
        NAME##POSTFIX(DEVICE_OR_INSTANCE, const Vk##NAME##CreateInfo##POSTFIX *) \
        {                                                                        \
        }                                                                        \
    };
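
// For reference, VK_NULL_DEFINE_DEVICE_OBJ(Fence) below expands to roughly:
//
//   struct Fence
//   {
//       Fence(VkDevice, const VkFenceCreateInfo *)
//       {
//       }
//   };
//
// i.e. a stub object whose constructor ignores its create info;
// VK_NULL_DEFINE_OBJ_WITH_POSTFIX does the same for extension objects such as
// SwapchainKHR or DebugUtilsMessengerEXT.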

VK_NULL_DEFINE_DEVICE_OBJ(Fence);
VK_NULL_DEFINE_DEVICE_OBJ(Semaphore);
VK_NULL_DEFINE_DEVICE_OBJ(Event);
VK_NULL_DEFINE_DEVICE_OBJ(QueryPool);
VK_NULL_DEFINE_DEVICE_OBJ(BufferView);
VK_NULL_DEFINE_DEVICE_OBJ(ImageView);
VK_NULL_DEFINE_DEVICE_OBJ(PipelineCache);
VK_NULL_DEFINE_DEVICE_OBJ(PipelineLayout);
VK_NULL_DEFINE_DEVICE_OBJ(DescriptorSetLayout);
VK_NULL_DEFINE_DEVICE_OBJ(Sampler);
VK_NULL_DEFINE_DEVICE_OBJ(Framebuffer);
VK_NULL_DEFINE_DEVICE_OBJ(SamplerYcbcrConversion);
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, Swapchain, KHR)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkInstance, DebugUtilsMessenger, EXT)

#ifdef CTS_USES_VULKANSC
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, SemaphoreSciSyncPool, NV)
#else
VK_NULL_DEFINE_DEVICE_OBJ(ShaderModule);
VK_NULL_DEFINE_DEVICE_OBJ(DescriptorUpdateTemplate);
VK_NULL_DEFINE_DEVICE_OBJ(PrivateDataSlot);
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkInstance, DebugReportCallback, EXT)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, CuModule, NVX)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, CuFunction, NVX)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, CudaModule, NV)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, CudaFunction, NV)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, Micromap, EXT)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, OpticalFlowSession, NV)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, IndirectCommandsLayout, NV)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, AccelerationStructure, NV)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, AccelerationStructure, KHR)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, VideoSession, KHR)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, VideoSessionParameters, KHR)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, ValidationCache, EXT)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, BufferCollection, FUCHSIA)
VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, Shader, EXT)
#endif // CTS_USES_VULKANSC

class Instance
{
public:
    Instance(const VkInstanceCreateInfo *instanceInfo);
    ~Instance(void)
    {
    }

    PFN_vkVoidFunction getProcAddr(const char *name) const
    {
        return (PFN_vkVoidFunction)m_functions.getFunction(name);
    }

private:
    const tcu::StaticFunctionLibrary m_functions;
};

class SurfaceKHR
{
public:
#ifndef CTS_USES_VULKANSC
    SurfaceKHR(VkInstance, const VkXlibSurfaceCreateInfoKHR *)
    {
    }
    SurfaceKHR(VkInstance, const VkXcbSurfaceCreateInfoKHR *)
    {
    }
    SurfaceKHR(VkInstance, const VkWaylandSurfaceCreateInfoKHR *)
    {
    }
    SurfaceKHR(VkInstance, const VkAndroidSurfaceCreateInfoKHR *)
    {
    }
    SurfaceKHR(VkInstance, const VkWin32SurfaceCreateInfoKHR *)
    {
    }
    SurfaceKHR(VkInstance, const VkViSurfaceCreateInfoNN *)
    {
    }
    SurfaceKHR(VkInstance, const VkIOSSurfaceCreateInfoMVK *)
    {
    }
    SurfaceKHR(VkInstance, const VkMacOSSurfaceCreateInfoMVK *)
    {
    }
    SurfaceKHR(VkInstance, const VkImagePipeSurfaceCreateInfoFUCHSIA *)
    {
    }
    SurfaceKHR(VkInstance, const VkStreamDescriptorSurfaceCreateInfoGGP *)
    {
    }
    SurfaceKHR(VkInstance, const VkMetalSurfaceCreateInfoEXT *)
    {
    }
    SurfaceKHR(VkInstance, const VkScreenSurfaceCreateInfoQNX *)
    {
    }
#endif // CTS_USES_VULKANSC
    SurfaceKHR(VkInstance, const VkDisplaySurfaceCreateInfoKHR *)
    {
    }
    SurfaceKHR(VkInstance, const VkHeadlessSurfaceCreateInfoEXT *)
    {
    }
    ~SurfaceKHR(void)
    {
    }
};

class DisplayModeKHR
{
public:
    DisplayModeKHR(VkDisplayKHR, const VkDisplayModeCreateInfoKHR *)
    {
    }
    ~DisplayModeKHR(void)
    {
    }
};

class Device
{
public:
    Device(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *deviceInfo);
    ~Device(void)
    {
    }

    PFN_vkVoidFunction getProcAddr(const char *name) const
    {
        return (PFN_vkVoidFunction)m_functions.getFunction(name);
    }

private:
    const tcu::StaticFunctionLibrary m_functions;
};

class Pipeline
{
public:
    Pipeline(VkDevice, const VkGraphicsPipelineCreateInfo *)
    {
    }
    Pipeline(VkDevice, const VkComputePipelineCreateInfo *)
    {
    }
#ifndef CTS_USES_VULKANSC
    Pipeline(VkDevice, const VkRayTracingPipelineCreateInfoNV *)
    {
    }
    Pipeline(VkDevice, const VkRayTracingPipelineCreateInfoKHR *)
    {
    }
    Pipeline(VkDevice, const VkExecutionGraphPipelineCreateInfoAMDX *)
    {
    }
#endif // CTS_USES_VULKANSC
};

class RenderPass
{
public:
    RenderPass(VkDevice, const VkRenderPassCreateInfo *)
    {
    }
    RenderPass(VkDevice, const VkRenderPassCreateInfo2 *)
    {
    }
};

class Buffer
{
public:
    Buffer(VkDevice, const VkBufferCreateInfo *pCreateInfo) : m_size(pCreateInfo->size)
    {
    }

    VkDeviceSize getSize(void) const
    {
        return m_size;
    }

private:
    const VkDeviceSize m_size;
};

VkExternalMemoryHandleTypeFlags getExternalTypesHandle(const VkImageCreateInfo *pCreateInfo)
{
    const VkExternalMemoryImageCreateInfo *const externalInfo =
        findStructure<VkExternalMemoryImageCreateInfo>(pCreateInfo->pNext);

    return externalInfo ? externalInfo->handleTypes : 0u;
}

class Image
{
public:
    Image(VkDevice, const VkImageCreateInfo *pCreateInfo)
        : m_imageType(pCreateInfo->imageType)
        , m_format(pCreateInfo->format)
        , m_extent(pCreateInfo->extent)
        , m_arrayLayers(pCreateInfo->arrayLayers)
        , m_samples(pCreateInfo->samples)
        , m_usage(pCreateInfo->usage)
        , m_flags(pCreateInfo->flags)
        , m_externalHandleTypes(getExternalTypesHandle(pCreateInfo))
    {
    }

    VkImageType getImageType(void) const
    {
        return m_imageType;
    }
    VkFormat getFormat(void) const
    {
        return m_format;
    }
    VkExtent3D getExtent(void) const
    {
        return m_extent;
    }
    uint32_t getArrayLayers(void) const
    {
        return m_arrayLayers;
    }
    VkSampleCountFlagBits getSamples(void) const
    {
        return m_samples;
    }
    VkImageUsageFlags getUsage(void) const
    {
        return m_usage;
    }
    VkImageCreateFlags getFlags(void) const
    {
        return m_flags;
    }
    VkExternalMemoryHandleTypeFlags getExternalHandleTypes(void) const
    {
        return m_externalHandleTypes;
    }

private:
    const VkImageType m_imageType;
    const VkFormat m_format;
    const VkExtent3D m_extent;
    const uint32_t m_arrayLayers;
    const VkSampleCountFlagBits m_samples;
    const VkImageUsageFlags m_usage;
    const VkImageCreateFlags m_flags;
    const VkExternalMemoryHandleTypeFlags m_externalHandleTypes;
};

void *allocateHeap(const VkMemoryAllocateInfo *pAllocInfo)
{
    // \todo [2015-12-03 pyry] Alignment requirements?
    // \todo [2015-12-03 pyry] Empty allocations okay?
    if (pAllocInfo->allocationSize > 0)
    {
        void *const heapPtr = deMalloc((size_t)pAllocInfo->allocationSize);
        if (!heapPtr)
            throw std::bad_alloc();
        return heapPtr;
    }
    else
        return DE_NULL;
}

void freeHeap(void *ptr)
{
    deFree(ptr);
}

class DeviceMemory
{
public:
    virtual ~DeviceMemory(void)
    {
    }
    virtual void *map(void)  = 0;
    virtual void unmap(void) = 0;
};

class PrivateDeviceMemory : public DeviceMemory
{
public:
    PrivateDeviceMemory(VkDevice, const VkMemoryAllocateInfo *pAllocInfo) : m_memory(allocateHeap(pAllocInfo))
    {
        // \todo [2016-08-03 pyry] In some cases leaving data uninitialized would help valgrind analysis,
        //                         but currently it mostly hinders it.
        if (m_memory)
            deMemset(m_memory, 0xcd, (size_t)pAllocInfo->allocationSize);
    }
    virtual ~PrivateDeviceMemory(void)
    {
        freeHeap(m_memory);
    }

    virtual void *map(void) /*override*/
    {
        return m_memory;
    }
    virtual void unmap(void) /*override*/
    {
    }

private:
    void *const m_memory;
};
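
// PrivateDeviceMemory backs plain vkAllocateMemory() calls with a host allocation:
// the storage is filled with a 0xcd pattern so reads of never-written "device"
// memory are easy to spot, map() simply returns the host pointer, and unmap() is a
// no-op.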

#ifndef CTS_USES_VULKANSC

#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
AHardwareBuffer *findOrCreateHwBuffer(const VkMemoryAllocateInfo *pAllocInfo)
{
    const VkExportMemoryAllocateInfo *const exportInfo = findStructure<VkExportMemoryAllocateInfo>(pAllocInfo->pNext);
    const VkImportAndroidHardwareBufferInfoANDROID *const importInfo =
        findStructure<VkImportAndroidHardwareBufferInfoANDROID>(pAllocInfo->pNext);
    const VkMemoryDedicatedAllocateInfo *const dedicatedInfo =
        findStructure<VkMemoryDedicatedAllocateInfo>(pAllocInfo->pNext);
    const Image *const image  = dedicatedInfo && !!dedicatedInfo->image ?
                                    reinterpret_cast<const Image *>(dedicatedInfo->image.getInternal()) :
                                    DE_NULL;
    AHardwareBuffer *hwbuffer = DE_NULL;

    // Import and export aren't mutually exclusive; we can have both simultaneously.
    DE_ASSERT((importInfo && importInfo->buffer.internal) ||
              (exportInfo &&
               (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0));

    if (importInfo && importInfo->buffer.internal)
    {
        hwbuffer = (AHardwareBuffer *)importInfo->buffer.internal;
        AHardwareBuffer_acquire(hwbuffer);
    }
    else if (exportInfo &&
             (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0)
    {
        AHardwareBuffer_Desc hwbufferDesc;
        deMemset(&hwbufferDesc, 0, sizeof(hwbufferDesc));

        if (image)
        {
            hwbufferDesc.width  = image->getExtent().width;
            hwbufferDesc.height = image->getExtent().height;
            hwbufferDesc.layers = image->getArrayLayers();
            switch (image->getFormat())
            {
            case VK_FORMAT_R8G8B8A8_UNORM:
                hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
                break;
            case VK_FORMAT_R8G8B8_UNORM:
                hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
                break;
            case VK_FORMAT_R5G6B5_UNORM_PACK16:
                hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
                break;
            case VK_FORMAT_R16G16B16A16_SFLOAT:
                hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
                break;
            case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
                hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
                break;
            default:
                DE_FATAL("Unsupported image format for Android hardware buffer export");
                break;
            }
            if ((image->getUsage() & VK_IMAGE_USAGE_SAMPLED_BIT) != 0)
                hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
            if ((image->getUsage() & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) != 0)
                hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
            // if ((image->getFlags() & VK_IMAGE_CREATE_PROTECTED_BIT) != 0)
            // hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;

            // Make sure we have at least one AHB GPU usage, even if the image doesn't have any
            // Vulkan usages corresponding to AHB GPU usages.
            if ((image->getUsage() & (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)) == 0)
                hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
        }
        else
        {
            hwbufferDesc.width  = static_cast<uint32_t>(pAllocInfo->allocationSize);
            hwbufferDesc.height = 1;
            hwbufferDesc.layers = 1;
            hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_BLOB;
            hwbufferDesc.usage  = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
        }

        AHardwareBuffer_allocate(&hwbufferDesc, &hwbuffer);
    }

    return hwbuffer;
}

class ExternalDeviceMemoryAndroid : public DeviceMemory
{
public:
    ExternalDeviceMemoryAndroid(VkDevice, const VkMemoryAllocateInfo *pAllocInfo)
        : m_hwbuffer(findOrCreateHwBuffer(pAllocInfo))
    {
    }
    virtual ~ExternalDeviceMemoryAndroid(void)
    {
        if (m_hwbuffer)
            AHardwareBuffer_release(m_hwbuffer);
    }

    virtual void *map(void) /*override*/
    {
        void *p;
        AHardwareBuffer_lock(m_hwbuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
                             -1, NULL, &p);
        return p;
    }

    virtual void unmap(void) /*override*/
    {
        AHardwareBuffer_unlock(m_hwbuffer, NULL);
    }

    AHardwareBuffer *getHwBuffer(void)
    {
        return m_hwbuffer;
    }

private:
    AHardwareBuffer *const m_hwbuffer;
};
#endif // defined(USE_ANDROID_O_HARDWARE_BUFFER)

#endif // CTS_USES_VULKANSC

class DeferredOperationKHR
{
public:
    DeferredOperationKHR(VkDevice)
    {
    }
};

class CommandBuffer
{
public:
    CommandBuffer(VkDevice, VkCommandPool, VkCommandBufferLevel)
    {
    }
};

class CommandPool
{
public:
    CommandPool(VkDevice device, const VkCommandPoolCreateInfo *) : m_device(device)
    {
    }
#ifndef CTS_USES_VULKANSC
    ~CommandPool(void);
#endif // CTS_USES_VULKANSC

    VkCommandBuffer allocate(VkCommandBufferLevel level);
    void free(VkCommandBuffer buffer);

private:
    const VkDevice m_device;

    vector<CommandBuffer *> m_buffers;
};

#ifndef CTS_USES_VULKANSC

CommandPool::~CommandPool(void)
{
    for (size_t ndx = 0; ndx < m_buffers.size(); ++ndx)
        delete m_buffers[ndx];
}

#endif // CTS_USES_VULKANSC

VkCommandBuffer CommandPool::allocate(VkCommandBufferLevel level)
{
    CommandBuffer *const impl = new CommandBuffer(m_device, VkCommandPool(reinterpret_cast<uintptr_t>(this)), level);

    try
    {
        m_buffers.push_back(impl);
    }
    catch (...)
    {
        delete impl;
        throw;
    }

    return reinterpret_cast<VkCommandBuffer>(impl);
}

void CommandPool::free(VkCommandBuffer buffer)
{
    CommandBuffer *const impl = reinterpret_cast<CommandBuffer *>(buffer);

    for (size_t ndx = 0; ndx < m_buffers.size(); ++ndx)
    {
        if (m_buffers[ndx] == impl)
        {
            std::swap(m_buffers[ndx], m_buffers.back());
            m_buffers.pop_back();
            delete impl;
            return;
        }
    }

    DE_FATAL("VkCommandBuffer not owned by VkCommandPool");
}
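
// The pool retains ownership of every command buffer it hands out: allocate()
// records the new CommandBuffer in m_buffers, free() removes it with a
// swap-and-pop, and (outside Vulkan SC) the destructor deletes anything still
// outstanding.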

class DescriptorSet
{
public:
    DescriptorSet(VkDevice, VkDescriptorPool, VkDescriptorSetLayout)
    {
    }
};

class DescriptorPool
{
public:
    DescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo)
        : m_device(device)
        , m_flags(pCreateInfo->flags)
    {
    }
    ~DescriptorPool(void)
    {
        reset();
    }

    VkDescriptorSet allocate(VkDescriptorSetLayout setLayout);
    void free(VkDescriptorSet set);

    void reset(void);

private:
    const VkDevice m_device;
    const VkDescriptorPoolCreateFlags m_flags;

    vector<DescriptorSet *> m_managedSets;
};

VkDescriptorSet DescriptorPool::allocate(VkDescriptorSetLayout setLayout)
{
    DescriptorSet *const impl =
        new DescriptorSet(m_device, VkDescriptorPool(reinterpret_cast<uintptr_t>(this)), setLayout);

    try
    {
        m_managedSets.push_back(impl);
    }
    catch (...)
    {
        delete impl;
        throw;
    }

    return VkDescriptorSet(reinterpret_cast<uintptr_t>(impl));
}

void DescriptorPool::free(VkDescriptorSet set)
{
    DescriptorSet *const impl = reinterpret_cast<DescriptorSet *>((uintptr_t)set.getInternal());

    DE_ASSERT(m_flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT);
    DE_UNREF(m_flags);

    for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
    {
        if (m_managedSets[ndx] == impl)
        {
            std::swap(m_managedSets[ndx], m_managedSets.back());
            m_managedSets.pop_back();
            delete impl;
            return;
        }
    }

    DE_FATAL("VkDescriptorSet not owned by VkDescriptorPool");
}

void DescriptorPool::reset(void)
{
    for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
        delete m_managedSets[ndx];
    m_managedSets.clear();
}
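
// DescriptorPool follows the same ownership model as CommandPool above; freeing an
// individual set additionally requires the pool to have been created with
// VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, which free() asserts.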

// API implementation

extern "C"
{

    VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getDeviceProcAddr(VkDevice device, const char *pName)
    {
        return reinterpret_cast<Device *>(device)->getProcAddr(pName);
    }

    VKAPI_ATTR VkResult VKAPI_CALL createGraphicsPipelines(VkDevice device, VkPipelineCache, uint32_t count,
                                                           const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                           const VkAllocationCallbacks *pAllocator,
                                                           VkPipeline *pPipelines)
    {
        uint32_t allocNdx;
        try
        {
            for (allocNdx = 0; allocNdx < count; allocNdx++)
                pPipelines[allocNdx] =
                    allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos + allocNdx, pAllocator);

            return VK_SUCCESS;
        }
        catch (const std::bad_alloc &)
        {
            for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
                freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }
        catch (VkResult err)
        {
            for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
                freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

            return err;
        }
    }

    VKAPI_ATTR VkResult VKAPI_CALL createComputePipelines(VkDevice device, VkPipelineCache, uint32_t count,
                                                          const VkComputePipelineCreateInfo *pCreateInfos,
                                                          const VkAllocationCallbacks *pAllocator,
                                                          VkPipeline *pPipelines)
    {
        uint32_t allocNdx;
        try
        {
            for (allocNdx = 0; allocNdx < count; allocNdx++)
                pPipelines[allocNdx] =
                    allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos + allocNdx, pAllocator);

            return VK_SUCCESS;
        }
        catch (const std::bad_alloc &)
        {
            for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
                freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }
        catch (VkResult err)
        {
            for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
                freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

            return err;
        }
    }

#ifndef CTS_USES_VULKANSC

    VKAPI_ATTR VkResult VKAPI_CALL createRayTracingPipelinesNV(VkDevice device, VkPipelineCache, uint32_t count,
                                                               const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
                                                               const VkAllocationCallbacks *pAllocator,
                                                               VkPipeline *pPipelines)
    {
        uint32_t allocNdx;
        try
        {
            for (allocNdx = 0; allocNdx < count; allocNdx++)
                pPipelines[allocNdx] =
                    allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos + allocNdx, pAllocator);

            return VK_SUCCESS;
        }
        catch (const std::bad_alloc &)
        {
            for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
                freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }
        catch (VkResult err)
        {
            for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
                freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

            return err;
        }
    }

    VKAPI_ATTR VkResult VKAPI_CALL createRayTracingPipelinesKHR(VkDevice device, VkPipelineCache, uint32_t count,
                                                                const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
                                                                const VkAllocationCallbacks *pAllocator,
                                                                VkPipeline *pPipelines)
    {
        uint32_t allocNdx;
        try
        {
            for (allocNdx = 0; allocNdx < count; allocNdx++)
                pPipelines[allocNdx] =
                    allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos + allocNdx, pAllocator);

            return VK_SUCCESS;
        }
        catch (const std::bad_alloc &)
        {
            for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
                freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }
        catch (VkResult err)
        {
            for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
                freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

            return err;
        }
    }

    VKAPI_ATTR VkResult VKAPI_CALL createShadersEXT(VkDevice device, uint32_t createInfoCount,
                                                    const VkShaderCreateInfoEXT *pCreateInfos,
                                                    const VkAllocationCallbacks *pAllocator, VkShaderEXT *pShaders)
    {
        uint32_t allocNdx;
        try
        {
            for (allocNdx = 0; allocNdx < createInfoCount; allocNdx++)
                pShaders[allocNdx] =
                    allocateNonDispHandle<ShaderEXT, VkShaderEXT>(device, pCreateInfos + allocNdx, pAllocator);
            return VK_SUCCESS;
        }
        catch (const std::bad_alloc &)
        {
            for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
                freeNonDispHandle<ShaderEXT, VkShaderEXT>(pShaders[freeNdx], pAllocator);
            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }
        catch (VkResult err)
        {
            for (uint32_t freeNdx = 0; freeNdx < allocNdx; freeNdx++)
                freeNonDispHandle<ShaderEXT, VkShaderEXT>(pShaders[freeNdx], pAllocator);
            return err;
        }
    }

#endif // CTS_USES_VULKANSC

    VKAPI_ATTR VkResult VKAPI_CALL enumeratePhysicalDevices(VkInstance, uint32_t *pPhysicalDeviceCount,
                                                            VkPhysicalDevice *pDevices)
    {
        if (pDevices && *pPhysicalDeviceCount >= 1u)
            *pDevices = reinterpret_cast<VkPhysicalDevice>((void *)(uintptr_t)1u);

        *pPhysicalDeviceCount = 1;

        return VK_SUCCESS;
    }

    VkResult enumerateExtensions(uint32_t numExtensions, const VkExtensionProperties *extensions,
                                 uint32_t *pPropertyCount, VkExtensionProperties *pProperties)
    {
        const uint32_t dstSize = pPropertyCount ? *pPropertyCount : 0;

        if (pPropertyCount)
            *pPropertyCount = numExtensions;

        if (pProperties)
        {
            for (uint32_t ndx = 0; ndx < de::min(numExtensions, dstSize); ++ndx)
                pProperties[ndx] = extensions[ndx];

            if (dstSize < numExtensions)
                return VK_INCOMPLETE;
        }

        return VK_SUCCESS;
    }
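
    // enumerateExtensions() implements the standard Vulkan two-call enumeration
    // idiom used by the entry points below: query with pProperties == NULL to get
    // the count, then call again with a buffer of at least that size; a too-small
    // buffer yields VK_INCOMPLETE. A hypothetical caller:
    //
    //   uint32_t count = 0;
    //   enumerateInstanceExtensionProperties(DE_NULL, &count, DE_NULL);
    //   std::vector<VkExtensionProperties> props(count);
    //   enumerateInstanceExtensionProperties(DE_NULL, &count, props.data());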

    VKAPI_ATTR VkResult VKAPI_CALL enumerateInstanceExtensionProperties(const char *pLayerName,
                                                                        uint32_t *pPropertyCount,
                                                                        VkExtensionProperties *pProperties)
    {
        static const VkExtensionProperties s_extensions[] = {
            {"VK_KHR_get_physical_device_properties2", 1u},
            {"VK_KHR_external_memory_capabilities", 1u},
        };

        if (!pLayerName)
            return enumerateExtensions((uint32_t)DE_LENGTH_OF_ARRAY(s_extensions), s_extensions, pPropertyCount,
                                       pProperties);
        else
            return enumerateExtensions(0, DE_NULL, pPropertyCount, pProperties);
    }

    VKAPI_ATTR VkResult VKAPI_CALL enumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
                                                                      const char *pLayerName, uint32_t *pPropertyCount,
                                                                      VkExtensionProperties *pProperties)
    {
        DE_UNREF(physicalDevice);

        static const VkExtensionProperties s_extensions[] = {
            {"VK_KHR_bind_memory2", 1u},
            {"VK_KHR_external_memory", 1u},
            {"VK_KHR_get_memory_requirements2", 1u},
            {"VK_KHR_maintenance1", 1u},
            {"VK_KHR_sampler_ycbcr_conversion", 1u},
#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
            {"VK_ANDROID_external_memory_android_hardware_buffer", 1u},
#endif
        };

        if (!pLayerName)
            return enumerateExtensions((uint32_t)DE_LENGTH_OF_ARRAY(s_extensions), s_extensions, pPropertyCount,
                                       pProperties);
        else
            return enumerateExtensions(0, DE_NULL, pPropertyCount, pProperties);
    }

    VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
                                                         VkPhysicalDeviceFeatures *pFeatures)
    {
        DE_UNREF(physicalDevice);

        // Enable all features to allow as many tests as possible to run
1062         pFeatures->robustBufferAccess                      = VK_TRUE;
1063         pFeatures->fullDrawIndexUint32                     = VK_TRUE;
1064         pFeatures->imageCubeArray                          = VK_TRUE;
1065         pFeatures->independentBlend                        = VK_TRUE;
1066         pFeatures->geometryShader                          = VK_TRUE;
1067         pFeatures->tessellationShader                      = VK_TRUE;
1068         pFeatures->sampleRateShading                       = VK_TRUE;
1069         pFeatures->dualSrcBlend                            = VK_TRUE;
1070         pFeatures->logicOp                                 = VK_TRUE;
1071         pFeatures->multiDrawIndirect                       = VK_TRUE;
1072         pFeatures->drawIndirectFirstInstance               = VK_TRUE;
1073         pFeatures->depthClamp                              = VK_TRUE;
1074         pFeatures->depthBiasClamp                          = VK_TRUE;
1075         pFeatures->fillModeNonSolid                        = VK_TRUE;
1076         pFeatures->depthBounds                             = VK_TRUE;
1077         pFeatures->wideLines                               = VK_TRUE;
1078         pFeatures->largePoints                             = VK_TRUE;
1079         pFeatures->alphaToOne                              = VK_TRUE;
1080         pFeatures->multiViewport                           = VK_TRUE;
1081         pFeatures->samplerAnisotropy                       = VK_TRUE;
1082         pFeatures->textureCompressionETC2                  = VK_TRUE;
1083         pFeatures->textureCompressionASTC_LDR              = VK_TRUE;
1084         pFeatures->textureCompressionBC                    = VK_TRUE;
1085         pFeatures->occlusionQueryPrecise                   = VK_TRUE;
1086         pFeatures->pipelineStatisticsQuery                 = VK_TRUE;
1087         pFeatures->vertexPipelineStoresAndAtomics          = VK_TRUE;
1088         pFeatures->fragmentStoresAndAtomics                = VK_TRUE;
1089         pFeatures->shaderTessellationAndGeometryPointSize  = VK_TRUE;
1090         pFeatures->shaderImageGatherExtended               = VK_TRUE;
1091         pFeatures->shaderStorageImageExtendedFormats       = VK_TRUE;
1092         pFeatures->shaderStorageImageMultisample           = VK_TRUE;
1093         pFeatures->shaderStorageImageReadWithoutFormat     = VK_TRUE;
1094         pFeatures->shaderStorageImageWriteWithoutFormat    = VK_TRUE;
1095         pFeatures->shaderUniformBufferArrayDynamicIndexing = VK_TRUE;
1096         pFeatures->shaderSampledImageArrayDynamicIndexing  = VK_TRUE;
1097         pFeatures->shaderStorageBufferArrayDynamicIndexing = VK_TRUE;
1098         pFeatures->shaderStorageImageArrayDynamicIndexing  = VK_TRUE;
1099         pFeatures->shaderClipDistance                      = VK_TRUE;
1100         pFeatures->shaderCullDistance                      = VK_TRUE;
1101         pFeatures->shaderFloat64                           = VK_TRUE;
1102         pFeatures->shaderInt64                             = VK_TRUE;
1103         pFeatures->shaderInt16                             = VK_TRUE;
1104         pFeatures->shaderResourceResidency                 = VK_TRUE;
1105         pFeatures->shaderResourceMinLod                    = VK_TRUE;
1106         pFeatures->sparseBinding                           = VK_TRUE;
1107         pFeatures->sparseResidencyBuffer                   = VK_TRUE;
1108         pFeatures->sparseResidencyImage2D                  = VK_TRUE;
1109         pFeatures->sparseResidencyImage3D                  = VK_TRUE;
1110         pFeatures->sparseResidency2Samples                 = VK_TRUE;
1111         pFeatures->sparseResidency4Samples                 = VK_TRUE;
1112         pFeatures->sparseResidency8Samples                 = VK_TRUE;
1113         pFeatures->sparseResidency16Samples                = VK_TRUE;
1114         pFeatures->sparseResidencyAliased                  = VK_TRUE;
1115         pFeatures->variableMultisampleRate                 = VK_TRUE;
1116         pFeatures->inheritedQueries                        = VK_TRUE;
1117     }
1118 
getPhysicalDeviceProperties(VkPhysicalDevice,VkPhysicalDeviceProperties * props)1119     VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceProperties(VkPhysicalDevice, VkPhysicalDeviceProperties *props)
1120     {
1121         deMemset(props, 0, sizeof(VkPhysicalDeviceProperties));
1122 
1123         props->apiVersion    = VK_API_VERSION_1_1;
1124         props->driverVersion = 1u;
1125         props->deviceType    = VK_PHYSICAL_DEVICE_TYPE_OTHER;
1126 
1127         deMemcpy(props->deviceName, "null", 5);
1128 
1129         // Spec minmax
1130         props->limits.maxImageDimension1D                             = 4096;
1131         props->limits.maxImageDimension2D                             = 4096;
1132         props->limits.maxImageDimension3D                             = 256;
1133         props->limits.maxImageDimensionCube                           = 4096;
1134         props->limits.maxImageArrayLayers                             = 256;
1135         props->limits.maxTexelBufferElements                          = 65536;
1136         props->limits.maxUniformBufferRange                           = 16384;
1137         props->limits.maxStorageBufferRange                           = 1u << 27;
1138         props->limits.maxPushConstantsSize                            = 128;
1139         props->limits.maxMemoryAllocationCount                        = 4096;
1140         props->limits.maxSamplerAllocationCount                       = 4000;
1141         props->limits.bufferImageGranularity                          = 131072;
1142         props->limits.sparseAddressSpaceSize                          = 1u << 31;
1143         props->limits.maxBoundDescriptorSets                          = 4;
1144         props->limits.maxPerStageDescriptorSamplers                   = 16;
1145         props->limits.maxPerStageDescriptorUniformBuffers             = 12;
1146         props->limits.maxPerStageDescriptorStorageBuffers             = 4;
1147         props->limits.maxPerStageDescriptorSampledImages              = 16;
1148         props->limits.maxPerStageDescriptorStorageImages              = 4;
1149         props->limits.maxPerStageDescriptorInputAttachments           = 4;
1150         props->limits.maxPerStageResources                            = 128;
1151         props->limits.maxDescriptorSetSamplers                        = 96;
1152         props->limits.maxDescriptorSetUniformBuffers                  = 72;
1153         props->limits.maxDescriptorSetUniformBuffersDynamic           = 8;
1154         props->limits.maxDescriptorSetStorageBuffers                  = 24;
1155         props->limits.maxDescriptorSetStorageBuffersDynamic           = 4;
1156         props->limits.maxDescriptorSetSampledImages                   = 96;
1157         props->limits.maxDescriptorSetStorageImages                   = 24;
1158         props->limits.maxDescriptorSetInputAttachments                = 4;
1159         props->limits.maxVertexInputAttributes                        = 16;
1160         props->limits.maxVertexInputBindings                          = 16;
1161         props->limits.maxVertexInputAttributeOffset                   = 2047;
1162         props->limits.maxVertexInputBindingStride                     = 2048;
1163         props->limits.maxVertexOutputComponents                       = 64;
1164         props->limits.maxTessellationGenerationLevel                  = 64;
1165         props->limits.maxTessellationPatchSize                        = 32;
1166         props->limits.maxTessellationControlPerVertexInputComponents  = 64;
1167         props->limits.maxTessellationControlPerVertexOutputComponents = 64;
1168         props->limits.maxTessellationControlPerPatchOutputComponents  = 120;
1169         props->limits.maxTessellationControlTotalOutputComponents     = 2048;
1170         props->limits.maxTessellationEvaluationInputComponents        = 64;
1171         props->limits.maxTessellationEvaluationOutputComponents       = 64;
1172         props->limits.maxGeometryShaderInvocations                    = 32;
1173         props->limits.maxGeometryInputComponents                      = 64;
1174         props->limits.maxGeometryOutputComponents                     = 64;
1175         props->limits.maxGeometryOutputVertices                       = 256;
1176         props->limits.maxGeometryTotalOutputComponents                = 1024;
1177         props->limits.maxFragmentInputComponents                      = 64;
1178         props->limits.maxFragmentOutputAttachments                    = 4;
1179         props->limits.maxFragmentDualSrcAttachments                   = 1;
1180         props->limits.maxFragmentCombinedOutputResources              = 4;
1181         props->limits.maxComputeSharedMemorySize                      = 16384;
1182         props->limits.maxComputeWorkGroupCount[0]                     = 65535;
1183         props->limits.maxComputeWorkGroupCount[1]                     = 65535;
1184         props->limits.maxComputeWorkGroupCount[2]                     = 65535;
1185         props->limits.maxComputeWorkGroupInvocations                  = 128;
1186         props->limits.maxComputeWorkGroupSize[0]                      = 128;
1187         props->limits.maxComputeWorkGroupSize[1]                      = 128;
1188         props->limits.maxComputeWorkGroupSize[2]                      = 128;
1189         props->limits.subPixelPrecisionBits                           = 4;
1190         props->limits.subTexelPrecisionBits                           = 4;
1191         props->limits.mipmapPrecisionBits                             = 4;
1192         props->limits.maxDrawIndexedIndexValue                        = 0xffffffffu;
1193         props->limits.maxDrawIndirectCount                            = (1u << 16) - 1u;
1194         props->limits.maxSamplerLodBias                               = 2.0f;
1195         props->limits.maxSamplerAnisotropy                            = 16.0f;
1196         props->limits.maxViewports                                    = 16;
1197         props->limits.maxViewportDimensions[0]                        = 4096;
1198         props->limits.maxViewportDimensions[1]                        = 4096;
1199         props->limits.viewportBoundsRange[0]                          = -8192.f;
1200         props->limits.viewportBoundsRange[1]                          = 8191.f;
1201         props->limits.viewportSubPixelBits                            = 0;
1202         props->limits.minMemoryMapAlignment                           = 64;
1203         props->limits.minTexelBufferOffsetAlignment                   = 256;
1204         props->limits.minUniformBufferOffsetAlignment                 = 256;
1205         props->limits.minStorageBufferOffsetAlignment                 = 256;
1206         props->limits.minTexelOffset                                  = -8;
1207         props->limits.maxTexelOffset                                  = 7;
1208         props->limits.minTexelGatherOffset                            = -8;
1209         props->limits.maxTexelGatherOffset                            = 7;
1210         props->limits.minInterpolationOffset                          = -0.5f;
1211         props->limits.maxInterpolationOffset                          = 0.5f; // -1ulp
1212         props->limits.subPixelInterpolationOffsetBits                 = 4;
1213         props->limits.maxFramebufferWidth                             = 4096;
1214         props->limits.maxFramebufferHeight                            = 4096;
1215         props->limits.maxFramebufferLayers                            = 256;
1216         props->limits.framebufferColorSampleCounts                    = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
1217         props->limits.framebufferDepthSampleCounts                    = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
1218         props->limits.framebufferStencilSampleCounts                  = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
1219         props->limits.framebufferNoAttachmentsSampleCounts            = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
1220         props->limits.maxColorAttachments                             = 4;
1221         props->limits.sampledImageColorSampleCounts                   = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
1222         props->limits.sampledImageIntegerSampleCounts                 = VK_SAMPLE_COUNT_1_BIT;
1223         props->limits.sampledImageDepthSampleCounts                   = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
1224         props->limits.sampledImageStencilSampleCounts                 = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
1225         props->limits.storageImageSampleCounts                        = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
1226         props->limits.maxSampleMaskWords                              = 1;
1227         props->limits.timestampComputeAndGraphics                     = VK_TRUE;
1228         props->limits.timestampPeriod                                 = 1.0f;
1229         props->limits.maxClipDistances                                = 8;
1230         props->limits.maxCullDistances                                = 8;
1231         props->limits.maxCombinedClipAndCullDistances                 = 8;
1232         props->limits.discreteQueuePriorities                         = 2;
1233         props->limits.pointSizeRange[0]                               = 1.0f;
1234         props->limits.pointSizeRange[1]                               = 64.0f; // -1ulp
1235         props->limits.lineWidthRange[0]                               = 1.0f;
1236         props->limits.lineWidthRange[1]                               = 8.0f; // -1ulp
1237         props->limits.pointSizeGranularity                            = 1.0f;
1238         props->limits.lineWidthGranularity                            = 1.0f;
1239         props->limits.strictLines                                     = 0;
1240         props->limits.standardSampleLocations                         = VK_TRUE;
1241         props->limits.optimalBufferCopyOffsetAlignment                = 256;
1242         props->limits.optimalBufferCopyRowPitchAlignment              = 256;
1243         props->limits.nonCoherentAtomSize                             = 128;
1244     }
1245 
1246     VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice, uint32_t *count,
1247                                                                       VkQueueFamilyProperties *props)
1248     {
1249         if (props && *count >= 1u)
1250         {
1251             deMemset(props, 0, sizeof(VkQueueFamilyProperties));
1252 
1253             props->queueCount         = 4u;
1254             props->queueFlags         = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
1255             props->timestampValidBits = 64;
1256         }
1257 
1258         *count = 1u;
1259     }
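    // \note Callers use the standard Vulkan two-call idiom against this stub, which always
    //       reports exactly one graphics+compute queue family. Illustrative sketch only, not
    //       part of the driver ('physicalDevice' assumed to come from the null instance):
    //
    //           uint32_t familyCount = 0u;
    //           vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice, &familyCount, DE_NULL);
    //           std::vector<VkQueueFamilyProperties> families(familyCount);
    //           vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice, &familyCount, families.data());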
1260 
1261     VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceMemoryProperties(VkPhysicalDevice,
1262                                                                  VkPhysicalDeviceMemoryProperties *props)
1263     {
1264         deMemset(props, 0, sizeof(VkPhysicalDeviceMemoryProperties));
1265 
1266         props->memoryTypeCount              = 1u;
1267         props->memoryTypes[0].heapIndex     = 0u;
1268         props->memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
1269                                               VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
1270                                               VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1271 
1272         props->memoryHeapCount      = 1u;
1273         props->memoryHeaps[0].size  = 1ull << 31;
1274         props->memoryHeaps[0].flags = 0u;
1275     }
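    // \note A single memory type (index 0, host-visible | host-coherent | device-local) in one
    //       2 GiB heap is advertised, so the canonical type-selection loop always resolves to
    //       index 0. Illustrative sketch only, not part of the driver:
    //
    //           uint32_t chooseMemoryType (const VkPhysicalDeviceMemoryProperties &memProps,
    //                                      uint32_t typeBits, VkMemoryPropertyFlags wanted)
    //           {
    //               for (uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    //                   if ((typeBits & (1u << i)) != 0 && (memProps.memoryTypes[i].propertyFlags & wanted) == wanted)
    //                       return i;
    //               return ~0u; // no compatible type
    //           }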
1276 
1277     VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFormatProperties(VkPhysicalDevice, VkFormat format,
1278                                                                  VkFormatProperties *pFormatProperties)
1279     {
1280         const VkFormatFeatureFlags allFeatures =
1281             VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT | VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT |
1282             VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT | VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT |
1283             VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT | VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT |
1284             VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT | VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT |
1285             VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT | VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT |
1286             VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT |
1287             VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT | VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT |
1288             VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT |
1289             VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT |
1290             VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT |
1291             VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT |
1292             VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT;
1293 
1294         pFormatProperties->linearTilingFeatures  = allFeatures;
1295         pFormatProperties->optimalTilingFeatures = allFeatures;
1296         pFormatProperties->bufferFeatures        = allFeatures;
1297 
1298         if (isYCbCrFormat(format) && getPlaneCount(format) > 1)
1299             pFormatProperties->optimalTilingFeatures |= VK_FORMAT_FEATURE_DISJOINT_BIT;
1300     }
1301 
1302     VKAPI_ATTR VkResult VKAPI_CALL getPhysicalDeviceImageFormatProperties(
1303         VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling,
1304         VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties *pImageFormatProperties)
1305     {
1306         DE_UNREF(physicalDevice);
1307         DE_UNREF(format);
1308         DE_UNREF(type);
1309         DE_UNREF(tiling);
1310         DE_UNREF(usage);
1311         DE_UNREF(flags);
1312 
1313         pImageFormatProperties->maxArrayLayers   = 8;
1314         pImageFormatProperties->maxExtent.width  = 4096;
1315         pImageFormatProperties->maxExtent.height = 4096;
1316         pImageFormatProperties->maxExtent.depth  = 4096;
1317         pImageFormatProperties->maxMipLevels     = deLog2Ceil32(4096) + 1;
1318         pImageFormatProperties->maxResourceSize  = 64u * 1024u * 1024u;
1319         pImageFormatProperties->sampleCounts     = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
1320 
1321         return VK_SUCCESS;
1322     }
1323 
1324     VKAPI_ATTR void VKAPI_CALL getDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
1325                                               VkQueue *pQueue)
1326     {
1327         DE_UNREF(device);
1328         DE_UNREF(queueFamilyIndex);
1329 
1330         if (pQueue)
1331             *pQueue = reinterpret_cast<VkQueue>((uint64_t)queueIndex + 1);
1332     }
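    // \note Queue handles are synthesized as (queueIndex + 1) so that a valid queue is never a
    //       null handle; there is no per-queue state behind them. For example (illustrative
    //       only), a caller doing
    //
    //           VkQueue queue = VK_NULL_HANDLE;
    //           vkGetDeviceQueue(device, 0u, 2u, &queue);
    //
    //       receives the opaque value 3.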
1333 
1334     VKAPI_ATTR void VKAPI_CALL getBufferMemoryRequirements(VkDevice, VkBuffer bufferHandle,
1335                                                            VkMemoryRequirements *requirements)
1336     {
1337         const Buffer *buffer = reinterpret_cast<const Buffer *>(bufferHandle.getInternal());
1338 
1339         requirements->memoryTypeBits = 1u;
1340         requirements->size           = buffer->getSize();
1341         requirements->alignment      = (VkDeviceSize)1u;
1342     }
1343 
1344     VkDeviceSize getPackedImageDataSize(VkFormat format, VkExtent3D extent, VkSampleCountFlagBits samples)
1345     {
1346         return (VkDeviceSize)getPixelSize(mapVkFormat(format)) * (VkDeviceSize)extent.width *
1347                (VkDeviceSize)extent.height * (VkDeviceSize)extent.depth * (VkDeviceSize)samples;
1348     }
1349 
1350     VkDeviceSize getCompressedImageDataSize(VkFormat format, VkExtent3D extent)
1351     {
1352         try
1353         {
1354             const tcu::CompressedTexFormat tcuFormat = mapVkCompressedFormat(format);
1355             const size_t blockSize                   = tcu::getBlockSize(tcuFormat);
1356             const tcu::IVec3 blockPixelSize          = tcu::getBlockPixelSize(tcuFormat);
1357             const int numBlocksX                     = deDivRoundUp32((int)extent.width, blockPixelSize.x());
1358             const int numBlocksY                     = deDivRoundUp32((int)extent.height, blockPixelSize.y());
1359             const int numBlocksZ                     = deDivRoundUp32((int)extent.depth, blockPixelSize.z());
1360 
1361             return blockSize * numBlocksX * numBlocksY * numBlocksZ;
1362         }
1363         catch (...)
1364         {
1365             return 0; // Unsupported compressed format
1366         }
1367     }
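    // \note Worked example (illustrative only): a 128x128 VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK
    //       image uses 4x4-texel blocks of 8 bytes each, so 32 * 32 * 1 blocks * 8 bytes =
    //       8192 bytes; the exact per-format values come from tcu::getBlockSize() and
    //       tcu::getBlockPixelSize().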
1368 
1369     VkDeviceSize getYCbCrImageDataSize(VkFormat format, VkExtent3D extent)
1370     {
1371         const PlanarFormatDescription desc = getPlanarFormatDescription(format);
1372         VkDeviceSize totalSize             = 0;
1373 
1374         DE_ASSERT(extent.depth == 1);
1375 
1376         for (uint32_t planeNdx = 0; planeNdx < desc.numPlanes; ++planeNdx)
1377         {
1378             const uint32_t elementSize = desc.planes[planeNdx].elementSizeBytes;
1379 
1380             totalSize = (VkDeviceSize)deAlign64((int64_t)totalSize, elementSize);
1381             totalSize += getPlaneSizeInBytes(desc, extent, planeNdx, 0, BUFFER_IMAGE_COPY_OFFSET_GRANULARITY);
1382         }
1383 
1384         return totalSize;
1385     }
1386 
1387     VKAPI_ATTR void VKAPI_CALL getImageMemoryRequirements(VkDevice, VkImage imageHandle,
1388                                                           VkMemoryRequirements *requirements)
1389     {
1390         const Image *image = reinterpret_cast<const Image *>(imageHandle.getInternal());
1391 
1392         requirements->memoryTypeBits = 1u;
1393         requirements->alignment      = 16u;
1394 
1395         if (isCompressedFormat(image->getFormat()))
1396             requirements->size = getCompressedImageDataSize(image->getFormat(), image->getExtent());
1397         else if (isYCbCrFormat(image->getFormat()))
1398             requirements->size = getYCbCrImageDataSize(image->getFormat(), image->getExtent());
1399         else
1400             requirements->size = getPackedImageDataSize(image->getFormat(), image->getExtent(), image->getSamples());
1401     }
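    // \note Since memoryTypeBits is always 1u, memoryTypeIndex 0 is the only valid choice when
    //       backing the image. Illustrative sketch only, not part of the driver ('device' and
    //       'image' assumed):
    //
    //           VkMemoryRequirements reqs;
    //           vkGetImageMemoryRequirements(device, image, &reqs);
    //           const VkMemoryAllocateInfo allocInfo =
    //           {
    //               VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
    //               DE_NULL,
    //               reqs.size, // allocationSize
    //               0u,        // memoryTypeIndex
    //           };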
1402 
1403     VKAPI_ATTR VkResult VKAPI_CALL allocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
1404                                                   const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory)
1405     {
1406 #ifndef CTS_USES_VULKANSC
1407         const VkExportMemoryAllocateInfo *const exportInfo =
1408             findStructure<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
1409         const VkImportAndroidHardwareBufferInfoANDROID *const importInfo =
1410             findStructure<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);
1411 
1412         if ((exportInfo &&
1413              (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0) ||
1414             (importInfo && importInfo->buffer.internal))
1415         {
1416 #if defined(USE_ANDROID_O_HARDWARE_BUFFER)
1417             VK_NULL_RETURN((*pMemory = allocateNonDispHandle<ExternalDeviceMemoryAndroid, DeviceMemory, VkDeviceMemory>(
1418                                 device, pAllocateInfo, pAllocator)));
1419 #else
1420             return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1421 #endif
1422         }
1423         else
1424         {
1425             VK_NULL_RETURN((*pMemory = allocateNonDispHandle<PrivateDeviceMemory, DeviceMemory, VkDeviceMemory>(
1426                                 device, pAllocateInfo, pAllocator)));
1427         }
1428 #else  // CTS_USES_VULKANSC
1429         VK_NULL_RETURN((*pMemory = allocateNonDispHandle<PrivateDeviceMemory, DeviceMemory, VkDeviceMemory>(
1430                             device, pAllocateInfo, pAllocator)));
1431 #endif // CTS_USES_VULKANSC
1432     }
1433 
1434     VKAPI_ATTR VkResult VKAPI_CALL mapMemory(VkDevice, VkDeviceMemory memHandle, VkDeviceSize offset, VkDeviceSize size,
1435                                              VkMemoryMapFlags flags, void **ppData)
1436     {
1437         DeviceMemory *const memory = reinterpret_cast<DeviceMemory *>(memHandle.getInternal());
1438 
1439         DE_UNREF(size);
1440         DE_UNREF(flags);
1441 
1442         *ppData = (uint8_t *)memory->map() + offset;
1443 
1444         return VK_SUCCESS;
1445     }
1446 
1447     VKAPI_ATTR void VKAPI_CALL unmapMemory(VkDevice device, VkDeviceMemory memHandle)
1448     {
1449         DeviceMemory *const memory = reinterpret_cast<DeviceMemory *>(memHandle.getInternal());
1450 
1451         DE_UNREF(device);
1452 
1453         memory->unmap();
1454     }
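    // \note DeviceMemory::map() hands back a plain host pointer, so the usual map/write/unmap
    //       sequence works unchanged. Illustrative sketch only, not part of the driver
    //       ('memory' and 'allocationSize' assumed from a prior vkAllocateMemory call):
    //
    //           void *hostPtr = DE_NULL;
    //           VK_CHECK(vkMapMemory(device, memory, 0ull, VK_WHOLE_SIZE, 0u, &hostPtr));
    //           deMemset(hostPtr, 0, (size_t)allocationSize);
    //           vkUnmapMemory(device, memory);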
1455 
1456 #ifndef CTS_USES_VULKANSC
1457 
1458     VKAPI_ATTR VkResult VKAPI_CALL
1459     getMemoryAndroidHardwareBufferANDROID(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
1460                                           pt::AndroidHardwareBufferPtr *pBuffer)
1461     {
1462         DE_UNREF(device);
1463 
1464 #if defined(USE_ANDROID_O_HARDWARE_BUFFER)
1465         DeviceMemory *const memory = reinterpret_cast<ExternalDeviceMemoryAndroid *>(pInfo->memory.getInternal());
1466         ExternalDeviceMemoryAndroid *const androidMemory = static_cast<ExternalDeviceMemoryAndroid *>(memory);
1467 
1468         AHardwareBuffer *hwbuffer = androidMemory->getHwBuffer();
1469         AHardwareBuffer_acquire(hwbuffer);
1470         pBuffer->internal = hwbuffer;
1471 #else
1472         DE_UNREF(pInfo);
1473         DE_UNREF(pBuffer);
1474 #endif
1475 
1476         return VK_SUCCESS;
1477     }
1478 
1479 #endif // CTS_USES_VULKANSC
1480 
1481     VKAPI_ATTR VkResult VKAPI_CALL allocateDescriptorSets(VkDevice, const VkDescriptorSetAllocateInfo *pAllocateInfo,
1482                                                           VkDescriptorSet *pDescriptorSets)
1483     {
1484         DescriptorPool *const poolImpl =
1485             reinterpret_cast<DescriptorPool *>((uintptr_t)pAllocateInfo->descriptorPool.getInternal());
1486 
1487         for (uint32_t ndx = 0; ndx < pAllocateInfo->descriptorSetCount; ++ndx)
1488         {
1489             try
1490             {
1491                 pDescriptorSets[ndx] = poolImpl->allocate(pAllocateInfo->pSetLayouts[ndx]);
1492             }
1493             catch (const std::bad_alloc &)
1494             {
1495                 for (uint32_t freeNdx = 0; freeNdx < ndx; freeNdx++)
1496                     delete reinterpret_cast<DescriptorSet *>((uintptr_t)pDescriptorSets[freeNdx].getInternal());
1497 
1498                 return VK_ERROR_OUT_OF_HOST_MEMORY;
1499             }
1500             catch (VkResult res)
1501             {
1502                 for (uint32_t freeNdx = 0; freeNdx < ndx; freeNdx++)
1503                     delete reinterpret_cast<DescriptorSet *>((uintptr_t)pDescriptorSets[freeNdx].getInternal());
1504 
1505                 return res;
1506             }
1507         }
1508 
1509         return VK_SUCCESS;
1510     }
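    // \note On failure the descriptor sets allocated so far are destroyed before the error is
    //       returned, so the caller must not use any entry of pDescriptorSets after a failed
    //       call.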
1511 
1512     VKAPI_ATTR void VKAPI_CALL freeDescriptorSets(VkDevice, VkDescriptorPool descriptorPool, uint32_t count,
1513                                                   const VkDescriptorSet *pDescriptorSets)
1514     {
1515         DescriptorPool *const poolImpl = reinterpret_cast<DescriptorPool *>((uintptr_t)descriptorPool.getInternal());
1516 
1517         for (uint32_t ndx = 0; ndx < count; ++ndx)
1518             poolImpl->free(pDescriptorSets[ndx]);
1519     }
1520 
1521     VKAPI_ATTR VkResult VKAPI_CALL resetDescriptorPool(VkDevice, VkDescriptorPool descriptorPool,
1522                                                        VkDescriptorPoolResetFlags)
1523     {
1524         DescriptorPool *const poolImpl = reinterpret_cast<DescriptorPool *>((uintptr_t)descriptorPool.getInternal());
1525 
1526         poolImpl->reset();
1527 
1528         return VK_SUCCESS;
1529     }
1530 
1531     VKAPI_ATTR VkResult VKAPI_CALL allocateCommandBuffers(VkDevice device,
1532                                                           const VkCommandBufferAllocateInfo *pAllocateInfo,
1533                                                           VkCommandBuffer *pCommandBuffers)
1534     {
1535         DE_UNREF(device);
1536 
1537         if (pAllocateInfo && pCommandBuffers)
1538         {
1539             CommandPool *const poolImpl =
1540                 reinterpret_cast<CommandPool *>((uintptr_t)pAllocateInfo->commandPool.getInternal());
1541 
1542             for (uint32_t ndx = 0; ndx < pAllocateInfo->commandBufferCount; ++ndx)
1543                 pCommandBuffers[ndx] = poolImpl->allocate(pAllocateInfo->level);
1544         }
1545 
1546         return VK_SUCCESS;
1547     }
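    // \note Command buffers are handed out and reclaimed by the owning CommandPool object.
    //       Illustrative allocation sketch only, not part of the driver ('commandPool' assumed
    //       to come from vkCreateCommandPool):
    //
    //           const VkCommandBufferAllocateInfo cmdBufInfo =
    //           {
    //               VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
    //               DE_NULL,
    //               commandPool,                     // commandPool
    //               VK_COMMAND_BUFFER_LEVEL_PRIMARY, // level
    //               1u,                              // commandBufferCount
    //           };
    //           VkCommandBuffer cmdBuf = VK_NULL_HANDLE;
    //           VK_CHECK(vkAllocateCommandBuffers(device, &cmdBufInfo, &cmdBuf));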
1548 
1549     VKAPI_ATTR void VKAPI_CALL freeCommandBuffers(VkDevice device, VkCommandPool commandPool,
1550                                                   uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers)
1551     {
1552         CommandPool *const poolImpl = reinterpret_cast<CommandPool *>((uintptr_t)commandPool.getInternal());
1553 
1554         DE_UNREF(device);
1555 
1556         for (uint32_t ndx = 0; ndx < commandBufferCount; ++ndx)
1557             poolImpl->free(pCommandBuffers[ndx]);
1558     }
1559 
1560     VKAPI_ATTR VkResult VKAPI_CALL createDisplayModeKHR(VkPhysicalDevice, VkDisplayKHR display,
1561                                                         const VkDisplayModeCreateInfoKHR *pCreateInfo,
1562                                                         const VkAllocationCallbacks *pAllocator,
1563                                                         VkDisplayModeKHR *pMode)
1564     {
1565         DE_UNREF(pAllocator);
1566         VK_NULL_RETURN(
1567             (*pMode = allocateNonDispHandle<DisplayModeKHR, VkDisplayModeKHR>(display, pCreateInfo, pAllocator)));
1568     }
1569 
1570     VKAPI_ATTR VkResult VKAPI_CALL createSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
1571                                                              const VkSwapchainCreateInfoKHR *pCreateInfos,
1572                                                              const VkAllocationCallbacks *pAllocator,
1573                                                              VkSwapchainKHR *pSwapchains)
1574     {
1575         for (uint32_t ndx = 0; ndx < swapchainCount; ++ndx)
1576         {
1577             pSwapchains[ndx] =
1578                 allocateNonDispHandle<SwapchainKHR, VkSwapchainKHR>(device, pCreateInfos + ndx, pAllocator);
1579         }
1580 
1581         return VK_SUCCESS;
1582     }
1583 
1584     VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceExternalBufferPropertiesKHR(
1585         VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo *pExternalBufferInfo,
1586         VkExternalBufferProperties *pExternalBufferProperties)
1587     {
1588         DE_UNREF(physicalDevice);
1589         DE_UNREF(pExternalBufferInfo);
1590 
1591         pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures        = 0;
1592         pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = 0;
1593         pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes         = 0;
1594 
1595 #ifndef CTS_USES_VULKANSC
1596         if (pExternalBufferInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
1597         {
1598             pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures =
1599                 VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT | VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
1600             pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes =
1601                 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
1602             pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes =
1603                 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
1604         }
1605 #endif // CTS_USES_VULKANSC
1606     }
1607 
1608     VKAPI_ATTR VkResult VKAPI_CALL getPhysicalDeviceImageFormatProperties2KHR(
1609         VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
1610         VkImageFormatProperties2 *pImageFormatProperties)
1611     {
1612 #ifndef CTS_USES_VULKANSC
1613         const VkPhysicalDeviceExternalImageFormatInfo *const externalInfo =
1614             findStructure<VkPhysicalDeviceExternalImageFormatInfo>(pImageFormatInfo->pNext);
1615         VkExternalImageFormatProperties *const externalProperties =
1616             findStructure<VkExternalImageFormatProperties>(pImageFormatProperties->pNext);
1617         VkResult result;
1618 
1619         result = getPhysicalDeviceImageFormatProperties(
1620             physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling,
1621             pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
1622         if (result != VK_SUCCESS)
1623             return result;
1624 
1625         if (externalInfo && externalInfo->handleType != 0)
1626         {
1627             if (externalInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
1628                 return VK_ERROR_FORMAT_NOT_SUPPORTED;
1629 
1630             if (!(pImageFormatInfo->format == VK_FORMAT_R8G8B8A8_UNORM ||
1631                   pImageFormatInfo->format == VK_FORMAT_R8G8B8_UNORM ||
1632                   pImageFormatInfo->format == VK_FORMAT_R5G6B5_UNORM_PACK16 ||
1633                   pImageFormatInfo->format == VK_FORMAT_R16G16B16A16_SFLOAT ||
1634                   pImageFormatInfo->format == VK_FORMAT_A2R10G10B10_UNORM_PACK32))
1635             {
1636                 return VK_ERROR_FORMAT_NOT_SUPPORTED;
1637             }
1638 
1639             if (pImageFormatInfo->type != VK_IMAGE_TYPE_2D)
1640                 return VK_ERROR_FORMAT_NOT_SUPPORTED;
1641 
1642             if ((pImageFormatInfo->usage & ~(VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
1643                                              VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)) != 0)
1644             {
1645                 return VK_ERROR_FORMAT_NOT_SUPPORTED;
1646             }
1647 
1648             if ((pImageFormatInfo->flags & ~(VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT
1649                                              /*| VK_IMAGE_CREATE_PROTECTED_BIT*/
1650                                              /*| VK_IMAGE_CREATE_EXTENDED_USAGE_BIT*/)) != 0)
1651             {
1652                 return VK_ERROR_FORMAT_NOT_SUPPORTED;
1653             }
1654 
1655             if (externalProperties)
1656             {
1657                 externalProperties->externalMemoryProperties.externalMemoryFeatures =
1658                     VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT | VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT |
1659                     VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
1660                 externalProperties->externalMemoryProperties.exportFromImportedHandleTypes =
1661                     VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
1662                 externalProperties->externalMemoryProperties.compatibleHandleTypes =
1663                     VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
1664             }
1665         }
1666 
1667         return VK_SUCCESS;
1668 #else  // CTS_USES_VULKANSC
1669         return getPhysicalDeviceImageFormatProperties(
1670             physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling,
1671             pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
1672 #endif // CTS_USES_VULKANSC
1673     }
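    // \note For Android hardware buffer handle types the stub accepts only 2D images in the
    //       small set of color formats listed above, with transfer/sampled/color-attachment
    //       usage and (optionally) VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT; anything else returns
    //       VK_ERROR_FORMAT_NOT_SUPPORTED.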
1674 
1675     // \note getInstanceProcAddr is a little bit special:
1676     // vkNullDriverImpl.inl needs it to define s_platformFunctions but
1677     // getInstanceProcAddr() implementation needs other entry points from
1678     // vkNullDriverImpl.inl, hence the forward declaration below, ahead of the include.
1679     VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getInstanceProcAddr(VkInstance instance, const char *pName);
1680 
1681 #include "vkNullDriverImpl.inl"
1682 
1683     VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getInstanceProcAddr(VkInstance instance, const char *pName)
1684     {
1685         if (instance)
1686         {
1687             return reinterpret_cast<Instance *>(instance)->getProcAddr(pName);
1688         }
1689         else
1690         {
1691             const std::string name = pName;
1692 
1693             if (name == "vkCreateInstance")
1694                 return (PFN_vkVoidFunction)createInstance;
1695             else if (name == "vkEnumerateInstanceExtensionProperties")
1696                 return (PFN_vkVoidFunction)enumerateInstanceExtensionProperties;
1697             else if (name == "vkEnumerateInstanceLayerProperties")
1698                 return (PFN_vkVoidFunction)enumerateInstanceLayerProperties;
1699             else
1700                 return (PFN_vkVoidFunction)DE_NULL;
1701         }
1702     }
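    // \note Loader-style bootstrap against this entry point, mirroring the special cases
    //       handled above. Illustrative sketch only, not part of the driver:
    //
    //           PFN_vkCreateInstance createInstanceFn =
    //               (PFN_vkCreateInstance)getInstanceProcAddr(DE_NULL, "vkCreateInstance");
    //           PFN_vkEnumerateInstanceExtensionProperties enumerateExtensionsFn =
    //               (PFN_vkEnumerateInstanceExtensionProperties)getInstanceProcAddr(
    //                   DE_NULL, "vkEnumerateInstanceExtensionProperties");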
1703 
1704 } // extern "C"
1705 
1706 Instance::Instance(const VkInstanceCreateInfo *)
1707     : m_functions(s_instanceFunctions, DE_LENGTH_OF_ARRAY(s_instanceFunctions))
1708 {
1709 }
1710 
1711 Device::Device(VkPhysicalDevice, const VkDeviceCreateInfo *)
1712     : m_functions(s_deviceFunctions, DE_LENGTH_OF_ARRAY(s_deviceFunctions))
1713 {
1714 }
1715 
1716 class NullDriverLibrary : public Library
1717 {
1718 public:
1719     NullDriverLibrary(void)
1720         : m_library(s_platformFunctions, DE_LENGTH_OF_ARRAY(s_platformFunctions))
1721         , m_driver(m_library)
1722     {
1723     }
1724 
1725     const PlatformInterface &getPlatformInterface(void) const
1726     {
1727         return m_driver;
1728     }
1729     const tcu::FunctionLibrary &getFunctionLibrary(void) const
1730     {
1731         return m_library;
1732     }
1733 
1734 private:
1735     const tcu::StaticFunctionLibrary m_library;
1736     const PlatformDriver m_driver;
1737 };
1738 
1739 } // namespace
1740 
1741 Library *createNullDriver(void)
1742 {
1743     return new NullDriverLibrary();
1744 }
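// \note Typical use from a test harness; illustrative sketch only, assuming de::UniquePtr from
//       deUniquePtr.hpp:
//
//           const de::UniquePtr<vk::Library> library(vk::createNullDriver());
//           const vk::PlatformInterface &vkp = library->getPlatformInterface();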
1745 
1746 } // namespace vk
1747