1 // Copyright 2018 The SwiftShader Authors. All Rights Reserved.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
15 #include "VkBuffer.hpp"
16 #include "VkBufferView.hpp"
17 #include "VkCommandBuffer.hpp"
18 #include "VkCommandPool.hpp"
19 #include "VkConfig.hpp"
20 #include "VkDebugUtilsMessenger.hpp"
21 #include "VkDescriptorPool.hpp"
22 #include "VkDescriptorSetLayout.hpp"
23 #include "VkDescriptorUpdateTemplate.hpp"
24 #include "VkDestroy.hpp"
25 #include "VkDevice.hpp"
26 #include "VkDeviceMemory.hpp"
27 #include "VkEvent.hpp"
28 #include "VkFence.hpp"
29 #include "VkFramebuffer.hpp"
30 #include "VkGetProcAddress.hpp"
31 #include "VkImage.hpp"
32 #include "VkImageView.hpp"
33 #include "VkInstance.hpp"
34 #include "VkPhysicalDevice.hpp"
35 #include "VkPipeline.hpp"
36 #include "VkPipelineCache.hpp"
37 #include "VkPipelineLayout.hpp"
38 #include "VkQueryPool.hpp"
39 #include "VkQueue.hpp"
40 #include "VkRenderPass.hpp"
41 #include "VkSampler.hpp"
42 #include "VkSemaphore.hpp"
43 #include "VkShaderModule.hpp"
44 #include "VkStringify.hpp"
45 #include "VkStructConversion.hpp"
46 #include "VkTimelineSemaphore.hpp"
47
48 #include "Reactor/Nucleus.hpp"
49 #include "System/CPUID.hpp"
50 #include "System/Debug.hpp"
51 #include "System/SwiftConfig.hpp"
52 #include "WSI/HeadlessSurfaceKHR.hpp"
53 #include "WSI/VkSwapchainKHR.hpp"
54
55 #if defined(VK_USE_PLATFORM_METAL_EXT) || defined(VK_USE_PLATFORM_MACOS_MVK)
56 # include "WSI/MetalSurface.hpp"
57 #endif
58
59 #ifdef VK_USE_PLATFORM_XCB_KHR
60 # include "WSI/XcbSurfaceKHR.hpp"
61 #endif
62
63 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
64 # include "WSI/WaylandSurfaceKHR.hpp"
65 #endif
66
67 #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
68 # include "WSI/DirectFBSurfaceEXT.hpp"
69 #endif
70
71 #ifdef VK_USE_PLATFORM_DISPLAY_KHR
72 # include "WSI/DisplaySurfaceKHR.hpp"
73 #endif
74
75 #ifdef VK_USE_PLATFORM_WIN32_KHR
76 # include "WSI/Win32SurfaceKHR.hpp"
77 #endif
78
79 #include "marl/mutex.h"
80 #include "marl/scheduler.h"
81 #include "marl/thread.h"
82 #include "marl/tsa.h"
83
84 #ifdef __ANDROID__
85 # include <unistd.h>
86
87 # include "commit.h"
88 # include <android/log.h>
89 # include <hardware/gralloc.h>
90 # include <hardware/gralloc1.h>
91 # include <sync/sync.h>
92 # ifdef SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
93 # include "VkDeviceMemoryExternalAndroid.hpp"
94 # endif
95 #endif
96
97 #include <algorithm>
98 #include <cinttypes>
99 #include <cmath>
100 #include <cstring>
101 #include <functional>
102 #include <map>
103 #include <string>
104
105 namespace {
106
107 // Enable commit_id.py and #include commit.h for other platforms.
108 #if defined(__ANDROID__) && defined(ENABLE_BUILD_VERSION_OUTPUT)
109 void logBuildVersionInformation()
110 {
111 // TODO(b/144093703): Don't call __android_log_print() directly
112 __android_log_print(ANDROID_LOG_INFO, "SwiftShader", "SwiftShader Version: %s", SWIFTSHADER_VERSION_STRING);
113 }
114 #endif // __ANDROID__ && ENABLE_BUILD_VERSION_OUTPUT
115
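// Lazily creates the process-wide marl::Scheduler shared by all devices. Only a
// weak_ptr is cached, so the scheduler is destroyed once the last shared_ptr to it
// is released, and recreated on a later call if needed.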
116 std::shared_ptr<marl::Scheduler> getOrCreateScheduler()
117 {
118 struct Scheduler
119 {
120 marl::mutex mutex;
121 std::weak_ptr<marl::Scheduler> weakptr GUARDED_BY(mutex);
122 };
123
124 static Scheduler scheduler; // TODO(b/208256248): Avoid exit-time destructor.
125
126 marl::lock lock(scheduler.mutex);
127 auto sptr = scheduler.weakptr.lock();
128 if(!sptr)
129 {
130 const sw::Configuration &config = sw::getConfiguration();
131 marl::Scheduler::Config cfg = sw::getSchedulerConfiguration(config);
132 sptr = std::make_shared<marl::Scheduler>(cfg);
133 scheduler.weakptr = sptr;
134 }
135 return sptr;
136 }
137
138 // initializeLibrary() is called by vkCreateInstance() to perform one-off global
139 // initialization of the SwiftShader driver.
140 void initializeLibrary()
141 {
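// Thread-safe one-time initialization: the immediately-invoked lambda runs exactly once,
// guaranteed by C++ function-local static initialization semantics.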
142 static bool doOnce = [] {
143 #if defined(__ANDROID__) && defined(ENABLE_BUILD_VERSION_OUTPUT)
144 logBuildVersionInformation();
145 #endif // __ANDROID__ && ENABLE_BUILD_VERSION_OUTPUT
146 return true;
147 }();
148 (void)doOnce;
149 }
150
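// Validates the pNext chain of a render pass create info structure. Templated so the same
// checks can apply to the different render pass create info variants (e.g. VkRenderPassCreateInfo
// and VkRenderPassCreateInfo2).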
151 template<class T>
152 void ValidateRenderPassPNextChain(VkDevice device, const T *pCreateInfo)
153 {
154 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
155
156 while(extensionCreateInfo)
157 {
158 switch(extensionCreateInfo->sType)
159 {
160 case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO:
161 {
162 const VkRenderPassInputAttachmentAspectCreateInfo *inputAttachmentAspectCreateInfo = reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo *>(extensionCreateInfo);
163
164 for(uint32_t i = 0; i < inputAttachmentAspectCreateInfo->aspectReferenceCount; i++)
165 {
166 const auto &aspectReference = inputAttachmentAspectCreateInfo->pAspectReferences[i];
167 ASSERT(aspectReference.subpass < pCreateInfo->subpassCount);
168 const auto &subpassDescription = pCreateInfo->pSubpasses[aspectReference.subpass];
169 ASSERT(aspectReference.inputAttachmentIndex < subpassDescription.inputAttachmentCount);
170 const auto &attachmentReference = subpassDescription.pInputAttachments[aspectReference.inputAttachmentIndex];
171 if(attachmentReference.attachment != VK_ATTACHMENT_UNUSED)
172 {
173 // If the pNext chain includes an instance of VkRenderPassInputAttachmentAspectCreateInfo, for any
174 // element of the pInputAttachments member of any element of pSubpasses where the attachment member
175 // is not VK_ATTACHMENT_UNUSED, the aspectMask member of the corresponding element of
176 // VkRenderPassInputAttachmentAspectCreateInfo::pAspectReferences must only include aspects that are
177 // present in images of the format specified by the element of pAttachments at attachment
178 vk::Format format(pCreateInfo->pAttachments[attachmentReference.attachment].format);
179 bool isDepth = format.isDepth();
180 bool isStencil = format.isStencil();
181 ASSERT(!(aspectReference.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) || (!isDepth && !isStencil));
182 ASSERT(!(aspectReference.aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) || isDepth);
183 ASSERT(!(aspectReference.aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) || isStencil);
184 }
185 }
186 }
187 break;
188 case VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO:
189 {
190 const VkRenderPassMultiviewCreateInfo *multiviewCreateInfo = reinterpret_cast<const VkRenderPassMultiviewCreateInfo *>(extensionCreateInfo);
191 ASSERT((multiviewCreateInfo->subpassCount == 0) || (multiviewCreateInfo->subpassCount == pCreateInfo->subpassCount));
192 ASSERT((multiviewCreateInfo->dependencyCount == 0) || (multiviewCreateInfo->dependencyCount == pCreateInfo->dependencyCount));
193
194 bool zeroMask = (multiviewCreateInfo->pViewMasks[0] == 0);
195 for(uint32_t i = 1; i < multiviewCreateInfo->subpassCount; i++)
196 {
197 ASSERT((multiviewCreateInfo->pViewMasks[i] == 0) == zeroMask);
198 }
199
200 if(zeroMask)
201 {
202 ASSERT(multiviewCreateInfo->correlationMaskCount == 0);
203 }
204
205 for(uint32_t i = 0; i < multiviewCreateInfo->dependencyCount; i++)
206 {
207 const auto &dependency = pCreateInfo->pDependencies[i];
208 if(multiviewCreateInfo->pViewOffsets[i] != 0)
209 {
210 ASSERT(dependency.srcSubpass != dependency.dstSubpass);
211 ASSERT(dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT);
212 }
213 if(zeroMask)
214 {
215 ASSERT(!(dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT));
216 }
217 }
218
219 // If the pNext chain includes an instance of VkRenderPassMultiviewCreateInfo,
220 // each element of its pViewMask member must not include a bit at a position
221 // greater than the value of VkPhysicalDeviceLimits::maxFramebufferLayers
222 // pViewMask is a 32-bit value. If maxFramebufferLayers >= 32, it's impossible
223 // for pViewMask to contain a bit at an illegal position.
224 // Note: Verify pViewMask values instead if we hit this assert
225 ASSERT(vk::Cast(device)->getPhysicalDevice()->getProperties().limits.maxFramebufferLayers >= 32);
226 }
227 break;
228 case VK_STRUCTURE_TYPE_MAX_ENUM:
229 // dEQP tests that this value is ignored.
230 break;
231 default:
232 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
233 break;
234 }
235
236 extensionCreateInfo = extensionCreateInfo->pNext;
237 }
238 }
239
240 // This variable holds the ICD interface version negotiated with the loader.
241 // It defaults to 1 because if vk_icdNegotiateLoaderICDInterfaceVersion is never called, it means
242 // that the loader doesn't support version 2 of that interface.
243 uint32_t sICDInterfaceVersion = 1;
244 // Whether any vk_icd* entrypoints were used. This is used to distinguish between applications that
245 // use the Vulkan loader to load SwiftShader (in which case vk_icd functions are called), and
246 // applications that load SwiftShader and grab vkGetInstanceProcAddr directly.
247 bool sICDEntryPointsUsed = false;
248
249 } // namespace
250
251 extern "C" {
252 VK_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char *pName)
253 {
254 TRACE("(VkInstance instance = %p, const char* pName = %p)", instance, pName);
255 sICDEntryPointsUsed = true;
256
257 return vk::GetInstanceProcAddr(vk::Cast(instance), pName);
258 }
259
260 VK_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pSupportedVersion)
261 {
262 sICDEntryPointsUsed = true;
263
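// Negotiate down to the lower of the loader's requested version and 7, the highest
// loader/ICD interface version handled here.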
264 sICDInterfaceVersion = std::min(*pSupportedVersion, 7u);
265 *pSupportedVersion = sICDInterfaceVersion;
266 return VK_SUCCESS;
267 }
268
269 VK_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char *pName)
270 {
271 sICDEntryPointsUsed = true;
272 return vk::GetPhysicalDeviceProcAddr(vk::Cast(instance), pName);
273 }
274
275 #if VK_USE_PLATFORM_WIN32_KHR
276
277 VKAPI_ATTR VkResult VKAPI_CALL vk_icdEnumerateAdapterPhysicalDevices(VkInstance instance, LUID adapterLUID, uint32_t *pPhysicalDeviceCount, VkPhysicalDevice *pPhysicalDevices)
278 {
279 sICDEntryPointsUsed = true;
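// SwiftShader exposes no adapter (LUID) backed physical devices, so a count of zero is
// reported and the call succeeds.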
280 if(!pPhysicalDevices)
281 {
282 *pPhysicalDeviceCount = 0;
283 }
284
285 return VK_SUCCESS;
286 }
287
288 #endif // VK_USE_PLATFORM_WIN32_KHR
289
290 #if VK_USE_PLATFORM_FUCHSIA
291
292 // This symbol must be exported by a Fuchsia Vulkan ICD. The Vulkan loader will
293 // call it, passing the address of a global function pointer that can later be
294 // used at runtime to connect to Fuchsia FIDL services, as required by certain
295 // extensions. See https://fxbug.dev/13095 for more details.
296 //
297 // NOTE: This entry point has not been upstreamed to Khronos yet, which reserves
298 // all symbols starting with vk_icd. See https://fxbug.dev/13074 which
299 // tracks upstreaming progress.
300 VK_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdInitializeConnectToServiceCallback(
301 PFN_vkConnectToService callback)
302 {
303 TRACE("(callback = %p)", callback);
304 sICDEntryPointsUsed = true;
305 vk::icdFuchsiaServiceConnectCallback = callback;
306 return VK_SUCCESS;
307 }
308
309 #endif // VK_USE_PLATFORM_FUCHSIA
310
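// VkExtensionProperties augmented with a runtime support check, used for extensions whose
// availability depends on the platform (e.g. the Xcb and Wayland surface extensions below).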
311 struct ExtensionProperties : public VkExtensionProperties
312 {
313 std::function<bool()> isSupported = [] { return true; };
314 };
315
316 // TODO(b/208256248): Avoid exit-time destructor.
317 static const ExtensionProperties instanceExtensionProperties[] = {
318 { { VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME, VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION } },
319 { { VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME, VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION } },
320 { { VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION } },
321 { { VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME, VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION } },
322 { { VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION } },
323 { { VK_EXT_DEBUG_UTILS_EXTENSION_NAME, VK_EXT_DEBUG_UTILS_SPEC_VERSION } },
324 { { VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME, VK_EXT_HEADLESS_SURFACE_SPEC_VERSION } },
325 #ifndef __ANDROID__
326 { { VK_KHR_SURFACE_EXTENSION_NAME, VK_KHR_SURFACE_SPEC_VERSION } },
327 { { VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME, VK_EXT_SURFACE_MAINTENANCE_1_SPEC_VERSION } },
328 { { VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME, VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION } },
329 #endif
330 #ifdef VK_USE_PLATFORM_XCB_KHR
331 { { VK_KHR_XCB_SURFACE_EXTENSION_NAME, VK_KHR_XCB_SURFACE_SPEC_VERSION }, [] { return vk::XcbSurfaceKHR::isSupported(); } },
332 #endif
333 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
334 { { VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME, VK_KHR_WAYLAND_SURFACE_SPEC_VERSION }, [] { return vk::WaylandSurfaceKHR::isSupported(); } },
335 #endif
336 #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
337 { { VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME, VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION } },
338 #endif
339 #ifdef VK_USE_PLATFORM_DISPLAY_KHR
340 { { VK_KHR_DISPLAY_EXTENSION_NAME, VK_KHR_DISPLAY_SPEC_VERSION } },
341 #endif
342 #ifdef VK_USE_PLATFORM_MACOS_MVK
343 { { VK_MVK_MACOS_SURFACE_EXTENSION_NAME, VK_MVK_MACOS_SURFACE_SPEC_VERSION } },
344 #endif
345 #ifdef VK_USE_PLATFORM_METAL_EXT
346 { { VK_EXT_METAL_SURFACE_EXTENSION_NAME, VK_EXT_METAL_SURFACE_SPEC_VERSION } },
347 #endif
348 #ifdef VK_USE_PLATFORM_WIN32_KHR
349 { { VK_KHR_WIN32_SURFACE_EXTENSION_NAME, VK_KHR_WIN32_SURFACE_SPEC_VERSION } },
350 #endif
351 };
352
353 // TODO(b/208256248): Avoid exit-time destructor.
354 static const ExtensionProperties deviceExtensionProperties[] = {
355 { { VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME, VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION } },
356 // Vulkan 1.1 promoted extensions
357 { { VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, VK_KHR_BIND_MEMORY_2_SPEC_VERSION } },
358 { { VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME, VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION } },
359 { { VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION } },
360 { { VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME, VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION } },
361 { { VK_KHR_DEVICE_GROUP_EXTENSION_NAME, VK_KHR_DEVICE_GROUP_SPEC_VERSION } },
362 { { VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME, VK_KHR_EXTERNAL_FENCE_SPEC_VERSION } },
363 { { VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION } },
364 { { VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME, VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION } },
365 { { VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION } },
366 { { VK_KHR_MAINTENANCE1_EXTENSION_NAME, VK_KHR_MAINTENANCE1_SPEC_VERSION } },
367 { { VK_KHR_MAINTENANCE2_EXTENSION_NAME, VK_KHR_MAINTENANCE2_SPEC_VERSION } },
368 { { VK_KHR_MAINTENANCE3_EXTENSION_NAME, VK_KHR_MAINTENANCE3_SPEC_VERSION } },
369 { { VK_KHR_MULTIVIEW_EXTENSION_NAME, VK_KHR_MULTIVIEW_SPEC_VERSION } },
370 { { VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME, VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION } },
371 { { VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION } },
372 { { VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME, VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION } },
373 // Only 1.1 core version of this is supported. The extension has additional requirements
374 //{{ VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME, VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION }},
375 { { VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME, VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION } },
376 // Only 1.1 core version of this is supported. The extension has additional requirements
377 //{{ VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME, VK_KHR_VARIABLE_POINTERS_SPEC_VERSION }},
378 { { VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION } },
379 #ifndef __ANDROID__
380 // We fully support the KHR_swapchain v70 additions, so just track the spec version.
381 { { VK_KHR_SWAPCHAIN_EXTENSION_NAME, VK_KHR_SWAPCHAIN_SPEC_VERSION } },
382 #else
383 // We only support V7 of this extension. Missing functionality: in V8,
384 // it becomes possible to pass a VkNativeBufferANDROID structure to
385 // vkBindImageMemory2. Android's swapchain implementation does this in
386 // order to support passing VkBindImageMemorySwapchainInfoKHR
387 // (from KHR_swapchain v70) to vkBindImageMemory2.
388 { { VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME, 7 } },
389 #endif
390 #if SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
391 { { VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION } },
392 #endif
393 #if SWIFTSHADER_EXTERNAL_SEMAPHORE_OPAQUE_FD
394 { { VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION } },
395 #endif
396 #if SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
397 { { VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION } },
398 #endif
399 #if !defined(__APPLE__)
400 { { VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME, VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION } },
401 #endif
402 #if VK_USE_PLATFORM_FUCHSIA
403 { { VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME, VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION } },
404 { { VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME, VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION } },
405 #endif
406 { { VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME, VK_EXT_PROVOKING_VERTEX_SPEC_VERSION } },
407 { { VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME, VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION } },
408 #ifdef SWIFTSHADER_DEVICE_MEMORY_REPORT
409 { { VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME, VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION } },
410 #endif // SWIFTSHADER_DEVICE_MEMORY_REPORT
411 // Vulkan 1.2 promoted extensions
412 { { VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME, VK_EXT_HOST_QUERY_RESET_SPEC_VERSION } },
413 { { VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME, VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION } },
414 { { VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME, VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION } },
415 { { VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME, VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION } },
416 { { VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME, VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION } },
417 { { VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME, VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION } },
418 { { VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME, VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION } },
419 { { VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME, VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION } },
420 { { VK_KHR_SPIRV_1_4_EXTENSION_NAME, VK_KHR_SPIRV_1_4_SPEC_VERSION } },
421 { { VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME, VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION } },
422 { { VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME, VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION } },
423 // Vulkan 1.3 promoted extensions
424 { { VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME, VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION } },
425 { { VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME, VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION } },
426 { { VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME, VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION } },
427 { { VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME, VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION } },
428 { { VK_EXT_PRIVATE_DATA_EXTENSION_NAME, VK_EXT_PRIVATE_DATA_SPEC_VERSION } },
429 { { VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME, VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION } },
430 { { VK_KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME, VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION } },
431 { { VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME, VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION } },
432 { { VK_EXT_TOOLING_INFO_EXTENSION_NAME, VK_EXT_TOOLING_INFO_SPEC_VERSION } },
433 { { VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME, VK_KHR_COPY_COMMANDS_2_SPEC_VERSION } },
434 { { VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME, VK_KHR_DYNAMIC_RENDERING_SPEC_VERSION } },
435 { { VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_EXTENSION_NAME, VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_SPEC_VERSION } },
436 { { VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME, VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION } },
437 { { VK_KHR_MAINTENANCE_4_EXTENSION_NAME, VK_KHR_MAINTENANCE_4_SPEC_VERSION } },
438 { { VK_KHR_SHADER_INTEGER_DOT_PRODUCT_EXTENSION_NAME, VK_KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION } },
439 { { VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME, VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION } },
440 { { VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME, VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION } },
441 { { VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME, VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION } },
442 // Roadmap 2022 extension
443 { { VK_KHR_GLOBAL_PRIORITY_EXTENSION_NAME, VK_KHR_GLOBAL_PRIORITY_SPEC_VERSION } },
444 // Additional extensions
445 { { VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME, VK_EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION } },
446 { { VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME, VK_GOOGLE_DECORATE_STRING_SPEC_VERSION } },
447 { { VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME, VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION } },
448 { { VK_GOOGLE_USER_TYPE_EXTENSION_NAME, VK_GOOGLE_USER_TYPE_SPEC_VERSION } },
449 { { VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME, VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION } },
450 { { VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME, VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION } },
451 { { VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME, VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION } },
452 #ifndef __ANDROID__
453 { { VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME, VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION } },
454 { { VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME, VK_EXT_SWAPCHAIN_MAINTENANCE_1_SPEC_VERSION } },
455 #endif
456 { { VK_EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME, VK_EXT_GRAPHICS_PIPELINE_LIBRARY_SPEC_VERSION } },
457 { { VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME, VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION } },
458 { { VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME, VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION } },
459 { { VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME, VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION } },
460 { { VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME, VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION } },
461 // The following extension is only used to add support for Bresenham lines
462 { { VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME, VK_EXT_LINE_RASTERIZATION_SPEC_VERSION } },
463 // The following extension is used by ANGLE to emulate blitting the stencil buffer
464 { { VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION } },
465 { { VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME, VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION } },
466 // Useful for D3D emulation
467 { { VK_EXT_4444_FORMATS_EXTENSION_NAME, VK_EXT_4444_FORMATS_SPEC_VERSION } },
468 // Used by ANGLE to support GL_KHR_blend_equation_advanced
469 { { VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME, VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION } },
470 // Used by ANGLE to implement primitive restart for triangle (and other) list topologies, as permitted in OpenGL
471 { { VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME, VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_SPEC_VERSION } },
472 { { VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME, VK_EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION } },
473 { { VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME, VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION } },
474 { { VK_EXT_HOST_IMAGE_COPY_EXTENSION_NAME, VK_EXT_HOST_IMAGE_COPY_SPEC_VERSION } },
475 { { VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME, VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION } },
476 };
477
478 static uint32_t numSupportedExtensions(const ExtensionProperties *extensionProperties, uint32_t extensionPropertiesCount)
479 {
480 uint32_t count = 0;
481
482 for(uint32_t i = 0; i < extensionPropertiesCount; i++)
483 {
484 if(extensionProperties[i].isSupported())
485 {
486 count++;
487 }
488 }
489
490 return count;
491 }
492
493 static uint32_t numInstanceSupportedExtensions()
494 {
495 return numSupportedExtensions(instanceExtensionProperties, sizeof(instanceExtensionProperties) / sizeof(instanceExtensionProperties[0]));
496 }
497
498 static uint32_t numDeviceSupportedExtensions()
499 {
500 return numSupportedExtensions(deviceExtensionProperties, sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0]));
501 }
502
503 static bool hasExtension(const char *extensionName, const ExtensionProperties *extensionProperties, uint32_t extensionPropertiesCount)
504 {
505 for(uint32_t i = 0; i < extensionPropertiesCount; i++)
506 {
507 if(strcmp(extensionName, extensionProperties[i].extensionName) == 0)
508 {
509 return extensionProperties[i].isSupported();
510 }
511 }
512
513 return false;
514 }
515
516 static bool hasInstanceExtension(const char *extensionName)
517 {
518 return hasExtension(extensionName, instanceExtensionProperties, sizeof(instanceExtensionProperties) / sizeof(instanceExtensionProperties[0]));
519 }
520
521 static bool hasDeviceExtension(const char *extensionName)
522 {
523 return hasExtension(extensionName, deviceExtensionProperties, sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0]));
524 }
525
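// Copies up to 'toCopy' supported extensions into pProperties, skipping entries whose
// isSupported() check fails.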
526 static void copyExtensions(VkExtensionProperties *pProperties, uint32_t toCopy, const ExtensionProperties *extensionProperties, uint32_t extensionPropertiesCount)
527 {
528 for(uint32_t i = 0, j = 0; i < toCopy; i++, j++)
529 {
530 while((j < extensionPropertiesCount) && !extensionProperties[j].isSupported())
531 {
532 j++;
533 }
534 if(j < extensionPropertiesCount)
535 {
536 pProperties[i] = extensionProperties[j];
537 }
538 }
539 }
540
541 static void copyInstanceExtensions(VkExtensionProperties *pProperties, uint32_t toCopy)
542 {
543 copyExtensions(pProperties, toCopy, instanceExtensionProperties, sizeof(instanceExtensionProperties) / sizeof(instanceExtensionProperties[0]));
544 }
545
546 static void copyDeviceExtensions(VkExtensionProperties *pProperties, uint32_t toCopy)
547 {
548 copyExtensions(pProperties, toCopy, deviceExtensionProperties, sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0]));
549 }
550
551 VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkInstance *pInstance)
552 {
553 TRACE("(const VkInstanceCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkInstance* pInstance = %p)",
554 pCreateInfo, pAllocator, pInstance);
555
556 initializeLibrary();
557
558 // ICD interface rule for version 5 of the interface:
559 // - If the loader supports version 4 or lower, the driver must fail with
560 // VK_ERROR_INCOMPATIBLE_DRIVER for all vkCreateInstance calls with apiVersion
561 // set to > Vulkan 1.0
562 // - If the loader supports version 5 or above, the loader must fail with
563 // VK_ERROR_INCOMPATIBLE_DRIVER if it can't handle the apiVersion, and drivers
564 // should fail with VK_ERROR_INCOMPATIBLE_DRIVER only if they can not support the
565 // specified apiVersion.
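// For example, when only a version 4 (or older) loader interface is in use, an application
// requesting apiVersion 1.1 or higher is rejected below with VK_ERROR_INCOMPATIBLE_DRIVER;
// otherwise the request only fails if it exceeds vk::API_VERSION.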
566 if(pCreateInfo->pApplicationInfo)
567 {
568 uint32_t appApiVersion = pCreateInfo->pApplicationInfo->apiVersion;
569 if(sICDEntryPointsUsed && sICDInterfaceVersion <= 4)
570 {
571 // Any version above 1.0 is an error.
572 if(VK_API_VERSION_MAJOR(appApiVersion) != 1 || VK_API_VERSION_MINOR(appApiVersion) != 0)
573 {
574 return VK_ERROR_INCOMPATIBLE_DRIVER;
575 }
576 }
577 else
578 {
579 if(VK_API_VERSION_MAJOR(appApiVersion) > VK_API_VERSION_MAJOR(vk::API_VERSION))
580 {
581 return VK_ERROR_INCOMPATIBLE_DRIVER;
582 }
583 if((VK_API_VERSION_MAJOR(appApiVersion) == VK_API_VERSION_MAJOR(vk::API_VERSION)) &&
584 VK_API_VERSION_MINOR(appApiVersion) > VK_API_VERSION_MINOR(vk::API_VERSION))
585 {
586 return VK_ERROR_INCOMPATIBLE_DRIVER;
587 }
588 }
589 }
590
591 if(pCreateInfo->flags != 0)
592 {
593 // Vulkan 1.3: "flags is reserved for future use." "flags must be 0"
594 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
595 }
596
597 if(pCreateInfo->enabledLayerCount != 0)
598 {
599 // Creating an instance with unsupported layers must fail, and SwiftShader doesn't support any layers
600 return VK_ERROR_LAYER_NOT_PRESENT;
601 }
602
603 for(uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; ++i)
604 {
605 if(!hasInstanceExtension(pCreateInfo->ppEnabledExtensionNames[i]))
606 {
607 return VK_ERROR_EXTENSION_NOT_PRESENT;
608 }
609 }
610
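// An application may chain a VkDebugUtilsMessengerCreateInfoEXT to receive messages during
// instance creation; the messenger created here is passed on to the instance below.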
611 VkDebugUtilsMessengerEXT messenger = { VK_NULL_HANDLE };
612 if(pCreateInfo->pNext)
613 {
614 const VkBaseInStructure *createInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
615 switch(createInfo->sType)
616 {
617 case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT:
618 {
619 const VkDebugUtilsMessengerCreateInfoEXT *debugUtilsMessengerCreateInfoEXT = reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>(createInfo);
620 VkResult result = vk::DebugUtilsMessenger::Create(pAllocator, debugUtilsMessengerCreateInfoEXT, &messenger);
621 if(result != VK_SUCCESS)
622 {
623 return result;
624 }
625 }
626 break;
627 case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO:
628 // According to the Vulkan spec, section 2.7.2. Implicit Valid Usage:
629 // "The values VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO and
630 // VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO are reserved for
631 // internal use by the loader, and do not have corresponding
632 // Vulkan structures in this Specification."
633 break;
634 case VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG:
635 // TODO(b/229112690): This structure is only meant to be used by the Vulkan Loader
636 // and should not be forwarded to the driver.
637 break;
638 default:
639 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(createInfo->sType).c_str());
640 break;
641 }
642 }
643
644 *pInstance = VK_NULL_HANDLE;
645 VkPhysicalDevice physicalDevice = VK_NULL_HANDLE;
646
647 VkResult result = vk::DispatchablePhysicalDevice::Create(pAllocator, pCreateInfo, &physicalDevice);
648 if(result != VK_SUCCESS)
649 {
650 vk::destroy(messenger, pAllocator);
651 return result;
652 }
653
654 result = vk::DispatchableInstance::Create(pAllocator, pCreateInfo, pInstance, physicalDevice, vk::Cast(messenger));
655 if(result != VK_SUCCESS)
656 {
657 vk::destroy(messenger, pAllocator);
658 vk::destroy(physicalDevice, pAllocator);
659 return result;
660 }
661
662 return result;
663 }
664
665 VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator)
666 {
667 TRACE("(VkInstance instance = %p, const VkAllocationCallbacks* pAllocator = %p)", instance, pAllocator);
668
669 vk::destroy(instance, pAllocator);
670 }
671
672 VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount, VkPhysicalDevice *pPhysicalDevices)
673 {
674 TRACE("(VkInstance instance = %p, uint32_t* pPhysicalDeviceCount = %p, VkPhysicalDevice* pPhysicalDevices = %p)",
675 instance, pPhysicalDeviceCount, pPhysicalDevices);
676
677 return vk::Cast(instance)->getPhysicalDevices(pPhysicalDeviceCount, pPhysicalDevices);
678 }
679
680 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *pFeatures)
681 {
682 TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceFeatures* pFeatures = %p)",
683 physicalDevice, pFeatures);
684
685 *pFeatures = vk::Cast(physicalDevice)->getFeatures();
686 }
687
688 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties *pFormatProperties)
689 {
690 TRACE("GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkFormatProperties* pFormatProperties = %p)",
691 physicalDevice, (int)format, pFormatProperties);
692
693 vk::PhysicalDevice::GetFormatProperties(format, pFormatProperties);
694 }
695
696 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties *pImageFormatProperties)
697 {
698 TRACE("(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkImageType type = %d, VkImageTiling tiling = %d, VkImageUsageFlags usage = %d, VkImageCreateFlags flags = %d, VkImageFormatProperties* pImageFormatProperties = %p)",
699 physicalDevice, (int)format, (int)type, (int)tiling, usage, flags, pImageFormatProperties);
700
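// Implement the Vulkan 1.0 query in terms of vkGetPhysicalDeviceImageFormatProperties2 by
// wrapping the arguments in the corresponding *2 structures.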
701 VkPhysicalDeviceImageFormatInfo2 info2 = {};
702 info2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
703 info2.pNext = nullptr;
704 info2.format = format;
705 info2.type = type;
706 info2.tiling = tiling;
707 info2.usage = usage;
708 info2.flags = flags;
709
710 VkImageFormatProperties2 properties2 = {};
711 properties2.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
712 properties2.pNext = nullptr;
713
714 VkResult result = vkGetPhysicalDeviceImageFormatProperties2(physicalDevice, &info2, &properties2);
715
716 *pImageFormatProperties = properties2.imageFormatProperties;
717
718 return result;
719 }
720
721 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties)
722 {
723 TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceProperties* pProperties = %p)",
724 physicalDevice, pProperties);
725
726 *pProperties = vk::Cast(physicalDevice)->getProperties();
727 }
728
729 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties *pQueueFamilyProperties)
730 {
731 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pQueueFamilyPropertyCount = %p, VkQueueFamilyProperties* pQueueFamilyProperties = %p))", physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
732
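// Standard Vulkan two-call idiom: with a null pQueueFamilyProperties only the count is
// returned; otherwise up to *pQueueFamilyPropertyCount entries are written.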
733 if(!pQueueFamilyProperties)
734 {
735 *pQueueFamilyPropertyCount = vk::Cast(physicalDevice)->getQueueFamilyPropertyCount();
736 }
737 else
738 {
739 vk::Cast(physicalDevice)->getQueueFamilyProperties(*pQueueFamilyPropertyCount, pQueueFamilyProperties);
740 }
741 }
742
743 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties *pMemoryProperties)
744 {
745 TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceMemoryProperties* pMemoryProperties = %p)", physicalDevice, pMemoryProperties);
746
747 *pMemoryProperties = vk::PhysicalDevice::GetMemoryProperties();
748 }
749
750 VK_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *pName)
751 {
752 TRACE("(VkInstance instance = %p, const char* pName = %p)", instance, pName);
753
754 return vk::GetInstanceProcAddr(vk::Cast(instance), pName);
755 }
756
757 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice device, const char *pName)
758 {
759 TRACE("(VkDevice device = %p, const char* pName = %p)", device, pName);
760
761 return vk::GetDeviceProcAddr(vk::Cast(device), pName);
762 }
763
764 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDevice *pDevice)
765 {
766 TRACE("(VkPhysicalDevice physicalDevice = %p, const VkDeviceCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDevice* pDevice = %p)",
767 physicalDevice, pCreateInfo, pAllocator, pDevice);
768
769 if(pCreateInfo->flags != 0)
770 {
771 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
772 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
773 }
774
775 if(pCreateInfo->enabledLayerCount != 0)
776 {
777 // "The ppEnabledLayerNames and enabledLayerCount members of VkDeviceCreateInfo are deprecated and their values must be ignored by implementations."
778 UNSUPPORTED("pCreateInfo->enabledLayerCount != 0");
779 }
780
781 for(uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; ++i)
782 {
783 if(!hasDeviceExtension(pCreateInfo->ppEnabledExtensionNames[i]))
784 {
785 return VK_ERROR_EXTENSION_NOT_PRESENT;
786 }
787 }
788
789 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
790
791 const VkPhysicalDeviceFeatures *enabledFeatures = pCreateInfo->pEnabledFeatures;
792
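// Walk the device create info pNext chain, validating each recognized structure and returning
// VK_ERROR_FEATURE_NOT_PRESENT for any feature request SwiftShader cannot honor.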
793 while(extensionCreateInfo)
794 {
795 switch(extensionCreateInfo->sType)
796 {
797 case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO:
798 // According to the Vulkan spec, section 2.7.2. Implicit Valid Usage:
799 // "The values VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO and
800 // VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO are reserved for
801 // internal use by the loader, and do not have corresponding
802 // Vulkan structures in this Specification."
803 break;
804 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2:
805 {
806 ASSERT(!pCreateInfo->pEnabledFeatures); // "If the pNext chain includes a VkPhysicalDeviceFeatures2 structure, then pEnabledFeatures must be NULL"
807
808 const VkPhysicalDeviceFeatures2 *physicalDeviceFeatures2 = reinterpret_cast<const VkPhysicalDeviceFeatures2 *>(extensionCreateInfo);
809
810 enabledFeatures = &physicalDeviceFeatures2->features;
811 }
812 break;
813 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES:
814 {
815 const VkPhysicalDeviceSamplerYcbcrConversionFeatures *samplerYcbcrConversionFeatures = reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures *>(extensionCreateInfo);
816
817 // YCbCr conversion is supported.
818 // samplerYcbcrConversionFeatures->samplerYcbcrConversion can be VK_TRUE or VK_FALSE.
819 // No action needs to be taken on our end in either case; it's the app's responsibility to satisfy the requirement:
820 // "To create a sampler Y'CbCr conversion, the samplerYcbcrConversion feature must be enabled."
821 (void)samplerYcbcrConversionFeatures->samplerYcbcrConversion;
822 }
823 break;
824 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES:
825 {
826 const VkPhysicalDevice16BitStorageFeatures *storage16BitFeatures = reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures *>(extensionCreateInfo);
827
828 if(storage16BitFeatures->storageBuffer16BitAccess != VK_FALSE ||
829 storage16BitFeatures->uniformAndStorageBuffer16BitAccess != VK_FALSE ||
830 storage16BitFeatures->storagePushConstant16 != VK_FALSE ||
831 storage16BitFeatures->storageInputOutput16 != VK_FALSE)
832 {
833 return VK_ERROR_FEATURE_NOT_PRESENT;
834 }
835 }
836 break;
837 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES:
838 {
839 const VkPhysicalDeviceVariablePointerFeatures *variablePointerFeatures = reinterpret_cast<const VkPhysicalDeviceVariablePointerFeatures *>(extensionCreateInfo);
840
841 if(variablePointerFeatures->variablePointersStorageBuffer != VK_FALSE ||
842 variablePointerFeatures->variablePointers != VK_FALSE)
843 {
844 return VK_ERROR_FEATURE_NOT_PRESENT;
845 }
846 }
847 break;
848 case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO:
849 {
850 const VkDeviceGroupDeviceCreateInfo *groupDeviceCreateInfo = reinterpret_cast<const VkDeviceGroupDeviceCreateInfo *>(extensionCreateInfo);
851
852 if((groupDeviceCreateInfo->physicalDeviceCount != 1) ||
853 (groupDeviceCreateInfo->pPhysicalDevices[0] != physicalDevice))
854 {
855 return VK_ERROR_FEATURE_NOT_PRESENT;
856 }
857 }
858 break;
859 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES:
860 {
861 const VkPhysicalDeviceMultiviewFeatures *multiviewFeatures = reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures *>(extensionCreateInfo);
862
863 if(multiviewFeatures->multiviewGeometryShader ||
864 multiviewFeatures->multiviewTessellationShader)
865 {
866 return VK_ERROR_FEATURE_NOT_PRESENT;
867 }
868 }
869 break;
870 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES:
871 {
872 const VkPhysicalDeviceShaderDrawParametersFeatures *shaderDrawParametersFeatures = reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures *>(extensionCreateInfo);
873
874 if(shaderDrawParametersFeatures->shaderDrawParameters)
875 {
876 return VK_ERROR_FEATURE_NOT_PRESENT;
877 }
878 }
879 break;
880 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES:
881 {
882 const VkPhysicalDeviceDynamicRenderingFeatures *dynamicRenderingFeatures = reinterpret_cast<const VkPhysicalDeviceDynamicRenderingFeatures *>(extensionCreateInfo);
883
884 // Dynamic rendering is supported
885 (void)(dynamicRenderingFeatures->dynamicRendering);
886 }
887 break;
888 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR:
889 {
890 const VkPhysicalDeviceDynamicRenderingLocalReadFeaturesKHR *dynamicRenderingLocalReadFeatures = reinterpret_cast<const VkPhysicalDeviceDynamicRenderingLocalReadFeaturesKHR *>(extensionCreateInfo);
891
892 // Dynamic rendering local read is supported
893 (void)(dynamicRenderingLocalReadFeatures->dynamicRenderingLocalRead);
894 }
895 break;
896 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES:
897 {
898 const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *separateDepthStencilLayoutsFeatures = reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *>(extensionCreateInfo);
899
900 // Separate depth and stencil layouts are already supported
901 (void)(separateDepthStencilLayoutsFeatures->separateDepthStencilLayouts);
902 }
903 break;
904 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT:
905 {
906 const auto *lineRasterizationFeatures = reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT *>(extensionCreateInfo);
907 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(lineRasterizationFeatures);
908 if(!hasFeatures)
909 {
910 return VK_ERROR_FEATURE_NOT_PRESENT;
911 }
912 }
913 break;
914 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT:
915 {
916 const VkPhysicalDeviceProvokingVertexFeaturesEXT *provokingVertexFeatures = reinterpret_cast<const VkPhysicalDeviceProvokingVertexFeaturesEXT *>(extensionCreateInfo);
917 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(provokingVertexFeatures);
918 if(!hasFeatures)
919 {
920 return VK_ERROR_FEATURE_NOT_PRESENT;
921 }
922 }
923 break;
924 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES:
925 {
926 const VkPhysicalDeviceImageRobustnessFeatures *imageRobustnessFeatures = reinterpret_cast<const VkPhysicalDeviceImageRobustnessFeatures *>(extensionCreateInfo);
927
928 // We currently always provide robust image accesses. When the feature is disabled, results are
929 // undefined (for images with Dim != Buffer), so providing robustness is also acceptable.
930 // TODO(b/159329067): Only provide robustness when requested.
931 (void)imageRobustnessFeatures->robustImageAccess;
932 }
933 break;
934 // For unsupported structures, check that we don't expose the corresponding extension string:
935 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT:
936 ASSERT(!hasDeviceExtension(VK_EXT_ROBUSTNESS_2_EXTENSION_NAME));
937 break;
938 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES:
939 {
940 const VkPhysicalDeviceImagelessFramebufferFeaturesKHR *imagelessFramebufferFeatures = reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeaturesKHR *>(extensionCreateInfo);
941 // Always provide Imageless Framebuffers
942 (void)imagelessFramebufferFeatures->imagelessFramebuffer;
943 }
944 break;
945 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES:
946 {
947 const VkPhysicalDeviceScalarBlockLayoutFeatures *scalarBlockLayoutFeatures = reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures *>(extensionCreateInfo);
948
949 // VK_EXT_scalar_block_layout is supported, allowing C-like structure layout for SPIR-V blocks.
950 (void)scalarBlockLayoutFeatures->scalarBlockLayout;
951 }
952 break;
953 #ifdef SWIFTSHADER_DEVICE_MEMORY_REPORT
954 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT:
955 {
956 const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *deviceMemoryReportFeatures = reinterpret_cast<const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *>(extensionCreateInfo);
957 (void)deviceMemoryReportFeatures->deviceMemoryReport;
958 }
959 break;
960 #endif // SWIFTSHADER_DEVICE_MEMORY_REPORT
961 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES:
962 {
963 const VkPhysicalDeviceHostQueryResetFeatures *hostQueryResetFeatures = reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures *>(extensionCreateInfo);
964
965 // VK_EXT_host_query_reset is always enabled.
966 (void)hostQueryResetFeatures->hostQueryReset;
967 }
968 break;
969 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES:
970 {
971 const VkPhysicalDevicePipelineCreationCacheControlFeatures *pipelineCreationCacheControlFeatures = reinterpret_cast<const VkPhysicalDevicePipelineCreationCacheControlFeatures *>(extensionCreateInfo);
972
973 // VK_EXT_pipeline_creation_cache_control is always enabled.
974 (void)pipelineCreationCacheControlFeatures->pipelineCreationCacheControl;
975 }
976 break;
977 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES:
978 {
979 const auto *tsFeatures = reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures *>(extensionCreateInfo);
980
981 // VK_KHR_timeline_semaphore is always enabled
982 (void)tsFeatures->timelineSemaphore;
983 }
984 break;
985 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT:
986 {
987 const auto *customBorderColorFeatures = reinterpret_cast<const VkPhysicalDeviceCustomBorderColorFeaturesEXT *>(extensionCreateInfo);
988
989 // VK_EXT_custom_border_color is always enabled
990 (void)customBorderColorFeatures->customBorderColors;
991 (void)customBorderColorFeatures->customBorderColorWithoutFormat;
992 }
993 break;
994 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES:
995 {
996 const auto *vk11Features = reinterpret_cast<const VkPhysicalDeviceVulkan11Features *>(extensionCreateInfo);
997 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(vk11Features);
998 if(!hasFeatures)
999 {
1000 return VK_ERROR_FEATURE_NOT_PRESENT;
1001 }
1002 }
1003 break;
1004 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES:
1005 {
1006 const auto *vk12Features = reinterpret_cast<const VkPhysicalDeviceVulkan12Features *>(extensionCreateInfo);
1007 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(vk12Features);
1008 if(!hasFeatures)
1009 {
1010 return VK_ERROR_FEATURE_NOT_PRESENT;
1011 }
1012 }
1013 break;
1014 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES:
1015 {
1016 const auto *vk13Features = reinterpret_cast<const VkPhysicalDeviceVulkan13Features *>(extensionCreateInfo);
1017 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(vk13Features);
1018 if(!hasFeatures)
1019 {
1020 return VK_ERROR_FEATURE_NOT_PRESENT;
1021 }
1022 }
1023 break;
1024 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT:
1025 {
1026 const auto *depthClipFeatures = reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT *>(extensionCreateInfo);
1027 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(depthClipFeatures);
1028 if(!hasFeatures)
1029 {
1030 return VK_ERROR_FEATURE_NOT_PRESENT;
1031 }
1032 }
1033 break;
1034 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT:
1035 {
1036 const auto *blendOpFeatures = reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>(extensionCreateInfo);
1037 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(blendOpFeatures);
1038 if(!hasFeatures)
1039 {
1040 return VK_ERROR_FEATURE_NOT_PRESENT;
1041 }
1042 }
1043 break;
1044 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT:
1045 {
1046 const auto *dynamicStateFeatures = reinterpret_cast<const VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *>(extensionCreateInfo);
1047 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(dynamicStateFeatures);
1048 if(!hasFeatures)
1049 {
1050 return VK_ERROR_FEATURE_NOT_PRESENT;
1051 }
1052 }
1053 break;
1054 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT:
1055 {
1056 const auto *dynamicStateFeatures = reinterpret_cast<const VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *>(extensionCreateInfo);
1057 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(dynamicStateFeatures);
1058 if(!hasFeatures)
1059 {
1060 return VK_ERROR_FEATURE_NOT_PRESENT;
1061 }
1062 }
1063 break;
1064 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES:
1065 {
1066 const auto *privateDataFeatures = reinterpret_cast<const VkPhysicalDevicePrivateDataFeatures *>(extensionCreateInfo);
1067 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(privateDataFeatures);
1068 if(!hasFeatures)
1069 {
1070 return VK_ERROR_FEATURE_NOT_PRESENT;
1071 }
1072 }
1073 break;
1074 case VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO:
1075 {
1076 const auto *privateDataCreateInfo = reinterpret_cast<const VkDevicePrivateDataCreateInfo *>(extensionCreateInfo);
1077 (void)privateDataCreateInfo->privateDataSlotRequestCount;
1078 }
1079 break;
1080 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES:
1081 {
1082 const auto *textureCompressionASTCHDRFeatures = reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeatures *>(extensionCreateInfo);
1083 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(textureCompressionASTCHDRFeatures);
1084 if(!hasFeatures)
1085 {
1086 return VK_ERROR_FEATURE_NOT_PRESENT;
1087 }
1088 }
1089 break;
1090 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES:
1091 {
1092 const auto *shaderDemoteToHelperInvocationFeatures = reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures *>(extensionCreateInfo);
1093 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(shaderDemoteToHelperInvocationFeatures);
1094 if(!hasFeatures)
1095 {
1096 return VK_ERROR_FEATURE_NOT_PRESENT;
1097 }
1098 }
1099 break;
1100 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES:
1101 {
1102 const auto *shaderTerminateInvocationFeatures = reinterpret_cast<const VkPhysicalDeviceShaderTerminateInvocationFeatures *>(extensionCreateInfo);
1103 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(shaderTerminateInvocationFeatures);
1104 if(!hasFeatures)
1105 {
1106 return VK_ERROR_FEATURE_NOT_PRESENT;
1107 }
1108 }
1109 break;
1110 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES:
1111 {
1112 const auto *subgroupSizeControlFeatures = reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeatures *>(extensionCreateInfo);
1113 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(subgroupSizeControlFeatures);
1114 if(!hasFeatures)
1115 {
1116 return VK_ERROR_FEATURE_NOT_PRESENT;
1117 }
1118 }
1119 break;
1120 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES:
1121 {
1122 const auto *uniformBlockFeatures = reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeatures *>(extensionCreateInfo);
1123 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(uniformBlockFeatures);
1124 if(!hasFeatures)
1125 {
1126 return VK_ERROR_FEATURE_NOT_PRESENT;
1127 }
1128 }
1129 break;
1130 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES:
1131 {
1132 const auto *integerDotProductFeatures = reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductFeatures *>(extensionCreateInfo);
1133 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(integerDotProductFeatures);
1134 if(!hasFeatures)
1135 {
1136 return VK_ERROR_FEATURE_NOT_PRESENT;
1137 }
1138 }
1139 break;
1140 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES:
1141 {
1142 const auto *zeroInitializeWorkgroupMemoryFeatures = reinterpret_cast<const VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *>(extensionCreateInfo);
1143 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(zeroInitializeWorkgroupMemoryFeatures);
1144 if(!hasFeatures)
1145 {
1146 return VK_ERROR_FEATURE_NOT_PRESENT;
1147 }
1148 }
1149 break;
1150 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT:
1151 {
1152 const auto *primitiveTopologyListRestartFeatures = reinterpret_cast<const VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *>(extensionCreateInfo);
1153 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(primitiveTopologyListRestartFeatures);
1154 if(!hasFeatures)
1155 {
1156 return VK_ERROR_FEATURE_NOT_PRESENT;
1157 }
1158 }
1159 break;
1160 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES:
1161 {
1162 const auto *descriptorIndexingFeatures = reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeatures *>(extensionCreateInfo);
1163 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(descriptorIndexingFeatures);
1164 if(!hasFeatures)
1165 {
1166 return VK_ERROR_FEATURE_NOT_PRESENT;
1167 }
1168 }
1169 break;
1170 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR:
1171 {
1172 const auto *globalPriorityQueryFeatures = reinterpret_cast<const VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR *>(extensionCreateInfo);
1173 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(globalPriorityQueryFeatures);
1174 if(!hasFeatures)
1175 {
1176 return VK_ERROR_FEATURE_NOT_PRESENT;
1177 }
1178 }
1179 break;
1180 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES:
1181 {
1182 const auto *protectedMemoryFeatures = reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures *>(extensionCreateInfo);
1183 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(protectedMemoryFeatures);
1184 if(!hasFeatures)
1185 {
1186 return VK_ERROR_FEATURE_NOT_PRESENT;
1187 }
1188 }
1189 break;
1190 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES:
1191 {
1192 const auto *bufferDeviceAddressFeatures = reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeatures *>(extensionCreateInfo);
1193 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(bufferDeviceAddressFeatures);
1194 if(!hasFeatures)
1195 {
1196 return VK_ERROR_FEATURE_NOT_PRESENT;
1197 }
1198 }
1199 break;
1200 // These structs are supported, but no behavior changes based on their feature flags
1201 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES:
1202 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES:
1203 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES:
1204 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES:
1205 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES:
1206 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT:
1207 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES:
1208 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES:
1209 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES:
1210 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT:
1211 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT:
1212 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT:
1213 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT:
1214 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT:
1215 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT:
1216 break;
1217 default:
1218 // "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extenions]"
1219 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
1220 break;
1221 }
1222
1223 extensionCreateInfo = extensionCreateInfo->pNext;
1224 }
1225
1226 ASSERT(pCreateInfo->queueCreateInfoCount > 0);
1227
1228 if(enabledFeatures)
1229 {
1230 if(!vk::Cast(physicalDevice)->hasFeatures(*enabledFeatures))
1231 {
1232 return VK_ERROR_FEATURE_NOT_PRESENT;
1233 }
1234 }
1235
1236 uint32_t queueFamilyPropertyCount = vk::Cast(physicalDevice)->getQueueFamilyPropertyCount();
1237
1238 for(uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; i++)
1239 {
1240 const VkDeviceQueueCreateInfo &queueCreateInfo = pCreateInfo->pQueueCreateInfos[i];
1241 if(queueCreateInfo.flags != 0)
1242 {
1243 UNSUPPORTED("pCreateInfo->pQueueCreateInfos[%d]->flags 0x%08X", i, queueCreateInfo.flags);
1244 }
1245
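		// Walk each queue's pNext chain; the only extension structure handled here is the
		// KHR global-priority request, whose requested priority must be one the physical
		// device accepts.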
1246 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(queueCreateInfo.pNext);
1247 while(extInfo)
1248 {
1249 switch(extInfo->sType)
1250 {
1251 case VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR:
1252 {
1253 const auto *globalPriorityCreateInfo = reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfoKHR *>(extInfo);
1254 if(!(vk::Cast(physicalDevice)->validateQueueGlobalPriority(globalPriorityCreateInfo->globalPriority)))
1255 {
1256 return VK_ERROR_INITIALIZATION_FAILED;
1257 }
1258 }
1259 break;
1260 default:
1261 UNSUPPORTED("pCreateInfo->pQueueCreateInfos[%d].pNext sType = %s", i, vk::Stringify(extInfo->sType).c_str());
1262 break;
1263 }
1264
1265 extInfo = extInfo->pNext;
1266 }
1267
1268 ASSERT(queueCreateInfo.queueFamilyIndex < queueFamilyPropertyCount);
1269 (void)queueFamilyPropertyCount; // Silence unused variable warning
1270 }
1271
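	// Device work runs on a marl scheduler; getOrCreateScheduler() lazily creates it on first use.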
1272 auto scheduler = getOrCreateScheduler();
1273 return vk::DispatchableDevice::Create(pAllocator, pCreateInfo, pDevice, vk::Cast(physicalDevice), enabledFeatures, scheduler);
1274 }
1275
1276 VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator)
1277 {
1278 TRACE("(VkDevice device = %p, const VkAllocationCallbacks* pAllocator = %p)", device, pAllocator);
1279
1280 vk::destroy(device, pAllocator);
1281 }
1282
1283 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties)
1284 {
1285 TRACE("(const char* pLayerName = %p, uint32_t* pPropertyCount = %p, VkExtensionProperties* pProperties = %p)",
1286 pLayerName, pPropertyCount, pProperties);
1287
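	// Standard Vulkan two-call enumeration idiom: a null pProperties returns only the count;
	// otherwise up to *pPropertyCount entries are written and VK_INCOMPLETE is returned if the
	// caller's array was too small. Typical (illustrative) caller usage:
	//   uint32_t count = 0;
	//   vkEnumerateInstanceExtensionProperties(nullptr, &count, nullptr);
	//   std::vector<VkExtensionProperties> props(count);
	//   vkEnumerateInstanceExtensionProperties(nullptr, &count, props.data());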
1288 uint32_t extensionPropertiesCount = numInstanceSupportedExtensions();
1289
1290 if(!pProperties)
1291 {
1292 *pPropertyCount = extensionPropertiesCount;
1293 return VK_SUCCESS;
1294 }
1295
1296 auto toCopy = std::min(*pPropertyCount, extensionPropertiesCount);
1297 copyInstanceExtensions(pProperties, toCopy);
1298
1299 *pPropertyCount = toCopy;
1300 return (toCopy < extensionPropertiesCount) ? VK_INCOMPLETE : VK_SUCCESS;
1301 }
1302
1303 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties)
1304 {
1305 TRACE("(VkPhysicalDevice physicalDevice = %p, const char* pLayerName, uint32_t* pPropertyCount = %p, VkExtensionProperties* pProperties = %p)", physicalDevice, pPropertyCount, pProperties);
1306
1307 uint32_t extensionPropertiesCount = numDeviceSupportedExtensions();
1308
1309 if(!pProperties)
1310 {
1311 *pPropertyCount = extensionPropertiesCount;
1312 return VK_SUCCESS;
1313 }
1314
1315 auto toCopy = std::min(*pPropertyCount, extensionPropertiesCount);
1316 copyDeviceExtensions(pProperties, toCopy);
1317
1318 *pPropertyCount = toCopy;
1319 return (toCopy < extensionPropertiesCount) ? VK_INCOMPLETE : VK_SUCCESS;
1320 }
1321
1322 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pPropertyCount, VkLayerProperties *pProperties)
1323 {
1324 TRACE("(uint32_t* pPropertyCount = %p, VkLayerProperties* pProperties = %p)", pPropertyCount, pProperties);
1325
1326 if(!pProperties)
1327 {
1328 *pPropertyCount = 0;
1329 return VK_SUCCESS;
1330 }
1331
1332 return VK_SUCCESS;
1333 }
1334
1335 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkLayerProperties *pProperties)
1336 {
1337 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pPropertyCount = %p, VkLayerProperties* pProperties = %p)", physicalDevice, pPropertyCount, pProperties);
1338
1339 if(!pProperties)
1340 {
1341 *pPropertyCount = 0;
1342 return VK_SUCCESS;
1343 }
1344
1345 return VK_SUCCESS;
1346 }
1347
1348 VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue)
1349 {
1350 TRACE("(VkDevice device = %p, uint32_t queueFamilyIndex = %d, uint32_t queueIndex = %d, VkQueue* pQueue = %p)",
1351 device, queueFamilyIndex, queueIndex, pQueue);
1352
1353 *pQueue = vk::Cast(device)->getQueue(queueFamilyIndex, queueIndex);
1354 }
1355
1356 VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence)
1357 {
1358 TRACE("(VkQueue queue = %p, uint32_t submitCount = %d, const VkSubmitInfo* pSubmits = %p, VkFence fence = %p)",
1359 queue, submitCount, pSubmits, static_cast<void *>(fence));
1360
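	// SubmitInfo::Allocate copies the submit information into driver-owned storage so it can
	// outlive this call while the queue processes the submission.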
1361 return vk::Cast(queue)->submit(submitCount, vk::SubmitInfo::Allocate(submitCount, pSubmits), vk::Cast(fence));
1362 }
1363
1364 VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 *pSubmits, VkFence fence)
1365 {
1366 TRACE("(VkQueue queue = %p, uint32_t submitCount = %d, const VkSubmitInfo2* pSubmits = %p, VkFence fence = %p)",
1367 queue, submitCount, pSubmits, static_cast<void *>(fence));
1368
1369 return vk::Cast(queue)->submit(submitCount, vk::SubmitInfo::Allocate(submitCount, pSubmits), vk::Cast(fence));
1370 }
1371
1372 VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(VkQueue queue)
1373 {
1374 TRACE("(VkQueue queue = %p)", queue);
1375
1376 return vk::Cast(queue)->waitIdle();
1377 }
1378
1379 VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(VkDevice device)
1380 {
1381 TRACE("(VkDevice device = %p)", device);
1382
1383 return vk::Cast(device)->waitIdle();
1384 }
1385
1386 VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo, const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory)
1387 {
1388 TRACE("(VkDevice device = %p, const VkMemoryAllocateInfo* pAllocateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDeviceMemory* pMemory = %p)",
1389 device, pAllocateInfo, pAllocator, pMemory);
1390
1391 VkResult result = vk::DeviceMemory::Allocate(pAllocator, pAllocateInfo, pMemory, vk::Cast(device));
1392
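	// If allocation failed, destroy whatever partially-created object Allocate may have
	// produced and clear the output handle so the caller never observes a stale value.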
1393 if(result != VK_SUCCESS)
1394 {
1395 vk::destroy(*pMemory, pAllocator);
1396 *pMemory = VK_NULL_HANDLE;
1397 }
1398
1399 return result;
1400 }
1401
1402 VKAPI_ATTR void VKAPI_CALL vkFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator)
1403 {
1404 TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p, const VkAllocationCallbacks* pAllocator = %p)",
1405 device, static_cast<void *>(memory), pAllocator);
1406
1407 vk::destroy(memory, pAllocator);
1408 }
1409
1410 #if SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
1411 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR(VkDevice device, const VkMemoryGetFdInfoKHR *getFdInfo, int *pFd)
1412 {
1413 TRACE("(VkDevice device = %p, const VkMemoryGetFdInfoKHR* getFdInfo = %p, int* pFd = %p",
1414 device, getFdInfo, pFd);
1415
1416 if(getFdInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT)
1417 {
1418 UNSUPPORTED("pGetFdInfo->handleType %u", getFdInfo->handleType);
1419 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1420 }
1421 return vk::Cast(getFdInfo->memory)->exportFd(pFd);
1422 }
1423
1424 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR *pMemoryFdProperties)
1425 {
1426 TRACE("(VkDevice device = %p, VkExternalMemoryHandleTypeFlagBits handleType = %x, int fd = %d, VkMemoryFdPropertiesKHR* pMemoryFdProperties = %p)",
1427 device, handleType, fd, pMemoryFdProperties);
1428
1429 if(handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT)
1430 {
1431 UNSUPPORTED("handleType %u", handleType);
1432 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1433 }
1434
1435 if(fd < 0)
1436 {
1437 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1438 }
1439
1440 const VkPhysicalDeviceMemoryProperties &memoryProperties =
1441 vk::PhysicalDevice::GetMemoryProperties();
1442
1443 // All SwiftShader memory types support this!
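	// (1 << memoryTypeCount) - 1 sets one bit for each memory type the physical device reports.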
1444 pMemoryFdProperties->memoryTypeBits = (1U << memoryProperties.memoryTypeCount) - 1U;
1445
1446 return VK_SUCCESS;
1447 }
1448 #endif // SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
1449 #if VK_USE_PLATFORM_FUCHSIA
1450 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandleFUCHSIA(VkDevice device, const VkMemoryGetZirconHandleInfoFUCHSIA *pGetHandleInfo, zx_handle_t *pHandle)
1451 {
1452 TRACE("(VkDevice device = %p, const VkMemoryGetZirconHandleInfoFUCHSIA* pGetHandleInfo = %p, zx_handle_t* pHandle = %p",
1453 device, pGetHandleInfo, pHandle);
1454
1455 if(pGetHandleInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA)
1456 {
1457 UNSUPPORTED("pGetHandleInfo->handleType %u", pGetHandleInfo->handleType);
1458 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1459 }
1460 return vk::Cast(pGetHandleInfo->memory)->exportHandle(pHandle);
1461 }
1462
1463 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandlePropertiesFUCHSIA(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, zx_handle_t handle, VkMemoryZirconHandlePropertiesFUCHSIA *pMemoryZirconHandleProperties)
1464 {
1465 TRACE("(VkDevice device = %p, VkExternalMemoryHandleTypeFlagBits handleType = %x, zx_handle_t handle = %d, VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties = %p)",
1466 device, handleType, handle, pMemoryZirconHandleProperties);
1467
1468 if(handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA)
1469 {
1470 UNSUPPORTED("handleType %u", handleType);
1471 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1472 }
1473
1474 if(handle == ZX_HANDLE_INVALID)
1475 {
1476 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1477 }
1478
1479 const VkPhysicalDeviceMemoryProperties &memoryProperties =
1480 vk::PhysicalDevice::GetMemoryProperties();
1481
1482 // All SwiftShader memory types support this!
1483 pMemoryZirconHandleProperties->memoryTypeBits = (1U << memoryProperties.memoryTypeCount) - 1U;
1484
1485 return VK_SUCCESS;
1486 }
1487 #endif // VK_USE_PLATFORM_FUCHSIA
1488
1489 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void *pHostPointer, VkMemoryHostPointerPropertiesEXT *pMemoryHostPointerProperties)
1490 {
1491 TRACE("(VkDevice device = %p, VkExternalMemoryHandleTypeFlagBits handleType = %x, const void *pHostPointer = %p, VkMemoryHostPointerPropertiesEXT *pMemoryHostPointerProperties = %p)",
1492 device, handleType, pHostPointer, pMemoryHostPointerProperties);
1493
1494 if(handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT && handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT)
1495 {
1496 UNSUPPORTED("handleType %u", handleType);
1497 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1498 }
1499 pMemoryHostPointerProperties->memoryTypeBits = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
1500
1501 return VK_SUCCESS;
1502 }
1503
1504 #if SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
1505 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo, struct AHardwareBuffer **pBuffer)
1506 {
1507 TRACE("(VkDevice device = %p, const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo = %p, struct AHardwareBuffer **pBuffer = %p)",
1508 device, pInfo, pBuffer);
1509
1510 return vk::Cast(pInfo->memory)->exportAndroidHardwareBuffer(pBuffer);
1511 }
1512
1513 VKAPI_ATTR VkResult VKAPI_CALL vkGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties)
1514 {
1515 TRACE("(VkDevice device = %p, const struct AHardwareBuffer *buffer = %p, VkAndroidHardwareBufferPropertiesANDROID *pProperties = %p)",
1516 device, buffer, pProperties);
1517
1518 return vk::DeviceMemory::GetAndroidHardwareBufferProperties(device, buffer, pProperties);
1519 }
1520 #endif // SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
1521
1522 VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void **ppData)
1523 {
1524 TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p, VkDeviceSize offset = %d, VkDeviceSize size = %d, VkMemoryMapFlags flags = %d, void** ppData = %p)",
1525 device, static_cast<void *>(memory), int(offset), int(size), flags, ppData);
1526
1527 if(flags != 0)
1528 {
1529 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1530 UNSUPPORTED("flags 0x%08X", int(flags));
1531 }
1532
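	// Device memory in SwiftShader is plain host memory, so mapping simply returns a pointer
	// into the existing allocation at the requested offset.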
1533 return vk::Cast(memory)->map(offset, size, ppData);
1534 }
1535
1536 VKAPI_ATTR void VKAPI_CALL vkUnmapMemory(VkDevice device, VkDeviceMemory memory)
1537 {
1538 TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p)", device, static_cast<void *>(memory));
1539
1540 // Noop, memory will be released when the DeviceMemory object is released
1541 }
1542
1543 VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges)
1544 {
1545 TRACE("(VkDevice device = %p, uint32_t memoryRangeCount = %d, const VkMappedMemoryRange* pMemoryRanges = %p)",
1546 device, memoryRangeCount, pMemoryRanges);
1547
1548 // Noop, host and device memory are the same to SwiftShader
1549
1550 return VK_SUCCESS;
1551 }
1552
1553 VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges)
1554 {
1555 TRACE("(VkDevice device = %p, uint32_t memoryRangeCount = %d, const VkMappedMemoryRange* pMemoryRanges = %p)",
1556 device, memoryRangeCount, pMemoryRanges);
1557
1558 // Noop, host and device memory are the same to SwiftShader
1559
1560 return VK_SUCCESS;
1561 }
1562
1563 VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment(VkDevice pDevice, VkDeviceMemory pMemory, VkDeviceSize *pCommittedMemoryInBytes)
1564 {
1565 TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p, VkDeviceSize* pCommittedMemoryInBytes = %p)",
1566 pDevice, static_cast<void *>(pMemory), pCommittedMemoryInBytes);
1567
1568 auto *memory = vk::Cast(pMemory);
1569
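	// Debug-only sanity check: a commitment query is only meaningful for memory allocated
	// from a lazily-allocated memory type, so verify the type's property flags.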
1570 #if !defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)
1571 const auto &memoryProperties = vk::PhysicalDevice::GetMemoryProperties();
1572 uint32_t typeIndex = memory->getMemoryTypeIndex();
1573 ASSERT(typeIndex < memoryProperties.memoryTypeCount);
1574 ASSERT(memoryProperties.memoryTypes[typeIndex].propertyFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT);
1575 #endif
1576
1577 *pCommittedMemoryInBytes = memory->getCommittedMemoryInBytes();
1578 }
1579
1580 VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset)
1581 {
1582 TRACE("(VkDevice device = %p, VkBuffer buffer = %p, VkDeviceMemory memory = %p, VkDeviceSize memoryOffset = %d)",
1583 device, static_cast<void *>(buffer), static_cast<void *>(memory), int(memoryOffset));
1584
1585 if(!vk::Cast(buffer)->canBindToMemory(vk::Cast(memory)))
1586 {
1587 UNSUPPORTED("vkBindBufferMemory with invalid external memory");
1588 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1589 }
1590 vk::Cast(buffer)->bind(vk::Cast(memory), memoryOffset);
1591 return VK_SUCCESS;
1592 }
1593
1594 VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset)
1595 {
1596 TRACE("(VkDevice device = %p, VkImage image = %p, VkDeviceMemory memory = %p, VkDeviceSize memoryOffset = %d)",
1597 device, static_cast<void *>(image), static_cast<void *>(memory), int(memoryOffset));
1598
1599 if(!vk::Cast(image)->canBindToMemory(vk::Cast(memory)))
1600 {
1601 UNSUPPORTED("vkBindImageMemory with invalid external memory");
1602 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1603 }
1604 vk::Cast(image)->bind(vk::Cast(memory), memoryOffset);
1605 return VK_SUCCESS;
1606 }
1607
1608 VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements)
1609 {
1610 TRACE("(VkDevice device = %p, VkBuffer buffer = %p, VkMemoryRequirements* pMemoryRequirements = %p)",
1611 device, static_cast<void *>(buffer), pMemoryRequirements);
1612
1613 *pMemoryRequirements = vk::Cast(buffer)->getMemoryRequirements();
1614 }
1615
1616 VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements)
1617 {
1618 TRACE("(VkDevice device = %p, VkImage image = %p, VkMemoryRequirements* pMemoryRequirements = %p)",
1619 device, static_cast<void *>(image), pMemoryRequirements);
1620
1621 *pMemoryRequirements = vk::Cast(image)->getMemoryRequirements();
1622 }
1623
1624 VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
1625 {
1626 TRACE("(VkDevice device = %p, VkImage image = %p, uint32_t* pSparseMemoryRequirementCount = %p, VkSparseImageMemoryRequirements* pSparseMemoryRequirements = %p)",
1627 device, static_cast<void *>(image), pSparseMemoryRequirementCount, pSparseMemoryRequirements);
1628
1629 // The 'sparseBinding' feature is not supported, so images can not be created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag.
1630 // "If the image was not created with VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT then pSparseMemoryRequirementCount will be set to zero and pSparseMemoryRequirements will not be written to."
1631 *pSparseMemoryRequirementCount = 0;
1632 }
1633
1634 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t *pPropertyCount, VkSparseImageFormatProperties *pProperties)
1635 {
1636 TRACE("(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkImageType type = %d, VkSampleCountFlagBits samples = %d, VkImageUsageFlags usage = %d, VkImageTiling tiling = %d, uint32_t* pPropertyCount = %p, VkSparseImageFormatProperties* pProperties = %p)",
1637 physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
1638
1639 // We do not support sparse images.
1640 *pPropertyCount = 0;
1641 }
1642
1643 VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo, VkFence fence)
1644 {
1645 TRACE("()");
1646 UNSUPPORTED("vkQueueBindSparse");
1647 return VK_SUCCESS;
1648 }
1649
1650 VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkFence *pFence)
1651 {
1652 TRACE("(VkDevice device = %p, const VkFenceCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkFence* pFence = %p)",
1653 device, pCreateInfo, pAllocator, pFence);
1654
1655 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1656 while(nextInfo)
1657 {
1658 switch(nextInfo->sType)
1659 {
1660 case VK_STRUCTURE_TYPE_MAX_ENUM:
1661 // dEQP tests that this value is ignored.
1662 break;
1663 default:
1664 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
1665 break;
1666 }
1667 nextInfo = nextInfo->pNext;
1668 }
1669
1670 return vk::Fence::Create(pAllocator, pCreateInfo, pFence);
1671 }
1672
1673 VKAPI_ATTR void VKAPI_CALL vkDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator)
1674 {
1675 TRACE("(VkDevice device = %p, VkFence fence = %p, const VkAllocationCallbacks* pAllocator = %p)",
1676 device, static_cast<void *>(fence), pAllocator);
1677
1678 vk::destroy(fence, pAllocator);
1679 }
1680
1681 VKAPI_ATTR VkResult VKAPI_CALL vkResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences)
1682 {
1683 TRACE("(VkDevice device = %p, uint32_t fenceCount = %d, const VkFence* pFences = %p)",
1684 device, fenceCount, pFences);
1685
1686 for(uint32_t i = 0; i < fenceCount; i++)
1687 {
1688 vk::Cast(pFences[i])->reset();
1689 }
1690
1691 return VK_SUCCESS;
1692 }
1693
1694 VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus(VkDevice device, VkFence fence)
1695 {
1696 TRACE("(VkDevice device = %p, VkFence fence = %p)", device, static_cast<void *>(fence));
1697
1698 return vk::Cast(fence)->getStatus();
1699 }
1700
1701 VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll, uint64_t timeout)
1702 {
1703 TRACE("(VkDevice device = %p, uint32_t fenceCount = %d, const VkFence* pFences = %p, VkBool32 waitAll = %d, uint64_t timeout = %" PRIu64 ")",
1704 device, int(fenceCount), pFences, int(waitAll), timeout);
1705
1706 return vk::Cast(device)->waitForFences(fenceCount, pFences, waitAll, timeout);
1707 }
1708
1709 VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore)
1710 {
1711 TRACE("(VkDevice device = %p, const VkSemaphoreCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSemaphore* pSemaphore = %p)",
1712 device, pCreateInfo, pAllocator, pSemaphore);
1713
1714 if(pCreateInfo->flags != 0)
1715 {
1716 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1717 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
1718 }
1719
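	// Default to a binary semaphore; a VkSemaphoreTypeCreateInfo in the pNext chain can
	// select a timeline semaphore instead.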
1720 VkSemaphoreType type = VK_SEMAPHORE_TYPE_BINARY;
1721 for(const auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1722 nextInfo != nullptr; nextInfo = nextInfo->pNext)
1723 {
1724 switch(nextInfo->sType)
1725 {
1726 case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO:
1727 // Let the semaphore constructor handle this
1728 break;
1729 case VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO:
1730 {
1731 const VkSemaphoreTypeCreateInfo *info = reinterpret_cast<const VkSemaphoreTypeCreateInfo *>(nextInfo);
1732 type = info->semaphoreType;
1733 }
1734 break;
1735 default:
1736 WARN("nextInfo->sType = %s", vk::Stringify(nextInfo->sType).c_str());
1737 break;
1738 }
1739 }
1740
1741 if(type == VK_SEMAPHORE_TYPE_BINARY)
1742 {
1743 return vk::BinarySemaphore::Create(pAllocator, pCreateInfo, pSemaphore, pAllocator);
1744 }
1745 else
1746 {
1747 return vk::TimelineSemaphore::Create(pAllocator, pCreateInfo, pSemaphore, pAllocator);
1748 }
1749 }
1750
1751 VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator)
1752 {
1753 TRACE("(VkDevice device = %p, VkSemaphore semaphore = %p, const VkAllocationCallbacks* pAllocator = %p)",
1754 device, static_cast<void *>(semaphore), pAllocator);
1755
1756 vk::destroy(semaphore, pAllocator);
1757 }
1758
1759 #if SWIFTSHADER_EXTERNAL_SEMAPHORE_OPAQUE_FD
1760 VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd)
1761 {
1762 TRACE("(VkDevice device = %p, const VkSemaphoreGetFdInfoKHR* pGetFdInfo = %p, int* pFd = %p)",
1763 device, static_cast<const void *>(pGetFdInfo), static_cast<void *>(pFd));
1764
1765 if(pGetFdInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT)
1766 {
1767 UNSUPPORTED("pGetFdInfo->handleType %d", int(pGetFdInfo->handleType));
1768 }
1769
1770 auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pGetFdInfo->semaphore);
1771 ASSERT(sem != nullptr);
1772 return sem->exportFd(pFd);
1773 }
1774
1775 VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR *pImportSemaphoreInfo)
1776 {
1777 TRACE("(VkDevice device = %p, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreInfo = %p",
1778 device, static_cast<const void *>(pImportSemaphoreInfo));
1779
1780 if(pImportSemaphoreInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT)
1781 {
1782 UNSUPPORTED("pImportSemaphoreInfo->handleType %d", int(pImportSemaphoreInfo->handleType));
1783 }
1784 bool temporaryImport = (pImportSemaphoreInfo->flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) != 0;
1785
1786 auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pImportSemaphoreInfo->semaphore);
1787 ASSERT(sem != nullptr);
1788 return sem->importFd(pImportSemaphoreInfo->fd, temporaryImport);
1789 }
1790 #endif // SWIFTSHADER_EXTERNAL_SEMAPHORE_OPAQUE_FD
1791
1792 #if VK_USE_PLATFORM_FUCHSIA
1793 VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreZirconHandleFUCHSIA(
1794 VkDevice device,
1795 const VkImportSemaphoreZirconHandleInfoFUCHSIA *pImportSemaphoreZirconHandleInfo)
1796 {
1797 TRACE("(VkDevice device = %p, const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo = %p)",
1798 device, pImportSemaphoreZirconHandleInfo);
1799
1800 if(pImportSemaphoreZirconHandleInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA)
1801 {
1802 UNSUPPORTED("pImportSemaphoreZirconHandleInfo->handleType %d", int(pImportSemaphoreZirconHandleInfo->handleType));
1803 }
1804 bool temporaryImport = (pImportSemaphoreZirconHandleInfo->flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) != 0;
1805 auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pImportSemaphoreZirconHandleInfo->semaphore);
1806 ASSERT(sem != nullptr);
1807 return sem->importHandle(pImportSemaphoreZirconHandleInfo->zirconHandle, temporaryImport);
1808 }
1809
1810 VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreZirconHandleFUCHSIA(
1811 VkDevice device,
1812 const VkSemaphoreGetZirconHandleInfoFUCHSIA *pGetZirconHandleInfo,
1813 zx_handle_t *pZirconHandle)
1814 {
1815 TRACE("(VkDevice device = %p, const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo = %p, zx_handle_t* pZirconHandle = %p)",
1816 device, static_cast<const void *>(pGetZirconHandleInfo), static_cast<void *>(pZirconHandle));
1817
1818 if(pGetZirconHandleInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA)
1819 {
1820 UNSUPPORTED("pGetZirconHandleInfo->handleType %d", int(pGetZirconHandleInfo->handleType));
1821 }
1822
1823 auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pGetZirconHandleInfo->semaphore);
1824 ASSERT(sem != nullptr);
1825 return sem->exportHandle(pZirconHandle);
1826 }
1827 #endif // VK_USE_PLATFORM_FUCHSIA
1828
1829 VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue)
1830 {
1831 TRACE("(VkDevice device = %p, VkSemaphore semaphore = %p, uint64_t* pValue = %p)",
1832 device, static_cast<void *>(semaphore), pValue);
1833 *pValue = vk::DynamicCast<vk::TimelineSemaphore>(semaphore)->getCounterValue();
1834 return VK_SUCCESS;
1835 }
1836
1837 VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphore(VkDevice device, const VkSemaphoreSignalInfo *pSignalInfo)
1838 {
1839 TRACE("(VkDevice device = %p, const VkSemaphoreSignalInfo *pSignalInfo = %p)",
1840 device, pSignalInfo);
1841 vk::DynamicCast<vk::TimelineSemaphore>(pSignalInfo->semaphore)->signal(pSignalInfo->value);
1842 return VK_SUCCESS;
1843 }
1844
1845 VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout)
1846 {
1847 TRACE("(VkDevice device = %p, const VkSemaphoreWaitInfo *pWaitInfo = %p, uint64_t timeout = %" PRIu64 ")",
1848 device, pWaitInfo, timeout);
1849 return vk::Cast(device)->waitForSemaphores(pWaitInfo, timeout);
1850 }
1851
1852 VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkEvent *pEvent)
1853 {
1854 TRACE("(VkDevice device = %p, const VkEventCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkEvent* pEvent = %p)",
1855 device, pCreateInfo, pAllocator, pEvent);
1856
1857 // VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR is provided by VK_KHR_synchronization2
1858 if((pCreateInfo->flags != 0) && (pCreateInfo->flags != VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR))
1859 {
1860 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
1861 }
1862
1863 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1864 while(extInfo)
1865 {
1866 // Vulkan 1.2: "pNext must be NULL"
1867 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1868 extInfo = extInfo->pNext;
1869 }
1870
1871 return vk::Event::Create(pAllocator, pCreateInfo, pEvent);
1872 }
1873
1874 VKAPI_ATTR void VKAPI_CALL vkDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator)
1875 {
1876 TRACE("(VkDevice device = %p, VkEvent event = %p, const VkAllocationCallbacks* pAllocator = %p)",
1877 device, static_cast<void *>(event), pAllocator);
1878
1879 vk::destroy(event, pAllocator);
1880 }
1881
1882 VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus(VkDevice device, VkEvent event)
1883 {
1884 TRACE("(VkDevice device = %p, VkEvent event = %p)", device, static_cast<void *>(event));
1885
1886 return vk::Cast(event)->getStatus();
1887 }
1888
1889 VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent(VkDevice device, VkEvent event)
1890 {
1891 TRACE("(VkDevice device = %p, VkEvent event = %p)", device, static_cast<void *>(event));
1892
1893 vk::Cast(event)->signal();
1894
1895 return VK_SUCCESS;
1896 }
1897
1898 VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent(VkDevice device, VkEvent event)
1899 {
1900 TRACE("(VkDevice device = %p, VkEvent event = %p)", device, static_cast<void *>(event));
1901
1902 vk::Cast(event)->reset();
1903
1904 return VK_SUCCESS;
1905 }
1906
1907 VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool)
1908 {
1909 TRACE("(VkDevice device = %p, const VkQueryPoolCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkQueryPool* pQueryPool = %p)",
1910 device, pCreateInfo, pAllocator, pQueryPool);
1911
1912 if(pCreateInfo->flags != 0)
1913 {
1914 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1915 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
1916 }
1917
1918 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1919 while(extInfo)
1920 {
1921 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1922 extInfo = extInfo->pNext;
1923 }
1924
1925 return vk::QueryPool::Create(pAllocator, pCreateInfo, pQueryPool);
1926 }
1927
1928 VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator)
1929 {
1930 TRACE("(VkDevice device = %p, VkQueryPool queryPool = %p, const VkAllocationCallbacks* pAllocator = %p)",
1931 device, static_cast<void *>(queryPool), pAllocator);
1932
1933 vk::destroy(queryPool, pAllocator);
1934 }
1935
1936 VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void *pData, VkDeviceSize stride, VkQueryResultFlags flags)
1937 {
1938 TRACE("(VkDevice device = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d, size_t dataSize = %d, void* pData = %p, VkDeviceSize stride = %d, VkQueryResultFlags flags = %d)",
1939 device, static_cast<void *>(queryPool), int(firstQuery), int(queryCount), int(dataSize), pData, int(stride), flags);
1940
1941 return vk::Cast(queryPool)->getResults(firstQuery, queryCount, dataSize, pData, stride, flags);
1942 }
1943
1944 VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer)
1945 {
1946 TRACE("(VkDevice device = %p, const VkBufferCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkBuffer* pBuffer = %p)",
1947 device, pCreateInfo, pAllocator, pBuffer);
1948
1949 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1950 while(nextInfo)
1951 {
1952 switch(nextInfo->sType)
1953 {
1954 case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO:
1955 // Do nothing. Should be handled by vk::Buffer::Create().
1956 break;
1957 case VK_STRUCTURE_TYPE_MAX_ENUM:
1958 // dEQP tests that this value is ignored.
1959 break;
1960 default:
1961 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
1962 break;
1963 }
1964 nextInfo = nextInfo->pNext;
1965 }
1966
1967 return vk::Buffer::Create(pAllocator, pCreateInfo, pBuffer);
1968 }
1969
1970 VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator)
1971 {
1972 TRACE("(VkDevice device = %p, VkBuffer buffer = %p, const VkAllocationCallbacks* pAllocator = %p)",
1973 device, static_cast<void *>(buffer), pAllocator);
1974
1975 vk::destroy(buffer, pAllocator);
1976 }
1977
1978 VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferDeviceAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo)
1979 {
1980 TRACE("(VkDevice device = %p, const VkBufferDeviceAddressInfo* pInfo = %p)",
1981 device, pInfo);
1982
1983 // This function must return VkBufferDeviceAddressCreateInfoEXT::deviceAddress if provided
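	// SwiftShader only exposes the core/KHR buffer device address functionality; the assert
	// below checks that the EXT extension, which could supply such an address, is not enabled.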
1984 ASSERT(!vk::Cast(device)->hasExtension(VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME));
1985
1986 return vk::Cast(pInfo->buffer)->getOpaqueCaptureAddress();
1987 }
1988
1989 VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo)
1990 {
1991 TRACE("(VkDevice device = %p, const VkBufferDeviceAddressInfo* pInfo = %p)",
1992 device, pInfo);
1993
1994 return vk::Cast(pInfo->buffer)->getOpaqueCaptureAddress();
1995 }
1996
1997 VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddress(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo *pInfo)
1998 {
1999 TRACE("(VkDevice device = %p, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo = %p)",
2000 device, pInfo);
2001
2002 return vk::Cast(pInfo->memory)->getOpaqueCaptureAddress();
2003 }
2004
2005 VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkBufferView *pView)
2006 {
2007 TRACE("(VkDevice device = %p, const VkBufferViewCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkBufferView* pView = %p)",
2008 device, pCreateInfo, pAllocator, pView);
2009
2010 if(pCreateInfo->flags != 0)
2011 {
2012 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2013 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
2014 }
2015
2016 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2017 while(extInfo)
2018 {
2019 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2020 extInfo = extInfo->pNext;
2021 }
2022
2023 return vk::BufferView::Create(pAllocator, pCreateInfo, pView);
2024 }
2025
2026 VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator)
2027 {
2028 TRACE("(VkDevice device = %p, VkBufferView bufferView = %p, const VkAllocationCallbacks* pAllocator = %p)",
2029 device, static_cast<void *>(bufferView), pAllocator);
2030
2031 vk::destroy(bufferView, pAllocator);
2032 }
2033
2034 VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkImage *pImage)
2035 {
2036 TRACE("(VkDevice device = %p, const VkImageCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkImage* pImage = %p)",
2037 device, pCreateInfo, pAllocator, pImage);
2038
2039 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2040
2041 #ifdef __ANDROID__
2042 vk::BackingMemory backmem;
2043 bool swapchainImage = false;
2044 #endif
2045
2046 while(extensionCreateInfo)
2047 {
2048 // Casting to an int since some structures, such as VK_STRUCTURE_TYPE_SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID and
2049 // VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID, are not enumerated in the official Vulkan headers.
2050 switch((int)(extensionCreateInfo->sType))
2051 {
2052 #ifdef __ANDROID__
2053 case VK_STRUCTURE_TYPE_SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID:
2054 {
2055 const VkSwapchainImageCreateInfoANDROID *swapImageCreateInfo = reinterpret_cast<const VkSwapchainImageCreateInfoANDROID *>(extensionCreateInfo);
2056 backmem.androidUsage = swapImageCreateInfo->usage;
2057 }
2058 break;
2059 case VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID:
2060 {
2061 const VkNativeBufferANDROID *nativeBufferInfo = reinterpret_cast<const VkNativeBufferANDROID *>(extensionCreateInfo);
2062 backmem.nativeBufferInfo = *nativeBufferInfo;
2063 backmem.nativeBufferInfo.pNext = nullptr;
2064 swapchainImage = true;
2065 }
2066 break;
2067 case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID:
2068 break;
2069 case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID:
2070 // Do nothing. Should be handled by vk::Image::Create()
2071 break;
2072 #endif
2073 case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO:
2074 // Do nothing. Should be handled by vk::Image::Create()
2075 break;
2076 case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR:
2077 /* Do nothing. We don't actually need the swapchain handle yet; we'll do all the work in vkBindImageMemory2. */
2078 break;
2079 case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
2080 // Do nothing. This extension tells the driver which image formats will be used
2081 		// by the application. SwiftShader is unaffected by not having this information,
2082 // so we don't need to track the format list.
2083 break;
2084 case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
2085 {
2086 // SwiftShader does not use an image's usage info for non-debug purposes outside of
2087 // vkGetPhysicalDeviceImageFormatProperties2. This also applies to separate stencil usage.
2088 const VkImageStencilUsageCreateInfo *stencilUsageInfo = reinterpret_cast<const VkImageStencilUsageCreateInfo *>(extensionCreateInfo);
2089 (void)stencilUsageInfo->stencilUsage;
2090 }
2091 break;
2092 case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT:
2093 {
2094 // Explicitly ignored, since VK_EXT_image_drm_format_modifier is not supported
2095 ASSERT(!hasDeviceExtension(VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME));
2096 }
2097 break;
2098 case VK_STRUCTURE_TYPE_MAX_ENUM:
2099 // dEQP tests that this value is ignored.
2100 break;
2101 default:
2102 // "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extenions]"
2103 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
2104 break;
2105 }
2106
2107 extensionCreateInfo = extensionCreateInfo->pNext;
2108 }
2109
2110 VkResult result = vk::Image::Create(pAllocator, pCreateInfo, pImage, vk::Cast(device));
2111
2112 #ifdef __ANDROID__
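	// For images created through the Android native-buffer (swapchain) path, allocate and
	// bind backing device memory here, since the application never binds memory to these
	// images itself.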
2113 if(swapchainImage)
2114 {
2115 if(result != VK_SUCCESS)
2116 {
2117 return result;
2118 }
2119
2120 vk::Image *image = vk::Cast(*pImage);
2121 VkMemoryRequirements memRequirements = image->getMemoryRequirements();
2122
2123 VkMemoryAllocateInfo allocInfo = {};
2124 allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
2125 allocInfo.allocationSize = memRequirements.size;
2126 allocInfo.memoryTypeIndex = 0;
2127
2128 VkDeviceMemory devmem = { VK_NULL_HANDLE };
2129 result = vkAllocateMemory(device, &allocInfo, pAllocator, &devmem);
2130 if(result != VK_SUCCESS)
2131 {
2132 return result;
2133 }
2134
2135 vkBindImageMemory(device, *pImage, devmem, 0);
2136 backmem.externalMemory = true;
2137
2138 image->setBackingMemory(backmem);
2139 }
2140 #endif
2141
2142 return result;
2143 }
2144
2145 VKAPI_ATTR void VKAPI_CALL vkDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator)
2146 {
2147 TRACE("(VkDevice device = %p, VkImage image = %p, const VkAllocationCallbacks* pAllocator = %p)",
2148 device, static_cast<void *>(image), pAllocator);
2149
2150 #ifdef __ANDROID__
2151 vk::Image *img = vk::Cast(image);
2152 if(img && img->hasExternalMemory())
2153 {
2154 vk::destroy(img->getExternalMemory(), pAllocator);
2155 }
2156 #endif
2157
2158 vk::destroy(image, pAllocator);
2159 }
2160
2161 VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource, VkSubresourceLayout *pLayout)
2162 {
2163 TRACE("(VkDevice device = %p, VkImage image = %p, const VkImageSubresource* pSubresource = %p, VkSubresourceLayout* pLayout = %p)",
2164 device, static_cast<void *>(image), pSubresource, pLayout);
2165
2166 vk::Cast(image)->getSubresourceLayout(pSubresource, pLayout);
2167 }
2168
2169 VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2EXT(VkDevice device, VkImage image, const VkImageSubresource2KHR *pSubresource, VkSubresourceLayout2KHR *pLayout)
2170 {
2171 TRACE("(VkDevice device = %p, VkImage image = %p, const VkImageSubresource2KHR* pSubresource = %p, VkSubresourceLayout2KHR* pLayout = %p)",
2172 device, static_cast<void *>(image), pSubresource, pLayout);
2173
2174 	// If the image's tiling is OPTIMAL this query is not strictly needed, but it is harmless,
2175 	// especially since SwiftShader treats LINEAR and OPTIMAL tiling identically.
2176 vk::Cast(image)->getSubresourceLayout(&pSubresource->imageSubresource, &pLayout->subresourceLayout);
2177
2178 VkBaseOutStructure *extInfo = reinterpret_cast<VkBaseOutStructure *>(pLayout->pNext);
2179 while(extInfo)
2180 {
2181 switch(extInfo->sType)
2182 {
2183 case VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT:
2184 {
2185 // Since the subresource layout is filled above already, get the size out of
2186 // that.
2187 VkSubresourceHostMemcpySizeEXT *hostMemcpySize = reinterpret_cast<VkSubresourceHostMemcpySizeEXT *>(extInfo);
2188 hostMemcpySize->size = pLayout->subresourceLayout.size;
2189 break;
2190 }
2191 default:
2192 UNSUPPORTED("pLayout->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2193 break;
2194 }
2195
2196 extInfo = extInfo->pNext;
2197 }
2198 }
2199
2200 VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkImageView *pView)
2201 {
2202 TRACE("(VkDevice device = %p, const VkImageViewCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkImageView* pView = %p)",
2203 device, pCreateInfo, pAllocator, pView);
2204
2205 if(pCreateInfo->flags != 0)
2206 {
2207 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
2208 }
2209
2210 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2211 const vk::SamplerYcbcrConversion *ycbcrConversion = nullptr;
2212
2213 while(extensionCreateInfo)
2214 {
2215 switch(extensionCreateInfo->sType)
2216 {
2217 case VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO:
2218 {
2219 const VkImageViewUsageCreateInfo *multiviewCreateInfo = reinterpret_cast<const VkImageViewUsageCreateInfo *>(extensionCreateInfo);
2220 ASSERT(!(~vk::Cast(pCreateInfo->image)->getUsage() & multiviewCreateInfo->usage));
2221 }
2222 break;
2223 case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
2224 {
2225 const VkSamplerYcbcrConversionInfo *samplerYcbcrConversionInfo = reinterpret_cast<const VkSamplerYcbcrConversionInfo *>(extensionCreateInfo);
2226 ycbcrConversion = vk::Cast(samplerYcbcrConversionInfo->conversion);
2227
2228 if(ycbcrConversion)
2229 {
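				// The Vulkan spec requires identity (or equivalent) component swizzles when a
				// sampler Y'CbCr conversion is attached to the image view.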
2230 ASSERT((pCreateInfo->components.r == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.r == VK_COMPONENT_SWIZZLE_R) &&
2231 (pCreateInfo->components.g == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.g == VK_COMPONENT_SWIZZLE_G) &&
2232 (pCreateInfo->components.b == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.b == VK_COMPONENT_SWIZZLE_B) &&
2233 (pCreateInfo->components.a == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.a == VK_COMPONENT_SWIZZLE_A));
2234 }
2235 }
2236 break;
2237 case VK_STRUCTURE_TYPE_MAX_ENUM:
2238 // dEQP tests that this value is ignored.
2239 break;
2240 case VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT:
2241 // TODO(b/218318109): Part of the VK_EXT_image_view_min_lod extension, which we don't support.
2242 // Remove when https://gitlab.khronos.org/Tracker/vk-gl-cts/-/issues/3094#note_348979 has been fixed.
2243 break;
2244 default:
2245 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
2246 break;
2247 }
2248
2249 extensionCreateInfo = extensionCreateInfo->pNext;
2250 }
2251
2252 VkResult result = vk::ImageView::Create(pAllocator, pCreateInfo, pView, ycbcrConversion);
2253 if(result == VK_SUCCESS)
2254 {
2255 vk::Cast(device)->registerImageView(vk::Cast(*pView));
2256 }
2257
2258 return result;
2259 }
2260
2261 VKAPI_ATTR void VKAPI_CALL vkDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator)
2262 {
2263 TRACE("(VkDevice device = %p, VkImageView imageView = %p, const VkAllocationCallbacks* pAllocator = %p)",
2264 device, static_cast<void *>(imageView), pAllocator);
2265
2266 vk::Cast(device)->unregisterImageView(vk::Cast(imageView));
2267 vk::destroy(imageView, pAllocator);
2268 }
2269
2270 VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule)
2271 {
2272 TRACE("(VkDevice device = %p, const VkShaderModuleCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkShaderModule* pShaderModule = %p)",
2273 device, pCreateInfo, pAllocator, pShaderModule);
2274
2275 if(pCreateInfo->flags != 0)
2276 {
2277 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2278 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
2279 }
2280
2281 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2282 while(nextInfo)
2283 {
2284 switch(nextInfo->sType)
2285 {
2286 case VK_STRUCTURE_TYPE_MAX_ENUM:
2287 // dEQP tests that this value is ignored.
2288 break;
2289 default:
2290 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2291 break;
2292 }
2293 nextInfo = nextInfo->pNext;
2294 }
2295
2296 return vk::ShaderModule::Create(pAllocator, pCreateInfo, pShaderModule);
2297 }
2298
2299 VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks *pAllocator)
2300 {
2301 TRACE("(VkDevice device = %p, VkShaderModule shaderModule = %p, const VkAllocationCallbacks* pAllocator = %p)",
2302 device, static_cast<void *>(shaderModule), pAllocator);
2303
2304 vk::destroy(shaderModule, pAllocator);
2305 }
2306
2307 VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPipelineCache *pPipelineCache)
2308 {
2309 TRACE("(VkDevice device = %p, const VkPipelineCacheCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipelineCache* pPipelineCache = %p)",
2310 device, pCreateInfo, pAllocator, pPipelineCache);
2311
2312 if(pCreateInfo->flags != 0 && pCreateInfo->flags != VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT)
2313 {
2314 // Flags must be 0 or VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT.
2315 // VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT: When set, the implementation may skip any
2316 // unnecessary processing needed to support simultaneous modification from multiple threads where allowed.
2317 // TODO(b/246369329): Optimize PipelineCache objects when VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT is used.
2318 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
2319 }
2320
2321 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2322 while(extInfo)
2323 {
2324 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2325 extInfo = extInfo->pNext;
2326 }
2327
2328 return vk::PipelineCache::Create(pAllocator, pCreateInfo, pPipelineCache);
2329 }
2330
2331 VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks *pAllocator)
2332 {
2333 TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, const VkAllocationCallbacks* pAllocator = %p)",
2334 device, static_cast<void *>(pipelineCache), pAllocator);
2335
2336 vk::destroy(pipelineCache, pAllocator);
2337 }
2338
2339 VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t *pDataSize, void *pData)
2340 {
2341 TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, size_t* pDataSize = %p, void* pData = %p)",
2342 device, static_cast<void *>(pipelineCache), pDataSize, pData);
2343
2344 return vk::Cast(pipelineCache)->getData(pDataSize, pData);
2345 }
2346
2347 VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache *pSrcCaches)
2348 {
2349 TRACE("(VkDevice device = %p, VkPipelineCache dstCache = %p, uint32_t srcCacheCount = %d, const VkPipelineCache* pSrcCaches = %p)",
2350 device, static_cast<void *>(dstCache), int(srcCacheCount), pSrcCaches);
2351
2352 return vk::Cast(dstCache)->merge(srcCacheCount, pSrcCaches);
2353 }
2354
2355 VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines)
2356 {
2357 TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, uint32_t createInfoCount = %d, const VkGraphicsPipelineCreateInfo* pCreateInfos = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipeline* pPipelines = %p)",
2358 device, static_cast<void *>(pipelineCache), int(createInfoCount), pCreateInfos, pAllocator, pPipelines);
2359
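	// Note: zero-filling pPipelines is equivalent to pre-setting every entry to VK_NULL_HANDLE,
	// so any pipeline that fails creation below is reported as a null handle, per the spec
	// excerpt quoted further down.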
2360 memset(pPipelines, 0, sizeof(void *) * createInfoCount);
2361
2362 VkResult errorResult = VK_SUCCESS;
2363 for(uint32_t i = 0; i < createInfoCount; i++)
2364 {
2365 VkResult result = vk::GraphicsPipeline::Create(pAllocator, &pCreateInfos[i], &pPipelines[i], vk::Cast(device));
2366
2367 if(result == VK_SUCCESS)
2368 {
2369 result = static_cast<vk::GraphicsPipeline *>(vk::Cast(pPipelines[i]))->compileShaders(pAllocator, &pCreateInfos[i], vk::Cast(pipelineCache));
2370 if(result != VK_SUCCESS)
2371 {
2372 vk::destroy(pPipelines[i], pAllocator);
2373 }
2374 }
2375
2376 if(result != VK_SUCCESS)
2377 {
2378 // According to the Vulkan spec, section 9.4. Multiple Pipeline Creation
2379 // "When an application attempts to create many pipelines in a single command,
2380 // it is possible that some subset may fail creation. In that case, the
2381 // corresponding entries in the pPipelines output array will be filled with
2382 // VK_NULL_HANDLE values. If any pipeline fails creation (for example, due to
2383 // out of memory errors), the vkCreate*Pipelines commands will return an
2384 // error code. The implementation will attempt to create all pipelines, and
2385 // only return VK_NULL_HANDLE values for those that actually failed."
2386 pPipelines[i] = VK_NULL_HANDLE;
2387 errorResult = result;
2388
2389 // VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT specifies that control
2390 // will be returned to the application on failure of the corresponding pipeline
2391 // rather than continuing to create additional pipelines.
2392 if(pCreateInfos[i].flags & VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT)
2393 {
2394 return errorResult;
2395 }
2396 }
2397 }
2398
2399 return errorResult;
2400 }
2401
2402 VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines)
2403 {
2404 TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, uint32_t createInfoCount = %d, const VkComputePipelineCreateInfo* pCreateInfos = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipeline* pPipelines = %p)",
2405 device, static_cast<void *>(pipelineCache), int(createInfoCount), pCreateInfos, pAllocator, pPipelines);
2406
2407 memset(pPipelines, 0, sizeof(void *) * createInfoCount);
2408
2409 VkResult errorResult = VK_SUCCESS;
2410 for(uint32_t i = 0; i < createInfoCount; i++)
2411 {
2412 VkResult result = vk::ComputePipeline::Create(pAllocator, &pCreateInfos[i], &pPipelines[i], vk::Cast(device));
2413
2414 if(result == VK_SUCCESS)
2415 {
2416 result = static_cast<vk::ComputePipeline *>(vk::Cast(pPipelines[i]))->compileShaders(pAllocator, &pCreateInfos[i], vk::Cast(pipelineCache));
2417 if(result != VK_SUCCESS)
2418 {
2419 vk::destroy(pPipelines[i], pAllocator);
2420 }
2421 }
2422
2423 if(result != VK_SUCCESS)
2424 {
2425 // According to the Vulkan spec, section 9.4. Multiple Pipeline Creation
2426 // "When an application attempts to create many pipelines in a single command,
2427 // it is possible that some subset may fail creation. In that case, the
2428 // corresponding entries in the pPipelines output array will be filled with
2429 // VK_NULL_HANDLE values. If any pipeline fails creation (for example, due to
2430 // out of memory errors), the vkCreate*Pipelines commands will return an
2431 // error code. The implementation will attempt to create all pipelines, and
2432 // only return VK_NULL_HANDLE values for those that actually failed."
2433 pPipelines[i] = VK_NULL_HANDLE;
2434 errorResult = result;
2435
2436 // VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT specifies that control
2437 // will be returned to the application on failure of the corresponding pipeline
2438 // rather than continuing to create additional pipelines.
2439 if(pCreateInfos[i].flags & VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT)
2440 {
2441 return errorResult;
2442 }
2443 }
2444 }
2445
2446 return errorResult;
2447 }
2448
2449 VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator)
2450 {
2451 TRACE("(VkDevice device = %p, VkPipeline pipeline = %p, const VkAllocationCallbacks* pAllocator = %p)",
2452 device, static_cast<void *>(pipeline), pAllocator);
2453
2454 vk::destroy(pipeline, pAllocator);
2455 }
2456
2457 VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout)
2458 {
2459 TRACE("(VkDevice device = %p, const VkPipelineLayoutCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipelineLayout* pPipelineLayout = %p)",
2460 device, pCreateInfo, pAllocator, pPipelineLayout);
2461
2462 if(pCreateInfo->flags != 0 && pCreateInfo->flags != VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT)
2463 {
2464 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
2465 }
2466
2467 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2468 while(nextInfo)
2469 {
2470 switch(nextInfo->sType)
2471 {
2472 case VK_STRUCTURE_TYPE_MAX_ENUM:
2473 // dEQP tests that this value is ignored.
2474 break;
2475 default:
2476 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2477 break;
2478 }
2479 nextInfo = nextInfo->pNext;
2480 }
2481
2482 return vk::PipelineLayout::Create(pAllocator, pCreateInfo, pPipelineLayout);
2483 }
2484
2485 VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks *pAllocator)
2486 {
2487 TRACE("(VkDevice device = %p, VkPipelineLayout pipelineLayout = %p, const VkAllocationCallbacks* pAllocator = %p)",
2488 device, static_cast<void *>(pipelineLayout), pAllocator);
2489
2490 vk::release(pipelineLayout, pAllocator);
2491 }
2492
2493 VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSampler *pSampler)
2494 {
2495 TRACE("(VkDevice device = %p, const VkSamplerCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSampler* pSampler = %p)",
2496 device, pCreateInfo, pAllocator, pSampler);
2497
2498 if(pCreateInfo->flags != 0)
2499 {
2500 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
2501 }
2502
2503 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2504 const vk::SamplerYcbcrConversion *ycbcrConversion = nullptr;
2505 VkClearColorValue borderColor = {};
2506
2507 while(extensionCreateInfo)
2508 {
2509 switch(static_cast<long>(extensionCreateInfo->sType))
2510 {
2511 case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
2512 {
2513 const VkSamplerYcbcrConversionInfo *samplerYcbcrConversionInfo =
2514 reinterpret_cast<const VkSamplerYcbcrConversionInfo *>(extensionCreateInfo);
2515 ycbcrConversion = vk::Cast(samplerYcbcrConversionInfo->conversion);
2516 }
2517 break;
2518 case VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT:
2519 {
2520 const VkSamplerCustomBorderColorCreateInfoEXT *borderColorInfo =
2521 reinterpret_cast<const VkSamplerCustomBorderColorCreateInfoEXT *>(extensionCreateInfo);
2522
2523 borderColor = borderColorInfo->customBorderColor;
2524 }
2525 break;
2526 default:
2527 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
2528 break;
2529 }
2530
2531 extensionCreateInfo = extensionCreateInfo->pNext;
2532 }
2533
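	// The device assigns an integer ID for this sampler state via indexSampler(); the matching
	// removeSampler() call (below, on creation failure, and in vkDestroySampler) releases it again.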
2534 vk::SamplerState samplerState(pCreateInfo, ycbcrConversion, borderColor);
2535 uint32_t samplerID = vk::Cast(device)->indexSampler(samplerState);
2536
2537 VkResult result = vk::Sampler::Create(pAllocator, pCreateInfo, pSampler, samplerState, samplerID);
2538
2539 if(*pSampler == VK_NULL_HANDLE)
2540 {
2541 ASSERT(result != VK_SUCCESS);
2542 vk::Cast(device)->removeSampler(samplerState);
2543 }
2544
2545 return result;
2546 }
2547
2548 VKAPI_ATTR void VKAPI_CALL vkDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator)
2549 {
2550 TRACE("(VkDevice device = %p, VkSampler sampler = %p, const VkAllocationCallbacks* pAllocator = %p)",
2551 device, static_cast<void *>(sampler), pAllocator);
2552
2553 if(sampler != VK_NULL_HANDLE)
2554 {
2555 vk::Cast(device)->removeSampler(*vk::Cast(sampler));
2556
2557 vk::destroy(sampler, pAllocator);
2558 }
2559 }
2560
2561 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorSetLayout *pSetLayout)
2562 {
2563 TRACE("(VkDevice device = %p, const VkDescriptorSetLayoutCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDescriptorSetLayout* pSetLayout = %p)",
2564 device, pCreateInfo, pAllocator, pSetLayout);
2565
2566 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2567
2568 while(extensionCreateInfo)
2569 {
2570 switch(extensionCreateInfo->sType)
2571 {
2572 case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT:
2573 ASSERT(!vk::Cast(device)->hasExtension(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME));
2574 break;
2575 default:
2576 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
2577 break;
2578 }
2579
2580 extensionCreateInfo = extensionCreateInfo->pNext;
2581 }
2582
2583 return vk::DescriptorSetLayout::Create(pAllocator, pCreateInfo, pSetLayout);
2584 }
2585
2586 VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks *pAllocator)
2587 {
2588 TRACE("(VkDevice device = %p, VkDescriptorSetLayout descriptorSetLayout = %p, const VkAllocationCallbacks* pAllocator = %p)",
2589 device, static_cast<void *>(descriptorSetLayout), pAllocator);
2590
2591 vk::destroy(descriptorSetLayout, pAllocator);
2592 }
2593
2594 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool)
2595 {
2596 TRACE("(VkDevice device = %p, const VkDescriptorPoolCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDescriptorPool* pDescriptorPool = %p)",
2597 device, pCreateInfo, pAllocator, pDescriptorPool);
2598
2599 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2600 while(extInfo)
2601 {
2602 switch(extInfo->sType)
2603 {
2604 case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO:
2605 break;
2606 default:
2607 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2608 break;
2609 }
2610 extInfo = extInfo->pNext;
2611 }
2612
2613 return vk::DescriptorPool::Create(pAllocator, pCreateInfo, pDescriptorPool);
2614 }
2615
2616 VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks *pAllocator)
2617 {
2618 TRACE("(VkDevice device = %p, VkDescriptorPool descriptorPool = %p, const VkAllocationCallbacks* pAllocator = %p)",
2619 device, static_cast<void *>(descriptorPool), pAllocator);
2620
2621 vk::destroy(descriptorPool, pAllocator);
2622 }
2623
2624 VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags)
2625 {
2626 TRACE("(VkDevice device = %p, VkDescriptorPool descriptorPool = %p, VkDescriptorPoolResetFlags flags = 0x%08X)",
2627 device, static_cast<void *>(descriptorPool), int(flags));
2628
2629 if(flags != 0)
2630 {
2631 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2632 UNSUPPORTED("flags 0x%08X", int(flags));
2633 }
2634
2635 return vk::Cast(descriptorPool)->reset();
2636 }
2637
2638 VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo, VkDescriptorSet *pDescriptorSets)
2639 {
2640 TRACE("(VkDevice device = %p, const VkDescriptorSetAllocateInfo* pAllocateInfo = %p, VkDescriptorSet* pDescriptorSets = %p)",
2641 device, pAllocateInfo, pDescriptorSets);
2642
2643 const VkDescriptorSetVariableDescriptorCountAllocateInfo *variableDescriptorCountAllocateInfo = nullptr;
2644
2645 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pAllocateInfo->pNext);
2646 while(extInfo)
2647 {
2648 switch(extInfo->sType)
2649 {
2650 case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO:
2651 variableDescriptorCountAllocateInfo = reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfo *>(extInfo);
2652 break;
2653 default:
2654 UNSUPPORTED("pAllocateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2655 break;
2656 }
2657 extInfo = extInfo->pNext;
2658 }
2659
2660 return vk::Cast(pAllocateInfo->descriptorPool)->allocateSets(pAllocateInfo->descriptorSetCount, pAllocateInfo->pSetLayouts, pDescriptorSets, variableDescriptorCountAllocateInfo);
2661 }
2662
2663 VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets)
2664 {
2665 TRACE("(VkDevice device = %p, VkDescriptorPool descriptorPool = %p, uint32_t descriptorSetCount = %d, const VkDescriptorSet* pDescriptorSets = %p)",
2666 device, static_cast<void *>(descriptorPool), descriptorSetCount, pDescriptorSets);
2667
2668 vk::Cast(descriptorPool)->freeSets(descriptorSetCount, pDescriptorSets);
2669
2670 return VK_SUCCESS;
2671 }
2672
2673 VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet *pDescriptorCopies)
2674 {
2675 TRACE("(VkDevice device = %p, uint32_t descriptorWriteCount = %d, const VkWriteDescriptorSet* pDescriptorWrites = %p, uint32_t descriptorCopyCount = %d, const VkCopyDescriptorSet* pDescriptorCopies = %p)",
2676 device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
2677
2678 vk::Cast(device)->updateDescriptorSets(descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
2679 }
2680
2681 VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer)
2682 {
2683 TRACE("(VkDevice device = %p, const VkFramebufferCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkFramebuffer* pFramebuffer = %p)",
2684 device, pCreateInfo, pAllocator, pFramebuffer);
2685
2686 return vk::Framebuffer::Create(pAllocator, pCreateInfo, pFramebuffer);
2687 }
2688
2689 VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks *pAllocator)
2690 {
2691 TRACE("(VkDevice device = %p, VkFramebuffer framebuffer = %p, const VkAllocationCallbacks* pAllocator = %p)",
2692 device, static_cast<void *>(framebuffer), pAllocator);
2693
2694 vk::destroy(framebuffer, pAllocator);
2695 }
2696
2697 VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass)
2698 {
2699 TRACE("(VkDevice device = %p, const VkRenderPassCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkRenderPass* pRenderPass = %p)",
2700 device, pCreateInfo, pAllocator, pRenderPass);
2701
2702 if(pCreateInfo->flags != 0)
2703 {
2704 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2705 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
2706 }
2707
2708 ValidateRenderPassPNextChain(device, pCreateInfo);
2709
2710 return vk::RenderPass::Create(pAllocator, pCreateInfo, pRenderPass);
2711 }
2712
2713 VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass)
2714 {
2715 TRACE("(VkDevice device = %p, const VkRenderPassCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkRenderPass* pRenderPass = %p)",
2716 device, pCreateInfo, pAllocator, pRenderPass);
2717
2718 if(pCreateInfo->flags != 0)
2719 {
2720 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2721 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
2722 }
2723
2724 ValidateRenderPassPNextChain(device, pCreateInfo);
2725
2726 return vk::RenderPass::Create(pAllocator, pCreateInfo, pRenderPass);
2727 }
2728
2729 VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator)
2730 {
2731 TRACE("(VkDevice device = %p, VkRenderPass renderPass = %p, const VkAllocationCallbacks* pAllocator = %p)",
2732 device, static_cast<void *>(renderPass), pAllocator);
2733
2734 vk::destroy(renderPass, pAllocator);
2735 }
2736
2737 VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D *pGranularity)
2738 {
2739 TRACE("(VkDevice device = %p, VkRenderPass renderPass = %p, VkExtent2D* pGranularity = %p)",
2740 device, static_cast<void *>(renderPass), pGranularity);
2741
2742 vk::Cast(renderPass)->getRenderAreaGranularity(pGranularity);
2743 }
2744
2745 VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool)
2746 {
2747 TRACE("(VkDevice device = %p, const VkCommandPoolCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkCommandPool* pCommandPool = %p)",
2748 device, pCreateInfo, pAllocator, pCommandPool);
2749
2750 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2751 while(nextInfo)
2752 {
2753 switch(nextInfo->sType)
2754 {
2755 case VK_STRUCTURE_TYPE_MAX_ENUM:
2756 // dEQP tests that this value is ignored.
2757 break;
2758 default:
2759 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2760 break;
2761 }
2762 nextInfo = nextInfo->pNext;
2763 }
2764
2765 return vk::CommandPool::Create(pAllocator, pCreateInfo, pCommandPool);
2766 }
2767
2768 VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator)
2769 {
2770 TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, const VkAllocationCallbacks* pAllocator = %p)",
2771 device, static_cast<void *>(commandPool), pAllocator);
2772
2773 vk::destroy(commandPool, pAllocator);
2774 }
2775
2776 VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags)
2777 {
2778 TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, VkCommandPoolResetFlags flags = %d)",
2779 device, static_cast<void *>(commandPool), int(flags));
2780
2781 return vk::Cast(commandPool)->reset(flags);
2782 }
2783
2784 VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo, VkCommandBuffer *pCommandBuffers)
2785 {
2786 TRACE("(VkDevice device = %p, const VkCommandBufferAllocateInfo* pAllocateInfo = %p, VkCommandBuffer* pCommandBuffers = %p)",
2787 device, pAllocateInfo, pCommandBuffers);
2788
2789 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pAllocateInfo->pNext);
2790 while(nextInfo)
2791 {
2792 switch(nextInfo->sType)
2793 {
2794 case VK_STRUCTURE_TYPE_MAX_ENUM:
2795 // dEQP tests that this value is ignored.
2796 break;
2797 default:
2798 UNSUPPORTED("pAllocateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2799 break;
2800 }
2801 nextInfo = nextInfo->pNext;
2802 }
2803
2804 return vk::Cast(pAllocateInfo->commandPool)->allocateCommandBuffers(vk::Cast(device), pAllocateInfo->level, pAllocateInfo->commandBufferCount, pCommandBuffers);
2805 }
2806
2807 VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers)
2808 {
2809 TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, uint32_t commandBufferCount = %d, const VkCommandBuffer* pCommandBuffers = %p)",
2810 device, static_cast<void *>(commandPool), int(commandBufferCount), pCommandBuffers);
2811
2812 vk::Cast(commandPool)->freeCommandBuffers(commandBufferCount, pCommandBuffers);
2813 }
2814
2815 VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo)
2816 {
2817 TRACE("(VkCommandBuffer commandBuffer = %p, const VkCommandBufferBeginInfo* pBeginInfo = %p)",
2818 commandBuffer, pBeginInfo);
2819
2820 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pBeginInfo->pNext);
2821 while(nextInfo)
2822 {
2823 switch(nextInfo->sType)
2824 {
2825 case VK_STRUCTURE_TYPE_MAX_ENUM:
2826 // dEQP tests that this value is ignored.
2827 break;
2828 default:
2829 UNSUPPORTED("pBeginInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2830 break;
2831 }
2832 nextInfo = nextInfo->pNext;
2833 }
2834
2835 return vk::Cast(commandBuffer)->begin(pBeginInfo->flags, pBeginInfo->pInheritanceInfo);
2836 }
2837
2838 VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer(VkCommandBuffer commandBuffer)
2839 {
2840 TRACE("(VkCommandBuffer commandBuffer = %p)", commandBuffer);
2841
2842 return vk::Cast(commandBuffer)->end();
2843 }
2844
2845 VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags)
2846 {
2847 TRACE("(VkCommandBuffer commandBuffer = %p, VkCommandBufferResetFlags flags = %d)", commandBuffer, int(flags));
2848
2849 return vk::Cast(commandBuffer)->reset(flags);
2850 }
2851
2852 VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline)
2853 {
2854 TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineBindPoint pipelineBindPoint = %d, VkPipeline pipeline = %p)",
2855 commandBuffer, int(pipelineBindPoint), static_cast<void *>(pipeline));
2856
2857 vk::Cast(commandBuffer)->bindPipeline(pipelineBindPoint, vk::Cast(pipeline));
2858 }
2859
2860 VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport *pViewports)
2861 {
2862 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstViewport = %d, uint32_t viewportCount = %d, const VkViewport* pViewports = %p)",
2863 commandBuffer, int(firstViewport), int(viewportCount), pViewports);
2864
2865 vk::Cast(commandBuffer)->setViewport(firstViewport, viewportCount, pViewports);
2866 }
2867
2868 VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D *pScissors)
2869 {
2870 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstScissor = %d, uint32_t scissorCount = %d, const VkRect2D* pScissors = %p)",
2871 commandBuffer, int(firstScissor), int(scissorCount), pScissors);
2872
2873 vk::Cast(commandBuffer)->setScissor(firstScissor, scissorCount, pScissors);
2874 }
2875
2876 VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
2877 {
2878 TRACE("(VkCommandBuffer commandBuffer = %p, float lineWidth = %f)", commandBuffer, lineWidth);
2879
2880 vk::Cast(commandBuffer)->setLineWidth(lineWidth);
2881 }
2882
2883 VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor)
2884 {
2885 TRACE("(VkCommandBuffer commandBuffer = %p, float depthBiasConstantFactor = %f, float depthBiasClamp = %f, float depthBiasSlopeFactor = %f)",
2886 commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
2887
2888 vk::Cast(commandBuffer)->setDepthBias(depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
2889 }
2890
2891 VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4])
2892 {
2893 TRACE("(VkCommandBuffer commandBuffer = %p, const float blendConstants[4] = {%f, %f, %f, %f})",
2894 commandBuffer, blendConstants[0], blendConstants[1], blendConstants[2], blendConstants[3]);
2895
2896 vk::Cast(commandBuffer)->setBlendConstants(blendConstants);
2897 }
2898
2899 VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds)
2900 {
2901 TRACE("(VkCommandBuffer commandBuffer = %p, float minDepthBounds = %f, float maxDepthBounds = %f)",
2902 commandBuffer, minDepthBounds, maxDepthBounds);
2903
2904 vk::Cast(commandBuffer)->setDepthBounds(minDepthBounds, maxDepthBounds);
2905 }
2906
2907 VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask)
2908 {
2909 TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, uint32_t compareMask = %d)",
2910 commandBuffer, int(faceMask), int(compareMask));
2911
2912 vk::Cast(commandBuffer)->setStencilCompareMask(faceMask, compareMask);
2913 }
2914
2915 VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask)
2916 {
2917 TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, uint32_t writeMask = %d)",
2918 commandBuffer, int(faceMask), int(writeMask));
2919
2920 vk::Cast(commandBuffer)->setStencilWriteMask(faceMask, writeMask);
2921 }
2922
2923 VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference)
2924 {
2925 TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, uint32_t reference = %d)",
2926 commandBuffer, int(faceMask), int(reference));
2927
2928 vk::Cast(commandBuffer)->setStencilReference(faceMask, reference);
2929 }
2930
2931 VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t *pDynamicOffsets)
2932 {
2933 TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineBindPoint pipelineBindPoint = %d, VkPipelineLayout layout = %p, uint32_t firstSet = %d, uint32_t descriptorSetCount = %d, const VkDescriptorSet* pDescriptorSets = %p, uint32_t dynamicOffsetCount = %d, const uint32_t* pDynamicOffsets = %p)",
2934 commandBuffer, int(pipelineBindPoint), static_cast<void *>(layout), int(firstSet), int(descriptorSetCount), pDescriptorSets, int(dynamicOffsetCount), pDynamicOffsets);
2935
2936 vk::Cast(commandBuffer)->bindDescriptorSets(pipelineBindPoint, vk::Cast(layout), firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
2937 }
2938
2939 VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType)
2940 {
2941 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, VkIndexType indexType = %d)",
2942 commandBuffer, static_cast<void *>(buffer), int(offset), int(indexType));
2943
2944 vk::Cast(commandBuffer)->bindIndexBuffer(vk::Cast(buffer), offset, indexType);
2945 }
2946
2947 VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer *pBuffers, const VkDeviceSize *pOffsets)
2948 {
2949 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstBinding = %d, uint32_t bindingCount = %d, const VkBuffer* pBuffers = %p, const VkDeviceSize* pOffsets = %p)",
2950 commandBuffer, int(firstBinding), int(bindingCount), pBuffers, pOffsets);
2951
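	// The original (non-"2") entry point carries no per-binding sizes or strides, so null is forwarded for both.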
2952 vk::Cast(commandBuffer)->bindVertexBuffers(firstBinding, bindingCount, pBuffers, pOffsets, nullptr, nullptr);
2953 }
2954
2955 VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer *pBuffers, const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes, const VkDeviceSize *pStrides)
2956 {
2957 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstBinding = %d, uint32_t bindingCount = %d, const VkBuffer* pBuffers = %p, const VkDeviceSize* pOffsets = %p, const VkDeviceSize *pSizes = %p, const VkDeviceSize *pStrides = %p)",
2958 commandBuffer, int(firstBinding), int(bindingCount), pBuffers, pOffsets, pSizes, pStrides);
2959
2960 vk::Cast(commandBuffer)->bindVertexBuffers(firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides);
2961 }
2962
2963 VKAPI_ATTR void VKAPI_CALL vkCmdSetCullMode(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode)
2964 {
2965 TRACE("(VkCommandBuffer commandBuffer = %p, VkCullModeFlags cullMode = %d)",
2966 commandBuffer, int(cullMode));
2967
2968 vk::Cast(commandBuffer)->setCullMode(cullMode);
2969 }
2970
2971 VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnable(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable)
2972 {
2973 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 depthBoundsTestEnable = %d)",
2974 commandBuffer, int(depthBoundsTestEnable));
2975
2976 vk::Cast(commandBuffer)->setDepthBoundsTestEnable(depthBoundsTestEnable);
2977 }
2978
2979 VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOp(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp)
2980 {
2981 TRACE("(VkCommandBuffer commandBuffer = %p, VkCompareOp depthCompareOp = %d)",
2982 commandBuffer, int(depthCompareOp));
2983
2984 vk::Cast(commandBuffer)->setDepthCompareOp(depthCompareOp);
2985 }
2986
2987 VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnable(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable)
2988 {
2989 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 depthTestEnable = %d)",
2990 commandBuffer, int(depthTestEnable));
2991
2992 vk::Cast(commandBuffer)->setDepthTestEnable(depthTestEnable);
2993 }
2994
2995 VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnable(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable)
2996 {
2997 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 depthWriteEnable = %d)",
2998 commandBuffer, int(depthWriteEnable));
2999
3000 vk::Cast(commandBuffer)->setDepthWriteEnable(depthWriteEnable);
3001 }
3002
3003 VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFace(VkCommandBuffer commandBuffer, VkFrontFace frontFace)
3004 {
3005 TRACE("(VkCommandBuffer commandBuffer = %p, VkFrontFace frontFace = %d)",
3006 commandBuffer, int(frontFace));
3007
3008 vk::Cast(commandBuffer)->setFrontFace(frontFace);
3009 }
3010
3011 VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopology(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology)
3012 {
3013 TRACE("(VkCommandBuffer commandBuffer = %p, VkPrimitiveTopology primitiveTopology = %d)",
3014 commandBuffer, int(primitiveTopology));
3015
3016 vk::Cast(commandBuffer)->setPrimitiveTopology(primitiveTopology);
3017 }
3018
3019 VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCount(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D *pScissors)
3020 {
3021 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t scissorCount = %d, const VkRect2D *pScissors = %p)",
3022 commandBuffer, scissorCount, pScissors);
3023
3024 vk::Cast(commandBuffer)->setScissorWithCount(scissorCount, pScissors);
3025 }
3026
3027 VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOp(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp)
3028 {
3029 TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, VkStencilOp failOp = %d, VkStencilOp passOp = %d, VkStencilOp depthFailOp = %d, VkCompareOp compareOp = %d)",
3030 commandBuffer, int(faceMask), int(failOp), int(passOp), int(depthFailOp), int(compareOp));
3031
3032 vk::Cast(commandBuffer)->setStencilOp(faceMask, failOp, passOp, depthFailOp, compareOp);
3033 }
3034
3035 VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilTestEnable(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable)
3036 {
3037 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 stencilTestEnable = %d)",
3038 commandBuffer, int(stencilTestEnable));
3039
3040 vk::Cast(commandBuffer)->setStencilTestEnable(stencilTestEnable);
3041 }
3042
3043 VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCount(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport *pViewports)
3044 {
3045 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t viewportCount = %d, const VkViewport *pViewports = %p)",
3046 commandBuffer, viewportCount, pViewports);
3047
3048 vk::Cast(commandBuffer)->setViewportWithCount(viewportCount, pViewports);
3049 }
3050
3051 VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnable(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable)
3052 {
3053 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 rasterizerDiscardEnable = %d)",
3054 commandBuffer, rasterizerDiscardEnable);
3055
3056 vk::Cast(commandBuffer)->setRasterizerDiscardEnable(rasterizerDiscardEnable);
3057 }
3058
3059 VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnable(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable)
3060 {
3061 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 depthBiasEnable = %d)",
3062 commandBuffer, depthBiasEnable);
3063
3064 vk::Cast(commandBuffer)->setDepthBiasEnable(depthBiasEnable);
3065 }
3066
3067 VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnable(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable)
3068 {
3069 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 primitiveRestartEnable = %d)",
3070 commandBuffer, primitiveRestartEnable);
3071
3072 vk::Cast(commandBuffer)->setPrimitiveRestartEnable(primitiveRestartEnable);
3073 }
3074
3075 VKAPI_ATTR void VKAPI_CALL vkCmdSetVertexInputEXT(VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount,
3076 const VkVertexInputBindingDescription2EXT *pVertexBindingDescriptions,
3077 uint32_t vertexAttributeDescriptionCount,
3078 const VkVertexInputAttributeDescription2EXT *pVertexAttributeDescriptions)
3079 {
3080 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t vertexBindingDescriptionCount = %d, const VkVertexInputBindingDescription2EXT *pVertexBindingDescriptions = %p, uint32_t vertexAttributeDescriptionCount = %d, const VkVertexInputAttributeDescription2EXT *pVertexAttributeDescriptions = %p)",
3081 commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions);
3082
3083 vk::Cast(commandBuffer)->setVertexInput(vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions);
3084 }
3085
3086 VKAPI_ATTR void VKAPI_CALL vkCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance)
3087 {
3088 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t vertexCount = %d, uint32_t instanceCount = %d, uint32_t firstVertex = %d, uint32_t firstInstance = %d)",
3089 commandBuffer, int(vertexCount), int(instanceCount), int(firstVertex), int(firstInstance));
3090
3091 vk::Cast(commandBuffer)->draw(vertexCount, instanceCount, firstVertex, firstInstance);
3092 }
3093
3094 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance)
3095 {
3096 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t indexCount = %d, uint32_t instanceCount = %d, uint32_t firstIndex = %d, int32_t vertexOffset = %d, uint32_t firstInstance = %d)",
3097 commandBuffer, int(indexCount), int(instanceCount), int(firstIndex), int(vertexOffset), int(firstInstance));
3098
3099 vk::Cast(commandBuffer)->drawIndexed(indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
3100 }
3101
3102 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride)
3103 {
3104 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, uint32_t drawCount = %d, uint32_t stride = %d)",
3105 commandBuffer, static_cast<void *>(buffer), int(offset), int(drawCount), int(stride));
3106
3107 vk::Cast(commandBuffer)->drawIndirect(vk::Cast(buffer), offset, drawCount, stride);
3108 }
3109
3110 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride)
3111 {
3112 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, uint32_t drawCount = %d, uint32_t stride = %d)",
3113 commandBuffer, static_cast<void *>(buffer), int(offset), int(drawCount), int(stride));
3114
3115 vk::Cast(commandBuffer)->drawIndexedIndirect(vk::Cast(buffer), offset, drawCount, stride);
3116 }
3117
3118 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride)
3119 {
3120 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, VkBuffer countBuffer = %p, VkDeviceSize countBufferOffset = %d, uint32_t maxDrawCount = %d, uint32_t stride = %d",
3121 commandBuffer, static_cast<void *>(buffer), int(offset), static_cast<void *>(countBuffer), int(countBufferOffset), int(maxDrawCount), int(stride));
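	// This entry point is core in Vulkan 1.2 but gated by the drawIndirectCount feature
	// (VK_KHR_draw_indirect_count), which is not supported here.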
3122 UNSUPPORTED("VK_KHR_draw_indirect_count");
3123 }
3124
3125 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride)
3126 {
3127 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, VkBuffer countBuffer = %p, VkDeviceSize countBufferOffset = %d, uint32_t maxDrawCount = %d, uint32_t stride = %d",
3128 commandBuffer, static_cast<void *>(buffer), int(offset), static_cast<void *>(countBuffer), int(countBufferOffset), int(maxDrawCount), int(stride));
3129 UNSUPPORTED("VK_KHR_draw_indirect_count");
3130 }
3131
3132 VKAPI_ATTR void VKAPI_CALL vkCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ)
3133 {
3134 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t groupCountX = %d, uint32_t groupCountY = %d, uint32_t groupCountZ = %d)",
3135 commandBuffer, int(groupCountX), int(groupCountY), int(groupCountZ));
3136
3137 vk::Cast(commandBuffer)->dispatch(groupCountX, groupCountY, groupCountZ);
3138 }
3139
3140 VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset)
3141 {
3142 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d)",
3143 commandBuffer, static_cast<void *>(buffer), int(offset));
3144
3145 vk::Cast(commandBuffer)->dispatchIndirect(vk::Cast(buffer), offset);
3146 }
3147
3148 VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy *pRegions)
3149 {
3150 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer srcBuffer = %p, VkBuffer dstBuffer = %p, uint32_t regionCount = %d, const VkBufferCopy* pRegions = %p)",
3151 commandBuffer, static_cast<void *>(srcBuffer), static_cast<void *>(dstBuffer), int(regionCount), pRegions);
3152
3153 vk::Cast(commandBuffer)->copyBuffer(vk::CopyBufferInfo(srcBuffer, dstBuffer, regionCount, pRegions));
3154 }
3155
3156 VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 *pCopyBufferInfo)
3157 {
3158 TRACE("(VkCommandBuffer commandBuffer = %p, const VkCopyBufferInfo2* pCopyBufferInfo = %p)",
3159 commandBuffer, pCopyBufferInfo);
3160
3161 vk::Cast(commandBuffer)->copyBuffer(*pCopyBufferInfo);
3162 }
3163
3164 VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions)
3165 {
3166 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkImageCopy* pRegions = %p)",
3167 commandBuffer, static_cast<void *>(srcImage), srcImageLayout, static_cast<void *>(dstImage), dstImageLayout, int(regionCount), pRegions);
3168
3169 vk::Cast(commandBuffer)->copyImage(vk::CopyImageInfo(srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions));
3170 }
3171
3172 VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2(VkCommandBuffer commandBuffer, const VkCopyImageInfo2 *pCopyImageInfo)
3173 {
3174 TRACE("(VkCommandBuffer commandBuffer = %p, const VkCopyImageInfo2* pCopyImageInfo = %p)",
3175 commandBuffer, pCopyImageInfo);
3176
3177 vk::Cast(commandBuffer)->copyImage(*pCopyImageInfo);
3178 }
3179
3180 VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter)
3181 {
3182 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkImageBlit* pRegions = %p, VkFilter filter = %d)",
3183 commandBuffer, static_cast<void *>(srcImage), srcImageLayout, static_cast<void *>(dstImage), dstImageLayout, int(regionCount), pRegions, filter);
3184
3185 vk::Cast(commandBuffer)->blitImage(vk::BlitImageInfo(srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter));
3186 }
3187
3188 VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2(VkCommandBuffer commandBuffer, const VkBlitImageInfo2 *pBlitImageInfo)
3189 {
3190 TRACE("(VkCommandBuffer commandBuffer = %p, const VkBlitImageInfo2* pBlitImageInfo = %p)",
3191 commandBuffer, pBlitImageInfo);
3192
3193 vk::Cast(commandBuffer)->blitImage(*pBlitImageInfo);
3194 }
3195
3196 VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions)
3197 {
3198 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer srcBuffer = %p, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkBufferImageCopy* pRegions = %p)",
3199 commandBuffer, static_cast<void *>(srcBuffer), static_cast<void *>(dstImage), dstImageLayout, int(regionCount), pRegions);
3200
3201 vk::Cast(commandBuffer)->copyBufferToImage(vk::CopyBufferToImageInfo(srcBuffer, dstImage, dstImageLayout, regionCount, pRegions));
3202 }
3203
3204 VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 *pCopyBufferToImageInfo)
3205 {
3206 TRACE("(VkCommandBuffer commandBuffer = %p, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo = %p)",
3207 commandBuffer, pCopyBufferToImageInfo);
3208
3209 vk::Cast(commandBuffer)->copyBufferToImage(*pCopyBufferToImageInfo);
3210 }
3211
3212 VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions)
3213 {
3214 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkBuffer dstBuffer = %p, uint32_t regionCount = %d, const VkBufferImageCopy* pRegions = %p)",
3215 commandBuffer, static_cast<void *>(srcImage), int(srcImageLayout), static_cast<void *>(dstBuffer), int(regionCount), pRegions);
3216
3217 vk::Cast(commandBuffer)->copyImageToBuffer(vk::CopyImageToBufferInfo(srcImage, srcImageLayout, dstBuffer, regionCount, pRegions));
3218 }
3219
3220 VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 *pCopyImageToBufferInfo)
3221 {
3222 TRACE("(VkCommandBuffer commandBuffer = %p, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo = %p)",
3223 commandBuffer, pCopyImageToBufferInfo);
3224
3225 vk::Cast(commandBuffer)->copyImageToBuffer(*pCopyImageToBufferInfo);
3226 }
3227
3228 VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData)
3229 {
3230 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer dstBuffer = %p, VkDeviceSize dstOffset = %d, VkDeviceSize dataSize = %d, const void* pData = %p)",
3231 commandBuffer, static_cast<void *>(dstBuffer), int(dstOffset), int(dataSize), pData);
3232
3233 vk::Cast(commandBuffer)->updateBuffer(vk::Cast(dstBuffer), dstOffset, dataSize, pData);
3234 }
3235
3236 VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data)
3237 {
3238 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer dstBuffer = %p, VkDeviceSize dstOffset = %d, VkDeviceSize size = %d, uint32_t data = %d)",
3239 commandBuffer, static_cast<void *>(dstBuffer), int(dstOffset), int(size), data);
3240
3241 vk::Cast(commandBuffer)->fillBuffer(vk::Cast(dstBuffer), dstOffset, size, data);
3242 }
3243
3244 VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue *pColor, uint32_t rangeCount, const VkImageSubresourceRange *pRanges)
3245 {
3246 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage image = %p, VkImageLayout imageLayout = %d, const VkClearColorValue* pColor = %p, uint32_t rangeCount = %d, const VkImageSubresourceRange* pRanges = %p)",
3247 commandBuffer, static_cast<void *>(image), int(imageLayout), pColor, int(rangeCount), pRanges);
3248
3249 vk::Cast(commandBuffer)->clearColorImage(vk::Cast(image), imageLayout, pColor, rangeCount, pRanges);
3250 }
3251
3252 VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange *pRanges)
3253 {
3254 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage image = %p, VkImageLayout imageLayout = %d, const VkClearDepthStencilValue* pDepthStencil = %p, uint32_t rangeCount = %d, const VkImageSubresourceRange* pRanges = %p)",
3255 commandBuffer, static_cast<void *>(image), int(imageLayout), pDepthStencil, int(rangeCount), pRanges);
3256
3257 vk::Cast(commandBuffer)->clearDepthStencilImage(vk::Cast(image), imageLayout, pDepthStencil, rangeCount, pRanges);
3258 }
3259
3260 VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment *pAttachments, uint32_t rectCount, const VkClearRect *pRects)
3261 {
3262 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t attachmentCount = %d, const VkClearAttachment* pAttachments = %p, uint32_t rectCount = %d, const VkClearRect* pRects = %p)",
3263 commandBuffer, int(attachmentCount), pAttachments, int(rectCount), pRects);
3264
3265 vk::Cast(commandBuffer)->clearAttachments(attachmentCount, pAttachments, rectCount, pRects);
3266 }
3267
3268 VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve *pRegions)
3269 {
3270 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkImageResolve* pRegions = %p)",
3271 commandBuffer, static_cast<void *>(srcImage), int(srcImageLayout), static_cast<void *>(dstImage), int(dstImageLayout), regionCount, pRegions);
3272
3273 vk::Cast(commandBuffer)->resolveImage(vk::ResolveImageInfo(srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions));
3274 }
3275
3276 VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2(VkCommandBuffer commandBuffer, const VkResolveImageInfo2 *pResolveImageInfo)
3277 {
3278 TRACE("(VkCommandBuffer commandBuffer = %p, const VkResolveImageInfo2* pResolveImageInfo = %p)",
3279 commandBuffer, pResolveImageInfo);
3280
3281 vk::Cast(commandBuffer)->resolveImage(*pResolveImageInfo);
3282 }
3283
3284 VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask)
3285 {
3286 TRACE("(VkCommandBuffer commandBuffer = %p, VkEvent event = %p, VkPipelineStageFlags stageMask = %d)",
3287 commandBuffer, static_cast<void *>(event), int(stageMask));
3288
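	// vkCmdSetEvent predates synchronization2, so the single stage mask is wrapped in a
	// VkDependencyInfo with no memory barriers before being recorded.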
3289 vk::Cast(commandBuffer)->setEvent(vk::Cast(event), vk::DependencyInfo(stageMask, stageMask, VkDependencyFlags(0), 0, nullptr, 0, nullptr, 0, nullptr));
3290 }
3291
3292 VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo *pDependencyInfo)
3293 {
3294 TRACE("(VkCommandBuffer commandBuffer = %p, VkEvent event = %p, const VkDependencyInfo* pDependencyInfo = %p)",
3295 commandBuffer, static_cast<void *>(event), pDependencyInfo);
3296
3297 vk::Cast(commandBuffer)->setEvent(vk::Cast(event), *pDependencyInfo);
3298 }
3299
3300 VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask)
3301 {
3302 TRACE("(VkCommandBuffer commandBuffer = %p, VkEvent event = %p, VkPipelineStageFlags stageMask = %d)",
3303 commandBuffer, static_cast<void *>(event), int(stageMask));
3304
3305 vk::Cast(commandBuffer)->resetEvent(vk::Cast(event), stageMask);
3306 }
3307
3308 VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask)
3309 {
3310 TRACE("(VkCommandBuffer commandBuffer = %p, VkEvent event = %p, VkPipelineStageFlags2 stageMask = %d)",
3311 commandBuffer, static_cast<void *>(event), int(stageMask));
3312
3313 vk::Cast(commandBuffer)->resetEvent(vk::Cast(event), stageMask);
3314 }
3315
3316 VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers)
3317 {
3318 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t eventCount = %d, const VkEvent* pEvents = %p, VkPipelineStageFlags srcStageMask = 0x%08X, VkPipelineStageFlags dstStageMask = 0x%08X, uint32_t memoryBarrierCount = %d, const VkMemoryBarrier* pMemoryBarriers = %p, uint32_t bufferMemoryBarrierCount = %d, const VkBufferMemoryBarrier* pBufferMemoryBarriers = %p, uint32_t imageMemoryBarrierCount = %d, const VkImageMemoryBarrier* pImageMemoryBarriers = %p)",
3319 commandBuffer, int(eventCount), pEvents, int(srcStageMask), int(dstStageMask), int(memoryBarrierCount), pMemoryBarriers, int(bufferMemoryBarrierCount), pBufferMemoryBarriers, int(imageMemoryBarrierCount), pImageMemoryBarriers);
3320
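	// The legacy barrier arrays are folded into a single VkDependencyInfo so that waitEvents
	// shares one code path with vkCmdWaitEvents2.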
3321 vk::Cast(commandBuffer)->waitEvents(eventCount, pEvents, vk::DependencyInfo(srcStageMask, dstStageMask, VkDependencyFlags(0), memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers));
3322 }
3323
3324 VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents, const VkDependencyInfo *pDependencyInfos)
3325 {
3326 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t eventCount = %d, const VkEvent* pEvents = %p, const VkDependencyInfo* pDependencyInfos = %p)",
3327 commandBuffer, int(eventCount), pEvents, pDependencyInfos);
3328
3329 vk::Cast(commandBuffer)->waitEvents(eventCount, pEvents, *pDependencyInfos);
3330 }
3331
3332 VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers)
3333 {
3334 TRACE(
3335 	    "(VkCommandBuffer commandBuffer = %p, VkPipelineStageFlags srcStageMask = 0x%08X, VkPipelineStageFlags dstStageMask = 0x%08X, VkDependencyFlags dependencyFlags = %d, uint32_t memoryBarrierCount = %d, const VkMemoryBarrier* pMemoryBarriers = %p,"
3336 " uint32_t bufferMemoryBarrierCount = %d, const VkBufferMemoryBarrier* pBufferMemoryBarriers = %p, uint32_t imageMemoryBarrierCount = %d, const VkImageMemoryBarrier* pImageMemoryBarriers = %p)",
3337 commandBuffer, int(srcStageMask), int(dstStageMask), dependencyFlags, int(memoryBarrierCount), pMemoryBarriers, int(bufferMemoryBarrierCount), pBufferMemoryBarriers, int(imageMemoryBarrierCount), pImageMemoryBarriers);
3338
3339 vk::Cast(commandBuffer)->pipelineBarrier(vk::DependencyInfo(srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers));
3340 }
3341
3342 VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2(VkCommandBuffer commandBuffer, const VkDependencyInfo *pDependencyInfo)
3343 {
3344 TRACE("(VkCommandBuffer commandBuffer = %p, const VkDependencyInfo* pDependencyInfo = %p)",
3345 commandBuffer, pDependencyInfo);
3346
3347 vk::Cast(commandBuffer)->pipelineBarrier(*pDependencyInfo);
3348 }
3349
3350 VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags)
3351 {
3352 TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t query = %d, VkQueryControlFlags flags = %d)",
3353 commandBuffer, static_cast<void *>(queryPool), query, int(flags));
3354
3355 vk::Cast(commandBuffer)->beginQuery(vk::Cast(queryPool), query, flags);
3356 }
3357
3358 VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query)
3359 {
3360 TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t query = %d)",
3361 commandBuffer, static_cast<void *>(queryPool), int(query));
3362
3363 vk::Cast(commandBuffer)->endQuery(vk::Cast(queryPool), query);
3364 }
3365
3366 VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount)
3367 {
3368 TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d)",
3369 commandBuffer, static_cast<void *>(queryPool), int(firstQuery), int(queryCount));
3370
3371 vk::Cast(commandBuffer)->resetQueryPool(vk::Cast(queryPool), firstQuery, queryCount);
3372 }
3373
3374 VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query)
3375 {
3376 TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineStageFlagBits pipelineStage = %d, VkQueryPool queryPool = %p, uint32_t query = %d)",
3377 commandBuffer, int(pipelineStage), static_cast<void *>(queryPool), int(query));
3378
3379 vk::Cast(commandBuffer)->writeTimestamp(pipelineStage, vk::Cast(queryPool), query);
3380 }
3381
3382 VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query)
3383 {
3384 TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineStageFlags2 stage = %d, VkQueryPool queryPool = %p, uint32_t query = %d)",
3385 commandBuffer, int(stage), static_cast<void *>(queryPool), int(query));
3386
3387 vk::Cast(commandBuffer)->writeTimestamp(stage, vk::Cast(queryPool), query);
3388 }
3389
3390 VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags)
3391 {
3392 TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d, VkBuffer dstBuffer = %p, VkDeviceSize dstOffset = %d, VkDeviceSize stride = %d, VkQueryResultFlags flags = %d)",
3393 commandBuffer, static_cast<void *>(queryPool), int(firstQuery), int(queryCount), static_cast<void *>(dstBuffer), int(dstOffset), int(stride), int(flags));
3394
3395 vk::Cast(commandBuffer)->copyQueryPoolResults(vk::Cast(queryPool), firstQuery, queryCount, vk::Cast(dstBuffer), dstOffset, stride, flags);
3396 }
3397
3398 VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void *pValues)
3399 {
3400 TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineLayout layout = %p, VkShaderStageFlags stageFlags = %d, uint32_t offset = %d, uint32_t size = %d, const void* pValues = %p)",
3401 commandBuffer, static_cast<void *>(layout), stageFlags, offset, size, pValues);
3402
3403 vk::Cast(commandBuffer)->pushConstants(vk::Cast(layout), stageFlags, offset, size, pValues);
3404 }
3405
3406 VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, VkSubpassContents contents)
3407 {
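	// Implemented in terms of vkCmdBeginRenderPass2 by synthesizing a VkSubpassBeginInfo
	// that carries the subpass contents.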
3408 VkSubpassBeginInfo subpassBeginInfo = { VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, nullptr, contents };
3409 vkCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, &subpassBeginInfo);
3410 }
3411
3412 VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, const VkSubpassBeginInfoKHR *pSubpassBeginInfo)
3413 {
3414 TRACE("(VkCommandBuffer commandBuffer = %p, const VkRenderPassBeginInfo* pRenderPassBegin = %p, const VkSubpassBeginInfoKHR* pSubpassBeginInfo = %p)",
3415 commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
3416
3417 const VkBaseInStructure *renderPassBeginInfo = reinterpret_cast<const VkBaseInStructure *>(pRenderPassBegin->pNext);
3418 const VkRenderPassAttachmentBeginInfo *attachmentBeginInfo = nullptr;
3419 while(renderPassBeginInfo)
3420 {
3421 switch(renderPassBeginInfo->sType)
3422 {
3423 case VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO:
3424 // This extension controls which render area is used on which physical device,
3425 // in order to distribute rendering between multiple physical devices.
3426 // SwiftShader only has a single physical device, so this extension does nothing in this case.
3427 break;
3428 case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO:
3429 attachmentBeginInfo = reinterpret_cast<const VkRenderPassAttachmentBeginInfo *>(renderPassBeginInfo);
3430 break;
3431 case VK_STRUCTURE_TYPE_MAX_ENUM:
3432 // dEQP tests that this value is ignored.
3433 break;
3434 default:
3435 UNSUPPORTED("pRenderPassBegin->pNext sType = %s", vk::Stringify(renderPassBeginInfo->sType).c_str());
3436 break;
3437 }
3438
3439 renderPassBeginInfo = renderPassBeginInfo->pNext;
3440 }
3441
3442 vk::Cast(commandBuffer)->beginRenderPass(vk::Cast(pRenderPassBegin->renderPass), vk::Cast(pRenderPassBegin->framebuffer), pRenderPassBegin->renderArea, pRenderPassBegin->clearValueCount, pRenderPassBegin->pClearValues, pSubpassBeginInfo->contents, attachmentBeginInfo);
3443 }
3444
3445 VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents)
3446 {
3447 TRACE("(VkCommandBuffer commandBuffer = %p, VkSubpassContents contents = %d)",
3448 commandBuffer, contents);
3449
3450 vk::Cast(commandBuffer)->nextSubpass(contents);
3451 }
3452
3453 VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo, const VkSubpassEndInfoKHR *pSubpassEndInfo)
3454 {
3455 TRACE("(VkCommandBuffer commandBuffer = %p, const VkSubpassBeginInfoKHR* pSubpassBeginInfo = %p, const VkSubpassEndInfoKHR* pSubpassEndInfo = %p)",
3456 commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
3457
3458 vk::Cast(commandBuffer)->nextSubpass(pSubpassBeginInfo->contents);
3459 }
3460
3461 VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass(VkCommandBuffer commandBuffer)
3462 {
3463 TRACE("(VkCommandBuffer commandBuffer = %p)", commandBuffer);
3464
3465 vk::Cast(commandBuffer)->endRenderPass();
3466 }
3467
3468 VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo)
3469 {
3470 TRACE("(VkCommandBuffer commandBuffer = %p, const VkSubpassEndInfoKHR* pSubpassEndInfo = %p)", commandBuffer, pSubpassEndInfo);
3471
3472 vk::Cast(commandBuffer)->endRenderPass();
3473 }
3474
3475 VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers)
3476 {
3477 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t commandBufferCount = %d, const VkCommandBuffer* pCommandBuffers = %p)",
3478 commandBuffer, commandBufferCount, pCommandBuffers);
3479
3480 vk::Cast(commandBuffer)->executeCommands(commandBufferCount, pCommandBuffers);
3481 }
3482
3483 VKAPI_ATTR void VKAPI_CALL vkCmdBeginRendering(VkCommandBuffer commandBuffer, const VkRenderingInfo *pRenderingInfo)
3484 {
3485 TRACE("(VkCommandBuffer commandBuffer = %p, const VkRenderingInfo* pRenderingInfo = %p)",
3486 commandBuffer, pRenderingInfo);
3487
3488 vk::Cast(commandBuffer)->beginRendering(pRenderingInfo);
3489 }
3490
3491 VKAPI_ATTR void VKAPI_CALL vkCmdEndRendering(VkCommandBuffer commandBuffer)
3492 {
3493 TRACE("(VkCommandBuffer commandBuffer = %p)", commandBuffer);
3494
3495 vk::Cast(commandBuffer)->endRendering();
3496 }
3497
3498 VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingAttachmentLocationsKHR(VkCommandBuffer commandBuffer, const VkRenderingAttachmentLocationInfoKHR *pLocationInfo)
3499 {
3500 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkRenderingAttachmentLocationInfoKHR* pLocationInfo = %p)", commandBuffer, pLocationInfo);
3501
3502 // No-op; the same information is provided in pipeline create info.
3503 }
3504
3505 VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingInputAttachmentIndicesKHR(VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfoKHR *pInputAttachmentIndexInfo)
3506 {
3507 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkRenderingInputAttachmentIndexInfoKHR* pInputAttachmentIndexInfo = %p)", commandBuffer, pInputAttachmentIndexInfo);
3508
3509 // No-op; the same information is provided in pipeline create info.
3510 }
3511
3512 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion(uint32_t *pApiVersion)
3513 {
3514 TRACE("(uint32_t* pApiVersion = %p)", pApiVersion);
3515 *pApiVersion = vk::API_VERSION;
3516 return VK_SUCCESS;
3517 }
3518
3519 VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos)
3520 {
3521 TRACE("(VkDevice device = %p, uint32_t bindInfoCount = %d, const VkBindBufferMemoryInfo* pBindInfos = %p)",
3522 device, bindInfoCount, pBindInfos);
3523
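	// Validate every bind request up front; buffers are only bound once all requests are
	// known to be compatible with their memory.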
3524 for(uint32_t i = 0; i < bindInfoCount; i++)
3525 {
3526 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pBindInfos[i].pNext);
3527 while(extInfo)
3528 {
3529 UNSUPPORTED("pBindInfos[%d].pNext sType = %s", i, vk::Stringify(extInfo->sType).c_str());
3530 extInfo = extInfo->pNext;
3531 }
3532
3533 if(!vk::Cast(pBindInfos[i].buffer)->canBindToMemory(vk::Cast(pBindInfos[i].memory)))
3534 {
3535 UNSUPPORTED("vkBindBufferMemory2 with invalid external memory");
3536 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
3537 }
3538 }
3539
3540 for(uint32_t i = 0; i < bindInfoCount; i++)
3541 {
3542 vk::Cast(pBindInfos[i].buffer)->bind(vk::Cast(pBindInfos[i].memory), pBindInfos[i].memoryOffset);
3543 }
3544
3545 return VK_SUCCESS;
3546 }
3547
3548 VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo *pBindInfos)
3549 {
3550 TRACE("(VkDevice device = %p, uint32_t bindInfoCount = %d, const VkBindImageMemoryInfo* pBindInfos = %p)",
3551 device, bindInfoCount, pBindInfos);
3552
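	// As with vkBindBufferMemory2, validate all bind infos before performing any binding.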
3553 for(uint32_t i = 0; i < bindInfoCount; i++)
3554 {
3555 if(!vk::Cast(pBindInfos[i].image)->canBindToMemory(vk::Cast(pBindInfos[i].memory)))
3556 {
3557 UNSUPPORTED("vkBindImageMemory2 with invalid external memory");
3558 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3559 }
3560 }
3561
3562 for(uint32_t i = 0; i < bindInfoCount; i++)
3563 {
3564 vk::DeviceMemory *memory = vk::Cast(pBindInfos[i].memory);
3565 VkDeviceSize offset = pBindInfos[i].memoryOffset;
3566
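		// Walk the pNext chain; a VkBindImageMemorySwapchainInfoKHR redirects the binding to
		// the swapchain image's own memory at offset 0.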
3567 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pBindInfos[i].pNext);
3568 while(extInfo)
3569 {
3570 switch(extInfo->sType)
3571 {
3572 case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO:
3573 /* Do nothing */
3574 break;
3575
3576 #ifndef __ANDROID__
3577 case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR:
3578 {
3579 const auto *swapchainInfo = reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR *>(extInfo);
3580 memory = vk::Cast(swapchainInfo->swapchain)->getImage(swapchainInfo->imageIndex).getImageMemory();
3581 offset = 0;
3582 }
3583 break;
3584 #endif
3585
3586 default:
3587 UNSUPPORTED("pBindInfos[%d].pNext sType = %s", i, vk::Stringify(extInfo->sType).c_str());
3588 break;
3589 }
3590 extInfo = extInfo->pNext;
3591 }
3592
3593 vk::Cast(pBindInfos[i].image)->bind(memory, offset);
3594 }
3595
3596 return VK_SUCCESS;
3597 }
3598
3599 VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeatures(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
3600 {
3601 TRACE("(VkDevice device = %p, uint32_t heapIndex = %d, uint32_t localDeviceIndex = %d, uint32_t remoteDeviceIndex = %d, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures = %p)",
3602 device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
3603
3604 ASSERT(localDeviceIndex != remoteDeviceIndex); // "localDeviceIndex must not equal remoteDeviceIndex"
3605 UNSUPPORTED("remoteDeviceIndex: %d", int(remoteDeviceIndex)); // Only one physical device is supported, and since the device indexes can't be equal, this should never be called.
3606 }
3607
3608 VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask)
3609 {
3610 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t deviceMask = %d)", commandBuffer, deviceMask);
3611
3612 vk::Cast(commandBuffer)->setDeviceMask(deviceMask);
3613 }
3614
3615 VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ)
3616 {
3617 TRACE("(VkCommandBuffer commandBuffer = %p, baseGroupX = %u, baseGroupY = %u, baseGroupZ = %u, groupCountX = %u, groupCountY = %u, groupCountZ = %u)",
3618 commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
3619
3620 vk::Cast(commandBuffer)->dispatchBase(baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
3621 }
3622
3623 VKAPI_ATTR void VKAPI_CALL vkResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount)
3624 {
3625 TRACE("(VkDevice device = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d)",
3626 device, static_cast<void *>(queryPool), firstQuery, queryCount);
3627 vk::Cast(queryPool)->reset(firstQuery, queryCount);
3628 }
3629
3630 VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroups(VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties)
3631 {
3632 TRACE("(VkInstance instance = %p, uint32_t* pPhysicalDeviceGroupCount = %p, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties = %p)",
3633 instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
3634
3635 return vk::Cast(instance)->getPhysicalDeviceGroups(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
3636 }
3637
3638 VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
3639 {
3640 TRACE("(VkDevice device = %p, const VkImageMemoryRequirementsInfo2* pInfo = %p, VkMemoryRequirements2* pMemoryRequirements = %p)",
3641 device, pInfo, pMemoryRequirements);
3642
3643 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pInfo->pNext);
3644 while(extInfo)
3645 {
3646 UNSUPPORTED("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3647 extInfo = extInfo->pNext;
3648 }
3649
3650 vk::Cast(pInfo->image)->getMemoryRequirements(pMemoryRequirements);
3651 }
3652
3653 VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2 *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
3654 {
3655 TRACE("(VkDevice device = %p, const VkBufferMemoryRequirementsInfo2* pInfo = %p, VkMemoryRequirements2* pMemoryRequirements = %p)",
3656 device, pInfo, pMemoryRequirements);
3657
3658 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pInfo->pNext);
3659 while(extInfo)
3660 {
3661 UNSUPPORTED("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3662 extInfo = extInfo->pNext;
3663 }
3664
3665 VkBaseOutStructure *extensionRequirements = reinterpret_cast<VkBaseOutStructure *>(pMemoryRequirements->pNext);
3666 while(extensionRequirements)
3667 {
3668 switch(extensionRequirements->sType)
3669 {
3670 case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
3671 {
3672 auto *requirements = reinterpret_cast<VkMemoryDedicatedRequirements *>(extensionRequirements);
3673 vk::Cast(device)->getRequirements(requirements);
3674 }
3675 break;
3676 default:
3677 UNSUPPORTED("pMemoryRequirements->pNext sType = %s", vk::Stringify(extensionRequirements->sType).c_str());
3678 break;
3679 }
3680
3681 extensionRequirements = extensionRequirements->pNext;
3682 }
3683
3684 vkGetBufferMemoryRequirements(device, pInfo->buffer, &(pMemoryRequirements->memoryRequirements));
3685 }
3686
3687 VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
3688 {
3689 TRACE("(VkDevice device = %p, const VkImageSparseMemoryRequirementsInfo2* pInfo = %p, uint32_t* pSparseMemoryRequirementCount = %p, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements = %p)",
3690 device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
3691
3692 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pInfo->pNext);
3693 while(extInfo)
3694 {
3695 UNSUPPORTED("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3696 extInfo = extInfo->pNext;
3697 }
3698
3699 if(pSparseMemoryRequirements) // Valid to be NULL
3700 {
3701 const auto *extensionRequirements = reinterpret_cast<const VkBaseInStructure *>(pSparseMemoryRequirements->pNext);
3702 while(extensionRequirements)
3703 {
3704 UNSUPPORTED("pSparseMemoryRequirements->pNext sType = %s", vk::Stringify(extensionRequirements->sType).c_str());
3705 extensionRequirements = extensionRequirements->pNext;
3706 }
3707 }
3708
3709 	// The 'sparseBinding' feature is not supported, so images cannot be created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag.
3710 // "If the image was not created with VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT then pSparseMemoryRequirementCount will be set to zero and pSparseMemoryRequirements will not be written to."
3711 *pSparseMemoryRequirementCount = 0;
3712 }
3713
3714 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 *pFeatures)
3715 {
3716 TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceFeatures2* pFeatures = %p)", physicalDevice, pFeatures);
3717
3718 vk::Cast(physicalDevice)->getFeatures2(pFeatures);
3719 }
3720
3721 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 *pProperties)
3722 {
3723 TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceProperties2* pProperties = %p)", physicalDevice, pProperties);
3724
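	// Fill out every recognized structure in the pNext chain; the core
	// VkPhysicalDeviceProperties are written afterwards.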
3725 VkBaseOutStructure *extensionProperties = reinterpret_cast<VkBaseOutStructure *>(pProperties->pNext);
3726 while(extensionProperties)
3727 {
3728 // Casting to an int since some structures, such as VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID,
3729 // are not enumerated in the official Vulkan headers.
3730 switch((int)(extensionProperties->sType))
3731 {
3732 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES:
3733 {
3734 auto *properties = reinterpret_cast<VkPhysicalDeviceIDProperties *>(extensionProperties);
3735 vk::Cast(physicalDevice)->getProperties(properties);
3736 }
3737 break;
3738 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES:
3739 {
3740 auto *properties = reinterpret_cast<VkPhysicalDeviceMaintenance3Properties *>(extensionProperties);
3741 vk::Cast(physicalDevice)->getProperties(properties);
3742 }
3743 break;
3744 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES:
3745 {
3746 auto *properties = reinterpret_cast<VkPhysicalDeviceMaintenance4Properties *>(extensionProperties);
3747 vk::Cast(physicalDevice)->getProperties(properties);
3748 }
3749 break;
3750 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES:
3751 {
3752 auto *properties = reinterpret_cast<VkPhysicalDeviceMultiviewProperties *>(extensionProperties);
3753 vk::Cast(physicalDevice)->getProperties(properties);
3754 }
3755 break;
3756 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES:
3757 {
3758 auto *properties = reinterpret_cast<VkPhysicalDevicePointClippingProperties *>(extensionProperties);
3759 vk::Cast(physicalDevice)->getProperties(properties);
3760 }
3761 break;
3762 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES:
3763 {
3764 auto *properties = reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>(extensionProperties);
3765 vk::Cast(physicalDevice)->getProperties(properties);
3766 }
3767 break;
3768 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES:
3769 {
3770 auto *properties = reinterpret_cast<VkPhysicalDeviceSubgroupProperties *>(extensionProperties);
3771 vk::Cast(physicalDevice)->getProperties(properties);
3772 }
3773 break;
3774 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT:
3775 {
3776 auto *properties = reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>(extensionProperties);
3777 vk::Cast(physicalDevice)->getProperties(properties);
3778 }
3779 break;
3780 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES:
3781 {
3782 auto *properties = reinterpret_cast<VkPhysicalDeviceDriverProperties *>(extensionProperties);
3783 vk::Cast(physicalDevice)->getProperties(properties);
3784 }
3785 break;
3786 #ifdef __ANDROID__
3787 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID:
3788 {
3789 auto *properties = reinterpret_cast<VkPhysicalDevicePresentationPropertiesANDROID *>(extensionProperties);
3790 vk::Cast(physicalDevice)->getProperties(properties);
3791 }
3792 break;
3793 #endif
3794 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT:
3795 {
3796 auto *properties = reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT *>(extensionProperties);
3797 vk::Cast(physicalDevice)->getProperties(properties);
3798 }
3799 break;
3800 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT:
3801 {
3802 auto *properties = reinterpret_cast<VkPhysicalDeviceProvokingVertexPropertiesEXT *>(extensionProperties);
3803 vk::Cast(physicalDevice)->getProperties(properties);
3804 }
3805 break;
3806 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES:
3807 {
3808 auto *properties = reinterpret_cast<VkPhysicalDeviceFloatControlsProperties *>(extensionProperties);
3809 vk::Cast(physicalDevice)->getProperties(properties);
3810 }
3811 break;
3812 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES:
3813 {
3814 auto *properties = reinterpret_cast<VkPhysicalDeviceVulkan11Properties *>(extensionProperties);
3815 vk::Cast(physicalDevice)->getProperties(properties);
3816 }
3817 break;
3818 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES:
3819 {
3820 auto *properties = reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties *>(extensionProperties);
3821 vk::Cast(physicalDevice)->getProperties(properties);
3822 }
3823 break;
3824 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES:
3825 {
3826 auto *properties = reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties *>(extensionProperties);
3827 vk::Cast(physicalDevice)->getProperties(properties);
3828 }
3829 break;
3830 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES:
3831 {
3832 auto *properties = reinterpret_cast<VkPhysicalDeviceVulkan12Properties *>(extensionProperties);
3833 vk::Cast(physicalDevice)->getProperties(properties);
3834 }
3835 break;
3836 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES:
3837 {
3838 auto *properties = reinterpret_cast<VkPhysicalDeviceVulkan13Properties *>(extensionProperties);
3839 vk::Cast(physicalDevice)->getProperties(properties);
3840 }
3841 break;
3842 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES:
3843 {
3844 auto *properties = reinterpret_cast<VkPhysicalDeviceDescriptorIndexingProperties *>(extensionProperties);
3845 vk::Cast(physicalDevice)->getProperties(properties);
3846 }
3847 break;
3848 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES:
3849 {
3850 auto *properties = reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties *>(extensionProperties);
3851 vk::Cast(physicalDevice)->getProperties(properties);
3852 }
3853 break;
3854 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT:
3855 {
3856 auto *properties = reinterpret_cast<VkPhysicalDeviceCustomBorderColorPropertiesEXT *>(extensionProperties);
3857 vk::Cast(physicalDevice)->getProperties(properties);
3858 }
3859 break;
3860 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT:
3861 {
3862 auto *properties = reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>(extensionProperties);
3863 vk::Cast(physicalDevice)->getProperties(properties);
3864 }
3865 break;
3866 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES:
3867 {
3868 auto *properties = reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlProperties *>(extensionProperties);
3869 vk::Cast(physicalDevice)->getProperties(properties);
3870 }
3871 break;
3872 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES:
3873 {
3874 auto *properties = reinterpret_cast<VkPhysicalDeviceInlineUniformBlockProperties *>(extensionProperties);
3875 vk::Cast(physicalDevice)->getProperties(properties);
3876 }
3877 break;
3878 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES:
3879 {
3880 auto *properties = reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentProperties *>(extensionProperties);
3881 vk::Cast(physicalDevice)->getProperties(properties);
3882 }
3883 break;
3884 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES:
3885 {
3886 auto *properties = reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductProperties *>(extensionProperties);
3887 vk::Cast(physicalDevice)->getProperties(properties);
3888 }
3889 break;
3890 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT:
3891 {
3892 auto *properties = reinterpret_cast<VkPhysicalDevicePipelineRobustnessPropertiesEXT *>(extensionProperties);
3893 vk::Cast(physicalDevice)->getProperties(properties);
3894 }
3895 break;
3896 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT:
3897 {
3898 auto *properties = reinterpret_cast<VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT *>(extensionProperties);
3899 vk::Cast(physicalDevice)->getProperties(properties);
3900 }
3901 break;
3902 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT:
3903 {
3904 auto *properties = reinterpret_cast<VkPhysicalDeviceHostImageCopyPropertiesEXT *>(extensionProperties);
3905 vk::Cast(physicalDevice)->getProperties(properties);
3906 }
3907 break;
3908 default:
3909 		// "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extensions]"
3910 UNSUPPORTED("pProperties->pNext sType = %s", vk::Stringify(extensionProperties->sType).c_str());
3911 break;
3912 }
3913
3914 extensionProperties = extensionProperties->pNext;
3915 }
3916
3917 vkGetPhysicalDeviceProperties(physicalDevice, &(pProperties->properties));
3918 }
3919
3920 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2 *pFormatProperties)
3921 {
3922 TRACE("(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkFormatProperties2* pFormatProperties = %p)",
3923 physicalDevice, format, pFormatProperties);
3924
3925 VkBaseOutStructure *extensionProperties = reinterpret_cast<VkBaseOutStructure *>(pFormatProperties->pNext);
3926 while(extensionProperties)
3927 {
3928 switch(extensionProperties->sType)
3929 {
3930 case VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3:
3931 {
3932 auto *properties3 = reinterpret_cast<VkFormatProperties3 *>(extensionProperties);
3933 vk::Cast(physicalDevice)->GetFormatProperties(format, properties3);
3934 }
3935 break;
3936 default:
3937 		// "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extensions]"
3938 UNSUPPORTED("pFormatProperties->pNext sType = %s", vk::Stringify(extensionProperties->sType).c_str());
3939 break;
3940 }
3941
3942 extensionProperties = extensionProperties->pNext;
3943 }
3944
3945 vkGetPhysicalDeviceFormatProperties(physicalDevice, format, &(pFormatProperties->formatProperties));
3946 }
3947
3948 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo, VkImageFormatProperties2 *pImageFormatProperties)
3949 {
3950 TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo = %p, VkImageFormatProperties2* pImageFormatProperties = %p)",
3951 physicalDevice, pImageFormatInfo, pImageFormatProperties);
3952
3953 // "If the combination of parameters to vkGetPhysicalDeviceImageFormatProperties is not supported by the implementation
3954 // for use in vkCreateImage, then all members of VkImageFormatProperties will be filled with zero."
3955 memset(&pImageFormatProperties->imageFormatProperties, 0, sizeof(VkImageFormatProperties));
3956
3957 const VkBaseInStructure *extensionFormatInfo = reinterpret_cast<const VkBaseInStructure *>(pImageFormatInfo->pNext);
3958
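	// Scan the input pNext chain for the external handle type and the separate stencil usage
	// before checking format support below.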
3959 const VkExternalMemoryHandleTypeFlagBits *handleType = nullptr;
3960 VkImageUsageFlags stencilUsage = 0;
3961 while(extensionFormatInfo)
3962 {
3963 switch(extensionFormatInfo->sType)
3964 {
3965 case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
3966 {
3967 			// Per the Vulkan spec on VkImageFormatListCreateInfo:
3968 // "If the pNext chain of VkImageCreateInfo includes a
3969 // VkImageFormatListCreateInfo structure, then that
3970 // structure contains a list of all formats that can be
3971 // used when creating views of this image"
3972 			// This limitation does not affect SwiftShader's behavior, and the
3973 			// Vulkan Validation Layers can detect views created with a format
3974 			// that is not included in that list.
3975 }
3976 break;
3977 case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
3978 {
3979 const VkImageStencilUsageCreateInfo *stencilUsageInfo = reinterpret_cast<const VkImageStencilUsageCreateInfo *>(extensionFormatInfo);
3980 stencilUsage = stencilUsageInfo->stencilUsage;
3981 }
3982 break;
3983 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO:
3984 {
3985 const VkPhysicalDeviceExternalImageFormatInfo *imageFormatInfo = reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo *>(extensionFormatInfo);
3986 handleType = &(imageFormatInfo->handleType);
3987 }
3988 break;
3989 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT:
3990 {
3991 // Explicitly ignored, since VK_EXT_image_drm_format_modifier is not supported
3992 ASSERT(!hasDeviceExtension(VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME));
3993 }
3994 break;
3995 default:
3996 UNSUPPORTED("pImageFormatInfo->pNext sType = %s", vk::Stringify(extensionFormatInfo->sType).c_str());
3997 break;
3998 }
3999
4000 extensionFormatInfo = extensionFormatInfo->pNext;
4001 }
4002
4003 VkBaseOutStructure *extensionProperties = reinterpret_cast<VkBaseOutStructure *>(pImageFormatProperties->pNext);
4004
4005 #ifdef __ANDROID__
4006 bool hasAHBUsage = false;
4007 #endif
4008
4009 while(extensionProperties)
4010 {
4011 switch(extensionProperties->sType)
4012 {
4013 case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES:
4014 {
4015 auto *properties = reinterpret_cast<VkExternalImageFormatProperties *>(extensionProperties);
4016 vk::Cast(physicalDevice)->getProperties(handleType, properties);
4017 }
4018 break;
4019 case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES:
4020 {
4021 auto *properties = reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties *>(extensionProperties);
4022 vk::Cast(physicalDevice)->getProperties(properties);
4023 }
4024 break;
4025 case VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD:
4026 {
4027 // Explicitly ignored, since VK_AMD_texture_gather_bias_lod is not supported
4028 ASSERT(!hasDeviceExtension(VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME));
4029 }
4030 break;
4031 case VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT:
4032 {
4033 auto *properties = reinterpret_cast<VkHostImageCopyDevicePerformanceQueryEXT *>(extensionProperties);
4034 // Host image copy is equally performant on the host with SwiftShader; it's the same code running on the main thread.
4035 properties->optimalDeviceAccess = VK_TRUE;
4036 properties->identicalMemoryLayout = VK_TRUE;
4037 }
4038 break;
4039 #ifdef __ANDROID__
4040 case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID:
4041 {
4042 auto *properties = reinterpret_cast<VkAndroidHardwareBufferUsageANDROID *>(extensionProperties);
4043 vk::Cast(physicalDevice)->getProperties(pImageFormatInfo, properties);
4044 hasAHBUsage = true;
4045 }
4046 break;
4047 #endif
4048 default:
4049 UNSUPPORTED("pImageFormatProperties->pNext sType = %s", vk::Stringify(extensionProperties->sType).c_str());
4050 break;
4051 }
4052
4053 extensionProperties = extensionProperties->pNext;
4054 }
4055
4056 vk::Format format = pImageFormatInfo->format;
4057 VkImageType type = pImageFormatInfo->type;
4058 VkImageTiling tiling = pImageFormatInfo->tiling;
4059 VkImageUsageFlags usage = pImageFormatInfo->usage;
4060 VkImageCreateFlags flags = pImageFormatInfo->flags;
4061
4062 if(!vk::Cast(physicalDevice)->isFormatSupported(format, type, tiling, usage, stencilUsage, flags))
4063 {
4064 return VK_ERROR_FORMAT_NOT_SUPPORTED;
4065 }
4066
4067 vk::Cast(physicalDevice)->getImageFormatProperties(format, type, tiling, usage, flags, &pImageFormatProperties->imageFormatProperties);
4068
4069 #ifdef __ANDROID__
4070 if(hasAHBUsage)
4071 {
4072 // AHardwareBuffer_lock may only be called with a single layer.
4073 pImageFormatProperties->imageFormatProperties.maxArrayLayers = 1;
4074 pImageFormatProperties->imageFormatProperties.maxMipLevels = 1;
4075 }
4076 #endif
4077
4078 return VK_SUCCESS;
4079 }
4080
4081 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties)
4082 {
4083 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pQueueFamilyPropertyCount = %p, VkQueueFamilyProperties2* pQueueFamilyProperties = %p)",
4084 physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
4085
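	// Standard Vulkan enumeration pattern: a null pQueueFamilyProperties pointer requests only the count.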
4086 if(!pQueueFamilyProperties)
4087 {
4088 *pQueueFamilyPropertyCount = vk::Cast(physicalDevice)->getQueueFamilyPropertyCount();
4089 }
4090 else
4091 {
4092 vk::Cast(physicalDevice)->getQueueFamilyProperties(*pQueueFamilyPropertyCount, pQueueFamilyProperties);
4093 }
4094 }
4095
4096 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 *pMemoryProperties)
4097 {
4098 TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceMemoryProperties2* pMemoryProperties = %p)", physicalDevice, pMemoryProperties);
4099
4100 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pMemoryProperties->pNext);
4101 while(extInfo)
4102 {
4103 UNSUPPORTED("pMemoryProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
4104 extInfo = extInfo->pNext;
4105 }
4106
4107 vkGetPhysicalDeviceMemoryProperties(physicalDevice, &(pMemoryProperties->memoryProperties));
4108 }
4109
4110 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2 *pFormatInfo, uint32_t *pPropertyCount, VkSparseImageFormatProperties2 *pProperties)
4111 {
4112 TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo = %p, uint32_t* pPropertyCount = %p, VkSparseImageFormatProperties2* pProperties = %p)",
4113 physicalDevice, pFormatInfo, pPropertyCount, pProperties);
4114
4115 if(pProperties)
4116 {
4117 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pProperties->pNext);
4118 while(extInfo)
4119 {
4120 UNSUPPORTED("pProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
4121 extInfo = extInfo->pNext;
4122 }
4123 }
4124
4125 // We do not support sparse images.
4126 *pPropertyCount = 0;
4127 }
4128
4129 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolProperties(VkPhysicalDevice physicalDevice, uint32_t *pToolCount, VkPhysicalDeviceToolProperties *pToolProperties)
4130 {
4131 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pToolCount = %p, VkPhysicalDeviceToolProperties* pToolProperties = %p)",
4132 physicalDevice, pToolCount, pToolProperties);
4133
4134 if(!pToolProperties)
4135 {
4136 *pToolCount = 0;
4137 return VK_SUCCESS;
4138 }
4139
4140 return VK_SUCCESS;
4141 }
4142
4143 VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags)
4144 {
4145 TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, VkCommandPoolTrimFlags flags = %d)",
4146 device, static_cast<void *>(commandPool), flags);
4147
4148 if(flags != 0)
4149 {
4150 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
4151 UNSUPPORTED("flags 0x%08X", int(flags));
4152 }
4153
4154 vk::Cast(commandPool)->trim(flags);
4155 }
4156
4157 VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue)
4158 {
4159 TRACE("(VkDevice device = %p, const VkDeviceQueueInfo2* pQueueInfo = %p, VkQueue* pQueue = %p)",
4160 device, pQueueInfo, pQueue);
4161
4162 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pQueueInfo->pNext);
4163 while(extInfo)
4164 {
4165 UNSUPPORTED("pQueueInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
4166 extInfo = extInfo->pNext;
4167 }
4168
4169 if(pQueueInfo->flags != 0)
4170 {
4171 // The only flag that can be set here is VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
4172 // According to the Vulkan 1.2.132 spec, 4.3.1. Queue Family Properties:
4173 // "VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT specifies that the device queue is a
4174 // protected-capable queue. If the protected memory feature is not enabled,
4175 // the VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT bit of flags must not be set."
4176 UNSUPPORTED("VkPhysicalDeviceVulkan11Features::protectedMemory");
4177 }
4178
4179 vkGetDeviceQueue(device, pQueueInfo->queueFamilyIndex, pQueueInfo->queueIndex, pQueue);
4180 }
4181
4182 VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSamplerYcbcrConversion *pYcbcrConversion)
4183 {
4184 TRACE("(VkDevice device = %p, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSamplerYcbcrConversion* pYcbcrConversion = %p)",
4185 device, pCreateInfo, pAllocator, pYcbcrConversion);
4186
4187 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
4188 while(extInfo)
4189 {
4190 switch(extInfo->sType)
4191 {
4192 #ifdef __ANDROID__
4193 case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID:
4194 break;
4195 #endif
4196 default:
4197 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
4198 break;
4199 }
4200 extInfo = extInfo->pNext;
4201 }
4202
4203 return vk::SamplerYcbcrConversion::Create(pAllocator, pCreateInfo, pYcbcrConversion);
4204 }
4205
vkDestroySamplerYcbcrConversion(VkDevice device,VkSamplerYcbcrConversion ycbcrConversion,const VkAllocationCallbacks * pAllocator)4206 VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks *pAllocator)
4207 {
4208 TRACE("(VkDevice device = %p, VkSamplerYcbcrConversion ycbcrConversion = %p, const VkAllocationCallbacks* pAllocator = %p)",
4209 device, static_cast<void *>(ycbcrConversion), pAllocator);
4210
4211 vk::destroy(ycbcrConversion, pAllocator);
4212 }
4213
vkCreateDescriptorUpdateTemplate(VkDevice device,const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate)4214 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
4215 {
4216 TRACE("(VkDevice device = %p, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate = %p)",
4217 device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
4218
4219 if(pCreateInfo->flags != 0)
4220 {
4221 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
4222 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
4223 }
4224
4225 if(pCreateInfo->templateType != VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)
4226 {
4227 UNSUPPORTED("pCreateInfo->templateType %d", int(pCreateInfo->templateType));
4228 }
4229
4230 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
4231 while(extInfo)
4232 {
4233 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
4234 extInfo = extInfo->pNext;
4235 }
4236
4237 return vk::DescriptorUpdateTemplate::Create(pAllocator, pCreateInfo, pDescriptorUpdateTemplate);
4238 }
4239
vkDestroyDescriptorUpdateTemplate(VkDevice device,VkDescriptorUpdateTemplate descriptorUpdateTemplate,const VkAllocationCallbacks * pAllocator)4240 VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks *pAllocator)
4241 {
4242 TRACE("(VkDevice device = %p, VkDescriptorUpdateTemplate descriptorUpdateTemplate = %p, const VkAllocationCallbacks* pAllocator = %p)",
4243 device, static_cast<void *>(descriptorUpdateTemplate), pAllocator);
4244
4245 vk::destroy(descriptorUpdateTemplate, pAllocator);
4246 }
4247
vkUpdateDescriptorSetWithTemplate(VkDevice device,VkDescriptorSet descriptorSet,VkDescriptorUpdateTemplate descriptorUpdateTemplate,const void * pData)4248 VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void *pData)
4249 {
4250 TRACE("(VkDevice device = %p, VkDescriptorSet descriptorSet = %p, VkDescriptorUpdateTemplate descriptorUpdateTemplate = %p, const void* pData = %p)",
4251 device, static_cast<void *>(descriptorSet), static_cast<void *>(descriptorUpdateTemplate), pData);
4252
4253 vk::Cast(descriptorUpdateTemplate)->updateDescriptorSet(vk::Cast(device), descriptorSet, pData);
4254 }
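
// Illustrative sketch (application side, hypothetical names): a descriptor update template
// maps a flat blob of descriptor data onto a descriptor set, replacing per-update
// VkWriteDescriptorSet arrays. Only VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET is
// supported by this implementation.
//
//   VkDescriptorUpdateTemplateEntry entry = {};
//   entry.dstBinding = 0;
//   entry.descriptorCount = 1;
//   entry.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
//   entry.offset = 0;
//   entry.stride = sizeof(VkDescriptorBufferInfo);
//
//   VkDescriptorUpdateTemplateCreateInfo createInfo = {};
//   createInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO;
//   createInfo.descriptorUpdateEntryCount = 1;
//   createInfo.pDescriptorUpdateEntries = &entry;
//   createInfo.templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET;
//   createInfo.descriptorSetLayout = descriptorSetLayout;
//
//   VkDescriptorUpdateTemplate updateTemplate = VK_NULL_HANDLE;
//   vkCreateDescriptorUpdateTemplate(device, &createInfo, nullptr, &updateTemplate);
//
//   VkDescriptorBufferInfo bufferInfo = { buffer, 0, VK_WHOLE_SIZE };
//   vkUpdateDescriptorSetWithTemplate(device, descriptorSet, updateTemplate, &bufferInfo);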

VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo *pExternalBufferInfo, VkExternalBufferProperties *pExternalBufferProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo = %p, VkExternalBufferProperties* pExternalBufferProperties = %p)",
	      physicalDevice, pExternalBufferInfo, pExternalBufferProperties);

	vk::Cast(physicalDevice)->getProperties(pExternalBufferInfo, pExternalBufferProperties);
}

VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo *pExternalFenceInfo, VkExternalFenceProperties *pExternalFenceProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo = %p, VkExternalFenceProperties* pExternalFenceProperties = %p)",
	      physicalDevice, pExternalFenceInfo, pExternalFenceProperties);

	vk::Cast(physicalDevice)->getProperties(pExternalFenceInfo, pExternalFenceProperties);
}

VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo, VkExternalSemaphoreProperties *pExternalSemaphoreProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo = %p, VkExternalSemaphoreProperties* pExternalSemaphoreProperties = %p)",
	      physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);

	vk::Cast(physicalDevice)->getProperties(pExternalSemaphoreInfo, pExternalSemaphoreProperties);
}

VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo, VkDescriptorSetLayoutSupport *pSupport)
{
	TRACE("(VkDevice device = %p, const VkDescriptorSetLayoutCreateInfo* pCreateInfo = %p, VkDescriptorSetLayoutSupport* pSupport = %p)",
	      device, pCreateInfo, pSupport);

	VkBaseOutStructure *layoutSupport = reinterpret_cast<VkBaseOutStructure *>(pSupport->pNext);
	while(layoutSupport)
	{
		switch(layoutSupport->sType)
		{
		case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT:
			break;
		default:
			UNSUPPORTED("pSupport->pNext sType = %s", vk::Stringify(layoutSupport->sType).c_str());
			break;
		}

		layoutSupport = layoutSupport->pNext;
	}

	vk::Cast(device)->getDescriptorSetLayoutSupport(pCreateInfo, pSupport);
}

VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlot(VkDevice device, const VkPrivateDataSlotCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPrivateDataSlot *pPrivateDataSlot)
{
	TRACE("(VkDevice device = %p, const VkPrivateDataSlotCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkPrivateDataSlot* pPrivateDataSlot = %p)",
	      device, pCreateInfo, pAllocator, pPrivateDataSlot);

	return vk::PrivateData::Create(pAllocator, pCreateInfo, pPrivateDataSlot);
}

VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlot(VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkPrivateDataSlot privateDataSlot = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(privateDataSlot), pAllocator);

	vk::Cast(device)->removePrivateDataSlot(vk::Cast(privateDataSlot));
	vk::destroy(privateDataSlot, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateData(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data)
{
	TRACE("(VkDevice device = %p, VkObjectType objectType = %d, uint64_t objectHandle = %" PRIu64 ", VkPrivateDataSlot privateDataSlot = %p, uint64_t data = %" PRIu64 ")",
	      device, objectType, objectHandle, static_cast<void *>(privateDataSlot), data);

	return vk::Cast(device)->setPrivateData(objectType, objectHandle, vk::Cast(privateDataSlot), data);
}

VKAPI_ATTR void VKAPI_CALL vkGetPrivateData(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t *pData)
{
	TRACE("(VkDevice device = %p, VkObjectType objectType = %d, uint64_t objectHandle = %" PRIu64 ", VkPrivateDataSlot privateDataSlot = %p, uint64_t* pData = %p)",
	      device, objectType, objectHandle, static_cast<void *>(privateDataSlot), pData);

	vk::Cast(device)->getPrivateData(objectType, objectHandle, vk::Cast(privateDataSlot), pData);
}

VKAPI_ATTR void VKAPI_CALL vkGetDeviceBufferMemoryRequirements(VkDevice device, const VkDeviceBufferMemoryRequirements *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
{
	TRACE("(VkDevice device = %p, const VkDeviceBufferMemoryRequirements* pInfo = %p, VkMemoryRequirements2* pMemoryRequirements = %p)",
	      device, pInfo, pMemoryRequirements);

	pMemoryRequirements->memoryRequirements =
	    vk::Buffer::GetMemoryRequirements(pInfo->pCreateInfo->size, pInfo->pCreateInfo->usage);
}

VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageMemoryRequirements(VkDevice device, const VkDeviceImageMemoryRequirements *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
{
	TRACE("(VkDevice device = %p, const VkDeviceImageMemoryRequirements* pInfo = %p, VkMemoryRequirements2* pMemoryRequirements = %p)",
	      device, pInfo, pMemoryRequirements);

	const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pInfo->pNext);
	while(extInfo)
	{
		UNSUPPORTED("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
		extInfo = extInfo->pNext;
	}

	// Create a temporary image object to obtain the memory requirements.
	// TODO(b/221299948): Reduce overhead by using a lightweight local proxy.
	pMemoryRequirements->memoryRequirements = {};
	const VkAllocationCallbacks *pAllocator = nullptr;
	VkImage image = { VK_NULL_HANDLE };
	VkResult result = vk::Image::Create(pAllocator, pInfo->pCreateInfo, &image, vk::Cast(device));
	if(result == VK_SUCCESS)
	{
		vk::Cast(image)->getMemoryRequirements(pMemoryRequirements);
	}
	vk::destroy(image, pAllocator);
}
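
// Illustrative sketch (application side, hypothetical names): the Vulkan 1.3 entry point
// above lets an application query image memory requirements from a VkImageCreateInfo
// alone, without creating the VkImage first.
//
//   VkImageCreateInfo imageCreateInfo = {};  // filled in with the prospective image parameters
//   VkDeviceImageMemoryRequirements info = {};
//   info.sType = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS;
//   info.pCreateInfo = &imageCreateInfo;
//   VkMemoryRequirements2 requirements = {};
//   requirements.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
//   vkGetDeviceImageMemoryRequirements(device, &info, &requirements);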

VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirements(VkDevice device, const VkDeviceImageMemoryRequirements *pInfo, uint32_t *pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
{
	TRACE("(VkDevice device = %p, const VkDeviceImageMemoryRequirements* pInfo = %p, uint32_t* pSparseMemoryRequirementCount = %p, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements = %p)",
	      device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);

	*pSparseMemoryRequirementCount = 0;
}

VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t lineStippleFactor = %u, uint16_t lineStipplePattern = %u)",
	      commandBuffer, lineStippleFactor, lineStipplePattern);

	static constexpr uint16_t solidLine = 0xFFFFu;
	if(lineStipplePattern != solidLine)
	{
		// VkPhysicalDeviceLineRasterizationFeaturesEXT::stippled*Lines are all set to VK_FALSE and,
		// according to the Vulkan spec for VkPipelineRasterizationLineStateCreateInfoEXT:
		// "If stippledLineEnable is VK_FALSE, the values of lineStippleFactor and lineStipplePattern are ignored."
		WARN("vkCmdSetLineStippleEXT: line stipple pattern ignored : 0x%04X", lineStipplePattern);
	}
}

VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT *pLabelInfo)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
	      commandBuffer, pLabelInfo);

	vk::Cast(commandBuffer)->beginDebugUtilsLabel(pLabelInfo);
}

VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer)
{
	TRACE("(VkCommandBuffer commandBuffer = %p)", commandBuffer);

	vk::Cast(commandBuffer)->endDebugUtilsLabel();
}

VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT *pLabelInfo)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
	      commandBuffer, pLabelInfo);

	vk::Cast(commandBuffer)->insertDebugUtilsLabel(pLabelInfo);
}

VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDebugUtilsMessengerEXT *pMessenger)
{
	TRACE("(VkInstance instance = %p, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDebugUtilsMessengerEXT* pMessenger = %p)",
	      instance, pCreateInfo, pAllocator, pMessenger);

	if(pCreateInfo->flags != 0)
	{
		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
		UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
	}

	return vk::DebugUtilsMessenger::Create(pAllocator, pCreateInfo, pMessenger);
}

VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkInstance instance = %p, VkDebugUtilsMessengerEXT messenger = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      instance, static_cast<void *>(messenger), pAllocator);

	vk::destroy(messenger, pAllocator);
}

VKAPI_ATTR void VKAPI_CALL vkQueueBeginDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT *pLabelInfo)
{
	TRACE("(VkQueue queue = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
	      queue, pLabelInfo);

	vk::Cast(queue)->beginDebugUtilsLabel(pLabelInfo);
}

VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT(VkQueue queue)
{
	TRACE("(VkQueue queue = %p)", queue);

	vk::Cast(queue)->endDebugUtilsLabel();
}

VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT *pLabelInfo)
{
	TRACE("(VkQueue queue = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
	      queue, pLabelInfo);

	vk::Cast(queue)->insertDebugUtilsLabel(pLabelInfo);
}

VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsObjectNameInfoEXT *pNameInfo)
{
	TRACE("(VkDevice device = %p, const VkDebugUtilsObjectNameInfoEXT* pNameInfo = %p)",
	      device, pNameInfo);

	return vk::Cast(device)->setDebugUtilsObjectName(pNameInfo);
}

VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsObjectTagInfoEXT *pTagInfo)
{
	TRACE("(VkDevice device = %p, const VkDebugUtilsObjectTagInfoEXT* pTagInfo = %p)",
	      device, pTagInfo);

	return vk::Cast(device)->setDebugUtilsObjectTag(pTagInfo);
}

VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData)
{
	TRACE("(VkInstance instance = %p, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity = %d, VkDebugUtilsMessageTypeFlagsEXT messageTypes = %d, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData = %p)",
	      instance, messageSeverity, messageTypes, pCallbackData);

	vk::Cast(instance)->submitDebugUtilsMessage(messageSeverity, messageTypes, pCallbackData);
}

VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToImageEXT(VkDevice device, const VkCopyMemoryToImageInfoEXT *pCopyMemoryToImageInfo)
{
	TRACE("(VkDevice device = %p, const VkCopyMemoryToImageInfoEXT* pCopyMemoryToImageInfo = %p)",
	      device, pCopyMemoryToImageInfo);

	constexpr auto allRecognizedFlagBits = VK_HOST_IMAGE_COPY_MEMCPY_EXT;
	ASSERT(!(pCopyMemoryToImageInfo->flags & ~allRecognizedFlagBits));

	vk::Image *dstImage = vk::Cast(pCopyMemoryToImageInfo->dstImage);
	for(uint32_t i = 0; i < pCopyMemoryToImageInfo->regionCount; i++)
	{
		dstImage->copyFromMemory(pCopyMemoryToImageInfo->pRegions[i]);
	}

	return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToMemoryEXT(VkDevice device, const VkCopyImageToMemoryInfoEXT *pCopyImageToMemoryInfo)
{
	TRACE("(VkDevice device = %p, const VkCopyImageToMemoryInfoEXT* pCopyImageToMemoryInfo = %p)",
	      device, pCopyImageToMemoryInfo);

	constexpr auto allRecognizedFlagBits = VK_HOST_IMAGE_COPY_MEMCPY_EXT;
	ASSERT(!(pCopyImageToMemoryInfo->flags & ~allRecognizedFlagBits));

	vk::Image *srcImage = vk::Cast(pCopyImageToMemoryInfo->srcImage);
	for(uint32_t i = 0; i < pCopyImageToMemoryInfo->regionCount; i++)
	{
		srcImage->copyToMemory(pCopyImageToMemoryInfo->pRegions[i]);
	}

	return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToImageEXT(VkDevice device, const VkCopyImageToImageInfoEXT *pCopyImageToImageInfo)
{
	TRACE("(VkDevice device = %p, const VkCopyImageToImageInfoEXT* pCopyImageToImageInfo = %p)",
	      device, pCopyImageToImageInfo);

	constexpr auto allRecognizedFlagBits = VK_HOST_IMAGE_COPY_MEMCPY_EXT;
	ASSERT(!(pCopyImageToImageInfo->flags & ~allRecognizedFlagBits));

	vk::Image *srcImage = vk::Cast(pCopyImageToImageInfo->srcImage);
	vk::Image *dstImage = vk::Cast(pCopyImageToImageInfo->dstImage);
	for(uint32_t i = 0; i < pCopyImageToImageInfo->regionCount; i++)
	{
		srcImage->copyTo(dstImage, pCopyImageToImageInfo->pRegions[i]);
	}

	return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkTransitionImageLayoutEXT(VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfoEXT *pTransitions)
{
	TRACE("(VkDevice device = %p, uint32_t transitionCount = %u, const VkHostImageLayoutTransitionInfoEXT* pTransitions = %p)",
	      device, transitionCount, pTransitions);

	// This function is a no-op; there are no image layouts in SwiftShader.
	return VK_SUCCESS;
}
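
// Illustrative sketch (application side, hypothetical names): VK_EXT_host_image_copy lets
// the host write texel data directly into an image without a staging buffer or command
// buffer. Layout transitions via vkTransitionImageLayoutEXT are no-ops in this
// implementation, but a portable application would still perform them.
//
//   VkMemoryToImageCopyEXT region = {};
//   region.sType = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT;
//   region.pHostPointer = pixelData;
//   region.imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
//   region.imageExtent = { width, height, 1 };
//
//   VkCopyMemoryToImageInfoEXT copyInfo = {};
//   copyInfo.sType = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT;
//   copyInfo.dstImage = image;
//   copyInfo.dstImageLayout = VK_IMAGE_LAYOUT_GENERAL;
//   copyInfo.regionCount = 1;
//   copyInfo.pRegions = &region;
//   vkCopyMemoryToImageEXT(device, &copyInfo);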

#ifdef VK_USE_PLATFORM_XCB_KHR
VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkXcbSurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurfaceKHR* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	// VUID-VkXcbSurfaceCreateInfoKHR-connection-01310 : connection must point to a valid X11 xcb_connection_t
	ASSERT(pCreateInfo->connection);

	return vk::XcbSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}

VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t *connection, xcb_visualid_t visual_id)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, xcb_connection_t* connection = %p, xcb_visualid_t visual_id = %d)",
	      physicalDevice, int(queueFamilyIndex), connection, int(visual_id));

	return VK_TRUE;
}
#endif

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkWaylandSurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurfaceKHR* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::WaylandSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}

VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display *display)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, struct wl_display* display = %p)",
	      physicalDevice, int(queueFamilyIndex), display);

	return VK_TRUE;
}
#endif

#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDirectFBSurfaceEXT(VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkDirectFBSurfaceCreateInfoEXT* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurfaceKHR* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::DirectFBSurfaceEXT::Create(pAllocator, pCreateInfo, pSurface);
}

VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceDirectFBPresentationSupportEXT(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB *dfb)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, IDirectFB* dfb = %p)",
	      physicalDevice, int(queueFamilyIndex), dfb);

	return VK_TRUE;
}
#endif

#ifdef VK_USE_PLATFORM_DISPLAY_KHR
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDisplayModeKHR *pMode)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkDisplayKHR display = %p, VkDisplayModeCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkDisplayModeKHR* pMode = %p)",
	      physicalDevice, static_cast<void *>(display), pCreateInfo, pAllocator, pMode);

	return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkDisplaySurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurfaceKHR* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::DisplaySurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount, VkDisplayModePropertiesKHR *pProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkDisplayKHR display = %p, uint32_t* pPropertyCount = %p, VkDisplayModePropertiesKHR* pProperties = %p)",
	      physicalDevice, static_cast<void *>(display), pPropertyCount, pProperties);

	return vk::DisplaySurfaceKHR::GetDisplayModeProperties(pPropertyCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR *pCapabilities)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkDisplayModeKHR mode = %p, uint32_t planeIndex = %d, VkDisplayPlaneCapabilitiesKHR* pCapabilities = %p)",
	      physicalDevice, static_cast<void *>(mode), planeIndex, pCapabilities);

	return vk::DisplaySurfaceKHR::GetDisplayPlaneCapabilities(pCapabilities);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t *pDisplayCount, VkDisplayKHR *pDisplays)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t planeIndex = %d, uint32_t* pDisplayCount = %p, VkDisplayKHR* pDisplays = %p)",
	      physicalDevice, planeIndex, pDisplayCount, pDisplays);

	return vk::DisplaySurfaceKHR::GetDisplayPlaneSupportedDisplays(pDisplayCount, pDisplays);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayPlanePropertiesKHR *pProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pPropertyCount = %p, VkDisplayPlanePropertiesKHR* pProperties = %p)",
	      physicalDevice, pPropertyCount, pProperties);

	return vk::DisplaySurfaceKHR::GetPhysicalDeviceDisplayPlaneProperties(pPropertyCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayPropertiesKHR *pProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pPropertyCount = %p, VkDisplayPropertiesKHR* pProperties = %p)",
	      physicalDevice, pPropertyCount, pProperties);

	return vk::DisplaySurfaceKHR::GetPhysicalDeviceDisplayProperties(pPropertyCount, pProperties);
}
#endif

#ifdef VK_USE_PLATFORM_MACOS_MVK
VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkMacOSSurfaceCreateInfoMVK* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurfaceKHR* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::MacOSSurfaceMVK::Create(pAllocator, pCreateInfo, pSurface);
}
#endif

#ifdef VK_USE_PLATFORM_METAL_EXT
VKAPI_ATTR VkResult VKAPI_CALL vkCreateMetalSurfaceEXT(VkInstance instance, const VkMetalSurfaceCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkMetalSurfaceCreateInfoEXT* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurfaceKHR* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::MetalSurfaceEXT::Create(pAllocator, pCreateInfo, pSurface);
}
#endif

#ifdef VK_USE_PLATFORM_WIN32_KHR
VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkWin32SurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurfaceKHR* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::Win32SurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}

VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d)",
	      physicalDevice, queueFamilyIndex);
	return VK_TRUE;
}
#endif

VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkHeadlessSurfaceCreateInfoEXT* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurfaceKHR* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::HeadlessSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}

#ifndef __ANDROID__
VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkInstance instance = %p, VkSurfaceKHR surface = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      instance, static_cast<void *>(surface), pAllocator);

	vk::destroy(surface, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32 *pSupported)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, VkSurfaceKHR surface = %p, VkBool32* pSupported = %p)",
	      physicalDevice, int(queueFamilyIndex), static_cast<void *>(surface), pSupported);

	*pSupported = VK_TRUE;
	return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR *pSurfaceCapabilities)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities = %p)",
	      physicalDevice, static_cast<void *>(surface), pSurfaceCapabilities);

	return vk::Cast(surface)->getSurfaceCapabilities(nullptr, pSurfaceCapabilities, nullptr);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, VkSurfaceCapabilities2KHR *pSurfaceCapabilities)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo = %p, VkSurfaceCapabilities2KHR* pSurfaceCapabilities = %p)",
	      physicalDevice, pSurfaceInfo, pSurfaceCapabilities);

	return vk::Cast(pSurfaceInfo->surface)->getSurfaceCapabilities(pSurfaceInfo->pNext, &pSurfaceCapabilities->surfaceCapabilities, pSurfaceCapabilities->pNext);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pSurfaceFormatCount, VkSurfaceFormatKHR *pSurfaceFormats)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, uint32_t* pSurfaceFormatCount = %p, VkSurfaceFormatKHR* pSurfaceFormats = %p)",
	      physicalDevice, static_cast<void *>(surface), pSurfaceFormatCount, pSurfaceFormats);

	if(!pSurfaceFormats)
	{
		*pSurfaceFormatCount = vk::Cast(surface)->getSurfaceFormatsCount(nullptr);
		return VK_SUCCESS;
	}

	std::vector<VkSurfaceFormat2KHR> formats(*pSurfaceFormatCount);

	VkResult result = vk::Cast(surface)->getSurfaceFormats(nullptr, pSurfaceFormatCount, formats.data());

	if(result == VK_SUCCESS || result == VK_INCOMPLETE)
	{
		// The value returned in pSurfaceFormatCount is either capped at the original value,
		// or is smaller because there aren't that many formats.
		ASSERT(*pSurfaceFormatCount <= formats.size());

		for(size_t i = 0; i < *pSurfaceFormatCount; ++i)
		{
			pSurfaceFormats[i] = formats[i].surfaceFormat;
		}
	}

	return result;
}
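
// Illustrative sketch (application side, hypothetical names): the usual two-call idiom for
// enumerating surface formats against the query implemented above.
//
//   uint32_t formatCount = 0;
//   vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, &formatCount, nullptr);
//   std::vector<VkSurfaceFormatKHR> formats(formatCount);
//   vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, &formatCount, formats.data());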

VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, uint32_t *pSurfaceFormatCount, VkSurfaceFormat2KHR *pSurfaceFormats)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo = %p, uint32_t* pSurfaceFormatCount = %p, VkSurfaceFormat2KHR* pSurfaceFormats = %p)",
	      physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats);

	if(!pSurfaceFormats)
	{
		*pSurfaceFormatCount = vk::Cast(pSurfaceInfo->surface)->getSurfaceFormatsCount(pSurfaceInfo->pNext);
		return VK_SUCCESS;
	}

	return vk::Cast(pSurfaceInfo->surface)->getSurfaceFormats(pSurfaceInfo->pNext, pSurfaceFormatCount, pSurfaceFormats);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pPresentModeCount, VkPresentModeKHR *pPresentModes)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, uint32_t* pPresentModeCount = %p, VkPresentModeKHR* pPresentModes = %p)",
	      physicalDevice, static_cast<void *>(surface), pPresentModeCount, pPresentModes);

	if(!pPresentModes)
	{
		*pPresentModeCount = vk::Cast(surface)->getPresentModeCount();
		return VK_SUCCESS;
	}

	return vk::Cast(surface)->getPresentModes(pPresentModeCount, pPresentModes);
}

VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain)
{
	TRACE("(VkDevice device = %p, const VkSwapchainCreateInfoKHR* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSwapchainKHR* pSwapchain = %p)",
	      device, pCreateInfo, pAllocator, pSwapchain);

	if(pCreateInfo->oldSwapchain)
	{
		vk::Cast(pCreateInfo->oldSwapchain)->retire();
	}

	if(vk::Cast(pCreateInfo->surface)->hasAssociatedSwapchain())
	{
		return VK_ERROR_NATIVE_WINDOW_IN_USE_KHR;
	}

	VkResult status = vk::SwapchainKHR::Create(pAllocator, pCreateInfo, pSwapchain);

	if(status != VK_SUCCESS)
	{
		return status;
	}

	auto *swapchain = vk::Cast(*pSwapchain);
	status = swapchain->createImages(device, pCreateInfo);

	if(status != VK_SUCCESS)
	{
		vk::destroy(*pSwapchain, pAllocator);
		return status;
	}

	vk::Cast(pCreateInfo->surface)->associateSwapchain(swapchain);

	return VK_SUCCESS;
}
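
// Illustrative sketch (application side, hypothetical names): recreating a swapchain, e.g.
// after a window resize. The implementation above retires pCreateInfo->oldSwapchain before
// creating the replacement, but the old handle must still be destroyed by the application.
//
//   VkSwapchainCreateInfoKHR createInfo = {};  // filled in with the new surface dimensions
//   createInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
//   createInfo.oldSwapchain = oldSwapchain;
//   VkSwapchainKHR newSwapchain = VK_NULL_HANDLE;
//   if(vkCreateSwapchainKHR(device, &createInfo, nullptr, &newSwapchain) == VK_SUCCESS)
//   {
//       vkDestroySwapchainKHR(device, oldSwapchain, nullptr);
//   }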

VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkSwapchainKHR swapchain = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(swapchain), pAllocator);

	vk::destroy(swapchain, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages)
{
	TRACE("(VkDevice device = %p, VkSwapchainKHR swapchain = %p, uint32_t* pSwapchainImageCount = %p, VkImage* pSwapchainImages = %p)",
	      device, static_cast<void *>(swapchain), pSwapchainImageCount, pSwapchainImages);

	if(!pSwapchainImages)
	{
		*pSwapchainImageCount = vk::Cast(swapchain)->getImageCount();
		return VK_SUCCESS;
	}

	return vk::Cast(swapchain)->getImages(pSwapchainImageCount, pSwapchainImages);
}

VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex)
{
	TRACE("(VkDevice device = %p, VkSwapchainKHR swapchain = %p, uint64_t timeout = %" PRIu64 ", VkSemaphore semaphore = %p, VkFence fence = %p, uint32_t* pImageIndex = %p)",
	      device, static_cast<void *>(swapchain), timeout, static_cast<void *>(semaphore), static_cast<void *>(fence), pImageIndex);

	return vk::Cast(swapchain)->getNextImage(timeout, vk::DynamicCast<vk::BinarySemaphore>(semaphore), vk::Cast(fence), pImageIndex);
}

VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo)
{
	TRACE("(VkQueue queue = %p, const VkPresentInfoKHR* pPresentInfo = %p)",
	      queue, pPresentInfo);

	return vk::Cast(queue)->present(pPresentInfo);
}
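
// Illustrative sketch (application side, hypothetical names): a minimal acquire/present
// cycle built on the two entry points above. Work submitted in between would wait on
// acquireSemaphore and signal renderSemaphore.
//
//   uint32_t imageIndex = 0;
//   vkAcquireNextImageKHR(device, swapchain, UINT64_MAX, acquireSemaphore, VK_NULL_HANDLE, &imageIndex);
//
//   VkPresentInfoKHR presentInfo = {};
//   presentInfo.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
//   presentInfo.waitSemaphoreCount = 1;
//   presentInfo.pWaitSemaphores = &renderSemaphore;
//   presentInfo.swapchainCount = 1;
//   presentInfo.pSwapchains = &swapchain;
//   presentInfo.pImageIndices = &imageIndex;
//   vkQueuePresentKHR(presentQueue, &presentInfo);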

VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo, uint32_t *pImageIndex)
{
	TRACE("(VkDevice device = %p, const VkAcquireNextImageInfoKHR* pAcquireInfo = %p, uint32_t* pImageIndex = %p)",
	      device, pAcquireInfo, pImageIndex);

	return vk::Cast(pAcquireInfo->swapchain)->getNextImage(pAcquireInfo->timeout, vk::DynamicCast<vk::BinarySemaphore>(pAcquireInfo->semaphore), vk::Cast(pAcquireInfo->fence), pImageIndex);
}

VKAPI_ATTR VkResult VKAPI_CALL vkReleaseSwapchainImagesEXT(VkDevice device, const VkReleaseSwapchainImagesInfoEXT *pReleaseInfo)
{
	TRACE("(VkDevice device = %p, const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo = %p)",
	      device, pReleaseInfo);

	return vk::Cast(pReleaseInfo->swapchain)->releaseImages(pReleaseInfo->imageIndexCount, pReleaseInfo->pImageIndices);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR *pDeviceGroupPresentCapabilities)
{
	TRACE("(VkDevice device = %p, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities = %p)",
	      device, pDeviceGroupPresentCapabilities);

	for(unsigned int i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; i++)
	{
		// The only real physical device in the presentation group is device 0,
		// and it can present to itself.
		pDeviceGroupPresentCapabilities->presentMask[i] = (i == 0) ? 1 : 0;
	}

	pDeviceGroupPresentCapabilities->modes = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR;

	return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR *pModes)
{
	TRACE("(VkDevice device = %p, VkSurfaceKHR surface = %p, VkDeviceGroupPresentModeFlagsKHR* pModes = %p)",
	      device, static_cast<void *>(surface), pModes);

	*pModes = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR;
	return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pRectCount, VkRect2D *pRects)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, uint32_t* pRectCount = %p, VkRect2D* pRects = %p)",
	      physicalDevice, static_cast<void *>(surface), pRectCount, pRects);

	return vk::Cast(surface)->getPresentRectangles(pRectCount, pRects);
}

#endif  // ! __ANDROID__

#ifdef __ANDROID__

VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainGrallocUsage2ANDROID(VkDevice device, VkFormat format, VkImageUsageFlags imageUsage, VkSwapchainImageUsageFlagsANDROID swapchainUsage, uint64_t *grallocConsumerUsage, uint64_t *grallocProducerUsage)
{
	TRACE("(VkDevice device = %p, VkFormat format = %d, VkImageUsageFlags imageUsage = %d, VkSwapchainImageUsageFlagsANDROID swapchainUsage = %d, uint64_t* grallocConsumerUsage = %p, uint64_t* grallocProducerUsage = %p)",
	      device, format, imageUsage, swapchainUsage, grallocConsumerUsage, grallocProducerUsage);

	*grallocConsumerUsage = 0;
	*grallocProducerUsage = GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN;

	return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainGrallocUsageANDROID(VkDevice device, VkFormat format, VkImageUsageFlags imageUsage, int *grallocUsage)
{
	TRACE("(VkDevice device = %p, VkFormat format = %d, VkImageUsageFlags imageUsage = %d, int* grallocUsage = %p)",
	      device, format, imageUsage, grallocUsage);

	*grallocUsage = GRALLOC_USAGE_SW_WRITE_OFTEN;

	return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkAcquireImageANDROID(VkDevice device, VkImage image, int nativeFenceFd, VkSemaphore semaphore, VkFence fence)
{
	TRACE("(VkDevice device = %p, VkImage image = %p, int nativeFenceFd = %d, VkSemaphore semaphore = %p, VkFence fence = %p)",
	      device, static_cast<void *>(image), nativeFenceFd, static_cast<void *>(semaphore), static_cast<void *>(fence));

	if(nativeFenceFd >= 0)
	{
		sync_wait(nativeFenceFd, -1);
		close(nativeFenceFd);
	}

	if(fence != VK_NULL_HANDLE)
	{
		vk::Cast(fence)->complete();
	}

	if(semaphore != VK_NULL_HANDLE)
	{
		vk::DynamicCast<vk::BinarySemaphore>(semaphore)->signal();
	}

	return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkQueueSignalReleaseImageANDROID(VkQueue queue, uint32_t waitSemaphoreCount, const VkSemaphore *pWaitSemaphores, VkImage image, int *pNativeFenceFd)
{
	TRACE("(VkQueue queue = %p, uint32_t waitSemaphoreCount = %d, const VkSemaphore* pWaitSemaphores = %p, VkImage image = %p, int* pNativeFenceFd = %p)",
	      queue, waitSemaphoreCount, pWaitSemaphores, static_cast<void *>(image), pNativeFenceFd);

	// This is a hack to deal with screen tearing for now.
	// Need to correctly implement threading using VkSemaphore
	// to get rid of it. b/132458423
	vkQueueWaitIdle(queue);

	*pNativeFenceFd = -1;

	return vk::Cast(image)->prepareForExternalUseANDROID();
}
#endif  // __ANDROID__
}
