1 //
2 // Copyright (c) 2017-2022 Advanced Micro Devices, Inc. All rights reserved.
3 //
4 // Permission is hereby granted, free of charge, to any person obtaining a copy
5 // of this software and associated documentation files (the "Software"), to deal
6 // in the Software without restriction, including without limitation the rights
7 // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8 // copies of the Software, and to permit persons to whom the Software is
9 // furnished to do so, subject to the following conditions:
10 //
11 // The above copyright notice and this permission notice shall be included in
12 // all copies or substantial portions of the Software.
13 //
14 // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19 // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
20 // THE SOFTWARE.
21 //
22
23 #ifdef _WIN32
24
25 #include "SparseBindingTest.h"
26 #include "Tests.h"
27 #include "VmaUsage.h"
28 #include "Common.h"
29 #include <atomic>
30 #include <Shlwapi.h>
31
32 #pragma comment(lib, "shlwapi.lib")
33
// Directories searched, in order, for compiled SPIR-V shader files (see LoadShader).
static const char* const SHADER_PATH1 = "./";
static const char* const SHADER_PATH2 = "../bin/";
// Win32 window class name used for the sample's window.
static const wchar_t* const WINDOW_CLASS_NAME = L"VULKAN_MEMORY_ALLOCATOR_SAMPLE";
static const char* const VALIDATION_LAYER_NAME = "VK_LAYER_KHRONOS_validation";
// Application title in narrow (VkApplicationInfo) and wide (Win32) variants.
static const char* const APP_TITLE_A = "Vulkan Memory Allocator Sample 3.0.1";
static const wchar_t* const APP_TITLE_W = L"Vulkan Memory Allocator Sample 3.0.1";

// Selects MAILBOX vs IMMEDIATE as the preferred present mode (see ChooseSwapPresentMode).
static const bool VSYNC = true;
// Number of main command buffers (and matching fences) cycled through via g_NextCommandBufferIndex.
static const uint32_t COMMAND_BUFFER_COUNT = 2;
// Arbitrary marker passed as pUserData to the custom CPU allocation callbacks;
// each callback asserts it receives exactly this value.
static void* const CUSTOM_CPU_ALLOCATION_CALLBACK_USER_DATA = (void*)(intptr_t)43564544;
static const bool USE_CUSTOM_CPU_ALLOCATION_CALLBACKS = true;
45
// Process exit codes returned from the application. Negative values indicate failure.
enum class ExitCode : int
{
    GPUList = 2,
    Help = 1,
    Success = 0,
    RuntimeError = -1,
    CommandLineError = -2,
};
54
// Core Vulkan/VMA handles, created during initialization.
VkPhysicalDevice g_hPhysicalDevice;
VkDevice g_hDevice;
VmaAllocator g_hAllocator;
VkInstance g_hVulkanInstance;

// Requested by default; cleared in VulkanUsage::Init when the validation layer
// is not installed on this system.
bool g_EnableValidationLayer = true;
// Availability flags for optional instance/device extensions.
bool VK_KHR_get_memory_requirements2_enabled = false;
bool VK_KHR_get_physical_device_properties2_enabled = false;
bool VK_KHR_dedicated_allocation_enabled = false;
bool VK_KHR_bind_memory2_enabled = false;
bool VK_EXT_memory_budget_enabled = false;
bool VK_AMD_device_coherent_memory_enabled = false;
bool VK_KHR_buffer_device_address_enabled = false;
bool VK_EXT_memory_priority_enabled = false;
bool VK_EXT_debug_utils_enabled = false;
bool g_SparseBindingEnabled = false;

// # Pointers to functions from extensions
PFN_vkGetBufferDeviceAddressKHR g_vkGetBufferDeviceAddressKHR;

// Win32 window state.
static HINSTANCE g_hAppInstance;
static HWND g_hWnd;
static LONG g_SizeX = 1280, g_SizeY = 720;

// Surface and swapchain objects.
static VkSurfaceKHR g_hSurface;
static VkQueue g_hPresentQueue;
static VkSurfaceFormatKHR g_SurfaceFormat;
static VkExtent2D g_Extent;
static VkSwapchainKHR g_hSwapchain;
static std::vector<VkImage> g_SwapchainImages;
static std::vector<VkImageView> g_SwapchainImageViews;
static std::vector<VkFramebuffer> g_Framebuffers;

// Command submission: COMMAND_BUFFER_COUNT command buffers with one fence each,
// cycled via g_NextCommandBufferIndex.
static VkCommandPool g_hCommandPool;
static VkCommandBuffer g_MainCommandBuffers[COMMAND_BUFFER_COUNT];
static VkFence g_MainCommandBufferExecutedFances[COMMAND_BUFFER_COUNT]; // "Fances" [sic] - identifier kept, may be referenced elsewhere in the file
VkFence g_ImmediateFence;
static uint32_t g_NextCommandBufferIndex;
static VkSemaphore g_hImageAvailableSemaphore;
static VkSemaphore g_hRenderFinishedSemaphore;
// Queue family indices; UINT_MAX means "not found/selected yet".
static uint32_t g_GraphicsQueueFamilyIndex = UINT_MAX;
static uint32_t g_PresentQueueFamilyIndex = UINT_MAX;
static uint32_t g_SparseBindingQueueFamilyIndex = UINT_MAX;
static VkDescriptorSetLayout g_hDescriptorSetLayout;
static VkDescriptorPool g_hDescriptorPool;
static VkDescriptorSet g_hDescriptorSet; // Automatically destroyed with m_DescriptorPool.
static VkSampler g_hSampler;
// Depth buffer objects.
static VkFormat g_DepthFormat;
static VkImage g_hDepthImage;
static VmaAllocation g_hDepthImageAlloc;
static VkImageView g_hDepthImageView;

// Surface properties queried from the physical device.
static VkSurfaceCapabilitiesKHR g_SurfaceCapabilities;
static std::vector<VkSurfaceFormatKHR> g_SurfaceFormats;
static std::vector<VkPresentModeKHR> g_PresentModes;

// Message severities/types forwarded to MyDebugReportCallback
// (VERBOSE and INFO are commented out, i.e. disabled).
static const VkDebugUtilsMessageSeverityFlagsEXT DEBUG_UTILS_MESSENGER_MESSAGE_SEVERITY =
    //VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT |
    //VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT |
    VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
    VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
static const VkDebugUtilsMessageTypeFlagsEXT DEBUG_UTILS_MESSENGER_MESSAGE_TYPE =
    VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
    VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
    VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
// VK_EXT_debug_utils entry points, loaded in VulkanUsage::RegisterDebugCallbacks.
static PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT_Func;
static PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT_Func;
static PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT_Func;

static VkQueue g_hGraphicsQueue;
VkQueue g_hSparseBindingQueue;
// Command buffer recorded/submitted by Begin/EndSingleTimeCommands for one-off transfers.
VkCommandBuffer g_hTemporaryCommandBuffer;

// Graphics pipeline objects.
static VkPipelineLayout g_hPipelineLayout;
static VkRenderPass g_hRenderPass;
static VkPipeline g_hPipeline;

// Cube mesh buffers, created in CreateMesh.
static VkBuffer g_hVertexBuffer;
static VmaAllocation g_hVertexBufferAlloc;
static VkBuffer g_hIndexBuffer;
static VmaAllocation g_hIndexBufferAlloc;
static uint32_t g_VertexCount;
static uint32_t g_IndexCount;

// Procedural test texture, created in CreateTexture.
static VkImage g_hTextureImage;
static VmaAllocation g_hTextureImageAlloc;
static VkImageView g_hTextureImageView;

// Count of live CPU-side allocations made through g_CpuAllocationCallbacks;
// used to detect leaks and double-frees.
static std::atomic_uint32_t g_CpuAllocCount;
142
CustomCpuAllocation(void * pUserData,size_t size,size_t alignment,VkSystemAllocationScope allocationScope)143 static void* CustomCpuAllocation(
144 void* pUserData, size_t size, size_t alignment,
145 VkSystemAllocationScope allocationScope)
146 {
147 assert(pUserData == CUSTOM_CPU_ALLOCATION_CALLBACK_USER_DATA);
148 void* const result = _aligned_malloc(size, alignment);
149 if(result)
150 {
151 ++g_CpuAllocCount;
152 }
153 return result;
154 }
155
CustomCpuReallocation(void * pUserData,void * pOriginal,size_t size,size_t alignment,VkSystemAllocationScope allocationScope)156 static void* CustomCpuReallocation(
157 void* pUserData, void* pOriginal, size_t size, size_t alignment,
158 VkSystemAllocationScope allocationScope)
159 {
160 assert(pUserData == CUSTOM_CPU_ALLOCATION_CALLBACK_USER_DATA);
161 void* const result = _aligned_realloc(pOriginal, size, alignment);
162 if(pOriginal && !result)
163 {
164 --g_CpuAllocCount;
165 }
166 else if(!pOriginal && result)
167 {
168 ++g_CpuAllocCount;
169 }
170 return result;
171 }
172
CustomCpuFree(void * pUserData,void * pMemory)173 static void CustomCpuFree(void* pUserData, void* pMemory)
174 {
175 assert(pUserData == CUSTOM_CPU_ALLOCATION_CALLBACK_USER_DATA);
176 if(pMemory)
177 {
178 const uint32_t oldAllocCount = g_CpuAllocCount.fetch_sub(1);
179 TEST(oldAllocCount > 0);
180 _aligned_free(pMemory);
181 }
182 }
183
// Wires the custom CPU allocation functions above into a VkAllocationCallbacks
// structure. The internal-allocation notification members are left zero.
static const VkAllocationCallbacks g_CpuAllocationCallbacks = {
    CUSTOM_CPU_ALLOCATION_CALLBACK_USER_DATA, // pUserData
    &CustomCpuAllocation, // pfnAllocation
    &CustomCpuReallocation, // pfnReallocation
    &CustomCpuFree // pfnFree
};

// Points to g_CpuAllocationCallbacks when USE_CUSTOM_CPU_ALLOCATION_CALLBACKS
// is true (set in VulkanUsage::Init); otherwise stays null.
const VkAllocationCallbacks* g_Allocs;

// GPU selection criteria parsed from the command line: either an explicit
// index or a case-insensitive device-name substring.
// UINT32_MAX / empty string mean "not specified".
struct GPUSelection
{
    uint32_t Index = UINT32_MAX;
    std::wstring Substring;
};
198
// Owns creation (Init) and destruction (~VulkanUsage) of the Vulkan instance
// and, when VK_EXT_debug_utils is available, the debug messenger.
class VulkanUsage
{
public:
    void Init();
    ~VulkanUsage();
    void PrintPhysicalDeviceList() const;
    // If failed, returns VK_NULL_HANDLE.
    VkPhysicalDevice SelectPhysicalDevice(const GPUSelection& GPUSelection) const;

private:
    VkDebugUtilsMessengerEXT m_DebugUtilsMessenger = VK_NULL_HANDLE;

    // Loads VK_EXT_debug_utils entry points and creates m_DebugUtilsMessenger.
    void RegisterDebugCallbacks();
    // Linear search of pProps for an exact layer-name match.
    static bool IsLayerSupported(const VkLayerProperties* pProps, size_t propCount, const char* pLayerName);
};
214
215 struct CommandLineParameters
216 {
217 bool m_Help = false;
218 bool m_List = false;
219 bool m_Test = false;
220 bool m_TestSparseBinding = false;
221 GPUSelection m_GPUSelection;
222
ParseCommandLineParameters223 bool Parse(int argc, wchar_t** argv)
224 {
225 for(int i = 1; i < argc; ++i)
226 {
227 if(_wcsicmp(argv[i], L"-h") == 0 || _wcsicmp(argv[i], L"--Help") == 0)
228 {
229 m_Help = true;
230 }
231 else if(_wcsicmp(argv[i], L"-l") == 0 || _wcsicmp(argv[i], L"--List") == 0)
232 {
233 m_List = true;
234 }
235 else if((_wcsicmp(argv[i], L"-g") == 0 || _wcsicmp(argv[i], L"--GPU") == 0) && i + 1 < argc)
236 {
237 m_GPUSelection.Substring = argv[i + 1];
238 ++i;
239 }
240 else if((_wcsicmp(argv[i], L"-i") == 0 || _wcsicmp(argv[i], L"--GPUIndex") == 0) && i + 1 < argc)
241 {
242 m_GPUSelection.Index = _wtoi(argv[i + 1]);
243 ++i;
244 }
245 else if (_wcsicmp(argv[i], L"-t") == 0 || _wcsicmp(argv[i], L"--Test") == 0)
246 {
247 m_Test = true;
248 }
249 else if (_wcsicmp(argv[i], L"-s") == 0 || _wcsicmp(argv[i], L"--TestSparseBinding") == 0)
250 {
251 m_TestSparseBinding = true;
252 }
253 else
254 return false;
255 }
256 return true;
257 }
258 } g_CommandLineParameters;
259
SetDebugUtilsObjectName(VkObjectType type,uint64_t handle,const char * name)260 void SetDebugUtilsObjectName(VkObjectType type, uint64_t handle, const char* name)
261 {
262 if(vkSetDebugUtilsObjectNameEXT_Func == nullptr)
263 return;
264
265 VkDebugUtilsObjectNameInfoEXT info = { VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT };
266 info.objectType = type;
267 info.objectHandle = handle;
268 info.pObjectName = name;
269 vkSetDebugUtilsObjectNameEXT_Func(g_hDevice, &info);
270 }
271
BeginSingleTimeCommands()272 void BeginSingleTimeCommands()
273 {
274 VkCommandBufferBeginInfo cmdBufBeginInfo = { VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO };
275 cmdBufBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
276 ERR_GUARD_VULKAN( vkBeginCommandBuffer(g_hTemporaryCommandBuffer, &cmdBufBeginInfo) );
277 }
278
EndSingleTimeCommands()279 void EndSingleTimeCommands()
280 {
281 ERR_GUARD_VULKAN( vkEndCommandBuffer(g_hTemporaryCommandBuffer) );
282
283 VkSubmitInfo submitInfo = { VK_STRUCTURE_TYPE_SUBMIT_INFO };
284 submitInfo.commandBufferCount = 1;
285 submitInfo.pCommandBuffers = &g_hTemporaryCommandBuffer;
286
287 ERR_GUARD_VULKAN( vkQueueSubmit(g_hGraphicsQueue, 1, &submitInfo, VK_NULL_HANDLE) );
288 ERR_GUARD_VULKAN( vkQueueWaitIdle(g_hGraphicsQueue) );
289 }
290
LoadShader(std::vector<char> & out,const char * fileName)291 void LoadShader(std::vector<char>& out, const char* fileName)
292 {
293 std::ifstream file(std::string(SHADER_PATH1) + fileName, std::ios::ate | std::ios::binary);
294 if(file.is_open() == false)
295 file.open(std::string(SHADER_PATH2) + fileName, std::ios::ate | std::ios::binary);
296 assert(file.is_open());
297 size_t fileSize = (size_t)file.tellg();
298 if(fileSize > 0)
299 {
300 out.resize(fileSize);
301 file.seekg(0);
302 file.read(out.data(), fileSize);
303 file.close();
304 }
305 else
306 out.clear();
307 }
308
MyDebugReportCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,VkDebugUtilsMessageTypeFlagsEXT messageTypes,const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData,void * pUserData)309 static VkBool32 VKAPI_PTR MyDebugReportCallback(
310 VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
311 VkDebugUtilsMessageTypeFlagsEXT messageTypes,
312 const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData,
313 void* pUserData)
314 {
315 assert(pCallbackData && pCallbackData->pMessageIdName && pCallbackData->pMessage);
316
317 switch(messageSeverity)
318 {
319 case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT:
320 SetConsoleColor(CONSOLE_COLOR::WARNING);
321 break;
322 case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:
323 SetConsoleColor(CONSOLE_COLOR::ERROR_);
324 break;
325 case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:
326 SetConsoleColor(CONSOLE_COLOR::NORMAL);
327 break;
328 default: // VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT
329 SetConsoleColor(CONSOLE_COLOR::INFO);
330 }
331
332 printf("%s \xBA %s\n", pCallbackData->pMessageIdName, pCallbackData->pMessage);
333
334 SetConsoleColor(CONSOLE_COLOR::NORMAL);
335
336 if(messageSeverity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT ||
337 messageSeverity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT)
338 {
339 OutputDebugStringA(pCallbackData->pMessage);
340 OutputDebugStringA("\n");
341 }
342
343 return VK_FALSE;
344 }
345
ChooseSurfaceFormat()346 static VkSurfaceFormatKHR ChooseSurfaceFormat()
347 {
348 assert(!g_SurfaceFormats.empty());
349
350 if((g_SurfaceFormats.size() == 1) && (g_SurfaceFormats[0].format == VK_FORMAT_UNDEFINED))
351 {
352 VkSurfaceFormatKHR result = { VK_FORMAT_B8G8R8A8_UNORM, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR };
353 return result;
354 }
355
356 for(const auto& format : g_SurfaceFormats)
357 {
358 if((format.format == VK_FORMAT_B8G8R8A8_UNORM) &&
359 (format.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR))
360 {
361 return format;
362 }
363 }
364
365 return g_SurfaceFormats[0];
366 }
367
ChooseSwapPresentMode()368 VkPresentModeKHR ChooseSwapPresentMode()
369 {
370 VkPresentModeKHR preferredMode = VSYNC ? VK_PRESENT_MODE_MAILBOX_KHR : VK_PRESENT_MODE_IMMEDIATE_KHR;
371
372 if(std::find(g_PresentModes.begin(), g_PresentModes.end(), preferredMode) !=
373 g_PresentModes.end())
374 {
375 return preferredMode;
376 }
377
378 return VK_PRESENT_MODE_FIFO_KHR;
379 }
380
ChooseSwapExtent()381 static VkExtent2D ChooseSwapExtent()
382 {
383 if(g_SurfaceCapabilities.currentExtent.width != UINT_MAX)
384 return g_SurfaceCapabilities.currentExtent;
385
386 VkExtent2D result = {
387 std::max(g_SurfaceCapabilities.minImageExtent.width,
388 std::min(g_SurfaceCapabilities.maxImageExtent.width, (uint32_t)g_SizeX)),
389 std::max(g_SurfaceCapabilities.minImageExtent.height,
390 std::min(g_SurfaceCapabilities.maxImageExtent.height, (uint32_t)g_SizeY)) };
391 return result;
392 }
393
// Maps the compile-time VMA_VULKAN_VERSION configuration to the matching
// VK_API_VERSION_* constant (used for VkApplicationInfo::apiVersion).
// Resolved entirely at compile time.
static constexpr uint32_t GetVulkanApiVersion()
{
#if VMA_VULKAN_VERSION == 1003000
    return VK_API_VERSION_1_3;
#elif VMA_VULKAN_VERSION == 1002000
    return VK_API_VERSION_1_2;
#elif VMA_VULKAN_VERSION == 1001000
    return VK_API_VERSION_1_1;
#elif VMA_VULKAN_VERSION == 1000000
    return VK_API_VERSION_1_0;
#else
    #error Invalid VMA_VULKAN_VERSION.
    return UINT32_MAX; // unreachable - keeps the function well-formed
#endif
}
409
// Creates the Vulkan instance: installs the custom CPU allocator, checks for
// the validation layer, enumerates and enables available instance extensions
// (GET_PHYSICAL_DEVICE_PROPERTIES_2 on Vulkan 1.0, DEBUG_UTILS when present),
// prints the API version used, and registers the debug messenger.
void VulkanUsage::Init()
{
    g_hAppInstance = (HINSTANCE)GetModuleHandle(NULL);

    if(USE_CUSTOM_CPU_ALLOCATION_CALLBACKS)
    {
        g_Allocs = &g_CpuAllocationCallbacks;
    }

    // Enumerate instance layers (two-call idiom: count, then data).
    uint32_t instanceLayerPropCount = 0;
    ERR_GUARD_VULKAN( vkEnumerateInstanceLayerProperties(&instanceLayerPropCount, nullptr) );
    std::vector<VkLayerProperties> instanceLayerProps(instanceLayerPropCount);
    if(instanceLayerPropCount > 0)
    {
        ERR_GUARD_VULKAN( vkEnumerateInstanceLayerProperties(&instanceLayerPropCount, instanceLayerProps.data()) );
    }

    // Disable validation if the layer is not installed, instead of failing
    // instance creation.
    if(g_EnableValidationLayer)
    {
        if(IsLayerSupported(instanceLayerProps.data(), instanceLayerProps.size(), VALIDATION_LAYER_NAME) == false)
        {
            wprintf(L"Layer \"%hs\" not supported.", VALIDATION_LAYER_NAME);
            g_EnableValidationLayer = false;
        }
    }

    // Enumerate instance extensions (two-call idiom).
    uint32_t availableInstanceExtensionCount = 0;
    ERR_GUARD_VULKAN( vkEnumerateInstanceExtensionProperties(nullptr, &availableInstanceExtensionCount, nullptr) );
    std::vector<VkExtensionProperties> availableInstanceExtensions(availableInstanceExtensionCount);
    if(availableInstanceExtensionCount > 0)
    {
        ERR_GUARD_VULKAN( vkEnumerateInstanceExtensionProperties(nullptr, &availableInstanceExtensionCount, availableInstanceExtensions.data()) );
    }

    // Surface extensions are required unconditionally.
    std::vector<const char*> enabledInstanceExtensions;
    enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
    enabledInstanceExtensions.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);

    std::vector<const char*> instanceLayers;
    if(g_EnableValidationLayer)
    {
        instanceLayers.push_back(VALIDATION_LAYER_NAME);
    }

    for(const auto& extensionProperties : availableInstanceExtensions)
    {
        if(strcmp(extensionProperties.extensionName, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) == 0)
        {
            // Only needed as an extension on Vulkan 1.0; core in 1.1+.
            if(GetVulkanApiVersion() == VK_API_VERSION_1_0)
            {
                enabledInstanceExtensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
                VK_KHR_get_physical_device_properties2_enabled = true;
            }
        }
        else if(strcmp(extensionProperties.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0)
        {
            enabledInstanceExtensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
            VK_EXT_debug_utils_enabled = true;
        }
    }

    VkApplicationInfo appInfo = { VK_STRUCTURE_TYPE_APPLICATION_INFO };
    appInfo.pApplicationName = APP_TITLE_A;
    appInfo.applicationVersion = VK_MAKE_VERSION(1, 0, 0);
    appInfo.pEngineName = "Adam Sawicki Engine";
    appInfo.engineVersion = VK_MAKE_VERSION(1, 0, 0);
    appInfo.apiVersion = GetVulkanApiVersion();

    VkInstanceCreateInfo instInfo = { VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO };
    instInfo.pApplicationInfo = &appInfo;
    instInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
    instInfo.ppEnabledExtensionNames = enabledInstanceExtensions.data();
    instInfo.enabledLayerCount = static_cast<uint32_t>(instanceLayers.size());
    instInfo.ppEnabledLayerNames = instanceLayers.data();

    wprintf(L"Vulkan API version used: ");
    switch(appInfo.apiVersion)
    {
    case VK_API_VERSION_1_0: wprintf(L"1.0\n"); break;
#ifdef VK_VERSION_1_1
    case VK_API_VERSION_1_1: wprintf(L"1.1\n"); break;
#endif
#ifdef VK_VERSION_1_2
    case VK_API_VERSION_1_2: wprintf(L"1.2\n"); break;
#endif
#ifdef VK_VERSION_1_3
    case VK_API_VERSION_1_3: wprintf(L"1.3\n"); break;
#endif
    default: assert(0);
    }

    ERR_GUARD_VULKAN( vkCreateInstance(&instInfo, g_Allocs, &g_hVulkanInstance) );

    if(VK_EXT_debug_utils_enabled)
    {
        RegisterDebugCallbacks();
    }
}
508
~VulkanUsage()509 VulkanUsage::~VulkanUsage()
510 {
511 if(m_DebugUtilsMessenger)
512 {
513 vkDestroyDebugUtilsMessengerEXT_Func(g_hVulkanInstance, m_DebugUtilsMessenger, g_Allocs);
514 }
515
516 if(g_hVulkanInstance)
517 {
518 vkDestroyInstance(g_hVulkanInstance, g_Allocs);
519 g_hVulkanInstance = VK_NULL_HANDLE;
520 }
521 }
522
PrintPhysicalDeviceList() const523 void VulkanUsage::PrintPhysicalDeviceList() const
524 {
525 uint32_t deviceCount = 0;
526 ERR_GUARD_VULKAN(vkEnumeratePhysicalDevices(g_hVulkanInstance, &deviceCount, nullptr));
527 std::vector<VkPhysicalDevice> physicalDevices(deviceCount);
528 if(deviceCount > 0)
529 {
530 ERR_GUARD_VULKAN(vkEnumeratePhysicalDevices(g_hVulkanInstance, &deviceCount, physicalDevices.data()));
531 }
532
533 for(size_t i = 0; i < deviceCount; ++i)
534 {
535 VkPhysicalDeviceProperties props = {};
536 vkGetPhysicalDeviceProperties(physicalDevices[i], &props);
537 wprintf(L"Physical device %zu: %hs\n", i, props.deviceName);
538 }
539 }
540
SelectPhysicalDevice(const GPUSelection & GPUSelection) const541 VkPhysicalDevice VulkanUsage::SelectPhysicalDevice(const GPUSelection& GPUSelection) const
542 {
543 uint32_t deviceCount = 0;
544 ERR_GUARD_VULKAN(vkEnumeratePhysicalDevices(g_hVulkanInstance, &deviceCount, nullptr));
545 std::vector<VkPhysicalDevice> physicalDevices(deviceCount);
546 if(deviceCount > 0)
547 {
548 ERR_GUARD_VULKAN(vkEnumeratePhysicalDevices(g_hVulkanInstance, &deviceCount, physicalDevices.data()));
549 }
550
551 if(GPUSelection.Index != UINT32_MAX)
552 {
553 // Cannot specify both index and name.
554 if(!GPUSelection.Substring.empty())
555 {
556 return VK_NULL_HANDLE;
557 }
558
559 return GPUSelection.Index < deviceCount ? physicalDevices[GPUSelection.Index] : VK_NULL_HANDLE;
560 }
561
562 if(!GPUSelection.Substring.empty())
563 {
564 VkPhysicalDevice result = VK_NULL_HANDLE;
565 std::wstring name;
566 for(uint32_t i = 0; i < deviceCount; ++i)
567 {
568 VkPhysicalDeviceProperties props = {};
569 vkGetPhysicalDeviceProperties(physicalDevices[i], &props);
570 if(ConvertCharsToUnicode(&name, props.deviceName, strlen(props.deviceName), CP_UTF8) &&
571 StrStrI(name.c_str(), GPUSelection.Substring.c_str()))
572 {
573 // Second matching device found - error.
574 if(result != VK_NULL_HANDLE)
575 {
576 return VK_NULL_HANDLE;
577 }
578 // First matching device found.
579 result = physicalDevices[i];
580 }
581 }
582 // Found or not, return it.
583 return result;
584 }
585
586 // Select first one.
587 return deviceCount > 0 ? physicalDevices[0] : VK_NULL_HANDLE;
588 }
589
RegisterDebugCallbacks()590 void VulkanUsage::RegisterDebugCallbacks()
591 {
592 vkCreateDebugUtilsMessengerEXT_Func = (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(
593 g_hVulkanInstance, "vkCreateDebugUtilsMessengerEXT");
594 vkDestroyDebugUtilsMessengerEXT_Func = (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(
595 g_hVulkanInstance, "vkDestroyDebugUtilsMessengerEXT");
596 vkSetDebugUtilsObjectNameEXT_Func = (PFN_vkSetDebugUtilsObjectNameEXT)vkGetInstanceProcAddr(
597 g_hVulkanInstance, "vkSetDebugUtilsObjectNameEXT");
598 assert(vkCreateDebugUtilsMessengerEXT_Func);
599 assert(vkDestroyDebugUtilsMessengerEXT_Func);
600 assert(vkSetDebugUtilsObjectNameEXT_Func);
601
602 VkDebugUtilsMessengerCreateInfoEXT messengerCreateInfo = { VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT };
603 messengerCreateInfo.messageSeverity = DEBUG_UTILS_MESSENGER_MESSAGE_SEVERITY;
604 messengerCreateInfo.messageType = DEBUG_UTILS_MESSENGER_MESSAGE_TYPE;
605 messengerCreateInfo.pfnUserCallback = MyDebugReportCallback;
606 ERR_GUARD_VULKAN( vkCreateDebugUtilsMessengerEXT_Func(g_hVulkanInstance, &messengerCreateInfo, g_Allocs, &m_DebugUtilsMessenger) );
607 }
608
IsLayerSupported(const VkLayerProperties * pProps,size_t propCount,const char * pLayerName)609 bool VulkanUsage::IsLayerSupported(const VkLayerProperties* pProps, size_t propCount, const char* pLayerName)
610 {
611 const VkLayerProperties* propsEnd = pProps + propCount;
612 return std::find_if(
613 pProps,
614 propsEnd,
615 [pLayerName](const VkLayerProperties& prop) -> bool {
616 return strcmp(pLayerName, prop.layerName) == 0;
617 }) != propsEnd;
618 }
619
// Vertex layout used by the cube mesh in CreateMesh:
// position, RGB color, and 2D texture coordinates.
struct Vertex
{
    float pos[3];
    float color[3];
    float texCoord[2];
};
626
// Creates the cube mesh: uploads vertex and index data through mapped staging
// buffers into GPU-local vertex/index buffers (g_hVertexBuffer/g_hIndexBuffer)
// using a synchronous single-time command buffer copy.
// Indices use USHRT_MAX as the primitive-restart marker between cube faces.
static void CreateMesh()
{
    assert(g_hAllocator);

    static Vertex vertices[] = {
        // -X
        { { -1.f, -1.f, -1.f}, {1.0f, 0.0f, 0.0f}, {0.f, 0.f} },
        { { -1.f, -1.f,  1.f}, {1.0f, 0.0f, 0.0f}, {1.f, 0.f} },
        { { -1.f,  1.f, -1.f}, {1.0f, 0.0f, 0.0f}, {0.f, 1.f} },
        { { -1.f,  1.f,  1.f}, {1.0f, 0.0f, 0.0f}, {1.f, 1.f} },
        // +X
        { {  1.f, -1.f,  1.f}, {0.0f, 1.0f, 0.0f}, {0.f, 0.f} },
        { {  1.f, -1.f, -1.f}, {0.0f, 1.0f, 0.0f}, {1.f, 0.f} },
        { {  1.f,  1.f,  1.f}, {0.0f, 1.0f, 0.0f}, {0.f, 1.f} },
        { {  1.f,  1.f, -1.f}, {0.0f, 1.0f, 0.0f}, {1.f, 1.f} },
        // -Z
        { {  1.f, -1.f, -1.f}, {0.0f, 0.0f, 1.0f}, {0.f, 0.f} },
        { { -1.f, -1.f, -1.f}, {0.0f, 0.0f, 1.0f}, {1.f, 0.f} },
        { {  1.f,  1.f, -1.f}, {0.0f, 0.0f, 1.0f}, {0.f, 1.f} },
        { { -1.f,  1.f, -1.f}, {0.0f, 0.0f, 1.0f}, {1.f, 1.f} },
        // +Z
        { { -1.f, -1.f,  1.f}, {1.0f, 1.0f, 0.0f}, {0.f, 0.f} },
        { {  1.f, -1.f,  1.f}, {1.0f, 1.0f, 0.0f}, {1.f, 0.f} },
        { { -1.f,  1.f,  1.f}, {1.0f, 1.0f, 0.0f}, {0.f, 1.f} },
        { {  1.f,  1.f,  1.f}, {1.0f, 1.0f, 0.0f}, {1.f, 1.f} },
        // -Y
        { { -1.f, -1.f, -1.f}, {0.0f, 1.0f, 1.0f}, {0.f, 0.f} },
        { {  1.f, -1.f, -1.f}, {0.0f, 1.0f, 1.0f}, {1.f, 0.f} },
        { { -1.f, -1.f,  1.f}, {0.0f, 1.0f, 1.0f}, {0.f, 1.f} },
        { {  1.f, -1.f,  1.f}, {0.0f, 1.0f, 1.0f}, {1.f, 1.f} },
        // +Y
        { {  1.f,  1.f, -1.f}, {1.0f, 0.0f, 1.0f}, {0.f, 0.f} },
        { { -1.f,  1.f, -1.f}, {1.0f, 0.0f, 1.0f}, {1.f, 0.f} },
        { {  1.f,  1.f,  1.f}, {1.0f, 0.0f, 1.0f}, {0.f, 1.f} },
        { { -1.f,  1.f,  1.f}, {1.0f, 0.0f, 1.0f}, {1.f, 1.f} },
    };
    static uint16_t indices[] = {
        0, 1, 2, 3, USHRT_MAX,
        4, 5, 6, 7, USHRT_MAX,
        8, 9, 10, 11, USHRT_MAX,
        12, 13, 14, 15, USHRT_MAX,
        16, 17, 18, 19, USHRT_MAX,
        20, 21, 22, 23, USHRT_MAX,
    };

    size_t vertexBufferSize = sizeof(Vertex) * _countof(vertices);
    size_t indexBufferSize = sizeof(uint16_t) * _countof(indices);
    // NOTE(review): g_VertexCount is never set here - presumably unused
    // because drawing is indexed; confirm against the draw code.
    g_IndexCount = (uint32_t)_countof(indices);

    // Create vertex buffer

    VkBufferCreateInfo vbInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    vbInfo.size = vertexBufferSize;
    vbInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
    vbInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

    // Host-visible, persistently mapped staging allocation.
    VmaAllocationCreateInfo vbAllocCreateInfo = {};
    vbAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
    vbAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VkBuffer stagingVertexBuffer = VK_NULL_HANDLE;
    VmaAllocation stagingVertexBufferAlloc = VK_NULL_HANDLE;
    VmaAllocationInfo stagingVertexBufferAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &vbInfo, &vbAllocCreateInfo, &stagingVertexBuffer, &stagingVertexBufferAlloc, &stagingVertexBufferAllocInfo) );

    memcpy(stagingVertexBufferAllocInfo.pMappedData, vertices, vertexBufferSize);

    // No need to flush stagingVertexBuffer memory because CPU_ONLY memory is always HOST_COHERENT.

    // Reuse vbInfo for the device-local destination buffer.
    vbInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
    vbAllocCreateInfo.flags = 0;
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &vbInfo, &vbAllocCreateInfo, &g_hVertexBuffer, &g_hVertexBufferAlloc, nullptr) );

    // Create index buffer

    VkBufferCreateInfo ibInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    ibInfo.size = indexBufferSize;
    ibInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
    ibInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

    VmaAllocationCreateInfo ibAllocCreateInfo = {};
    ibAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
    ibAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VkBuffer stagingIndexBuffer = VK_NULL_HANDLE;
    VmaAllocation stagingIndexBufferAlloc = VK_NULL_HANDLE;
    VmaAllocationInfo stagingIndexBufferAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &ibInfo, &ibAllocCreateInfo, &stagingIndexBuffer, &stagingIndexBufferAlloc, &stagingIndexBufferAllocInfo) );

    memcpy(stagingIndexBufferAllocInfo.pMappedData, indices, indexBufferSize);

    // No need to flush stagingIndexBuffer memory because CPU_ONLY memory is always HOST_COHERENT.

    ibInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
    ibAllocCreateInfo.flags = 0;
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &ibInfo, &ibAllocCreateInfo, &g_hIndexBuffer, &g_hIndexBufferAlloc, nullptr) );

    // Copy buffers

    BeginSingleTimeCommands();

    VkBufferCopy vbCopyRegion = {};
    vbCopyRegion.srcOffset = 0;
    vbCopyRegion.dstOffset = 0;
    vbCopyRegion.size = vbInfo.size;
    vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingVertexBuffer, g_hVertexBuffer, 1, &vbCopyRegion);

    VkBufferCopy ibCopyRegion = {};
    ibCopyRegion.srcOffset = 0;
    ibCopyRegion.dstOffset = 0;
    ibCopyRegion.size = ibInfo.size;
    vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingIndexBuffer, g_hIndexBuffer, 1, &ibCopyRegion);

    // Waits until the copies complete, so the staging buffers can be freed.
    EndSingleTimeCommands();

    vmaDestroyBuffer(g_hAllocator, stagingIndexBuffer, stagingIndexBufferAlloc);
    vmaDestroyBuffer(g_hAllocator, stagingVertexBuffer, stagingVertexBufferAlloc);
}
745
CreateTexture(uint32_t sizeX,uint32_t sizeY)746 static void CreateTexture(uint32_t sizeX, uint32_t sizeY)
747 {
748 // Create staging buffer.
749
750 const VkDeviceSize imageSize = sizeX * sizeY * 4;
751
752 VkBufferCreateInfo stagingBufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
753 stagingBufInfo.size = imageSize;
754 stagingBufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
755
756 VmaAllocationCreateInfo stagingBufAllocCreateInfo = {};
757 stagingBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
758 stagingBufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_MAPPED_BIT;
759
760 VkBuffer stagingBuf = VK_NULL_HANDLE;
761 VmaAllocation stagingBufAlloc = VK_NULL_HANDLE;
762 VmaAllocationInfo stagingBufAllocInfo = {};
763 ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &stagingBufInfo, &stagingBufAllocCreateInfo, &stagingBuf, &stagingBufAlloc, &stagingBufAllocInfo) );
764
765 char* const pImageData = (char*)stagingBufAllocInfo.pMappedData;
766 uint8_t* pRowData = (uint8_t*)pImageData;
767 for(uint32_t y = 0; y < sizeY; ++y)
768 {
769 uint32_t* pPixelData = (uint32_t*)pRowData;
770 for(uint32_t x = 0; x < sizeX; ++x)
771 {
772 *pPixelData =
773 ((x & 0x18) == 0x08 ? 0x000000FF : 0x00000000) |
774 ((x & 0x18) == 0x10 ? 0x0000FFFF : 0x00000000) |
775 ((y & 0x18) == 0x08 ? 0x0000FF00 : 0x00000000) |
776 ((y & 0x18) == 0x10 ? 0x00FF0000 : 0x00000000);
777 ++pPixelData;
778 }
779 pRowData += sizeX * 4;
780 }
781
782 // No need to flush stagingImage memory because CPU_ONLY memory is always HOST_COHERENT.
783
784 // Create g_hTextureImage in GPU memory.
785
786 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
787 imageInfo.imageType = VK_IMAGE_TYPE_2D;
788 imageInfo.extent.width = sizeX;
789 imageInfo.extent.height = sizeY;
790 imageInfo.extent.depth = 1;
791 imageInfo.mipLevels = 1;
792 imageInfo.arrayLayers = 1;
793 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
794 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
795 imageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
796 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
797 imageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
798 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
799 imageInfo.flags = 0;
800
801 VmaAllocationCreateInfo imageAllocCreateInfo = {};
802 imageAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
803
804 ERR_GUARD_VULKAN( vmaCreateImage(g_hAllocator, &imageInfo, &imageAllocCreateInfo, &g_hTextureImage, &g_hTextureImageAlloc, nullptr) );
805
806 // Transition image layouts, copy image.
807
808 BeginSingleTimeCommands();
809
810 VkImageMemoryBarrier imgMemBarrier = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER };
811 imgMemBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
812 imgMemBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
813 imgMemBarrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
814 imgMemBarrier.subresourceRange.baseMipLevel = 0;
815 imgMemBarrier.subresourceRange.levelCount = 1;
816 imgMemBarrier.subresourceRange.baseArrayLayer = 0;
817 imgMemBarrier.subresourceRange.layerCount = 1;
818 imgMemBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
819 imgMemBarrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
820 imgMemBarrier.image = g_hTextureImage;
821 imgMemBarrier.srcAccessMask = 0;
822 imgMemBarrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
823
824 vkCmdPipelineBarrier(
825 g_hTemporaryCommandBuffer,
826 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
827 VK_PIPELINE_STAGE_TRANSFER_BIT,
828 0,
829 0, nullptr,
830 0, nullptr,
831 1, &imgMemBarrier);
832
833 VkBufferImageCopy region = {};
834 region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
835 region.imageSubresource.layerCount = 1;
836 region.imageExtent.width = sizeX;
837 region.imageExtent.height = sizeY;
838 region.imageExtent.depth = 1;
839
840 vkCmdCopyBufferToImage(g_hTemporaryCommandBuffer, stagingBuf, g_hTextureImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
841
842 imgMemBarrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
843 imgMemBarrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
844 imgMemBarrier.image = g_hTextureImage;
845 imgMemBarrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
846 imgMemBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
847
848 vkCmdPipelineBarrier(
849 g_hTemporaryCommandBuffer,
850 VK_PIPELINE_STAGE_TRANSFER_BIT,
851 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
852 0,
853 0, nullptr,
854 0, nullptr,
855 1, &imgMemBarrier);
856
857 EndSingleTimeCommands();
858
859 vmaDestroyBuffer(g_hAllocator, stagingBuf, stagingBufAlloc);
860
861 // Create ImageView
862
863 VkImageViewCreateInfo textureImageViewInfo = { VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO };
864 textureImageViewInfo.image = g_hTextureImage;
865 textureImageViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
866 textureImageViewInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
867 textureImageViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
868 textureImageViewInfo.subresourceRange.baseMipLevel = 0;
869 textureImageViewInfo.subresourceRange.levelCount = 1;
870 textureImageViewInfo.subresourceRange.baseArrayLayer = 0;
871 textureImageViewInfo.subresourceRange.layerCount = 1;
872 ERR_GUARD_VULKAN( vkCreateImageView(g_hDevice, &textureImageViewInfo, g_Allocs, &g_hTextureImageView) );
873 }
874
// Per-draw constants consumed by the vertex shader. Despite the name, this
// struct is delivered via a push-constant range sized with
// sizeof(UniformBufferObject) (see pipeline layout creation), not a uniform buffer.
struct UniformBufferObject
{
    mat4 ModelViewProj; // combined model-view-projection matrix (mat4 is declared elsewhere in the project)
};
879
FindSupportedFormat(const std::vector<VkFormat> & candidates,VkImageTiling tiling,VkFormatFeatureFlags features)880 static VkFormat FindSupportedFormat(
881 const std::vector<VkFormat>& candidates,
882 VkImageTiling tiling,
883 VkFormatFeatureFlags features)
884 {
885 for (VkFormat format : candidates)
886 {
887 VkFormatProperties props;
888 vkGetPhysicalDeviceFormatProperties(g_hPhysicalDevice, format, &props);
889
890 if ((tiling == VK_IMAGE_TILING_LINEAR) &&
891 ((props.linearTilingFeatures & features) == features))
892 {
893 return format;
894 }
895 else if ((tiling == VK_IMAGE_TILING_OPTIMAL) &&
896 ((props.optimalTilingFeatures & features) == features))
897 {
898 return format;
899 }
900 }
901 return VK_FORMAT_UNDEFINED;
902 }
903
FindDepthFormat()904 static VkFormat FindDepthFormat()
905 {
906 std::vector<VkFormat> formats;
907 formats.push_back(VK_FORMAT_D32_SFLOAT);
908 formats.push_back(VK_FORMAT_D32_SFLOAT_S8_UINT);
909 formats.push_back(VK_FORMAT_D24_UNORM_S8_UINT);
910
911 return FindSupportedFormat(
912 formats,
913 VK_IMAGE_TILING_OPTIMAL,
914 VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT);
915 }
916
// (Re)creates the swapchain and every object whose lifetime depends on its
// size or format: swapchain image views, depth buffer, pipeline layout,
// render pass, graphics pipeline, framebuffers, and frame-sync semaphores.
// Designed to be called again (e.g. on window resize): existing objects are
// destroyed first and the old swapchain handle is passed as oldSwapchain.
static void CreateSwapchain()
{
    // Query surface formats.

    ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfaceCapabilitiesKHR(g_hPhysicalDevice, g_hSurface, &g_SurfaceCapabilities) );

    // Standard Vulkan two-call idiom: query the count, then fill the array.
    uint32_t formatCount = 0;
    ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfaceFormatsKHR(g_hPhysicalDevice, g_hSurface, &formatCount, nullptr) );
    g_SurfaceFormats.resize(formatCount);
    ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfaceFormatsKHR(g_hPhysicalDevice, g_hSurface, &formatCount, g_SurfaceFormats.data()) );

    uint32_t presentModeCount = 0;
    ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfacePresentModesKHR(g_hPhysicalDevice, g_hSurface, &presentModeCount, nullptr) );
    g_PresentModes.resize(presentModeCount);
    ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfacePresentModesKHR(g_hPhysicalDevice, g_hSurface, &presentModeCount, g_PresentModes.data()) );

    // Create swap chain

    g_SurfaceFormat = ChooseSurfaceFormat();
    VkPresentModeKHR presentMode = ChooseSwapPresentMode();
    g_Extent = ChooseSwapExtent();

    // Ask for one image more than the minimum, clamped to the maximum
    // (maxImageCount == 0 means "no upper limit" in the Vulkan API).
    uint32_t imageCount = g_SurfaceCapabilities.minImageCount + 1;
    if((g_SurfaceCapabilities.maxImageCount > 0) &&
        (imageCount > g_SurfaceCapabilities.maxImageCount))
    {
        imageCount = g_SurfaceCapabilities.maxImageCount;
    }

    VkSwapchainCreateInfoKHR swapChainInfo = { VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR };
    swapChainInfo.surface = g_hSurface;
    swapChainInfo.minImageCount = imageCount;
    swapChainInfo.imageFormat = g_SurfaceFormat.format;
    swapChainInfo.imageColorSpace = g_SurfaceFormat.colorSpace;
    swapChainInfo.imageExtent = g_Extent;
    swapChainInfo.imageArrayLayers = 1;
    swapChainInfo.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    swapChainInfo.preTransform = g_SurfaceCapabilities.currentTransform;
    swapChainInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
    swapChainInfo.presentMode = presentMode;
    swapChainInfo.clipped = VK_TRUE;
    // Passing the previous swapchain allows the driver to reuse its resources
    // during recreation; VK_NULL_HANDLE on the first call.
    swapChainInfo.oldSwapchain = g_hSwapchain;

    // If graphics and present live in different queue families, the images
    // must be shared between both families; otherwise exclusive ownership.
    uint32_t queueFamilyIndices[] = { g_GraphicsQueueFamilyIndex, g_PresentQueueFamilyIndex };
    if(g_PresentQueueFamilyIndex != g_GraphicsQueueFamilyIndex)
    {
        swapChainInfo.imageSharingMode = VK_SHARING_MODE_CONCURRENT;
        swapChainInfo.queueFamilyIndexCount = 2;
        swapChainInfo.pQueueFamilyIndices = queueFamilyIndices;
    }
    else
    {
        swapChainInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
    }

    // Create the new swapchain before destroying the old one (the old handle
    // was already handed over via oldSwapchain above).
    VkSwapchainKHR hNewSwapchain = VK_NULL_HANDLE;
    ERR_GUARD_VULKAN( vkCreateSwapchainKHR(g_hDevice, &swapChainInfo, g_Allocs, &hNewSwapchain) );
    if(g_hSwapchain != VK_NULL_HANDLE)
        vkDestroySwapchainKHR(g_hDevice, g_hSwapchain, g_Allocs);
    g_hSwapchain = hNewSwapchain;

    // Retrieve swapchain images.

    // The implementation may create more images than minImageCount requested,
    // so the count is queried back.
    uint32_t swapchainImageCount = 0;
    ERR_GUARD_VULKAN( vkGetSwapchainImagesKHR(g_hDevice, g_hSwapchain, &swapchainImageCount, nullptr) );
    g_SwapchainImages.resize(swapchainImageCount);
    ERR_GUARD_VULKAN( vkGetSwapchainImagesKHR(g_hDevice, g_hSwapchain, &swapchainImageCount, g_SwapchainImages.data()) );

    // Create swapchain image views.

    // Destroy views left over from a previous swapchain.
    for(size_t i = g_SwapchainImageViews.size(); i--; )
        vkDestroyImageView(g_hDevice, g_SwapchainImageViews[i], g_Allocs);
    g_SwapchainImageViews.clear();

    VkImageViewCreateInfo swapchainImageViewInfo = { VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO };
    g_SwapchainImageViews.resize(swapchainImageCount);
    for(uint32_t i = 0; i < swapchainImageCount; ++i)
    {
        swapchainImageViewInfo.image = g_SwapchainImages[i];
        swapchainImageViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
        swapchainImageViewInfo.format = g_SurfaceFormat.format;
        swapchainImageViewInfo.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
        swapchainImageViewInfo.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
        swapchainImageViewInfo.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
        swapchainImageViewInfo.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
        swapchainImageViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        swapchainImageViewInfo.subresourceRange.baseMipLevel = 0;
        swapchainImageViewInfo.subresourceRange.levelCount = 1;
        swapchainImageViewInfo.subresourceRange.baseArrayLayer = 0;
        swapchainImageViewInfo.subresourceRange.layerCount = 1;
        ERR_GUARD_VULKAN( vkCreateImageView(g_hDevice, &swapchainImageViewInfo, g_Allocs, &g_SwapchainImageViews[i]) );
    }

    // Create depth buffer

    g_DepthFormat = FindDepthFormat();
    assert(g_DepthFormat != VK_FORMAT_UNDEFINED);

    // Depth image matches the swapchain extent; allocated through VMA.
    VkImageCreateInfo depthImageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    depthImageInfo.imageType = VK_IMAGE_TYPE_2D;
    depthImageInfo.extent.width = g_Extent.width;
    depthImageInfo.extent.height = g_Extent.height;
    depthImageInfo.extent.depth = 1;
    depthImageInfo.mipLevels = 1;
    depthImageInfo.arrayLayers = 1;
    depthImageInfo.format = g_DepthFormat;
    depthImageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    depthImageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    depthImageInfo.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
    depthImageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    depthImageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    depthImageInfo.flags = 0;

    VmaAllocationCreateInfo depthImageAllocCreateInfo = {};
    depthImageAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;

    ERR_GUARD_VULKAN( vmaCreateImage(g_hAllocator, &depthImageInfo, &depthImageAllocCreateInfo, &g_hDepthImage, &g_hDepthImageAlloc, nullptr) );

    VkImageViewCreateInfo depthImageViewInfo = { VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO };
    depthImageViewInfo.image = g_hDepthImage;
    depthImageViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
    depthImageViewInfo.format = g_DepthFormat;
    depthImageViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
    depthImageViewInfo.subresourceRange.baseMipLevel = 0;
    depthImageViewInfo.subresourceRange.levelCount = 1;
    depthImageViewInfo.subresourceRange.baseArrayLayer = 0;
    depthImageViewInfo.subresourceRange.layerCount = 1;

    ERR_GUARD_VULKAN( vkCreateImageView(g_hDevice, &depthImageViewInfo, g_Allocs, &g_hDepthImageView) );

    // Create pipeline layout
    {
        if(g_hPipelineLayout != VK_NULL_HANDLE)
        {
            vkDestroyPipelineLayout(g_hDevice, g_hPipelineLayout, g_Allocs);
            g_hPipelineLayout = VK_NULL_HANDLE;
        }

        // One vertex-stage push-constant range carrying UniformBufferObject.
        // (&pushConstantRanges and pushConstantRanges decay to the same address.)
        VkPushConstantRange pushConstantRanges[1];
        ZeroMemory(&pushConstantRanges, sizeof pushConstantRanges);
        pushConstantRanges[0].offset = 0;
        pushConstantRanges[0].size = sizeof(UniformBufferObject);
        pushConstantRanges[0].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;

        VkDescriptorSetLayout descriptorSetLayouts[] = { g_hDescriptorSetLayout };
        VkPipelineLayoutCreateInfo pipelineLayoutInfo = { VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO };
        pipelineLayoutInfo.setLayoutCount = 1;
        pipelineLayoutInfo.pSetLayouts = descriptorSetLayouts;
        pipelineLayoutInfo.pushConstantRangeCount = 1;
        pipelineLayoutInfo.pPushConstantRanges = pushConstantRanges;
        ERR_GUARD_VULKAN( vkCreatePipelineLayout(g_hDevice, &pipelineLayoutInfo, g_Allocs, &g_hPipelineLayout) );
    }

    // Create render pass
    {
        if(g_hRenderPass != VK_NULL_HANDLE)
        {
            vkDestroyRenderPass(g_hDevice, g_hRenderPass, g_Allocs);
            g_hRenderPass = VK_NULL_HANDLE;
        }

        // attachments[0] = color (swapchain image), attachments[1] = depth.
        VkAttachmentDescription attachments[2];
        ZeroMemory(attachments, sizeof(attachments));

        attachments[0].format = g_SurfaceFormat.format;
        attachments[0].samples = VK_SAMPLE_COUNT_1_BIT;
        attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
        attachments[0].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
        attachments[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        attachments[0].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachments[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
        attachments[0].finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;

        // Depth contents are not needed after the pass -> DONT_CARE store.
        attachments[1].format = g_DepthFormat;
        attachments[1].samples = VK_SAMPLE_COUNT_1_BIT;
        attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
        attachments[1].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        attachments[1].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachments[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
        attachments[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

        VkAttachmentReference colorAttachmentRef = {};
        colorAttachmentRef.attachment = 0;
        colorAttachmentRef.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        VkAttachmentReference depthStencilAttachmentRef = {};
        depthStencilAttachmentRef.attachment = 1;
        depthStencilAttachmentRef.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

        // Single subpass using both attachments.
        VkSubpassDescription subpassDesc = {};
        subpassDesc.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
        subpassDesc.colorAttachmentCount = 1;
        subpassDesc.pColorAttachments = &colorAttachmentRef;
        subpassDesc.pDepthStencilAttachment = &depthStencilAttachmentRef;

        VkRenderPassCreateInfo renderPassInfo = { VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO };
        renderPassInfo.attachmentCount = (uint32_t)_countof(attachments);
        renderPassInfo.pAttachments = attachments;
        renderPassInfo.subpassCount = 1;
        renderPassInfo.pSubpasses = &subpassDesc;
        renderPassInfo.dependencyCount = 0;
        ERR_GUARD_VULKAN( vkCreateRenderPass(g_hDevice, &renderPassInfo, g_Allocs, &g_hRenderPass) );
    }

    // Create pipeline
    {
        std::vector<char> vertShaderCode;
        LoadShader(vertShaderCode, "Shader.vert.spv");
        VkShaderModuleCreateInfo shaderModuleInfo = { VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO };
        shaderModuleInfo.codeSize = vertShaderCode.size();
        shaderModuleInfo.pCode = (const uint32_t*)vertShaderCode.data();
        VkShaderModule hVertShaderModule = VK_NULL_HANDLE;
        ERR_GUARD_VULKAN( vkCreateShaderModule(g_hDevice, &shaderModuleInfo, g_Allocs, &hVertShaderModule) );

        // shaderModuleInfo is deliberately reused for the fragment shader.
        std::vector<char> hFragShaderCode;
        LoadShader(hFragShaderCode, "Shader.frag.spv");
        shaderModuleInfo.codeSize = hFragShaderCode.size();
        shaderModuleInfo.pCode = (const uint32_t*)hFragShaderCode.data();
        VkShaderModule fragShaderModule = VK_NULL_HANDLE;
        ERR_GUARD_VULKAN( vkCreateShaderModule(g_hDevice, &shaderModuleInfo, g_Allocs, &fragShaderModule) );

        VkPipelineShaderStageCreateInfo vertPipelineShaderStageInfo = { VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO };
        vertPipelineShaderStageInfo.stage = VK_SHADER_STAGE_VERTEX_BIT;
        vertPipelineShaderStageInfo.module = hVertShaderModule;
        vertPipelineShaderStageInfo.pName = "main";

        VkPipelineShaderStageCreateInfo fragPipelineShaderStageInfo = { VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO };
        fragPipelineShaderStageInfo.stage = VK_SHADER_STAGE_FRAGMENT_BIT;
        fragPipelineShaderStageInfo.module = fragShaderModule;
        fragPipelineShaderStageInfo.pName = "main";

        VkPipelineShaderStageCreateInfo pipelineShaderStageInfos[] = {
            vertPipelineShaderStageInfo,
            fragPipelineShaderStageInfo
        };

        // Vertex layout: one interleaved binding with pos/color/texCoord.
        VkVertexInputBindingDescription bindingDescription = {};
        bindingDescription.binding = 0;
        bindingDescription.stride = sizeof(Vertex);
        bindingDescription.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;

        VkVertexInputAttributeDescription attributeDescriptions[3];
        ZeroMemory(attributeDescriptions, sizeof(attributeDescriptions));

        attributeDescriptions[0].binding = 0;
        attributeDescriptions[0].location = 0;
        attributeDescriptions[0].format = VK_FORMAT_R32G32B32_SFLOAT;
        attributeDescriptions[0].offset = offsetof(Vertex, pos);

        attributeDescriptions[1].binding = 0;
        attributeDescriptions[1].location = 1;
        attributeDescriptions[1].format = VK_FORMAT_R32G32B32_SFLOAT;
        attributeDescriptions[1].offset = offsetof(Vertex, color);

        attributeDescriptions[2].binding = 0;
        attributeDescriptions[2].location = 2;
        attributeDescriptions[2].format = VK_FORMAT_R32G32_SFLOAT;
        attributeDescriptions[2].offset = offsetof(Vertex, texCoord);

        VkPipelineVertexInputStateCreateInfo pipelineVertexInputStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO };
        pipelineVertexInputStateInfo.vertexBindingDescriptionCount = 1;
        pipelineVertexInputStateInfo.pVertexBindingDescriptions = &bindingDescription;
        pipelineVertexInputStateInfo.vertexAttributeDescriptionCount = _countof(attributeDescriptions);
        pipelineVertexInputStateInfo.pVertexAttributeDescriptions = attributeDescriptions;

        // Triangle strips with primitive restart enabled, so the index buffer
        // can separate strips with the special restart index value.
        VkPipelineInputAssemblyStateCreateInfo pipelineInputAssemblyStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO };
        pipelineInputAssemblyStateInfo.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
        pipelineInputAssemblyStateInfo.primitiveRestartEnable = VK_TRUE;

        VkViewport viewport = {};
        viewport.x = 0.f;
        viewport.y = 0.f;
        viewport.width = (float)g_Extent.width;
        viewport.height = (float)g_Extent.height;
        viewport.minDepth = 0.f;
        viewport.maxDepth = 1.f;

        VkRect2D scissor = {};
        scissor.offset.x = 0;
        scissor.offset.y = 0;
        scissor.extent = g_Extent;

        VkPipelineViewportStateCreateInfo pipelineViewportStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO };
        pipelineViewportStateInfo.viewportCount = 1;
        pipelineViewportStateInfo.pViewports = &viewport;
        pipelineViewportStateInfo.scissorCount = 1;
        pipelineViewportStateInfo.pScissors = &scissor;

        VkPipelineRasterizationStateCreateInfo pipelineRasterizationStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO };
        pipelineRasterizationStateInfo.depthClampEnable = VK_FALSE;
        pipelineRasterizationStateInfo.rasterizerDiscardEnable = VK_FALSE;
        pipelineRasterizationStateInfo.polygonMode = VK_POLYGON_MODE_FILL;
        pipelineRasterizationStateInfo.lineWidth = 1.f;
        pipelineRasterizationStateInfo.cullMode = VK_CULL_MODE_BACK_BIT;
        pipelineRasterizationStateInfo.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
        pipelineRasterizationStateInfo.depthBiasEnable = VK_FALSE;
        pipelineRasterizationStateInfo.depthBiasConstantFactor = 0.f;
        pipelineRasterizationStateInfo.depthBiasClamp = 0.f;
        pipelineRasterizationStateInfo.depthBiasSlopeFactor = 0.f;

        VkPipelineMultisampleStateCreateInfo pipelineMultisampleStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO };
        pipelineMultisampleStateInfo.sampleShadingEnable = VK_FALSE;
        pipelineMultisampleStateInfo.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
        pipelineMultisampleStateInfo.minSampleShading = 1.f;
        pipelineMultisampleStateInfo.pSampleMask = nullptr;
        pipelineMultisampleStateInfo.alphaToCoverageEnable = VK_FALSE;
        pipelineMultisampleStateInfo.alphaToOneEnable = VK_FALSE;

        // Blending disabled; write all color channels.
        VkPipelineColorBlendAttachmentState pipelineColorBlendAttachmentState = {};
        pipelineColorBlendAttachmentState.colorWriteMask =
            VK_COLOR_COMPONENT_R_BIT |
            VK_COLOR_COMPONENT_G_BIT |
            VK_COLOR_COMPONENT_B_BIT |
            VK_COLOR_COMPONENT_A_BIT;
        pipelineColorBlendAttachmentState.blendEnable = VK_FALSE;
        pipelineColorBlendAttachmentState.srcColorBlendFactor = VK_BLEND_FACTOR_ONE; // Optional
        pipelineColorBlendAttachmentState.dstColorBlendFactor = VK_BLEND_FACTOR_ZERO; // Optional
        pipelineColorBlendAttachmentState.colorBlendOp = VK_BLEND_OP_ADD; // Optional
        pipelineColorBlendAttachmentState.srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE; // Optional
        pipelineColorBlendAttachmentState.dstAlphaBlendFactor = VK_BLEND_FACTOR_ZERO; // Optional
        pipelineColorBlendAttachmentState.alphaBlendOp = VK_BLEND_OP_ADD; // Optional

        VkPipelineColorBlendStateCreateInfo pipelineColorBlendStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO };
        pipelineColorBlendStateInfo.logicOpEnable = VK_FALSE;
        pipelineColorBlendStateInfo.logicOp = VK_LOGIC_OP_COPY;
        pipelineColorBlendStateInfo.attachmentCount = 1;
        pipelineColorBlendStateInfo.pAttachments = &pipelineColorBlendAttachmentState;

        VkPipelineDepthStencilStateCreateInfo depthStencilStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO };
        depthStencilStateInfo.depthTestEnable = VK_TRUE;
        depthStencilStateInfo.depthWriteEnable = VK_TRUE;
        depthStencilStateInfo.depthCompareOp = VK_COMPARE_OP_LESS;
        depthStencilStateInfo.depthBoundsTestEnable = VK_FALSE;
        depthStencilStateInfo.stencilTestEnable = VK_FALSE;

        // No dynamic state: viewport/scissor are baked in, which is fine here
        // because this whole pipeline is rebuilt whenever the extent changes.
        VkGraphicsPipelineCreateInfo pipelineInfo = { VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO };
        pipelineInfo.stageCount = 2;
        pipelineInfo.pStages = pipelineShaderStageInfos;
        pipelineInfo.pVertexInputState = &pipelineVertexInputStateInfo;
        pipelineInfo.pInputAssemblyState = &pipelineInputAssemblyStateInfo;
        pipelineInfo.pViewportState = &pipelineViewportStateInfo;
        pipelineInfo.pRasterizationState = &pipelineRasterizationStateInfo;
        pipelineInfo.pMultisampleState = &pipelineMultisampleStateInfo;
        pipelineInfo.pDepthStencilState = &depthStencilStateInfo;
        pipelineInfo.pColorBlendState = &pipelineColorBlendStateInfo;
        pipelineInfo.pDynamicState = nullptr;
        pipelineInfo.layout = g_hPipelineLayout;
        pipelineInfo.renderPass = g_hRenderPass;
        pipelineInfo.subpass = 0;
        pipelineInfo.basePipelineHandle = VK_NULL_HANDLE;
        pipelineInfo.basePipelineIndex = -1;
        ERR_GUARD_VULKAN( vkCreateGraphicsPipelines(
            g_hDevice,
            VK_NULL_HANDLE,
            1,
            &pipelineInfo,
            g_Allocs,
            &g_hPipeline) );

        // Shader modules are no longer needed once the pipeline exists.
        vkDestroyShaderModule(g_hDevice, fragShaderModule, g_Allocs);
        vkDestroyShaderModule(g_hDevice, hVertShaderModule, g_Allocs);
    }

    // Create frambuffers

    for(size_t i = g_Framebuffers.size(); i--; )
        vkDestroyFramebuffer(g_hDevice, g_Framebuffers[i], g_Allocs);
    g_Framebuffers.clear();

    // NOTE(review): the vector is sized by g_SwapchainImageViews.size() but the
    // loop runs over g_SwapchainImages.size(). These are equal in practice
    // (views are created 1:1 from images above) but using one consistently
    // would be clearer — confirm before changing.
    g_Framebuffers.resize(g_SwapchainImageViews.size());
    for(size_t i = 0; i < g_SwapchainImages.size(); ++i)
    {
        // Each framebuffer pairs one swapchain color view with the shared
        // depth view.
        VkImageView attachments[] = { g_SwapchainImageViews[i], g_hDepthImageView };

        VkFramebufferCreateInfo framebufferInfo = { VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO };
        framebufferInfo.renderPass = g_hRenderPass;
        framebufferInfo.attachmentCount = (uint32_t)_countof(attachments);
        framebufferInfo.pAttachments = attachments;
        framebufferInfo.width = g_Extent.width;
        framebufferInfo.height = g_Extent.height;
        framebufferInfo.layers = 1;
        ERR_GUARD_VULKAN( vkCreateFramebuffer(g_hDevice, &framebufferInfo, g_Allocs, &g_Framebuffers[i]) );
    }

    // Create semaphores

    if(g_hImageAvailableSemaphore != VK_NULL_HANDLE)
    {
        vkDestroySemaphore(g_hDevice, g_hImageAvailableSemaphore, g_Allocs);
        g_hImageAvailableSemaphore = VK_NULL_HANDLE;
    }
    if(g_hRenderFinishedSemaphore != VK_NULL_HANDLE)
    {
        vkDestroySemaphore(g_hDevice, g_hRenderFinishedSemaphore, g_Allocs);
        g_hRenderFinishedSemaphore = VK_NULL_HANDLE;
    }

    VkSemaphoreCreateInfo semaphoreInfo = { VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO };
    ERR_GUARD_VULKAN( vkCreateSemaphore(g_hDevice, &semaphoreInfo, g_Allocs, &g_hImageAvailableSemaphore) );
    ERR_GUARD_VULKAN( vkCreateSemaphore(g_hDevice, &semaphoreInfo, g_Allocs, &g_hRenderFinishedSemaphore) );
}
1319
DestroySwapchain(bool destroyActualSwapchain)1320 static void DestroySwapchain(bool destroyActualSwapchain)
1321 {
1322 if(g_hImageAvailableSemaphore != VK_NULL_HANDLE)
1323 {
1324 vkDestroySemaphore(g_hDevice, g_hImageAvailableSemaphore, g_Allocs);
1325 g_hImageAvailableSemaphore = VK_NULL_HANDLE;
1326 }
1327 if(g_hRenderFinishedSemaphore != VK_NULL_HANDLE)
1328 {
1329 vkDestroySemaphore(g_hDevice, g_hRenderFinishedSemaphore, g_Allocs);
1330 g_hRenderFinishedSemaphore = VK_NULL_HANDLE;
1331 }
1332
1333 for(size_t i = g_Framebuffers.size(); i--; )
1334 vkDestroyFramebuffer(g_hDevice, g_Framebuffers[i], g_Allocs);
1335 g_Framebuffers.clear();
1336
1337 if(g_hDepthImageView != VK_NULL_HANDLE)
1338 {
1339 vkDestroyImageView(g_hDevice, g_hDepthImageView, g_Allocs);
1340 g_hDepthImageView = VK_NULL_HANDLE;
1341 }
1342 if(g_hDepthImage != VK_NULL_HANDLE)
1343 {
1344 vmaDestroyImage(g_hAllocator, g_hDepthImage, g_hDepthImageAlloc);
1345 g_hDepthImage = VK_NULL_HANDLE;
1346 }
1347
1348 if(g_hPipeline != VK_NULL_HANDLE)
1349 {
1350 vkDestroyPipeline(g_hDevice, g_hPipeline, g_Allocs);
1351 g_hPipeline = VK_NULL_HANDLE;
1352 }
1353
1354 if(g_hRenderPass != VK_NULL_HANDLE)
1355 {
1356 vkDestroyRenderPass(g_hDevice, g_hRenderPass, g_Allocs);
1357 g_hRenderPass = VK_NULL_HANDLE;
1358 }
1359
1360 if(g_hPipelineLayout != VK_NULL_HANDLE)
1361 {
1362 vkDestroyPipelineLayout(g_hDevice, g_hPipelineLayout, g_Allocs);
1363 g_hPipelineLayout = VK_NULL_HANDLE;
1364 }
1365
1366 for(size_t i = g_SwapchainImageViews.size(); i--; )
1367 vkDestroyImageView(g_hDevice, g_SwapchainImageViews[i], g_Allocs);
1368 g_SwapchainImageViews.clear();
1369
1370 if(destroyActualSwapchain && (g_hSwapchain != VK_NULL_HANDLE))
1371 {
1372 vkDestroySwapchainKHR(g_hDevice, g_hSwapchain, g_Allocs);
1373 g_hSwapchain = VK_NULL_HANDLE;
1374 }
1375 }
1376
PrintEnabledFeatures()1377 static void PrintEnabledFeatures()
1378 {
1379 wprintf(L"Enabled extensions and features:\n");
1380 wprintf(L"Validation layer: %d\n", g_EnableValidationLayer ? 1 : 0);
1381 wprintf(L"Sparse binding: %d\n", g_SparseBindingEnabled ? 1 : 0);
1382 if(GetVulkanApiVersion() == VK_API_VERSION_1_0)
1383 {
1384 wprintf(L"VK_KHR_get_memory_requirements2: %d\n", VK_KHR_get_memory_requirements2_enabled ? 1 : 0);
1385 wprintf(L"VK_KHR_get_physical_device_properties2: %d\n", VK_KHR_get_physical_device_properties2_enabled ? 1 : 0);
1386 wprintf(L"VK_KHR_dedicated_allocation: %d\n", VK_KHR_dedicated_allocation_enabled ? 1 : 0);
1387 wprintf(L"VK_KHR_bind_memory2: %d\n", VK_KHR_bind_memory2_enabled ? 1 : 0);
1388 }
1389 wprintf(L"VK_EXT_memory_budget: %d\n", VK_EXT_memory_budget_enabled ? 1 : 0);
1390 wprintf(L"VK_AMD_device_coherent_memory: %d\n", VK_AMD_device_coherent_memory_enabled ? 1 : 0);
1391 if(GetVulkanApiVersion() < VK_API_VERSION_1_2)
1392 {
1393 wprintf(L"VK_KHR_buffer_device_address: %d\n", VK_KHR_buffer_device_address_enabled ? 1 : 0);
1394 }
1395 else
1396 {
1397 wprintf(L"bufferDeviceAddress: %d\n", VK_KHR_buffer_device_address_enabled ? 1 : 0);
1398 }
1399 wprintf(L"VK_EXT_memory_priority: %d\n", VK_EXT_memory_priority ? 1 : 0);
1400 }
1401
SetAllocatorCreateInfo(VmaAllocatorCreateInfo & outInfo)1402 void SetAllocatorCreateInfo(VmaAllocatorCreateInfo& outInfo)
1403 {
1404 outInfo = {};
1405
1406 outInfo.physicalDevice = g_hPhysicalDevice;
1407 outInfo.device = g_hDevice;
1408 outInfo.instance = g_hVulkanInstance;
1409 outInfo.vulkanApiVersion = GetVulkanApiVersion();
1410
1411 if(VK_KHR_dedicated_allocation_enabled)
1412 {
1413 outInfo.flags |= VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT;
1414 }
1415 if(VK_KHR_bind_memory2_enabled)
1416 {
1417 outInfo.flags |= VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT;
1418 }
1419 #if !defined(VMA_MEMORY_BUDGET) || VMA_MEMORY_BUDGET == 1
1420 if(VK_EXT_memory_budget_enabled && (
1421 GetVulkanApiVersion() >= VK_API_VERSION_1_1 || VK_KHR_get_physical_device_properties2_enabled))
1422 {
1423 outInfo.flags |= VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT;
1424 }
1425 #endif
1426 if(VK_AMD_device_coherent_memory_enabled)
1427 {
1428 outInfo.flags |= VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT;
1429 }
1430 if(VK_KHR_buffer_device_address_enabled)
1431 {
1432 outInfo.flags |= VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT;
1433 }
1434 #if !defined(VMA_MEMORY_PRIORITY) || VMA_MEMORY_PRIORITY == 1
1435 if(VK_EXT_memory_priority_enabled)
1436 {
1437 outInfo.flags |= VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT;
1438 }
1439 #endif
1440
1441 if(USE_CUSTOM_CPU_ALLOCATION_CALLBACKS)
1442 {
1443 outInfo.pAllocationCallbacks = &g_CpuAllocationCallbacks;
1444 }
1445
1446 #if VMA_DYNAMIC_VULKAN_FUNCTIONS
1447 static VmaVulkanFunctions vulkanFunctions = {};
1448 vulkanFunctions.vkGetInstanceProcAddr = vkGetInstanceProcAddr;
1449 vulkanFunctions.vkGetDeviceProcAddr = vkGetDeviceProcAddr;
1450 outInfo.pVulkanFunctions = &vulkanFunctions;
1451 #endif
1452
1453 // Uncomment to enable recording to CSV file.
1454 /*
1455 static VmaRecordSettings recordSettings = {};
1456 recordSettings.pFilePath = "VulkanSample.csv";
1457 outInfo.pRecordSettings = &recordSettings;
1458 */
1459
1460 // Uncomment to enable HeapSizeLimit.
1461 /*
1462 static std::array<VkDeviceSize, VK_MAX_MEMORY_HEAPS> heapSizeLimit;
1463 std::fill(heapSizeLimit.begin(), heapSizeLimit.end(), VK_WHOLE_SIZE);
1464 heapSizeLimit[0] = 512ull * 1024 * 1024;
1465 outInfo.pHeapSizeLimit = heapSizeLimit.data();
1466 */
1467 }
1468
PrintPhysicalDeviceProperties(const VkPhysicalDeviceProperties & properties)1469 static void PrintPhysicalDeviceProperties(const VkPhysicalDeviceProperties& properties)
1470 {
1471 wprintf(L"physicalDeviceProperties:\n");
1472 wprintf(L" driverVersion: 0x%X\n", properties.driverVersion);
1473 wprintf(L" vendorID: 0x%X (%s)\n", properties.vendorID, VendorIDToStr(properties.vendorID));
1474 wprintf(L" deviceID: 0x%X\n", properties.deviceID);
1475 wprintf(L" deviceType: %u (%s)\n", properties.deviceType, PhysicalDeviceTypeToStr(properties.deviceType));
1476 wprintf(L" deviceName: %hs\n", properties.deviceName);
1477 wprintf(L" limits:\n");
1478 wprintf(L" maxMemoryAllocationCount: %u\n", properties.limits.maxMemoryAllocationCount);
1479 wprintf(L" bufferImageGranularity: %llu B\n", properties.limits.bufferImageGranularity);
1480 wprintf(L" nonCoherentAtomSize: %llu B\n", properties.limits.nonCoherentAtomSize);
1481 }
1482
1483 #if VMA_VULKAN_VERSION >= 1002000
PrintPhysicalDeviceVulkan11Properties(const VkPhysicalDeviceVulkan11Properties & properties)1484 static void PrintPhysicalDeviceVulkan11Properties(const VkPhysicalDeviceVulkan11Properties& properties)
1485 {
1486 wprintf(L"physicalDeviceVulkan11Properties:\n");
1487 std::wstring sizeStr = SizeToStr(properties.maxMemoryAllocationSize);
1488 wprintf(L" maxMemoryAllocationSize: %llu B (%s)\n", properties.maxMemoryAllocationSize, sizeStr.c_str());
1489 }
PrintPhysicalDeviceVulkan12Properties(const VkPhysicalDeviceVulkan12Properties & properties)1490 static void PrintPhysicalDeviceVulkan12Properties(const VkPhysicalDeviceVulkan12Properties& properties)
1491 {
1492 wprintf(L"physicalDeviceVulkan12Properties:\n");
1493 std::wstring str = DriverIDToStr(properties.driverID);
1494 wprintf(L" driverID: %u (%s)\n", properties.driverID, str.c_str());
1495 wprintf(L" driverName: %hs\n", properties.driverName);
1496 wprintf(L" driverInfo: %hs\n", properties.driverInfo);
1497 }
1498 #endif // #if VMA_VULKAN_VERSION > 1002000
1499
// Appends flagStr to inout, inserting a ", " separator when inout is
// non-empty. Used to build human-readable flag lists.
static void AddFlagToStr(std::wstring& inout, const wchar_t* flagStr)
{
    if(inout.empty())
        inout = flagStr;
    else
        inout.append(L", ").append(flagStr);
}
1506
HeapFlagsToStr(VkMemoryHeapFlags flags)1507 static std::wstring HeapFlagsToStr(VkMemoryHeapFlags flags)
1508 {
1509 std::wstring result;
1510 if(flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT)
1511 AddFlagToStr(result, L"DEVICE_LOCAL");
1512 if(flags & VK_MEMORY_HEAP_MULTI_INSTANCE_BIT)
1513 AddFlagToStr(result, L"MULTI_INSTANCE");
1514 return result;
1515 }
1516
PropertyFlagsToStr(VkMemoryPropertyFlags flags)1517 static std::wstring PropertyFlagsToStr(VkMemoryPropertyFlags flags)
1518 {
1519 std::wstring result;
1520 if(flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT)
1521 AddFlagToStr(result, L"DEVICE_LOCAL");
1522 if(flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
1523 AddFlagToStr(result, L"HOST_VISIBLE");
1524 if(flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
1525 AddFlagToStr(result, L"HOST_COHERENT");
1526 if(flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT)
1527 AddFlagToStr(result, L"HOST_CACHED");
1528 if(flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT)
1529 AddFlagToStr(result, L"LAZILY_ALLOCATED");
1530
1531 #if VMA_VULKAN_VERSION >= 1001000
1532 if(flags & VK_MEMORY_PROPERTY_PROTECTED_BIT)
1533 AddFlagToStr(result, L"PROTECTED");
1534 #endif
1535
1536 #if VK_AMD_device_coherent_memory
1537 if(flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD)
1538 AddFlagToStr(result, L"DEVICE_COHERENT (AMD)");
1539 if(flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD)
1540 AddFlagToStr(result, L"DEVICE_UNCACHED (AMD)");
1541 #endif
1542
1543 return result;
1544 }
1545
PrintMemoryTypes()1546 static void PrintMemoryTypes()
1547 {
1548 wprintf(L"MEMORY HEAPS:\n");
1549 const VkPhysicalDeviceMemoryProperties* memProps = nullptr;
1550 vmaGetMemoryProperties(g_hAllocator, &memProps);
1551
1552 wprintf(L"heapCount=%u, typeCount=%u\n", memProps->memoryHeapCount, memProps->memoryTypeCount);
1553
1554 std::wstring sizeStr, flagsStr;
1555 for(uint32_t heapIndex = 0; heapIndex < memProps->memoryHeapCount; ++heapIndex)
1556 {
1557 const VkMemoryHeap& heap = memProps->memoryHeaps[heapIndex];
1558 sizeStr = SizeToStr(heap.size);
1559 flagsStr = HeapFlagsToStr(heap.flags);
1560 wprintf(L"Heap %u: %llu B (%s) %s\n", heapIndex, heap.size, sizeStr.c_str(), flagsStr.c_str());
1561
1562 for(uint32_t typeIndex = 0; typeIndex < memProps->memoryTypeCount; ++typeIndex)
1563 {
1564 const VkMemoryType& type = memProps->memoryTypes[typeIndex];
1565 if(type.heapIndex == heapIndex)
1566 {
1567 flagsStr = PropertyFlagsToStr(type.propertyFlags);
1568 wprintf(L" Type %u: %s\n", typeIndex, flagsStr.c_str());
1569 }
1570 }
1571 }
1572 }
1573
#if 0
// Currently unused helper, kept for reference (compiled out with #if 0):
// returns the sum of mapFunc(*it) over the range [beg, end) as VkDeviceSize.
template<typename It, typename MapFunc>
inline VkDeviceSize MapSum(It beg, It end, MapFunc mapFunc)
{
    VkDeviceSize result = 0;
    for(It it = beg; it != end; ++it)
        result += mapFunc(*it);
    return result;
}
#endif
1584
CanCreateVertexBuffer(uint32_t allowedMemoryTypeBits)1585 static bool CanCreateVertexBuffer(uint32_t allowedMemoryTypeBits)
1586 {
1587 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1588 bufCreateInfo.size = 0x10000;
1589 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1590
1591 VkBuffer buf = VK_NULL_HANDLE;
1592 VkResult res = vkCreateBuffer(g_hDevice, &bufCreateInfo, g_Allocs, &buf);
1593 assert(res == VK_SUCCESS);
1594
1595 VkMemoryRequirements memReq = {};
1596 vkGetBufferMemoryRequirements(g_hDevice, buf, &memReq);
1597
1598 vkDestroyBuffer(g_hDevice, buf, g_Allocs);
1599
1600 return (memReq.memoryTypeBits & allowedMemoryTypeBits) != 0;
1601 }
1602
CanCreateOptimalSampledImage(uint32_t allowedMemoryTypeBits)1603 static bool CanCreateOptimalSampledImage(uint32_t allowedMemoryTypeBits)
1604 {
1605 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1606 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1607 imgCreateInfo.extent.width = 256;
1608 imgCreateInfo.extent.height = 256;
1609 imgCreateInfo.extent.depth = 1;
1610 imgCreateInfo.mipLevels = 1;
1611 imgCreateInfo.arrayLayers = 1;
1612 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
1613 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
1614 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1615 imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
1616 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1617
1618 VkImage img = VK_NULL_HANDLE;
1619 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
1620 assert(res == VK_SUCCESS);
1621
1622 VkMemoryRequirements memReq = {};
1623 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
1624
1625 vkDestroyImage(g_hDevice, img, g_Allocs);
1626
1627 return (memReq.memoryTypeBits & allowedMemoryTypeBits) != 0;
1628 }
1629
PrintMemoryConclusions()1630 static void PrintMemoryConclusions()
1631 {
1632 wprintf(L"Conclusions:\n");
1633
1634 const VkPhysicalDeviceProperties* props = nullptr;
1635 const VkPhysicalDeviceMemoryProperties* memProps = nullptr;
1636 vmaGetPhysicalDeviceProperties(g_hAllocator, &props);
1637 vmaGetMemoryProperties(g_hAllocator, &memProps);
1638
1639 const uint32_t heapCount = memProps->memoryHeapCount;
1640
1641 uint32_t deviceLocalHeapCount = 0;
1642 uint32_t hostVisibleHeapCount = 0;
1643 uint32_t deviceLocalAndHostVisibleHeapCount = 0;
1644 VkDeviceSize deviceLocalHeapSumSize = 0;
1645 VkDeviceSize hostVisibleHeapSumSize = 0;
1646 VkDeviceSize deviceLocalAndHostVisibleHeapSumSize = 0;
1647
1648 for(uint32_t heapIndex = 0; heapIndex < heapCount; ++heapIndex)
1649 {
1650 const VkMemoryHeap& heap = memProps->memoryHeaps[heapIndex];
1651 const bool isDeviceLocal = (heap.flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0;
1652 bool isHostVisible = false;
1653 for(uint32_t typeIndex = 0; typeIndex < memProps->memoryTypeCount; ++typeIndex)
1654 {
1655 const VkMemoryType& type = memProps->memoryTypes[typeIndex];
1656 if(type.heapIndex == heapIndex && (type.propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT))
1657 {
1658 isHostVisible = true;
1659 break;
1660 }
1661 }
1662 if(isDeviceLocal)
1663 {
1664 ++deviceLocalHeapCount;
1665 deviceLocalHeapSumSize += heap.size;
1666 }
1667 if(isHostVisible)
1668 {
1669 ++hostVisibleHeapCount;
1670 hostVisibleHeapSumSize += heap.size;
1671 if(isDeviceLocal)
1672 {
1673 ++deviceLocalAndHostVisibleHeapCount;
1674 deviceLocalAndHostVisibleHeapSumSize += heap.size;
1675 }
1676 }
1677 }
1678
1679 uint32_t hostVisibleNotHostCoherentTypeCount = 0;
1680 uint32_t notDeviceLocalNotHostVisibleTypeCount = 0;
1681 uint32_t amdSpecificTypeCount = 0;
1682 uint32_t lazilyAllocatedTypeCount = 0;
1683 uint32_t allTypeBits = 0;
1684 uint32_t deviceLocalTypeBits = 0;
1685 for(uint32_t typeIndex = 0; typeIndex < memProps->memoryTypeCount; ++typeIndex)
1686 {
1687 const VkMemoryType& type = memProps->memoryTypes[typeIndex];
1688 allTypeBits |= 1u << typeIndex;
1689 if(type.propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT)
1690 {
1691 deviceLocalTypeBits |= 1u << typeIndex;
1692 }
1693 if((type.propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) &&
1694 (type.propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
1695 {
1696 ++hostVisibleNotHostCoherentTypeCount;
1697 }
1698 if((type.propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == 0 &&
1699 (type.propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
1700 {
1701 ++notDeviceLocalNotHostVisibleTypeCount;
1702 }
1703 if(type.propertyFlags & (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD))
1704 {
1705 ++amdSpecificTypeCount;
1706 }
1707 if(type.propertyFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT)
1708 {
1709 ++lazilyAllocatedTypeCount;
1710 }
1711 }
1712
1713 assert(deviceLocalHeapCount > 0);
1714 if(deviceLocalHeapCount == heapCount)
1715 wprintf(L"- All heaps are DEVICE_LOCAL.\n");
1716 else
1717 wprintf(L"- %u heaps are DEVICE_LOCAL, total %s.\n", deviceLocalHeapCount, SizeToStr(deviceLocalHeapSumSize).c_str());
1718
1719 assert(hostVisibleHeapCount > 0);
1720 if(hostVisibleHeapCount == heapCount)
1721 wprintf(L"- All heaps are HOST_VISIBLE.\n");
1722 else
1723 wprintf(L"- %u heaps are HOST_VISIBLE, total %s.\n", deviceLocalHeapCount, SizeToStr(hostVisibleHeapSumSize).c_str());
1724
1725 if(deviceLocalHeapCount < heapCount && hostVisibleHeapCount < heapCount)
1726 {
1727 if(deviceLocalAndHostVisibleHeapCount == 0)
1728 wprintf(L"- No heaps are DEVICE_LOCAL and HOST_VISIBLE.\n");
1729 if(deviceLocalAndHostVisibleHeapCount == heapCount)
1730 wprintf(L"- All heaps are DEVICE_LOCAL and HOST_VISIBLE.\n");
1731 else
1732 wprintf(L"- %u heaps are DEVICE_LOCAL and HOST_VISIBLE, total %s.\n", deviceLocalAndHostVisibleHeapCount, SizeToStr(deviceLocalAndHostVisibleHeapSumSize).c_str());
1733 }
1734
1735 if(hostVisibleNotHostCoherentTypeCount == 0)
1736 wprintf(L"- No types are HOST_VISIBLE but not HOST_COHERENT.\n");
1737 else
1738 wprintf(L"- %u types are HOST_VISIBLE but not HOST_COHERENT.\n", hostVisibleNotHostCoherentTypeCount);
1739
1740 if(notDeviceLocalNotHostVisibleTypeCount == 0)
1741 wprintf(L"- No types are not DEVICE_LOCAL and not HOST_VISIBLE.\n");
1742 else
1743 wprintf(L"- %u types are not DEVICE_LOCAL and not HOST_VISIBLE.\n", notDeviceLocalNotHostVisibleTypeCount);
1744
1745 if(amdSpecificTypeCount == 0)
1746 wprintf(L"- No types are AMD-specific DEVICE_COHERENT or DEVICE_UNCACHED.\n");
1747 else
1748 wprintf(L"- %u types are AMD-specific DEVICE_COHERENT or DEVICE_UNCACHED.\n", amdSpecificTypeCount);
1749
1750 if(lazilyAllocatedTypeCount == 0)
1751 wprintf(L"- No types are LAZILY_ALLOCATED.\n");
1752 else
1753 wprintf(L"- %u types are LAZILY_ALLOCATED.\n", lazilyAllocatedTypeCount);
1754
1755 if(props->vendorID == VENDOR_ID_AMD &&
1756 props->deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU &&
1757 deviceLocalAndHostVisibleHeapSumSize > 256llu * 1024 * 1024)
1758 {
1759 wprintf(L"- AMD Smart Access Memory (SAM) is enabled!\n");
1760 }
1761
1762 if(deviceLocalHeapCount < heapCount)
1763 {
1764 const uint32_t nonDeviceLocalTypeBits = ~deviceLocalTypeBits & allTypeBits;
1765
1766 if(CanCreateVertexBuffer(nonDeviceLocalTypeBits))
1767 wprintf(L"- A buffer with VERTEX_BUFFER usage can be created in some non-DEVICE_LOCAL type.\n");
1768 else
1769 wprintf(L"- A buffer with VERTEX_BUFFER usage cannot be created in some non-DEVICE_LOCAL type.\n");
1770
1771 if(CanCreateOptimalSampledImage(nonDeviceLocalTypeBits))
1772 wprintf(L"- An image with OPTIMAL tiling and SAMPLED usage can be created in some non-DEVICE_LOCAL type.\n");
1773 else
1774 wprintf(L"- An image with OPTIMAL tiling and SAMPLED usage cannot be created in some non-DEVICE_LOCAL type.\n");
1775 }
1776
1777 //wprintf(L"\n");
1778 }
1779
// One-time initialization of the whole Vulkan application state, in order:
// Win32 surface, device extension/feature discovery, queue family selection,
// logical device, extension function pointers, VMA allocator, queues, command
// pool + command buffers + fences, sampler, texture, mesh, descriptor set
// layout/pool/set, and finally the swapchain.
// Counterpart of FinalizeApplication(), which destroys everything in reverse.
// Uses ERR_GUARD_VULKAN/assert on failures — any error aborts initialization.
static void InitializeApplication()
{
    // Create VkSurfaceKHR.
    VkWin32SurfaceCreateInfoKHR surfaceInfo = { VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR };
    surfaceInfo.hinstance = g_hAppInstance;
    surfaceInfo.hwnd = g_hWnd;
    VkResult result = vkCreateWin32SurfaceKHR(g_hVulkanInstance, &surfaceInfo, g_Allocs, &g_hSurface);
    assert(result == VK_SUCCESS);

    // Query for device extensions

    uint32_t physicalDeviceExtensionPropertyCount = 0;
    ERR_GUARD_VULKAN( vkEnumerateDeviceExtensionProperties(g_hPhysicalDevice, nullptr, &physicalDeviceExtensionPropertyCount, nullptr) );
    // NOTE: brace-init with a count here resolves to the size constructor
    // (VkExtensionProperties has no initializer_list match) — creates
    // physicalDeviceExtensionPropertyCount zero-initialized elements.
    std::vector<VkExtensionProperties> physicalDeviceExtensionProperties{physicalDeviceExtensionPropertyCount};
    if(physicalDeviceExtensionPropertyCount)
    {
        ERR_GUARD_VULKAN( vkEnumerateDeviceExtensionProperties(
            g_hPhysicalDevice,
            nullptr,
            &physicalDeviceExtensionPropertyCount,
            physicalDeviceExtensionProperties.data()) );
    }

    // Set the VK_*_enabled globals for each extension of interest. Extensions
    // that were promoted to core in a later Vulkan version are only enabled as
    // extensions when running on the API version that still needs them.
    for(uint32_t i = 0; i < physicalDeviceExtensionPropertyCount; ++i)
    {
        if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME) == 0)
        {
            if(GetVulkanApiVersion() == VK_API_VERSION_1_0)
            {
                VK_KHR_get_memory_requirements2_enabled = true;
            }
        }
        else if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME) == 0)
        {
            if(GetVulkanApiVersion() == VK_API_VERSION_1_0)
            {
                VK_KHR_dedicated_allocation_enabled = true;
            }
        }
        else if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME) == 0)
        {
            if(GetVulkanApiVersion() == VK_API_VERSION_1_0)
            {
                VK_KHR_bind_memory2_enabled = true;
            }
        }
        else if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_EXT_MEMORY_BUDGET_EXTENSION_NAME) == 0)
            VK_EXT_memory_budget_enabled = true;
        else if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME) == 0)
            VK_AMD_device_coherent_memory_enabled = true;
        else if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME) == 0)
        {
            if(GetVulkanApiVersion() < VK_API_VERSION_1_2)
            {
                VK_KHR_buffer_device_address_enabled = true;
            }
        }
        else if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME) == 0)
            VK_EXT_memory_priority_enabled = true;
    }

    if(GetVulkanApiVersion() >= VK_API_VERSION_1_2)
        VK_KHR_buffer_device_address_enabled = true; // Promoted to core Vulkan 1.2.

    // Query for features

#if VMA_VULKAN_VERSION >= 1001000
    VkPhysicalDeviceProperties2 physicalDeviceProperties2 = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 };

#if VMA_VULKAN_VERSION >= 1002000
    // Vulkan spec says structure VkPhysicalDeviceVulkan11Properties is "Provided by VK_VERSION_1_2" - is this a mistake? Assuming not...
    VkPhysicalDeviceVulkan11Properties physicalDeviceVulkan11Properties = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES };
    VkPhysicalDeviceVulkan12Properties physicalDeviceVulkan12Properties = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES };
    PnextChainPushFront(&physicalDeviceProperties2, &physicalDeviceVulkan11Properties);
    PnextChainPushFront(&physicalDeviceProperties2, &physicalDeviceVulkan12Properties);
#endif

    vkGetPhysicalDeviceProperties2(g_hPhysicalDevice, &physicalDeviceProperties2);

    PrintPhysicalDeviceProperties(physicalDeviceProperties2.properties);
#if VMA_VULKAN_VERSION >= 1002000
    PrintPhysicalDeviceVulkan11Properties(physicalDeviceVulkan11Properties);
    PrintPhysicalDeviceVulkan12Properties(physicalDeviceVulkan12Properties);
#endif

#else // #if VMA_VULKAN_VERSION >= 1001000
    VkPhysicalDeviceProperties physicalDeviceProperties = {};
    vkGetPhysicalDeviceProperties(g_hPhysicalDevice, &physicalDeviceProperties);
    PrintPhysicalDeviceProperties(physicalDeviceProperties);

#endif // #if VMA_VULKAN_VERSION >= 1001000

    wprintf(L"\n");

    // Query feature support, chaining per-extension feature structs only for
    // extensions the device actually exposes.
    VkPhysicalDeviceFeatures2 physicalDeviceFeatures = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 };

    VkPhysicalDeviceCoherentMemoryFeaturesAMD physicalDeviceCoherentMemoryFeatures = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD };
    if(VK_AMD_device_coherent_memory_enabled)
    {
        PnextChainPushFront(&physicalDeviceFeatures, &physicalDeviceCoherentMemoryFeatures);
    }

    VkPhysicalDeviceBufferDeviceAddressFeaturesKHR physicalDeviceBufferDeviceAddressFeatures = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR };
    if(VK_KHR_buffer_device_address_enabled)
    {
        PnextChainPushFront(&physicalDeviceFeatures, &physicalDeviceBufferDeviceAddressFeatures);
    }

    VkPhysicalDeviceMemoryPriorityFeaturesEXT physicalDeviceMemoryPriorityFeatures = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT };
    if(VK_EXT_memory_priority_enabled)
    {
        PnextChainPushFront(&physicalDeviceFeatures, &physicalDeviceMemoryPriorityFeatures);
    }

    vkGetPhysicalDeviceFeatures2(g_hPhysicalDevice, &physicalDeviceFeatures);

    g_SparseBindingEnabled = physicalDeviceFeatures.features.sparseBinding != 0;

    // The extension is supported as fake with no real support for this feature? Don't use it.
    if(VK_AMD_device_coherent_memory_enabled && !physicalDeviceCoherentMemoryFeatures.deviceCoherentMemory)
        VK_AMD_device_coherent_memory_enabled = false;
    if(VK_KHR_buffer_device_address_enabled && !physicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddress)
        VK_KHR_buffer_device_address_enabled = false;
    if(VK_EXT_memory_priority_enabled && !physicalDeviceMemoryPriorityFeatures.memoryPriority)
        VK_EXT_memory_priority_enabled = false;

    // Find queue family index

    uint32_t queueFamilyCount = 0;
    vkGetPhysicalDeviceQueueFamilyProperties(g_hPhysicalDevice, &queueFamilyCount, nullptr);
    assert(queueFamilyCount > 0);
    std::vector<VkQueueFamilyProperties> queueFamilies(queueFamilyCount);
    vkGetPhysicalDeviceQueueFamilyProperties(g_hPhysicalDevice, &queueFamilyCount, queueFamilies.data());
    // Loop until all needed family indices are found or families run out.
    // NOTE(review): UINT_MAX is used here but UINT32_MAX below — presumably the
    // same sentinel for "not found yet"; consider unifying.
    for(uint32_t i = 0;
        (i < queueFamilyCount) &&
            (g_GraphicsQueueFamilyIndex == UINT_MAX ||
                g_PresentQueueFamilyIndex == UINT_MAX ||
                (g_SparseBindingEnabled && g_SparseBindingQueueFamilyIndex == UINT_MAX));
        ++i)
    {
        if(queueFamilies[i].queueCount > 0)
        {
            const uint32_t flagsForGraphicsQueue = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
            // NOTE(review): `!= 0` looks suspicious — the sentinel for "not yet
            // chosen" is UINT_MAX, so this condition stays true and keeps
            // overwriting the index unless family 0 was chosen. Probably meant
            // `== UINT_MAX`; confirm intended behavior before changing.
            if((g_GraphicsQueueFamilyIndex != 0) &&
                ((queueFamilies[i].queueFlags & flagsForGraphicsQueue) == flagsForGraphicsQueue))
            {
                g_GraphicsQueueFamilyIndex = i;
            }

            VkBool32 surfaceSupported = 0;
            VkResult res = vkGetPhysicalDeviceSurfaceSupportKHR(g_hPhysicalDevice, i, g_hSurface, &surfaceSupported);
            if((res >= 0) && (surfaceSupported == VK_TRUE))
            {
                g_PresentQueueFamilyIndex = i;
            }

            if(g_SparseBindingEnabled &&
                g_SparseBindingQueueFamilyIndex == UINT32_MAX &&
                (queueFamilies[i].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT) != 0)
            {
                g_SparseBindingQueueFamilyIndex = i;
            }
        }
    }
    assert(g_GraphicsQueueFamilyIndex != UINT_MAX);

    // Sparse binding requires both the device feature and a suitable queue family.
    g_SparseBindingEnabled = g_SparseBindingEnabled && g_SparseBindingQueueFamilyIndex != UINT32_MAX;

    // Create logical device

    const float queuePriority = 1.f;

    // Up to 3 distinct queue families: graphics, present, sparse binding.
    VkDeviceQueueCreateInfo queueCreateInfo[3] = {};
    uint32_t queueCount = 1;
    queueCreateInfo[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo[0].queueFamilyIndex = g_GraphicsQueueFamilyIndex;
    queueCreateInfo[0].queueCount = 1;
    queueCreateInfo[0].pQueuePriorities = &queuePriority;

    if(g_PresentQueueFamilyIndex != g_GraphicsQueueFamilyIndex)
    {

        queueCreateInfo[queueCount].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
        queueCreateInfo[queueCount].queueFamilyIndex = g_PresentQueueFamilyIndex;
        queueCreateInfo[queueCount].queueCount = 1;
        queueCreateInfo[queueCount].pQueuePriorities = &queuePriority;
        ++queueCount;
    }

    if(g_SparseBindingEnabled &&
        g_SparseBindingQueueFamilyIndex != g_GraphicsQueueFamilyIndex &&
        g_SparseBindingQueueFamilyIndex != g_PresentQueueFamilyIndex)
    {

        queueCreateInfo[queueCount].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
        queueCreateInfo[queueCount].queueFamilyIndex = g_SparseBindingQueueFamilyIndex;
        queueCreateInfo[queueCount].queueCount = 1;
        queueCreateInfo[queueCount].pQueuePriorities = &queuePriority;
        ++queueCount;
    }

    // Enable only the extensions that were detected and survived the feature check.
    std::vector<const char*> enabledDeviceExtensions;
    enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
    if(VK_KHR_get_memory_requirements2_enabled)
        enabledDeviceExtensions.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
    if(VK_KHR_dedicated_allocation_enabled)
        enabledDeviceExtensions.push_back(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
    if(VK_KHR_bind_memory2_enabled)
        enabledDeviceExtensions.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
    if(VK_EXT_memory_budget_enabled)
        enabledDeviceExtensions.push_back(VK_EXT_MEMORY_BUDGET_EXTENSION_NAME);
    if(VK_AMD_device_coherent_memory_enabled)
        enabledDeviceExtensions.push_back(VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME);
    // Only list buffer_device_address as an extension pre-1.2 (core afterwards).
    if(VK_KHR_buffer_device_address_enabled && GetVulkanApiVersion() < VK_API_VERSION_1_2)
        enabledDeviceExtensions.push_back(VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME);
    if(VK_EXT_memory_priority_enabled)
        enabledDeviceExtensions.push_back(VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME);

    VkPhysicalDeviceFeatures2 deviceFeatures = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 };
    deviceFeatures.features.samplerAnisotropy = VK_TRUE;
    deviceFeatures.features.sparseBinding = g_SparseBindingEnabled ? VK_TRUE : VK_FALSE;

    if(VK_AMD_device_coherent_memory_enabled)
    {
        physicalDeviceCoherentMemoryFeatures.deviceCoherentMemory = VK_TRUE;
        PnextChainPushBack(&deviceFeatures, &physicalDeviceCoherentMemoryFeatures);
    }
    if(VK_KHR_buffer_device_address_enabled)
    {
        // Re-initialize to detach from the earlier query chain before reuse.
        physicalDeviceBufferDeviceAddressFeatures = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR };
        physicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddress = VK_TRUE;
        PnextChainPushBack(&deviceFeatures, &physicalDeviceBufferDeviceAddressFeatures);
    }
    if(VK_EXT_memory_priority_enabled)
    {
        PnextChainPushBack(&deviceFeatures, &physicalDeviceMemoryPriorityFeatures);
    }

    VkDeviceCreateInfo deviceCreateInfo = { VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO };
    deviceCreateInfo.pNext = &deviceFeatures;
    deviceCreateInfo.enabledLayerCount = 0;
    deviceCreateInfo.ppEnabledLayerNames = nullptr;
    deviceCreateInfo.enabledExtensionCount = (uint32_t)enabledDeviceExtensions.size();
    deviceCreateInfo.ppEnabledExtensionNames = !enabledDeviceExtensions.empty() ? enabledDeviceExtensions.data() : nullptr;
    deviceCreateInfo.queueCreateInfoCount = queueCount;
    deviceCreateInfo.pQueueCreateInfos = queueCreateInfo;

    ERR_GUARD_VULKAN( vkCreateDevice(g_hPhysicalDevice, &deviceCreateInfo, g_Allocs, &g_hDevice) );

    // Fetch pointers to extension functions
    if(VK_KHR_buffer_device_address_enabled)
    {
        if(GetVulkanApiVersion() >= VK_API_VERSION_1_2)
        {
            g_vkGetBufferDeviceAddressKHR = (PFN_vkGetBufferDeviceAddressEXT)vkGetDeviceProcAddr(g_hDevice, "vkGetBufferDeviceAddress");
        }
        // NOTE(review): this condition is redundant — it is already inside
        // `if(VK_KHR_buffer_device_address_enabled)`, so it is always true here.
        else if(VK_KHR_buffer_device_address_enabled)
        {
            g_vkGetBufferDeviceAddressKHR = (PFN_vkGetBufferDeviceAddressEXT)vkGetDeviceProcAddr(g_hDevice, "vkGetBufferDeviceAddressKHR");
        }
        assert(g_vkGetBufferDeviceAddressKHR != nullptr);
    }

    // Create memory allocator

    VmaAllocatorCreateInfo allocatorInfo = {};
    SetAllocatorCreateInfo(allocatorInfo);
    ERR_GUARD_VULKAN( vmaCreateAllocator(&allocatorInfo, &g_hAllocator) );

    PrintMemoryTypes();
    wprintf(L"\n");
    PrintMemoryConclusions();
    wprintf(L"\n");
    PrintEnabledFeatures();
    wprintf(L"\n");

    // Retrieve queues (don't need to be destroyed).

    vkGetDeviceQueue(g_hDevice, g_GraphicsQueueFamilyIndex, 0, &g_hGraphicsQueue);
    vkGetDeviceQueue(g_hDevice, g_PresentQueueFamilyIndex, 0, &g_hPresentQueue);
    assert(g_hGraphicsQueue);
    assert(g_hPresentQueue);

    if(g_SparseBindingEnabled)
    {
        vkGetDeviceQueue(g_hDevice, g_SparseBindingQueueFamilyIndex, 0, &g_hSparseBindingQueue);
        assert(g_hSparseBindingQueue);
    }

    // Create command pool

    VkCommandPoolCreateInfo commandPoolInfo = { VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO };
    commandPoolInfo.queueFamilyIndex = g_GraphicsQueueFamilyIndex;
    commandPoolInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    ERR_GUARD_VULKAN( vkCreateCommandPool(g_hDevice, &commandPoolInfo, g_Allocs, &g_hCommandPool) );

    VkCommandBufferAllocateInfo commandBufferInfo = { VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO };
    commandBufferInfo.commandPool = g_hCommandPool;
    commandBufferInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    commandBufferInfo.commandBufferCount = COMMAND_BUFFER_COUNT;
    ERR_GUARD_VULKAN( vkAllocateCommandBuffers(g_hDevice, &commandBufferInfo, g_MainCommandBuffers) );

    // Per-frame fences start signaled so the first DrawFrame doesn't block.
    VkFenceCreateInfo fenceInfo = { VK_STRUCTURE_TYPE_FENCE_CREATE_INFO };
    fenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
    for(size_t i = 0; i < COMMAND_BUFFER_COUNT; ++i)
    {
        ERR_GUARD_VULKAN( vkCreateFence(g_hDevice, &fenceInfo, g_Allocs, &g_MainCommandBufferExecutedFances[i]) );
    }

    ERR_GUARD_VULKAN( vkCreateFence(g_hDevice, &fenceInfo, g_Allocs, &g_ImmediateFence) );

    // Reuse the same allocate-info struct for the single temporary command buffer.
    commandBufferInfo.commandBufferCount = 1;
    ERR_GUARD_VULKAN( vkAllocateCommandBuffers(g_hDevice, &commandBufferInfo, &g_hTemporaryCommandBuffer) );

    // Create texture sampler

    VkSamplerCreateInfo samplerInfo = { VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO };
    samplerInfo.magFilter = VK_FILTER_LINEAR;
    samplerInfo.minFilter = VK_FILTER_LINEAR;
    samplerInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT;
    samplerInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT;
    samplerInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT;
    samplerInfo.anisotropyEnable = VK_TRUE;
    samplerInfo.maxAnisotropy = 16;
    samplerInfo.borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK;
    samplerInfo.unnormalizedCoordinates = VK_FALSE;
    samplerInfo.compareEnable = VK_FALSE;
    samplerInfo.compareOp = VK_COMPARE_OP_ALWAYS;
    samplerInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
    samplerInfo.mipLodBias = 0.f;
    samplerInfo.minLod = 0.f;
    samplerInfo.maxLod = FLT_MAX;
    ERR_GUARD_VULKAN( vkCreateSampler(g_hDevice, &samplerInfo, g_Allocs, &g_hSampler) );

    CreateTexture(128, 128);
    CreateMesh();

    // Descriptor set layout: single combined image sampler at binding 1,
    // used by the fragment shader.
    VkDescriptorSetLayoutBinding samplerLayoutBinding = {};
    samplerLayoutBinding.binding = 1;
    samplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    samplerLayoutBinding.descriptorCount = 1;
    samplerLayoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;

    VkDescriptorSetLayoutCreateInfo descriptorSetLayoutInfo = { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO };
    descriptorSetLayoutInfo.bindingCount = 1;
    descriptorSetLayoutInfo.pBindings = &samplerLayoutBinding;
    ERR_GUARD_VULKAN( vkCreateDescriptorSetLayout(g_hDevice, &descriptorSetLayoutInfo, g_Allocs, &g_hDescriptorSetLayout) );

    // Create descriptor pool

    VkDescriptorPoolSize descriptorPoolSizes[2];
    ZeroMemory(descriptorPoolSizes, sizeof(descriptorPoolSizes));
    descriptorPoolSizes[0].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    descriptorPoolSizes[0].descriptorCount = 1;
    descriptorPoolSizes[1].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    descriptorPoolSizes[1].descriptorCount = 1;

    VkDescriptorPoolCreateInfo descriptorPoolInfo = { VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO };
    descriptorPoolInfo.poolSizeCount = (uint32_t)_countof(descriptorPoolSizes);
    descriptorPoolInfo.pPoolSizes = descriptorPoolSizes;
    descriptorPoolInfo.maxSets = 1;
    ERR_GUARD_VULKAN( vkCreateDescriptorPool(g_hDevice, &descriptorPoolInfo, g_Allocs, &g_hDescriptorPool) );

    // Create descriptor set layout

    VkDescriptorSetLayout descriptorSetLayouts[] = { g_hDescriptorSetLayout };
    VkDescriptorSetAllocateInfo descriptorSetInfo = { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO };
    descriptorSetInfo.descriptorPool = g_hDescriptorPool;
    descriptorSetInfo.descriptorSetCount = 1;
    descriptorSetInfo.pSetLayouts = descriptorSetLayouts;
    ERR_GUARD_VULKAN( vkAllocateDescriptorSets(g_hDevice, &descriptorSetInfo, &g_hDescriptorSet) );

    // Point the descriptor at the texture created above.
    VkDescriptorImageInfo descriptorImageInfo = {};
    descriptorImageInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    descriptorImageInfo.imageView = g_hTextureImageView;
    descriptorImageInfo.sampler = g_hSampler;

    VkWriteDescriptorSet writeDescriptorSet = { VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET };
    writeDescriptorSet.dstSet = g_hDescriptorSet;
    writeDescriptorSet.dstBinding = 1;
    writeDescriptorSet.dstArrayElement = 0;
    writeDescriptorSet.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    writeDescriptorSet.descriptorCount = 1;
    writeDescriptorSet.pImageInfo = &descriptorImageInfo;

    vkUpdateDescriptorSets(g_hDevice, 1, &writeDescriptorSet, 0, nullptr);

    CreateSwapchain();
}
2169
// Tears down everything created by InitializeApplication(), in roughly reverse
// creation order. Waits for the device to be idle first so no object is
// destroyed while the GPU may still use it. Each handle is checked against
// VK_NULL_HANDLE and reset afterwards, so the function is safe to call on a
// partially initialized application.
static void FinalizeApplication()
{
    vkDeviceWaitIdle(g_hDevice);

    // true: full teardown including objects kept alive across swapchain recreation.
    DestroySwapchain(true);

    if(g_hDescriptorPool != VK_NULL_HANDLE)
    {
        // Frees g_hDescriptorSet implicitly along with the pool.
        vkDestroyDescriptorPool(g_hDevice, g_hDescriptorPool, g_Allocs);
        g_hDescriptorPool = VK_NULL_HANDLE;
    }

    if(g_hDescriptorSetLayout != VK_NULL_HANDLE)
    {
        vkDestroyDescriptorSetLayout(g_hDevice, g_hDescriptorSetLayout, g_Allocs);
        g_hDescriptorSetLayout = VK_NULL_HANDLE;
    }

    if(g_hTextureImageView != VK_NULL_HANDLE)
    {
        vkDestroyImageView(g_hDevice, g_hTextureImageView, g_Allocs);
        g_hTextureImageView = VK_NULL_HANDLE;
    }
    if(g_hTextureImage != VK_NULL_HANDLE)
    {
        // VMA-allocated: destroys the image and frees its allocation together.
        vmaDestroyImage(g_hAllocator, g_hTextureImage, g_hTextureImageAlloc);
        g_hTextureImage = VK_NULL_HANDLE;
    }

    if(g_hIndexBuffer != VK_NULL_HANDLE)
    {
        vmaDestroyBuffer(g_hAllocator, g_hIndexBuffer, g_hIndexBufferAlloc);
        g_hIndexBuffer = VK_NULL_HANDLE;
    }
    if(g_hVertexBuffer != VK_NULL_HANDLE)
    {
        vmaDestroyBuffer(g_hAllocator, g_hVertexBuffer, g_hVertexBufferAlloc);
        g_hVertexBuffer = VK_NULL_HANDLE;
    }

    if(g_hSampler != VK_NULL_HANDLE)
    {
        vkDestroySampler(g_hDevice, g_hSampler, g_Allocs);
        g_hSampler = VK_NULL_HANDLE;
    }

    if(g_ImmediateFence)
    {
        vkDestroyFence(g_hDevice, g_ImmediateFence, g_Allocs);
        g_ImmediateFence = VK_NULL_HANDLE;
    }

    // NOTE: "Fances" is a pre-existing typo in the global's name (declared
    // elsewhere in this file) — kept as-is to match its definition.
    for(size_t i = COMMAND_BUFFER_COUNT; i--; )
    {
        if(g_MainCommandBufferExecutedFances[i] != VK_NULL_HANDLE)
        {
            vkDestroyFence(g_hDevice, g_MainCommandBufferExecutedFances[i], g_Allocs);
            g_MainCommandBufferExecutedFances[i] = VK_NULL_HANDLE;
        }
    }
    if(g_MainCommandBuffers[0] != VK_NULL_HANDLE)
    {
        vkFreeCommandBuffers(g_hDevice, g_hCommandPool, COMMAND_BUFFER_COUNT, g_MainCommandBuffers);
        ZeroMemory(g_MainCommandBuffers, sizeof(g_MainCommandBuffers));
    }
    if(g_hTemporaryCommandBuffer != VK_NULL_HANDLE)
    {
        vkFreeCommandBuffers(g_hDevice, g_hCommandPool, 1, &g_hTemporaryCommandBuffer);
        g_hTemporaryCommandBuffer = VK_NULL_HANDLE;
    }

    if(g_hCommandPool != VK_NULL_HANDLE)
    {
        vkDestroyCommandPool(g_hDevice, g_hCommandPool, g_Allocs);
        g_hCommandPool = VK_NULL_HANDLE;
    }

    if(g_hAllocator != VK_NULL_HANDLE)
    {
        // Must happen after all vmaDestroy* calls above.
        vmaDestroyAllocator(g_hAllocator);
        g_hAllocator = nullptr;
    }

    if(g_hDevice != VK_NULL_HANDLE)
    {
        vkDestroyDevice(g_hDevice, g_Allocs);
        g_hDevice = nullptr;
    }

    if(g_hSurface != VK_NULL_HANDLE)
    {
        vkDestroySurfaceKHR(g_hVulkanInstance, g_hSurface, g_Allocs);
        g_hSurface = VK_NULL_HANDLE;
    }
}
2265
// Dumps detailed VMA allocator statistics (JSON) to standard output.
// Compiled to a no-op when VMA_STATS_STRING_ENABLED is off.
static void PrintAllocatorStats()
{
#if VMA_STATS_STRING_ENABLED
    char* stats = nullptr;
    vmaBuildStatsString(g_hAllocator, &stats, true); // true = detailed map.
    printf("%s\n", stats);
    // The string is allocated by VMA and must be released through VMA.
    vmaFreeStatsString(g_hAllocator, stats);
#endif
}
2275
// Destroys and recreates the swapchain, e.g. after a window resize or when
// vkAcquireNextImageKHR reports VK_ERROR_OUT_OF_DATE_KHR. Waits for the device
// to go idle first so no in-flight work still references the old images.
static void RecreateSwapChain()
{
    vkDeviceWaitIdle(g_hDevice);
    // NOTE(review): `false` presumably means "partial teardown" (keep objects
    // that survive recreation), in contrast to DestroySwapchain(true) in
    // FinalizeApplication — confirm against DestroySwapchain's parameter.
    DestroySwapchain(false);
    CreateSwapchain();
}
2282
DrawFrame()2283 static void DrawFrame()
2284 {
2285 // Begin main command buffer
2286 size_t cmdBufIndex = (g_NextCommandBufferIndex++) % COMMAND_BUFFER_COUNT;
2287 VkCommandBuffer hCommandBuffer = g_MainCommandBuffers[cmdBufIndex];
2288 VkFence hCommandBufferExecutedFence = g_MainCommandBufferExecutedFances[cmdBufIndex];
2289
2290 ERR_GUARD_VULKAN( vkWaitForFences(g_hDevice, 1, &hCommandBufferExecutedFence, VK_TRUE, UINT64_MAX) );
2291 ERR_GUARD_VULKAN( vkResetFences(g_hDevice, 1, &hCommandBufferExecutedFence) );
2292
2293 VkCommandBufferBeginInfo commandBufferBeginInfo = { VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO };
2294 commandBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
2295 ERR_GUARD_VULKAN( vkBeginCommandBuffer(hCommandBuffer, &commandBufferBeginInfo) );
2296
2297 // Acquire swapchain image
2298 uint32_t imageIndex = 0;
2299 VkResult res = vkAcquireNextImageKHR(g_hDevice, g_hSwapchain, UINT64_MAX, g_hImageAvailableSemaphore, VK_NULL_HANDLE, &imageIndex);
2300 if(res == VK_ERROR_OUT_OF_DATE_KHR)
2301 {
2302 RecreateSwapChain();
2303 return;
2304 }
2305 else if(res < 0)
2306 {
2307 ERR_GUARD_VULKAN(res);
2308 }
2309
2310 // Record geometry pass
2311
2312 VkClearValue clearValues[2];
2313 ZeroMemory(clearValues, sizeof(clearValues));
2314 clearValues[0].color.float32[0] = 0.25f;
2315 clearValues[0].color.float32[1] = 0.25f;
2316 clearValues[0].color.float32[2] = 0.5f;
2317 clearValues[0].color.float32[3] = 1.0f;
2318 clearValues[1].depthStencil.depth = 1.0f;
2319
2320 VkRenderPassBeginInfo renderPassBeginInfo = { VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO };
2321 renderPassBeginInfo.renderPass = g_hRenderPass;
2322 renderPassBeginInfo.framebuffer = g_Framebuffers[imageIndex];
2323 renderPassBeginInfo.renderArea.offset.x = 0;
2324 renderPassBeginInfo.renderArea.offset.y = 0;
2325 renderPassBeginInfo.renderArea.extent = g_Extent;
2326 renderPassBeginInfo.clearValueCount = (uint32_t)_countof(clearValues);
2327 renderPassBeginInfo.pClearValues = clearValues;
2328 vkCmdBeginRenderPass(hCommandBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
2329
2330 vkCmdBindPipeline(
2331 hCommandBuffer,
2332 VK_PIPELINE_BIND_POINT_GRAPHICS,
2333 g_hPipeline);
2334
2335 mat4 view = mat4::LookAt(
2336 vec3(0.f, 0.f, 0.f),
2337 vec3(0.f, -2.f, 4.f),
2338 vec3(0.f, 1.f, 0.f));
2339 mat4 proj = mat4::Perspective(
2340 1.0471975511966f, // 60 degrees
2341 (float)g_Extent.width / (float)g_Extent.height,
2342 0.1f,
2343 1000.f);
2344 mat4 viewProj = view * proj;
2345
2346 vkCmdBindDescriptorSets(
2347 hCommandBuffer,
2348 VK_PIPELINE_BIND_POINT_GRAPHICS,
2349 g_hPipelineLayout,
2350 0,
2351 1,
2352 &g_hDescriptorSet,
2353 0,
2354 nullptr);
2355
2356 float rotationAngle = (float)GetTickCount() * 0.001f * (float)PI * 0.2f;
2357 mat4 model = mat4::RotationY(rotationAngle);
2358
2359 UniformBufferObject ubo = {};
2360 ubo.ModelViewProj = model * viewProj;
2361 vkCmdPushConstants(hCommandBuffer, g_hPipelineLayout, VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(UniformBufferObject), &ubo);
2362
2363 VkBuffer vertexBuffers[] = { g_hVertexBuffer };
2364 VkDeviceSize offsets[] = { 0 };
2365 vkCmdBindVertexBuffers(hCommandBuffer, 0, 1, vertexBuffers, offsets);
2366
2367 vkCmdBindIndexBuffer(hCommandBuffer, g_hIndexBuffer, 0, VK_INDEX_TYPE_UINT16);
2368
2369 vkCmdDrawIndexed(hCommandBuffer, g_IndexCount, 1, 0, 0, 0);
2370
2371 vkCmdEndRenderPass(hCommandBuffer);
2372
2373 vkEndCommandBuffer(hCommandBuffer);
2374
2375 // Submit command buffer
2376
2377 VkSemaphore submitWaitSemaphores[] = { g_hImageAvailableSemaphore };
2378 VkPipelineStageFlags submitWaitStages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT };
2379 VkSemaphore submitSignalSemaphores[] = { g_hRenderFinishedSemaphore };
2380 VkSubmitInfo submitInfo = { VK_STRUCTURE_TYPE_SUBMIT_INFO };
2381 submitInfo.waitSemaphoreCount = 1;
2382 submitInfo.pWaitSemaphores = submitWaitSemaphores;
2383 submitInfo.pWaitDstStageMask = submitWaitStages;
2384 submitInfo.commandBufferCount = 1;
2385 submitInfo.pCommandBuffers = &hCommandBuffer;
2386 submitInfo.signalSemaphoreCount = _countof(submitSignalSemaphores);
2387 submitInfo.pSignalSemaphores = submitSignalSemaphores;
2388 ERR_GUARD_VULKAN( vkQueueSubmit(g_hGraphicsQueue, 1, &submitInfo, hCommandBufferExecutedFence) );
2389
2390 VkSemaphore presentWaitSemaphores[] = { g_hRenderFinishedSemaphore };
2391
2392 VkSwapchainKHR swapchains[] = { g_hSwapchain };
2393 VkPresentInfoKHR presentInfo = { VK_STRUCTURE_TYPE_PRESENT_INFO_KHR };
2394 presentInfo.waitSemaphoreCount = _countof(presentWaitSemaphores);
2395 presentInfo.pWaitSemaphores = presentWaitSemaphores;
2396 presentInfo.swapchainCount = 1;
2397 presentInfo.pSwapchains = swapchains;
2398 presentInfo.pImageIndices = &imageIndex;
2399 presentInfo.pResults = nullptr;
2400 res = vkQueuePresentKHR(g_hPresentQueue, &presentInfo);
2401 if(res == VK_ERROR_OUT_OF_DATE_KHR)
2402 {
2403 RecreateSwapChain();
2404 }
2405 else
2406 ERR_GUARD_VULKAN(res);
2407 }
2408
HandlePossibleSizeChange()2409 static void HandlePossibleSizeChange()
2410 {
2411 RECT clientRect;
2412 GetClientRect(g_hWnd, &clientRect);
2413 LONG newSizeX = clientRect.right - clientRect.left;
2414 LONG newSizeY = clientRect.bottom - clientRect.top;
2415 if((newSizeX > 0) &&
2416 (newSizeY > 0) &&
2417 ((newSizeX != g_SizeX) || (newSizeY != g_SizeY)))
2418 {
2419 g_SizeX = newSizeX;
2420 g_SizeY = newSizeY;
2421
2422 RecreateSwapChain();
2423 }
2424 }
2425
// Shared exception-handling tail for a `try` block: prints the message of any
// std::exception (or a generic message for unknown exception types) to stderr,
// then executes extraCatchCode inside the catch block — pass `;` for "do
// nothing", or e.g. `return ...;` / `DestroyWindow(hWnd);` for cleanup.
#define CATCH_PRINT_ERROR(extraCatchCode) \
    catch(const std::exception& ex) \
    { \
        fwprintf(stderr, L"ERROR: %hs\n", ex.what()); \
        extraCatchCode \
    } \
    catch(...) \
    { \
        fwprintf(stderr, L"UNKNOWN ERROR.\n"); \
        extraCatchCode \
    }
2437
WndProc(HWND hWnd,UINT msg,WPARAM wParam,LPARAM lParam)2438 static LRESULT WINAPI WndProc(HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam)
2439 {
2440 switch(msg)
2441 {
2442 case WM_DESTROY:
2443 try
2444 {
2445 FinalizeApplication();
2446 }
2447 CATCH_PRINT_ERROR(;)
2448 PostQuitMessage(0);
2449 return 0;
2450
2451 // This prevents app from freezing when left Alt is pressed
2452 // (which normally enters modal menu loop).
2453 case WM_SYSKEYDOWN:
2454 case WM_SYSKEYUP:
2455 return 0;
2456
2457 case WM_SIZE:
2458 if((wParam == SIZE_MAXIMIZED) || (wParam == SIZE_RESTORED))
2459 {
2460 try
2461 {
2462 HandlePossibleSizeChange();
2463 }
2464 CATCH_PRINT_ERROR(DestroyWindow(hWnd);)
2465 }
2466 return 0;
2467
2468 case WM_EXITSIZEMOVE:
2469 try
2470 {
2471 HandlePossibleSizeChange();
2472 }
2473 CATCH_PRINT_ERROR(DestroyWindow(hWnd);)
2474 return 0;
2475
2476 case WM_KEYDOWN:
2477 switch(wParam)
2478 {
2479 case VK_ESCAPE:
2480 PostMessage(hWnd, WM_CLOSE, 0, 0);
2481 break;
2482 case 'T':
2483 try
2484 {
2485 Test();
2486 }
2487 CATCH_PRINT_ERROR(;)
2488 break;
2489 case 'S':
2490 if (g_SparseBindingEnabled)
2491 {
2492 try
2493 {
2494 TestSparseBinding();
2495 }
2496 CATCH_PRINT_ERROR(;)
2497 }
2498 else
2499 {
2500 printf("Sparse binding not supported.\n");
2501 }
2502 break;
2503 }
2504 return 0;
2505
2506 default:
2507 break;
2508 }
2509
2510 return DefWindowProc(hWnd, msg, wParam, lParam);
2511 }
2512
PrintLogo()2513 static void PrintLogo()
2514 {
2515 wprintf(L"%s\n", APP_TITLE_W);
2516 }
2517
// Prints the command-line usage summary to standard output.
// Fix: "sparese" -> "sparse" typo in the --TestSparseBinding description.
static void PrintHelp()
{
    wprintf(
        L"Command line syntax:\n"
        L"-h, --Help Print this information\n"
        L"-l, --List Print list of GPUs\n"
        L"-g S, --GPU S Select GPU with name containing S\n"
        L"-i N, --GPUIndex N Select GPU index N\n"
        L"-t, --Test Run tests and exit\n"
        L"-s, --TestSparseBinding Run sparse binding tests and exit\n"
        );
}
2530
MainWindow()2531 int MainWindow()
2532 {
2533 WNDCLASSEX wndClassDesc = { sizeof(WNDCLASSEX) };
2534 wndClassDesc.style = CS_VREDRAW | CS_HREDRAW | CS_DBLCLKS;
2535 wndClassDesc.hbrBackground = NULL;
2536 wndClassDesc.hCursor = LoadCursor(NULL, IDC_CROSS);
2537 wndClassDesc.hIcon = LoadIcon(NULL, IDI_APPLICATION);
2538 wndClassDesc.hInstance = g_hAppInstance;
2539 wndClassDesc.lpfnWndProc = WndProc;
2540 wndClassDesc.lpszClassName = WINDOW_CLASS_NAME;
2541
2542 const ATOM hWndClass = RegisterClassEx(&wndClassDesc);
2543 assert(hWndClass);
2544
2545 const DWORD style = WS_VISIBLE | WS_OVERLAPPED | WS_CAPTION | WS_SYSMENU | WS_MINIMIZEBOX | WS_MAXIMIZEBOX | WS_THICKFRAME;
2546 const DWORD exStyle = 0;
2547
2548 RECT rect = { 0, 0, g_SizeX, g_SizeY };
2549 AdjustWindowRectEx(&rect, style, FALSE, exStyle);
2550
2551 g_hWnd = CreateWindowEx(
2552 exStyle, WINDOW_CLASS_NAME, APP_TITLE_W, style,
2553 CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT,
2554 NULL, NULL, g_hAppInstance, NULL);
2555 assert(g_hWnd);
2556
2557 InitializeApplication();
2558 //PrintAllocatorStats();
2559
2560 // Run tests and close program
2561 if(g_CommandLineParameters.m_Test)
2562 Test();
2563 if(g_CommandLineParameters.m_TestSparseBinding)
2564 {
2565 if(g_SparseBindingEnabled)
2566 TestSparseBinding();
2567 else
2568 printf("Sparse binding not supported.\n");
2569 }
2570 if(g_CommandLineParameters.m_Test || g_CommandLineParameters.m_TestSparseBinding)
2571 PostMessage(g_hWnd, WM_CLOSE, 0, 0);
2572
2573 MSG msg;
2574 for(;;)
2575 {
2576 if(PeekMessage(&msg, NULL, 0, 0, PM_REMOVE))
2577 {
2578 if(msg.message == WM_QUIT)
2579 break;
2580 TranslateMessage(&msg);
2581 DispatchMessage(&msg);
2582 }
2583 else
2584 {
2585 DrawFrame();
2586 }
2587 }
2588
2589 return (int)msg.wParam;;
2590 }
2591
Main2(int argc,wchar_t ** argv)2592 int Main2(int argc, wchar_t** argv)
2593 {
2594 PrintLogo();
2595
2596 if(!g_CommandLineParameters.Parse(argc, argv))
2597 {
2598 wprintf(L"ERROR: Invalid command line syntax.\n");
2599 PrintHelp();
2600 return (int)ExitCode::CommandLineError;
2601 }
2602
2603 if(g_CommandLineParameters.m_Help)
2604 {
2605 PrintHelp();
2606 return (int)ExitCode::Help;
2607 }
2608
2609 VulkanUsage vulkanUsage;
2610 vulkanUsage.Init();
2611
2612 if(g_CommandLineParameters.m_List)
2613 {
2614 vulkanUsage.PrintPhysicalDeviceList();
2615 return (int)ExitCode::GPUList;
2616 }
2617
2618 g_hPhysicalDevice = vulkanUsage.SelectPhysicalDevice(g_CommandLineParameters.m_GPUSelection);
2619 TEST(g_hPhysicalDevice);
2620
2621 return MainWindow();
2622 }
2623
wmain(int argc,wchar_t ** argv)2624 int wmain(int argc, wchar_t** argv)
2625 {
2626 int result = 0;
2627 try
2628 {
2629 result = Main2(argc, argv);
2630 TEST(g_CpuAllocCount.load() == 0);
2631 }
2632 CATCH_PRINT_ERROR(return (int)ExitCode::RuntimeError;)
2633 return result;
2634 }
2635
2636 #else // #ifdef _WIN32
2637
2638 #include "VmaUsage.h"
2639
// Non-Windows build: this sample is Win32-only, so provide an empty stub
// entry point to keep the target buildable (implicitly returns 0).
int main()
{
}
2643
2644 #endif // #ifdef _WIN32
2645